From 883e65384f1e36a0310f4fdcff57ac486890cd5e Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Wed, 12 Oct 2022 10:22:22 -0400 Subject: [PATCH] Squashed 'spiffworkflow-backend/' content from commit 50f28073 git-subtree-dir: spiffworkflow-backend git-subtree-split: 50f28073add91265f00826bd175c8b2fff76cdc5 --- .cookiecutter.json | 12 + .darglint | 2 + .flake8 | 29 + .gitattributes | 1 + .github/dependabot.yml | 18 + .github/labels.yml | 66 + .github/release-drafter.yml | 29 + .../workflows/auto-merge-dependabot-prs.yml | 72 + .github/workflows/constraints.txt | 5 + .github/workflows/labeler.yml | 18 + .github/workflows/tests.yml | 260 ++ .gitignore | 21 + .pre-commit-config.yaml | 63 + .readthedocs.yml | 12 + .tool-versions | 1 + CODE_OF_CONDUCT.rst | 105 + CONTRIBUTING.rst | 123 + Dockerfile | 28 + LICENSE.rst | 504 +++ README.rst | 92 + bin/boot_in_docker_debug_mode | 14 + bin/boot_server_in_docker | 45 + bin/build_and_run_with_docker_compose | 42 + bin/deploy | 48 + bin/export_keycloak_realms | 25 + bin/finance-realm.json | 2175 ++++++++++ bin/get_bpmn_json_for_process_instance | 35 + bin/get_logs_from_docker_compose | 10 + bin/get_token | 103 + bin/git_commit_bpmn_models_repo | 36 + bin/import_tickets_for_command_line.py | 112 + bin/import_tickets_for_script_task.py | 110 + bin/keycloak_test_secrets.json | 12 + bin/keycloak_test_server.py | 104 + bin/quarkus-realm.json | 1964 +++++++++ bin/recreate_db | 49 + ...replicate_resource_set_denied_based_on_uri | 111 + .../testing-realm.json | 2815 +++++++++++++ bin/run_server_locally | 29 + bin/run_sql | 25 + bin/save_all_bpmn.py | 99 + bin/spiffworkflow-realm.json | 2834 +++++++++++++ bin/start_keycloak | 80 + bin/test_file_upload | 10 + bin/test_with_curl | 26 + bin/wait_for_keycloak | 24 + bin/wait_for_server_to_be_up | 24 + codecov.yml | 9 + conftest.py | 106 + docker-compose.yml | 100 + docs/codeofconduct.rst | 1 + docs/conf.py | 17 + docs/contributing.rst | 4 + docs/index.rst | 16 + docs/license.rst | 1 + 
docs/reference.rst | 9 + docs/requirements.txt | 3 + docs/usage.rst | 6 + log/.keep | 0 migrations/README | 1 + migrations/alembic.ini | 50 + migrations/env.py | 89 + migrations/script.py.mako | 24 + migrations/versions/88e30afd19ac_.py | 350 ++ noxfile.py | 220 + perms.yml | 32 + poetry.lock | 3524 +++++++++++++++++ pyproject.toml | 146 + sonar-project.properties | 13 + src/spiffworkflow_backend/__init__.py | 139 + src/spiffworkflow_backend/__main__.py | 13 + src/spiffworkflow_backend/api.yml | 2049 ++++++++++ src/spiffworkflow_backend/config/__init__.py | 73 + src/spiffworkflow_backend/config/default.py | 47 + .../config/development.py | 1 + src/spiffworkflow_backend/config/staging.py | 4 + src/spiffworkflow_backend/config/testing.py | 9 + .../process_entity_not_found_error.py | 5 + .../helpers/fixture_data.py | 1 + .../helpers/spiff_enum.py | 11 + .../load_database_models.py | 57 + src/spiffworkflow_backend/models/__init__.py | 1 + .../models/active_task.py | 72 + .../models/bpmn_process_id_lookup.py | 13 + .../models/data_store.py | 31 + src/spiffworkflow_backend/models/file.py | 179 + src/spiffworkflow_backend/models/group.py | 32 + .../models/message_correlation.py | 49 + .../message_correlation_message_instance.py | 32 + .../models/message_correlation_property.py | 25 + .../models/message_instance.py | 88 + .../models/message_model.py | 13 + .../message_triggerable_process_model.py | 22 + .../models/permission_assignment.py | 55 + .../models/permission_target.py | 26 + src/spiffworkflow_backend/models/principal.py | 30 + .../models/process_group.py | 62 + .../models/process_instance.py | 295 ++ .../models/process_instance_report.py | 335 ++ .../models/process_model.py | 90 + .../models/secret_model.py | 65 + .../models/spiff_logging.py | 27 + src/spiffworkflow_backend/models/task.py | 299 ++ .../models/task_event.py | 100 + src/spiffworkflow_backend/models/user.py | 117 + .../models/user_group_assignment.py | 24 + src/spiffworkflow_backend/py.typed | 0 
src/spiffworkflow_backend/routes/__init__.py | 1 + .../routes/admin_blueprint/__init__.py | 1 + .../routes/admin_blueprint/admin_blueprint.py | 186 + .../routes/admin_blueprint/static/app.js | 26 + .../admin_blueprint/static/package-lock.json | 3172 +++++++++++++++ .../admin_blueprint/static/package.json | 18 + .../routes/admin_blueprint/static/style.css | 2 + .../admin_blueprint/templates/layout.html | 23 + .../templates/process_group_show.html | 25 + .../templates/process_groups_list.html | 18 + .../templates/process_model_edit.html | 167 + .../templates/process_model_show.html | 159 + .../routes/process_api_blueprint.py | 1432 +++++++ src/spiffworkflow_backend/routes/user.py | 376 ++ .../routes/user_blueprint.py | 240 ++ .../scripts/fact_service.py | 41 + src/spiffworkflow_backend/scripts/get_env.py | 25 + src/spiffworkflow_backend/scripts/script.py | 112 + .../services/acceptance_test_fixtures.py | 43 + .../services/authentication_service.py | 213 + .../services/authorization_service.py | 340 ++ .../services/background_processing_service.py | 25 + .../services/email_service.py | 49 + .../services/error_handling_service.py | 113 + .../services/file_system_service.py | 200 + .../services/git_service.py | 56 + .../services/logging_service.py | 204 + .../services/message_service.py | 233 ++ .../services/process_instance_processor.py | 1134 ++++++ .../services/process_instance_service.py | 437 ++ .../services/process_model_service.py | 329 ++ .../services/script_unit_test_runner.py | 121 + .../services/secret_service.py | 209 + .../services/service_task_service.py | 75 + .../services/spec_file_service.py | 500 +++ .../services/user_service.py | 297 ++ tests/__init__.py | 1 + .../call_activity_nested_duplicate.bpmn | 72 + .../call_activity_level_2.bpmn | 55 + .../call_activity_level_2b.bpmn | 44 + .../call_activity_level_3.bpmn | 43 + .../call_activity_nested.bpmn | 55 + tests/data/call_activity_nested/level2c.dmn | 20 + .../call_activity_test.bpmn | 39 + 
.../callable_process.bpmn | 26 + tests/data/dangerous-scripts/read_env.bpmn | 42 + .../dangerous-scripts/read_etc_passwd.bpmn | 42 + tests/data/dot_notation/diagram.bpmn | 74 + tests/data/dot_notation/json_schema.json | 36 + tests/data/dot_notation/ui_schema.json | 19 + .../color_question.json | 18 + .../dynamic_enums_ask_for_color.bpmn | 57 + tests/data/error/error.bpmn | 39 + tests/data/hello_world/hello_world.bpmn | 91 + .../message_receiver.bpmn | 99 + .../message_sender.bpmn | 136 + .../message_receiver_one.bpmn | 114 + .../message_receiver_two.bpmn | 114 + .../message_sender.bpmn | 231 ++ tests/data/random_fact/random_fact.bpmn | 200 + tests/data/random_fact/random_fact2.bpmn | 200 + tests/data/sample/sample.bpmn | 62 + tests/data/sample/wonderful.dmn | 44 + .../script_with_unit_tests.bpmn | 70 + tests/data/simple_script/simple_script.bpmn | 67 + .../spiff_example/call_activity_multi.bpmn | 318 ++ tests/data/spiff_example/multiinstance.bpmn | 914 +++++ tests/data/spiff_example/product_prices.dmn | 83 + tests/data/spiff_example/shipping_costs.dmn | 38 + .../timer_intermediate_catch_event.bpmn | 44 + tests/data/user_task/user_task.bpmn | 65 + tests/files/.keep | 0 tests/spiffworkflow_backend/__init__.py | 1 + .../spiffworkflow_backend/helpers/__init__.py | 1 + .../helpers/base_test.py | 264 ++ .../helpers/example_data.py | 109 + .../helpers/test_data.py | 81 + .../integration/__init__.py | 1 + .../integration/bank-api-authz-config.json | 102 + .../integration/bpmn.json | 430 ++ .../integration/test_authentication.py | 174 + .../integration/test_authorization.py | 159 + .../integration/test_logging_service.py | 46 + .../integration/test_process_api.py | 1664 ++++++++ .../integration/test_secret_service.py | 494 +++ .../integration/test_user_blueprint.py | 190 + .../unit/test_acceptance_test_fixtures.py | 16 + .../unit/test_dot_notation.py | 56 + .../unit/test_environment_var_script.py | 23 + tests/spiffworkflow_backend/unit/test_file.py | 27 + 
.../unit/test_message_instance.py | 162 + .../unit/test_message_service.py | 246 ++ .../unit/test_permissions.py | 131 + .../unit/test_process_group.py | 16 + .../unit/test_process_instance_processor.py | 36 + .../unit/test_process_instance_report.py | 144 + .../unit/test_process_model.py | 107 + .../unit/test_process_model_service.py | 25 + .../unit/test_restricted_script_engine.py | 58 + .../unit/test_script_unit_test_runner.py | 138 + .../unit/test_spec_file_service.py | 97 + .../unit/test_spiff_logging.py | 45 + .../unit/test_various_bpmn_constructs.py | 27 + tests/test_main.py | 17 + wsgi.py | 13 + 212 files changed, 40908 insertions(+) create mode 100644 .cookiecutter.json create mode 100644 .darglint create mode 100644 .flake8 create mode 100644 .gitattributes create mode 100644 .github/dependabot.yml create mode 100644 .github/labels.yml create mode 100644 .github/release-drafter.yml create mode 100644 .github/workflows/auto-merge-dependabot-prs.yml create mode 100644 .github/workflows/constraints.txt create mode 100644 .github/workflows/labeler.yml create mode 100644 .github/workflows/tests.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .readthedocs.yml create mode 100644 .tool-versions create mode 100644 CODE_OF_CONDUCT.rst create mode 100644 CONTRIBUTING.rst create mode 100644 Dockerfile create mode 100644 LICENSE.rst create mode 100644 README.rst create mode 100755 bin/boot_in_docker_debug_mode create mode 100755 bin/boot_server_in_docker create mode 100755 bin/build_and_run_with_docker_compose create mode 100755 bin/deploy create mode 100755 bin/export_keycloak_realms create mode 100644 bin/finance-realm.json create mode 100755 bin/get_bpmn_json_for_process_instance create mode 100755 bin/get_logs_from_docker_compose create mode 100755 bin/get_token create mode 100755 bin/git_commit_bpmn_models_repo create mode 100644 bin/import_tickets_for_command_line.py create mode 100644 
bin/import_tickets_for_script_task.py create mode 100644 bin/keycloak_test_secrets.json create mode 100644 bin/keycloak_test_server.py create mode 100644 bin/quarkus-realm.json create mode 100755 bin/recreate_db create mode 100755 bin/replicate_resource_set_denied_based_on_uri_with_keycloak/replicate_resource_set_denied_based_on_uri create mode 100644 bin/replicate_resource_set_denied_based_on_uri_with_keycloak/testing-realm.json create mode 100755 bin/run_server_locally create mode 100755 bin/run_sql create mode 100644 bin/save_all_bpmn.py create mode 100644 bin/spiffworkflow-realm.json create mode 100755 bin/start_keycloak create mode 100644 bin/test_file_upload create mode 100755 bin/test_with_curl create mode 100755 bin/wait_for_keycloak create mode 100755 bin/wait_for_server_to_be_up create mode 100644 codecov.yml create mode 100644 conftest.py create mode 100644 docker-compose.yml create mode 100644 docs/codeofconduct.rst create mode 100644 docs/conf.py create mode 100644 docs/contributing.rst create mode 100644 docs/index.rst create mode 100644 docs/license.rst create mode 100644 docs/reference.rst create mode 100644 docs/requirements.txt create mode 100644 docs/usage.rst create mode 100644 log/.keep create mode 100644 migrations/README create mode 100644 migrations/alembic.ini create mode 100644 migrations/env.py create mode 100644 migrations/script.py.mako create mode 100644 migrations/versions/88e30afd19ac_.py create mode 100644 noxfile.py create mode 100644 perms.yml create mode 100644 poetry.lock create mode 100644 pyproject.toml create mode 100644 sonar-project.properties create mode 100644 src/spiffworkflow_backend/__init__.py create mode 100644 src/spiffworkflow_backend/__main__.py create mode 100755 src/spiffworkflow_backend/api.yml create mode 100644 src/spiffworkflow_backend/config/__init__.py create mode 100644 src/spiffworkflow_backend/config/default.py create mode 100644 src/spiffworkflow_backend/config/development.py create mode 100644 
src/spiffworkflow_backend/config/staging.py create mode 100644 src/spiffworkflow_backend/config/testing.py create mode 100644 src/spiffworkflow_backend/exceptions/process_entity_not_found_error.py create mode 100644 src/spiffworkflow_backend/helpers/fixture_data.py create mode 100644 src/spiffworkflow_backend/helpers/spiff_enum.py create mode 100644 src/spiffworkflow_backend/load_database_models.py create mode 100644 src/spiffworkflow_backend/models/__init__.py create mode 100644 src/spiffworkflow_backend/models/active_task.py create mode 100644 src/spiffworkflow_backend/models/bpmn_process_id_lookup.py create mode 100644 src/spiffworkflow_backend/models/data_store.py create mode 100644 src/spiffworkflow_backend/models/file.py create mode 100644 src/spiffworkflow_backend/models/group.py create mode 100644 src/spiffworkflow_backend/models/message_correlation.py create mode 100644 src/spiffworkflow_backend/models/message_correlation_message_instance.py create mode 100644 src/spiffworkflow_backend/models/message_correlation_property.py create mode 100644 src/spiffworkflow_backend/models/message_instance.py create mode 100644 src/spiffworkflow_backend/models/message_model.py create mode 100644 src/spiffworkflow_backend/models/message_triggerable_process_model.py create mode 100644 src/spiffworkflow_backend/models/permission_assignment.py create mode 100644 src/spiffworkflow_backend/models/permission_target.py create mode 100644 src/spiffworkflow_backend/models/principal.py create mode 100644 src/spiffworkflow_backend/models/process_group.py create mode 100644 src/spiffworkflow_backend/models/process_instance.py create mode 100644 src/spiffworkflow_backend/models/process_instance_report.py create mode 100644 src/spiffworkflow_backend/models/process_model.py create mode 100644 src/spiffworkflow_backend/models/secret_model.py create mode 100644 src/spiffworkflow_backend/models/spiff_logging.py create mode 100644 src/spiffworkflow_backend/models/task.py create mode 100644 
src/spiffworkflow_backend/models/task_event.py create mode 100644 src/spiffworkflow_backend/models/user.py create mode 100644 src/spiffworkflow_backend/models/user_group_assignment.py create mode 100644 src/spiffworkflow_backend/py.typed create mode 100644 src/spiffworkflow_backend/routes/__init__.py create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/__init__.py create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/static/app.js create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/static/package-lock.json create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/static/package.json create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/static/style.css create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/templates/layout.html create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/templates/process_group_show.html create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/templates/process_groups_list.html create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_edit.html create mode 100644 src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_show.html create mode 100644 src/spiffworkflow_backend/routes/process_api_blueprint.py create mode 100644 src/spiffworkflow_backend/routes/user.py create mode 100644 src/spiffworkflow_backend/routes/user_blueprint.py create mode 100644 src/spiffworkflow_backend/scripts/fact_service.py create mode 100644 src/spiffworkflow_backend/scripts/get_env.py create mode 100644 src/spiffworkflow_backend/scripts/script.py create mode 100644 src/spiffworkflow_backend/services/acceptance_test_fixtures.py create mode 100644 src/spiffworkflow_backend/services/authentication_service.py create mode 100644 src/spiffworkflow_backend/services/authorization_service.py create mode 100644 
src/spiffworkflow_backend/services/background_processing_service.py create mode 100644 src/spiffworkflow_backend/services/email_service.py create mode 100644 src/spiffworkflow_backend/services/error_handling_service.py create mode 100644 src/spiffworkflow_backend/services/file_system_service.py create mode 100644 src/spiffworkflow_backend/services/git_service.py create mode 100644 src/spiffworkflow_backend/services/logging_service.py create mode 100644 src/spiffworkflow_backend/services/message_service.py create mode 100644 src/spiffworkflow_backend/services/process_instance_processor.py create mode 100644 src/spiffworkflow_backend/services/process_instance_service.py create mode 100644 src/spiffworkflow_backend/services/process_model_service.py create mode 100644 src/spiffworkflow_backend/services/script_unit_test_runner.py create mode 100644 src/spiffworkflow_backend/services/secret_service.py create mode 100644 src/spiffworkflow_backend/services/service_task_service.py create mode 100644 src/spiffworkflow_backend/services/spec_file_service.py create mode 100644 src/spiffworkflow_backend/services/user_service.py create mode 100644 tests/__init__.py create mode 100644 tests/data/call_activity_duplicate/call_activity_nested_duplicate.bpmn create mode 100644 tests/data/call_activity_nested/call_activity_level_2.bpmn create mode 100644 tests/data/call_activity_nested/call_activity_level_2b.bpmn create mode 100644 tests/data/call_activity_nested/call_activity_level_3.bpmn create mode 100644 tests/data/call_activity_nested/call_activity_nested.bpmn create mode 100644 tests/data/call_activity_nested/level2c.dmn create mode 100644 tests/data/call_activity_same_directory/call_activity_test.bpmn create mode 100644 tests/data/call_activity_same_directory/callable_process.bpmn create mode 100644 tests/data/dangerous-scripts/read_env.bpmn create mode 100644 tests/data/dangerous-scripts/read_etc_passwd.bpmn create mode 100644 tests/data/dot_notation/diagram.bpmn create mode 
100644 tests/data/dot_notation/json_schema.json create mode 100644 tests/data/dot_notation/ui_schema.json create mode 100644 tests/data/dynamic_enum_select_fields/color_question.json create mode 100644 tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn create mode 100644 tests/data/error/error.bpmn create mode 100644 tests/data/hello_world/hello_world.bpmn create mode 100644 tests/data/message_send_one_conversation/message_receiver.bpmn create mode 100644 tests/data/message_send_one_conversation/message_sender.bpmn create mode 100644 tests/data/message_send_two_conversations/message_receiver_one.bpmn create mode 100644 tests/data/message_send_two_conversations/message_receiver_two.bpmn create mode 100644 tests/data/message_send_two_conversations/message_sender.bpmn create mode 100644 tests/data/random_fact/random_fact.bpmn create mode 100644 tests/data/random_fact/random_fact2.bpmn create mode 100644 tests/data/sample/sample.bpmn create mode 100644 tests/data/sample/wonderful.dmn create mode 100644 tests/data/script_with_unit_tests/script_with_unit_tests.bpmn create mode 100644 tests/data/simple_script/simple_script.bpmn create mode 100644 tests/data/spiff_example/call_activity_multi.bpmn create mode 100644 tests/data/spiff_example/multiinstance.bpmn create mode 100644 tests/data/spiff_example/product_prices.dmn create mode 100644 tests/data/spiff_example/shipping_costs.dmn create mode 100644 tests/data/timer_intermediate_catch_event/timer_intermediate_catch_event.bpmn create mode 100644 tests/data/user_task/user_task.bpmn create mode 100644 tests/files/.keep create mode 100644 tests/spiffworkflow_backend/__init__.py create mode 100644 tests/spiffworkflow_backend/helpers/__init__.py create mode 100644 tests/spiffworkflow_backend/helpers/base_test.py create mode 100644 tests/spiffworkflow_backend/helpers/example_data.py create mode 100644 tests/spiffworkflow_backend/helpers/test_data.py create mode 100644 
tests/spiffworkflow_backend/integration/__init__.py create mode 100644 tests/spiffworkflow_backend/integration/bank-api-authz-config.json create mode 100644 tests/spiffworkflow_backend/integration/bpmn.json create mode 100644 tests/spiffworkflow_backend/integration/test_authentication.py create mode 100644 tests/spiffworkflow_backend/integration/test_authorization.py create mode 100644 tests/spiffworkflow_backend/integration/test_logging_service.py create mode 100644 tests/spiffworkflow_backend/integration/test_process_api.py create mode 100644 tests/spiffworkflow_backend/integration/test_secret_service.py create mode 100644 tests/spiffworkflow_backend/integration/test_user_blueprint.py create mode 100644 tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py create mode 100644 tests/spiffworkflow_backend/unit/test_dot_notation.py create mode 100644 tests/spiffworkflow_backend/unit/test_environment_var_script.py create mode 100644 tests/spiffworkflow_backend/unit/test_file.py create mode 100644 tests/spiffworkflow_backend/unit/test_message_instance.py create mode 100644 tests/spiffworkflow_backend/unit/test_message_service.py create mode 100644 tests/spiffworkflow_backend/unit/test_permissions.py create mode 100644 tests/spiffworkflow_backend/unit/test_process_group.py create mode 100644 tests/spiffworkflow_backend/unit/test_process_instance_processor.py create mode 100644 tests/spiffworkflow_backend/unit/test_process_instance_report.py create mode 100644 tests/spiffworkflow_backend/unit/test_process_model.py create mode 100644 tests/spiffworkflow_backend/unit/test_process_model_service.py create mode 100644 tests/spiffworkflow_backend/unit/test_restricted_script_engine.py create mode 100644 tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py create mode 100644 tests/spiffworkflow_backend/unit/test_spec_file_service.py create mode 100644 tests/spiffworkflow_backend/unit/test_spiff_logging.py create mode 100644 
tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py create mode 100644 tests/test_main.py create mode 100644 wsgi.py diff --git a/.cookiecutter.json b/.cookiecutter.json new file mode 100644 index 00000000..84cc583a --- /dev/null +++ b/.cookiecutter.json @@ -0,0 +1,12 @@ +{ + "_template": "gh:cjolowicz/cookiecutter-hypermodern-python", + "author": "Sartography", + "development_status": "Development Status :: 1 - Planning", + "email": "sartography@users.noreply.github.com", + "friendly_name": "Spiffworkflow Backend", + "github_user": "sartography", + "license": "MIT", + "package_name": "spiffworkflow_backend", + "project_name": "spiffworkflow-backend", + "version": "0.0.1" +} diff --git a/.darglint b/.darglint new file mode 100644 index 00000000..72ccc6c5 --- /dev/null +++ b/.darglint @@ -0,0 +1,2 @@ +[darglint] +strictness = long diff --git a/.flake8 b/.flake8 new file mode 100644 index 00000000..16f7c559 --- /dev/null +++ b/.flake8 @@ -0,0 +1,29 @@ +[flake8] +select = B,B9,C,D,DAR,E,F,N,RST,S,W +ignore = E203,E501,RST201,RST203,RST301,W503,S410,S320 +max-line-length = 120 +max-complexity = 30 +docstring-convention = google +rst-roles = class,const,func,meth,mod,ref +rst-directives = deprecated + +per-file-ignores = + # prefer naming tests descriptively rather than forcing comments + tests/*:S101,D103 + + bin/keycloak_test_server.py:B950,D + conftest.py:S105 + wsgi.py:S104 + + # allow writing to /tmp for throwaway script output + bin/get_bpmn_json_for_process_instance:S108 + + # the exclude=./migrations option doesn't seem to work with pre-commit + # migrations are autogenerated from "flask db migration" so ignore them + migrations/*:D + src/spiffworkflow_backend/config/testing.py:S105 + src/spiffworkflow_backend/load_database_models.py:F401 + + # this file overwrites methods from the logging library so we can't change them + # and ignore long comment line + src/spiffworkflow_backend/services/logging_service.py:N802,B950 diff --git a/.gitattributes 
b/.gitattributes new file mode 100644 index 00000000..6313b56c --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text=auto eol=lf diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..a0a5c735 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,18 @@ +version: 2 +updates: + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + - package-ecosystem: pip + directory: "/.github/workflows" + schedule: + interval: daily + - package-ecosystem: pip + directory: "/docs" + schedule: + interval: daily + - package-ecosystem: pip + directory: "/" + schedule: + interval: daily diff --git a/.github/labels.yml b/.github/labels.yml new file mode 100644 index 00000000..f7f83aad --- /dev/null +++ b/.github/labels.yml @@ -0,0 +1,66 @@ +--- +# Labels names are important as they are used by Release Drafter to decide +# regarding where to record them in changelog or if to skip them. +# +# The repository labels will be automatically configured using this file and +# the GitHub Action https://github.com/marketplace/actions/github-labeler. 
+- name: breaking + description: Breaking Changes + color: bfd4f2 +- name: bug + description: Something isn't working + color: d73a4a +- name: build + description: Build System and Dependencies + color: bfdadc +- name: ci + description: Continuous Integration + color: 4a97d6 +- name: dependencies + description: Pull requests that update a dependency file + color: 0366d6 +- name: documentation + description: Improvements or additions to documentation + color: 0075ca +- name: duplicate + description: This issue or pull request already exists + color: cfd3d7 +- name: enhancement + description: New feature or request + color: a2eeef +- name: github_actions + description: Pull requests that update Github_actions code + color: "000000" +- name: good first issue + description: Good for newcomers + color: 7057ff +- name: help wanted + description: Extra attention is needed + color: 008672 +- name: invalid + description: This doesn't seem right + color: e4e669 +- name: performance + description: Performance + color: "016175" +- name: python + description: Pull requests that update Python code + color: 2b67c6 +- name: question + description: Further information is requested + color: d876e3 +- name: refactoring + description: Refactoring + color: ef67c4 +- name: removal + description: Removals and Deprecations + color: 9ae7ea +- name: style + description: Style + color: c120e5 +- name: testing + description: Testing + color: b1fc6f +- name: wontfix + description: This will not be worked on + color: ffffff diff --git a/.github/release-drafter.yml b/.github/release-drafter.yml new file mode 100644 index 00000000..7a04410f --- /dev/null +++ b/.github/release-drafter.yml @@ -0,0 +1,29 @@ +categories: + - title: ":boom: Breaking Changes" + label: "breaking" + - title: ":rocket: Features" + label: "enhancement" + - title: ":fire: Removals and Deprecations" + label: "removal" + - title: ":beetle: Fixes" + label: "bug" + - title: ":racehorse: Performance" + label: "performance" + - 
title: ":rotating_light: Testing" + label: "testing" + - title: ":construction_worker: Continuous Integration" + label: "ci" + - title: ":books: Documentation" + label: "documentation" + - title: ":hammer: Refactoring" + label: "refactoring" + - title: ":lipstick: Style" + label: "style" + - title: ":package: Dependencies" + labels: + - "dependencies" + - "build" +template: | + ## Changes + + $CHANGES diff --git a/.github/workflows/auto-merge-dependabot-prs.yml b/.github/workflows/auto-merge-dependabot-prs.yml new file mode 100644 index 00000000..b5c60e1d --- /dev/null +++ b/.github/workflows/auto-merge-dependabot-prs.yml @@ -0,0 +1,72 @@ +name: Dependabot auto-merge +on: + workflow_run: + workflows: ["Tests"] + # completed does not mean success of Tests workflow. see below checking github.event.workflow_run.conclusion + types: + - completed + +# workflow_call is used to indicate that a workflow can be called by another workflow. When a workflow is triggered with the workflow_call event, the event payload in the called workflow is the same event payload from the calling workflow. For more information see, "Reusing workflows." + +# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request +# maybe hook into this instead of workflow_run: +# on: +# pull_request: +# pull_request_target: +# types: [labeled] + +permissions: + contents: write + +jobs: + # print the context for debugging in case a job gets skipped + printJob: + name: Print event + runs-on: ubuntu-latest + steps: + - name: Dump GitHub context + env: + GITHUB_CONTEXT: ${{ toJson(github) }} + run: | + echo "$GITHUB_CONTEXT" + + dependabot: + runs-on: ubuntu-latest + if: ${{ github.actor == 'dependabot[bot]' && github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }} + steps: + - name: Development Code + uses: actions/checkout@v3 + + ###### GET PR NUMBER + # we saved the pr_number in tests.yml. 
fetch it so we can merge the correct PR. + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run + - name: "Download artifact" + uses: actions/github-script@v6 + with: + script: | + let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({ + owner: context.repo.owner, + repo: context.repo.repo, + run_id: context.payload.workflow_run.id, + }); + let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => { + return artifact.name == "pr_number" + })[0]; + let download = await github.rest.actions.downloadArtifact({ + owner: context.repo.owner, + repo: context.repo.repo, + artifact_id: matchArtifact.id, + archive_format: 'zip', + }); + let fs = require('fs'); + fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data)); + - name: "Unzip artifact" + run: unzip pr_number.zip + ########### + + - name: print pr number + run: cat pr_number + - name: actually merge it + run: gh pr merge --auto --merge "$(cat pr_number)" + env: + GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt new file mode 100644 index 00000000..52d4485d --- /dev/null +++ b/.github/workflows/constraints.txt @@ -0,0 +1,5 @@ +pip==22.2.2 +nox==2022.8.7 +nox-poetry==1.0.1 +poetry==1.2.1 +virtualenv==20.16.5 diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml new file mode 100644 index 00000000..f1955376 --- /dev/null +++ b/.github/workflows/labeler.yml @@ -0,0 +1,18 @@ +name: Labeler + +on: + push: + branches: + - main + +jobs: + labeler: + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + + - name: Run Labeler + uses: crazy-max/ghaction-github-labeler@v3.1.1 + with: + skip-delete: true diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 00000000..47c5f4a2 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,260 @@ 
+name: Tests + +on: + - push + - pull_request + +jobs: + tests: + name: ${{ matrix.session }} ${{ matrix.python }} / ${{ matrix.os }} ${{ matrix.database }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + include: + - { python: "3.10", os: "ubuntu-latest", session: "pre-commit" } + - { python: "3.10", os: "ubuntu-latest", session: "safety" } + - { python: "3.10", os: "ubuntu-latest", session: "mypy" } + - { python: "3.9", os: "ubuntu-latest", session: "mypy" } + - { + python: "3.10", + os: "ubuntu-latest", + session: "tests", + database: "mysql", + } + - { + python: "3.10", + os: "ubuntu-latest", + session: "tests", + database: "postgres", + } + - { + python: "3.10", + os: "ubuntu-latest", + session: "tests", + database: "sqlite", + } + - { + python: "3.9", + os: "ubuntu-latest", + session: "tests", + database: "sqlite", + } + - { + python: "3.10", + os: "windows-latest", + session: "tests", + database: "sqlite", + } + - { + python: "3.10", + os: "macos-latest", + session: "tests", + database: "sqlite", + } + - { + # typeguard 2.13.3 is broken with TypeDict in 3.10. + # probably the next release fixes it. 
+ # https://github.com/agronholm/typeguard/issues/242 + python: "3.9", + os: "ubuntu-latest", + session: "typeguard", + database: "sqlite", + } + - { python: "3.10", os: "ubuntu-latest", session: "xdoctest" } + - { python: "3.10", os: "ubuntu-latest", session: "docs-build" } + + env: + NOXSESSION: ${{ matrix.session }} + SPIFF_DATABASE_TYPE: ${{ matrix.database }} + FORCE_COLOR: "1" + PRE_COMMIT_COLOR: "always" + DB_PASSWORD: password + FLASK_SESSION_SECRET_KEY: super_secret_key + + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + + - name: Set up Python ${{ matrix.python }} + uses: actions/setup-python@v4.2.0 + with: + python-version: ${{ matrix.python }} + + - name: Upgrade pip + run: | + pip install --constraint=.github/workflows/constraints.txt pip + pip --version + + - name: Upgrade pip in virtual environments + shell: python + run: | + import os + import pip + + with open(os.environ["GITHUB_ENV"], mode="a") as io: + print(f"VIRTUALENV_PIP={pip.__version__}", file=io) + + - name: Install Poetry + run: | + pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry + poetry --version + + - name: Install Nox + run: | + pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox + pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry + nox --version + + - name: Compute pre-commit cache key + if: matrix.session == 'pre-commit' + id: pre-commit-cache + shell: python + run: | + import hashlib + import sys + + python = "py{}.{}".format(*sys.version_info[:2]) + payload = sys.version.encode() + sys.executable.encode() + digest = hashlib.sha256(payload).hexdigest() + result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8]) + + print("::set-output name=result::{}".format(result)) + + - name: Restore pre-commit cache + uses: actions/cache@v3.0.10 + if: matrix.session == 'pre-commit' + with: + path: ~/.cache/pre-commit + key: ${{ steps.pre-commit-cache.outputs.result 
}}-${{ hashFiles('.pre-commit-config.yaml') }} + restore-keys: | + ${{ steps.pre-commit-cache.outputs.result }}- + - name: Setup Mysql + uses: mirromutth/mysql-action@v1.1 + with: + host port: 3306 + container port: 3306 + mysql version: "8.0" + mysql database: "spiffworkflow_backend_testing" + mysql root password: password + if: matrix.database == 'mysql' + + - name: Setup Postgres + run: docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres + if: matrix.database == 'postgres' + + - name: Run Nox + run: | + nox --force-color --python=${{ matrix.python }} + + - name: Upload coverage data + # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later + if: always() && matrix.session == 'tests' && matrix.python == '3.10' && matrix.os == 'ubuntu-latest' + uses: "actions/upload-artifact@v3.0.0" + with: + name: coverage-data + path: ".coverage.*" + + - name: Upload documentation + if: matrix.session == 'docs-build' + uses: actions/upload-artifact@v3.0.0 + with: + name: docs + path: docs/_build + + - name: Upload logs + if: failure() && matrix.session == 'tests' + uses: "actions/upload-artifact@v3.0.0" + with: + name: logs-${{matrix.python}}-${{matrix.os}}-${{matrix.database}} + path: "./log/*.log" + + check_docker_start_script: + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + - name: start_backend + run: ./bin/build_and_run_with_docker_compose + timeout-minutes: 20 + env: + SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA: "true" + - name: wait_for_backend + run: ./bin/wait_for_server_to_be_up 5 + + coverage: + runs-on: ubuntu-latest + needs: tests + steps: + - name: Check out the repository + uses: actions/checkout@v3.0.2 + with: + # Disabling 
shallow clone is recommended for improving relevancy of reporting in sonarcloud + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v4.2.0 + with: + python-version: "3.10" + + - name: Upgrade pip + run: | + pip install --constraint=.github/workflows/constraints.txt pip + pip --version + + - name: Install Poetry + run: | + pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry + poetry --version + + - name: Install Nox + run: | + pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox + pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry + nox --version + + - name: Download coverage data + uses: actions/download-artifact@v3.0.0 + with: + name: coverage-data + + - name: Combine coverage data and display human readable report + run: | + find . -name \*.pyc -delete + nox --force-color --session=coverage + + - name: Create coverage report + run: | + nox --force-color --session=coverage -- xml + + - name: Upload coverage report + uses: codecov/codecov-action@v3.1.0 + + - name: SonarCloud Scan + uses: sonarsource/sonarcloud-github-action@master + # thought about just skipping dependabot + # if: ${{ github.actor != 'dependabot[bot]' }} + # but figured all pull requests seems better, since none of them will have access to sonarcloud. 
+ # however, with just skipping pull requests, the build associated with "Triggered via push" is also associated with the pull request and also fails hitting sonarcloud + # if: ${{ github.event_name != 'pull_request' }} + # so just skip everything but main + if: github.ref_name == 'main' + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + # part about saving PR number and then using it from auto-merge-dependabot-prs from: + # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run + - name: Save PR number + if: ${{ github.event_name == 'pull_request' }} + env: + PR_NUMBER: ${{ github.event.number }} + run: | + mkdir -p ./pr + echo "$PR_NUMBER" > ./pr/pr_number + - uses: actions/upload-artifact@v3 + with: + name: pr_number + path: pr/ diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..22fea4c9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,21 @@ +.mypy_cache/ +/.idea/ +/.coverage +/.coverage.* +/.nox/ +/.python-version +/.pytype/ +/dist/ +/docs/_build/ +/src/*.egg-info/ +/src/instance +__pycache__/ +*.sqlite3 +node_modules +/pyrightconfig.json +/tests/files/tickets.csv +/log/*.log +/tests/spiffworkflow_backend/files +/bin/import_secrets.py +/src/spiffworkflow_backend/config/secrets.py +_null-ls_* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..b4c05e7c --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,63 @@ +repos: + - repo: local + hooks: + - id: black + name: black + entry: black + language: system + types: [python] + require_serial: true + exclude: ^migrations/ + - id: check-added-large-files + name: Check for added large files + entry: check-added-large-files + language: system + - id: check-toml + name: Check Toml + entry: check-toml + language: system + types: [toml] + - id: check-yaml + name: Check Yaml + entry: check-yaml + language: system + types: [yaml] + - id: end-of-file-fixer + name: Fix End of 
Files + entry: end-of-file-fixer + language: system + types: [text] + stages: [commit, push, manual] + - id: flake8 + name: flake8 + entry: flake8 + language: system + types: [python] + require_serial: true + exclude: ^migrations/ + - id: pyupgrade + name: pyupgrade + description: Automatically upgrade syntax for newer versions. + entry: pyupgrade + language: system + types: [python] + args: [--py37-plus] + - id: reorder-python-imports + name: Reorder python imports + entry: reorder-python-imports + language: system + types: [python] + args: [--application-directories=src] + exclude: "(^migrations/|load_database_models)" + - id: trailing-whitespace + name: Trim Trailing Whitespace + entry: trailing-whitespace-fixer + language: system + types: [text] + stages: [commit, push, manual] + exclude: ^migrations/ + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.4.1 + hooks: + - id: prettier + exclude_types: [html] diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..66f2a214 --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,12 @@ +version: 2 +build: + os: ubuntu-20.04 + tools: + python: "3.10" +sphinx: + configuration: docs/conf.py +formats: all +python: + install: + - requirements: docs/requirements.txt + - path: . 
diff --git a/.tool-versions b/.tool-versions new file mode 100644 index 00000000..7e78d9af --- /dev/null +++ b/.tool-versions @@ -0,0 +1 @@ +python 3.10.4 diff --git a/CODE_OF_CONDUCT.rst b/CODE_OF_CONDUCT.rst new file mode 100644 index 00000000..0af086d5 --- /dev/null +++ b/CODE_OF_CONDUCT.rst @@ -0,0 +1,105 @@ +Contributor Covenant Code of Conduct +==================================== + +Our Pledge +---------- + +We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. + + +Our Standards +------------- + +Examples of behavior that contributes to a positive environment for our community include: + +- Demonstrating empathy and kindness toward other people +- Being respectful of differing opinions, viewpoints, and experiences +- Giving and gracefully accepting constructive feedback +- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience +- Focusing on what is best not just for us as individuals, but for the overall community + +Examples of unacceptable behavior include: + +- The use of sexualized language or imagery, and sexual attention or + advances of any kind +- Trolling, insulting or derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or email + address, without their explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +Enforcement Responsibilities +---------------------------- + +Community 
leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate. + + +Scope +----- + +This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. + + +Enforcement +----------- + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at sartography@users.noreply.github.com. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the reporter of any incident. + + +Enforcement Guidelines +---------------------- + +Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: + + +1. Correction +~~~~~~~~~~~~~ + +**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. + + +2. Warning +~~~~~~~~~~ + +**Community Impact**: A violation through a single incident or series of actions. 
+ +**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. + + +3. Temporary Ban +~~~~~~~~~~~~~~~~ + +**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. + + +4. Permanent Ban +~~~~~~~~~~~~~~~~ + +**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the community. + + +Attribution +----------- + +This Code of Conduct is adapted from the `Contributor Covenant <https://www.contributor-covenant.org>`__, version 2.0, +available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by `Mozilla’s code of conduct enforcement ladder <https://github.com/mozilla/diversity>`__. + +.. _homepage: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations. 
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 00000000..8604c594 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,123 @@ +Contributor Guide +================= + +Thank you for your interest in improving this project. +This project is open-source under the `MIT license`_ and +welcomes contributions in the form of bug reports, feature requests, and pull requests. + +Here is a list of important resources for contributors: + +- `Source Code`_ +- `Documentation`_ +- `Issue Tracker`_ +- `Code of Conduct`_ + +.. _MIT license: https://opensource.org/licenses/MIT +.. _Source Code: https://github.com/sartography/spiffworkflow-backend +.. _Documentation: https://spiffworkflow-backend.readthedocs.io/ +.. _Issue Tracker: https://github.com/sartography/spiffworkflow-backend/issues + +How to report a bug +------------------- + +Report bugs on the `Issue Tracker`_. + +When filing an issue, make sure to answer these questions: + +- Which operating system and Python version are you using? +- Which version of this project are you using? +- What did you do? +- What did you expect to see? +- What did you see instead? + +The best way to get your bug fixed is to provide a test case, +and/or steps to reproduce the issue. + + +How to request a feature +------------------------ + +Request features on the `Issue Tracker`_. + + +How to set up your development environment +------------------------------------------ + +You need Python 3.9+ and the following tools: + +- Poetry_ +- Nox_ +- nox-poetry_ + +Install the package with development requirements: + +.. code:: console + + $ poetry install + +You can now run an interactive Python session, +or the command-line interface: + +.. code:: console + + $ poetry run python + $ poetry run spiffworkflow-backend + +.. _Poetry: https://python-poetry.org/ +.. _Nox: https://nox.thea.codes/ +.. _nox-poetry: https://nox-poetry.readthedocs.io/ + + +How to test the project +----------------------- + +Run the full test suite: + +.. 
code:: console + + $ nox + +List the available Nox sessions: + +.. code:: console + + $ nox --list-sessions + +You can also run a specific Nox session. +For example, invoke the unit test suite like this: + +.. code:: console + + $ nox --session=tests + +Unit tests are located in the ``tests`` directory, +and are written using the pytest_ testing framework. + +.. _pytest: https://pytest.readthedocs.io/ + + +How to submit changes +--------------------- + +Open a `pull request`_ to submit changes to this project. + +Your pull request needs to meet the following guidelines for acceptance: + +- The Nox test suite must pass without errors and warnings. +- Include unit tests. This project maintains 100% code coverage. +- If your changes add functionality, update the documentation accordingly. + +Feel free to submit early, though—we can always iterate on this. + +To run linting and code formatting checks before committing your change, you can install pre-commit as a Git hook by running the following command: + +.. code:: console + + $ nox --session=pre-commit -- install + +It is recommended to open an issue before starting work on anything. +This will allow a chance to talk it over with the owners and validate your approach. + +.. _pull request: https://github.com/sartography/spiffworkflow-backend/pulls +.. github-only +.. _Code of Conduct: CODE_OF_CONDUCT.rst diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..6c3fabcf --- /dev/null +++ b/Dockerfile @@ -0,0 +1,28 @@ +FROM ghcr.io/sartography/python:3.10 + +RUN pip install poetry +RUN useradd _gunicorn --no-create-home --user-group + +RUN apt-get update && \ + apt-get install -y -q \ + gcc libssl-dev \ + curl git-core libpq-dev \ + gunicorn3 default-mysql-client + +WORKDIR /app +COPY pyproject.toml poetry.lock /app/ +RUN poetry install + +RUN set -xe \ + && apt-get remove -y gcc python3-dev libssl-dev \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +COPY . 
/app/ + +# run poetry install again AFTER copying the app into the image +# otherwise it does not know what the main app module is +RUN poetry install + +CMD ./bin/boot_server_in_docker diff --git a/LICENSE.rst b/LICENSE.rst new file mode 100644 index 00000000..8000a6fa --- /dev/null +++ b/LICENSE.rst @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. 
+ + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. 
We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. 
+ + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. 
+ + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. 
+ + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. 
+ + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. 
If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. 
+ + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. 
However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. 
+ +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. 
If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. 
+ + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random + Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! diff --git a/README.rst b/README.rst new file mode 100644 index 00000000..e9d3a68d --- /dev/null +++ b/README.rst @@ -0,0 +1,92 @@ +Spiffworkflow Backend +========== + +|Tests| |Codecov| + +|pre-commit| |Black| + +.. |Tests| image:: https://github.com/sartography/spiffworkflow-backend/workflows/Tests/badge.svg + :target: https://github.com/sartography/spiffworkflow-backend/actions?workflow=Tests + :alt: Tests +.. |Codecov| image:: https://codecov.io/gh/sartography/spiffworkflow-backend/branch/main/graph/badge.svg + :target: https://codecov.io/gh/sartography/spiffworkflow-backend + :alt: Codecov +.. |pre-commit| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white + :target: https://github.com/pre-commit/pre-commit + :alt: pre-commit +.. |Black| image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/psf/black + :alt: Black + + +Features +-------- + +* Backend API portion of the spiffworkflow engine webapp + + +Running Locally +--------------- + +* Install libraries using poetry: + +.. code:: console + + $ poetry install + +* Setup the database - uses mysql and assumes server is running by default: + +.. code:: console + + $ ./bin/recreate_db clean + +* Run the server: + +.. 
code:: console + + $ ./bin/run_server_locally + + +Requirements +------------ + +* Python 3.9+ +* Poetry + + +Contributing +------------ + +Contributions are very welcome. +To learn more, see the `Contributor Guide`_. + + +License +------- + +Distributed under the terms of the `MIT license`_, +*Spiffworkflow Backend* is free and open source software. + + +Issues +------ + +If you encounter any problems, +please `file an issue`_ along with a detailed description. + + +Credits +------- + +This project was generated from `@cjolowicz`_'s `Hypermodern Python Cookiecutter`_ template. + +.. _@cjolowicz: https://github.com/cjolowicz +.. _Cookiecutter: https://github.com/audreyr/cookiecutter +.. _MIT license: https://opensource.org/licenses/MIT +.. _PyPI: https://pypi.org/ +.. _Hypermodern Python Cookiecutter: https://github.com/cjolowicz/cookiecutter-hypermodern-python +.. _file an issue: https://github.com/sartography/spiffworkflow-backend/issues +.. _pip: https://pip.pypa.io/ +.. github-only +.. _Contributor Guide: CONTRIBUTING.rst +.. _Usage: https://spiffworkflow-backend.readthedocs.io/en/latest/usage.html diff --git a/bin/boot_in_docker_debug_mode b/bin/boot_in_docker_debug_mode new file mode 100755 index 00000000..388c7365 --- /dev/null +++ b/bin/boot_in_docker_debug_mode @@ -0,0 +1,14 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if [[ ! -f /app/log/db_development.log ]]; then + touch /app/log/db_development.log +fi + +tail -f /app/log/db_development.log diff --git a/bin/boot_server_in_docker b/bin/boot_server_in_docker new file mode 100755 index 00000000..23aab408 --- /dev/null +++ b/bin/boot_server_in_docker @@ -0,0 +1,45 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." 
+ exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +# run migrations +export FLASK_APP=/app/src/spiffworkflow_backend + +if [[ "${DOWNGRADE_DB:-}" == "true" ]]; then + echo 'Downgrading database...' + poetry run flask db downgrade +fi + +if [[ "${SPIFFWORKFLOW_BACKEND_UPGRADE_DB:-}" == "true" ]]; then + echo 'Upgrading database...' + poetry run flask db upgrade +fi + +port="${SPIFFWORKFLOW_BACKEND_PORT:-}" +if [[ -z "$port" ]]; then + port=7000 +fi + +additional_args="" + +if [[ "${APPLICATION_ROOT:-}" != "/" ]]; then + additional_args="${additional_args} -e SCRIPT_NAME=${APPLICATION_ROOT}" +fi + +# HACK: if loading fixtures for acceptance tests when we do not need multiple workers +# it causes issues with attempting to add duplicate data to the db +workers=3 +if [[ "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" == "true" ]]; then + workers=1 +fi + +export IS_GUNICORN="true" +export PROCESS_WAITING_MESSAGES="true" + +# THIS MUST BE THE LAST COMMAND! +exec poetry run gunicorn ${additional_args} --bind "0.0.0.0:$port" --workers="$workers" --timeout 90 --capture-output --access-logfile '-' --log-level debug wsgi:app diff --git a/bin/build_and_run_with_docker_compose b/bin/build_and_run_with_docker_compose new file mode 100755 index 00000000..4356d974 --- /dev/null +++ b/bin/build_and_run_with_docker_compose @@ -0,0 +1,42 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then + script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" + export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models" +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run +fi + +additional_args="" +if [[ "${RUN_WITH_DAEMON:-}" != "false" ]]; then + additional_args="${additional_args} -d" +fi + +docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" build +docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" stop + +if [[ "${SPIFFWORKFLOW_BACKEND_RECREATE_DATABASE:-}" == "true" ]]; then + docker stop db + docker rm db + docker volume rm spiffworkflow-backend_spiffworkflow_backend + + # i observed a case locally where the db had a stale sqlalchemy revision which + # caused the backend to exit and when docker compose up was running with + # --wait, it just said waiting forever (like we have seen in CI). so removing + # the volume would work around that case, if the volumes are not cleaned up in + # CI. also removing the wait prevents it from hanging forever in the case where + # the backend crashes, so then we'll just wait for the timeout to happen in the + # bin/wait_for_server_to_be_up script. + docker volume rm spiffworkflow-backend_spiffworkflow_backend || echo 'docker volume not found' +fi + +docker compose --profile "$SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE" up --wait $additional_args diff --git a/bin/deploy b/bin/deploy new file mode 100755 index 00000000..466bb6d1 --- /dev/null +++ b/bin/deploy @@ -0,0 +1,48 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_ENV=staging +fi + +if [[ -z "${FLASK_SESSION_SECRET_KEY:-}" ]]; then + export FLASK_SESSION_SECRET_KEY=staging_super_secret_key_dont_tell_anyone +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_PASSWORD=St4g3Th1515 +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_DATABASE_NAME=spiffworkflow_backend_staging +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY=always +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE=run +fi + +if [[ -z "${SPIFFWORKFLOW_FRONTEND_URL:-}" ]]; then + export SPIFFWORKFLOW_FRONTEND_URL='http://167.172.242.138:7001' +fi + +if [[ -z "${SPIFFWORKFLOW_BACKEND_URL:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_URL='http://167.172.242.138:7000' +fi + +if [[ -z "${OPEN_ID_SERVER_URL:-}" ]]; then + export OPEN_ID_SERVER_URL='http://167.172.242.138:7002' +fi + +git pull +./bin/build_and_run_with_docker_compose +./bin/wait_for_server_to_be_up diff --git a/bin/export_keycloak_realms b/bin/export_keycloak_realms new file mode 100755 index 00000000..97eafc8e --- /dev/null +++ b/bin/export_keycloak_realms @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +realms="$*" +if [[ -z "$realms" ]]; then + realms="spiffworkflow-realm" +fi + +docker_container_path=/tmp/hey +local_tmp_dir=$(mktemp -d -t ci-XXXXXXXXXX) +docker exec keycloak rm -rf "$docker_container_path" +docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_path}" --users realm_file || echo '' +docker cp "keycloak:${docker_container_path}" "$local_tmp_dir" + +for realm in $realms ; do + cp "${local_tmp_dir}/hey/${realm}.json" bin/ +done + +rm -rf "$local_tmp_dir" diff --git a/bin/finance-realm.json b/bin/finance-realm.json new file mode 100644 index 00000000..b0705c5b --- /dev/null +++ b/bin/finance-realm.json @@ -0,0 +1,2175 @@ +{ + "id": "finance", + "realm": "finance", + "notBefore": 0, + "defaultSignatureAlgorithm": "RS256", + "revokeRefreshToken": false, + "refreshTokenMaxReuse": 0, + "accessTokenLifespan": 300, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "ssoSessionIdleTimeoutRememberMe": 0, + "ssoSessionMaxLifespanRememberMe": 0, + "offlineSessionIdleTimeout": 2592000, + "offlineSessionMaxLifespanEnabled": false, + "offlineSessionMaxLifespan": 5184000, + "clientSessionIdleTimeout": 0, + "clientSessionMaxLifespan": 0, + "clientOfflineSessionIdleTimeout": 0, + "clientOfflineSessionMaxLifespan": 0, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "oauth2DeviceCodeLifespan": 600, + "oauth2DevicePollingInterval": 5, + "enabled": true, + "sslRequired": "external", + "registrationAllowed": false, + "registrationEmailAsUsername": false, + "rememberMe": false, + "verifyEmail": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "permanentLockout": 
false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 30, + "roles": { + "realm": [ + { + "id": "91c53dd8-cef7-41c1-b5dd-d1ea56f3b3c6", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "finance", + "attributes": {} + }, + { + "id": "66340ad0-99c3-41ff-b252-fdda5d4e25e2", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "finance", + "attributes": {} + }, + { + "id": "e242e32c-d024-4ce1-a14d-edb0bdc698ca", + "name": "default-roles-finance", + "description": "${role_default-roles}", + "composite": true, + "composites": { + "realm": ["offline_access", "uma_authorization"], + "client": { + "account": ["view-profile", "manage-account"] + } + }, + "clientRole": false, + "containerId": "finance", + "attributes": {} + } + ], + "client": { + "myclient": [], + "realm-management": [ + { + "id": "93da3502-aff8-4360-af69-c873b213cbe0", + "name": "view-identity-providers", + "description": "${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "2078ed6a-713b-43ae-a77f-63eafec8a6a9", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "047c723f-f838-441b-9524-f074a8385e0b", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "93aa19cf-0b88-4858-a3e0-394096e7e3fa", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + 
"clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "8a2b5032-d73f-45f8-91b5-6948baa114a8", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "d2cee878-d3cf-48dc-9350-f5cc5eaece2d", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "722e7467-5670-4cc6-aedd-111cf79c47bc", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "10813786-1ef3-45d0-b91e-4b5bf48a210a", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "8ae7aa75-86b1-4cf3-b08f-20b1ba101b35", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "view-identity-providers", + "manage-realm", + "query-realms", + "view-events", + "manage-events", + "impersonation", + "manage-identity-providers", + "manage-authorization", + "view-realm", + "manage-clients", + "create-client", + "manage-users", + "view-clients", + "query-clients", + "query-groups", + "view-users", + "view-authorization", + "query-users" + ] + } + }, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "dbe4b8ec-0d4e-4bec-b216-f32ee9e5066f", + "name": "manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": 
"f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "38975601-e621-41ed-ae11-1129e623a521", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "4ed08d6b-3007-4756-9d8d-4b6fb15a5cfa", + "name": "create-client", + "description": "${role_create-client}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "21071417-8445-40f2-9213-dca727200d48", + "name": "manage-users", + "description": "${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "a8201ba5-009e-44aa-8bd9-00c3a9bd8d9d", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-clients"] + } + }, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "37bce984-5fe7-4a65-b79c-dd8e39711299", + "name": "query-clients", + "description": "${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "e9fa2bd5-19e5-49bd-b378-0dd57ce3e613", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "1269ce35-e8ab-420e-8859-6e125e77fc0d", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "0f0a08d3-f9bc-411e-8b83-57755b5a9781", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, 
+ "composites": { + "client": { + "realm-management": ["query-groups", "query-users"] + } + }, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + }, + { + "id": "e121c64b-aad1-46d3-89b4-d1f4153c90b6", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "attributes": {} + } + ], + "security-admin-console": [], + "admin-cli": [], + "account-console": [], + "broker": [ + { + "id": "267cd124-0c66-416a-a106-7033901670ea", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "c37d2d5b-15f6-48bc-8325-3be33787dae4", + "attributes": {} + } + ], + "account": [ + { + "id": "4a1f6c73-951a-48ef-b29c-216dafe7a28b", + "name": "delete-account", + "description": "${role_delete-account}", + "composite": false, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "6d79ccbb-1bbf-441d-8aa4-4157e150a34f", + "name": "manage-consent", + "description": "${role_manage-consent}", + "composite": true, + "composites": { + "client": { + "account": ["view-consent"] + } + }, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "f9536a0a-a36b-454f-b1d5-773544618853", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "f1b737bd-afaa-45a4-8613-1e37db0d05e5", + "name": "view-applications", + "description": "${role_view-applications}", + "composite": false, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "34d08665-3194-43fd-94c4-482039ca32db", + "name": "view-consent", + "description": "${role_view-consent}", + "composite": false, + 
"clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "96e2cb37-6d07-4dce-804d-c9b286a815e6", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": ["manage-account-links"] + } + }, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + }, + { + "id": "c2d5d2d2-c524-4a75-a98a-510083496448", + "name": "manage-account-links", + "description": "${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "e2e246a4-d75f-4163-8605-703d16a26f27", + "attributes": {} + } + ] + } + }, + "groups": [], + "defaultRole": { + "id": "e242e32c-d024-4ce1-a14d-edb0bdc698ca", + "name": "default-roles-finance", + "description": "${role_default-roles}", + "composite": true, + "clientRole": false, + "containerId": "finance" + }, + "requiredCredentials": ["password"], + "otpPolicyType": "totp", + "otpPolicyAlgorithm": "HmacSHA1", + "otpPolicyInitialCounter": 0, + "otpPolicyDigits": 6, + "otpPolicyLookAheadWindow": 1, + "otpPolicyPeriod": 30, + "otpSupportedApplications": ["FreeOTP", "Google Authenticator"], + "webAuthnPolicyRpEntityName": "keycloak", + "webAuthnPolicySignatureAlgorithms": ["ES256"], + "webAuthnPolicyRpId": "", + "webAuthnPolicyAttestationConveyancePreference": "not specified", + "webAuthnPolicyAuthenticatorAttachment": "not specified", + "webAuthnPolicyRequireResidentKey": "not specified", + "webAuthnPolicyUserVerificationRequirement": "not specified", + "webAuthnPolicyCreateTimeout": 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyAcceptableAaguids": [], + "webAuthnPolicyPasswordlessRpEntityName": "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms": ["ES256"], + "webAuthnPolicyPasswordlessRpId": "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", + 
"webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", + "webAuthnPolicyPasswordlessCreateTimeout": 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyPasswordlessAcceptableAaguids": [], + "users": [ + { + "id": "005c3d73-6330-4fdb-99c2-55222ecfb45c", + "createdTimestamp": 1653320371308, + "username": "user1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "10256a1f-6966-49c9-a052-46098b447820", + "type": "password", + "createdDate": 1653326993658, + "secretData": "{\"value\":\"+1pFlfNSbupUdw/TCFYOwLVlBkaqQPox767DifigdbBH/+n7EUVk1QBMCEOByX3Eah2AHAHHjOgBSiK6G3OXUQ==\",\"salt\":\"UZPJ0/Rtkbfb/9xnxmiAbw==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-finance"], + "notBefore": 0, + "groups": [] + } + ], + "scopeMappings": [ + { + "clientScope": "offline_access", + "roles": ["offline_access"] + } + ], + "clients": [ + { + "id": "e2e246a4-d75f-4163-8605-703d16a26f27", + "clientId": "account", + "name": "${client_account}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/finance/account/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/realms/finance/account/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": 
{}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "b676e1d8-4b72-47f7-bac9-b46c19b6c1d5", + "clientId": "account-console", + "name": "${client_account-console}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/finance/account/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/realms/finance/account/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "pkce.code.challenge.method": "S256" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "cbfda10b-14c2-4a4a-ac0b-b3164b1a6707", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "91b23b28-6e4b-4bd6-9444-883c01164cbf", + "clientId": "admin-cli", + "name": "${client_admin-cli}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": false, + "implicitFlowEnabled": false, + 
"directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "c37d2d5b-15f6-48bc-8325-3be33787dae4", + "clientId": "broker", + "name": "${client_broker}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "df91423f-c80a-40e9-90f7-9619b9937a88", + "clientId": "myclient", + "rootUrl": "https://www.keycloak.org/app/", + "adminUrl": "https://www.keycloak.org/app/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "secret": "OAh6rkjXIiPJDtPOz4459i3VtdlxGcce", + "redirectUris": ["http://localhost:5005/*"], + "webOrigins": ["https://www.keycloak.org"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + 
"serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "client.secret.creation.time": "1653320645", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "f7cfc114-24d1-4c1a-9079-c630fe150ebe", + "clientId": "realm-management", + "name": "${client_realm-management}", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + 
"implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "b5dcaf49-414a-4dc9-a322-343b3a42bc2c", + "clientId": "security-admin-console", + "name": "${client_security-admin-console}", + "rootUrl": "${authAdminUrl}", + "baseUrl": "/admin/finance/console/", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/admin/finance/console/*"], + "webOrigins": ["+"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "pkce.code.challenge.method": "S256" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "88310466-5078-4046-bf2c-f224409ca180", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "roles", + "profile", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + } + 
], + "clientScopes": [ + { + "id": "af549e77-9526-4df2-99c4-d03b930f3754", + "name": "offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + }, + { + "id": "b491f331-3ba8-4caf-bb5c-60605fbea094", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false", + "consent.screen.text": "" + }, + "protocolMappers": [ + { + "id": "636f662f-1f3b-490b-a350-50dbdb33dbd6", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "3075a8bb-feec-4317-b6b8-199fff003e78", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "9b0b611e-5a74-42cb-8a8b-db57b072798c", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + }, + { + "id": "a71a281e-3397-4b59-8c8f-90a0c2596ced", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "multivalued": "true", + "userinfo.token.claim": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "jsonType.label": "String" + } + } + ] + }, + { + 
"id": "497f4647-6fda-4414-b28a-de9dd2fa71be", + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${emailScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "38883a67-1be8-4b3b-b49b-2958eb3ac537", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + }, + { + "id": "cfe47ce1-29f0-47b2-a4ee-62583d0219ef", + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "57757394-3904-49dd-b006-761f9deb8b3c", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${addressScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "49fd59ae-f76c-4563-a55b-246c0927c71b", + "name": "address", + "protocol": "openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + "user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + 
} + } + ] + }, + { + "id": "854568f7-6ee5-457b-b14c-5624e6712d47", + "name": "acr", + "description": "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "122e4c4b-6db8-4ca0-b2ae-040bd4062fc1", + "name": "acr loa level", + "protocol": "openid-connect", + "protocolMapper": "oidc-acr-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + } + ] + }, + { + "id": "b08be6ff-96fc-4ef1-811f-73d3c8eef401", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${phoneScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "a6b7b8bd-b6ca-43e0-aa97-c6ef2985ae71", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + }, + { + "id": "8375a9f1-5355-457f-96b4-33fb687c9352", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "20dd9fdb-e39f-4e8d-873e-4d03b0f52fd6", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + 
"attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${profileScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "a4fa4e5c-7503-4974-9efa-166fe982df0b", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "88b23bba-5a2e-4f46-b786-d049c0f84175", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "949c9cd3-5a2c-43ee-9b65-20c36957c023", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "dea37916-c26a-405d-aefc-9d66ffaea3e2", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "id": "31891161-c569-4168-a8c9-250fd63c8e9e", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + 
"user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "a4e05e95-4c9f-45e7-8d88-d439ee6a1cc9", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + }, + { + "id": "7ce53f52-eadc-4b71-b66b-029d01291e8f", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + "id": "dffa1341-02d6-4bd6-aa20-3de3d2ecb500", + "name": "nickname", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "dd4882ce-352c-4118-b5d2-9e5e67158a56", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": "f4c115d4-8fdd-409f-8ae1-6f7669dd0400", + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": "oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + 
"userinfo.token.claim": "true" + } + }, + { + "id": "9fda30b9-b251-4177-9046-557478df1c3f", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "b05b341f-1f37-439c-90fc-0366c964d34a", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + }, + { + "id": "c1969485-c36f-437b-a681-42bfb3a15b37", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + }, + { + "id": "7d95cd7f-8f25-474a-a9ef-1bd793f7c52f", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "long" + } + } + ] + }, + { + "id": "57a8e94a-3275-4d31-8409-a33e4efea380", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "e1582dca-683f-4743-bdc8-4fa9b6580f1e", + "name": "role list", + "protocol": "saml", 
+ "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "1d67898f-ed21-4943-adaf-749fdf007e32", + "name": "roles", + "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "true", + "consent.screen.text": "${rolesScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "5b736866-be1d-42b3-ab22-56614b40a55c", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String", + "multivalued": "true" + } + }, + { + "id": "7a5031af-da7f-4832-b1ae-97a3d36eba0d", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + }, + { + "id": "8ecfdd17-b555-42c3-b86b-088b88531575", + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String", + "multivalued": "true" + } + } + ] + } + ], + "defaultDefaultClientScopes": [ + "roles", + "profile", + "email", + "role_list", + "acr", + "web-origins" + ], + "defaultOptionalClientScopes": [ + "microprofile-jwt", + "address", + "offline_access", + "phone" + ], + "browserSecurityHeaders": { + "contentSecurityPolicyReportOnly": "", + "xContentTypeOptions": "nosniff", + "xRobotsTag": "none", + "xFrameOptions": "SAMEORIGIN", + "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + 
"xXSSProtection": "1; mode=block", + "strictTransportSecurity": "max-age=31536000; includeSubDomains" + }, + "smtpServer": {}, + "eventsEnabled": false, + "eventsListeners": ["jboss-logging"], + "enabledEventTypes": [], + "adminEventsEnabled": false, + "adminEventsDetailsEnabled": false, + "identityProviders": [], + "identityProviderMappers": [], + "components": { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ + { + "id": "07e852dc-d1b8-446e-8e6c-cc8cba09d67b", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "6a31e7b4-c1e4-4b98-aff8-b797635c8685", + "name": "Full Scope Disabled", + "providerId": "scope", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "3b45a6bb-607a-42ba-828d-cbe3bb4822d1", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-usermodel-attribute-mapper", + "saml-role-list-mapper", + "saml-user-attribute-mapper", + "oidc-address-mapper", + "saml-user-property-mapper", + "oidc-full-name-mapper", + "oidc-sha256-pairwise-sub-mapper", + "oidc-usermodel-property-mapper" + ] + } + }, + { + "id": "3d0c68f7-fadc-45e5-9cb1-011569ff4848", + "name": "Max Clients Limit", + "providerId": "max-clients", + "subType": "anonymous", + "subComponents": {}, + "config": { + "max-clients": ["200"] + } + }, + { + "id": "568cdc09-0bc8-4d55-9571-ec79111afda2", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-address-mapper", + "saml-role-list-mapper", + "saml-user-property-mapper", + "oidc-usermodel-property-mapper", + "oidc-usermodel-attribute-mapper", + 
"oidc-sha256-pairwise-sub-mapper", + "oidc-full-name-mapper", + "saml-user-attribute-mapper" + ] + } + }, + { + "id": "04676764-b892-4c0a-86ec-9bb2ab43941a", + "name": "Consent Required", + "providerId": "consent-required", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "7876d668-bfec-44c2-8531-43d05ce26a55", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "84af08d2-2d53-491c-b474-ff048c113893", + "name": "Trusted Hosts", + "providerId": "trusted-hosts", + "subType": "anonymous", + "subComponents": {}, + "config": { + "host-sending-registration-request-must-match": ["true"], + "client-uris-must-match": ["true"] + } + } + ], + "org.keycloak.keys.KeyProvider": [ + { + "id": "7702a515-b69c-49e0-81c9-41995c9336a2", + "name": "rsa-enc-generated", + "providerId": "rsa-enc-generated", + "subComponents": {}, + "config": { + "privateKey": [ + 
"MIIEpQIBAAKCAQEA0vzcavVrsY6k9yLNvLlYv+0HYoz8tJMsG121qIlYRmHMqehXNdQeTCwG9eT22CnCwIeuarzO7G5nx1Gr6+Xtwijpw9+F1apmXgw41bXAvF0v7ujvxCoUVczKEfjP/9dpFWbgXYiB+A2/FzBgpPtGAqpdt0JVkMbINwYyOKc8hNfEJpogvb7VLzyiVSTBSHg7MK+U2XwQZ7p9hwY4Y+yTBRr9NKlrZhiwY49+iBg5bBYINeCmK6dYQp0IsdvSKSXfgHKGbIF9b8uVTyjJPHe6JMirxRWFWRRU8s5QdMeA2/kbnx9UV4nVi9XixAg7/94qe/HGeZvf3uqcEdmQrchHgQIDAQABAoIBAARfNPoKZ9bgC88LK5QziwE/Gh4SqdHcEtfmUzNQ1UFwGVuT8uqsoIwXhXVm7y5/fweaSCTyEJ+ZkhDxQky7EPgq/N0QLqfExiZAvpS/FWVR0QJ0VTONY6p7edxxeW3lusm0vgVyJK9NKZ6DZB1HD6ZSfWqktz0uwUtSl4vQ+1INVOFPNKNP+DpKeyol4BLZ8KLWEZcxCOm8s6i+5DQTODQE+a1+wAzsqQii7iHuOKkLZVGdDBWMd1ydP6mBtH99bMheFOkaSy+zsVV042JVv88ohh95emx95zCFIRSgvS4OqGCRBpk57feHD+XF7nWfIY3QoOPMez1DEbWKKvEsxzkCgYEA8mf+Cpinay2U+x6ep234H+RDMSEzpcluxV8dUyZWSmW1akP5YZq1alW01S4clOP6/vHjQmRrN3yG1/s5karBgyJcE8BUAmM0jJaGOAIfJRJVJqbGo4B1/eKfOBHhNSItwjWXujCqGOQmUTOB3r0t9tTa4PLXtPFnZ2yCniMYzD8CgYEA3tHSY1ZeAST2LVC4NBu5IlDQmQUT/YM4+17aAzhq35N8oS2vXq+7INx4fXRO2pWq/cO7d6Xz8dGozQlSUl2a+mFRZmJuRkQ5Viv2fELNz1cByz7Otn5yqk5VQAUN628sijHXsFlLt9sk7ohDNcHSqp1AnQ6dI2+HK3uDWHZg/D8CgYEAqDKkQXH+6Q1tzHlImOBjRK28OcDR9303kVgYHvQA+ApuwNd9mM4ihzAFE861IlK2fGQWzqCjDJMpdX/qSDr1nyoePc47/JQdiFeBticyuSRpVTJvrk+aj5nJ5LvzSkz290T8jMLro+3a8IkO2iDmXdkALwoRxomiuE7Aqz2rHT0CgYEAhGfqvVz1P3jXUNZGn7BZsyqSyltaJat0kOssYf2EhJb/2EdtjraeXEim0OXsdbe6hFFrdQ4IiAouZKjFYmx9S4vI8y5KmTaxW6OENRL9edgaGMyZyZgaS1mBDynxu5W0weocYY6uj/KJX7VZOviSVxBa9PlaJ1R+3jfnNQojV8ECgYEAxRphKa2E6b9dANF1uyincbmafp/yJ0bStmzyY2gbAak/gBQw8Nyamo/CVDIGusQJdb0hsd7hB8QjC20i/T1uFz2KE4w1RzTHvtiO4z3FBdlYDJ+nZOQb8KRH8YATRn4gtAxeM1mOiT9ZT3t1LMTNZOVBMXnN1xQQa/iIZ+96Iow=" + ], + "keyUse": ["ENC"], + "certificate": [ + 
"MIICnTCCAYUCBgGA8Y4ipTANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDDAdmaW5hbmNlMB4XDTIyMDUyMzE1MzI0NVoXDTMyMDUyMzE1MzQyNVowEjEQMA4GA1UEAwwHZmluYW5jZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANL83Gr1a7GOpPcizby5WL/tB2KM/LSTLBtdtaiJWEZhzKnoVzXUHkwsBvXk9tgpwsCHrmq8zuxuZ8dRq+vl7cIo6cPfhdWqZl4MONW1wLxdL+7o78QqFFXMyhH4z//XaRVm4F2IgfgNvxcwYKT7RgKqXbdCVZDGyDcGMjinPITXxCaaIL2+1S88olUkwUh4OzCvlNl8EGe6fYcGOGPskwUa/TSpa2YYsGOPfogYOWwWCDXgpiunWEKdCLHb0ikl34ByhmyBfW/LlU8oyTx3uiTIq8UVhVkUVPLOUHTHgNv5G58fVFeJ1YvV4sQIO//eKnvxxnmb397qnBHZkK3IR4ECAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAngNl7wr2JsoTsAML0aUMFhRKuZKhsOluUlR5BaU5FpCIM902YqVtDmlDaPJhbGr6+JvDsAaHvhqiv9V4ShzcjYEiYJzDYnvJb3tA+AhSXAIU1WpeVEikeiHltb6repc2BFvMmVKOWntv6JSi35WopxQU6WwCjquZdYFGLDFrS2TsOo4N5n6h15IDUNLQxd+g4v4u8rWO/f0vmtLinIurW8CpK3uq5rm6gfy8ooJ7VKbyF0EP3wecLZr32NQ4DZpct1CSl6pr3Rvh9JMWrIr+5kXkVFLpnuOT0d5Y4ZiKPBZd1+rLLqblj6U6UPa7kcxay8R+jU5YrZLZO3VuIM3R7A==" + ], + "priority": ["100"], + "algorithm": ["RSA-OAEP"] + } + }, + { + "id": "1c415214-0b91-4853-810c-aea64593896e", + "name": "rsa-generated", + "providerId": "rsa-generated", + "subComponents": {}, + "config": { + "privateKey": [ + 
"MIIEogIBAAKCAQEAge/hw7KOQWkafuPbIMWNtxOmaAhc/nEI5RjEbDCyU3eqKOT7NYdRM0IpcAG0L6K+LW38c5oUrnHw525z9DJ8rNedMCRJsLZiJlz+PdJBWlfFImS3jzDRL0lbqZLvF3In9dIAsOFvHN8CQdnU8QPWQ22S+LIdMed3M1FPV4wtnv3QGATDGEbwiALgF12wpOUQP+bJrdBSpkQMGY8WTWMe5OXbXCvhp9XDExaXrrJigCLqug6WrNeo7UKEi5yaOgwC/msNXxQmXKBIke7WaLuX2mFmJ7jRDHI09WmYGTRMyi+bebRq063IulTNXWsVfOYhC63PAoT9QwiChKlRtey7gQIDAQABAoIBAFTP1+Pds17clK/IWGpuqE6cY9y5wMFP1Id9ABDCRFOY4xdoRtQsrAKyJ8v1QMBrEIkkpkhqhR9MPftBrCvznE9ROUeIGBVhJnV183k0uPWYHpORfALZ0WB1GIB56Ne76ntYhJzvuI/2vElxPr3GTF+csPblg4rkU3jKlOhHFiwU++IAJ9zTO97XqSPSvt5VmpjKNRRN0dodY1cVxPiCybi1F6Xk4E5XTqWoBxggVwM4BbXxDply+Ec7+RULfE2TTFb6YNHbnyV8xE1TlCZPU4+Tws6izJZi/YPwBWgv6CZF1zTLkNMPiCp3+isZN79wpqYKvjk0Y1SrtcawIi/9ocECgYEA2FTiFWdvzHn9vgw2wTxCb2pY46TTeQ/kjSKqIQDYZwzy2kNFDF/NZXIFp3FdROO0LJyrBwtp5PdDMDrKWhEu2eVNNFoqaCcS/LpY+m0MNw5fhQSfyHm8jPUZGsrKHh24T7hRi0S7N92gTos0bp0JURJ4/E+4MFE86Woic9pnrqkCgYEAmcNvOKtk1pczO1x26gpGQe3jd1F/LhPKDb2JvFrEviFczXT26AeTX9Tpe+S9sDwLrmTHmeKRITFO+bpUv0CqsAR+4EPZg1CYtcL8xC7wjmxqI7fyfeeREs0t7sojD2pa4m+aY1PwH3GD8Gs/V6Ei6JoDn2fHw64cLtiT8Y1EZRkCgYBon0yjy12PR2RHGEMW2xzKI2WP2rQHdX80HpKm2XDPDb0Zf7USEfcU27sOSs4vtev6M16nTobNHCQbXCNmwa6l5Lac3gpqSpNtNHY5zqk8qy/Y9e6MlwtwJE0a3v426Gqt61dMglYE46NWuQ4HfNz3n3Zxl+BdRbyuqgy1eOfx2QKBgGxO2EIPoGjzehffKrfyYsi/21DJrA71y4ZgMuvk996BO5BRSjXoWCvRmmE1n1PLB+Ngg6RmjhnWVK3dTSKmV1TC5efsgwNXOyoU3d0Gv1VjEmR0hq+e9HZM5oTkB0wtezUyrYo3rtVHLUulD6oXC7LSAak//dtdAecRkSw9eKbJAoGAdKWKSl1CJ5MKKbLcCcNY474Ifzop+ZJF+wHwBnKwsNuDhAuccXeqlkfspE+sEiKhdHxRp2FgWbvbp83d1HFM1eRZcF8xNxNTQ1kGIjFO73ONroQBzOQq6N2XTWmdQWsNB/0gqlfQ0g6ltrNIWQJ6dBTh45C/c8wwFPRJ3IdQD/s=" + ], + "keyUse": ["SIG"], + "certificate": [ + 
"MIICnTCCAYUCBgGA8Y4idDANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDDAdmaW5hbmNlMB4XDTIyMDUyMzE1MzI0NVoXDTMyMDUyMzE1MzQyNVowEjEQMA4GA1UEAwwHZmluYW5jZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIHv4cOyjkFpGn7j2yDFjbcTpmgIXP5xCOUYxGwwslN3qijk+zWHUTNCKXABtC+ivi1t/HOaFK5x8Oduc/QyfKzXnTAkSbC2YiZc/j3SQVpXxSJkt48w0S9JW6mS7xdyJ/XSALDhbxzfAkHZ1PED1kNtkviyHTHndzNRT1eMLZ790BgEwxhG8IgC4BddsKTlED/mya3QUqZEDBmPFk1jHuTl21wr4afVwxMWl66yYoAi6roOlqzXqO1ChIucmjoMAv5rDV8UJlygSJHu1mi7l9phZie40QxyNPVpmBk0TMovm3m0atOtyLpUzV1rFXzmIQutzwKE/UMIgoSpUbXsu4ECAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAUAB+19re9bQdlSZv4O6xSqI+7bMtdNxQxxb2wr1Iy8dmH7gPIg4DY7qilFnGvyY0B1CrOomov4tTha8v4YUvX2/Q3Ped5AHWlymWLbM/Mnxc8mcAsa3UmIgF8zEKkIEeXz876owXaUIy+o+hx6uDr6J/GvTtIYW/t7hVWkbHUxozJBrxYL9AKqFIBlc9B2jMiV81qNvHN17OWZRRXwP6mk+zY1CTJrbDaeK0bThF66Ff0uJMuK4CYJyyDDXL+9CguIW0G8hIhFeDXKZ/aAoVCULo+b653REYitpHD8PuuldDkthSSSZq9PnR+7ttDQ+Zt/8rWe+tBNzJCIx4vzY2rg==" + ], + "priority": ["100"] + } + }, + { + "id": "0c1cb39b-19a4-4103-b5e5-ead9993d25d8", + "name": "aes-generated", + "providerId": "aes-generated", + "subComponents": {}, + "config": { + "kid": ["01cccc91-2bd5-4107-9abe-73eefa08844e"], + "secret": ["UFMiYfKaAgrSES3eknZGNA"], + "priority": ["100"] + } + }, + { + "id": "2be427e9-7d31-46f2-b315-27eabf13c750", + "name": "hmac-generated", + "providerId": "hmac-generated", + "subComponents": {}, + "config": { + "kid": ["88dc0a34-df52-4ba9-b120-f64f5f58814b"], + "secret": [ + "NsTtKtnEUPVAGynVRWUNbyBPGPogEB1DfhTikcPF357dwL1yD_4tiv7HmjJ8CSvQP4ILuuTxhDqO3z-zjAoROA" + ], + "priority": ["100"], + "algorithm": ["HS256"] + } + } + ] + }, + "internationalizationEnabled": false, + "supportedLocales": [], + "authenticationFlows": [ + { + "id": "0fde2792-dcf5-487d-ac16-f7fe6aa6452a", + "alias": "Account verification options", + "description": "Method with which to verity the existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-email-verification", + 
"authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Verify Existing Account by Re-authentication", + "userSetupAllowed": false + } + ] + }, + { + "id": "86dddafa-1c63-408b-a7f1-cf82c7fdf3dd", + "alias": "Authentication Options", + "description": "Authentication options.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "basic-auth", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "basic-auth-otp", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "a47eb419-ed9e-42fd-b4ec-6074323fa6ba", + "alias": "Browser - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "3368f154-d4ba-4839-95a3-e02fb50bf17c", + "alias": "Direct Grant - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", 
+ "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "81ce715c-c31d-4ac6-9f41-587c1a105786", + "alias": "First broker login - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "cc14c5a6-5a36-4601-99c1-e0b8b5c33c57", + "alias": "Handle Existing Account", + "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-confirm-link", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Account verification options", + "userSetupAllowed": false + } + ] + }, + { + "id": "fd19942f-8d99-4179-961e-9bc52c124fe6", + "alias": "Reset - Conditional OTP", + "description": "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "b27dd02c-9023-414e-949d-9b44c47768ab", + "alias": "User creation or linking", + "description": "Flow for the existing/non-existing user alternatives", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "create unique user config", + "authenticator": "idp-create-user-if-unique", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Handle Existing Account", + "userSetupAllowed": false + } + ] + }, + { + "id": "6460144e-b28b-45a5-940c-a73855c53e1a", + "alias": "Verify Existing Account by Re-authentication", + "description": "Reauthentication of existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "First broker login - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "da8c0b0c-f3d3-49e4-88eb-127d27029c76", + "alias": "browser", + "description": "browser based 
authentication", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-cookie", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "identity-provider-redirector", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 25, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "forms", + "userSetupAllowed": false + } + ] + }, + { + "id": "5103be99-4355-4031-9a32-ad8837f6d972", + "alias": "clients", + "description": "Base authentication for clients", + "providerId": "client-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "client-secret", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-secret-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-x509", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "6996f613-218b-4442-9596-84ac217c87dd", + "alias": "direct grant", + "description": "OpenID Connect Resource Owner Grant", + "providerId": "basic-flow", + "topLevel": true, + 
"builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "direct-grant-validate-username", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "Direct Grant - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "a5198547-b614-43b7-a7bf-152a82199c67", + "alias": "docker auth", + "description": "Used by Docker clients to authenticate against the IDP", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "docker-http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "8a7d1775-6a9e-45c8-8797-d6bbdde5a359", + "alias": "first broker login", + "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "review profile config", + "authenticator": "idp-review-profile", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "User creation or linking", + "userSetupAllowed": false + } + ] + }, + { + "id": "e707bd54-507a-4534-a192-53fe8939e094", + "alias": "forms", + "description": "Username, password, otp and other auth forms.", + 
"providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Browser - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "ed74db0e-1714-44d3-824e-a687be5b8c47", + "alias": "http challenge", + "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "no-cookie-redirect", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Authentication Options", + "userSetupAllowed": false + } + ] + }, + { + "id": "02fcb19a-ed47-4bef-a33d-5329bf683e36", + "alias": "registration", + "description": "registration flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-page-form", + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": true, + "flowAlias": "registration form", + "userSetupAllowed": false + } + ] + }, + { + "id": "21b1748a-8641-4102-a7fa-783818f22b61", + "alias": "registration form", + "description": "registration form", + "providerId": "form-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-user-creation", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": 
false + }, + { + "authenticator": "registration-profile-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-password-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-recaptcha-action", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 60, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "3b8ac3fc-33c3-4256-86a9-ffffdacb23a2", + "alias": "reset credentials", + "description": "Reset credentials for a user if they forgot their password or something", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "reset-credentials-choose-user", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-credential-email", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 40, + "autheticatorFlow": true, + "flowAlias": "Reset - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "4990a687-af96-4554-bd1f-f734fcd9ca1f", + "alias": "saml ecp", + "description": "SAML ECP Profile Authentication Flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + 
"autheticatorFlow": false, + "userSetupAllowed": false + } + ] + } + ], + "authenticatorConfig": [ + { + "id": "d0afe54f-2e16-4643-974e-1ed037675314", + "alias": "create unique user config", + "config": { + "require.password.update.after.registration": "false" + } + }, + { + "id": "44a4393f-8f1c-4192-9645-2c413b1f39ff", + "alias": "review profile config", + "config": { + "update.profile.on.first.login": "missing" + } + } + ], + "requiredActions": [ + { + "alias": "CONFIGURE_TOTP", + "name": "Configure OTP", + "providerId": "CONFIGURE_TOTP", + "enabled": true, + "defaultAction": false, + "priority": 10, + "config": {} + }, + { + "alias": "terms_and_conditions", + "name": "Terms and Conditions", + "providerId": "terms_and_conditions", + "enabled": false, + "defaultAction": false, + "priority": 20, + "config": {} + }, + { + "alias": "UPDATE_PASSWORD", + "name": "Update Password", + "providerId": "UPDATE_PASSWORD", + "enabled": true, + "defaultAction": false, + "priority": 30, + "config": {} + }, + { + "alias": "UPDATE_PROFILE", + "name": "Update Profile", + "providerId": "UPDATE_PROFILE", + "enabled": true, + "defaultAction": false, + "priority": 40, + "config": {} + }, + { + "alias": "VERIFY_EMAIL", + "name": "Verify Email", + "providerId": "VERIFY_EMAIL", + "enabled": true, + "defaultAction": false, + "priority": 50, + "config": {} + }, + { + "alias": "delete_account", + "name": "Delete Account", + "providerId": "delete_account", + "enabled": false, + "defaultAction": false, + "priority": 60, + "config": {} + }, + { + "alias": "update_user_locale", + "name": "Update User Locale", + "providerId": "update_user_locale", + "enabled": true, + "defaultAction": false, + "priority": 1000, + "config": {} + } + ], + "browserFlow": "browser", + "registrationFlow": "registration", + "directGrantFlow": "direct grant", + "resetCredentialsFlow": "reset credentials", + "clientAuthenticationFlow": "clients", + "dockerAuthenticationFlow": "docker auth", + "attributes": { + 
"cibaBackchannelTokenDeliveryMode": "poll", + "cibaExpiresIn": "120", + "cibaAuthRequestedUserHint": "login_hint", + "oauth2DeviceCodeLifespan": "600", + "clientOfflineSessionMaxLifespan": "0", + "oauth2DevicePollingInterval": "5", + "clientSessionIdleTimeout": "0", + "parRequestUriLifespan": "60", + "clientSessionMaxLifespan": "0", + "clientOfflineSessionIdleTimeout": "0", + "cibaInterval": "5" + }, + "keycloakVersion": "18.0.0", + "userManagedAccessAllowed": false, + "clientProfiles": { + "profiles": [] + }, + "clientPolicies": { + "policies": [] + } +} diff --git a/bin/get_bpmn_json_for_process_instance b/bin/get_bpmn_json_for_process_instance new file mode 100755 index 00000000..9b6b4c75 --- /dev/null +++ b/bin/get_bpmn_json_for_process_instance @@ -0,0 +1,35 @@ +"""Get the bpmn process json for a given process instance id and store it in /tmp.""" +#!/usr/bin/env python +import os +import sys + +from spiffworkflow_backend import create_app +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + + +def main(process_instance_id: str): + """Main.""" + os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development" + flask_env_key = "FLASK_SESSION_SECRET_KEY" + os.environ[flask_env_key] = "whatevs" + app = create_app() + with app.app_context(): + process_instance = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ).first() + + if not process_instance: + raise Exception( + f"Could not find a process instance with id: {process_instance_id}" + ) + + with open( + f"/tmp/{process_instance_id}_bpmn_json.json", "w", encoding="utf-8" + ) as f: + f.write(process_instance.bpmn_json) + + +if len(sys.argv) < 2: + raise Exception("Process instance id not supplied") + +main(sys.argv[1]) diff --git a/bin/get_logs_from_docker_compose b/bin/get_logs_from_docker_compose new file mode 100755 index 00000000..78c7684e --- /dev/null +++ b/bin/get_logs_from_docker_compose @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo 
"Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +docker compose logs "$@" diff --git a/bin/get_token b/bin/get_token new file mode 100755 index 00000000..908b6362 --- /dev/null +++ b/bin/get_token @@ -0,0 +1,103 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +# this tests we can get a token from a public client and exchange it with a confidential client +# so we can see what resources that user has access to + +# originally from https://medium.com/keycloak/keycloak-jwt-token-using-curl-post-72c9e791ba8c +# btw, meta config endpoint: http://localhost:7002/realms/spiffworkflow/.well-known/openid-configuration +# token exchange described at https://github.com/keycloak/keycloak-documentation/blob/main/securing_apps/topics/token-exchange/token-exchange.adoc +# some UMA stuff at https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-authorization-obtaining-permission.adoc, +# though resource_set docs are elsewhere. 
+ +# ./bin/get_token # uses ciuser1 ciuser1 +# ./bin/get_token ciadmin1 ciadmin1 +# ./bin/get_token repeat_form_user_1 repeat_form_user_1 # actually has permissions to the resource in this script +# ./bin/get_token ciadmin1 ciadmin1 '%2Fprocess-models' + +HOSTNAME=localhost:7002 +REALM_NAME=spiffworkflow +USERNAME=${1-ciuser1} +PASSWORD=${2-ciuser1} +URI_TO_TEST_AGAINST=${3-'%2Fprocess-models%2Fcategory_number_one%2Fprocess-model-with-repeating-form'} + +FRONTEND_CLIENT_ID=spiffworkflow-frontend +BACKEND_CLIENT_ID=spiffworkflow-backend +BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105 +SECURE=false + +BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64) +KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token + +echo "Using Keycloak: $KEYCLOAK_URL" +echo "realm: $REALM_NAME" +echo "client-id: $FRONTEND_CLIENT_ID" +echo "username: $USERNAME" +echo "password: $PASSWORD" +echo "secure: $SECURE" + + +if [[ $SECURE = 'y' ]]; then + INSECURE= +else + INSECURE=--insecure +fi + +result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=$USERNAME" \ + -d "password=$PASSWORD" \ + -d 'grant_type=password' \ + -d "client_id=$FRONTEND_CLIENT_ID" \ +) +frontend_token=$(jq -r '.access_token' <<< "$result") + +result=$(curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + --data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \ + -d "client_id=$BACKEND_CLIENT_ID" \ + -d "subject_token=${frontend_token}" \ + -H "Authorization: Basic $BACKEND_BASIC_AUTH" \ + -d "audience=${BACKEND_CLIENT_ID}" \ +) +backend_token=$(jq -r '.access_token' <<< "$result") + +if [[ "$backend_token" != 'null' ]]; then + echo "backend_token: $backend_token" + + echo "Getting resource set" + # everything_resource_id='446bdcf4-a3bd-41c7-a0f8-67a225ba6b57' + resource_result=$(curl -s 
"http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${URI_TO_TEST_AGAINST}" -H "Authorization: Bearer $backend_token") + # resource_result=$(curl -s "http://${HOSTNAME}/realms/spiffworkflow/authz/protection/resource_set?matchingUri=false&deep=true&max=-1&exactName=false&type=admin" -H "Authorization: Bearer $backend_token") + + resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '') + if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then + >&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}" + exit 1 + fi + echo $resource_id_name_pairs + + echo "Getting permissions" + for resource_id_name_pair in $resource_id_name_pairs ; do + resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair") + resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair") + + echo "Checking $resource_name" + curl -s -X POST "$KEYCLOAK_URL" "$INSECURE" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -H "Authorization: Basic $BACKEND_BASIC_AUTH" \ + -d "audience=${BACKEND_CLIENT_ID}" \ + --data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \ + -d "permission=${resource_id}" \ + -d "subject_token=${backend_token}" \ + | jq . + done +else + echo "Failed auth result: $result" +fi diff --git a/bin/git_commit_bpmn_models_repo b/bin/git_commit_bpmn_models_repo new file mode 100755 index 00000000..13e18da9 --- /dev/null +++ b/bin/git_commit_bpmn_models_repo @@ -0,0 +1,36 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +# HELP: git adds and commits the entire BPMN models directory, including all process groups + +bpmn_models_absolute_dir="$1" +git_commit_message="$2" +git_commit_username="$3" +git_commit_email="$4" + +if [[ -z "${2:-}" ]]; then + >&2 echo "usage: $(basename "$0") [bpmn_models_absolute_dir] [git_commit_message]" + exit 1 +fi + +cd "$bpmn_models_absolute_dir" +git add . + +# https://unix.stackexchange.com/a/155077/456630 +if [ -z "$(git status --porcelain)" ]; then + echo "No changes to commit" +else + if [[ -n "$git_commit_username" ]]; then + git config --local user.name "$git_commit_username" + fi + if [[ -n "$git_commit_email" ]]; then + git config --local user.email "$git_commit_email" + fi + git commit -m "$git_commit_message" +fi diff --git a/bin/import_tickets_for_command_line.py b/bin/import_tickets_for_command_line.py new file mode 100644 index 00000000..34b2b9af --- /dev/null +++ b/bin/import_tickets_for_command_line.py @@ -0,0 +1,112 @@ +"""Grabs tickets from csv and makes process instances.""" +import csv +import os + +from flask_bpmn.models.db import db + +from spiffworkflow_backend import create_app +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) + + +def print_process_instance_count(process_model_identifier_ticket: str) -> None: + """Print process instance count.""" + process_instances = ProcessInstanceModel.query.filter_by( + process_model_identifier=process_model_identifier_ticket + ).all() + process_instance_count = len(process_instances) + print(f"process_instance_count: {process_instance_count}") + + +def main(): + """Main.""" + os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development" + flask_env_key = 
"FLASK_SESSION_SECRET_KEY" + os.environ[flask_env_key] = "whatevs" + app = create_app() + with app.app_context(): + + process_model_identifier_ticket = "ticket" + db.session.query(ProcessInstanceModel).filter( + ProcessInstanceModel.process_model_identifier + == process_model_identifier_ticket + ).delete() + db.session.commit() + + """Print process instance count.""" + process_instances = ProcessInstanceModel.query.filter_by( + process_model_identifier=process_model_identifier_ticket + ).all() + process_instance_count = len(process_instances) + print(f"process_instance_count: {process_instance_count}") + + columns_to_data_key_mappings = { + "Month": "month", + "MS": "milestone", + "ID": "req_id", + "Dev Days": "dev_days", + "Feature": "feature", + "Priority": "priority", + } + columns_to_header_index_mappings = {} + + user = UserModel.query.first() + + with open("tests/files/tickets.csv") as infile: + reader = csv.reader(infile, delimiter=",") + + # first row is garbage + next(reader) + + header = next(reader) + for column_name in columns_to_data_key_mappings: + columns_to_header_index_mappings[column_name] = header.index( + column_name + ) + id_index = header.index("ID") + priority_index = header.index("Priority") + print(f"header: {header}") + for row in reader: + ticket_identifier = row[id_index] + priority = row[priority_index] + print(f"ticket_identifier: {ticket_identifier}") + print(f"priority: {priority}") + + process_instance = ProcessInstanceService.create_process_instance( + process_model_identifier_ticket, + user, + process_group_identifier="sartography-admin", + ) + processor = ProcessInstanceProcessor(process_instance) + + processor.do_engine_steps() + # processor.save() + + for ( + column_name, + desired_data_key, + ) in columns_to_data_key_mappings.items(): + appropriate_index = columns_to_header_index_mappings[column_name] + processor.bpmn_process_instance.data[desired_data_key] = row[ + appropriate_index + ] + + print(f"datas: 
{processor.bpmn_process_instance.data}") + if processor.bpmn_process_instance.data["month"] == "": + continue + + # you at least need a month, or else this row in the csv is considered garbage + # if processor.bpmn_process_instance.data["month"] is None: + # continue + + processor.save() + + +# if __name__ == "__main__": +main() diff --git a/bin/import_tickets_for_script_task.py b/bin/import_tickets_for_script_task.py new file mode 100644 index 00000000..f747c5f7 --- /dev/null +++ b/bin/import_tickets_for_script_task.py @@ -0,0 +1,110 @@ +"""Import tickets, for use in script task.""" + + +def main(): + """Use main to avoid global namespace.""" + import csv + + from flask_bpmn.models.db import db + + from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + from spiffworkflow_backend.models.user import UserModel + from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, + ) + from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, + ) + from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, + ) + + process_model_identifier_ticket = "ticket" + db.session.query(ProcessInstanceModel).filter( + ProcessInstanceModel.process_model_identifier == process_model_identifier_ticket + ).delete() + db.session.commit() + + """Print process instance count.""" + process_instances = ProcessInstanceModel.query.filter_by( + process_model_identifier=process_model_identifier_ticket + ).all() + process_instance_count = len(process_instances) + print(f"process_instance_count: {process_instance_count}") + + columns_to_data_key_mappings = { + "Month": "month", + "MS": "milestone", + "Done?": "done", + "#": "notion_id", + "ID": "req_id", + "Dev Days": "dev_days", + "Feature": "feature", + "Feature description": "feature_description", + "Priority": "priority", + } + columns_to_header_index_mappings = {} + + user = UserModel.query.first() + + with 
open("tests/files/tickets.csv") as infile: + reader = csv.reader(infile, delimiter=",") + + # first row is garbage + next(reader) + + header = next(reader) + for column_name in columns_to_data_key_mappings: + columns_to_header_index_mappings[column_name] = header.index(column_name) + id_index = header.index("ID") + priority_index = header.index("Priority") + month_index = header.index("Month") + print(f"header: {header}") + for row in reader: + ticket_identifier = row[id_index] + priority = row[priority_index] + month = row[month_index] + print(f"ticket_identifier: {ticket_identifier}") + print(f"priority: {priority}") + # if there is no month, who cares about it. + if month: + process_instance = ProcessInstanceService.create_process_instance( + process_model_identifier=process_model_identifier_ticket, + user=user, + process_group_identifier="sartography-admin", + ) + processor = ProcessInstanceProcessor(process_instance) + + processor.do_engine_steps() + # processor.save() + + for ( + column_name, + desired_data_key, + ) in columns_to_data_key_mappings.items(): + appropriate_index = columns_to_header_index_mappings[column_name] + print(f"appropriate_index: {appropriate_index}") + processor.bpmn_process_instance.data[desired_data_key] = row[ + appropriate_index + ] + + # you at least need a month, or else this row in the csv is considered garbage + month_value = processor.bpmn_process_instance.data["month"] + if month_value == "" or month_value is None: + db.delete(process_instance) + db.session.commit() + continue + + processor.save() + + process_instance_data = processor.get_data() + print(f"process_instance_data: {process_instance_data}") + + ProcessInstanceReportModel.add_fixtures() + print("added report fixtures") + + +main() + +# to avoid serialization issues +del main diff --git a/bin/keycloak_test_secrets.json b/bin/keycloak_test_secrets.json new file mode 100644 index 00000000..12dc3faa --- /dev/null +++ b/bin/keycloak_test_secrets.json @@ -0,0 +1,12 @@ +{ 
+ "web": { + "issuer": "http://localhost:8080/realms/finance", + "auth_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/auth", + "client_id": "myclient", + "client_secret": "OAh6rkjXIiPJDtPOz4459i3VtdlxGcce", + "redirect_uris": ["http://localhost:5005/*"], + "userinfo_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/userinfo", + "token_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token", + "token_introspection_uri": "http://localhost:8080/realms/finance/protocol/openid-connect/token/introspect" + } +} diff --git a/bin/keycloak_test_server.py b/bin/keycloak_test_server.py new file mode 100644 index 00000000..59efd36c --- /dev/null +++ b/bin/keycloak_test_server.py @@ -0,0 +1,104 @@ +# type: ignore +"""keycloak_test_server.""" +# ./bin/start_keycloak # starts keycloak on 8080 +# pip install flask_oidc +# pip install itsdangerous==2.0.1 +# python ./bin/keycloak_test_server.py # starts flask on 5005 +import json +import logging + +import requests +from flask import Flask +from flask import g +from flask_oidc import OpenIDConnect + +logging.basicConfig(level=logging.DEBUG) + +app = Flask(__name__) +app.config.update( + { + "SECRET_KEY": "SomethingNotEntirelySecret", + "TESTING": True, + "DEBUG": True, + "OIDC_CLIENT_SECRETS": "bin/keycloak_test_secrets.json", + "OIDC_ID_TOKEN_COOKIE_SECURE": False, + "OIDC_REQUIRE_VERIFIED_EMAIL": False, + "OIDC_USER_INFO_ENABLED": True, + "OIDC_OPENID_REALM": "flask-demo", + "OIDC_SCOPES": ["openid", "email", "profile"], + "OIDC_INTROSPECTION_AUTH_METHOD": "client_secret_post", + } +) + +oidc = OpenIDConnect(app) + + +@app.route("/") +def hello_world(): + """Hello_world.""" + if oidc.user_loggedin: + return ( + 'Hello, %s, See private ' + 'Log out' + ) % oidc.user_getfield("preferred_username") + else: + return 'Welcome anonymous, Log in' + + +@app.route("/private") +@oidc.require_login +def hello_me(): + """Example for protected endpoint that extracts private information 
from the OpenID Connect id_token. + + Uses the accompanied access_token to access a backend service. + """ + info = oidc.user_getinfo(["preferred_username", "email", "sub"]) + + username = info.get("preferred_username") + email = info.get("email") + user_id = info.get("sub") + + if user_id in oidc.credentials_store: + try: + from oauth2client.client import OAuth2Credentials + + access_token = OAuth2Credentials.from_json( + oidc.credentials_store[user_id] + ).access_token + print("access_token=<%s>" % access_token) + headers = {"Authorization": "Bearer %s" % (access_token)} + # YOLO + greeting = requests.get( + "http://localhost:8080/greeting", headers=headers + ).text + except BaseException: + print("Could not access greeting-service") + greeting = "Hello %s" % username + + return """{} your email is {} and your user_id is {}! + """.format( + greeting, + email, + user_id, + ) + + +@app.route("/api", methods=["POST"]) +@oidc.accept_token(require_token=True, scopes_required=["openid"]) +def hello_api(): + """OAuth 2.0 protected API endpoint accessible via AccessToken.""" + return json.dumps({"hello": "Welcome %s" % g.oidc_token_info["sub"]}) + + +@app.route("/logout") +def logout(): + """Performs local logout by removing the session cookie.""" + oidc.logout() + return 'Hi, you have been logged out! 
Return' + + +if __name__ == "__main__": + app.run(port=5005) diff --git a/bin/quarkus-realm.json b/bin/quarkus-realm.json new file mode 100644 index 00000000..5a8750cb --- /dev/null +++ b/bin/quarkus-realm.json @@ -0,0 +1,1964 @@ +{ + "id": "11d78bf6-6d10-4484-baba-a1388379d68b", + "realm": "quarkus", + "notBefore": 0, + "revokeRefreshToken": false, + "refreshTokenMaxReuse": 0, + "accessTokenLifespan": 300, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "ssoSessionIdleTimeoutRememberMe": 0, + "ssoSessionMaxLifespanRememberMe": 0, + "offlineSessionIdleTimeout": 2592000, + "offlineSessionMaxLifespanEnabled": false, + "offlineSessionMaxLifespan": 5184000, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "enabled": true, + "sslRequired": "external", + "registrationAllowed": false, + "registrationEmailAsUsername": false, + "rememberMe": false, + "verifyEmail": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "permanentLockout": false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 30, + "roles": { + "realm": [ + { + "id": "3fc80564-13ac-4e7b-9986-322f571e82bc", + "name": "confidential", + "composite": false, + "clientRole": false, + "containerId": "11d78bf6-6d10-4484-baba-a1388379d68b", + "attributes": {} + }, + { + "id": "39eb64c8-66a9-4983-9c81-27ea7e2f6273", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "11d78bf6-6d10-4484-baba-a1388379d68b", + "attributes": {} + }, + { + "id": 
"8c1abe12-62fe-4a06-ae0d-f5fb67dddbb0", + "name": "admin", + "composite": false, + "clientRole": false, + "containerId": "11d78bf6-6d10-4484-baba-a1388379d68b", + "attributes": {} + }, + { + "id": "5afce544-6a3c-495f-b805-fd737cf5081e", + "name": "user", + "composite": false, + "clientRole": false, + "containerId": "11d78bf6-6d10-4484-baba-a1388379d68b", + "attributes": {} + }, + { + "id": "bc431d62-a80a-425b-961a-0fb3fc59006d", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "11d78bf6-6d10-4484-baba-a1388379d68b", + "attributes": {} + } + ], + "client": { + "realm-management": [ + { + "id": "7db1f38d-d436-4725-93fd-030a3bbe628e", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "1163b9bd-7319-4154-a25f-0101b2548d21", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "73d0a556-072b-404f-bf8e-10e2544c8c27", + "name": "view-identity-providers", + "description": "${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "7e727e28-2095-4443-b2da-865e684f2308", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "df9e5352-f835-4467-bcaf-cb1b5f55c1ec", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "fa77909a-32a3-41ae-9983-2b92ae03080c", + "name": 
"manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "a8780507-dc72-4433-8b95-b8e4f3c37d0e", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "f7f4697a-3977-42f6-af86-9bb006cf4d04", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "impersonation", + "manage-identity-providers", + "view-identity-providers", + "view-realm", + "query-users", + "manage-clients", + "manage-events", + "manage-realm", + "view-authorization", + "manage-authorization", + "view-users", + "create-client", + "query-clients", + "query-groups", + "manage-users", + "view-clients", + "view-events", + "query-realms" + ] + } + }, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "ca7dc1ce-a981-4efe-b3f0-a7192b6d3943", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "a0ab4faa-00a9-4f52-ac9f-8e764b6a8126", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "0b4ed5e0-eceb-4d81-ba05-fa67022abe59", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "c10336be-06f3-40ef-bef5-28d8c9b8a1e2", + "name": "create-client", + "description": "${role_create-client}", + "composite": 
false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "1a1ffadc-11d5-44ea-bac0-d94372c8ae5c", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-groups", "query-users"] + } + }, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "5ba9a1a3-9027-4531-8253-b91f6058513c", + "name": "query-clients", + "description": "${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "b4fba807-7a7e-4e3e-bd31-45703305a9e3", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "c9384254-0af3-434c-b4ed-7c94f59a8247", + "name": "manage-users", + "description": "${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "9a0022f2-bd58-4418-828c-a8e7abe3346b", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-clients"] + } + }, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "83df8311-4366-4d22-9425-eccc343faa3f", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + }, + { + "id": "e81bf277-047f-4bdd-afd6-59e2016c5066", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "376bd940-e50a-4495-80fc-9c6c07312748", + "attributes": {} + } + ], + 
"security-admin-console": [], + "admin-cli": [], + "backend-service": [ + { + "id": "df147a91-6da7-4bbc-866c-f30cf99b2637", + "name": "uma_protection", + "composite": false, + "clientRole": true, + "containerId": "0ac5df91-e044-4051-bd03-106a3a5fb9cc", + "attributes": {} + } + ], + "broker": [ + { + "id": "d36865b0-7ade-4bcd-a7dc-1dacbd80f169", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "53d4fe53-a039-471e-886a-28eddc950e95", + "attributes": {} + } + ], + "account": [ + { + "id": "539325a0-d9b3-4821-97ee-d42999296b62", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "e55e1234-38fa-432d-8d90-39f5e024688d", + "attributes": {} + }, + { + "id": "e4af836c-c884-4a57-8b1d-fb673b0fe3a5", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": ["manage-account-links"] + } + }, + "clientRole": true, + "containerId": "e55e1234-38fa-432d-8d90-39f5e024688d", + "attributes": {} + }, + { + "id": "35d1c998-bcae-4ab1-a026-4c67bff49a98", + "name": "manage-account-links", + "description": "${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "e55e1234-38fa-432d-8d90-39f5e024688d", + "attributes": {} + } + ] + } + }, + "groups": [], + "defaultRoles": ["uma_authorization", "offline_access"], + "requiredCredentials": ["password"], + "otpPolicyType": "totp", + "otpPolicyAlgorithm": "HmacSHA1", + "otpPolicyInitialCounter": 0, + "otpPolicyDigits": 6, + "otpPolicyLookAheadWindow": 1, + "otpPolicyPeriod": 30, + "otpSupportedApplications": ["FreeOTP", "Google Authenticator"], + "scopeMappings": [ + { + "clientScope": "offline_access", + "roles": ["offline_access"] + } + ], + "clients": [ + { + "id": "e55e1234-38fa-432d-8d90-39f5e024688d", + "clientId": "account", + "name": "${client_account}", + "baseUrl": 
"/auth/realms/quarkus/account", + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "0136c3ef-0dfd-4b13-a6d0-2c8b6358edec", + "defaultRoles": ["view-profile", "manage-account"], + "redirectUris": ["/auth/realms/quarkus/account/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "e9cc41a2-8e35-4d5e-949e-4879880c2ddb", + "clientId": "admin-cli", + "name": "${client_admin-cli}", + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "a951803a-79c7-46a6-8197-e32835286971", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": false, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "53d4fe53-a039-471e-886a-28eddc950e95", + "clientId": "broker", + "name": "${client_broker}", + "surrogateAuthRequired": false, + 
"enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "e1f7edd7-e15c-43b4-8736-ff8204d16836", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "0ac5df91-e044-4051-bd03-106a3a5fb9cc", + "clientId": "backend-service", + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "secret", + "redirectUris": ["*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "authorizationServicesEnabled": true, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "3eac903f-c16b-4a78-a7e8-eb8f4d402b71", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientId", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String" + } + }, + { + "id": "8422cefe-7f42-4f3b-abad-5f06f7d4b748", + "name": "Client IP Address", + "protocol": "openid-connect", + 
"protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + }, + { + "id": "988e47d6-2055-45eb-82d6-0b8b25c629fc", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "authorizationSettings": { + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "User Resource", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "df1b74a9-3f10-499d-a581-368de48e512b", + "uris": ["/api/users/*"] + }, + { + "name": "Administration Resource", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "7124e2f1-e6dc-44b4-87ab-24b010090b97", + "uris": ["/api/admin/*"] + } + ], + "policies": [ + { + "id": "b8710fa6-160e-4de0-adf3-398c7007a0af", + "name": "Any User Policy", + "description": "Any user granted with the user role can access something", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"user\",\"required\":false}]" + } + }, + { + "id": "fcef30b2-68b2-4b78-9f3d-9162c6cdf5cb", + "name": "Only Administrators", + "description": "Only administrators can access", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"admin\",\"required\":false}]" + } + }, + { + "id": "3479dd56-02e9-4222-94fe-6a13cd065195", + "name": "User Resource 
Permission", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"User Resource\"]", + "applyPolicies": "[\"Any User Policy\"]" + } + }, + { + "id": "60188298-d55b-4066-b231-6a7c56ff7cc5", + "name": "Administration Resource Permission", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"Administration Resource\"]", + "applyPolicies": "[\"Only Administrators\"]" + } + } + ], + "scopes": [], + "decisionStrategy": "UNANIMOUS" + } + }, + { + "id": "376bd940-e50a-4495-80fc-9c6c07312748", + "clientId": "realm-management", + "name": "${client_realm-management}", + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "c41b709a-a012-4c69-89d7-4f926dba0619", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "a8732cac-ae0f-44ec-b7f3-bd2c41eff13c", + "clientId": "security-admin-console", + "name": "${client_security-admin-console}", + "baseUrl": "/auth/admin/quarkus/console/index.html", + "surrogateAuthRequired": false, + "enabled": true, + "clientAuthenticatorType": "client-secret", + "secret": "e571b211-2550-475d-b87f-116ff54091ee", + "redirectUris": ["/auth/admin/quarkus/console/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + 
"standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": {}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "280528ca-5e96-4bb9-9fc0-20311caac32d", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "role_list", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + } + ], + "clientScopes": [ + { + "id": "520cc3ef-2c6b-4d84-bcde-8c063241f4bd", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${addressScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "c1d3bd07-0a5f-4f4f-b381-c58a7b723029", + "name": "address", + "protocol": "openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + "user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + } + } + ] + }, + { + "id": "19920c96-a383-4f35-8ee9-27833263cf03", + "name": "email", + "description": "OpenID Connect built-in scope: email", 
+ "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${emailScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "36a0adf0-6c25-419f-98d7-cdeada8661aa", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + }, + { + "id": "b0c39901-5e5d-4436-b685-908bb90ea1d9", + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "55b3ee1c-cbf9-4526-93d7-aa56a9c5f1cb", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "59128144-a21a-4744-bb55-e66ff0503b18", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + }, + { + "id": "69351a63-7d6e-45d0-be47-088c83b20fdb", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "multivalued": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + 
"claim.name": "groups", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "3f190f54-8e3a-4c82-a799-bd12ddc475b2", + "name": "offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + }, + { + "id": "defa3480-5368-4f34-8075-49fb982b71b3", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${phoneScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "069ae414-9e98-4612-a3d6-e8b5a1fa841d", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + }, + { + "id": "cea58e24-d0e0-4cc6-9e34-7b3bf7d6d85b", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "b7321e2e-dd8e-41cf-a527-c765155c3f78", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${profileScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "1d4d3df5-7af5-488e-8477-0ad7cb74d50a", + "name": "nickname", + "protocol": "openid-connect", 
+ "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "1a5e26d6-211e-4f8a-b696-0ea9577db25a", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + "id": "18971685-6dd7-420f-9c09-879c4f2d54d8", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "String" + } + }, + { + "id": "b970d96b-0156-4db0-9beb-9c84c173e619", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + }, + { + "id": "50287033-df21-45c6-aa46-c3060e6f9855", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "3dc6b97e-7063-4077-98d1-0cacf9029c7b", + "name": "full name", + "protocol": "openid-connect", + 
"protocolMapper": "oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + }, + { + "id": "3fb9391b-376c-42ef-b012-4df461c617cc", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": "83f7fc4a-5386-4f86-a103-6585e138b61d", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "id": "8ef177b3-f485-44b1-afee-1901393b00c7", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "e994cbc7-2a1a-4465-b7b7-12b35b4fe49e", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "abaa4c9e-1fa2-4b45-a1bb-b3d650de9aca", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + 
"userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "bf21b514-81fd-4bbe-9236-bab5fcf54561", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + }, + { + "id": "254f8de4-08e7-4d3d-a87f-4b238f0f922b", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "7934bf2a-cfc3-4b2d-a5cb-287f3ed2a977", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "f3dc793d-6011-4861-b538-399dde5434c0", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "22eeabf8-a3c3-4026-a351-367f8ace7927", + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "f72c1acd-c367-41b1-8646-b6bd5fff3e3f", + "name": "roles", 
+ "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "true", + "consent.screen.text": "${rolesScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "cd8e589e-5fa7-4dae-bf6e-e8f6a3fd3cff", + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String", + "multivalued": "true" + } + }, + { + "id": "708b19d1-0709-4278-b5a1-bcbeec11f51a", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + }, + { + "id": "25e97210-30c7-4f35-be11-407f1fa674cb", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String", + "multivalued": "true" + } + } + ] + }, + { + "id": "52618957-a4e8-4c6f-a902-217f2c41a2fd", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false", + "consent.screen.text": "" + }, + "protocolMappers": [ + { + "id": "a66ddadf-312f-491f-993c-fa58685815c6", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": {} + } + ] + } + ], + "defaultDefaultClientScopes": [ + "role_list", + "profile", + "email", + "roles", + "web-origins" + ], + "defaultOptionalClientScopes": [ + "offline_access", + 
"address", + "phone", + "microprofile-jwt" + ], + "browserSecurityHeaders": { + "contentSecurityPolicyReportOnly": "", + "xContentTypeOptions": "nosniff", + "xRobotsTag": "none", + "xFrameOptions": "SAMEORIGIN", + "xXSSProtection": "1; mode=block", + "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "strictTransportSecurity": "max-age=31536000; includeSubDomains" + }, + "smtpServer": {}, + "eventsEnabled": false, + "eventsListeners": ["jboss-logging"], + "enabledEventTypes": [], + "adminEventsEnabled": false, + "adminEventsDetailsEnabled": false, + "components": { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ + { + "id": "a7679218-373d-48ca-88f8-429985faeae3", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-full-name-mapper", + "saml-user-attribute-mapper", + "saml-user-property-mapper", + "oidc-address-mapper", + "saml-role-list-mapper", + "oidc-sha256-pairwise-sub-mapper", + "oidc-usermodel-attribute-mapper", + "oidc-usermodel-property-mapper" + ] + } + }, + { + "id": "2ebf6f9f-4bfc-44b9-ad7c-282f2274d35b", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "552093c3-0a0a-4234-ad7c-ae660f0f0db1", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "8f27cf74-cee7-4a73-851f-982ee45157ca", + "name": "Trusted Hosts", + "providerId": "trusted-hosts", + "subType": "anonymous", + "subComponents": {}, + "config": { + "host-sending-registration-request-must-match": ["true"], + "client-uris-must-match": ["true"] + } + }, + { + "id": 
"ff570525-6c96-4500-9d73-c02e708b39de", + "name": "Full Scope Disabled", + "providerId": "scope", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "b52284eb-123a-4718-aac9-857530a24a9b", + "name": "Max Clients Limit", + "providerId": "max-clients", + "subType": "anonymous", + "subComponents": {}, + "config": { + "max-clients": ["200"] + } + }, + { + "id": "2b8c0a6d-d5c0-4ea2-8a9c-4843d3e04ec6", + "name": "Consent Required", + "providerId": "consent-required", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "bf59de5a-2c93-43cc-a9aa-03be0129fe53", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "saml-user-attribute-mapper", + "oidc-full-name-mapper", + "saml-role-list-mapper", + "saml-user-property-mapper", + "oidc-usermodel-attribute-mapper", + "oidc-address-mapper", + "oidc-usermodel-property-mapper", + "oidc-sha256-pairwise-sub-mapper" + ] + } + } + ], + "org.keycloak.keys.KeyProvider": [ + { + "id": "b3efd9cc-28b6-4404-82af-8a48a966b8ff", + "name": "rsa-generated", + "providerId": "rsa-generated", + "subComponents": {}, + "config": { + "privateKey": [ + 
"MIIEowIBAAKCAQEAn5T13suF8mlS+pJXp0U1bto41nW55wpcs+Rps8ZVCRyJKWqzwSCYnI7lm0rB2wBpAAO4OPoj1zlmVoFmBPsDU9Xf7rjsJb5LIzIQDCZY44aSDZt6RR+gakPiQvlzHyW/RozYpngDJF7TsTD7rdRF1xQ4RprfBF8fwK/xsU7pxbeom5xDHZhz3fiw8s+7UdbmnazDHfAjU58aUrLGgVRfUsuoHjtsptYlOIXEifaeMetXZE+HhqLYRHQPDap5fbBJl773Trosn7N9nmzN4x1xxGj9So21WC5UboQs9sAIVgizc4omjZ5Y4RN9HLH7G4YwJctNntzmnJhDui9zAO+zSQIDAQABAoIBADi+F7rTtVoft0Cfnok8o6Y58/HVxHdxiMryUd95iy0FN4RBi48FTx6D9QKFz25Ws/8sU2n3D51srIXf1u24b1N0/f39RQKaqk7mcyxOylaEuBQcj5pah4ihgKd92UBfBKdKV5LBo6RgD3e2yhbiHr8+UlBQqzH7vOef6Bm6zIbfmi3N88swAJhP0YizRZFklsbmLsK6nkwyro00CHJvPVKSBbM+ad+/zIBsLw56MvNngB5TuFguUgoljd6M1T2z4utmZGlTUqrfE1onAVLJZoGnRohyIr7dJEg6YxWR70PxsgmkDKyeRvet9P1trO0n+OSprusfrC3cHJStabap1V0CgYEA1A/CtsqTnjdYYsB19eumZgdpzUgNc/YEAzZ/OWb8yTLoB2ncci+63A1rXHUXAqJFY7vtjn5mxv7SuASNbUrzq+6KfZvC1x9XEtnczqT/ypunNfxmIZuj8Nuu6vtURguZ8kPPwdkI8toTizRFeRE5ZDBvoQryiEVYugfHaHT5vzsCgYEAwKWODwquI0Lv9BuwdNVrBXQpkKh3ZfYOA7i9xvhxlM7xUu8OMCwwCPn3r7vrW5APjTqX4h330mJ44SLEs+7gbCUs4BbJBLA6g0ChlHa9PTkxp6tk2nDF/B34fxiZSRkE85L+d+at0Dc3hnlzLCJCzJawGpoPniPU9e4w0p4dN0sCgYAsGnMGjS8SUrRhJWHjGXVr9tK8TOXvXhULjgP7rj2Yoqu7Dvs4DFEyft/7RKbad2EzEtyfLA64CDtO5jN7rYDsGxpWcVSeZPg5BXJ0z8AbJTArfCjJiJMZ/rZsTIUEZFlKF2xYBolj6JLz+pUQTtK+0YwF1D8ItFN1rTR9twZSDQKBgQC6sPXNX+VH6LuPTjIf1x8CxwLs3EXxOpV0R9kp9GRl+HJnk6GlT30xhcThufQo5KAdllXQXIhoiuNoEoCbevhj9Vbax1oBQCNERSMRNEzKAx46xd9TzYwgeo7x5E3QR/3DaoVOfu+cY5ZcrF/PulgP2kxJS1mtQD5GIpGP2oinpwKBgGqiqTFPqRcelx76vBvTU+Jp1zM62T4AotbMrSQR/oUvqHe5Ytj/SbZx+wbbHAiyGgV700Mosyviik83YEAbR3kdOPjgYvAJJW2Y3jEMdQ7MwriXz8XLh5BGmYfVjkSOJXed9ua9WlYLKOJeXXv191BbDvrx5NXuJyVVU4vJx3YZ" + ], + "certificate": [ + 
"MIICnTCCAYUCBgFp4EYIrjANBgkqhkiG9w0BAQsFADASMRAwDgYDVQQDDAdwcm90ZWFuMB4XDTE5MDQwMjIyNTYxOVoXDTI5MDQwMjIyNTc1OVowEjEQMA4GA1UEAwwHcHJvdGVhbjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAJ+U9d7LhfJpUvqSV6dFNW7aONZ1uecKXLPkabPGVQkciSlqs8EgmJyO5ZtKwdsAaQADuDj6I9c5ZlaBZgT7A1PV3+647CW+SyMyEAwmWOOGkg2bekUfoGpD4kL5cx8lv0aM2KZ4AyRe07Ew+63URdcUOEaa3wRfH8Cv8bFO6cW3qJucQx2Yc934sPLPu1HW5p2swx3wI1OfGlKyxoFUX1LLqB47bKbWJTiFxIn2njHrV2RPh4ai2ER0Dw2qeX2wSZe+9066LJ+zfZ5szeMdccRo/UqNtVguVG6ELPbACFYIs3OKJo2eWOETfRyx+xuGMCXLTZ7c5pyYQ7ovcwDvs0kCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAVtmRKDb4OK5iSA46tagMBkp6L7WuPpCWuHGWwobEP+BecYsShW7zP3s12oA8SNSwbhvu0CRqgzxhuypgf3hKQFVU153Erv4hzkj+8S0s5LR/ZE7tDNY2lzJ3yQKXy3Md7EkuzzvOZ50MTrcSKAanWq/ZW1OTnrtGymj5zGJnTg7mMnJzEIGePxkvPu/QdchiPBLqxfZYm1jsFGY25djOC3N/KmVcRVmPRGuu6D8tBFHlKoPfZYPdbMvsvs24aupHKRcZ+ofTCpK+2Qo8c0pSSqeEYHGmuGqC6lC6ozxtxSABPO9Q1R1tZBU7Kg5HvXUwwmoVS3EGub46YbHqbmWMLg==" + ], + "priority": ["100"] + } + }, + { + "id": "20460ca5-ec24-4a9b-839a-457743d3f841", + "name": "hmac-generated", + "providerId": "hmac-generated", + "subComponents": {}, + "config": { + "kid": ["96afd00e-85cf-4d35-b18e-061d3813d8b2"], + "secret": [ + "qBFGKdUGf6xDgKphnRfoFzIzaFHJW4bYnZ9MinPFzN38X5_ctq-2u1q5RdZzeJukXvk2biHB8_s3DxWmmLZFsA" + ], + "priority": ["100"], + "algorithm": ["HS256"] + } + }, + { + "id": "4f02d984-7a23-4ce1-8591-848a71390efe", + "name": "aes-generated", + "providerId": "aes-generated", + "subComponents": {}, + "config": { + "kid": ["b04473d3-8395-4016-b455-19a9e951106b"], + "secret": ["x68mMOVdz3qKWzltzReV0g"], + "priority": ["100"] + } + } + ] + }, + "internationalizationEnabled": false, + "supportedLocales": [], + "authenticationFlows": [ + { + "id": "d6c3e282-a738-4b8b-98c2-378b9faf8344", + "alias": "Handle Existing Account", + "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": 
[ + { + "authenticator": "idp-confirm-link", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "idp-email-verification", + "requirement": "ALTERNATIVE", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "requirement": "ALTERNATIVE", + "priority": 30, + "flowAlias": "Verify Existing Account by Re-authentication", + "userSetupAllowed": false, + "autheticatorFlow": true + } + ] + }, + { + "id": "4855860b-4009-4f1b-ba6b-60581618ea62", + "alias": "Verify Existing Account by Re-authentication", + "description": "Reauthentication of existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-username-password-form", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "auth-otp-form", + "requirement": "OPTIONAL", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "8a9872b0-65f1-47ff-9565-fa826ac64cd4", + "alias": "browser", + "description": "browser based authentication", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-cookie", + "requirement": "ALTERNATIVE", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "auth-spnego", + "requirement": "DISABLED", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "identity-provider-redirector", + "requirement": "ALTERNATIVE", + "priority": 25, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "requirement": "ALTERNATIVE", + "priority": 30, + "flowAlias": "forms", + "userSetupAllowed": false, + "autheticatorFlow": true + } + ] + }, + { + "id": "51b8ed14-62b6-49b3-b602-0b51508349e0", + "alias": "clients", 
+ "description": "Base authentication for clients", + "providerId": "client-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "client-secret", + "requirement": "ALTERNATIVE", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "client-jwt", + "requirement": "ALTERNATIVE", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "client-secret-jwt", + "requirement": "ALTERNATIVE", + "priority": 30, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "client-x509", + "requirement": "ALTERNATIVE", + "priority": 40, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "9b65133a-ee71-494a-a659-6804513fc30b", + "alias": "direct grant", + "description": "OpenID Connect Resource Owner Grant", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "direct-grant-validate-username", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "direct-grant-validate-password", + "requirement": "REQUIRED", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "direct-grant-validate-otp", + "requirement": "OPTIONAL", + "priority": 30, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "f62bc4ad-25ac-4f83-963b-32820af3a683", + "alias": "docker auth", + "description": "Used by Docker clients to authenticate against the IDP", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "docker-http-basic-authenticator", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "1b423fe7-f312-404c-903b-f1260a77259b", + "alias": 
"first broker login", + "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "review profile config", + "authenticator": "idp-review-profile", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticatorConfig": "create unique user config", + "authenticator": "idp-create-user-if-unique", + "requirement": "ALTERNATIVE", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "requirement": "ALTERNATIVE", + "priority": 30, + "flowAlias": "Handle Existing Account", + "userSetupAllowed": false, + "autheticatorFlow": true + } + ] + }, + { + "id": "9c9530b3-e3c6-481b-99e8-1461a9752e8e", + "alias": "forms", + "description": "Username, password, otp and other auth forms.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-username-password-form", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "auth-otp-form", + "requirement": "OPTIONAL", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "70fb94ac-354c-4629-a5fe-5135d0137964", + "alias": "http challenge", + "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "no-cookie-redirect", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "basic-auth", + "requirement": "REQUIRED", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + 
"authenticator": "basic-auth-otp", + "requirement": "DISABLED", + "priority": 30, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "auth-spnego", + "requirement": "DISABLED", + "priority": 40, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "08292a4a-6722-4e33-a5d9-354c2628f567", + "alias": "registration", + "description": "registration flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-page-form", + "requirement": "REQUIRED", + "priority": 10, + "flowAlias": "registration form", + "userSetupAllowed": false, + "autheticatorFlow": true + } + ] + }, + { + "id": "668dc4b6-fe1a-4d24-ab5b-bc76e20ac390", + "alias": "registration form", + "description": "registration form", + "providerId": "form-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-user-creation", + "requirement": "REQUIRED", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "registration-profile-action", + "requirement": "REQUIRED", + "priority": 40, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "registration-password-action", + "requirement": "REQUIRED", + "priority": 50, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "registration-recaptcha-action", + "requirement": "DISABLED", + "priority": 60, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "a0e191f0-ce9a-4a75-b6e4-97332b05f7e5", + "alias": "reset credentials", + "description": "Reset credentials for a user if they forgot their password or something", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "reset-credentials-choose-user", + "requirement": "REQUIRED", + "priority": 10, + 
"userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "reset-credential-email", + "requirement": "REQUIRED", + "priority": 20, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "reset-password", + "requirement": "REQUIRED", + "priority": 30, + "userSetupAllowed": false, + "autheticatorFlow": false + }, + { + "authenticator": "reset-otp", + "requirement": "OPTIONAL", + "priority": 40, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + }, + { + "id": "ad4beb21-8e9a-4fca-af41-0f757169f26c", + "alias": "saml ecp", + "description": "SAML ECP Profile Authentication Flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "http-basic-authenticator", + "requirement": "REQUIRED", + "priority": 10, + "userSetupAllowed": false, + "autheticatorFlow": false + } + ] + } + ], + "authenticatorConfig": [ + { + "id": "25632f91-6071-423a-8e9c-7322cdc1b011", + "alias": "create unique user config", + "config": { + "require.password.update.after.registration": "false" + } + }, + { + "id": "02d7f70b-1ebc-4e72-a65c-d94a600895ac", + "alias": "review profile config", + "config": { + "update.profile.on.first.login": "missing" + } + } + ], + "requiredActions": [ + { + "alias": "CONFIGURE_TOTP", + "name": "Configure OTP", + "providerId": "CONFIGURE_TOTP", + "enabled": true, + "defaultAction": false, + "priority": 10, + "config": {} + }, + { + "alias": "terms_and_conditions", + "name": "Terms and Conditions", + "providerId": "terms_and_conditions", + "enabled": false, + "defaultAction": false, + "priority": 20, + "config": {} + }, + { + "alias": "UPDATE_PASSWORD", + "name": "Update Password", + "providerId": "UPDATE_PASSWORD", + "enabled": true, + "defaultAction": false, + "priority": 30, + "config": {} + }, + { + "alias": "UPDATE_PROFILE", + "name": "Update Profile", + "providerId": "UPDATE_PROFILE", + "enabled": true, + "defaultAction": 
false, + "priority": 40, + "config": {} + }, + { + "alias": "VERIFY_EMAIL", + "name": "Verify Email", + "providerId": "VERIFY_EMAIL", + "enabled": true, + "defaultAction": false, + "priority": 50, + "config": {} + } + ], + "browserFlow": "browser", + "registrationFlow": "registration", + "directGrantFlow": "direct grant", + "resetCredentialsFlow": "reset credentials", + "clientAuthenticationFlow": "clients", + "dockerAuthenticationFlow": "docker auth", + "attributes": { + "_browser_header.xXSSProtection": "1; mode=block", + "_browser_header.xFrameOptions": "SAMEORIGIN", + "_browser_header.strictTransportSecurity": "max-age=31536000; includeSubDomains", + "permanentLockout": "false", + "quickLoginCheckMilliSeconds": "1000", + "_browser_header.xRobotsTag": "none", + "maxFailureWaitSeconds": "900", + "minimumQuickLoginWaitSeconds": "60", + "failureFactor": "30", + "actionTokenGeneratedByUserLifespan": "300", + "maxDeltaTimeSeconds": "43200", + "_browser_header.xContentTypeOptions": "nosniff", + "offlineSessionMaxLifespan": "5184000", + "actionTokenGeneratedByAdminLifespan": "43200", + "_browser_header.contentSecurityPolicyReportOnly": "", + "bruteForceProtected": "false", + "_browser_header.contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "waitIncrementSeconds": "60", + "offlineSessionMaxLifespanEnabled": "false" + }, + "users": [ + { + "id": "af134cab-f41c-4675-b141-205f975db679", + "username": "admin", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "type": "password", + "hashedSaltedValue": "NICTtwsvSxJ5hL8hLAuleDUv9jwZcuXgxviMXvR++cciyPtiIEStEaJUyfA9DOir59awjPrHOumsclPVjNBplA==", + "salt": "T/2P5o5oxFJUEk68BRURRg==", + "hashIterations": 27500, + "counter": 0, + "algorithm": "pbkdf2-sha256", + "digits": 0, + "period": 0, + "createdDate": 1554245879354, + "config": {} + } + ], + "disableableCredentialTypes": ["password"], + "requiredActions": [], + "realmRoles": ["admin", "user"], + 
"notBefore": 0, + "groups": [] + }, + { + "id": "eb4123a3-b722-4798-9af5-8957f823657a", + "username": "alice", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "type": "password", + "hashedSaltedValue": "A3okqV2T/ybXTVEgKfosoSjP8Yc9IZbFP/SY4cEd6hag7TABQrQ6nUSuwagGt96l8cw1DTijO75PqX6uiTXMzw==", + "salt": "sl4mXx6T9FypPH/s9TngfQ==", + "hashIterations": 27500, + "counter": 0, + "algorithm": "pbkdf2-sha256", + "digits": 0, + "period": 0, + "createdDate": 1554245879116, + "config": {} + } + ], + "disableableCredentialTypes": ["password"], + "requiredActions": [], + "realmRoles": ["user"], + "notBefore": 0, + "groups": [] + }, + { + "id": "1eed6a8e-a853-4597-b4c6-c4c2533546a0", + "username": "jdoe", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "type": "password", + "hashedSaltedValue": "JV3DUNLjqOadjbBOtC4rvacQI553CGaDGAzBS8MR5ReCr7SwF3E6CsW3T7/XO8ITZAsch8+A/6loeuCoVLLJrg==", + "salt": "uCbOH7HZtyDtMd0E9DG/nw==", + "hashIterations": 27500, + "counter": 0, + "algorithm": "pbkdf2-sha256", + "digits": 0, + "period": 0, + "createdDate": 1554245879227, + "config": {} + } + ], + "disableableCredentialTypes": ["password"], + "requiredActions": [], + "realmRoles": ["confidential", "user"], + "notBefore": 0, + "groups": [] + }, + { + "id": "948c59ec-46ed-4d99-aa43-02900029b930", + "createdTimestamp": 1554245880023, + "username": "service-account-backend-service", + "enabled": true, + "totp": false, + "emailVerified": false, + "email": "service-account-backend-service@placeholder.org", + "serviceAccountClientId": "backend-service", + "credentials": [], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["offline_access"], + "clientRoles": { + "backend-service": ["uma_protection"], + "account": ["view-profile", "manage-account"] + }, + "notBefore": 0, + "groups": [] + } + ], + "keycloakVersion": "6.0.0", + "userManagedAccessAllowed": false +} diff --git a/bin/recreate_db 
b/bin/recreate_db new file mode 100755 index 00000000..57ec07aa --- /dev/null +++ b/bin/recreate_db @@ -0,0 +1,49 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +export FLASK_SESSION_SECRET_KEY="this_is_recreate_db_secret_key" + +tasks="" +if [[ "${1:-}" == "clean" ]]; then + subcommand="${2:-}" + if [[ "$subcommand" == "rmall" ]]; then + tasks="$tasks init migrate" + rm -rf migrations/ + elif [[ -n "$subcommand" ]]; then + >&2 echo "ERROR: you passed a subcommand that was not rmall, and that is not supported: $subcommand" + exit 1 + fi + + rm -f ./src/instance/*.sqlite3 + mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_development" + mysql -uroot -e "DROP DATABASE IF EXISTS spiffworkflow_backend_testing" + + # TODO: check to see if the db already exists and we can connect to it. also actually clean it up. + # start postgres in background with one db + if [[ "${SPIFF_DATABASE_TYPE:-}" == "postgres" ]]; then + if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "select 1"; then + docker run --name postgres-spiff -p 5432:5432 -e POSTGRES_PASSWORD=spiffworkflow_backend -e POSTGRES_USER=spiffworkflow_backend -e POSTGRES_DB=spiffworkflow_backend_testing -d postgres + sleep 4 # classy + fi + if ! docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_development -c "select 1"; then + # create other db. spiffworkflow_backend_testing came with the docker run. 
+ docker exec -it postgres-spiff psql -U spiffworkflow_backend spiffworkflow_backend_testing -c "create database spiffworkflow_backend_development;" + fi + fi +fi +tasks="$tasks upgrade" + +mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_development" +mysql -uroot -e "CREATE DATABASE IF NOT EXISTS spiffworkflow_backend_testing" + +for task in $tasks; do + SPIFFWORKFLOW_BACKEND_ENV=development FLASK_APP=src/spiffworkflow_backend poetry run flask db "$task" +done + +SPIFFWORKFLOW_BACKEND_ENV=testing FLASK_APP=src/spiffworkflow_backend poetry run flask db upgrade diff --git a/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/replicate_resource_set_denied_based_on_uri b/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/replicate_resource_set_denied_based_on_uri new file mode 100755 index 00000000..6a934907 --- /dev/null +++ b/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/replicate_resource_set_denied_based_on_uri @@ -0,0 +1,111 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" + +docker stop keycloak || echo 'no keycloak container found' +docker rm keycloak 2>/dev/null || echo 'no keycloak container found' +docker run -p 8080:8080 -d --name keycloak -e KEYCLOAK_LOGLEVEL=ALL -e ROOT_LOGLEVEL=ALL -e KEYCLOAK_ADMIN=admin -e KEYCLOAK_ADMIN_PASSWORD=admin quay.io/keycloak/keycloak:18.0.2 start-dev -Dkeycloak.profile.feature.token_exchange=enabled -Dkeycloak.profile.feature.admin_fine_grained_authz=enabled + +docker cp "${script_dir}/testing-realm.json" keycloak:/tmp + +sleep 10 +docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/testing-realm.json || echo '' + +docker stop keycloak +docker start keycloak +sleep 10 + +HOSTNAME=localhost:8080 +REALM_NAME=testing +USERS=( + ciadmin1 + repeat_form_user_1 +) +URIS_TO_TEST_AGAINST=( + /blog/post/1 + /blog +) + +FRONTEND_CLIENT_ID=testing-frontend +BACKEND_CLIENT_ID=testing-backend +BACKEND_CLIENT_SECRET="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" # noqa: S105 + +BACKEND_BASIC_AUTH=$(echo -n "${BACKEND_CLIENT_ID}:${BACKEND_CLIENT_SECRET}" | base64) +KEYCLOAK_URL=http://$HOSTNAME/realms/$REALM_NAME/protocol/openid-connect/token + +result_array=() +for user in "${USERS[@]}" ; do + result=$(curl -s -X POST "$KEYCLOAK_URL" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "username=$user" \ + -d "password=$user" \ + -d 'grant_type=password' \ + -d "client_id=$FRONTEND_CLIENT_ID" \ + ) + frontend_token=$(jq -r '.access_token' <<< "$result") + + result=$(curl -s -X POST "$KEYCLOAK_URL" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + --data-urlencode 'grant_type=urn:ietf:params:oauth:grant-type:token-exchange' \ + -d "client_id=$BACKEND_CLIENT_ID" \ + -d "subject_token=${frontend_token}" \ + -H "Authorization: Basic $BACKEND_BASIC_AUTH" \ + -d "audience=${BACKEND_CLIENT_ID}" \ + ) + backend_token=$(jq -r '.access_token' <<< "$result") + + if [[ 
"$backend_token" != 'null' ]]; then + echo "Getting resource set" + + for uri in "${URIS_TO_TEST_AGAINST[@]}" ; do + escaped_uri=$(sed 's|/|%2F|g' <<<"$uri") + resource_result=$(curl -s "http://${HOSTNAME}/realms/testing/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=${escaped_uri}" -H "Authorization: Bearer $backend_token") + + resource_id_name_pairs=$(jq -r '.[] | "\(._id):\(.name)"' <<<"$resource_result" || echo '') + if [[ -z "$resource_id_name_pairs" || "$resource_id_name_pairs" == "null" ]]; then + >&2 echo "ERROR: Could not find the resource id from the result: ${resource_result}" + exit 1 + fi + + echo "Getting permissions" + for resource_id_name_pair in $resource_id_name_pairs ; do + resource_id=$(awk -F ':' '{print $1}' <<<"$resource_id_name_pair") + resource_name=$(awk -F ':' '{print $2}' <<<"$resource_id_name_pair") + + echo "Checking $resource_name" + auth_result=$(curl -s -X POST "$KEYCLOAK_URL" \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -H "Authorization: Basic $BACKEND_BASIC_AUTH" \ + -d "audience=${BACKEND_CLIENT_ID}" \ + --data-urlencode "grant_type=urn:ietf:params:oauth:grant-type:uma-ticket" \ + -d "permission=${resource_id}" \ + -d "subject_token=${backend_token}" \ + ) + + error_message=$(jq -r '.error' <<<"$auth_result" || echo -n '') + if [[ -n "$error_message" && "$error_message" != "null" ]]; then + result_array+=("${user}, ${uri}, DENY") + fi + access_token=$(jq -r '.access_token' <<<"$auth_result" || echo -n '') + if [[ -n "$access_token" && "$access_token" != "null" ]]; then + result_array+=("${user}, ${uri}, APPROVED") + fi + done + done + + else + echo "Failed auth result: $result" + fi +done + +echo -e "\n\nRESULTS:\n" +for final_result in "${result_array[@]}" ; do + echo "$final_result" +done diff --git a/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/testing-realm.json b/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/testing-realm.json new file mode 
100644 index 00000000..23bc9bac --- /dev/null +++ b/bin/replicate_resource_set_denied_based_on_uri_with_keycloak/testing-realm.json @@ -0,0 +1,2815 @@ +{ + "id": "testing", + "realm": "testing", + "notBefore": 0, + "defaultSignatureAlgorithm": "RS256", + "revokeRefreshToken": false, + "refreshTokenMaxReuse": 0, + "accessTokenLifespan": 300, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "ssoSessionIdleTimeoutRememberMe": 0, + "ssoSessionMaxLifespanRememberMe": 0, + "offlineSessionIdleTimeout": 2592000, + "offlineSessionMaxLifespanEnabled": false, + "offlineSessionMaxLifespan": 5184000, + "clientSessionIdleTimeout": 0, + "clientSessionMaxLifespan": 0, + "clientOfflineSessionIdleTimeout": 0, + "clientOfflineSessionMaxLifespan": 0, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "oauth2DeviceCodeLifespan": 600, + "oauth2DevicePollingInterval": 5, + "enabled": true, + "sslRequired": "external", + "registrationAllowed": false, + "registrationEmailAsUsername": false, + "rememberMe": false, + "verifyEmail": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "permanentLockout": false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 30, + "roles": { + "realm": [ + { + "id": "c9f0ff93-642d-402b-965a-04d70719886b", + "name": "default-roles-testing", + "description": "${role_default-roles}", + "composite": true, + "composites": { + "realm": ["offline_access", "uma_authorization"], + "client": { + "account": ["view-profile", "manage-account"] + } + }, + "clientRole": false, + "containerId": "testing", + 
"attributes": {} + }, + { + "id": "9f474167-5707-4c10-8f9e-bb54ec715cd3", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "testing", + "attributes": {} + }, + { + "id": "6738d143-2d1d-4458-8a98-01ea003fde14", + "name": "admin", + "composite": false, + "clientRole": false, + "containerId": "testing", + "attributes": {} + }, + { + "id": "6cbcdea5-0083-469d-9576-1d245fb3cdfd", + "name": "repeat-form-role-realm", + "composite": false, + "clientRole": false, + "containerId": "testing", + "attributes": {} + }, + { + "id": "b5a92aee-82d2-4687-8282-365df4df21a9", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "testing", + "attributes": {} + } + ], + "client": { + "realm-management": [ + { + "id": "257c348c-4b9e-4fea-be39-5fdd28e8bb93", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "1d224265-63a8-40ea-9316-47627d0aed8c", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "535d7ca0-0f06-42d8-938b-e6e7aabffb42", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "9ff52ab5-2558-4cb0-901f-6e6f1469d075", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "manage-authorization", + "view-authorization", + "query-groups", + "view-clients", + "view-realm", + "manage-users", + "query-users", + "impersonation", + "manage-clients", + 
"view-identity-providers", + "create-client", + "query-realms", + "view-users", + "view-events", + "manage-identity-providers", + "manage-events", + "query-clients", + "manage-realm" + ] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "98db35e3-833f-4b61-83af-fc50484fda57", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-clients"] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "e0dc0e0c-eba4-4de7-b2eb-2ba095c4c6d4", + "name": "manage-users", + "description": "${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "69ce3805-1897-4291-842b-b8e8e9f29bd7", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "3e803641-96b1-44d8-9de5-7dee83a0a75b", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "2c92c3e5-1a0a-4318-9b63-617c5dca0b66", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "326a3718-390d-4e41-af00-2197d3ef6858", + "name": "manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "e4c69181-5e0d-484e-ac31-be6beef57c28", + "name": "create-client", + "description": "${role_create-client}", + "composite": false, + 
"clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "f4ac66cc-97b4-4590-beae-5ff23c9935b3", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "a24704fe-13fd-40e6-bf2d-29014f63c069", + "name": "view-identity-providers", + "description": "${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "7deec87c-2716-40c1-a115-2a0fe840b119", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-groups", "query-users"] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "827c40ae-b4c2-4574-9f34-db33925cd19c", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "cbe05c62-2b07-4ac7-a33a-ffca7c176252", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "8ca56814-a817-4849-a515-45399eb1dcc1", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "1134c6df-d0ff-498d-9dc4-ad989f7cfe93", + "name": "query-clients", + "description": "${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": 
"3bb14549-60f6-4078-8f4e-47a1162412f2", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + } + ], + "testing-frontend": [], + "security-admin-console": [], + "admin-cli": [], + "testing-backend": [ + { + "id": "4d71d1bb-d627-43c8-bc07-d542f816e04b", + "name": "testing-admin", + "composite": false, + "clientRole": true, + "containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": {} + }, + { + "id": "2341ca1c-24c8-4ddf-874c-7153c9408068", + "name": "uma_protection", + "composite": false, + "clientRole": true, + "containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": {} + }, + { + "id": "cf88054e-4bdc-491c-bf93-c660cdaad72d", + "name": "repeat-form-role-2", + "composite": false, + "clientRole": true, + "containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": { + "repeat-form-role-2-att-key": ["repeat-form-role-2-att-value"] + } + } + ], + "withAuth": [ + { + "id": "87673823-6a5a-4cb2-baa7-6c8b5da5d402", + "name": "uma_protection", + "composite": false, + "clientRole": true, + "containerId": "5d94a8c3-f56b-4eff-ac39-8580053a7fbe", + "attributes": {} + } + ], + "broker": [ + { + "id": "6d688d72-cf5b-4450-a902-cb2d41f0e04c", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "55d75754-cf1b-4875-bf3e-15add4be8c99", + "attributes": {} + } + ], + "account": [ + { + "id": "9c51c3e1-028d-4a0d-96dc-6619196b49f0", + "name": "delete-account", + "description": "${role_delete-account}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "f395d221-7f80-4fcf-90ac-0a89c8b15a9b", + "name": "manage-consent", + "description": "${role_manage-consent}", + "composite": true, + "composites": { + "client": { + "account": ["view-consent"] + } + }, + 
"clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "7abb4169-1960-4b4d-b5ae-6ea45cf91ee4", + "name": "view-consent", + "description": "${role_view-consent}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "4d3c24ed-cc61-4a6e-ac78-47af4545b415", + "name": "manage-account-links", + "description": "${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "a4954091-9be9-4b7c-a196-1af934917ff7", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "0810773c-a57d-449e-a31f-1344e1eb4b9b", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": ["manage-account-links"] + } + }, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "ae774a41-a274-4f99-9d7f-f4a0d5dbc085", + "name": "view-applications", + "description": "${role_view-applications}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + } + ] + } + }, + "groups": [], + "defaultRole": { + "id": "c9f0ff93-642d-402b-965a-04d70719886b", + "name": "default-roles-testing", + "description": "${role_default-roles}", + "composite": true, + "clientRole": false, + "containerId": "testing" + }, + "requiredCredentials": ["password"], + "otpPolicyType": "totp", + "otpPolicyAlgorithm": "HmacSHA1", + "otpPolicyInitialCounter": 0, + "otpPolicyDigits": 6, + "otpPolicyLookAheadWindow": 1, + "otpPolicyPeriod": 30, + "otpSupportedApplications": ["FreeOTP", "Google Authenticator"], + "webAuthnPolicyRpEntityName": 
"keycloak", + "webAuthnPolicySignatureAlgorithms": ["ES256"], + "webAuthnPolicyRpId": "", + "webAuthnPolicyAttestationConveyancePreference": "not specified", + "webAuthnPolicyAuthenticatorAttachment": "not specified", + "webAuthnPolicyRequireResidentKey": "not specified", + "webAuthnPolicyUserVerificationRequirement": "not specified", + "webAuthnPolicyCreateTimeout": 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyAcceptableAaguids": [], + "webAuthnPolicyPasswordlessRpEntityName": "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms": ["ES256"], + "webAuthnPolicyPasswordlessRpId": "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", + "webAuthnPolicyPasswordlessCreateTimeout": 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyPasswordlessAcceptableAaguids": [], + "users": [ + { + "id": "4c436296-8471-4105-b551-80eee96b43bb", + "createdTimestamp": 1657139858075, + "username": "ciadmin1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "111b5ea1-c2ab-470a-a16b-2373bc94de7a", + "type": "password", + "createdDate": 1657139904275, + "secretData": "{\"value\":\"e5MjWAk7RPspQIh9gEOKyv3AV/DHNoWk8w1tf+MRLh2oxrKmnnizOj0eFtIadT/q/i5JRfUq5IYBPLL/4nEJDw==\",\"salt\":\"5inqMqqTR6+PBYriy3RPjA==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-testing", "admin"], + "clientRoles": { + "testing-backend": ["testing-admin", "uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": 
"56457e8f-47c6-4f9f-a72b-473dea5edfeb", + "createdTimestamp": 1657139955336, + "username": "ciuser1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "762f36e9-47af-44da-8520-cf09d752497a", + "type": "password", + "createdDate": 1657139966468, + "secretData": "{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-testing"], + "clientRoles": { + "testing-backend": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "a15da457-7ebb-49d4-9dcc-6876cb71600d", + "createdTimestamp": 1657115919770, + "username": "repeat_form_user_1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "509dfd8d-a54e-4d8b-b250-ec99e585e15d", + "type": "password", + "createdDate": 1657298008525, + "secretData": "{\"value\":\"/47zG9XBvKg+1P2z6fRL4cyUNn+sB4BgXsxBsvi1NYR9Z20WTeWzzOT2uXvv2ajKMRHrv0OqTesldvSJXARPqA==\",\"salt\":\"dODEHOF24xGPx+7QGaIXWQ==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-testing"], + "clientRoles": { + "testing-backend": ["uma_protection", "repeat-form-role-2"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "487d3a85-89dd-4839-957a-c3f6d70551f6", + "createdTimestamp": 1657115173081, + "username": "service-account-testing-backend", + "enabled": true, + "totp": false, + "emailVerified": false, + "serviceAccountClientId": "testing-backend", + "credentials": [], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": 
["default-roles-testing"], + "clientRoles": { + "testing-backend": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "22de68b1-4b06-4bc2-8da6-0c577e7e62ad", + "createdTimestamp": 1657055472800, + "username": "service-account-withauth", + "enabled": true, + "totp": false, + "emailVerified": false, + "serviceAccountClientId": "withAuth", + "credentials": [], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-testing"], + "clientRoles": { + "withAuth": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + } + ], + "scopeMappings": [ + { + "clientScope": "offline_access", + "roles": ["offline_access"] + } + ], + "clients": [ + { + "id": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "clientId": "account", + "name": "${client_account}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/testing/account/", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/realms/testing/account/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + 
"id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "02fa6179-9399-4bb1-970f-c4d8e8b5f99f", + "clientId": "admin-cli", + "name": "${client_admin-cli}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": false, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": 
"false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "55d75754-cf1b-4875-bf3e-15add4be8c99", + "clientId": "broker", + "name": "${client_broker}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": 
"false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "4ce68130-aced-4e67-936a-8082dc843cc2", + "clientId": "realm-management", + "name": "${client_realm-management}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + 
"saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "7c82344d-d4ae-4599-bbce-583cc8848199", + "clientId": "security-admin-console", + "name": "${client_security-admin-console}", + "rootUrl": "${authAdminUrl}", + "baseUrl": "/admin/testing/console/", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/admin/testing/console/*"], + "webOrigins": ["+"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + 
"client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "pkce.code.challenge.method": "S256", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "949c8afa-a06e-4a86-9260-6f477fc9ad9d", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "f44558af-3601-4e54-b854-08396a247544", + "clientId": "testing-backend", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "secret": "JXeQExm0JhQPLumgHtIIqf52bDalHz0q", + "redirectUris": ["http://localhost:7000/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + 
"authorizationServicesEnabled": true, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "client.secret.creation.time": "1657115173", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "af3598ab-74a9-48ba-956f-431b14acd896", + "name": "Client IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + }, + { + "id": "87369cf7-2a77-40fd-a926-a26d689831a0", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": 
"oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + }, + { + "id": "2c78d7e8-0a99-43bd-bc29-0ba062ed8750", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientId", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "authorizationSettings": { + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "Default Resource", + "type": "urn:testing-backend:resources:default", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "8e00e4a3-3fff-4521-b7f0-95f66c2f79d2", + "uris": ["/*"] + }, + { + "name": "1-crud", + "type": "blog", + "ownerManagedAccess": false, + "displayName": "1-crud", + "attributes": { + "test_resource_att1": ["this_is_the_value"] + }, + "_id": "e294304c-796e-4c56-bdf2-8c854f65db59", + "uris": ["/blog/post/1"], + "scopes": [ + { + "name": "read" + }, + { + "name": "update" + }, + { + "name": "delete" + }, + { + "name": "instantiate" + } + ] + }, + { + "name": "everything", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "446bdcf4-a3bd-41c7-a0f8-67a225ba6b57", + "uris": ["/*"], + "scopes": [ + { + "name": "read" + }, + { + "name": "update" + }, + { + "name": "delete" + }, + { + "name": "instantiate" + } + ] + } + ], + "policies": [ + { + "id": "048d043e-d98c-44d8-8c85-656ba117053e", + "name": "repeat-form-role-policy", + "type": "role", 
+ "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"testing-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "7dac9bea-d415-4bc4-8817-7a71c2b3ce32", + "name": "Default Policy", + "description": "A policy that grants access only for users within this realm", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "AFFIRMATIVE", + "config": { + "roles": "[{\"id\":\"testing-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "ac55237b-6ec9-4f66-bb8e-bee94a5bb5e9", + "name": "admins have everything", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"testing-backend/testing-admin\",\"required\":false}]" + } + }, + { + "id": "5133ae0b-5e90-48a6-bdd9-3f323e10c44d", + "name": "repeat-form-read", + "type": "scope", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"1-crud\"]", + "scopes": "[\"read\"]", + "applyPolicies": "[\"repeat-form-role-policy\"]" + } + }, + { + "id": "4b634627-51d9-4257-91d9-29503490e4fb", + "name": "Default Permission", + "description": "A permission that applies to the default resource type", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "defaultResourceType": "urn:testing-backend:resources:default", + "applyPolicies": "[\"Default Policy\"]" + } + }, + { + "id": "0a86ae38-7460-4bc2-b1f9-f933531303ac", + "name": "all_permissions", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"everything\"]", + "applyPolicies": "[\"admins have everything\"]" + } + } + ], + "scopes": [ + { + "id": "c03b5c4e-f1bb-4066-8666-3c8a6f44ddb3", + "name": "read", + "displayName": "read" + }, + { + "id": "f55c3e81-9257-4618-9acb-32c57fc561a6", + "name": "update", + "displayName": "update" + }, + { + "id": "c8628417-7ffa-4675-9cda-955df62ea1db", + "name": "delete", + 
"displayName": "delete" + }, + { + "id": "50ef4129-aa88-4ecd-9afe-c7e5a1b66142", + "name": "instantiate", + "displayName": "instantiate" + } + ], + "decisionStrategy": "UNANIMOUS" + } + }, + { + "id": "9f340eba-2b84-43d0-a976-010e270e3981", + "clientId": "testing-frontend", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["http://localhost:7001/*"], + "webOrigins": ["*"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, 
+ "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "5d94a8c3-f56b-4eff-ac39-8580053a7fbe", + "clientId": "withAuth", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "secret": "6o8kIKQznQtejHOdRhWeKorBJclMGcgA", + "redirectUris": ["http://localhost:7001/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "authorizationServicesEnabled": true, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "client.secret.creation.time": "1657055472", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + 
"saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "abfc756f-fc57-45b4-8a40-0cd0f8081f0c", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientId", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String" + } + }, + { + "id": "c05d38b7-9b4d-4286-b40c-f48b3cca42e3", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + }, + { + "id": "b27d0bd8-b8d9-43cb-a07a-3ec4bdc818dc", + "name": "Client IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "authorizationSettings": { + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "Default Resource", + "type": "urn:withAuth:resources:default", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "c882ad40-c15d-4f88-ad60-c2ea2f486ce2", + "uris": ["/*"] + } + ], + "policies": [ + { + "id": "b8b338bc-884d-43cf-96d8-3776f2b220f3", + 
"name": "Default Policy", + "description": "A policy that grants access only for users within this realm", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "AFFIRMATIVE", + "config": { + "roles": "[{\"id\":\"testing-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "4f5afa22-0fdf-4ed7-97b9-35400591bf6f", + "name": "Default Permission", + "description": "A permission that applies to the default resource type", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "defaultResourceType": "urn:withAuth:resources:default", + "applyPolicies": "[\"Default Policy\"]" + } + } + ], + "scopes": [], + "decisionStrategy": "UNANIMOUS" + } + } + ], + "clientScopes": [ + { + "id": "fa3d9944-cf66-4af9-b931-1f3b02943e5b", + "name": "acr", + "description": "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "12ad0a69-d414-4b5b-9f5f-b647db5f8959", + "name": "acr loa level", + "protocol": "openid-connect", + "protocolMapper": "oidc-acr-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + } + ] + }, + { + "id": "4e69d058-1229-4704-9411-decf25da0a49", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${profileScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "d0d7334e-3f11-45d2-9670-46dbc1977cb2", + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": "oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + 
} + }, + { + "id": "4efcf169-4df2-4cdb-b331-005aff1cee28", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + }, + { + "id": "3f639f2f-cf0e-4651-ab93-15a77023b5a0", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "16e93663-bf6a-4f6d-b5ab-8e68bf118f72", + "name": "nickname", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "b9c97283-8153-4c4d-b8d8-dd1bde17823b", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "id": "eeead6c7-1dae-4be1-9eca-988ffb38aaf4", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + 
"id": "d62991bc-2583-42be-bb08-8d1527c4f162", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "9f761222-f84d-4a25-a53f-13e196d38a46", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "ec866e3c-582f-4c99-920f-d57cf03d772d", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "b05e679c-e00e-427e-8e47-0a4fd411c7a6", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "long" + } + }, + { + "id": "505ff402-5533-48ea-91f9-ab4804c3826b", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": 
"d546af31-b669-442b-9a9d-8a6478364002", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "5a75c993-290f-4bfb-9044-5d7d269378b2", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + }, + { + "id": "2d387240-0f2f-4f30-8464-0e7c57946743", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "2efee39d-723c-44af-9eb1-4dde9635b249", + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${emailScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "5bf7db0f-a915-43c2-bff4-475ee5c3259b", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email", + "jsonType.label": "String" + } + }, + { + "id": "687a8c7d-c93f-47d9-a176-78b0954429c7", + "name": "email verified", + "protocol": 
"openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "4a7737cf-83e3-40e1-b36d-9566b34e4148", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${phoneScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "14bd2816-a2f3-4fde-9ac2-452dea2e9e58", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + }, + { + "id": "6172e315-8999-4df8-89fa-75ffd1981793", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "5ad0c621-d3ec-4018-98c8-d6fb630d661f", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "252fdd9f-cc91-4ca3-aaab-cdf053360e94", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + 
"multivalued": "true", + "userinfo.token.claim": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "jsonType.label": "String" + } + }, + { + "id": "8e9b880e-6dd8-4e2f-ade2-77fc8fd0bc6d", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "77ca4f26-3777-451b-a907-e258f46f7b95", + "name": "roles", + "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "true", + "consent.screen.text": "${rolesScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "e7ebb9c0-5ed3-4c6f-bb69-22e01d26b49f", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + }, + { + "id": "66fd470f-419e-44cd-822e-43df8ee5fe1b", + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String", + "multivalued": "true" + } + }, + { + "id": "f3c313bc-7da7-4cf6-a0df-b62e77209b7c", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String", + "multivalued": "true" + } + } + ] + }, + { + "id": "3e9849f5-15ff-43c6-b929-40f26fda2c05", + "name": 
"offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + }, + { + "id": "ffda6ea6-8add-4c7e-9754-66d00c6735a1", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false", + "consent.screen.text": "" + }, + "protocolMappers": [ + { + "id": "05635d42-8bb3-440b-b871-b64c97f524da", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "6f56ae2b-253f-40f7-ba99-e8c5bbc71423", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "7036c17a-9306-4481-82a1-d8d9d77077e5", + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "ce4493c0-ccb4-45f9-a46e-a40cc3f6d4b2", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${addressScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "8a0d3248-d231-40b2-9b8e-3d63bd5a5d12", + "name": "address", + "protocol": "openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + 
"user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + } + } + ] + } + ], + "defaultDefaultClientScopes": [ + "email", + "profile", + "role_list", + "roles", + "acr", + "web-origins" + ], + "defaultOptionalClientScopes": [ + "offline_access", + "phone", + "microprofile-jwt", + "address" + ], + "browserSecurityHeaders": { + "contentSecurityPolicyReportOnly": "", + "xContentTypeOptions": "nosniff", + "xRobotsTag": "none", + "xFrameOptions": "SAMEORIGIN", + "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection": "1; mode=block", + "strictTransportSecurity": "max-age=31536000; includeSubDomains" + }, + "smtpServer": {}, + "eventsEnabled": false, + "eventsListeners": ["jboss-logging"], + "enabledEventTypes": [], + "adminEventsEnabled": false, + "adminEventsDetailsEnabled": false, + "identityProviders": [], + "identityProviderMappers": [], + "components": { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ + { + "id": "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", + "name": "Max Clients Limit", + "providerId": "max-clients", + "subType": "anonymous", + "subComponents": {}, + "config": { + "max-clients": ["200"] + } + }, + { + "id": "b8617465-1c84-4a5f-a16f-a6f10f0f66b1", + "name": "Trusted Hosts", + "providerId": "trusted-hosts", + "subType": "anonymous", + "subComponents": {}, + "config": { + "host-sending-registration-request-must-match": ["true"], + "client-uris-must-match": ["true"] + } + }, + { + "id": "1209fa5d-37df-4f9a-b4fa-4a3cd94e21fe", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "saml-user-attribute-mapper", + "oidc-full-name-mapper", + 
"oidc-usermodel-attribute-mapper", + "oidc-usermodel-property-mapper", + "saml-role-list-mapper", + "oidc-sha256-pairwise-sub-mapper", + "oidc-address-mapper", + "saml-user-property-mapper" + ] + } + }, + { + "id": "3854361d-3fe5-47fb-9417-a99592e3dc5c", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "4c4076ec-68ed-46c1-b0a5-3c8ed08dd4f6", + "name": "Consent Required", + "providerId": "consent-required", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "bbbe2ea2-2a36-494b-b57f-8b202740ebf4", + "name": "Full Scope Disabled", + "providerId": "scope", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "41eef3e1-bf71-4e8a-b729-fea8eb16b5d8", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "6061713a-c1f5-46e1-adfb-762b8768976a", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-address-mapper", + "saml-user-property-mapper", + "oidc-full-name-mapper", + "oidc-usermodel-property-mapper", + "oidc-usermodel-attribute-mapper", + "saml-user-attribute-mapper", + "saml-role-list-mapper", + "oidc-sha256-pairwise-sub-mapper" + ] + } + } + ], + "org.keycloak.keys.KeyProvider": [ + { + "id": "1f9958a4-b3ac-4a1b-af95-fd8e6053864a", + "name": "hmac-generated", + "providerId": "hmac-generated", + "subComponents": {}, + "config": { + "kid": ["4e99c641-0494-49d5-979f-45cb5126f6f1"], + "secret": [ + "4wV4voiQmFajEegv83Ugd8DxFoy3JpN4YzO5qMx4XfB7Abq8NKU4Az5AkSpxYBSdb5GJEQypA4aLmnaDyCWLIw" + ], + "priority": ["100"], + "algorithm": ["HS256"] + } + }, + { + "id": 
"70fe0720-f3b7-47b4-a625-ae8fb6635da1", + "name": "aes-generated", + "providerId": "aes-generated", + "subComponents": {}, + "config": { + "kid": ["76118b54-fc74-4149-9028-fab1fdc07860"], + "secret": ["DvxTn0KA4TEUPqSFBw8qAw"], + "priority": ["100"] + } + }, + { + "id": "a12fdd97-1d72-4d9e-9e6a-f9e0b5d4e5f0", + "name": "rsa-generated", + "providerId": "rsa-generated", + "subComponents": {}, + "config": { + "privateKey": [ + "MIIEpAIBAAKCAQEAimbfmG2pL3qesWhUrQayRyYBbRFE0Ul5Ii/AW8Kq6Kad9R2n2sT2BvXWnsWBH6KuINUFJz3Tb+gWy235Jy0Idmekwx63JR20//ZJ7dyQ+b1iadmYPpqyixGL7NrVxQYT0AEGLcD/Fwsh869F3jgfQt7N15q2arRnOrW5NMwi+IvtHxZRZ3UluxShut2577ef8cakwCv4zoTV29y+Z3XhtlKZ4WOCuqIHL3SRHwNkb+k8cY0Gwc88FHl/ihFR0fX/lc7W2AHRd98ex8il4kBFfShBZur8ZLE7QWQdXRY2EYYr3D/W6/5wf/R2fAvbVmGzcYGZ2qm6d+K1XH8VU3X84wIDAQABAoIBABXXrHwa+nOCz57CD3MLNoGiDuGOsySwisyJartQmraC7TTtDDurkASDMe72zq0WeJK368tIp6DmqQpL/eFf6xD8xHUC2PajnJg033AJuluftvNroupmcb0e9M1ZsBkbH29Zagc4iUmyuRYDWGx8wPpFvYjEYvuuIwiR+3vIp9A/0ZbcBwdtml3Of5gYTXChPj28PrA4K7oFib2Zu1aYCBEdF8h9bKRF/UlvyWeSajjddexSQ6gkEjzAEMpliCDbOGSFGwNu1pY7FF4EpyJbalzdpn44m5v9bqfS9/CDrIOOUus88Nn5wCD2OAmAQnWn0Hnh7at4A5fw3VBUmEt70ckCgYEAx0Fg8Gp3SuMaytrf9HJHJcltyDRsdSxysF1ZvDV9cDUsD28QOa/wFJRVsABxqElU+W6QEc20NMgOHVyPFed5UhQA6WfmydzGIcF5C6T5IbE/5Uk3ptGuPdI0aR7rlRfefQOnUBr28dz5UDBTb93t9+Klxcss+nLGRbugnFBAtTUCgYEAsdD+92nuF/GfET97vbHxtJ6+epHddttWlsa5PVeVOZBE/LUsOZRxmxm4afvZGOkhUrvmA1+U0arcp9crS5+Ol2LUGh/9efqLvoBImBxLwB37VcIYLJi0EVPrhVPh+9r3vah1YMBhtapS0VtuEZOr47Yz7asBg1s1Z06l+bD1JLcCgYA+3YS9NYn/qZl5aQcBs9B4vo2RfeC+M1DYDgvS0rmJ3mzRTcQ7vyOrCoXiarFxW/mgXN69jz4M7RVu9BX83jQrzj3fZjWteKdWXRlYsCseEzNKnwgc7MjhnmGEzQmc15QNs0plfqxs8MAEKcsZX1bGP873kbvWJMIjnCf3SWaxBQKBgQCh9zt2w19jIewA+vFMbXw7SGk6Hgk6zTlG50YtkMxU/YtJIAFjhUohu8DVkNhDr35x7MLribF1dYu9ueku3ew1CokmLsNkywllAVaebw+0s9qOV9hLLuC989HQxQJPtTj54SrhcPrPTZBYME7G5dqo9PrB3oTnUDoJmoLmOABjawKBgQCeyd12ShpKYHZS4ZvE87OfXanuNfpVxhcXOqYHpQz2W0a+oUu9e78MlwTVooR4O52W/Ohch2FPEzq/1DBjJrK6PrMY8DS018BIVpQ9DS35/Ga9NtSi8DX7jTXacYPwL9n/+//U3vw0mjaoMXgCv44nYu4
ro62J6wvVM98hjQmLJw==" + ], + "keyUse": ["SIG"], + "certificate": [ + "MIICqTCCAZECBgGBz6+bXzANBgkqhkiG9w0BAQsFADAYMRYwFAYDVQQDDA1zcGlmZndvcmtmbG93MB4XDTIyMDcwNTE4NDUwMVoXDTMyMDcwNTE4NDY0MVowGDEWMBQGA1UEAwwNc3BpZmZ3b3JrZmxvdzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIpm35htqS96nrFoVK0GskcmAW0RRNFJeSIvwFvCquimnfUdp9rE9gb11p7FgR+iriDVBSc902/oFstt+SctCHZnpMMetyUdtP/2Se3ckPm9YmnZmD6asosRi+za1cUGE9ABBi3A/xcLIfOvRd44H0Lezdeatmq0Zzq1uTTMIviL7R8WUWd1JbsUobrdue+3n/HGpMAr+M6E1dvcvmd14bZSmeFjgrqiBy90kR8DZG/pPHGNBsHPPBR5f4oRUdH1/5XO1tgB0XffHsfIpeJARX0oQWbq/GSxO0FkHV0WNhGGK9w/1uv+cH/0dnwL21Zhs3GBmdqpunfitVx/FVN1/OMCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAaI7BEPZpf4MU7bMWmNgyfRTRDy5wtpyfuLPGHZ9EqtnvwwzsmlmXXsC55SLXx3wJETm+rFqeRFbo/hamlRajzzD317AUpE7nhnONTukmh6UuB8hXoWiQTD+YDYMy8kneSP4zvfm27F+TgUC4cvJSYuWVaCxFx52kxqW1hZkBzYUcfi21Qb1jRrbTbso37BxuVX+GdN015If3DPD6QnAhLPAYEFA9jiL16YeMdWHdvlXXmvriDegMUYQjFYPRh6iPzUEdG6KGHItF4AkOYBQAcoaYhfxpxofVlDdOqMZ/1c7AAbe4lR6/jYQ0CbHwdUu4dzJQe3vxr7GdxcB1ypvXPA==" + ], + "priority": ["100"] + } + }, + { + "id": "e16c740d-3ae2-4cc5-a68d-49d99e079672", + "name": "rsa-enc-generated", + "providerId": "rsa-enc-generated", + "subComponents": {}, + "config": { + "privateKey": [ + 
"MIIEowIBAAKCAQEAsqGsclDQDFSTn8HS1LiiNAnTwn3CS8HXPLDYMHr/jUQ8r5eD+vQY5ICh5V5c8l8J6ydbpzffFEKam54Ypp4yzaWJZ4huYBMf4vL7xrAZ4VXBreu16BIxOrThzrJe9WmI8+Annzo62mNYZbjf4WNpZDURmxZSo7v6Czprd5O6T4N5bxr8sjRRptZR8hxtrRvJnuC0jF+dLHIO5SKR1hUVG/gbpIBqGcsLkNC9nnS6M/N5YFzUIV5JhXo3+mrR/yvw7m+oS5yRsN0raCSXVenNP05Dhsd4FOYqoXBBcdgXXbiDxed0HWB/g5dASqyMydHriddGr8FU0W8/uZmF79wxPwIDAQABAoIBAFsWCaL5Bj1jWytZYDJMO5mhcTN5gPu0ShaObo66CVl1dCRtdEUg9xh9ZxBYf7ivMZWRKjEoUj44gDHd+d/sRyeJw3jhnraqydWl5TC5V1kJq4sN6GH/9M5kscf+OGGXgNgqcsnEnYICqm6kSLTbRkBstx+H0HfhQG09StNcpuIn4MsoMZT8XmZbXRLb3FhfpuTSX3t2nbSDRfUf7LI1EDnFQen/AJAA5lOHthLCdz4Gj1vfalOFjCMYOUWmL/mCDEb38F6QJZxkyhmS/r2kM09PFLOio6z3J8C8mVeq7uao0s5xAKj5SJqx4r+TTvL5aOF8JBWm8Hz1Vcip9/MjsQECgYEA/8Hpb4RggNyn+YzTxqxtPtbLFL0YywtNT+gutmJH1gyTjfx7p3dmA/NsdIeuJmBpZfA7oDXIqfj2M9QLfC5bdKnggQzrIO3BgClI88zOIWd229Bt6D1yx92k4+9eaRwOKBPn8+u0mCk8TBv32ecMLQ9o8AKNIHeCZQjByvOrIMECgYEAss0J3TzrRuEOpnxJ9fNOeB3rNpIFrpNua+oEQI4gDbBvyT7osBKkGqfXJpUQMftr8a6uBHLHV7/Wq6/aRkRhk+aER8h01DUIWGLmbCUdkFSJZ8iObMZQvURtckhzxxhYu0Ybwn0RJg/zzR4onTRO+eL1fTnb5Id55PyPt3Pp0f8CgYEAovDOoP6MYOyzk5h1/7gwrX04ytCicBGWQtdgk0/QBn3ir+3wdcPq2Y+HREKA3/BClfBUfIBnhGqZqHFqk8YQ/CWSY4Vwc30l71neIX0UwlFhdy+2JeSoMM9z0sfYtUxrdHsiJtO/LcXvpWmYIVpC9p4/s9FcShf5mhbXKE7PcsECgYBN7qqvAH94LF4rWJ8QEZWRK1E7Ptg1KFOHu79Qt+HmtZFzwPTA0c8vQxq22V/uuSxqcf2tOK4EZDxYJtTXrbRuN5pOg2PQnrDdfXX7iw3gu8gMMVFKvgGxDSM7HbNBAy6hqcQtuD+CPI/CRrPjGUqXBkKD63UZnacWlLK7fk1a1wKBgExUaqOBKmr0vldVn66E1XzZj4F4+fV5Ggka9289pBNBRlJFD4VmIYkDkOrLimyy2cYeCkocrOvF6HMJqTcOzD50pj44OWkYFRbs6vK0S7iLSX0eR158XOR9C+uZzp1vIA4sYwW3504HVdVoIU5M8ItSgDsFjGnvHopTGu3MBWPT" + ], + "keyUse": ["ENC"], + "certificate": [ + 
"MIICqTCCAZECBgGBz6+byzANBgkqhkiG9w0BAQsFADAYMRYwFAYDVQQDDA1zcGlmZndvcmtmbG93MB4XDTIyMDcwNTE4NDUwMVoXDTMyMDcwNTE4NDY0MVowGDEWMBQGA1UEAwwNc3BpZmZ3b3JrZmxvdzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALKhrHJQ0AxUk5/B0tS4ojQJ08J9wkvB1zyw2DB6/41EPK+Xg/r0GOSAoeVeXPJfCesnW6c33xRCmpueGKaeMs2liWeIbmATH+Ly+8awGeFVwa3rtegSMTq04c6yXvVpiPPgJ586OtpjWGW43+FjaWQ1EZsWUqO7+gs6a3eTuk+DeW8a/LI0UabWUfIcba0byZ7gtIxfnSxyDuUikdYVFRv4G6SAahnLC5DQvZ50ujPzeWBc1CFeSYV6N/pq0f8r8O5vqEuckbDdK2gkl1XpzT9OQ4bHeBTmKqFwQXHYF124g8XndB1gf4OXQEqsjMnR64nXRq/BVNFvP7mZhe/cMT8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEArDDC7bYbuBg33PbUQi7P77lV7PuE9uQU1F3HqulhkARQeM/xmBdJRj9CHjj62shkI3An70tJtGBJkVAHltmvjC+A6IDO5I8IbnPkvWJFu9HwphdP/C1HXYmGPPe7yGdKpy6mdCZ+LMZP7BENhOlx9yXLDFYtcGvqZ4u3XvfsLqUsRGqZHNlhVJD13dUbI6pvbwMsb3gIxozgTIa2ySHMbHafln2UQk5jD0eOIVkaNAdlHqMHiBpPjkoVxnhAmJ/dUIAqKBvuIbCOu9N0kOQSl82LqC7CZ21JCyT86Ll3n1RTkxY5G3JzGW4dyJMOGSyVnWaQ9Z+C92ZMFcOt611M2A==" + ], + "priority": ["100"], + "algorithm": ["RSA-OAEP"] + } + } + ] + }, + "internationalizationEnabled": false, + "supportedLocales": [], + "authenticationFlows": [ + { + "id": "7142a143-abd3-47ca-aaa9-13b9f7ccc385", + "alias": "Account verification options", + "description": "Method with which to verity the existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-email-verification", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Verify Existing Account by Re-authentication", + "userSetupAllowed": false + } + ] + }, + { + "id": "242c0c2f-296c-40af-89ae-2b4aa3900645", + "alias": "Authentication Options", + "description": "Authentication options.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + 
"authenticator": "basic-auth", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "basic-auth-otp", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "6b0b49ae-9dc0-4a0b-9bac-df481c2b7f96", + "alias": "Browser - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "62a73d27-f241-4c16-84d6-3ac099c1a48d", + "alias": "Direct Grant - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "48634cb2-2dae-424e-848e-18de107696c8", + "alias": "First broker login - Conditional OTP", + "description": "Flow to 
determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "5a11d869-4928-4583-a050-4672dbc46a46", + "alias": "Handle Existing Account", + "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-confirm-link", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Account verification options", + "userSetupAllowed": false + } + ] + }, + { + "id": "bf19b759-dcbf-434a-bed8-347aba2010cf", + "alias": "Reset - Conditional OTP", + "description": "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "40c42a8c-a850-4a7e-b771-fc8faedf0040", + "alias": "User creation or linking", + "description": "Flow for the existing/non-existing user alternatives", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "create unique user config", + "authenticator": "idp-create-user-if-unique", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Handle Existing Account", + "userSetupAllowed": false + } + ] + }, + { + "id": "ddb96119-b8cf-4bcf-b8f6-f2fd4d422c6e", + "alias": "Verify Existing Account by Re-authentication", + "description": "Reauthentication of existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "First broker login - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "d5cb0994-62ce-413a-bc48-a30eb68cf234", + "alias": "browser", + "description": "browser based 
authentication", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-cookie", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "identity-provider-redirector", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 25, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "forms", + "userSetupAllowed": false + } + ] + }, + { + "id": "2720f0ec-f7c1-4bf6-b338-9db5d72ac753", + "alias": "clients", + "description": "Base authentication for clients", + "providerId": "client-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "client-secret", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-secret-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-x509", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "ae542f35-38cf-4443-b273-9dea7385eff6", + "alias": "direct grant", + "description": "OpenID Connect Resource Owner Grant", + "providerId": "basic-flow", + "topLevel": true, + 
"builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "direct-grant-validate-username", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "Direct Grant - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "8a6b1d02-5145-4169-a6b3-58103495c870", + "alias": "docker auth", + "description": "Used by Docker clients to authenticate against the IDP", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "docker-http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "c83c3b0b-6037-4903-8b6b-22381c546226", + "alias": "first broker login", + "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "review profile config", + "authenticator": "idp-review-profile", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "User creation or linking", + "userSetupAllowed": false + } + ] + }, + { + "id": "483b9aca-309a-450f-a036-3f91ed427169", + "alias": "forms", + "description": "Username, password, otp and other auth forms.", + 
"providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Browser - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "e626fb7e-bfbf-45d0-ac7a-1248612dbc08", + "alias": "http challenge", + "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "no-cookie-redirect", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Authentication Options", + "userSetupAllowed": false + } + ] + }, + { + "id": "be19b74b-c500-4e5f-9e0a-8d8bde67fad8", + "alias": "registration", + "description": "registration flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-page-form", + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": true, + "flowAlias": "registration form", + "userSetupAllowed": false + } + ] + }, + { + "id": "70aa5f09-5412-4b43-afad-8e81c55c91ee", + "alias": "registration form", + "description": "registration form", + "providerId": "form-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-user-creation", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": 
false + }, + { + "authenticator": "registration-profile-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-password-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-recaptcha-action", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 60, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "6901823a-7f69-4bbc-866e-1f0f61e4879e", + "alias": "reset credentials", + "description": "Reset credentials for a user if they forgot their password or something", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "reset-credentials-choose-user", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-credential-email", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 40, + "autheticatorFlow": true, + "flowAlias": "Reset - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "03e09258-ab92-4b75-a9c9-b701c4ed9d66", + "alias": "saml ecp", + "description": "SAML ECP Profile Authentication Flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + 
"autheticatorFlow": false, + "userSetupAllowed": false + } + ] + } + ], + "authenticatorConfig": [ + { + "id": "45eb38fc-db41-45a8-b75d-fd6e1fe42a44", + "alias": "create unique user config", + "config": { + "require.password.update.after.registration": "false" + } + }, + { + "id": "7d5b9e6c-6959-410d-a3ef-40f189cf332e", + "alias": "review profile config", + "config": { + "update.profile.on.first.login": "missing" + } + } + ], + "requiredActions": [ + { + "alias": "CONFIGURE_TOTP", + "name": "Configure OTP", + "providerId": "CONFIGURE_TOTP", + "enabled": true, + "defaultAction": false, + "priority": 10, + "config": {} + }, + { + "alias": "terms_and_conditions", + "name": "Terms and Conditions", + "providerId": "terms_and_conditions", + "enabled": false, + "defaultAction": false, + "priority": 20, + "config": {} + }, + { + "alias": "UPDATE_PASSWORD", + "name": "Update Password", + "providerId": "UPDATE_PASSWORD", + "enabled": true, + "defaultAction": false, + "priority": 30, + "config": {} + }, + { + "alias": "UPDATE_PROFILE", + "name": "Update Profile", + "providerId": "UPDATE_PROFILE", + "enabled": true, + "defaultAction": false, + "priority": 40, + "config": {} + }, + { + "alias": "VERIFY_EMAIL", + "name": "Verify Email", + "providerId": "VERIFY_EMAIL", + "enabled": true, + "defaultAction": false, + "priority": 50, + "config": {} + }, + { + "alias": "delete_account", + "name": "Delete Account", + "providerId": "delete_account", + "enabled": false, + "defaultAction": false, + "priority": 60, + "config": {} + }, + { + "alias": "update_user_locale", + "name": "Update User Locale", + "providerId": "update_user_locale", + "enabled": true, + "defaultAction": false, + "priority": 1000, + "config": {} + } + ], + "browserFlow": "browser", + "registrationFlow": "registration", + "directGrantFlow": "direct grant", + "resetCredentialsFlow": "reset credentials", + "clientAuthenticationFlow": "clients", + "dockerAuthenticationFlow": "docker auth", + "attributes": { + 
"cibaBackchannelTokenDeliveryMode": "poll", + "cibaExpiresIn": "120", + "cibaAuthRequestedUserHint": "login_hint", + "oauth2DeviceCodeLifespan": "600", + "clientOfflineSessionMaxLifespan": "0", + "oauth2DevicePollingInterval": "5", + "clientSessionIdleTimeout": "0", + "parRequestUriLifespan": "60", + "clientSessionMaxLifespan": "0", + "clientOfflineSessionIdleTimeout": "0", + "cibaInterval": "5" + }, + "keycloakVersion": "18.0.2", + "userManagedAccessAllowed": false, + "clientProfiles": { + "profiles": [] + }, + "clientPolicies": { + "policies": [] + } +} diff --git a/bin/run_server_locally b/bin/run_server_locally new file mode 100755 index 00000000..46e96051 --- /dev/null +++ b/bin/run_server_locally @@ -0,0 +1,29 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if [[ -z "${SPIFFWORKFLOW_BACKEND_ENV:-}" ]]; then + export SPIFFWORKFLOW_BACKEND_ENV=development +fi + +if [[ -z "${BPMN_SPEC_ABSOLUTE_DIR:-}" ]]; then + script_dir="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" + export BPMN_SPEC_ABSOLUTE_DIR="$script_dir/../../sample-process-models" +fi + +export FLASK_SESSION_SECRET_KEY=super_secret_key +export APPLICATION_ROOT="/" + +if [[ -n "${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-}" ]]; then + ./bin/boot_server_in_docker +else + if [[ -z "${PROCESS_WAITING_MESSAGES:-}" ]]; then + export PROCESS_WAITING_MESSAGES="true" + fi + FLASK_APP=src/spiffworkflow_backend poetry run flask run -p 7000 +fi diff --git a/bin/run_sql b/bin/run_sql new file mode 100755 index 00000000..19ebf820 --- /dev/null +++ b/bin/run_sql @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + + +tables=( +message_model +message_instance +message_correlation +message_correlation_property +message_correlation_message_instance +) + +for i in "${tables[@]}" ;do + echo "$i" + mysql -uroot -e "select * from spiffworkflow_backend_development.${i}" +done + +echo "process_instance" +mysql -uroot -e "select id,process_model_identifier,process_group_identifier,status from spiffworkflow_backend_development.process_instance" diff --git a/bin/save_all_bpmn.py b/bin/save_all_bpmn.py new file mode 100644 index 00000000..cf5dc51c --- /dev/null +++ b/bin/save_all_bpmn.py @@ -0,0 +1,99 @@ +"""Grabs tickets from csv and makes process instances.""" +import os + +from spiffworkflow_backend import create_app +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + +# from lxml.etree import Element as EtreeElement + + +def main(): + """Main.""" + os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "development" + flask_env_key = "FLASK_SESSION_SECRET_KEY" + os.environ[flask_env_key] = "whatevs" + if "BPMN_SPEC_ABSOLUTE_DIR" not in os.environ: + home = os.environ["HOME"] + full_process_model_path = ( + f"{home}/projects/github/sartography/sample-process-models" + ) + if os.path.isdir(full_process_model_path): + os.environ["BPMN_SPEC_ABSOLUTE_DIR"] = full_process_model_path + else: + raise Exception(f"Could not find {full_process_model_path}") + app = create_app() + with app.app_context(): + no_primary = [] + failing_process_models = [] + process_models = ProcessModelService().get_process_models() + for process_model in process_models: + if process_model.primary_file_name: + + bpmn_xml_file_contents = SpecFileService.get_data( + process_model, process_model.primary_file_name + ) + bad_files = [ + "B.1.0.bpmn", + "C.1.0.bpmn", + "C.2.0.bpmn", + "C.6.0.bpmn", + "TC-5.1.bpmn", + ] + if process_model.primary_file_name in 
bad_files: + continue + print(f"primary_file_name: {process_model.primary_file_name}") + try: + SpecFileService.update_file( + process_model, + process_model.primary_file_name, + bpmn_xml_file_contents, + ) + except Exception as ex: + failing_process_models.append( + (process_model.primary_file_name, str(ex)) + ) + # files = SpecFileService.get_files( + # process_model, extension_filter="bpmn" + # ) + # bpmn_etree_element: EtreeElement = ( + # SpecFileService.get_etree_element_from_binary_data( + # bpmn_xml_file_contents, process_model.primary_file_name + # ) + # ) + # if len(files) == 1: + # try: + # new_bpmn_process_identifier = ( + # SpecFileService.get_bpmn_process_identifier( + # bpmn_etree_element + # ) + # ) + # if ( + # process_model.primary_process_id + # != new_bpmn_process_identifier + # ): + # print( + # "primary_process_id: ", process_model.primary_process_id + # ) + # # attributes_to_update = { + # # "primary_process_id": new_bpmn_process_identifier + # # } + # # ProcessModelService().update_spec( + # # process_model, attributes_to_update + # # ) + # # except Exception as exception: + # except Exception: + # print(f"BAD ONE: {process_model.id}") + # # raise exception + else: + no_primary.append(process_model) + # for bpmn in no_primary: + # print(bpmn) + for bpmn_errors in failing_process_models: + print(bpmn_errors) + if len(failing_process_models) > 0: + exit(1) + + +if __name__ == "__main__": + main() diff --git a/bin/spiffworkflow-realm.json b/bin/spiffworkflow-realm.json new file mode 100644 index 00000000..a2778fee --- /dev/null +++ b/bin/spiffworkflow-realm.json @@ -0,0 +1,2834 @@ +{ + "id": "spiffworkflow", + "realm": "spiffworkflow", + "notBefore": 0, + "defaultSignatureAlgorithm": "RS256", + "revokeRefreshToken": false, + "refreshTokenMaxReuse": 0, + "accessTokenLifespan": 86400, + "accessTokenLifespanForImplicitFlow": 900, + "ssoSessionIdleTimeout": 1800, + "ssoSessionMaxLifespan": 36000, + "ssoSessionIdleTimeoutRememberMe": 0, + 
"ssoSessionMaxLifespanRememberMe": 0, + "offlineSessionIdleTimeout": 2592000, + "offlineSessionMaxLifespanEnabled": false, + "offlineSessionMaxLifespan": 5184000, + "clientSessionIdleTimeout": 0, + "clientSessionMaxLifespan": 0, + "clientOfflineSessionIdleTimeout": 0, + "clientOfflineSessionMaxLifespan": 0, + "accessCodeLifespan": 60, + "accessCodeLifespanUserAction": 300, + "accessCodeLifespanLogin": 1800, + "actionTokenGeneratedByAdminLifespan": 43200, + "actionTokenGeneratedByUserLifespan": 300, + "oauth2DeviceCodeLifespan": 600, + "oauth2DevicePollingInterval": 5, + "enabled": true, + "sslRequired": "external", + "registrationAllowed": false, + "registrationEmailAsUsername": false, + "rememberMe": false, + "verifyEmail": false, + "loginWithEmailAllowed": true, + "duplicateEmailsAllowed": false, + "resetPasswordAllowed": false, + "editUsernameAllowed": false, + "bruteForceProtected": false, + "permanentLockout": false, + "maxFailureWaitSeconds": 900, + "minimumQuickLoginWaitSeconds": 60, + "waitIncrementSeconds": 60, + "quickLoginCheckMilliSeconds": 1000, + "maxDeltaTimeSeconds": 43200, + "failureFactor": 30, + "roles": { + "realm": [ + { + "id": "c9f0ff93-642d-402b-965a-04d70719886b", + "name": "default-roles-spiffworkflow", + "description": "${role_default-roles}", + "composite": true, + "composites": { + "realm": ["offline_access", "uma_authorization"], + "client": { + "account": ["view-profile", "manage-account"] + } + }, + "clientRole": false, + "containerId": "spiffworkflow", + "attributes": {} + }, + { + "id": "9f474167-5707-4c10-8f9e-bb54ec715cd3", + "name": "uma_authorization", + "description": "${role_uma_authorization}", + "composite": false, + "clientRole": false, + "containerId": "spiffworkflow", + "attributes": {} + }, + { + "id": "6738d143-2d1d-4458-8a98-01ea003fde14", + "name": "admin", + "composite": false, + "clientRole": false, + "containerId": "spiffworkflow", + "attributes": {} + }, + { + "id": "6cbcdea5-0083-469d-9576-1d245fb3cdfd", + 
"name": "repeat-form-role-realm", + "composite": false, + "clientRole": false, + "containerId": "spiffworkflow", + "attributes": {} + }, + { + "id": "b5a92aee-82d2-4687-8282-365df4df21a9", + "name": "offline_access", + "description": "${role_offline-access}", + "composite": false, + "clientRole": false, + "containerId": "spiffworkflow", + "attributes": {} + } + ], + "client": { + "realm-management": [ + { + "id": "257c348c-4b9e-4fea-be39-5fdd28e8bb93", + "name": "manage-authorization", + "description": "${role_manage-authorization}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "1d224265-63a8-40ea-9316-47627d0aed8c", + "name": "view-authorization", + "description": "${role_view-authorization}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "535d7ca0-0f06-42d8-938b-e6e7aabffb42", + "name": "query-groups", + "description": "${role_query-groups}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "9ff52ab5-2558-4cb0-901f-6e6f1469d075", + "name": "realm-admin", + "description": "${role_realm-admin}", + "composite": true, + "composites": { + "client": { + "realm-management": [ + "manage-authorization", + "view-authorization", + "query-groups", + "view-clients", + "view-realm", + "manage-users", + "query-users", + "impersonation", + "manage-clients", + "view-identity-providers", + "create-client", + "query-realms", + "view-users", + "view-events", + "manage-identity-providers", + "manage-events", + "query-clients", + "manage-realm" + ] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "98db35e3-833f-4b61-83af-fc50484fda57", + "name": "view-clients", + "description": "${role_view-clients}", + "composite": true, + "composites": { + 
"client": { + "realm-management": ["query-clients"] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "e0dc0e0c-eba4-4de7-b2eb-2ba095c4c6d4", + "name": "manage-users", + "description": "${role_manage-users}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "69ce3805-1897-4291-842b-b8e8e9f29bd7", + "name": "view-realm", + "description": "${role_view-realm}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "3e803641-96b1-44d8-9de5-7dee83a0a75b", + "name": "impersonation", + "description": "${role_impersonation}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "2c92c3e5-1a0a-4318-9b63-617c5dca0b66", + "name": "query-users", + "description": "${role_query-users}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "326a3718-390d-4e41-af00-2197d3ef6858", + "name": "manage-clients", + "description": "${role_manage-clients}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "e4c69181-5e0d-484e-ac31-be6beef57c28", + "name": "create-client", + "description": "${role_create-client}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "f4ac66cc-97b4-4590-beae-5ff23c9935b3", + "name": "query-realms", + "description": "${role_query-realms}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "a24704fe-13fd-40e6-bf2d-29014f63c069", + "name": "view-identity-providers", + "description": 
"${role_view-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "7deec87c-2716-40c1-a115-2a0fe840b119", + "name": "view-users", + "description": "${role_view-users}", + "composite": true, + "composites": { + "client": { + "realm-management": ["query-groups", "query-users"] + } + }, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "827c40ae-b4c2-4574-9f34-db33925cd19c", + "name": "view-events", + "description": "${role_view-events}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "cbe05c62-2b07-4ac7-a33a-ffca7c176252", + "name": "manage-events", + "description": "${role_manage-events}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "8ca56814-a817-4849-a515-45399eb1dcc1", + "name": "manage-identity-providers", + "description": "${role_manage-identity-providers}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "1134c6df-d0ff-498d-9dc4-ad989f7cfe93", + "name": "query-clients", + "description": "${role_query-clients}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + }, + { + "id": "3bb14549-60f6-4078-8f4e-47a1162412f2", + "name": "manage-realm", + "description": "${role_manage-realm}", + "composite": false, + "clientRole": true, + "containerId": "4ce68130-aced-4e67-936a-8082dc843cc2", + "attributes": {} + } + ], + "spiffworkflow-frontend": [], + "security-admin-console": [], + "admin-cli": [], + "spiffworkflow-backend": [ + { + "id": "4d71d1bb-d627-43c8-bc07-d542f816e04b", + "name": "spiffworkflow-admin", + "composite": false, + "clientRole": true, + 
"containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": {} + }, + { + "id": "2341ca1c-24c8-4ddf-874c-7153c9408068", + "name": "uma_protection", + "composite": false, + "clientRole": true, + "containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": {} + }, + { + "id": "cf88054e-4bdc-491c-bf93-c660cdaad72d", + "name": "repeat-form-role-2", + "composite": false, + "clientRole": true, + "containerId": "f44558af-3601-4e54-b854-08396a247544", + "attributes": { + "repeat-form-role-2-att-key": ["repeat-form-role-2-att-value"] + } + } + ], + "withAuth": [ + { + "id": "87673823-6a5a-4cb2-baa7-6c8b5da5d402", + "name": "uma_protection", + "composite": false, + "clientRole": true, + "containerId": "5d94a8c3-f56b-4eff-ac39-8580053a7fbe", + "attributes": {} + } + ], + "broker": [ + { + "id": "6d688d72-cf5b-4450-a902-cb2d41f0e04c", + "name": "read-token", + "description": "${role_read-token}", + "composite": false, + "clientRole": true, + "containerId": "55d75754-cf1b-4875-bf3e-15add4be8c99", + "attributes": {} + } + ], + "account": [ + { + "id": "9c51c3e1-028d-4a0d-96dc-6619196b49f0", + "name": "delete-account", + "description": "${role_delete-account}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "f395d221-7f80-4fcf-90ac-0a89c8b15a9b", + "name": "manage-consent", + "description": "${role_manage-consent}", + "composite": true, + "composites": { + "client": { + "account": ["view-consent"] + } + }, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "7abb4169-1960-4b4d-b5ae-6ea45cf91ee4", + "name": "view-consent", + "description": "${role_view-consent}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "4d3c24ed-cc61-4a6e-ac78-47af4545b415", + "name": "manage-account-links", + "description": 
"${role_manage-account-links}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "a4954091-9be9-4b7c-a196-1af934917ff7", + "name": "view-profile", + "description": "${role_view-profile}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "0810773c-a57d-449e-a31f-1344e1eb4b9b", + "name": "manage-account", + "description": "${role_manage-account}", + "composite": true, + "composites": { + "client": { + "account": ["manage-account-links"] + } + }, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + }, + { + "id": "ae774a41-a274-4f99-9d7f-f4a0d5dbc085", + "name": "view-applications", + "description": "${role_view-applications}", + "composite": false, + "clientRole": true, + "containerId": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "attributes": {} + } + ] + } + }, + "groups": [], + "defaultRole": { + "id": "c9f0ff93-642d-402b-965a-04d70719886b", + "name": "default-roles-spiffworkflow", + "description": "${role_default-roles}", + "composite": true, + "clientRole": false, + "containerId": "spiffworkflow" + }, + "requiredCredentials": ["password"], + "otpPolicyType": "totp", + "otpPolicyAlgorithm": "HmacSHA1", + "otpPolicyInitialCounter": 0, + "otpPolicyDigits": 6, + "otpPolicyLookAheadWindow": 1, + "otpPolicyPeriod": 30, + "otpSupportedApplications": ["FreeOTP", "Google Authenticator"], + "webAuthnPolicyRpEntityName": "keycloak", + "webAuthnPolicySignatureAlgorithms": ["ES256"], + "webAuthnPolicyRpId": "", + "webAuthnPolicyAttestationConveyancePreference": "not specified", + "webAuthnPolicyAuthenticatorAttachment": "not specified", + "webAuthnPolicyRequireResidentKey": "not specified", + "webAuthnPolicyUserVerificationRequirement": "not specified", + "webAuthnPolicyCreateTimeout": 0, + "webAuthnPolicyAvoidSameAuthenticatorRegister": false, + 
"webAuthnPolicyAcceptableAaguids": [], + "webAuthnPolicyPasswordlessRpEntityName": "keycloak", + "webAuthnPolicyPasswordlessSignatureAlgorithms": ["ES256"], + "webAuthnPolicyPasswordlessRpId": "", + "webAuthnPolicyPasswordlessAttestationConveyancePreference": "not specified", + "webAuthnPolicyPasswordlessAuthenticatorAttachment": "not specified", + "webAuthnPolicyPasswordlessRequireResidentKey": "not specified", + "webAuthnPolicyPasswordlessUserVerificationRequirement": "not specified", + "webAuthnPolicyPasswordlessCreateTimeout": 0, + "webAuthnPolicyPasswordlessAvoidSameAuthenticatorRegister": false, + "webAuthnPolicyPasswordlessAcceptableAaguids": [], + "users": [ + { + "id": "4c436296-8471-4105-b551-80eee96b43bb", + "createdTimestamp": 1657139858075, + "username": "ciadmin1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "111b5ea1-c2ab-470a-a16b-2373bc94de7a", + "type": "password", + "createdDate": 1657139904275, + "secretData": "{\"value\":\"e5MjWAk7RPspQIh9gEOKyv3AV/DHNoWk8w1tf+MRLh2oxrKmnnizOj0eFtIadT/q/i5JRfUq5IYBPLL/4nEJDw==\",\"salt\":\"5inqMqqTR6+PBYriy3RPjA==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-spiffworkflow", "admin"], + "clientRoles": { + "spiffworkflow-backend": ["spiffworkflow-admin", "uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "56457e8f-47c6-4f9f-a72b-473dea5edfeb", + "createdTimestamp": 1657139955336, + "username": "ciuser1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "762f36e9-47af-44da-8520-cf09d752497a", + "type": "password", + "createdDate": 1657139966468, + "secretData": 
"{\"value\":\"Dpn9QBJSxvl54b0Fu+OKrKRwmDJbk28FQ3xhlOdJPvZVJU/SpdrcsH7ktYAIkVLkRC5qILSZuNPQ3vDGzE2r1Q==\",\"salt\":\"yXd7N8XIQBkJ7swHDeRzXw==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-spiffworkflow"], + "clientRoles": { + "spiffworkflow-backend": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "a15da457-7ebb-49d4-9dcc-6876cb71600d", + "createdTimestamp": 1657115919770, + "username": "repeat_form_user_1", + "enabled": true, + "totp": false, + "emailVerified": false, + "credentials": [ + { + "id": "509dfd8d-a54e-4d8b-b250-ec99e585e15d", + "type": "password", + "createdDate": 1657298008525, + "secretData": "{\"value\":\"/47zG9XBvKg+1P2z6fRL4cyUNn+sB4BgXsxBsvi1NYR9Z20WTeWzzOT2uXvv2ajKMRHrv0OqTesldvSJXARPqA==\",\"salt\":\"dODEHOF24xGPx+7QGaIXWQ==\",\"additionalParameters\":{}}", + "credentialData": "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}" + } + ], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-spiffworkflow"], + "clientRoles": { + "spiffworkflow-backend": ["uma_protection", "repeat-form-role-2"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "487d3a85-89dd-4839-957a-c3f6d70551f6", + "createdTimestamp": 1657115173081, + "username": "service-account-spiffworkflow-backend", + "enabled": true, + "totp": false, + "emailVerified": false, + "serviceAccountClientId": "spiffworkflow-backend", + "credentials": [], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-spiffworkflow"], + "clientRoles": { + "spiffworkflow-backend": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + }, + { + "id": "22de68b1-4b06-4bc2-8da6-0c577e7e62ad", + "createdTimestamp": 1657055472800, + "username": "service-account-withauth", 
+ "enabled": true, + "totp": false, + "emailVerified": false, + "serviceAccountClientId": "withAuth", + "credentials": [], + "disableableCredentialTypes": [], + "requiredActions": [], + "realmRoles": ["default-roles-spiffworkflow"], + "clientRoles": { + "withAuth": ["uma_protection"] + }, + "notBefore": 0, + "groups": [] + } + ], + "scopeMappings": [ + { + "clientScope": "offline_access", + "roles": ["offline_access"] + } + ], + "clients": [ + { + "id": "e39b3c85-bb9d-4c73-8250-be087c82ae48", + "clientId": "account", + "name": "${client_account}", + "rootUrl": "${authBaseUrl}", + "baseUrl": "/realms/spiffworkflow/account/", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/realms/spiffworkflow/account/*"], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": 
"false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "02fa6179-9399-4bb1-970f-c4d8e8b5f99f", + "clientId": "admin-cli", + "name": "${client_admin-cli}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": false, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + 
"saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "55d75754-cf1b-4875-bf3e-15add4be8c99", + "clientId": "broker", + "name": "${client_broker}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + 
"saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "4ce68130-aced-4e67-936a-8082dc843cc2", + "clientId": "realm-management", + "name": "${client_realm-management}", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": true, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + 
"saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "7c82344d-d4ae-4599-bbce-583cc8848199", + "clientId": "security-admin-console", + "name": "${client_security-admin-console}", + "rootUrl": "${authAdminUrl}", + "baseUrl": "/admin/spiffworkflow/console/", + "surrogateAuthRequired": false, + "enabled": false, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": ["/admin/spiffworkflow/console/*"], + "webOrigins": ["+"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": false, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "false", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "pkce.code.challenge.method": "S256", + "saml.allow.ecp.flow": "false", + 
"id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": false, + "nodeReRegistrationTimeout": 0, + "protocolMappers": [ + { + "id": "949c8afa-a06e-4a86-9260-6f477fc9ad9d", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "f44558af-3601-4e54-b854-08396a247544", + "clientId": "spiffworkflow-backend", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "secret": "JXeQExm0JhQPLumgHtIIqf52bDalHz0q", + "redirectUris": [ + "http://localhost:7000/*", + "http://167.172.242.138:7000/*" + ], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "authorizationServicesEnabled": true, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + 
"saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "client.secret.creation.time": "1657115173", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "af3598ab-74a9-48ba-956f-431b14acd896", + "name": "Client IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + }, + { + "id": "87369cf7-2a77-40fd-a926-a26d689831a0", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "userinfo.token.claim": "true", + "id.token.claim": 
"true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + }, + { + "id": "2c78d7e8-0a99-43bd-bc29-0ba062ed8750", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientId", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "authorizationSettings": { + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "Default Resource", + "type": "urn:spiffworkflow-backend:resources:default", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "8e00e4a3-3fff-4521-b7f0-95f66c2f79d2", + "uris": ["/*"] + }, + { + "name": "everything", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "446bdcf4-a3bd-41c7-a0f8-67a225ba6b57", + "uris": ["/*"], + "scopes": [ + { + "name": "read" + }, + { + "name": "update" + }, + { + "name": "delete" + }, + { + "name": "instantiate" + } + ] + }, + { + "name": "process-model-with-repeating-form-crud", + "type": "process-model", + "ownerManagedAccess": false, + "displayName": "process-model-with-repeating-form-crud", + "attributes": { + "test_resource_att1": ["this_is_the_value"] + }, + "_id": "e294304c-796e-4c56-bdf2-8c854f65db59", + "uris": [ + "/process-models/category_number_one/process-model-with-repeating-form" + ], + "scopes": [ + { + "name": "read" + }, + { + "name": "update" + }, + { + "name": "delete" + }, + { + "name": "instantiate" + } + ] + } + ], + "policies": [ + { + "id": "048d043e-d98c-44d8-8c85-656ba117053e", + "name": "repeat-form-role-policy", + "type": "role", + "logic": "POSITIVE", + 
"decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"spiffworkflow-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "ac55237b-6ec9-4f66-bb8e-bee94a5bb5e9", + "name": "admins have everything", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"spiffworkflow-backend/spiffworkflow-admin\",\"required\":false}]" + } + }, + { + "id": "7dac9bea-d415-4bc4-8817-7a71c2b3ce32", + "name": "Default Policy", + "description": "A policy that grants access only for users within this realm", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "AFFIRMATIVE", + "config": { + "roles": "[{\"id\":\"spiffworkflow-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "5133ae0b-5e90-48a6-bdd9-3f323e10c44d", + "name": "repeat-form-read", + "type": "scope", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"process-model-with-repeating-form-crud\"]", + "scopes": "[\"read\"]", + "applyPolicies": "[\"repeat-form-role-policy\"]" + } + }, + { + "id": "0a86ae38-7460-4bc2-b1f9-f933531303ac", + "name": "all_permissions", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"everything\"]", + "applyPolicies": "[\"admins have everything\"]" + } + }, + { + "id": "4b634627-51d9-4257-91d9-29503490e4fb", + "name": "Default Permission", + "description": "A permission that applies to the default resource type", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "defaultResourceType": "urn:spiffworkflow-backend:resources:default", + "applyPolicies": "[\"Default Policy\"]" + } + } + ], + "scopes": [ + { + "id": "c03b5c4e-f1bb-4066-8666-3c8a6f44ddb3", + "name": "read", + "displayName": "read" + }, + { + "id": "f55c3e81-9257-4618-9acb-32c57fc561a6", + "name": "update", + "displayName": "update" + }, + { + "id": 
"c8628417-7ffa-4675-9cda-955df62ea1db", + "name": "delete", + "displayName": "delete" + }, + { + "id": "50ef4129-aa88-4ecd-9afe-c7e5a1b66142", + "name": "instantiate", + "displayName": "instantiate" + } + ], + "decisionStrategy": "UNANIMOUS" + } + }, + { + "id": "9f340eba-2b84-43d0-a976-010e270e3981", + "clientId": "spiffworkflow-frontend", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "redirectUris": [ + "http://localhost:7001/*", + "http://167.172.242.138:7001/*" + ], + "webOrigins": ["*"], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": false, + "publicClient": true, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": "false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + 
}, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ] + }, + { + "id": "5d94a8c3-f56b-4eff-ac39-8580053a7fbe", + "clientId": "withAuth", + "surrogateAuthRequired": false, + "enabled": true, + "alwaysDisplayInConsole": false, + "clientAuthenticatorType": "client-secret", + "secret": "6o8kIKQznQtejHOdRhWeKorBJclMGcgA", + "redirectUris": [ + "http://localhost:7001/*", + "http://167.172.242.138:7001/*" + ], + "webOrigins": [], + "notBefore": 0, + "bearerOnly": false, + "consentRequired": false, + "standardFlowEnabled": true, + "implicitFlowEnabled": false, + "directAccessGrantsEnabled": true, + "serviceAccountsEnabled": true, + "authorizationServicesEnabled": true, + "publicClient": false, + "frontchannelLogout": false, + "protocol": "openid-connect", + "attributes": { + "saml.force.post.binding": "false", + "saml.multivalued.roles": "false", + "frontchannel.logout.session.required": "false", + "oauth2.device.authorization.grant.enabled": "false", + "backchannel.logout.revoke.offline.tokens": "false", + "saml.server.signature.keyinfo.ext": "false", + "use.refresh.tokens": "true", + "oidc.ciba.grant.enabled": "false", + "backchannel.logout.session.required": "true", + "client_credentials.use_refresh_token": "false", + "require.pushed.authorization.requests": "false", + "saml.client.signature": "false", + "saml.allow.ecp.flow": "false", + "id.token.as.detached.signature": "false", + "saml.assertion.signature": "false", + "client.secret.creation.time": "1657055472", + "saml.encrypt": "false", + "saml.server.signature": "false", + "exclude.session.state.from.auth.response": "false", + "saml.artifact.binding": "false", + "saml_force_name_id_format": "false", + "acr.loa.map": "{}", + "tls.client.certificate.bound.access.tokens": 
"false", + "saml.authnstatement": "false", + "display.on.consent.screen": "false", + "token.response.type.bearer.lower-case": "false", + "saml.onetimeuse.condition": "false" + }, + "authenticationFlowBindingOverrides": {}, + "fullScopeAllowed": true, + "nodeReRegistrationTimeout": -1, + "protocolMappers": [ + { + "id": "abfc756f-fc57-45b4-8a40-0cd0f8081f0c", + "name": "Client ID", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientId", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientId", + "jsonType.label": "String" + } + }, + { + "id": "c05d38b7-9b4d-4286-b40c-f48b3cca42e3", + "name": "Client Host", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientHost", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientHost", + "jsonType.label": "String" + } + }, + { + "id": "b27d0bd8-b8d9-43cb-a07a-3ec4bdc818dc", + "name": "Client IP Address", + "protocol": "openid-connect", + "protocolMapper": "oidc-usersessionmodel-note-mapper", + "consentRequired": false, + "config": { + "user.session.note": "clientAddress", + "userinfo.token.claim": "true", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "clientAddress", + "jsonType.label": "String" + } + } + ], + "defaultClientScopes": [ + "web-origins", + "acr", + "profile", + "roles", + "email" + ], + "optionalClientScopes": [ + "address", + "phone", + "offline_access", + "microprofile-jwt" + ], + "authorizationSettings": { + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "Default Resource", + "type": "urn:withAuth:resources:default", + "ownerManagedAccess": false, + "attributes": {}, + "_id": 
"c882ad40-c15d-4f88-ad60-c2ea2f486ce2", + "uris": ["/*"] + } + ], + "policies": [ + { + "id": "b8b338bc-884d-43cf-96d8-3776f2b220f3", + "name": "Default Policy", + "description": "A policy that grants access only for users within this realm", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "AFFIRMATIVE", + "config": { + "roles": "[{\"id\":\"spiffworkflow-backend/repeat-form-role-2\",\"required\":false}]" + } + }, + { + "id": "4f5afa22-0fdf-4ed7-97b9-35400591bf6f", + "name": "Default Permission", + "description": "A permission that applies to the default resource type", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "defaultResourceType": "urn:withAuth:resources:default", + "applyPolicies": "[\"Default Policy\"]" + } + } + ], + "scopes": [], + "decisionStrategy": "UNANIMOUS" + } + } + ], + "clientScopes": [ + { + "id": "fa3d9944-cf66-4af9-b931-1f3b02943e5b", + "name": "acr", + "description": "OpenID Connect scope for add acr (authentication context class reference) to the token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": "12ad0a69-d414-4b5b-9f5f-b647db5f8959", + "name": "acr loa level", + "protocol": "openid-connect", + "protocolMapper": "oidc-acr-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + } + ] + }, + { + "id": "4e69d058-1229-4704-9411-decf25da0a49", + "name": "profile", + "description": "OpenID Connect built-in scope: profile", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${profileScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "d0d7334e-3f11-45d2-9670-46dbc1977cb2", + "name": "full name", + "protocol": "openid-connect", + "protocolMapper": 
"oidc-full-name-mapper", + "consentRequired": false, + "config": { + "id.token.claim": "true", + "access.token.claim": "true", + "userinfo.token.claim": "true" + } + }, + { + "id": "4efcf169-4df2-4cdb-b331-005aff1cee28", + "name": "website", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "website", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "website", + "jsonType.label": "String" + } + }, + { + "id": "3f639f2f-cf0e-4651-ab93-15a77023b5a0", + "name": "given name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "firstName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "given_name", + "jsonType.label": "String" + } + }, + { + "id": "16e93663-bf6a-4f6d-b5ab-8e68bf118f72", + "name": "nickname", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "nickname", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "nickname", + "jsonType.label": "String" + } + }, + { + "id": "b9c97283-8153-4c4d-b8d8-dd1bde17823b", + "name": "username", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "preferred_username", + "jsonType.label": "String" + } + }, + { + "id": "eeead6c7-1dae-4be1-9eca-988ffb38aaf4", + "name": "zoneinfo", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + 
"user.attribute": "zoneinfo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "zoneinfo", + "jsonType.label": "String" + } + }, + { + "id": "d62991bc-2583-42be-bb08-8d1527c4f162", + "name": "family name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "lastName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "family_name", + "jsonType.label": "String" + } + }, + { + "id": "9f761222-f84d-4a25-a53f-13e196d38a46", + "name": "profile", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "profile", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "profile", + "jsonType.label": "String" + } + }, + { + "id": "ec866e3c-582f-4c99-920f-d57cf03d772d", + "name": "gender", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "gender", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "gender", + "jsonType.label": "String" + } + }, + { + "id": "b05e679c-e00e-427e-8e47-0a4fd411c7a6", + "name": "updated at", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "updatedAt", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "updated_at", + "jsonType.label": "long" + } + }, + { + "id": "505ff402-5533-48ea-91f9-ab4804c3826b", + "name": "middle name", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": 
"middleName", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "middle_name", + "jsonType.label": "String" + } + }, + { + "id": "d546af31-b669-442b-9a9d-8a6478364002", + "name": "picture", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "picture", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "picture", + "jsonType.label": "String" + } + }, + { + "id": "5a75c993-290f-4bfb-9044-5d7d269378b2", + "name": "birthdate", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "birthdate", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "birthdate", + "jsonType.label": "String" + } + }, + { + "id": "2d387240-0f2f-4f30-8464-0e7c57946743", + "name": "locale", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "locale", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "locale", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "2efee39d-723c-44af-9eb1-4dde9635b249", + "name": "email", + "description": "OpenID Connect built-in scope: email", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${emailScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "5bf7db0f-a915-43c2-bff4-475ee5c3259b", + "name": "email", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "email", + "id.token.claim": "true", + "access.token.claim": "true", + 
"claim.name": "email", + "jsonType.label": "String" + } + }, + { + "id": "687a8c7d-c93f-47d9-a176-78b0954429c7", + "name": "email verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "emailVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "email_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "4a7737cf-83e3-40e1-b36d-9566b34e4148", + "name": "phone", + "description": "OpenID Connect built-in scope: phone", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${phoneScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "14bd2816-a2f3-4fde-9ac2-452dea2e9e58", + "name": "phone number", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumber", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number", + "jsonType.label": "String" + } + }, + { + "id": "6172e315-8999-4df8-89fa-75ffd1981793", + "name": "phone number verified", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-attribute-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "phoneNumberVerified", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "phone_number_verified", + "jsonType.label": "boolean" + } + } + ] + }, + { + "id": "5ad0c621-d3ec-4018-98c8-d6fb630d661f", + "name": "microprofile-jwt", + "description": "Microprofile - JWT built-in scope", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "false" + }, + "protocolMappers": [ + { + "id": 
"252fdd9f-cc91-4ca3-aaab-cdf053360e94", + "name": "groups", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "multivalued": "true", + "userinfo.token.claim": "true", + "user.attribute": "foo", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "groups", + "jsonType.label": "String" + } + }, + { + "id": "8e9b880e-6dd8-4e2f-ade2-77fc8fd0bc6d", + "name": "upn", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-property-mapper", + "consentRequired": false, + "config": { + "userinfo.token.claim": "true", + "user.attribute": "username", + "id.token.claim": "true", + "access.token.claim": "true", + "claim.name": "upn", + "jsonType.label": "String" + } + } + ] + }, + { + "id": "77ca4f26-3777-451b-a907-e258f46f7b95", + "name": "roles", + "description": "OpenID Connect scope for add user roles to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "true", + "consent.screen.text": "${rolesScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "e7ebb9c0-5ed3-4c6f-bb69-22e01d26b49f", + "name": "audience resolve", + "protocol": "openid-connect", + "protocolMapper": "oidc-audience-resolve-mapper", + "consentRequired": false, + "config": {} + }, + { + "id": "66fd470f-419e-44cd-822e-43df8ee5fe1b", + "name": "realm roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-realm-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + "claim.name": "realm_access.roles", + "jsonType.label": "String", + "multivalued": "true" + } + }, + { + "id": "f3c313bc-7da7-4cf6-a0df-b62e77209b7c", + "name": "client roles", + "protocol": "openid-connect", + "protocolMapper": "oidc-usermodel-client-role-mapper", + "consentRequired": false, + "config": { + "user.attribute": "foo", + "access.token.claim": "true", + 
"claim.name": "resource_access.${client_id}.roles", + "jsonType.label": "String", + "multivalued": "true" + } + } + ] + }, + { + "id": "3e9849f5-15ff-43c6-b929-40f26fda2c05", + "name": "offline_access", + "description": "OpenID Connect built-in scope: offline_access", + "protocol": "openid-connect", + "attributes": { + "consent.screen.text": "${offlineAccessScopeConsentText}", + "display.on.consent.screen": "true" + } + }, + { + "id": "ffda6ea6-8add-4c7e-9754-66d00c6735a1", + "name": "web-origins", + "description": "OpenID Connect scope for add allowed web origins to the access token", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "false", + "display.on.consent.screen": "false", + "consent.screen.text": "" + }, + "protocolMappers": [ + { + "id": "05635d42-8bb3-440b-b871-b64c97f524da", + "name": "allowed web origins", + "protocol": "openid-connect", + "protocolMapper": "oidc-allowed-origins-mapper", + "consentRequired": false, + "config": {} + } + ] + }, + { + "id": "6f56ae2b-253f-40f7-ba99-e8c5bbc71423", + "name": "role_list", + "description": "SAML role list", + "protocol": "saml", + "attributes": { + "consent.screen.text": "${samlRoleListScopeConsentText}", + "display.on.consent.screen": "true" + }, + "protocolMappers": [ + { + "id": "7036c17a-9306-4481-82a1-d8d9d77077e5", + "name": "role list", + "protocol": "saml", + "protocolMapper": "saml-role-list-mapper", + "consentRequired": false, + "config": { + "single": "false", + "attribute.nameformat": "Basic", + "attribute.name": "Role" + } + } + ] + }, + { + "id": "ce4493c0-ccb4-45f9-a46e-a40cc3f6d4b2", + "name": "address", + "description": "OpenID Connect built-in scope: address", + "protocol": "openid-connect", + "attributes": { + "include.in.token.scope": "true", + "display.on.consent.screen": "true", + "consent.screen.text": "${addressScopeConsentText}" + }, + "protocolMappers": [ + { + "id": "8a0d3248-d231-40b2-9b8e-3d63bd5a5d12", + "name": "address", + "protocol": 
"openid-connect", + "protocolMapper": "oidc-address-mapper", + "consentRequired": false, + "config": { + "user.attribute.formatted": "formatted", + "user.attribute.country": "country", + "user.attribute.postal_code": "postal_code", + "userinfo.token.claim": "true", + "user.attribute.street": "street", + "id.token.claim": "true", + "user.attribute.region": "region", + "access.token.claim": "true", + "user.attribute.locality": "locality" + } + } + ] + } + ], + "defaultDefaultClientScopes": [ + "email", + "profile", + "role_list", + "roles", + "acr", + "web-origins" + ], + "defaultOptionalClientScopes": [ + "offline_access", + "phone", + "microprofile-jwt", + "address" + ], + "browserSecurityHeaders": { + "contentSecurityPolicyReportOnly": "", + "xContentTypeOptions": "nosniff", + "xRobotsTag": "none", + "xFrameOptions": "SAMEORIGIN", + "contentSecurityPolicy": "frame-src 'self'; frame-ancestors 'self'; object-src 'none';", + "xXSSProtection": "1; mode=block", + "strictTransportSecurity": "max-age=31536000; includeSubDomains" + }, + "smtpServer": {}, + "eventsEnabled": false, + "eventsListeners": ["jboss-logging"], + "enabledEventTypes": [], + "adminEventsEnabled": false, + "adminEventsDetailsEnabled": false, + "identityProviders": [], + "identityProviderMappers": [], + "components": { + "org.keycloak.services.clientregistration.policy.ClientRegistrationPolicy": [ + { + "id": "b8617465-1c84-4a5f-a16f-a6f10f0f66b1", + "name": "Trusted Hosts", + "providerId": "trusted-hosts", + "subType": "anonymous", + "subComponents": {}, + "config": { + "host-sending-registration-request-must-match": ["true"], + "client-uris-must-match": ["true"] + } + }, + { + "id": "6061713a-c1f5-46e1-adfb-762b8768976a", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "oidc-usermodel-attribute-mapper", + "oidc-address-mapper", + "oidc-full-name-mapper", + 
"oidc-sha256-pairwise-sub-mapper", + "oidc-usermodel-property-mapper", + "saml-role-list-mapper", + "saml-user-property-mapper", + "saml-user-attribute-mapper" + ] + } + }, + { + "id": "d68e938d-dde6-47d9-bdc8-8e8523eb08cd", + "name": "Max Clients Limit", + "providerId": "max-clients", + "subType": "anonymous", + "subComponents": {}, + "config": { + "max-clients": ["200"] + } + }, + { + "id": "1209fa5d-37df-4f9a-b4fa-4a3cd94e21fe", + "name": "Allowed Protocol Mapper Types", + "providerId": "allowed-protocol-mappers", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allowed-protocol-mapper-types": [ + "saml-user-property-mapper", + "saml-user-attribute-mapper", + "oidc-full-name-mapper", + "oidc-usermodel-attribute-mapper", + "oidc-sha256-pairwise-sub-mapper", + "oidc-usermodel-property-mapper", + "saml-role-list-mapper", + "oidc-address-mapper" + ] + } + }, + { + "id": "3854361d-3fe5-47fb-9417-a99592e3dc5c", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "authenticated", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + }, + { + "id": "4c4076ec-68ed-46c1-b0a5-3c8ed08dd4f6", + "name": "Consent Required", + "providerId": "consent-required", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "bbbe2ea2-2a36-494b-b57f-8b202740ebf4", + "name": "Full Scope Disabled", + "providerId": "scope", + "subType": "anonymous", + "subComponents": {}, + "config": {} + }, + { + "id": "41eef3e1-bf71-4e8a-b729-fea8eb16b5d8", + "name": "Allowed Client Scopes", + "providerId": "allowed-client-templates", + "subType": "anonymous", + "subComponents": {}, + "config": { + "allow-default-scopes": ["true"] + } + } + ], + "org.keycloak.userprofile.UserProfileProvider": [ + { + "id": "320029d9-7878-445e-8da9-cf418dbbfc73", + "providerId": "declarative-user-profile", + "subComponents": {}, + "config": {} + } + ], + "org.keycloak.keys.KeyProvider": [ + { + "id": 
"1f9958a4-b3ac-4a1b-af95-fd8e6053864a", + "name": "hmac-generated", + "providerId": "hmac-generated", + "subComponents": {}, + "config": { + "kid": ["4e99c641-0494-49d5-979f-45cb5126f6f1"], + "secret": [ + "4wV4voiQmFajEegv83Ugd8DxFoy3JpN4YzO5qMx4XfB7Abq8NKU4Az5AkSpxYBSdb5GJEQypA4aLmnaDyCWLIw" + ], + "priority": ["100"], + "algorithm": ["HS256"] + } + }, + { + "id": "70fe0720-f3b7-47b4-a625-ae8fb6635da1", + "name": "aes-generated", + "providerId": "aes-generated", + "subComponents": {}, + "config": { + "kid": ["76118b54-fc74-4149-9028-fab1fdc07860"], + "secret": ["DvxTn0KA4TEUPqSFBw8qAw"], + "priority": ["100"] + } + }, + { + "id": "a12fdd97-1d72-4d9e-9e6a-f9e0b5d4e5f0", + "name": "rsa-generated", + "providerId": "rsa-generated", + "subComponents": {}, + "config": { + "privateKey": [ + "MIIEpAIBAAKCAQEAimbfmG2pL3qesWhUrQayRyYBbRFE0Ul5Ii/AW8Kq6Kad9R2n2sT2BvXWnsWBH6KuINUFJz3Tb+gWy235Jy0Idmekwx63JR20//ZJ7dyQ+b1iadmYPpqyixGL7NrVxQYT0AEGLcD/Fwsh869F3jgfQt7N15q2arRnOrW5NMwi+IvtHxZRZ3UluxShut2577ef8cakwCv4zoTV29y+Z3XhtlKZ4WOCuqIHL3SRHwNkb+k8cY0Gwc88FHl/ihFR0fX/lc7W2AHRd98ex8il4kBFfShBZur8ZLE7QWQdXRY2EYYr3D/W6/5wf/R2fAvbVmGzcYGZ2qm6d+K1XH8VU3X84wIDAQABAoIBABXXrHwa+nOCz57CD3MLNoGiDuGOsySwisyJartQmraC7TTtDDurkASDMe72zq0WeJK368tIp6DmqQpL/eFf6xD8xHUC2PajnJg033AJuluftvNroupmcb0e9M1ZsBkbH29Zagc4iUmyuRYDWGx8wPpFvYjEYvuuIwiR+3vIp9A/0ZbcBwdtml3Of5gYTXChPj28PrA4K7oFib2Zu1aYCBEdF8h9bKRF/UlvyWeSajjddexSQ6gkEjzAEMpliCDbOGSFGwNu1pY7FF4EpyJbalzdpn44m5v9bqfS9/CDrIOOUus88Nn5wCD2OAmAQnWn0Hnh7at4A5fw3VBUmEt70ckCgYEAx0Fg8Gp3SuMaytrf9HJHJcltyDRsdSxysF1ZvDV9cDUsD28QOa/wFJRVsABxqElU+W6QEc20NMgOHVyPFed5UhQA6WfmydzGIcF5C6T5IbE/5Uk3ptGuPdI0aR7rlRfefQOnUBr28dz5UDBTb93t9+Klxcss+nLGRbugnFBAtTUCgYEAsdD+92nuF/GfET97vbHxtJ6+epHddttWlsa5PVeVOZBE/LUsOZRxmxm4afvZGOkhUrvmA1+U0arcp9crS5+Ol2LUGh/9efqLvoBImBxLwB37VcIYLJi0EVPrhVPh+9r3vah1YMBhtapS0VtuEZOr47Yz7asBg1s1Z06l+bD1JLcCgYA+3YS9NYn/qZl5aQcBs9B4vo2RfeC+M1DYDgvS0rmJ3mzRTcQ7vyOrCoXiarFxW/mgXN69jz4M7RVu9BX83jQrzj3fZjWteKdWXRlYsCseEzNKnwgc7MjhnmGEzQmc15QNs0plfq
xs8MAEKcsZX1bGP873kbvWJMIjnCf3SWaxBQKBgQCh9zt2w19jIewA+vFMbXw7SGk6Hgk6zTlG50YtkMxU/YtJIAFjhUohu8DVkNhDr35x7MLribF1dYu9ueku3ew1CokmLsNkywllAVaebw+0s9qOV9hLLuC989HQxQJPtTj54SrhcPrPTZBYME7G5dqo9PrB3oTnUDoJmoLmOABjawKBgQCeyd12ShpKYHZS4ZvE87OfXanuNfpVxhcXOqYHpQz2W0a+oUu9e78MlwTVooR4O52W/Ohch2FPEzq/1DBjJrK6PrMY8DS018BIVpQ9DS35/Ga9NtSi8DX7jTXacYPwL9n/+//U3vw0mjaoMXgCv44nYu4ro62J6wvVM98hjQmLJw==" + ], + "keyUse": ["SIG"], + "certificate": [ + "MIICqTCCAZECBgGBz6+bXzANBgkqhkiG9w0BAQsFADAYMRYwFAYDVQQDDA1zcGlmZndvcmtmbG93MB4XDTIyMDcwNTE4NDUwMVoXDTMyMDcwNTE4NDY0MVowGDEWMBQGA1UEAwwNc3BpZmZ3b3JrZmxvdzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAIpm35htqS96nrFoVK0GskcmAW0RRNFJeSIvwFvCquimnfUdp9rE9gb11p7FgR+iriDVBSc902/oFstt+SctCHZnpMMetyUdtP/2Se3ckPm9YmnZmD6asosRi+za1cUGE9ABBi3A/xcLIfOvRd44H0Lezdeatmq0Zzq1uTTMIviL7R8WUWd1JbsUobrdue+3n/HGpMAr+M6E1dvcvmd14bZSmeFjgrqiBy90kR8DZG/pPHGNBsHPPBR5f4oRUdH1/5XO1tgB0XffHsfIpeJARX0oQWbq/GSxO0FkHV0WNhGGK9w/1uv+cH/0dnwL21Zhs3GBmdqpunfitVx/FVN1/OMCAwEAATANBgkqhkiG9w0BAQsFAAOCAQEAaI7BEPZpf4MU7bMWmNgyfRTRDy5wtpyfuLPGHZ9EqtnvwwzsmlmXXsC55SLXx3wJETm+rFqeRFbo/hamlRajzzD317AUpE7nhnONTukmh6UuB8hXoWiQTD+YDYMy8kneSP4zvfm27F+TgUC4cvJSYuWVaCxFx52kxqW1hZkBzYUcfi21Qb1jRrbTbso37BxuVX+GdN015If3DPD6QnAhLPAYEFA9jiL16YeMdWHdvlXXmvriDegMUYQjFYPRh6iPzUEdG6KGHItF4AkOYBQAcoaYhfxpxofVlDdOqMZ/1c7AAbe4lR6/jYQ0CbHwdUu4dzJQe3vxr7GdxcB1ypvXPA==" + ], + "priority": ["100"] + } + }, + { + "id": "e16c740d-3ae2-4cc5-a68d-49d99e079672", + "name": "rsa-enc-generated", + "providerId": "rsa-enc-generated", + "subComponents": {}, + "config": { + "privateKey": [ + 
"MIIEowIBAAKCAQEAsqGsclDQDFSTn8HS1LiiNAnTwn3CS8HXPLDYMHr/jUQ8r5eD+vQY5ICh5V5c8l8J6ydbpzffFEKam54Ypp4yzaWJZ4huYBMf4vL7xrAZ4VXBreu16BIxOrThzrJe9WmI8+Annzo62mNYZbjf4WNpZDURmxZSo7v6Czprd5O6T4N5bxr8sjRRptZR8hxtrRvJnuC0jF+dLHIO5SKR1hUVG/gbpIBqGcsLkNC9nnS6M/N5YFzUIV5JhXo3+mrR/yvw7m+oS5yRsN0raCSXVenNP05Dhsd4FOYqoXBBcdgXXbiDxed0HWB/g5dASqyMydHriddGr8FU0W8/uZmF79wxPwIDAQABAoIBAFsWCaL5Bj1jWytZYDJMO5mhcTN5gPu0ShaObo66CVl1dCRtdEUg9xh9ZxBYf7ivMZWRKjEoUj44gDHd+d/sRyeJw3jhnraqydWl5TC5V1kJq4sN6GH/9M5kscf+OGGXgNgqcsnEnYICqm6kSLTbRkBstx+H0HfhQG09StNcpuIn4MsoMZT8XmZbXRLb3FhfpuTSX3t2nbSDRfUf7LI1EDnFQen/AJAA5lOHthLCdz4Gj1vfalOFjCMYOUWmL/mCDEb38F6QJZxkyhmS/r2kM09PFLOio6z3J8C8mVeq7uao0s5xAKj5SJqx4r+TTvL5aOF8JBWm8Hz1Vcip9/MjsQECgYEA/8Hpb4RggNyn+YzTxqxtPtbLFL0YywtNT+gutmJH1gyTjfx7p3dmA/NsdIeuJmBpZfA7oDXIqfj2M9QLfC5bdKnggQzrIO3BgClI88zOIWd229Bt6D1yx92k4+9eaRwOKBPn8+u0mCk8TBv32ecMLQ9o8AKNIHeCZQjByvOrIMECgYEAss0J3TzrRuEOpnxJ9fNOeB3rNpIFrpNua+oEQI4gDbBvyT7osBKkGqfXJpUQMftr8a6uBHLHV7/Wq6/aRkRhk+aER8h01DUIWGLmbCUdkFSJZ8iObMZQvURtckhzxxhYu0Ybwn0RJg/zzR4onTRO+eL1fTnb5Id55PyPt3Pp0f8CgYEAovDOoP6MYOyzk5h1/7gwrX04ytCicBGWQtdgk0/QBn3ir+3wdcPq2Y+HREKA3/BClfBUfIBnhGqZqHFqk8YQ/CWSY4Vwc30l71neIX0UwlFhdy+2JeSoMM9z0sfYtUxrdHsiJtO/LcXvpWmYIVpC9p4/s9FcShf5mhbXKE7PcsECgYBN7qqvAH94LF4rWJ8QEZWRK1E7Ptg1KFOHu79Qt+HmtZFzwPTA0c8vQxq22V/uuSxqcf2tOK4EZDxYJtTXrbRuN5pOg2PQnrDdfXX7iw3gu8gMMVFKvgGxDSM7HbNBAy6hqcQtuD+CPI/CRrPjGUqXBkKD63UZnacWlLK7fk1a1wKBgExUaqOBKmr0vldVn66E1XzZj4F4+fV5Ggka9289pBNBRlJFD4VmIYkDkOrLimyy2cYeCkocrOvF6HMJqTcOzD50pj44OWkYFRbs6vK0S7iLSX0eR158XOR9C+uZzp1vIA4sYwW3504HVdVoIU5M8ItSgDsFjGnvHopTGu3MBWPT" + ], + "keyUse": ["ENC"], + "certificate": [ + 
"MIICqTCCAZECBgGBz6+byzANBgkqhkiG9w0BAQsFADAYMRYwFAYDVQQDDA1zcGlmZndvcmtmbG93MB4XDTIyMDcwNTE4NDUwMVoXDTMyMDcwNTE4NDY0MVowGDEWMBQGA1UEAwwNc3BpZmZ3b3JrZmxvdzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALKhrHJQ0AxUk5/B0tS4ojQJ08J9wkvB1zyw2DB6/41EPK+Xg/r0GOSAoeVeXPJfCesnW6c33xRCmpueGKaeMs2liWeIbmATH+Ly+8awGeFVwa3rtegSMTq04c6yXvVpiPPgJ586OtpjWGW43+FjaWQ1EZsWUqO7+gs6a3eTuk+DeW8a/LI0UabWUfIcba0byZ7gtIxfnSxyDuUikdYVFRv4G6SAahnLC5DQvZ50ujPzeWBc1CFeSYV6N/pq0f8r8O5vqEuckbDdK2gkl1XpzT9OQ4bHeBTmKqFwQXHYF124g8XndB1gf4OXQEqsjMnR64nXRq/BVNFvP7mZhe/cMT8CAwEAATANBgkqhkiG9w0BAQsFAAOCAQEArDDC7bYbuBg33PbUQi7P77lV7PuE9uQU1F3HqulhkARQeM/xmBdJRj9CHjj62shkI3An70tJtGBJkVAHltmvjC+A6IDO5I8IbnPkvWJFu9HwphdP/C1HXYmGPPe7yGdKpy6mdCZ+LMZP7BENhOlx9yXLDFYtcGvqZ4u3XvfsLqUsRGqZHNlhVJD13dUbI6pvbwMsb3gIxozgTIa2ySHMbHafln2UQk5jD0eOIVkaNAdlHqMHiBpPjkoVxnhAmJ/dUIAqKBvuIbCOu9N0kOQSl82LqC7CZ21JCyT86Ll3n1RTkxY5G3JzGW4dyJMOGSyVnWaQ9Z+C92ZMFcOt611M2A==" + ], + "priority": ["100"], + "algorithm": ["RSA-OAEP"] + } + } + ] + }, + "internationalizationEnabled": false, + "supportedLocales": [], + "authenticationFlows": [ + { + "id": "a2e35646-200f-4d14-98ba-c9b5150d8753", + "alias": "Account verification options", + "description": "Method with which to verity the existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-email-verification", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Verify Existing Account by Re-authentication", + "userSetupAllowed": false + } + ] + }, + { + "id": "d85a3c40-8cc9-43a1-ba04-0c8ca2c072da", + "alias": "Authentication Options", + "description": "Authentication options.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + 
"authenticator": "basic-auth", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "basic-auth-otp", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "e127feb1-c4d8-471a-9afc-c21df984462e", + "alias": "Browser - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "f8f6347b-7eb1-44ca-a912-a826a8f93b6d", + "alias": "Direct Grant - Conditional OTP", + "description": "Flow to determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "d2bb8529-3fb8-4085-9153-b56a930829cd", + "alias": "First broker login - Conditional OTP", + "description": "Flow to 
determine if the OTP is required for the authentication", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-otp-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "6ccd1a2e-0184-43d4-80e4-7400a008408f", + "alias": "Handle Existing Account", + "description": "Handle what to do if there is existing account with same email/username like authenticated identity provider", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-confirm-link", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Account verification options", + "userSetupAllowed": false + } + ] + }, + { + "id": "f13bd8b5-895a-44a0-82a6-067dffdcffa9", + "alias": "Reset - Conditional OTP", + "description": "Flow to determine if the OTP should be reset or not. 
Set to REQUIRED to force.", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "conditional-user-configured", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-otp", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "3ef752df-8070-4864-9f1e-2900317924b2", + "alias": "User creation or linking", + "description": "Flow for the existing/non-existing user alternatives", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "create unique user config", + "authenticator": "idp-create-user-if-unique", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Handle Existing Account", + "userSetupAllowed": false + } + ] + }, + { + "id": "9adb8fbe-b778-4ee1-9a1b-c01021aee03e", + "alias": "Verify Existing Account by Re-authentication", + "description": "Reauthentication of existing account", + "providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "idp-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "First broker login - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "1958f0c6-aaa0-41df-bbe1-be12668286f5", + "alias": "browser", + "description": "browser based 
authentication", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-cookie", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "auth-spnego", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "identity-provider-redirector", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 25, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "forms", + "userSetupAllowed": false + } + ] + }, + { + "id": "c4a0fb82-e755-465f-a0d1-c87846836397", + "alias": "clients", + "description": "Base authentication for clients", + "providerId": "client-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "client-secret", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-secret-jwt", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "client-x509", + "authenticatorFlow": false, + "requirement": "ALTERNATIVE", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "3d377bcf-c7b0-4356-bf2f-f83fb1e4aca9", + "alias": "direct grant", + "description": "OpenID Connect Resource Owner Grant", + "providerId": "basic-flow", + "topLevel": true, + 
"builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "direct-grant-validate-username", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "direct-grant-validate-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 30, + "autheticatorFlow": true, + "flowAlias": "Direct Grant - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "97d2ac80-b725-44f8-b171-655bc28cac2a", + "alias": "docker auth", + "description": "Used by Docker clients to authenticate against the IDP", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "docker-http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "0fcc3a08-ea77-42e4-a1fb-858abcf1759a", + "alias": "first broker login", + "description": "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticatorConfig": "review profile config", + "authenticator": "idp-review-profile", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "User creation or linking", + "userSetupAllowed": false + } + ] + }, + { + "id": "ac743fa7-98df-4933-898f-44b716ff55e2", + "alias": "forms", + "description": "Username, password, otp and other auth forms.", + 
"providerId": "basic-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "auth-username-password-form", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Browser - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "65451a14-aa9d-49da-807a-f934b10775cb", + "alias": "http challenge", + "description": "An authentication flow based on challenge-response HTTP Authentication Schemes", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "no-cookie-redirect", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": true, + "flowAlias": "Authentication Options", + "userSetupAllowed": false + } + ] + }, + { + "id": "733a256d-0ccb-4197-852c-91bf62f80e4b", + "alias": "registration", + "description": "registration flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-page-form", + "authenticatorFlow": true, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": true, + "flowAlias": "registration form", + "userSetupAllowed": false + } + ] + }, + { + "id": "d34e94db-5cfd-412b-9555-bfcf3ab7b21b", + "alias": "registration form", + "description": "registration form", + "providerId": "form-flow", + "topLevel": false, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "registration-user-creation", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": 
false + }, + { + "authenticator": "registration-profile-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 40, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-password-action", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 50, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "registration-recaptcha-action", + "authenticatorFlow": false, + "requirement": "DISABLED", + "priority": 60, + "autheticatorFlow": false, + "userSetupAllowed": false + } + ] + }, + { + "id": "2c90ffbf-2de2-41df-bfb0-ddd089bf8c57", + "alias": "reset credentials", + "description": "Reset credentials for a user if they forgot their password or something", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "reset-credentials-choose-user", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-credential-email", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 20, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticator": "reset-password", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 30, + "autheticatorFlow": false, + "userSetupAllowed": false + }, + { + "authenticatorFlow": true, + "requirement": "CONDITIONAL", + "priority": 40, + "autheticatorFlow": true, + "flowAlias": "Reset - Conditional OTP", + "userSetupAllowed": false + } + ] + }, + { + "id": "a779f34a-421c-4b7c-b94a-5b8736cf485b", + "alias": "saml ecp", + "description": "SAML ECP Profile Authentication Flow", + "providerId": "basic-flow", + "topLevel": true, + "builtIn": true, + "authenticationExecutions": [ + { + "authenticator": "http-basic-authenticator", + "authenticatorFlow": false, + "requirement": "REQUIRED", + "priority": 10, + 
"autheticatorFlow": false, + "userSetupAllowed": false + } + ] + } + ], + "authenticatorConfig": [ + { + "id": "d99b0848-0378-4a5d-9a72-6efd758e935f", + "alias": "create unique user config", + "config": { + "require.password.update.after.registration": "false" + } + }, + { + "id": "ab775beb-09ca-4f94-b62b-16f0692269e9", + "alias": "review profile config", + "config": { + "update.profile.on.first.login": "missing" + } + } + ], + "requiredActions": [ + { + "alias": "CONFIGURE_TOTP", + "name": "Configure OTP", + "providerId": "CONFIGURE_TOTP", + "enabled": true, + "defaultAction": false, + "priority": 10, + "config": {} + }, + { + "alias": "terms_and_conditions", + "name": "Terms and Conditions", + "providerId": "terms_and_conditions", + "enabled": false, + "defaultAction": false, + "priority": 20, + "config": {} + }, + { + "alias": "UPDATE_PASSWORD", + "name": "Update Password", + "providerId": "UPDATE_PASSWORD", + "enabled": true, + "defaultAction": false, + "priority": 30, + "config": {} + }, + { + "alias": "UPDATE_PROFILE", + "name": "Update Profile", + "providerId": "UPDATE_PROFILE", + "enabled": true, + "defaultAction": false, + "priority": 40, + "config": {} + }, + { + "alias": "VERIFY_EMAIL", + "name": "Verify Email", + "providerId": "VERIFY_EMAIL", + "enabled": true, + "defaultAction": false, + "priority": 50, + "config": {} + }, + { + "alias": "delete_account", + "name": "Delete Account", + "providerId": "delete_account", + "enabled": false, + "defaultAction": false, + "priority": 60, + "config": {} + }, + { + "alias": "update_user_locale", + "name": "Update User Locale", + "providerId": "update_user_locale", + "enabled": true, + "defaultAction": false, + "priority": 1000, + "config": {} + } + ], + "browserFlow": "browser", + "registrationFlow": "registration", + "directGrantFlow": "direct grant", + "resetCredentialsFlow": "reset credentials", + "clientAuthenticationFlow": "clients", + "dockerAuthenticationFlow": "docker auth", + "attributes": { + 
"cibaBackchannelTokenDeliveryMode": "poll", + "cibaExpiresIn": "120", + "cibaAuthRequestedUserHint": "login_hint", + "oauth2DeviceCodeLifespan": "600", + "clientOfflineSessionMaxLifespan": "0", + "oauth2DevicePollingInterval": "5", + "clientSessionIdleTimeout": "0", + "parRequestUriLifespan": "60", + "clientSessionMaxLifespan": "0", + "clientOfflineSessionIdleTimeout": "0", + "cibaInterval": "5" + }, + "keycloakVersion": "18.0.2", + "userManagedAccessAllowed": false, + "clientProfiles": { + "profiles": [] + }, + "clientPolicies": { + "policies": [] + } +} diff --git a/bin/start_keycloak b/bin/start_keycloak new file mode 100755 index 00000000..932e14ba --- /dev/null +++ b/bin/start_keycloak @@ -0,0 +1,80 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if ! docker network inspect spiffworkflow > /dev/null 2>&1; then + docker network create spiffworkflow +fi +docker rm keycloak 2>/dev/null || echo 'no keycloak container found' +docker run \ + -p 7002:8080 \ + -d \ + --network=spiffworkflow \ + --name keycloak \ + -e KEYCLOAK_LOGLEVEL=ALL \ + -e ROOT_LOGLEVEL=ALL \ + -e KEYCLOAK_ADMIN=admin \ + -e KEYCLOAK_ADMIN_PASSWORD=admin quay.io/keycloak/keycloak:18.0.2 start-dev \ + -Dkeycloak.profile.feature.token_exchange=enabled \ + -Dkeycloak.profile.feature.admin_fine_grained_authz=enabled + +docker cp bin/finance-realm.json keycloak:/tmp +docker cp bin/spiffworkflow-realm.json keycloak:/tmp +docker cp bin/quarkus-realm.json keycloak:/tmp + +sleep 10 +docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/finance-realm.json || echo '' +docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/spiffworkflow-realm.json || echo '' +docker exec keycloak /opt/keycloak/bin/kc.sh import --file /tmp/quarkus-realm.json || echo '' + +echo 'imported realms' + +if [ 
"${TURN_OFF_SSL:-}" == "true" ]; then + docker exec -it keycloak /opt/keycloak/bin/kcadm.sh config credentials --server http://localhost:8080 --realm master --user admin + docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/master -s sslRequired=NONE + docker exec -it keycloak /opt/keycloak/bin/kcadm.sh update realms/spiffworkflow -s sslRequired=NONE + echo 'turned off SSL requirement' +fi + +docker stop keycloak +docker start keycloak + + +# to export: +# /opt/keycloak/bin/kc.sh export --dir /tmp/hey --users realm_file +# change any js policies to role policies - just copy the config of one and change the type to role +# https://github.com/keycloak/keycloak/issues/11664#issuecomment-1111062102 +# +# if docker exec commands fail below then attempt to import by adding a new realm in the webui + +# NOTE: creds - user1 / password + +#### Example resource_set call +# GET /realms/quarkus/authz/protection/resource_set?matchingUri=true&deep=true&max=-1&exactName=false&uri=%2Fapi%2Fusers%2Fme HTTP/1.1..Authorization: Bearer 
eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJjZklBRE5feHhDSm1Wa1d5Ti1QTlhFRXZNVVdzMnI2OEN4dG1oRUROelhVIn0.eyJleHAiOjE2NTcxMzgzNzAsImlhdCI6MTY1NzEzODA3MCwianRpIjoiY2I1OTc0OTAtYzJjMi00YTFkLThkNmQtMzBkOGU5YzE1YTNlIiwiaXNzIjoiaHR0cDovL2xvY2FsaG9zdDo0MzI3OS9yZWFsbXMvcXVhcmt1cyIsImF1ZCI6ImFjY291bnQiLCJzdWIiOiI5NDhjNTllYy00NmVkLTRkOTktYWE0My0wMjkwMDAyOWI5MzAiLCJ0eXAiOiJCZWFyZXIiLCJhenAiOiJiYWNrZW5kLXNlcnZpY2UiLCJyZWFsbV9hY2Nlc3MiOnsicm9sZXMiOlsib2ZmbGluZV9hY2Nlc3MiXX0sInJlc291cmNlX2FjY2VzcyI6eyJiYWNrZW5kLXNlcnZpY2UiOnsicm9sZXMiOlsidW1hX3Byb3RlY3Rpb24iXX0sImFjY291bnQiOnsicm9sZXMiOlsibWFuYWdlLWFjY291bnQiLCJtYW5hZ2UtYWNjb3VudC1saW5rcyIsInZpZXctcHJvZmlsZSJdfX0sInNjb3BlIjoiZW1haWwgcHJvZmlsZSIsImNsaWVudEhvc3QiOiIxNzIuMTcuMC4xIiwiZW1haWxfdmVyaWZpZWQiOmZhbHNlLCJjbGllbnRJZCI6ImJhY2tlbmQtc2VydmljZSIsInByZWZlcnJlZF91c2VybmFtZSI6InNlcnZpY2UtYWNjb3VudC1iYWNrZW5kLXNlcnZpY2UiLCJjbGllbnRBZGRyZXNzIjoiMTcyLjE3LjAuMSIsImVtYWlsIjoic2VydmljZS1hY2NvdW50LWJhY2tlbmQtc2VydmljZUBwbGFjZWhvbGRlci5vcmcifQ.VRcdoJQO5KWeDFprl6g21Gp9lAqLH1GUAegZPslI9lcL7wdEDLauleTs7cr9ODvXpBbbWVZirP445H3bIfEpyZ2UiKeoEYB6WvR2r_hIHCbNGrV9klkCVjQSuCtdB-Zf3OWHXctz_warlNXF4i4VLtkettlxeGRTVpqT-_lO-y2PhHVNe7imEcnceoKWZQe-Z0JBAJ1Gs2_mj_vgL8V2ZKAd7x0uuAcNyqo4Kmvqh75vkhIuGYAbWfY--wdv8cuphNpbKCGoz27n-D_Im8tW00B1_twctwXo8yfZHp46o1yERbTCS1Xu_eBFufKB21au6omxneyKSD47AfHLR_ymvg..Host: localhost:43279..Connection: Keep-Alive.... 
+# # +# T 127.0.0.1:43279 -> 127.0.0.1:39282 [AP] #127 +# HTTP/1.1 200 OK..Referrer-Policy: no-referrer..X-Frame-Options: SAMEORIGIN..Strict-Transport-Security: max-age=31536000; includeSubDomains..Cache-Control: no-cache..X-Content-Type-Options: nosniff..X-XSS-Protection: 1; mode=block..Content-Type: application/json..content-length: 236....[{"name":"usersme","owner":{"id":"0ac5df91-e044-4051-bd03-106a3a5fb9cc","name":"backend-service"},"ownerManagedAccess":false,"displayName":"usersme","attributes":{},"_id":"179611c3-be58-4ba2-95b2-4aacda3cc0f1","uris":["/api/users/me"]}] +# # +# T 127.0.0.1:39282 -> 127.0.0.1:43279 [AP] #128 +# POST /realms/quarkus/protocol/openid-connect/token HTTP/1.1..Authorization: Basic YmFja2VuZC1zZXJ2aWNlOnNlY3JldA==..Content-Length: 1231..Content-Type: application/x-www-form-urlencoded; charset=UTF-8..Host: localhost:43279..Connection: Keep-Alive.... +# # +# T 127.0.0.1:39282 -> 127.0.0.1:43279 [AP] #129 +# audience=backend-service&grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Auma-ticket&permission=179611c3-be58-4ba2-95b2-4aacda3cc0f1&subject_token=eyJhbGciOiJSUzI1NiIsInR5cCIgOiAiSldUIiwia2lkIiA6ICJjZklBRE5feHhDSm1Wa1d5Ti1QTlhFRXZNVVdzMnI2OEN4dG1oRUROelhVIn0.eyJleHAiOjE2NTcxMzgzNzYsImlhdCI6MTY1NzEzODA3NiwiYXV0aF90aW1lIjoxNjU3MTM4MDc2LCJqdGkiOiI0ZjMyYzljNS05NzY3LTQ0YzAtOTBlNi1kZmJhNjFmMmJmNDgiLCJpc3MiOiJodHRwOi8vbG9jYWxob3N0OjQzMjc5L3JlYWxtcy9xdWFya3VzIiwic3ViIjoiZWI0MTIzYTMtYjcyMi00Nzk4LTlhZjUtODk1N2Y4MjM2NTdhIiwidHlwIjoiQmVhcmVyIiwiYXpwIjoiYmFja2VuZC1zZXJ2aWNlIiwibm9uY2UiOiI5SklBc2RhIiwic2Vzc2lvbl9zdGF0ZSI6IjBlZTVkNjRmLWYxM2EtNDg1Yy1hNzBhLTJmMDA0YjQ3MWIwNyIsInJlYWxtX2FjY2VzcyI6eyJyb2xlcyI6WyJ1c2VyIl19LCJzY29wZSI6Im9wZW5pZCBlbWFpbCBwcm9maWxlIiwic2lkIjoiMGVlNWQ2NGYtZjEzYS00ODVjLWE3MGEtMmYwMDRiNDcxYjA3IiwiZW1haWxfdmVyaWZpZWQiOmZhbHNlLCJwcmVmZXJyZWRfdXNlcm5hbWUiOiJhbGljZSJ9.Jjz0GYaApd_b05YOSe_Eq0tsFQk7qd-vGgIyjdeeEJAAA5xhS2f_DMpwiBLAPibk-gFnGf5CuLynA0z5bxE5vlbQHX9-aKxH8AEixDFkUsnfn7PN1NQtiG-Jj5cfuLxOShy2X2EOScZVTdRc9PgO_Xsb7ltDrtwtQ6
eKOYVt-mqd7PR3cWJHjTldh4tiibjrKPccyZNBNC3W03pno3WLRVaG09Kotcsj1e5oS0safAcxACa3CSfchnY88E7Qwi1mva2F4X-gUar5-Zn2yT2iu8vqH3BCHzz8frAsYv1dOougRBaMfayLiFgKo7ZjsOI8OfPDSm7PEOMFEgHEHIloiw +# ## +# T 127.0.0.1:43279 -> 127.0.0.1:39282 [AP] #131 +# HTTP/1.1 403 Forbidden..Referrer-Policy: no-referrer..X-Frame-Options: SAMEORIGIN..Strict-Transport-Security: max-age=31536000; includeSubDomains..Cache-Control: no-store..X-Content-Type-Options: nosniff..Pragma: no-cache..X-XSS-Protection: 1; mode=block..Content-Type: application/json..content-length: 62....{"error":"access_denied","error_description":"not_authorized"} +######## + +#### quarkus for example +# https://quarkus.io/guides/security-keycloak-authorization +# from that guide, we ultimately found that we hit GET /resource_set described at: +# https://github.com/keycloak/keycloak-documentation/blob/main/authorization_services/topics/service-protection-resources-api-papi.adoc +# when we get the resource, we just hit the token endpoint and provide the resource and scope, and token will say pass or fail. +# More info: +# * https://stackoverflow.com/a/58861610/6090676 +# * https://github.com/keycloak/keycloak/discussions/10044 diff --git a/bin/test_file_upload b/bin/test_file_upload new file mode 100644 index 00000000..b0e660bf --- /dev/null +++ b/bin/test_file_upload @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +curl -v -F key1=value1 -F upload=@localfilename URL diff --git a/bin/test_with_curl b/bin/test_with_curl new file mode 100755 index 00000000..c766d886 --- /dev/null +++ b/bin/test_with_curl @@ -0,0 +1,26 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' 
ERR +set -o errtrace -o errexit -o nounset -o pipefail + +if [[ "${1:-}" == "c" ]]; then + curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{}' +elif grep -qE '^[0-9]$' <<<"${1:-}" ; then + curl --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d "{ \"task_identifier\": \"${1}\"}" +else + ./bin/recreate_db clean + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Product Name": "G", "Quantity": "2"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Sleeve Type": "Short"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Continue shopping?": "N"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Method": "Overnight"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Shipping Address": "Somewhere"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Place Order": "Y"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Card Number": "MY_CARD"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "2", "answer": {"Was the customer charged?": "Y"}}' | jq . + curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the product available?": "Y"}}' | jq . 
+ curl --silent --fail localhost:5000/run_process -H "Content-type: application/json" -X POST -d '{ "task_identifier": "1", "answer": {"Was the order shipped?": "Y"}}' | jq . +fi diff --git a/bin/wait_for_keycloak b/bin/wait_for_keycloak new file mode 100755 index 00000000..d7018465 --- /dev/null +++ b/bin/wait_for_keycloak @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +max_attempts="${1:-}" +if [[ -z "$max_attempts" ]]; then + max_attempts=100 +fi + +echo "waiting for backend to come up..." +attempts=0 +while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7002/realms/master/.well-known/openid-configuration)" != "200" ]]; do + if [[ "$attempts" -gt "$max_attempts" ]]; then + >&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem" + exit 1 + fi + attempts=$(( attempts + 1 )) + sleep 1 +done diff --git a/bin/wait_for_server_to_be_up b/bin/wait_for_server_to_be_up new file mode 100755 index 00000000..4c845613 --- /dev/null +++ b/bin/wait_for_server_to_be_up @@ -0,0 +1,24 @@ +#!/usr/bin/env bash + +function error_handler() { + >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}." + exit "$2" +} +trap 'error_handler ${LINENO} $?' ERR +set -o errtrace -o errexit -o nounset -o pipefail + +max_attempts="${1:-}" +if [[ -z "$max_attempts" ]]; then + max_attempts=100 +fi + +echo "waiting for backend to come up..." +attempts=0 +while [[ "$(curl -s -o /dev/null -w '%{http_code}' http://localhost:7000/v1.0/status)" != "200" ]]; do + if [[ "$attempts" -gt "$max_attempts" ]]; then + >&2 echo "ERROR: Server not up after $max_attempts attempts. 
There is probably a problem" + exit 1 + fi + attempts=$(( attempts + 1 )) + sleep 1 +done diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..9ac26504 --- /dev/null +++ b/codecov.yml @@ -0,0 +1,9 @@ +comment: false +coverage: + status: + project: + default: + target: "100" + patch: + default: + target: "100" diff --git a/conftest.py b/conftest.py new file mode 100644 index 00000000..98846ca1 --- /dev/null +++ b/conftest.py @@ -0,0 +1,106 @@ +"""Conftest.""" +import os +import shutil + +import pytest +from flask.app import Flask +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +# We need to call this before importing spiffworkflow_backend +# otherwise typeguard cannot work. 
hence the noqa: E402 +if os.environ.get("RUN_TYPEGUARD") == "true": + from typeguard.importhook import install_import_hook + + install_import_hook(packages="spiffworkflow_backend") + + +from spiffworkflow_backend import create_app # noqa: E402 + + +@pytest.fixture(scope="session") +def app() -> Flask: + """App.""" + os.environ["SPIFFWORKFLOW_BACKEND_ENV"] = "testing" + + # os.environ["FLASK_SESSION_SECRET_KEY"] = "this_is_testing_secret_key" + os.environ["FLASK_SESSION_SECRET_KEY"] = "super_secret_key" + app = create_app() + + # NOTE: set this here since nox shoves tests and src code to + # different places and this allows us to know exactly where we are at the start + app.config["BPMN_SPEC_ABSOLUTE_DIR"] = os.path.join( + os.path.dirname(__file__), + "tests", + "spiffworkflow_backend", + "files", + "bpmn_specs", + ) + + return app + + +@pytest.fixture() +def with_db_and_bpmn_file_cleanup() -> None: + """Process_group_resource.""" + for model in SpiffworkflowBaseDBModel._all_subclasses(): + db.session.query(model).delete() + + try: + yield + finally: + process_model_service = ProcessModelService() + if os.path.exists(process_model_service.root_path()): + shutil.rmtree(process_model_service.root_path()) + + +@pytest.fixture() +def setup_process_instances_for_reports() -> list[ProcessInstanceModel]: + """Setup_process_instances_for_reports.""" + user = BaseTest.find_or_create_user() + process_group_id = "runs_without_input" + process_model_id = "sample" + load_test_spec(process_group_id=process_group_id, process_model_id=process_model_id) + process_instances = [] + for data in [kay(), ray(), jay()]: + process_instance = ProcessInstanceService.create_process_instance( + process_group_identifier=process_group_id, + process_model_identifier=process_model_id, + user=user, + ) + processor = ProcessInstanceProcessor(process_instance) + processor.slam_in_data(data) + process_instance.status = "complete" + db.session.add(process_instance) + db.session.commit() + + 
process_instances.append(process_instance) + + return process_instances + + +def kay() -> dict: + """Kay.""" + return {"name": "kay", "grade_level": 2, "test_score": 10} + + +def ray() -> dict: + """Ray.""" + return {"name": "ray", "grade_level": 1, "test_score": 9} + + +def jay() -> dict: + """Jay.""" + return {"name": "jay", "grade_level": 2, "test_score": 8} diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 00000000..c7075b2b --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,100 @@ +# Why we are running with network_mode: host +# Wow this has been awful. We run three things in docker: mysql, keycloak, and the backend server. +# The backend-server needs to talk to the other two. +# +# In order to talk to keycloak, it needs to go through localhost so that it can communicate with +# keycloak using the same url as the frontend so that tokens can be properly validated. +# If the domains are different, keycloak invalidates the token. There may be a way to change +# this but I didn't find it. +# +# In order for the backend server to talk to the mysql server, they need to be on the same network. +# I tried splitting it out where the mysql runs on a custom network and the backend runs on both +# the custom network AND with localhost. Nothing I tried worked and googling didn't help. They +# only ever mentioned one thing or using host.docker.internal which would cause the domains to +# be different. +# +# So instead we are running with both the mysql server and the backend server in host network mode. +# There may be a better way to do this but if it works, then it works.
+ +version: "3.8" +services: + db: + container_name: db + image: mysql:8.0.29 + platform: linux/amd64 + cap_add: + - SYS_NICE + restart: "${SPIFFWORKFLOW_BACKEND_DATABASE_DOCKER_RESTART_POLICY:-no}" + environment: + - MYSQL_DATABASE=${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - MYSQL_ROOT_PASSWORD=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} + - MYSQL_TCP_PORT=7003 + network_mode: host + ports: + - "7003" + volumes: + - spiffworkflow_backend:/var/lib/mysql + healthcheck: + test: mysql --user=root --password=${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw} -e 'select 1' ${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + interval: 10s + timeout: 5s + retries: 10 + + spiffworkflow-backend: &spiffworkflow-backend + container_name: spiffworkflow-backend + profiles: + - run + depends_on: + db: + condition: service_healthy + build: + context: . + environment: + - APPLICATION_ROOT=/ + - SPIFFWORKFLOW_BACKEND_ENV=${SPIFFWORKFLOW_BACKEND_ENV:-development} + - FLASK_DEBUG=0 + - FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key} + - OPEN_ID_SERVER_URL=${OPEN_ID_SERVER_URL:-http://localhost:7002} + - SPIFFWORKFLOW_FRONTEND_URL=${SPIFFWORKFLOW_FRONTEND_URL:-http://localhost:7001} + - SPIFFWORKFLOW_BACKEND_URL=${SPIFFWORKFLOW_BACKEND_URL:-http://localhost:7000} + - SPIFFWORKFLOW_BACKEND_PORT=7000 + - SPIFFWORKFLOW_BACKEND_UPGRADE_DB=true + - SPIFFWORKFLOW_BACKEND_DATABASE_URI=mysql+mysqlconnector://root:${SPIFFWORKFLOW_BACKEND_MYSQL_ROOT_DATABASE:-my-secret-pw}@localhost:7003/${SPIFFWORKFLOW_BACKEND_DATABASE_NAME:-spiffworkflow_backend_development} + - BPMN_SPEC_ABSOLUTE_DIR=/app/process_models + - SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA=${SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA:-false} + - PROCESS_WAITING_MESSAGES=true + ports: + - "7000:7000" + network_mode: host + volumes: + - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ./log:/app/log + 
healthcheck: + test: curl localhost:7000/v1.0/status --fail + interval: 10s + timeout: 5s + retries: 20 + + spiffworkflow-backend-local-debug: + <<: *spiffworkflow-backend + container_name: spiffworkflow-backend-local-debug + profiles: + - debug + volumes: + - ${BPMN_SPEC_ABSOLUTE_DIR:-./../sample-process-models}:/app/process_models + - ./:/app + command: /app/bin/boot_in_docker_debug_mode + + # the docs say we can disable healthchecks with disable: true + # but it returns a bad exit code so setup one that doesn't matter + # since there is nothing to healthcheck in this case + # https://docs.docker.com/compose/compose-file/compose-file-v3/#healthcheck + healthcheck: + test: cat /etc/hosts + interval: 10s + timeout: 5s + retries: 20 + +volumes: + spiffworkflow_backend: + driver: local diff --git a/docs/codeofconduct.rst b/docs/codeofconduct.rst new file mode 100644 index 00000000..96e0ba2f --- /dev/null +++ b/docs/codeofconduct.rst @@ -0,0 +1 @@ +.. include:: ../CODE_OF_CONDUCT.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..0ca1992d --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,17 @@ +"""Sphinx configuration.""" +from datetime import datetime + + +project = "Spiffworkflow Backend" +author = "Sartography" +copyright = f"{datetime.now().year}, {author}" +extensions = [ + "sphinx.ext.napoleon", + "autoapi.extension", + "sphinx_click", +] + +# https://github.com/readthedocs/sphinx-autoapi +autoapi_type = "python" +autoapi_dirs = ["../src"] +html_theme = "furo" diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 00000000..c8670b6b --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1,4 @@ +.. include:: ../CONTRIBUTING.rst + :end-before: github-only + +.. _Code of Conduct: codeofconduct.html diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..3a2ebb73 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,16 @@ +.. include:: ../README.rst + :end-before: github-only + +.. 
_Contributor Guide: contributing.html +.. _Usage: usage.html + +.. toctree:: + :hidden: + :maxdepth: 1 + + usage + reference + contributing + Code of Conduct + License + Changelog diff --git a/docs/license.rst b/docs/license.rst new file mode 100644 index 00000000..68c5792f --- /dev/null +++ b/docs/license.rst @@ -0,0 +1 @@ +.. include:: ../LICENSE.rst diff --git a/docs/reference.rst b/docs/reference.rst new file mode 100644 index 00000000..0529ca1f --- /dev/null +++ b/docs/reference.rst @@ -0,0 +1,9 @@ +Reference +========= + + +spiffworkflow_backend +--------------------- + +.. automodule:: spiffworkflow_backend + :members: diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..74d335aa --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,3 @@ +furo==2022.9.29 +sphinx==5.2.3 +sphinx-click==4.3.0 diff --git a/docs/usage.rst b/docs/usage.rst new file mode 100644 index 00000000..308fe61e --- /dev/null +++ b/docs/usage.rst @@ -0,0 +1,6 @@ +Usage +===== + +.. click:: spiffworkflow_backend.__main__:main + :prog: spiffworkflow-backend + :nested: full diff --git a/log/.keep b/log/.keep new file mode 100644 index 00000000..e69de29b diff --git a/migrations/README b/migrations/README new file mode 100644 index 00000000..0e048441 --- /dev/null +++ b/migrations/README @@ -0,0 +1 @@ +Single-database configuration for Flask. diff --git a/migrations/alembic.ini b/migrations/alembic.ini new file mode 100644 index 00000000..ec9d45c2 --- /dev/null +++ b/migrations/alembic.ini @@ -0,0 +1,50 @@ +# A generic, single database configuration.
+ +[alembic] +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic,flask_migrate + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[logger_flask_migrate] +level = INFO +handlers = +qualname = flask_migrate + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/migrations/env.py b/migrations/env.py new file mode 100644 index 00000000..630e381a --- /dev/null +++ b/migrations/env.py @@ -0,0 +1,89 @@ +import logging +from logging.config import fileConfig + +from flask import current_app + +from alembic import context + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +config.set_main_option( + 'sqlalchemy.url', + str(current_app.extensions['migrate'].db.get_engine().url).replace( + '%', '%%')) +target_metadata = current_app.extensions['migrate'].db.metadata + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. 
+ + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, target_metadata=target_metadata, literal_binds=True + ) + + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + + # this callback is used to prevent an auto-migration from being generated + # when there are no changes to the schema + # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html + def process_revision_directives(context, revision, directives): + if getattr(config.cmd_opts, 'autogenerate', False): + script = directives[0] + if script.upgrade_ops.is_empty(): + directives[:] = [] + logger.info('No changes in schema detected.') + + connectable = current_app.extensions['migrate'].db.get_engine() + + with connectable.connect() as connection: + context.configure( + connection=connection, + target_metadata=target_metadata, + process_revision_directives=process_revision_directives, + **current_app.extensions['migrate'].configure_args + ) + + with context.begin_transaction(): + context.run_migrations() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/migrations/script.py.mako b/migrations/script.py.mako new file mode 100644 index 00000000..2c015630 --- /dev/null +++ b/migrations/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa 
+${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/migrations/versions/88e30afd19ac_.py b/migrations/versions/88e30afd19ac_.py new file mode 100644 index 00000000..9e088be7 --- /dev/null +++ b/migrations/versions/88e30afd19ac_.py @@ -0,0 +1,350 @@ +"""empty message + +Revision ID: 88e30afd19ac +Revises: +Create Date: 2022-10-11 09:39:40.882490 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = '88e30afd19ac' +down_revision = None +branch_labels = None +depends_on = None + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('admin_session', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('token', sa.String(length=50), nullable=True), + sa.Column('admin_impersonate_uid', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('token') + ) + op.create_table('bpmn_process_id_lookup', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=True), + sa.Column('bpmn_file_relative_path', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), 'bpmn_process_id_lookup', ['bpmn_process_identifier'], unique=True) + op.create_table('group', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('identifier', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('message_model', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('identifier', 
sa.String(length=50), nullable=True), + sa.Column('name', sa.String(length=50), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_message_model_identifier'), 'message_model', ['identifier'], unique=True) + op.create_index(op.f('ix_message_model_name'), 'message_model', ['name'], unique=True) + op.create_table('permission_target', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('uri', sa.String(length=255), nullable=False), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('uri') + ) + op.create_table('user', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('username', sa.String(length=255), nullable=False), + sa.Column('uid', sa.String(length=50), nullable=True), + sa.Column('service', sa.String(length=50), nullable=False), + sa.Column('service_id', sa.String(length=255), nullable=False), + sa.Column('name', sa.String(length=255), nullable=True), + sa.Column('email', sa.String(length=255), nullable=True), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('service', 'service_id', name='service_key'), + sa.UniqueConstraint('uid'), + sa.UniqueConstraint('username') + ) + op.create_table('message_correlation_property', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('identifier', sa.String(length=50), nullable=True), + sa.Column('message_model_id', sa.Integer(), nullable=False), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('identifier', 'message_model_id', name='message_correlation_property_unique') + ) + op.create_index(op.f('ix_message_correlation_property_identifier'), 'message_correlation_property', ['identifier'], unique=False) + op.create_table('message_triggerable_process_model', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('message_model_id', sa.Integer(), 
nullable=False), + sa.Column('process_model_identifier', sa.String(length=50), nullable=False), + sa.Column('process_group_identifier', sa.String(length=50), nullable=False), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('message_model_id') + ) + op.create_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), 'message_triggerable_process_model', ['process_group_identifier'], unique=False) + op.create_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), 'message_triggerable_process_model', ['process_model_identifier'], unique=False) + op.create_table('principal', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=True), + sa.Column('group_id', sa.Integer(), nullable=True), + sa.CheckConstraint('NOT(user_id IS NULL AND group_id IS NULL)'), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('group_id'), + sa.UniqueConstraint('user_id') + ) + op.create_table('process_instance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_model_identifier', sa.String(length=50), nullable=False), + sa.Column('process_group_identifier', sa.String(length=50), nullable=False), + sa.Column('process_initiator_id', sa.Integer(), nullable=False), + sa.Column('bpmn_json', sa.JSON(), nullable=True), + sa.Column('start_in_seconds', sa.Integer(), nullable=True), + sa.Column('end_in_seconds', sa.Integer(), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('status', sa.String(length=50), nullable=True), + sa.Column('bpmn_version_control_type', 
sa.String(length=50), nullable=True), + sa.Column('bpmn_version_control_identifier', sa.String(length=255), nullable=True), + sa.ForeignKeyConstraint(['process_initiator_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_process_instance_process_group_identifier'), 'process_instance', ['process_group_identifier'], unique=False) + op.create_index(op.f('ix_process_instance_process_model_identifier'), 'process_instance', ['process_model_identifier'], unique=False) + op.create_table('process_instance_report', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('identifier', sa.String(length=50), nullable=False), + sa.Column('process_model_identifier', sa.String(length=50), nullable=False), + sa.Column('process_group_identifier', sa.String(length=50), nullable=False), + sa.Column('report_metadata', sa.JSON(), nullable=True), + sa.Column('created_by_id', sa.Integer(), nullable=False), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['created_by_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('process_group_identifier', 'process_model_identifier', 'identifier', name='process_instance_report_unique') + ) + op.create_index(op.f('ix_process_instance_report_identifier'), 'process_instance_report', ['identifier'], unique=False) + op.create_index(op.f('ix_process_instance_report_process_group_identifier'), 'process_instance_report', ['process_group_identifier'], unique=False) + op.create_index(op.f('ix_process_instance_report_process_model_identifier'), 'process_instance_report', ['process_model_identifier'], unique=False) + op.create_table('secret', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('key', sa.String(length=50), nullable=False), + sa.Column('value', sa.String(length=255), nullable=False), + sa.Column('creator_user_id', sa.Integer(), nullable=False), + 
sa.ForeignKeyConstraint(['creator_user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('key') + ) + op.create_table('user_group_assignment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('group_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['group_id'], ['group.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('user_id', 'group_id', name='user_group_assignment_unique') + ) + op.create_table('active_task', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('assigned_principal_id', sa.Integer(), nullable=True), + sa.Column('form_file_name', sa.String(length=50), nullable=True), + sa.Column('ui_form_file_name', sa.String(length=50), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('task_id', sa.String(length=50), nullable=True), + sa.Column('task_name', sa.String(length=50), nullable=True), + sa.Column('task_title', sa.String(length=50), nullable=True), + sa.Column('task_type', sa.String(length=50), nullable=True), + sa.Column('task_status', sa.String(length=50), nullable=True), + sa.Column('process_model_display_name', sa.String(length=255), nullable=True), + sa.Column('task_data', sa.Text(), nullable=True), + sa.ForeignKeyConstraint(['assigned_principal_id'], ['principal.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('task_id', 'process_instance_id', name='active_task_unique') + ) + op.create_table('file', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=50), nullable=False), + sa.Column('type', sa.String(length=50), nullable=False), + sa.Column('content_type', 
sa.String(length=50), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=True), + sa.Column('task_spec', sa.String(length=50), nullable=True), + sa.Column('irb_doc_code', sa.String(length=50), nullable=False), + sa.Column('md5_hash', sa.String(length=50), nullable=False), + sa.Column('data', sa.LargeBinary(), nullable=True), + sa.Column('size', sa.Integer(), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('user_uid', sa.String(length=50), nullable=True), + sa.Column('archived', sa.Boolean(), nullable=True), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.ForeignKeyConstraint(['user_uid'], ['user.uid'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('message_correlation', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('message_correlation_property_id', sa.Integer(), nullable=False), + sa.Column('name', sa.String(length=255), nullable=False), + sa.Column('value', sa.String(length=255), nullable=False), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['message_correlation_property_id'], ['message_correlation_property.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('process_instance_id', 'message_correlation_property_id', 'name', name='message_instance_id_name_unique') + ) + op.create_index(op.f('ix_message_correlation_message_correlation_property_id'), 'message_correlation', ['message_correlation_property_id'], unique=False) + op.create_index(op.f('ix_message_correlation_name'), 'message_correlation', ['name'], unique=False) + op.create_index(op.f('ix_message_correlation_process_instance_id'), 
'message_correlation', ['process_instance_id'], unique=False) + op.create_index(op.f('ix_message_correlation_value'), 'message_correlation', ['value'], unique=False) + op.create_table('message_instance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('message_model_id', sa.Integer(), nullable=False), + sa.Column('message_type', sa.String(length=20), nullable=False), + sa.Column('payload', sa.JSON(), nullable=True), + sa.Column('status', sa.String(length=20), nullable=False), + sa.Column('failure_cause', sa.Text(), nullable=True), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('created_at_in_seconds', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['message_model_id'], ['message_model.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('permission_assignment', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('principal_id', sa.Integer(), nullable=False), + sa.Column('permission_target_id', sa.Integer(), nullable=False), + sa.Column('grant_type', sa.Enum('permit', 'deny', name='permitdeny'), nullable=True), + sa.Column('permission', sa.Enum('create', 'read', 'update', 'delete', 'list', 'instantiate', name='permission'), nullable=True), + sa.ForeignKeyConstraint(['permission_target_id'], ['permission_target.id'], ), + sa.ForeignKeyConstraint(['principal_id'], ['principal.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('principal_id', 'permission_target_id', 'permission', name='permission_assignment_uniq') + ) + op.create_table('secret_allowed_process', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('secret_id', sa.Integer(), nullable=False), + sa.Column('allowed_relative_path', sa.String(length=500), nullable=False), + sa.ForeignKeyConstraint(['secret_id'], ['secret.id'], ), + sa.PrimaryKeyConstraint('id'), + 
sa.UniqueConstraint('secret_id', 'allowed_relative_path', name='unique_secret_path') + ) + op.create_table('spiff_logging', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('bpmn_process_identifier', sa.String(length=255), nullable=False), + sa.Column('bpmn_task_identifier', sa.String(length=255), nullable=False), + sa.Column('bpmn_task_name', sa.String(length=255), nullable=True), + sa.Column('bpmn_task_type', sa.String(length=255), nullable=True), + sa.Column('spiff_task_guid', sa.String(length=50), nullable=False), + sa.Column('timestamp', sa.DECIMAL(precision=17, scale=6), nullable=False), + sa.Column('message', sa.String(length=255), nullable=True), + sa.Column('current_user_id', sa.Integer(), nullable=True), + sa.ForeignKeyConstraint(['current_user_id'], ['user.id'], ), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('task_event', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sa.Integer(), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=False), + sa.Column('spec_version', sa.String(length=50), nullable=True), + sa.Column('action', sa.String(length=50), nullable=True), + sa.Column('task_id', sa.String(length=50), nullable=True), + sa.Column('task_name', sa.String(length=50), nullable=True), + sa.Column('task_title', sa.String(length=50), nullable=True), + sa.Column('task_type', sa.String(length=50), nullable=True), + sa.Column('task_state', sa.String(length=50), nullable=True), + sa.Column('task_lane', sa.String(length=50), nullable=True), + sa.Column('form_data', sa.JSON(), nullable=True), + sa.Column('mi_type', sa.String(length=50), nullable=True), + sa.Column('mi_count', sa.Integer(), nullable=True), + sa.Column('mi_index', sa.Integer(), nullable=True), + sa.Column('process_name', sa.String(length=50), nullable=True), + sa.Column('date', 
sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint(['process_instance_id'], ['process_instance.id'], ), + sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('data_store', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('updated_at_in_seconds', sa.Integer(), nullable=True), + sa.Column('key', sa.String(length=50), nullable=False), + sa.Column('process_instance_id', sa.Integer(), nullable=True), + sa.Column('task_spec', sa.String(length=50), nullable=True), + sa.Column('spec_id', sa.String(length=50), nullable=True), + sa.Column('user_id', sa.String(length=50), nullable=True), + sa.Column('file_id', sa.Integer(), nullable=True), + sa.Column('value', sa.String(length=50), nullable=True), + sa.ForeignKeyConstraint(['file_id'], ['file.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_table('message_correlation_message_instance', + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('message_instance_id', sa.Integer(), nullable=False), + sa.Column('message_correlation_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint(['message_correlation_id'], ['message_correlation.id'], ), + sa.ForeignKeyConstraint(['message_instance_id'], ['message_instance.id'], ), + sa.PrimaryKeyConstraint('id'), + sa.UniqueConstraint('message_instance_id', 'message_correlation_id', name='message_correlation_message_instance_unique') + ) + op.create_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), 'message_correlation_message_instance', ['message_correlation_id'], unique=False) + op.create_index(op.f('ix_message_correlation_message_instance_message_instance_id'), 'message_correlation_message_instance', ['message_instance_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f('ix_message_correlation_message_instance_message_instance_id'), table_name='message_correlation_message_instance') + op.drop_index(op.f('ix_message_correlation_message_instance_message_correlation_id'), table_name='message_correlation_message_instance') + op.drop_table('message_correlation_message_instance') + op.drop_table('data_store') + op.drop_table('task_event') + op.drop_table('spiff_logging') + op.drop_table('secret_allowed_process') + op.drop_table('permission_assignment') + op.drop_table('message_instance') + op.drop_index(op.f('ix_message_correlation_value'), table_name='message_correlation') + op.drop_index(op.f('ix_message_correlation_process_instance_id'), table_name='message_correlation') + op.drop_index(op.f('ix_message_correlation_name'), table_name='message_correlation') + op.drop_index(op.f('ix_message_correlation_message_correlation_property_id'), table_name='message_correlation') + op.drop_table('message_correlation') + op.drop_table('file') + op.drop_table('active_task') + op.drop_table('user_group_assignment') + op.drop_table('secret') + op.drop_index(op.f('ix_process_instance_report_process_model_identifier'), table_name='process_instance_report') + op.drop_index(op.f('ix_process_instance_report_process_group_identifier'), table_name='process_instance_report') + op.drop_index(op.f('ix_process_instance_report_identifier'), table_name='process_instance_report') + op.drop_table('process_instance_report') + op.drop_index(op.f('ix_process_instance_process_model_identifier'), table_name='process_instance') + op.drop_index(op.f('ix_process_instance_process_group_identifier'), table_name='process_instance') + op.drop_table('process_instance') + op.drop_table('principal') + op.drop_index(op.f('ix_message_triggerable_process_model_process_model_identifier'), table_name='message_triggerable_process_model') + op.drop_index(op.f('ix_message_triggerable_process_model_process_group_identifier'), 
table_name='message_triggerable_process_model') + op.drop_table('message_triggerable_process_model') + op.drop_index(op.f('ix_message_correlation_property_identifier'), table_name='message_correlation_property') + op.drop_table('message_correlation_property') + op.drop_table('user') + op.drop_table('permission_target') + op.drop_index(op.f('ix_message_model_name'), table_name='message_model') + op.drop_index(op.f('ix_message_model_identifier'), table_name='message_model') + op.drop_table('message_model') + op.drop_table('group') + op.drop_index(op.f('ix_bpmn_process_id_lookup_bpmn_process_identifier'), table_name='bpmn_process_id_lookup') + op.drop_table('bpmn_process_id_lookup') + op.drop_table('admin_session') + # ### end Alembic commands ### diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 00000000..ef4f7787 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,220 @@ +"""Nox sessions.""" +import os +import shutil +import sys +from pathlib import Path +from textwrap import dedent + +import nox + +try: + from nox_poetry import Session + from nox_poetry import session +except ImportError: + message = f"""\ + Nox failed to import the 'nox-poetry' package. 
+ + Please install it using the following command: + + {sys.executable} -m pip install nox-poetry""" + raise SystemExit(dedent(message)) from None + + +package = "spiffworkflow_backend" +python_versions = ["3.10", "3.9"] +nox.needs_version = ">= 2021.6.6" +nox.options.sessions = ( + "pre-commit", + "safety", + "mypy", + "tests", + "typeguard", + "xdoctest", + "docs-build", +) + + +def setup_database(session: Session) -> None: + """Run database migrations against the database.""" + session.env["FLASK_INSTANCE_PATH"] = os.path.join( + os.getcwd(), "instance", "testing" + ) + flask_env_key = "FLASK_SESSION_SECRET_KEY" + session.env[flask_env_key] = "super_secret_key" + session.env["FLASK_APP"] = "src/spiffworkflow_backend" + session.env["SPIFFWORKFLOW_BACKEND_ENV"] = "testing" + session.run("flask", "db", "upgrade") + + +def activate_virtualenv_in_precommit_hooks(session: Session) -> None: + """Activate virtualenv in hooks installed by pre-commit. + + This function patches git hooks installed by pre-commit to activate the + session's virtual environment. This allows pre-commit to locate hooks in + that environment when invoked from git. + + Args: + session: The Session object. 
+ """ + assert session.bin is not None # noqa: S101 + + virtualenv = session.env.get("VIRTUAL_ENV") + if virtualenv is None: + return + + hookdir = Path(".git") / "hooks" + if not hookdir.is_dir(): + return + + for hook in hookdir.iterdir(): + if hook.name.endswith(".sample") or not hook.is_file(): + continue + + text = hook.read_text() + bindir = repr(session.bin)[1:-1] # strip quotes + if not ( + Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text + ): + continue + + lines = text.splitlines() + if not (lines[0].startswith("#!") and "python" in lines[0].lower()): + continue + + header = dedent( + f"""\ + import os + os.environ["VIRTUAL_ENV"] = {virtualenv!r} + os.environ["PATH"] = os.pathsep.join(( + {session.bin!r}, + os.environ.get("PATH", ""), + )) + """ + ) + + lines.insert(1, header) + hook.write_text("\n".join(lines)) + + +@session(name="pre-commit", python="3.10") +def precommit(session: Session) -> None: + """Lint using pre-commit.""" + args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"] + session.install( + "black", + "darglint", + "flake8", + "flake8-bandit", + "flake8-bugbear", + "flake8-docstrings", + "flake8-rst-docstrings", + "pep8-naming", + "pre-commit", + "pre-commit-hooks", + "pyupgrade", + "reorder-python-imports", + ) + session.run("pre-commit", *args) + if args and args[0] == "install": + activate_virtualenv_in_precommit_hooks(session) + + +@session(python="3.10") +def safety(session: Session) -> None: + """Scan dependencies for insecure packages.""" + requirements = session.poetry.export_requirements() + session.install("safety") + session.run("safety", "check", "--full-report", f"--file={requirements}") + + +@session(python=python_versions) +def mypy(session: Session) -> None: + """Type-check using mypy.""" + args = session.posargs or ["src", "tests", "docs/conf.py"] + session.install(".") + session.install("mypy", "pytest", "sqlalchemy-stubs") + session.run("mypy", *args) + if not 
session.posargs: + session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py") + + +@session(python=python_versions) +def tests(session: Session) -> None: + """Run the test suite.""" + session.install(".") + session.install("coverage[toml]", "pytest", "pygments") + try: + setup_database(session) + session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs) + finally: + if session.interactive: + session.notify("coverage", posargs=[]) + + +@session +def coverage(session: Session) -> None: + """Produce the coverage report.""" + args = session.posargs or ["report"] + + session.install("coverage[toml]") + + if not session.posargs and any(Path().glob(".coverage.*")): + session.run("coverage", "combine") + + session.run("coverage", *args) + + +@session(python=python_versions) +def typeguard(session: Session) -> None: + """Runtime type checking using Typeguard.""" + session.install(".") + session.install("pytest", "typeguard", "pygments") + setup_database(session) + session.env["RUN_TYPEGUARD"] = "true" + session.run("pytest", *session.posargs) + + +@session(python=python_versions) +def xdoctest(session: Session) -> None: + """Run examples with xdoctest.""" + if session.posargs: + args = [package, *session.posargs] + else: + args = [f"--modname={package}", "--command=all"] + if "FORCE_COLOR" in os.environ: + args.append("--colored=1") + + session.install(".") + session.install("xdoctest[colors]") + session.run("python", "-m", "xdoctest", *args) + + +@session(name="docs-build", python="3.10") +def docs_build(session: Session) -> None: + """Build the documentation.""" + args = session.posargs or ["docs", "docs/_build"] + if not session.posargs and "FORCE_COLOR" in os.environ: + args.insert(0, "--color") + + session.install(".") + session.install("sphinx", "sphinx-click", "furo") + + build_dir = Path("docs", "_build") + if build_dir.exists(): + shutil.rmtree(build_dir) + + session.run("sphinx-build", *args) + + +@session(python="3.10") +def 
docs(session: Session) -> None: + """Build and serve the documentation with live reloading on file changes.""" + args = session.posargs or ["--open-browser", "docs", "docs/_build"] + session.install(".") + session.install("sphinx", "sphinx-autobuild", "sphinx-click", "furo") + + build_dir = Path("docs", "_build") + if build_dir.exists(): + shutil.rmtree(build_dir) + + session.run("sphinx-autobuild", *args) diff --git a/perms.yml b/perms.yml new file mode 100644 index 00000000..99931856 --- /dev/null +++ b/perms.yml @@ -0,0 +1,32 @@ +group-admin: + type: Group + users: [jakub, kb, alex, dan, mike, jason] + +group-finance: + type: Group + users: [harmeet, sasha] + +group-hr: + type: Group + users: [manuchehr] + +permission-admin: + type: Permission + groups: [group-admin] + users: [] + allowed_permissions: [CREATE, READ, UPDATE, DELETE, LIST, INSTANTIATE] + uri: /* + +permission-finance-admin: + type: Permission + groups: [group-a] + users: [] + allowed_permissions: [CREATE, READ, UPDATE, DELETE] + uri: /v1.0/process-groups/finance/* + +permission-read-all: + type: Permission + groups: [group-finance, group-hr, group-admin] + users: [] + allowed_permissions: [READ] + uri: /* diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..b1d46768 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,3524 @@ +[[package]] +name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "alembic" +version = "1.8.1" +description = "A database migration tool for SQLAlchemy." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.3.0" + +[package.extras] +tz = ["python-dateutil"] + +[[package]] +name = "amqp" +version = "5.1.1" +description = "Low-level AMQP client for Python (fork of amqplib)." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +vine = ">=5.0.0" + +[[package]] +name = "aniso8601" +version = "9.0.1" +description = "A library for parsing ISO 8601 strings." +category = "main" +optional = false +python-versions = "*" + +[package.extras] +dev = ["black", "coverage", "isort", "pre-commit", "pyenchant", "pylint"] + +[[package]] +name = "APScheduler" +version = "3.9.1" +description = "In-process task scheduler with Cron-like capabilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.dependencies] +pytz = "*" +setuptools = ">=0.7" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.0.0 || >=4.0.0" + +[package.extras] +asyncio = ["trollius"] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=0.8)"] +testing = ["mock", "pytest", "pytest-asyncio", "pytest-asyncio (<0.6)", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + +[[package]] +name = "astroid" +version = "2.12.10" +description = "An abstract syntax tree for Python with inference support." 
+category = "main" +optional = false +python-versions = ">=3.7.2" + +[package.dependencies] +lazy-object-proxy = ">=1.4.0" +typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} +wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} + +[[package]] +name = "attrs" +version = "22.1.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] + +[[package]] +name = "Babel" +version = "2.10.3" +description = "Internationalization utilities" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz = ">=2015.7" + +[[package]] +name = "bandit" +version = "1.7.2" +description = "Security oriented static analyser for python code." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} +GitPython = ">=1.0.1" +PyYAML = ">=5.3.1" +stevedore = ">=1.20.0" + +[package.extras] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] +toml = ["toml"] +yaml = ["PyYAML"] + +[[package]] +name = "bcrypt" +version = "4.0.0" +description = "Modern password hashing for your software and your servers" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +tests = ["pytest (>=3.2.1,!=3.3.0)"] +typecheck = ["mypy"] + +[[package]] +name = "beautifulsoup4" +version = "4.11.1" +description = "Screen-scraping library" +category = "dev" +optional = false +python-versions = ">=3.6.0" + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "billiard" +version = "3.6.4.0" +description = "Python multiprocessing fork with improvements and bugfixes" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "black" +version = "22.8.0" +description = "The uncompromising code formatter." 
+category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "blinker" +version = "1.5" +description = "Fast, simple object-to-object and broadcast signaling" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "celery" +version = "5.2.7" +description = "Distributed Task Queue." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +billiard = ">=3.6.4.0,<4.0" +click = ">=8.0.3,<9.0" +click-didyoumean = ">=0.0.3" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +kombu = ">=5.2.3,<6.0" +pytz = ">=2021.3" +vine = ">=5.0.0,<6.0" + +[package.extras] +arangodb = ["pyArango (>=1.3.2)"] +auth = ["cryptography"] +azureblockblob = ["azure-storage-blob (==12.9.0)"] +brotli = ["brotli (>=1.0.0)", "brotlipy (>=0.7.0)"] +cassandra = ["cassandra-driver (<3.21.0)"] +consul = ["python-consul2"] +cosmosdbsql = ["pydocumentdb (==2.3.2)"] +couchbase = ["couchbase (>=3.0.0)"] +couchdb = ["pycouchdb"] +django = ["Django (>=1.11)"] +dynamodb = ["boto3 (>=1.9.178)"] +elasticsearch = ["elasticsearch"] +eventlet = ["eventlet (>=0.32.0)"] +gevent = ["gevent (>=1.5.0)"] +librabbitmq = ["librabbitmq (>=1.5.0)"] +memcache = ["pylibmc"] +mongodb = ["pymongo[srv] (>=3.11.1)"] +msgpack = ["msgpack"] +pymemcache = ["python-memcached"] +pyro = ["pyro4"] +pytest = ["pytest-celery"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +s3 = ["boto3 (>=1.9.125)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +solar = 
["ephem"] +sqlalchemy = ["sqlalchemy"] +sqs = ["kombu[sqs]"] +tblib = ["tblib (>=1.3.0)", "tblib (>=1.5.0)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] +zstd = ["zstandard"] + +[[package]] +name = "certifi" +version = "2022.9.24" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "charset-normalizer" +version = "2.1.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.6.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "classify-imports" +version = "4.2.0" +description = "Utilities for refactoring imports in python-like syntax." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "click" +version = "8.1.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "click-didyoumean" +version = "0.3.0" +description = "Enables git-like *did-you-mean* feature in click" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +click = ">=7" + +[[package]] +name = "click-plugins" +version = "1.1.1" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["coveralls", "pytest (>=3.6)", "pytest-cov", "wheel"] + +[[package]] +name = "click-repl" +version = "0.2.0" +description = "REPL plugin for Click" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = "*" +prompt-toolkit = "*" +six = "*" + +[[package]] +name = "clickclick" +version = "20.10.2" +description = "Click utility functions" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +click = ">=4.0" +PyYAML = ">=3.11" + +[[package]] +name = "colorama" +version = "0.4.5" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "configparser" +version = "5.3.0" +description = "Updated configparser from stdlib for earlier Pythons." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "types-backports"] + +[[package]] +name = "connexion" +version = "2.14.1" +description = "Connexion - API first applications with OpenAPI/Swagger and Flask" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +clickclick = ">=1.2,<21" +flask = ">=1.0.4,<3" +inflection = ">=0.3.1,<0.6" +itsdangerous = ">=0.24" +jsonschema = ">=2.5.1,<5" +packaging = ">=20" +PyYAML = ">=5.1,<7" +requests = ">=2.9.1,<3" +swagger-ui-bundle = {version = ">=0.0.2,<0.1", optional = true, markers = "extra == \"swagger-ui\""} +werkzeug = ">=1.0,<3" + +[package.extras] +aiohttp = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 
(>=0.14.0,<2)"] +docs = ["sphinx-autoapi (==1.8.1)"] +flask = ["flask (>=1.0.4,<3)", "itsdangerous (>=0.24)"] +swagger-ui = ["swagger-ui-bundle (>=0.0.2,<0.1)"] +tests = ["MarkupSafe (>=0.23)", "aiohttp (>=2.3.10,<4)", "aiohttp-jinja2 (>=0.14.0,<2)", "aiohttp-remotes", "decorator (>=5,<6)", "flask (>=1.0.4,<3)", "itsdangerous (>=0.24)", "pytest (>=6,<7)", "pytest-aiohttp", "pytest-cov (>=2,<3)", "swagger-ui-bundle (>=0.0.2,<0.1)", "testfixtures (>=6,<7)"] + +[[package]] +name = "coverage" +version = "6.5.0" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "darglint" +version = "1.8.1" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" + +[[package]] +name = "dateparser" +version = "1.1.1" +description = "Date parsing library designed to parse dates from HTML pages" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +python-dateutil = "*" +pytz = "*" +regex = "<2019.02.19 || >2019.02.19,<2021.8.27 || >2021.8.27,<2022.3.15" +tzlocal = "*" + +[package.extras] +calendars = ["convertdate", "convertdate", "hijri-converter"] +fasttext = ["fasttext"] +langdetect = ["langdetect"] + +[[package]] +name = "distlib" +version = "0.3.6" +description = "Distribution utilities" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "docutils" +version = "0.19" +description = "Docutils -- Python Documentation Utilities" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "dparse" +version = "0.6.2" +description = "A parser for Python dependency files" +category = "dev" +optional = false +python-versions 
= ">=3.5" + +[package.dependencies] +packaging = "*" +toml = "*" + +[package.extras] +conda = ["pyyaml"] +pipenv = ["pipenv"] + +[[package]] +name = "ecdsa" +version = "0.18.0" +description = "ECDSA cryptographic signature library (pure python)" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "filelock" +version = "3.8.0" +description = "A platform independent file lock." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2022.6.21)", "sphinx (>=5.1.1)", "sphinx-autodoc-typehints (>=1.19.1)"] +testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pytest-cov (>=3)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "4.0.1" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.8.0,<2.9.0" +pyflakes = ">=2.4.0,<2.5.0" + +[[package]] +name = "flake8-bandit" +version = "2.1.2" +description = "Automated security testing with bandit and flake8." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +bandit = "*" +flake8 = "*" +flake8-polyfill = "*" +pycodestyle = "*" + +[[package]] +name = "flake8-bugbear" +version = "22.9.23" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +attrs = ">=19.2.0" +flake8 = ">=3.0.0" + +[package.extras] +dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit"] + +[[package]] +name = "flake8-docstrings" +version = "1.6.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = ">=3" +pydocstyle = ">=2.1" + +[[package]] +name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +flake8 = "*" + +[[package]] +name = "flake8-rst-docstrings" +version = "0.2.7" +description = "Python docstring reStructuredText (RST) validator" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +flake8 = ">=3.0.0" +pygments = "*" +restructuredtext-lint = "*" + +[[package]] +name = "Flask" +version = "2.2.2" +description = "A simple framework for building complex web applications." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=8.0" +importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.2.2" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "Flask-Admin" +version = "1.6.0" +description = "Simple and extensible admin interface framework for Flask" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Flask = ">=0.7" +wtforms = "*" + +[package.extras] +aws = ["boto"] +azure = ["azure-storage-blob"] + +[[package]] +name = "Flask-Bcrypt" +version = "1.0.1" +description = "Brcrypt hashing for Flask." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +bcrypt = ">=3.1.1" +Flask = "*" + +[[package]] +name = "flask-bpmn" +version = "0.0.0" +description = "Flask Bpmn" +category = "main" +optional = false +python-versions = "^3.7" +develop = false + +[package.dependencies] +click = "^8.0.1" +flask = "*" +flask-admin = "*" +flask-bcrypt = "*" +flask-cors = "*" +flask-mail = "*" +flask-marshmallow = "*" +flask-migrate = "*" +flask-restful = "*" +sentry-sdk = "*" +sphinx-autoapi = "^1.9.0" +spiffworkflow = "*" +werkzeug = "*" + +[package.source] +type = "git" +url = "https://github.com/sartography/flask-bpmn" +reference = "main" +resolved_reference = "f3fc539423a3522d142146d2a039c0cd49badaf5" + +[[package]] +name = "Flask-Cors" +version = "3.0.10" +description = "A Flask extension adding a decorator for CORS support" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = ">=0.9" +Six = "*" + +[[package]] +name = "Flask-Mail" +version = "0.9.1" +description = "Flask extension for sending email" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +blinker = "*" +Flask = "*" + +[[package]] +name = "flask-marshmallow" +version = "0.14.0" +description = "Flask + marshmallow for beautiful APIs" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = "*" +marshmallow = ">=2.0.0" +six = ">=1.9.0" + +[package.extras] +dev = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pre-commit (>=2.4,<3.0)", "pytest", "tox"] +docs = ["Sphinx (==3.2.1)", "marshmallow-sqlalchemy (>=0.13.0)", "sphinx-issues (==1.2.0)"] +lint = ["flake8 (==3.8.3)", "flake8-bugbear (==20.1.4)", "pre-commit (>=2.4,<3.0)"] +sqlalchemy = ["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)"] +tests = 
["flask-sqlalchemy", "marshmallow-sqlalchemy (>=0.13.0)", "marshmallow-sqlalchemy (>=0.13.0,<0.19.0)", "mock", "pytest"] + +[[package]] +name = "Flask-Migrate" +version = "3.1.0" +description = "SQLAlchemy database migrations for Flask applications using Alembic." +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alembic = ">=0.7" +Flask = ">=0.9" +Flask-SQLAlchemy = ">=1.0" + +[[package]] +name = "Flask-RESTful" +version = "0.3.9" +description = "Simple framework for creating REST APIs" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +aniso8601 = ">=0.82" +Flask = ">=0.8" +pytz = "*" +six = ">=1.3.0" + +[package.extras] +docs = ["sphinx"] + +[[package]] +name = "flask-sqlalchemy" +version = "3.0.0" +description = "Add SQLAlchemy support to your Flask application." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +Flask = ">=2.2" +SQLAlchemy = ">=1.4.18" + +[[package]] +name = "furo" +version = "2022.9.29" +description = "A clean customisable Sphinx documentation theme." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=4.0,<6.0" +sphinx-basic-ng = "*" + +[[package]] +name = "gitdb" +version = "4.0.9" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "GitPython" +version = "3.1.27" +description = "GitPython is a python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "greenlet" +version = "1.1.3" +description = "Lightweight in-process concurrent programming" +category = "main" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" + +[package.extras] +docs = ["Sphinx"] + +[[package]] +name = "gunicorn" +version = "20.1.0" +description = "WSGI HTTP Server for UNIX" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +setuptools = ">=3.0" + +[package.extras] +eventlet = ["eventlet (>=0.24.1)"] +gevent = ["gevent (>=1.4.0)"] +setproctitle = ["setproctitle"] +tornado = ["tornado (>=0.2)"] + +[[package]] +name = "identify" +version = "2.5.6" +description = "File identification library for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "importlib-metadata" +version = "4.13.0" +description = "Read metadata from Python packages" +category = "main" 
+optional = false +python-versions = ">=3.7" + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "itsdangerous" +version = "2.1.2" +description = "Safely pass data to untrusted environments and back." +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "Jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jsonschema" +version = "4.16.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=17.4.0" +pyrsistent = ">=0.14.0,<0.17.0 || >0.17.0,<0.17.1 || >0.17.1,<0.17.2 || >0.17.2" + +[package.extras] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "kombu" +version = "5.2.4" +description = "Messaging library for Python." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +amqp = ">=5.0.9,<6.0.0" +vine = "*" + +[package.extras] +azureservicebus = ["azure-servicebus (>=7.0.0)"] +azurestoragequeues = ["azure-storage-queue"] +consul = ["python-consul (>=0.6.0)"] +librabbitmq = ["librabbitmq (>=2.0.0)"] +mongodb = ["pymongo (>=3.3.0,<3.12.1)"] +msgpack = ["msgpack"] +pyro = ["pyro4"] +qpid = ["qpid-python (>=0.26)", "qpid-tools (>=0.26)"] +redis = ["redis (>=3.4.1,!=4.0.0,!=4.0.1)"] +slmq = ["softlayer-messaging (>=1.0.3)"] +sqlalchemy = ["sqlalchemy"] +sqs = ["boto3 (>=1.9.12)", "pycurl (>=7.44.1,<7.45.0)", "urllib3 (>=1.26.7)"] +yaml = ["PyYAML (>=3.10)"] +zookeeper = ["kazoo (>=1.3.1)"] + +[[package]] +name = "lazy-object-proxy" +version = "1.7.1" +description = "A fast and thorough lazy object proxy." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "libcst" +version = "0.4.7" +description = "A concrete syntax tree with AST-like properties for Python 3.5, 3.6, 3.7, 3.8, 3.9, and 3.10 programs." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pyyaml = ">=5.2" +typing-extensions = ">=3.7.4.2" +typing-inspect = ">=0.4.0" + +[package.extras] +dev = ["black (==22.3.0)", "coverage (>=4.5.4)", "fixit (==0.1.1)", "flake8 (>=3.7.8)", "hypothesis (>=4.36.0)", "hypothesmith (>=0.0.4)", "jinja2 (==3.0.3)", "jupyter (>=1.0.0)", "maturin (>=0.8.3,<0.9)", "nbsphinx (>=0.4.2)", "prompt-toolkit (>=2.0.9)", "pyre-check (==0.9.9)", "setuptools-rust (>=0.12.1)", "setuptools-scm (>=6.0.1)", "slotscheck (>=0.7.1)", "sphinx-rtd-theme (>=0.4.3)", "ufmt (==1.3)", "usort (==1.0.0rc1)"] + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "lxml" +version = "4.9.1" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, != 3.4.*" + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=0.29.7)"] + +[[package]] +name = "Mako" +version = "1.2.3" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "MarkupSafe" +version = "2.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "marshmallow" +version = "3.18.0" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)", "pytest", "pytz", "simplejson", "tox"] +docs = ["alabaster (==0.7.12)", "autodocsumm (==0.2.9)", "sphinx (==5.1.1)", "sphinx-issues (==3.0.1)", "sphinx-version-warning (==1.1.2)"] +lint = ["flake8 (==5.0.4)", "flake8-bugbear (==22.9.11)", "mypy (==0.971)", "pre-commit (>=2.4,<3.0)"] +tests = ["pytest", "pytz", "simplejson"] + +[[package]] +name = "marshmallow-enum" +version = "1.5.1" +description = "Enum field for Marshmallow" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +marshmallow = ">=2.0.0" + +[[package]] +name = "marshmallow-sqlalchemy" +version = "0.28.1" +description = "SQLAlchemy integration with the marshmallow (de)serialization library" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +marshmallow = ">=3.0.0" +packaging = ">=21.3" +SQLAlchemy = ">=1.3.0" + +[package.extras] +dev = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)", "pytest", "pytest-lazy-fixture (>=0.6.2)", "tox"] +docs = ["alabaster (==0.7.12)", "sphinx (==4.4.0)", "sphinx-issues (==3.0.1)"] +lint = ["flake8 (==4.0.1)", "flake8-bugbear (==22.7.1)", "pre-commit (>=2.0,<3.0)"] +tests = ["pytest", "pytest-lazy-fixture (>=0.6.2)"] + +[[package]] +name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "MonkeyType" +version = "22.2.0" +description = "Generating type annotations from sampled production types" +category = 
"dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +libcst = ">=0.3.7" +mypy-extensions = "*" + +[[package]] +name = "mypy" +version = "0.982" +description = "Optional static typing for Python" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +mypy-extensions = ">=0.4.3" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=3.10" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "mysql-connector-python" +version = "8.0.30" +description = "MySQL driver written in Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +protobuf = ">=3.11.0,<=3.20.1" + +[package.extras] +compression = ["lz4 (>=2.1.6,<=3.1.3)", "zstandard (>=0.12.0,<=0.15.2)"] +dns-srv = ["dnspython (>=1.16.0,<=2.1.0)"] +gssapi = ["gssapi (>=1.6.9,<=1.7.3)"] + +[[package]] +name = "nodeenv" +version = "1.7.0" +description = "Node.js virtual environment builder" +category = "dev" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "orjson" +version = "3.8.0" +description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pathspec" +version = "0.10.1" +description = "Utility library for gitignore style 
pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pbr" +version = "5.10.0" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "pep8-naming" +version = "0.13.2" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +flake8 = ">=3.9.1" + +[[package]] +name = "platformdirs" +version = "2.5.2" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.20.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +toml = "*" +virtualenv = ">=20.0.8" + +[[package]] +name = "pre-commit-hooks" +version = "4.3.0" +description = "Some out-of-the-box hooks for pre-commit." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +"ruamel.yaml" = ">=0.15" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "prompt-toolkit" +version = "3.0.31" +description = "Library for building powerful interactive command lines in Python" +category = "main" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +wcwidth = "*" + +[[package]] +name = "protobuf" +version = "3.20.1" +description = "Protocol Buffers" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "psycopg2" +version = "2.9.3" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyasn1" +version = "0.4.8" +description = "ASN.1 types and codecs" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pycodestyle" +version = "2.8.0" +description = "Python style guide checker" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pydocstyle" +version = "6.1.1" +description = "Python docstring style checker" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +snowballstemmer = "*" + +[package.extras] +toml = ["toml"] + +[[package]] +name = "pyflakes" +version = "2.4.0" +description = "passive checker of Python programs" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "Pygments" +version = "2.13.0" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +plugins = ["importlib-metadata"] + +[[package]] +name = "PyJWT" +version = "2.5.0" +description = "JSON Web Token implementation in Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +crypto = ["cryptography (>=3.3.1)", "types-cryptography (>=3.3.21)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.3.1)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "types-cryptography (>=3.3.21)", "zope.interface"] +docs = ["sphinx (>=4.5.0,<5.0.0)", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "main" +optional = false +python-versions = ">=3.6.8" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pytest" +version = "7.1.3" +description = "pytest: simple powerful testing with Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-flask" +version = "1.2.0" +description = "A set of py.test fixtures to test Flask applications." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +Flask = "*" +pytest = ">=5.2" +Werkzeug = ">=0.7" + +[package.extras] +docs = ["Sphinx", "sphinx-rtd-theme"] + +[[package]] +name = "pytest-flask-sqlalchemy" +version = "1.1.0" +description = "A pytest plugin for preserving test isolation in Flask-SQlAlchemy using database transactions." +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Flask-SQLAlchemy = ">=2.3" +packaging = ">=14.1" +pytest = ">=3.2.1" +pytest-mock = ">=1.6.2" +SQLAlchemy = ">=1.2.2" + +[package.extras] +tests = ["psycopg2-binary", "pytest (>=6.0.1)", "pytest-postgresql (>=2.4.0,<4.0.0)"] + +[[package]] +name = "pytest-mock" +version = "3.9.0" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pyasn1", "pycrypto (>=2.6.0,<2.7.0)"] +pycryptodome = ["pyasn1", "pycryptodome (>=3.3.1,<4.0.0)"] + +[[package]] +name = "python-keycloak" +version = "2.6.0" +description = "python-keycloak is a Python package providing access to the Keycloak API." 
+category = "main" +optional = false +python-versions = ">=3.7,<4.0" + +[package.dependencies] +python-jose = ">=3.3.0,<4.0.0" +requests = ">=2.20.0,<3.0.0" +requests-toolbelt = ">=0.9.1,<0.10.0" +urllib3 = ">=1.26.0,<2.0.0" + +[package.extras] +docs = ["Sphinx (>=5.0.2,<6.0.0)", "alabaster (>=0.7.12,<0.8.0)", "commonmark (>=0.9.1,<0.10.0)", "m2r2 (>=0.3.2,<0.4.0)", "mock (>=4.0.3,<5.0.0)", "readthedocs-sphinx-ext (>=2.1.8,<3.0.0)", "recommonmark (>=0.7.1,<0.8.0)", "sphinx-autoapi (>=1.8.4,<2.0.0)", "sphinx-rtd-theme (>=1.0.0,<2.0.0)"] + +[[package]] +name = "pytz" +version = "2022.4" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "pytz-deprecation-shim" +version = "0.1.0.post0" +description = "Shims to make deprecation of pytz easier" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +tzdata = {version = "*", markers = "python_version >= \"3.6\""} + +[[package]] +name = "pyupgrade" +version = "2.38.4" +description = "A tool to automatically upgrade syntax for newer versions." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +tokenize-rt = "<5" + +[[package]] +name = "PyYAML" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "regex" +version = "2022.3.2" +description = "Alternative regular expression module, to replace re." +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "reorder-python-imports" +version = "3.8.3" +description = "Tool for reordering python imports" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +classify-imports = ">=4.1" + +[[package]] +name = "requests" +version = "2.28.1" +description = "Python HTTP for Humans." 
+category = "main" +optional = false +python-versions = ">=3.7, <4" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<3" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-toolbelt" +version = "0.9.1" +description = "A utility belt for advanced users of python-requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "RestrictedPython" +version = "5.2" +description = "RestrictedPython is a defined subset of the Python language which allows to provide a program input into a trusted environment." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <3.11" + +[package.extras] +docs = ["Sphinx", "sphinx-rtd-theme"] +test = ["pytest", "pytest-mock"] + +[[package]] +name = "restructuredtext-lint" +version = "1.4.0" +description = "reStructuredText linter" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +docutils = ">=0.11,<1.0" + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +category = "main" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "ruamel.yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "dev" +optional = false +python-versions = ">=3" + +[package.dependencies] +"ruamel.yaml.clib" = {version = ">=0.2.6", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.11\""} + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + +[[package]] +name = "ruamel.yaml.clib" +version = "0.2.6" +description = "C version of reader, parser and 
emitter for ruamel.yaml derived from libyaml" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "safety" +version = "2.3.1" +description = "Checks installed dependencies for known vulnerabilities and licenses." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +Click = ">=8.0.2" +dparse = ">=0.6.2" +packaging = ">=21.0" +requests = "*" +"ruamel.yaml" = ">=0.17.21" +setuptools = ">=19.3" + +[package.extras] +github = ["jinja2 (>=3.1.0)", "pygithub (>=1.43.3)"] +gitlab = ["python-gitlab (>=1.3.0)"] + +[[package]] +name = "sentry-sdk" +version = "1.9.10" +description = "Python client for Sentry (https://sentry.io)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +certifi = "*" +urllib3 = {version = ">=1.26.11", markers = "python_version >= \"3.6\""} + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +chalice = ["chalice (>=1.16.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)"] +httpx = ["httpx (>=0.16.0)"] +pure_eval = ["asttokens", "executing", "pure-eval"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +tornado = ["tornado (>=5)"] + +[[package]] +name = "setuptools" +version = "65.4.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "soupsieve" +version = "2.3.2.post1" +description = "A modern CSS selector implementation for Beautiful Soup." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "Sphinx" +version = "5.2.3" +description = "Python documentation generator" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=2.9" +colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.20" +imagesize = ">=1.3" +importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.0" +packaging = ">=21.0" +Pygments = ">=2.12" +requests = ">=2.5.0" +snowballstemmer = ">=2.0" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] + +[[package]] +name = "sphinx-autoapi" +version = "1.9.0" +description = "Sphinx API documentation generator" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +astroid = ">=2.7" +Jinja2 = "*" +PyYAML = "*" +sphinx = ">=3.0" +unidecode = "*" + +[package.extras] +docs = ["sphinx", "sphinx-rtd-theme"] +dotnet = ["sphinxcontrib-dotnetdomain"] +go = ["sphinxcontrib-golangdomain"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b1" +description = "A modern skeleton for Sphinx themes." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-click" +version = "4.3.0" +description = "Sphinx extension that automatically documents click applications" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +click = ">=7.0" +docutils = "*" +sphinx = ">=2.0" + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +category = "main" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "SpiffWorkflow" +version = "1.1.7" +description = "A workflow framework and BPMN/DMN Processor" +category = "main" +optional = false +python-versions = "*" +develop = false + +[package.dependencies] +celery = "*" +configparser = "*" +dateparser = "*" +importlib-metadata = "<5.0" +lxml = "*" +pytz = "*" + +[package.source] +type = "git" +url = "https://github.com/sartography/SpiffWorkflow" +reference = "main" +resolved_reference = "63db3e45947ec66b8d0efc2c74064004f8ff482c" + +[[package]] +name = "SQLAlchemy" +version = "1.4.41" +description = "Database Abstraction Library" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} + +[package.extras] +aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (!=0.4.17)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] +mariadb_connector = ["mariadb (>=1.0.1,!=1.1.2)"] +mssql = ["pyodbc"] +mssql_pymssql = ["pymssql"] +mssql_pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] +mysql = ["mysqlclient 
(>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] +mysql_connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql_asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql_pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] +postgresql_psycopg2binary = ["psycopg2-binary"] +postgresql_psycopg2cffi = ["psycopg2cffi"] +pymysql = ["pymysql", "pymysql (<1)"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "sqlalchemy-stubs" +version = "0.4" +description = "SQLAlchemy stubs and mypy plugin" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +mypy = ">=0.790" +typing-extensions = ">=3.7.4" + +[[package]] +name = "stevedore" +version = "4.0.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +pbr = ">=2.0.0,<2.1.0 || >2.1.0" + +[[package]] +name = "swagger-ui-bundle" +version = "0.0.9" +description = "swagger_ui_bundle - swagger-ui files in a pip package" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +Jinja2 = ">=2.0" + +[[package]] +name = "tokenize-rt" +version = "4.2.1" +description = "A wrapper around the stdlib `tokenize` which roundtrips." +category = "dev" +optional = false +python-versions = ">=3.6.1" + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "dev" +optional = false +python-versions = ">= 3.7" + +[[package]] +name = "typeguard" +version = "2.13.3" +description = "Run-time type checker for Python" +category = "dev" +optional = false +python-versions = ">=3.5.3" + +[package.extras] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] + +[[package]] +name = "types-pytz" +version = "2022.4.0.0" +description = "Typing stubs for pytz" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "types-requests" +version = "2.28.11.1" +description = "Typing stubs for requests" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.25" +description = "Typing stubs for urllib3" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "4.3.0" +description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typing-inspect" +version = "0.8.0" +description = "Runtime inspection utilities for typing module." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +mypy-extensions = ">=0.3.0" +typing-extensions = ">=3.7.4" + +[[package]] +name = "tzdata" +version = "2022.4" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" + +[[package]] +name = "tzlocal" +version = "4.2" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pytz-deprecation-shim = "*" +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] +test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] + +[[package]] +name = "Unidecode" +version = "1.3.6" +description = "ASCII transliterations of Unicode text" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "urllib3" +version = "1.26.12" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "vine" +version = "5.0.0" +description = "Promises, promises, promises." 
+category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "virtualenv" +version = "20.16.5" +description = "Virtual Python Environment builder" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +distlib = ">=0.3.5,<1" +filelock = ">=3.4.1,<4" +platformdirs = ">=2.4,<3" + +[package.extras] +docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] +testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "wcwidth" +version = "0.2.5" +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = false +python-versions = "*" + +[[package]] +name = "Werkzeug" +version = "2.2.2" +description = "The comprehensive WSGI web application library." +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "wrapt" +version = "1.14.1" +description = "Module for decorators, wrappers and monkey patching." +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "WTForms" +version = "3.0.1" +description = "Form validation and rendering for Python web development." 
+category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +MarkupSafe = "*" + +[package.extras] +email = ["email-validator"] + +[[package]] +name = "xdoctest" +version = "1.1.0" +description = "A rewrite of the builtin doctest module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"colors\""} +Pygments = {version = "*", optional = true, markers = "python_version >= \"3.5.0\" and extra == \"colors\""} +six = "*" + +[package.extras] +all = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "cmake", "codecov", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "six", "typing"] +all-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "cmake (==3.21.2)", "codecov (==2.0.15)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "six (==1.11.0)", "typing (==3.7.4)"] +colors = ["Pygments", "Pygments", "colorama"] +jupyter = ["IPython", "IPython", "attrs", "debugpy", 
"debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert"] +optional = ["IPython", "IPython", "Pygments", "Pygments", "attrs", "colorama", "debugpy", "debugpy", "debugpy", "debugpy", "debugpy", "ipykernel", "ipykernel", "ipython-genutils", "jedi", "jinja2", "jupyter-client", "jupyter-client", "jupyter-core", "nbconvert", "pyflakes", "tomli"] +optional-strict = ["IPython (==7.10.0)", "IPython (==7.23.1)", "Pygments (==2.0.0)", "Pygments (==2.4.1)", "attrs (==19.2.0)", "colorama (==0.4.1)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.0.0)", "debugpy (==1.3.0)", "debugpy (==1.6.0)", "ipykernel (==5.2.0)", "ipykernel (==6.0.0)", "ipython-genutils (==0.2.0)", "jedi (==0.16)", "jinja2 (==3.0.0)", "jupyter-client (==6.1.5)", "jupyter-client (==7.0.0)", "jupyter-core (==4.7.0)", "nbconvert (==6.0.0)", "pyflakes (==2.2.0)", "tomli (==0.2.0)"] +runtime-strict = ["six (==1.11.0)"] +tests = ["cmake", "codecov", "ninja", "pybind11", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest", "pytest-cov", "pytest-cov", "pytest-cov", "pytest-cov", "scikit-build", "typing"] +tests-strict = ["cmake (==3.21.2)", "codecov (==2.0.15)", "ninja (==1.10.2)", "pybind11 (==2.7.1)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==4.6.0)", "pytest (==6.2.5)", "pytest-cov (==2.8.1)", "pytest-cov (==2.8.1)", "pytest-cov (==2.9.0)", "pytest-cov (==3.0.0)", "scikit-build (==0.11.1)", "typing (==3.7.4)"] + +[[package]] +name = "zipp" +version = "3.8.1" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", 
"pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.9,<3.11" +content-hash = "ba476dd0748bb440b522d1bf24fb62eb30ce3cfbd48b9e3d8f7b5069ddc78ba9" + +[metadata.files] +alabaster = [ + {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, + {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, +] +alembic = [ + {file = "alembic-1.8.1-py3-none-any.whl", hash = "sha256:0a024d7f2de88d738d7395ff866997314c837be6104e90c5724350313dee4da4"}, + {file = "alembic-1.8.1.tar.gz", hash = "sha256:cd0b5e45b14b706426b833f06369b9a6d5ee03f826ec3238723ce8caaf6e5ffa"}, +] +amqp = [ + {file = "amqp-5.1.1-py3-none-any.whl", hash = "sha256:6f0956d2c23d8fa6e7691934d8c3930eadb44972cbbd1a7ae3a520f735d43359"}, + {file = "amqp-5.1.1.tar.gz", hash = "sha256:2c1b13fecc0893e946c65cbd5f36427861cffa4ea2201d8f6fca22e2a373b5e2"}, +] +aniso8601 = [ + {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, + {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, +] +APScheduler = [ + {file = "APScheduler-3.9.1-py2.py3-none-any.whl", hash = "sha256:ddc25a0ddd899de44d7f451f4375fb971887e65af51e41e5dcf681f59b8b2c9a"}, + {file = "APScheduler-3.9.1.tar.gz", hash = "sha256:65e6574b6395498d371d045f2a8a7e4f7d50c6ad21ef7313d15b1c7cf20df1e3"}, +] +astroid = [ + {file = "astroid-2.12.10-py3-none-any.whl", hash = "sha256:997e0c735df60d4a4caff27080a3afc51f9bdd693d3572a4a0b7090b645c36c5"}, + {file = "astroid-2.12.10.tar.gz", hash = "sha256:81f870105d892e73bf535da77a8261aa5bde838fa4ed12bb2f435291a098c581"}, +] +attrs = [ + {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, + {file = 
"attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, +] +Babel = [ + {file = "Babel-2.10.3-py3-none-any.whl", hash = "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb"}, + {file = "Babel-2.10.3.tar.gz", hash = "sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51"}, +] +bandit = [ + {file = "bandit-1.7.2-py3-none-any.whl", hash = "sha256:e20402cadfd126d85b68ed4c8862959663c8c372dbbb1fca8f8e2c9f55a067ec"}, + {file = "bandit-1.7.2.tar.gz", hash = "sha256:6d11adea0214a43813887bfe71a377b5a9955e4c826c8ffd341b494e3ab25260"}, +] +bcrypt = [ + {file = "bcrypt-4.0.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:845b1daf4df2dd94d2fdbc9454953ca9dd0e12970a0bfc9f3dcc6faea3fa96e4"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8780e69f9deec9d60f947b169507d2c9816e4f11548f1f7ebee2af38b9b22ae4"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c3334446fac200499e8bc04a530ce3cf0b3d7151e0e4ac5c0dddd3d95e97843"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb67f6a6c72dfb0a02f3df51550aa1862708e55128b22543e2b42c74f3620d7"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:7c7dd6c1f05bf89e65261d97ac3a6520f34c2acb369afb57e3ea4449be6ff8fd"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:594780b364fb45f2634c46ec8d3e61c1c0f1811c4f2da60e8eb15594ecbf93ed"}, + {file = "bcrypt-4.0.0-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2d0dd19aad87e4ab882ef1d12df505f4c52b28b69666ce83c528f42c07379227"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:bf413f2a9b0a2950fc750998899013f2e718d20fa4a58b85ca50b6df5ed1bbf9"}, + {file = "bcrypt-4.0.0-cp36-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:ede0f506554571c8eda80db22b83c139303ec6b595b8f60c4c8157bdd0bdee36"}, + {file = "bcrypt-4.0.0-cp36-abi3-win32.whl", hash = "sha256:dc6ec3dc19b1c193b2f7cf279d3e32e7caf447532fbcb7af0906fe4398900c33"}, + {file = "bcrypt-4.0.0-cp36-abi3-win_amd64.whl", hash = "sha256:0b0f0c7141622a31e9734b7f649451147c04ebb5122327ac0bd23744df84be90"}, + {file = "bcrypt-4.0.0.tar.gz", hash = "sha256:c59c170fc9225faad04dde1ba61d85b413946e8ce2e5f5f5ff30dfd67283f319"}, +] +beautifulsoup4 = [ + {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, + {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, +] +billiard = [ + {file = "billiard-3.6.4.0-py3-none-any.whl", hash = "sha256:87103ea78fa6ab4d5c751c4909bcff74617d985de7fa8b672cf8618afd5a875b"}, + {file = "billiard-3.6.4.0.tar.gz", hash = "sha256:299de5a8da28a783d51b197d496bef4f1595dd023a93a4f59dde1886ae905547"}, +] +black = [ + {file = "black-22.8.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ce957f1d6b78a8a231b18e0dd2d94a33d2ba738cd88a7fe64f53f659eea49fdd"}, + {file = "black-22.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5107ea36b2b61917956d018bd25129baf9ad1125e39324a9b18248d362156a27"}, + {file = "black-22.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8166b7bfe5dcb56d325385bd1d1e0f635f24aae14b3ae437102dedc0c186747"}, + {file = "black-22.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd82842bb272297503cbec1a2600b6bfb338dae017186f8f215c8958f8acf869"}, + {file = "black-22.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d839150f61d09e7217f52917259831fe2b689f5c8e5e32611736351b89bb2a90"}, + {file = "black-22.8.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a05da0430bd5ced89176db098567973be52ce175a55677436a271102d7eaa3fe"}, + {file = "black-22.8.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4a098a69a02596e1f2a58a2a1c8d5a05d5a74461af552b371e82f9fa4ada8342"}, + {file = "black-22.8.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5594efbdc35426e35a7defa1ea1a1cb97c7dbd34c0e49af7fb593a36bd45edab"}, + {file = "black-22.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a983526af1bea1e4cf6768e649990f28ee4f4137266921c2c3cee8116ae42ec3"}, + {file = "black-22.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b2c25f8dea5e8444bdc6788a2f543e1fb01494e144480bc17f806178378005e"}, + {file = "black-22.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:78dd85caaab7c3153054756b9fe8c611efa63d9e7aecfa33e533060cb14b6d16"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:cea1b2542d4e2c02c332e83150e41e3ca80dc0fb8de20df3c5e98e242156222c"}, + {file = "black-22.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5b879eb439094751185d1cfdca43023bc6786bd3c60372462b6f051efa6281a5"}, + {file = "black-22.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a12e4e1353819af41df998b02c6742643cfef58282915f781d0e4dd7a200411"}, + {file = "black-22.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3a73f66b6d5ba7288cd5d6dad9b4c9b43f4e8a4b789a94bf5abfb878c663eb3"}, + {file = "black-22.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:e981e20ec152dfb3e77418fb616077937378b322d7b26aa1ff87717fb18b4875"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8ce13ffed7e66dda0da3e0b2eb1bdfc83f5812f66e09aca2b0978593ed636b6c"}, + {file = "black-22.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:32a4b17f644fc288c6ee2bafdf5e3b045f4eff84693ac069d87b1a347d861497"}, + {file = "black-22.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ad827325a3a634bae88ae7747db1a395d5ee02cf05d9aa7a9bd77dfb10e940c"}, + {file = "black-22.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53198e28a1fb865e9fe97f88220da2e44df6da82b18833b588b1883b16bb5d41"}, + {file = 
"black-22.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:bc4d4123830a2d190e9cc42a2e43570f82ace35c3aeb26a512a2102bce5af7ec"}, + {file = "black-22.8.0-py3-none-any.whl", hash = "sha256:d2c21d439b2baf7aa80d6dd4e3659259be64c6f49dfd0f32091063db0e006db4"}, + {file = "black-22.8.0.tar.gz", hash = "sha256:792f7eb540ba9a17e8656538701d3eb1afcb134e3b45b71f20b25c77a8db7e6e"}, +] +blinker = [ + {file = "blinker-1.5-py2.py3-none-any.whl", hash = "sha256:1eb563df6fdbc39eeddc177d953203f99f097e9bf0e2b8f9f3cf18b6ca425e36"}, + {file = "blinker-1.5.tar.gz", hash = "sha256:923e5e2f69c155f2cc42dafbbd70e16e3fde24d2d4aa2ab72fbe386238892462"}, +] +celery = [ + {file = "celery-5.2.7-py3-none-any.whl", hash = "sha256:138420c020cd58d6707e6257b6beda91fd39af7afde5d36c6334d175302c0e14"}, + {file = "celery-5.2.7.tar.gz", hash = "sha256:fafbd82934d30f8a004f81e8f7a062e31413a23d444be8ee3326553915958c6d"}, +] +certifi = [ + {file = "certifi-2022.9.24-py3-none-any.whl", hash = "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382"}, + {file = "certifi-2022.9.24.tar.gz", hash = "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14"}, +] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, + {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, +] +classify-imports = [ + {file = "classify_imports-4.2.0-py2.py3-none-any.whl", hash = "sha256:dbbc264b70a470ed8c6c95976a11dfb8b7f63df44ed1af87328bbed2663f5161"}, + {file = "classify_imports-4.2.0.tar.gz", hash = "sha256:7abfb7ea92149b29d046bd34573d247ba6e68cc28100c801eba4af17964fc40e"}, +] +click 
= [ + {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, + {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, +] +click-didyoumean = [ + {file = "click-didyoumean-0.3.0.tar.gz", hash = "sha256:f184f0d851d96b6d29297354ed981b7dd71df7ff500d82fa6d11f0856bee8035"}, + {file = "click_didyoumean-0.3.0-py3-none-any.whl", hash = "sha256:a0713dc7a1de3f06bc0df5a9567ad19ead2d3d5689b434768a6145bff77c0667"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +click-repl = [ + {file = "click-repl-0.2.0.tar.gz", hash = "sha256:cd12f68d745bf6151210790540b4cb064c7b13e571bc64b6957d98d120dacfd8"}, + {file = "click_repl-0.2.0-py3-none-any.whl", hash = "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b"}, +] +clickclick = [ + {file = "clickclick-20.10.2-py2.py3-none-any.whl", hash = "sha256:c8f33e6d9ec83f68416dd2136a7950125bd256ec39ccc9a85c6e280a16be2bb5"}, + {file = "clickclick-20.10.2.tar.gz", hash = "sha256:4efb13e62353e34c5eef7ed6582c4920b418d7dedc86d819e22ee089ba01802c"}, +] +colorama = [ + {file = "colorama-0.4.5-py2.py3-none-any.whl", hash = "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da"}, + {file = "colorama-0.4.5.tar.gz", hash = "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4"}, +] +configparser = [ + {file = "configparser-5.3.0-py3-none-any.whl", hash = "sha256:b065779fd93c6bf4cee42202fa4351b4bb842e96a3fb469440e484517a49b9fa"}, + {file = "configparser-5.3.0.tar.gz", hash = "sha256:8be267824b541c09b08db124917f48ab525a6c3e837011f3130781a224c57090"}, +] +connexion = [ + {file = "connexion-2.14.1-py2.py3-none-any.whl", hash = 
"sha256:f343717241b4c4802a694c38fee66fb1693c897fe4ea5a957fa9b3b07caf6394"}, + {file = "connexion-2.14.1.tar.gz", hash = "sha256:99aa5781e70a7b94f8ffae8cf89f309d49cdb811bbd65a8e2f2546f3b19a01e6"}, +] +coverage = [ + {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, + {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, + {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, + {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, + {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, + {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, + {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, + {file = 
"coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, + {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, + {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, + {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, + {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, + {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, + {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, + {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, + {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, + {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, + {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, + {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, + {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, + {file = 
"coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, + {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, + {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, + {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, + {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, + {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, + {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, + {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, + {file = 
"coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, + {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, +] +darglint = [ + {file = "darglint-1.8.1-py3-none-any.whl", hash = "sha256:5ae11c259c17b0701618a20c3da343a3eb98b3bc4b5a83d31cdd94f5ebdced8d"}, + {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, +] +dateparser = [ + {file = "dateparser-1.1.1-py2.py3-none-any.whl", hash = "sha256:9600874312ff28a41f96ec7ccdc73be1d1c44435719da47fea3339d55ff5a628"}, + {file = "dateparser-1.1.1.tar.gz", hash = "sha256:038196b1f12c7397e38aad3d61588833257f6f552baa63a1499e6987fa8d42d9"}, +] +distlib = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] +docutils = [ + {file = "docutils-0.19-py3-none-any.whl", hash = "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc"}, + {file = "docutils-0.19.tar.gz", hash = "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6"}, +] +dparse = [ + {file = "dparse-0.6.2-py3-none-any.whl", hash = "sha256:8097076f1dd26c377f30d4745e6ec18fef42f3bf493933b842ac5bafad8c345f"}, + {file = "dparse-0.6.2.tar.gz", hash = "sha256:d45255bda21f998bc7ddf2afd5e62505ba6134756ba2d42a84c56b0826614dfe"}, +] +ecdsa = [ + {file = "ecdsa-0.18.0-py2.py3-none-any.whl", hash = "sha256:80600258e7ed2f16b9aa1d7c295bd70194109ad5a30fdee0eaeefef1d4c559dd"}, + {file = "ecdsa-0.18.0.tar.gz", hash = "sha256:190348041559e21b22a1d65cee485282ca11a6f81d503fddb84d5017e9ed1e49"}, +] +filelock = [ + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = 
"filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, +] +flake8 = [ + {file = "flake8-4.0.1-py2.py3-none-any.whl", hash = "sha256:479b1304f72536a55948cb40a32dce8bb0ffe3501e26eaf292c7e60eb5e0428d"}, + {file = "flake8-4.0.1.tar.gz", hash = "sha256:806e034dda44114815e23c16ef92f95c91e4c71100ff52813adf7132a6ad870d"}, +] +flake8-bandit = [ + {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, +] +flake8-bugbear = [ + {file = "flake8-bugbear-22.9.23.tar.gz", hash = "sha256:17b9623325e6e0dcdcc80ed9e4aa811287fcc81d7e03313b8736ea5733759937"}, + {file = "flake8_bugbear-22.9.23-py3-none-any.whl", hash = "sha256:cd2779b2b7ada212d7a322814a1e5651f1868ab0d3f24cc9da66169ab8fda474"}, +] +flake8-docstrings = [ + {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, + {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, +] +flake8-polyfill = [ + {file = "flake8-polyfill-1.0.2.tar.gz", hash = "sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda"}, + {file = "flake8_polyfill-1.0.2-py2.py3-none-any.whl", hash = "sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9"}, +] +flake8-rst-docstrings = [ + {file = "flake8-rst-docstrings-0.2.7.tar.gz", hash = "sha256:2740067ab9237559dd45a3434d8c987792c7b259ca563621a3b95efe201f5382"}, + {file = "flake8_rst_docstrings-0.2.7-py3-none-any.whl", hash = "sha256:5d56075dce360bcc9c6775bfe7cb431aa395de600ca7e8d40580a28d50b2a803"}, +] +Flask = [ + {file = "Flask-2.2.2-py3-none-any.whl", hash = "sha256:b9c46cc36662a7949f34b52d8ec7bb59c0d74ba08ba6cb9ce9adc1d8676d9526"}, + {file = "Flask-2.2.2.tar.gz", hash = "sha256:642c450d19c4ad482f96729bd2a8f6d32554aa1e231f4f6b4e7e5264b16cca2b"}, +] +Flask-Admin = [ + {file = "Flask-Admin-1.6.0.tar.gz", hash 
= "sha256:424ffc79b7b0dfff051555686ea12e86e48dffacac14beaa319fb4502ac40988"}, +] +Flask-Bcrypt = [ + {file = "Flask-Bcrypt-1.0.1.tar.gz", hash = "sha256:f07b66b811417ea64eb188ae6455b0b708a793d966e1a80ceec4a23bc42a4369"}, + {file = "Flask_Bcrypt-1.0.1-py3-none-any.whl", hash = "sha256:062fd991dc9118d05ac0583675507b9fe4670e44416c97e0e6819d03d01f808a"}, +] +flask-bpmn = [] +Flask-Cors = [ + {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, + {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, +] +Flask-Mail = [ + {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, +] +flask-marshmallow = [ + {file = "flask-marshmallow-0.14.0.tar.gz", hash = "sha256:bd01a6372cbe50e36f205cfff0fc5dab0b7b662c4c8b2c4fc06a3151b2950950"}, + {file = "flask_marshmallow-0.14.0-py2.py3-none-any.whl", hash = "sha256:2adcd782b5a4a6c5ae3c96701f320d8ca6997995a52b2661093c56cc3ed24754"}, +] +Flask-Migrate = [ + {file = "Flask-Migrate-3.1.0.tar.gz", hash = "sha256:57d6060839e3a7f150eaab6fe4e726d9e3e7cffe2150fb223d73f92421c6d1d9"}, + {file = "Flask_Migrate-3.1.0-py3-none-any.whl", hash = "sha256:a6498706241aba6be7a251078de9cf166d74307bca41a4ca3e403c9d39e2f897"}, +] +Flask-RESTful = [ + {file = "Flask-RESTful-0.3.9.tar.gz", hash = "sha256:ccec650b835d48192138c85329ae03735e6ced58e9b2d9c2146d6c84c06fa53e"}, + {file = "Flask_RESTful-0.3.9-py2.py3-none-any.whl", hash = "sha256:4970c49b6488e46c520b325f54833374dc2b98e211f1b272bd4b0c516232afe2"}, +] +flask-sqlalchemy = [ + {file = "Flask-SQLAlchemy-3.0.0.tar.gz", hash = "sha256:b54939fd5f48184742b7d5b222d86983e233b43140c1071a36327353e86f3b56"}, + {file = "Flask_SQLAlchemy-3.0.0-py3-none-any.whl", hash = "sha256:741dabf0903569a89e4793667e25be5bb9581e614fa0eeb81a395cc7dee40c4b"}, +] +furo = [ + {file = "furo-2022.9.29-py3-none-any.whl", hash = 
"sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, + {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, +] +gitdb = [ + {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, + {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, +] +GitPython = [ + {file = "GitPython-3.1.27-py3-none-any.whl", hash = "sha256:5b68b000463593e05ff2b261acff0ff0972df8ab1b70d3cdbd41b546c8b8fc3d"}, + {file = "GitPython-3.1.27.tar.gz", hash = "sha256:1c885ce809e8ba2d88a29befeb385fcea06338d3640712b59ca623c220bb5704"}, +] +greenlet = [ + {file = "greenlet-1.1.3-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:8c287ae7ac921dfde88b1c125bd9590b7ec3c900c2d3db5197f1286e144e712b"}, + {file = "greenlet-1.1.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:870a48007872d12e95a996fca3c03a64290d3ea2e61076aa35d3b253cf34cd32"}, + {file = "greenlet-1.1.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7c5227963409551ae4a6938beb70d56bf1918c554a287d3da6853526212fbe0a"}, + {file = "greenlet-1.1.3-cp27-cp27m-win32.whl", hash = "sha256:9fae214f6c43cd47f7bef98c56919b9222481e833be2915f6857a1e9e8a15318"}, + {file = "greenlet-1.1.3-cp27-cp27m-win_amd64.whl", hash = "sha256:de431765bd5fe62119e0bc6bc6e7b17ac53017ae1782acf88fcf6b7eae475a49"}, + {file = "greenlet-1.1.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:510c3b15587afce9800198b4b142202b323bf4b4b5f9d6c79cb9a35e5e3c30d2"}, + {file = "greenlet-1.1.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:9951dcbd37850da32b2cb6e391f621c1ee456191c6ae5528af4a34afe357c30e"}, + {file = "greenlet-1.1.3-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:07c58e169bbe1e87b8bbf15a5c1b779a7616df9fd3e61cadc9d691740015b4f8"}, + {file = "greenlet-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df02fdec0c533301497acb0bc0f27f479a3a63dcdc3a099ae33a902857f07477"}, + {file = "greenlet-1.1.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c88e134d51d5e82315a7c32b914a58751b7353eb5268dbd02eabf020b4c4700"}, + {file = "greenlet-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b41d19c0cfe5c259fe6c539fd75051cd39a5d33d05482f885faf43f7f5e7d26"}, + {file = "greenlet-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:6f5d4b2280ceea76c55c893827961ed0a6eadd5a584a7c4e6e6dd7bc10dfdd96"}, + {file = "greenlet-1.1.3-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:184416e481295832350a4bf731ba619a92f5689bf5d0fa4341e98b98b1265bd7"}, + {file = "greenlet-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd0404d154084a371e6d2bafc787201612a1359c2dee688ae334f9118aa0bf47"}, + {file = "greenlet-1.1.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a43bbfa9b6cfdfaeefbd91038dde65ea2c421dc387ed171613df340650874f2"}, + {file = "greenlet-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce5b64dfe8d0cca407d88b0ee619d80d4215a2612c1af8c98a92180e7109f4b5"}, + {file = "greenlet-1.1.3-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:903fa5716b8fbb21019268b44f73f3748c41d1a30d71b4a49c84b642c2fed5fa"}, + {file = "greenlet-1.1.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:0118817c9341ef2b0f75f5af79ac377e4da6ff637e5ee4ac91802c0e379dadb4"}, + {file = "greenlet-1.1.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:466ce0928e33421ee84ae04c4ac6f253a3a3e6b8d600a79bd43fd4403e0a7a76"}, + {file = "greenlet-1.1.3-cp35-cp35m-win32.whl", hash = "sha256:65ad1a7a463a2a6f863661329a944a5802c7129f7ad33583dcc11069c17e622c"}, + {file = "greenlet-1.1.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7532a46505470be30cbf1dbadb20379fb481244f1ca54207d7df3bf0bbab6a20"}, + {file = "greenlet-1.1.3-cp36-cp36m-macosx_10_14_x86_64.whl", hash = 
"sha256:caff52cb5cd7626872d9696aee5b794abe172804beb7db52eed1fd5824b63910"}, + {file = "greenlet-1.1.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:db41f3845eb579b544c962864cce2c2a0257fe30f0f1e18e51b1e8cbb4e0ac6d"}, + {file = "greenlet-1.1.3-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e8533f5111704d75de3139bf0b8136d3a6c1642c55c067866fa0a51c2155ee33"}, + {file = "greenlet-1.1.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537e4baf0db67f382eb29255a03154fcd4984638303ff9baaa738b10371fa57"}, + {file = "greenlet-1.1.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8bfd36f368efe0ab2a6aa3db7f14598aac454b06849fb633b762ddbede1db90"}, + {file = "greenlet-1.1.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0877a9a2129a2c56a2eae2da016743db7d9d6a05d5e1c198f1b7808c602a30e"}, + {file = "greenlet-1.1.3-cp36-cp36m-win32.whl", hash = "sha256:88b04e12c9b041a1e0bcb886fec709c488192638a9a7a3677513ac6ba81d8e79"}, + {file = "greenlet-1.1.3-cp36-cp36m-win_amd64.whl", hash = "sha256:4f166b4aca8d7d489e82d74627a7069ab34211ef5ebb57c300ec4b9337b60fc0"}, + {file = "greenlet-1.1.3-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:cd16a89efe3a003029c87ff19e9fba635864e064da646bc749fc1908a4af18f3"}, + {file = "greenlet-1.1.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5b756e6730ea59b2745072e28ad27f4c837084688e6a6b3633c8b1e509e6ae0e"}, + {file = "greenlet-1.1.3-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:9b2f7d0408ddeb8ea1fd43d3db79a8cefaccadd2a812f021333b338ed6b10aba"}, + {file = "greenlet-1.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b4817c34c9272c65550b788913620f1fdc80362b209bc9d7dd2f40d8793080"}, + {file = "greenlet-1.1.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d58a5a71c4c37354f9e0c24c9c8321f0185f6945ef027460b809f4bb474bfe41"}, + {file = 
"greenlet-1.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd51d2650e70c6c4af37f454737bf4a11e568945b27f74b471e8e2a9fd21268"}, + {file = "greenlet-1.1.3-cp37-cp37m-win32.whl", hash = "sha256:048d2bed76c2aa6de7af500ae0ea51dd2267aec0e0f2a436981159053d0bc7cc"}, + {file = "greenlet-1.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:77e41db75f9958f2083e03e9dd39da12247b3430c92267df3af77c83d8ff9eed"}, + {file = "greenlet-1.1.3-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:1626185d938d7381631e48e6f7713e8d4b964be246073e1a1d15c2f061ac9f08"}, + {file = "greenlet-1.1.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:1ec2779774d8e42ed0440cf8bc55540175187e8e934f2be25199bf4ed948cd9e"}, + {file = "greenlet-1.1.3-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f2f908239b7098799b8845e5936c2ccb91d8c2323be02e82f8dcb4a80dcf4a25"}, + {file = "greenlet-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b181e9aa6cb2f5ec0cacc8cee6e5a3093416c841ba32c185c30c160487f0380"}, + {file = "greenlet-1.1.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2cf45e339cabea16c07586306a31cfcc5a3b5e1626d365714d283732afed6809"}, + {file = "greenlet-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6200a11f003ec26815f7e3d2ded01b43a3810be3528dd760d2f1fa777490c3cd"}, + {file = "greenlet-1.1.3-cp38-cp38-win32.whl", hash = "sha256:db5b25265010a1b3dca6a174a443a0ed4c4ab12d5e2883a11c97d6e6d59b12f9"}, + {file = "greenlet-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:095a980288fe05adf3d002fbb180c99bdcf0f930e220aa66fcd56e7914a38202"}, + {file = "greenlet-1.1.3-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:cbc1eb55342cbac8f7ec159088d54e2cfdd5ddf61c87b8bbe682d113789331b2"}, + {file = "greenlet-1.1.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:694ffa7144fa5cc526c8f4512665003a39fa09ef00d19bbca5c8d3406db72fbe"}, + {file = "greenlet-1.1.3-cp39-cp39-manylinux2010_x86_64.whl", hash = 
"sha256:aa741c1a8a8cc25eb3a3a01a62bdb5095a773d8c6a86470bde7f607a447e7905"}, + {file = "greenlet-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3a669f11289a8995d24fbfc0e63f8289dd03c9aaa0cc8f1eab31d18ca61a382"}, + {file = "greenlet-1.1.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:76a53bfa10b367ee734b95988bd82a9a5f0038a25030f9f23bbbc005010ca600"}, + {file = "greenlet-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fb0aa7f6996879551fd67461d5d3ab0c3c0245da98be90c89fcb7a18d437403"}, + {file = "greenlet-1.1.3-cp39-cp39-win32.whl", hash = "sha256:5fbe1ab72b998ca77ceabbae63a9b2e2dc2d963f4299b9b278252ddba142d3f1"}, + {file = "greenlet-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:ffe73f9e7aea404722058405ff24041e59d31ca23d1da0895af48050a07b6932"}, + {file = "greenlet-1.1.3.tar.gz", hash = "sha256:bcb6c6dd1d6be6d38d6db283747d07fda089ff8c559a835236560a4410340455"}, +] +gunicorn = [ + {file = "gunicorn-20.1.0-py3-none-any.whl", hash = "sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e"}, + {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, +] +identify = [ + {file = "identify-2.5.6-py2.py3-none-any.whl", hash = "sha256:b276db7ec52d7e89f5bc4653380e33054ddc803d25875952ad90b0f012cbcdaa"}, + {file = "identify-2.5.6.tar.gz", hash = "sha256:6c32dbd747aa4ceee1df33f25fed0b0f6e0d65721b15bd151307ff7056d50245"}, +] +idna = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] +imagesize = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = 
"sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, +] +inflection = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +itsdangerous = [ + {file = "itsdangerous-2.1.2-py3-none-any.whl", hash = "sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44"}, + {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, +] +Jinja2 = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] +jsonschema = [ + {file = "jsonschema-4.16.0-py3-none-any.whl", hash = "sha256:9e74b8f9738d6a946d70705dc692b74b5429cd0960d58e79ffecfc43b2221eb9"}, + {file = "jsonschema-4.16.0.tar.gz", hash = "sha256:165059f076eff6971bae5b742fc029a7b4ef3f9bcf04c14e4776a7605de14b23"}, +] +kombu = [ + {file = "kombu-5.2.4-py3-none-any.whl", hash = "sha256:8b213b24293d3417bcf0d2f5537b7f756079e3ea232a8386dcc89a59fd2361a4"}, + {file = "kombu-5.2.4.tar.gz", hash = "sha256:37cee3ee725f94ea8bb173eaab7c1760203ea53bbebae226328600f9d2799610"}, +] +lazy-object-proxy = [ + 
{file = "lazy-object-proxy-1.7.1.tar.gz", hash = "sha256:d609c75b986def706743cdebe5e47553f4a5a1da9c5ff66d76013ef396b5a8a4"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bb8c5fd1684d60a9902c60ebe276da1f2281a318ca16c1d0a96db28f62e9166b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a57d51ed2997e97f3b8e3500c984db50a554bb5db56c50b5dab1b41339b37e36"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd45683c3caddf83abbb1249b653a266e7069a09f486daa8863fb0e7496a9fdb"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:8561da8b3dd22d696244d6d0d5330618c993a215070f473b699e00cf1f3f6443"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fccdf7c2c5821a8cbd0a9440a456f5050492f2270bd54e94360cac663398739b"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win32.whl", hash = "sha256:898322f8d078f2654d275124a8dd19b079080ae977033b713f677afcfc88e2b9"}, + {file = "lazy_object_proxy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:85b232e791f2229a4f55840ed54706110c80c0a210d076eee093f2b2e33e1bfd"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:46ff647e76f106bb444b4533bb4153c7370cdf52efc62ccfc1a28bdb3cc95442"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12f3bb77efe1367b2515f8cb4790a11cffae889148ad33adad07b9b55e0ab22c"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c19814163728941bb871240d45c4c30d33b8a2e85972c44d4e63dd7107faba44"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:e40f2013d96d30217a51eeb1db28c9ac41e9d0ee915ef9d00da639c5b63f01a1"}, + {file = 
"lazy_object_proxy-1.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:2052837718516a94940867e16b1bb10edb069ab475c3ad84fd1e1a6dd2c0fcfc"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:6a24357267aa976abab660b1d47a34aaf07259a0c3859a34e536f1ee6e76b5bb"}, + {file = "lazy_object_proxy-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:6aff3fe5de0831867092e017cf67e2750c6a1c7d88d84d2481bd84a2e019ec35"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6a6e94c7b02641d1311228a102607ecd576f70734dc3d5e22610111aeacba8a0"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ce15276a1a14549d7e81c243b887293904ad2d94ad767f42df91e75fd7b5b6"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e368b7f7eac182a59ff1f81d5f3802161932a41dc1b1cc45c1f757dc876b5d2c"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6ecbb350991d6434e1388bee761ece3260e5228952b1f0c46ffc800eb313ff42"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:553b0f0d8dbf21890dd66edd771f9b1b5f51bd912fa5f26de4449bfc5af5e029"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:c7a683c37a8a24f6428c28c561c80d5f4fd316ddcf0c7cab999b15ab3f5c5c69"}, + {file = "lazy_object_proxy-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:df2631f9d67259dc9620d831384ed7732a198eb434eadf69aea95ad18c587a28"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07fa44286cda977bd4803b656ffc1c9b7e3bc7dff7d34263446aec8f8c96f88a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dca6244e4121c74cc20542c2ca39e5c4a5027c81d112bfb893cf0790f96f57e"}, + {file = 
"lazy_object_proxy-1.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91ba172fc5b03978764d1df5144b4ba4ab13290d7bab7a50f12d8117f8630c38"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:043651b6cb706eee4f91854da4a089816a6606c1428fd391573ef8cb642ae4f7"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b9e89b87c707dd769c4ea91f7a31538888aad05c116a59820f28d59b3ebfe25a"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win32.whl", hash = "sha256:9d166602b525bf54ac994cf833c385bfcc341b364e3ee71e3bf5a1336e677b55"}, + {file = "lazy_object_proxy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:8f3953eb575b45480db6568306893f0bd9d8dfeeebd46812aa09ca9579595148"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dd7ed7429dbb6c494aa9bc4e09d94b778a3579be699f9d67da7e6804c422d3de"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70ed0c2b380eb6248abdef3cd425fc52f0abd92d2b07ce26359fcbc399f636ad"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7096a5e0c1115ec82641afbdd70451a144558ea5cf564a896294e346eb611be1"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f769457a639403073968d118bc70110e7dce294688009f5c24ab78800ae56dc8"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:39b0e26725c5023757fc1ab2a89ef9d7ab23b84f9251e28f9cc114d5b59c1b09"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2130db8ed69a48a3440103d4a520b89d8a9405f1b06e2cc81640509e8bf6548f"}, + {file = "lazy_object_proxy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:677ea950bef409b47e51e733283544ac3d660b709cfce7b187f5ace137960d61"}, + {file = 
"lazy_object_proxy-1.7.1-pp37.pp38-none-any.whl", hash = "sha256:d66906d5785da8e0be7360912e99c9188b70f52c422f9fc18223347235691a84"}, +] +libcst = [ + {file = "libcst-0.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc6f8965b6ca68d47e11321772887d81fa6fd8ea86e6ef87434ca2147de10747"}, + {file = "libcst-0.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a8f47d809df59fcd83058b777b86a300154ee3a1f1b0523a398a67b5f8affd4c"}, + {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0d19de56aa733b4ef024527e3ce4896d4b0e9806889797f409ec24caa651a44"}, + {file = "libcst-0.4.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31da97bc986dc3f7a97f7d431fa911932aaf716d2f8bcda947fc964afd3b57cd"}, + {file = "libcst-0.4.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71b2e2c5e33e53669c20de0853cecfac1ffb8657ee727ab8527140f39049b820"}, + {file = "libcst-0.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:76fae68bd6b7ce069e267b3322c806b4305341cea78d161ae40e0ed641c8c660"}, + {file = "libcst-0.4.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bac76d69980bb3254f503f52128c256ef4d1bcbaabe4a17c3a9ebcd1fc0472c0"}, + {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f86535271eaefe84a99736875566a038449f92e1a2a61ea0b588d8359fbefd"}, + {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:617f7fa2610a8c86cf22d8d03416f25391383d05bd0ad1ca8ef68023ddd6b4f6"}, + {file = "libcst-0.4.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3637fffe476c5b4ee2225c6474b83382518f2c1b2fe4771039e06bdd7835a4a"}, + {file = "libcst-0.4.7-cp37-cp37m-win_amd64.whl", hash = "sha256:f56565124c2541adee0634e411b2126b3f335306d19e91ed2bfe52efa698b219"}, + {file = "libcst-0.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0ca2771ff3cfdf1f148349f89fcae64afa365213ed5c2703a69a89319325d0c8"}, + {file = "libcst-0.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:aa438131b7befc7e5a3cbadb5a7b1506305de5d62262ea0556add0152f40925e"}, + {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c6bd66a8be2ffad7b968d90dae86c62fd4739c0e011d71f3e76544a891ae743"}, + {file = "libcst-0.4.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:214a9c4f4f90cd5b4bfa18e17877da4dd9a896821d9af9be86fa3effdc289b9b"}, + {file = "libcst-0.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a37f2b459a8b51a41e260bd89c24ae41ab1d658f610c91650c79b1bbf27138"}, + {file = "libcst-0.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:2f6766391d90472f036b88a95251c87d498ab068c377724f212ab0cc20509a68"}, + {file = "libcst-0.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:234293aa8681a3d47fef1716c5622797a81cbe85a9381fe023815468cfe20eed"}, + {file = "libcst-0.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fa618dc359663a0a097c633452b104c1ca93365da7a811e655c6944f6b323239"}, + {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3569d9901c18940632414fb7a0943bffd326db9f726a9c041664926820857815"}, + {file = "libcst-0.4.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beb5347e46b419f782589da060e9300957e71d561aa5574309883b71f93c1dfe"}, + {file = "libcst-0.4.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e541ccfeebda1ae5f005fc120a5bf3e8ac9ccfda405ec3efd3df54fc4688ac3"}, + {file = "libcst-0.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:3a2b7253cd2e3f0f8a3e23b5c2acb492811d865ef36e0816091c925f32b713d2"}, + {file = "libcst-0.4.7.tar.gz", hash = "sha256:95c52c2130531f6e726a3b077442cfd486975435fecf3db8224d43fba7b85099"}, +] +livereload = [ + {file = "livereload-2.6.3.tar.gz", 
hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] +lxml = [ + {file = "lxml-4.9.1-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:98cafc618614d72b02185ac583c6f7796202062c41d2eeecdf07820bad3295ed"}, + {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c62e8dd9754b7debda0c5ba59d34509c4688f853588d75b53c3791983faa96fc"}, + {file = "lxml-4.9.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:21fb3d24ab430fc538a96e9fbb9b150029914805d551deeac7d7822f64631dfc"}, + {file = "lxml-4.9.1-cp27-cp27m-win32.whl", hash = "sha256:86e92728ef3fc842c50a5cb1d5ba2bc66db7da08a7af53fb3da79e202d1b2cd3"}, + {file = "lxml-4.9.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4cfbe42c686f33944e12f45a27d25a492cc0e43e1dc1da5d6a87cbcaf2e95627"}, + {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dad7b164905d3e534883281c050180afcf1e230c3d4a54e8038aa5cfcf312b84"}, + {file = "lxml-4.9.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a614e4afed58c14254e67862456d212c4dcceebab2eaa44d627c2ca04bf86837"}, + {file = "lxml-4.9.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f9ced82717c7ec65a67667bb05865ffe38af0e835cdd78728f1209c8fffe0cad"}, + {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:d9fc0bf3ff86c17348dfc5d322f627d78273eba545db865c3cd14b3f19e57fa5"}, + {file = "lxml-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e5f66bdf0976ec667fc4594d2812a00b07ed14d1b44259d19a41ae3fff99f2b8"}, + {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fe17d10b97fdf58155f858606bddb4e037b805a60ae023c009f760d8361a4eb8"}, + {file = "lxml-4.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8caf4d16b31961e964c62194ea3e26a0e9561cdf72eecb1781458b67ec83423d"}, + {file = 
"lxml-4.9.1-cp310-cp310-win32.whl", hash = "sha256:4780677767dd52b99f0af1f123bc2c22873d30b474aa0e2fc3fe5e02217687c7"}, + {file = "lxml-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:b122a188cd292c4d2fcd78d04f863b789ef43aa129b233d7c9004de08693728b"}, + {file = "lxml-4.9.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:be9eb06489bc975c38706902cbc6888f39e946b81383abc2838d186f0e8b6a9d"}, + {file = "lxml-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:f1be258c4d3dc609e654a1dc59d37b17d7fef05df912c01fc2e15eb43a9735f3"}, + {file = "lxml-4.9.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:927a9dd016d6033bc12e0bf5dee1dde140235fc8d0d51099353c76081c03dc29"}, + {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9232b09f5efee6a495a99ae6824881940d6447debe272ea400c02e3b68aad85d"}, + {file = "lxml-4.9.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:04da965dfebb5dac2619cb90fcf93efdb35b3c6994fea58a157a834f2f94b318"}, + {file = "lxml-4.9.1-cp35-cp35m-win32.whl", hash = "sha256:4d5bae0a37af799207140652a700f21a85946f107a199bcb06720b13a4f1f0b7"}, + {file = "lxml-4.9.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4878e667ebabe9b65e785ac8da4d48886fe81193a84bbe49f12acff8f7a383a4"}, + {file = "lxml-4.9.1-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:1355755b62c28950f9ce123c7a41460ed9743c699905cbe664a5bcc5c9c7c7fb"}, + {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:bcaa1c495ce623966d9fc8a187da80082334236a2a1c7e141763ffaf7a405067"}, + {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6eafc048ea3f1b3c136c71a86db393be36b5b3d9c87b1c25204e7d397cee9536"}, + {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:13c90064b224e10c14dcdf8086688d3f0e612db53766e7478d7754703295c7c8"}, + {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206a51077773c6c5d2ce1991327cda719063a47adc02bd703c56a662cdb6c58b"}, + {file = "lxml-4.9.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e8f0c9d65da595cfe91713bc1222af9ecabd37971762cb830dea2fc3b3bb2acf"}, + {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:8f0a4d179c9a941eb80c3a63cdb495e539e064f8054230844dcf2fcb812b71d3"}, + {file = "lxml-4.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:830c88747dce8a3e7525defa68afd742b4580df6aa2fdd6f0855481e3994d391"}, + {file = "lxml-4.9.1-cp36-cp36m-win32.whl", hash = "sha256:1e1cf47774373777936c5aabad489fef7b1c087dcd1f426b621fda9dcc12994e"}, + {file = "lxml-4.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:5974895115737a74a00b321e339b9c3f45c20275d226398ae79ac008d908bff7"}, + {file = "lxml-4.9.1-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1423631e3d51008871299525b541413c9b6c6423593e89f9c4cfbe8460afc0a2"}, + {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:2aaf6a0a6465d39b5ca69688fce82d20088c1838534982996ec46633dc7ad6cc"}, + {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:9f36de4cd0c262dd9927886cc2305aa3f2210db437aa4fed3fb4940b8bf4592c"}, + {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae06c1e4bc60ee076292e582a7512f304abdf6c70db59b56745cca1684f875a4"}, + {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:57e4d637258703d14171b54203fd6822fda218c6c2658a7d30816b10995f29f3"}, + {file = "lxml-4.9.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6d279033bf614953c3fc4a0aa9ac33a21e8044ca72d4fa8b9273fe75359d5cca"}, + {file = 
"lxml-4.9.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a60f90bba4c37962cbf210f0188ecca87daafdf60271f4c6948606e4dabf8785"}, + {file = "lxml-4.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6ca2264f341dd81e41f3fffecec6e446aa2121e0b8d026fb5130e02de1402785"}, + {file = "lxml-4.9.1-cp37-cp37m-win32.whl", hash = "sha256:27e590352c76156f50f538dbcebd1925317a0f70540f7dc8c97d2931c595783a"}, + {file = "lxml-4.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:eea5d6443b093e1545ad0210e6cf27f920482bfcf5c77cdc8596aec73523bb7e"}, + {file = "lxml-4.9.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f05251bbc2145349b8d0b77c0d4e5f3b228418807b1ee27cefb11f69ed3d233b"}, + {file = "lxml-4.9.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:487c8e61d7acc50b8be82bda8c8d21d20e133c3cbf41bd8ad7eb1aaeb3f07c97"}, + {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d1a92d8e90b286d491e5626af53afef2ba04da33e82e30744795c71880eaa21"}, + {file = "lxml-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:b570da8cd0012f4af9fa76a5635cd31f707473e65a5a335b186069d5c7121ff2"}, + {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5ef87fca280fb15342726bd5f980f6faf8b84a5287fcc2d4962ea8af88b35130"}, + {file = "lxml-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:93e414e3206779ef41e5ff2448067213febf260ba747fc65389a3ddaa3fb8715"}, + {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6653071f4f9bac46fbc30f3c7838b0e9063ee335908c5d61fb7a4a86c8fd2036"}, + {file = "lxml-4.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:32a73c53783becdb7eaf75a2a1525ea8e49379fb7248c3eeefb9412123536387"}, + {file = "lxml-4.9.1-cp38-cp38-win32.whl", hash = "sha256:1a7c59c6ffd6ef5db362b798f350e24ab2cfa5700d53ac6681918f314a4d3b94"}, + {file = 
"lxml-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:1436cf0063bba7888e43f1ba8d58824f085410ea2025befe81150aceb123e345"}, + {file = "lxml-4.9.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:4beea0f31491bc086991b97517b9683e5cfb369205dac0148ef685ac12a20a67"}, + {file = "lxml-4.9.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41fb58868b816c202e8881fd0f179a4644ce6e7cbbb248ef0283a34b73ec73bb"}, + {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:bd34f6d1810d9354dc7e35158aa6cc33456be7706df4420819af6ed966e85448"}, + {file = "lxml-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:edffbe3c510d8f4bf8640e02ca019e48a9b72357318383ca60e3330c23aaffc7"}, + {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6d949f53ad4fc7cf02c44d6678e7ff05ec5f5552b235b9e136bd52e9bf730b91"}, + {file = "lxml-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:079b68f197c796e42aa80b1f739f058dcee796dc725cc9a1be0cdb08fc45b000"}, + {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9c3a88d20e4fe4a2a4a84bf439a5ac9c9aba400b85244c63a1ab7088f85d9d25"}, + {file = "lxml-4.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4e285b5f2bf321fc0857b491b5028c5f276ec0c873b985d58d7748ece1d770dd"}, + {file = "lxml-4.9.1-cp39-cp39-win32.whl", hash = "sha256:ef72013e20dd5ba86a8ae1aed7f56f31d3374189aa8b433e7b12ad182c0d2dfb"}, + {file = "lxml-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:10d2017f9150248563bb579cd0d07c61c58da85c922b780060dcc9a3aa9f432d"}, + {file = "lxml-4.9.1-pp37-pypy37_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0538747a9d7827ce3e16a8fdd201a99e661c7dee3c96c885d8ecba3c35d1032c"}, + {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = 
"sha256:0645e934e940107e2fdbe7c5b6fb8ec6232444260752598bc4d09511bd056c0b"}, + {file = "lxml-4.9.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6daa662aba22ef3258934105be2dd9afa5bb45748f4f702a3b39a5bf53a1f4dc"}, + {file = "lxml-4.9.1-pp38-pypy38_pp73-macosx_10_15_x86_64.whl", hash = "sha256:603a464c2e67d8a546ddaa206d98e3246e5db05594b97db844c2f0a1af37cf5b"}, + {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c4b2e0559b68455c085fb0f6178e9752c4be3bba104d6e881eb5573b399d1eb2"}, + {file = "lxml-4.9.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:0f3f0059891d3254c7b5fb935330d6db38d6519ecd238ca4fce93c234b4a0f73"}, + {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:c852b1530083a620cb0de5f3cd6826f19862bafeaf77586f1aef326e49d95f0c"}, + {file = "lxml-4.9.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:287605bede6bd36e930577c5925fcea17cb30453d96a7b4c63c14a257118dbb9"}, + {file = "lxml-4.9.1.tar.gz", hash = "sha256:fe749b052bb7233fe5d072fcb549221a8cb1a16725c47c37e42b0b9cb3ff2c3f"}, +] +Mako = [ + {file = "Mako-1.2.3-py3-none-any.whl", hash = "sha256:c413a086e38cd885088d5e165305ee8eed04e8b3f8f62df343480da0a385735f"}, + {file = "Mako-1.2.3.tar.gz", hash = "sha256:7fde96466fcfeedb0eed94f187f20b23d85e4cb41444be0e542e2c8c65c396cd"}, +] +MarkupSafe = [ + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, + {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, + {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = 
"sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, + {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, + {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, + {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, +] +marshmallow = [ + {file = "marshmallow-3.18.0-py3-none-any.whl", hash = 
"sha256:35e02a3a06899c9119b785c12a22f4cda361745d66a71ab691fd7610202ae104"}, + {file = "marshmallow-3.18.0.tar.gz", hash = "sha256:6804c16114f7fce1f5b4dadc31f4674af23317fcc7f075da21e35c1a35d781f7"}, +] +marshmallow-enum = [ + {file = "marshmallow-enum-1.5.1.tar.gz", hash = "sha256:38e697e11f45a8e64b4a1e664000897c659b60aa57bfa18d44e226a9920b6e58"}, + {file = "marshmallow_enum-1.5.1-py2.py3-none-any.whl", hash = "sha256:57161ab3dbfde4f57adeb12090f39592e992b9c86d206d02f6bd03ebec60f072"}, +] +marshmallow-sqlalchemy = [ + {file = "marshmallow-sqlalchemy-0.28.1.tar.gz", hash = "sha256:aa376747296780a56355e3067b9c8bf43a2a1c44ff985de82b3a5d9e161ca2b8"}, + {file = "marshmallow_sqlalchemy-0.28.1-py2.py3-none-any.whl", hash = "sha256:dbb061c19375eca3a7d18358d2ca8bbaee825fc3000a3f114e2698282362b536"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +MonkeyType = [ + {file = "MonkeyType-22.2.0-py3-none-any.whl", hash = "sha256:3d0815c7e98a18e9267990a452548247f6775fd636e65df5a7d77100ea7ad282"}, + {file = "MonkeyType-22.2.0.tar.gz", hash = "sha256:6b0c00b49dcc5095a2c08d28246cf005e05673fc51f64d203f9a6bca2036dfab"}, +] +mypy = [ + {file = "mypy-0.982-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5085e6f442003fa915aeb0a46d4da58128da69325d8213b4b35cc7054090aed5"}, + {file = "mypy-0.982-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:41fd1cf9bc0e1c19b9af13a6580ccb66c381a5ee2cf63ee5ebab747a4badeba3"}, + {file = "mypy-0.982-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f793e3dd95e166b66d50e7b63e69e58e88643d80a3dcc3bcd81368e0478b089c"}, + {file = "mypy-0.982-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86ebe67adf4d021b28c3f547da6aa2cce660b57f0432617af2cca932d4d378a6"}, + {file = 
"mypy-0.982-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:175f292f649a3af7082fe36620369ffc4661a71005aa9f8297ea473df5772046"}, + {file = "mypy-0.982-cp310-cp310-win_amd64.whl", hash = "sha256:8ee8c2472e96beb1045e9081de8e92f295b89ac10c4109afdf3a23ad6e644f3e"}, + {file = "mypy-0.982-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58f27ebafe726a8e5ccb58d896451dd9a662a511a3188ff6a8a6a919142ecc20"}, + {file = "mypy-0.982-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6af646bd46f10d53834a8e8983e130e47d8ab2d4b7a97363e35b24e1d588947"}, + {file = "mypy-0.982-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7aeaa763c7ab86d5b66ff27f68493d672e44c8099af636d433a7f3fa5596d40"}, + {file = "mypy-0.982-cp37-cp37m-win_amd64.whl", hash = "sha256:724d36be56444f569c20a629d1d4ee0cb0ad666078d59bb84f8f887952511ca1"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:14d53cdd4cf93765aa747a7399f0961a365bcddf7855d9cef6306fa41de01c24"}, + {file = "mypy-0.982-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:26ae64555d480ad4b32a267d10cab7aec92ff44de35a7cd95b2b7cb8e64ebe3e"}, + {file = "mypy-0.982-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6389af3e204975d6658de4fb8ac16f58c14e1bacc6142fee86d1b5b26aa52bda"}, + {file = "mypy-0.982-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b35ce03a289480d6544aac85fa3674f493f323d80ea7226410ed065cd46f206"}, + {file = "mypy-0.982-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6e564f035d25c99fd2b863e13049744d96bd1947e3d3d2f16f5828864506763"}, + {file = "mypy-0.982-cp38-cp38-win_amd64.whl", hash = "sha256:cebca7fd333f90b61b3ef7f217ff75ce2e287482206ef4a8b18f32b49927b1a2"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a705a93670c8b74769496280d2fe6cd59961506c64f329bb179970ff1d24f9f8"}, + {file = "mypy-0.982-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75838c649290d83a2b83a88288c1eb60fe7a05b36d46cbea9d22efc790002146"}, + {file 
= "mypy-0.982-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:91781eff1f3f2607519c8b0e8518aad8498af1419e8442d5d0afb108059881fc"}, + {file = "mypy-0.982-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa97b9ddd1dd9901a22a879491dbb951b5dec75c3b90032e2baa7336777363b"}, + {file = "mypy-0.982-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a692a8e7d07abe5f4b2dd32d731812a0175626a90a223d4b58f10f458747dd8a"}, + {file = "mypy-0.982-cp39-cp39-win_amd64.whl", hash = "sha256:eb7a068e503be3543c4bd329c994103874fa543c1727ba5288393c21d912d795"}, + {file = "mypy-0.982-py3-none-any.whl", hash = "sha256:1021c241e8b6e1ca5a47e4d52601274ac078a89845cfde66c6d5f769819ffa1d"}, + {file = "mypy-0.982.tar.gz", hash = "sha256:85f7a343542dc8b1ed0a888cdd34dca56462654ef23aa673907305b260b3d746"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +mysql-connector-python = [ + {file = "mysql-connector-python-8.0.30.tar.gz", hash = "sha256:59a8592e154c874c299763bb8aa12c518384c364bcfd0d193e85c869ea81a895"}, + {file = "mysql_connector_python-8.0.30-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f1eb74eb30bb04ff314f5e19af5421d23b504e41d16ddcee2603b4100d18fd68"}, + {file = "mysql_connector_python-8.0.30-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:712cdfa97f35fec715e8d7aaa15ed9ce04f3cf71b3c177fcca273047040de9f2"}, + {file = "mysql_connector_python-8.0.30-cp310-cp310-manylinux1_i686.whl", hash = "sha256:ce23ca9c27e1f7b4707b3299ce515125f312736d86a7e5b2aa778484fa3ffa10"}, + {file = "mysql_connector_python-8.0.30-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:8876b1d51cae33cdfe7021d68206661e94dcd2666e5e14a743f8321e2b068e84"}, + {file = "mysql_connector_python-8.0.30-cp310-cp310-win_amd64.whl", hash = 
"sha256:41a04d1900e366bf6c2a645ead89ab9a567806d5ada7d417a3a31f170321dd14"}, + {file = "mysql_connector_python-8.0.30-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:7f771bd5cba3ade6d9f7a649e65d7c030f69f0e69980632b5cbbd3d19c39cee5"}, + {file = "mysql_connector_python-8.0.30-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:611c6945805216104575f7143ff6497c87396ce82d3257e6da7257b65406f13e"}, + {file = "mysql_connector_python-8.0.30-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:47deb8c3324db7eb2bfb720ec8084d547b1bce457672ea261bc21836024249db"}, + {file = "mysql_connector_python-8.0.30-cp37-cp37m-win_amd64.whl", hash = "sha256:234c6b156a1989bebca6eb564dc8f2e9d352f90a51bd228ccd68eb66fcd5fd7a"}, + {file = "mysql_connector_python-8.0.30-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8b7d50c221320b0e609dce9ca8801ab2f2a748dfee65cd76b1e4c6940757734a"}, + {file = "mysql_connector_python-8.0.30-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:d8f74c9388176635f75c01d47d0abc783a47e58d7f36d04fb6ee40ab6fb35c9b"}, + {file = "mysql_connector_python-8.0.30-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d9d3af14594aceda2c3096564b4c87ffac21e375806a802daeaf7adcd18d36b"}, + {file = "mysql_connector_python-8.0.30-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f5d812245754d4759ebc8c075662fef65397e1e2a438a3c391eac9d545077b8b"}, + {file = "mysql_connector_python-8.0.30-cp38-cp38-win_amd64.whl", hash = "sha256:a130c5489861c7ff2990e5b503c37beb2fb7b32211b92f9107ad864ee90654c0"}, + {file = "mysql_connector_python-8.0.30-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:954a1fc2e9a811662c5b17cea24819c020ff9d56b2ff8e583dd0a233fb2399f6"}, + {file = "mysql_connector_python-8.0.30-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:62266d1b18cb4e286a05df0e1c99163a4955c82d41045305bcf0ab2aac107843"}, + {file = "mysql_connector_python-8.0.30-cp39-cp39-manylinux1_i686.whl", hash = "sha256:36e763f21e62b3c9623a264f2513ee11924ea1c9cc8640c115a279d3087064be"}, + {file = 
"mysql_connector_python-8.0.30-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b5dc0f3295e404f93b674bfaff7589a9fbb8b5ae6c1c134112a1d1beb2f664b2"}, + {file = "mysql_connector_python-8.0.30-cp39-cp39-win_amd64.whl", hash = "sha256:33c4e567547a9a1868462fda8f2b19ea186a7b1afe498171dca39c0f3aa43a75"}, + {file = "mysql_connector_python-8.0.30-py2.py3-none-any.whl", hash = "sha256:f1d40cac9c786e292433716c1ade7a8968cbc3ea177026697b86a63188ddba34"}, +] +nodeenv = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] +orjson = [ + {file = "orjson-3.8.0-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:9a93850a1bdc300177b111b4b35b35299f046148ba23020f91d6efd7bf6b9d20"}, + {file = "orjson-3.8.0-cp310-cp310-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:7536a2a0b41672f824912aeab545c2467a9ff5ca73a066ff04fb81043a0a177a"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66c19399bb3b058e3236af7910b57b19a4fc221459d722ed72a7dc90370ca090"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b391d5c2ddc2f302d22909676b306cb6521022c3ee306c861a6935670291b2c"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bdb1042970ca5f544a047d6c235a7eb4acdb69df75441dd1dfcbc406377ab37"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:d189e2acb510e374700cb98cf11b54f0179916ee40f8453b836157ae293efa79"}, + {file = "orjson-3.8.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:6a23b40c98889e9abac084ce5a1fb251664b41da9f6bdb40a4729e2288ed2ed4"}, + {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:b68a42a31f8429728183c21fb440c21de1b62e5378d0d73f280e2d894ef8942e"}, + {file = "orjson-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ff13410ddbdda5d4197a4a4c09969cb78c722a67550f0a63c02c07aadc624833"}, + {file = "orjson-3.8.0-cp310-none-win_amd64.whl", hash = "sha256:2d81e6e56bbea44be0222fb53f7b255b4e7426290516771592738ca01dbd053b"}, + {file = "orjson-3.8.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:e2defd9527651ad39ec20ae03c812adf47ef7662bdd6bc07dabb10888d70dc62"}, + {file = "orjson-3.8.0-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:9e6ac22cec72d5b39035b566e4b86c74b84866f12b5b0b6541506a080fb67d6d"}, + {file = "orjson-3.8.0-cp37-cp37m-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e2f4a5542f50e3d336a18cb224fc757245ca66b1fd0b70b5dd4471b8ff5f2b0e"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1418feeb8b698b9224b1f024555895169d481604d5d884498c1838d7412794c"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6e3da2e4bd27c3b796519ca74132c7b9e5348fb6746315e0f6c1592bc5cf1caf"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:896a21a07f1998648d9998e881ab2b6b80d5daac4c31188535e9d50460edfcf7"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:4065906ce3ad6195ac4d1bddde862fe811a42d7be237a1ff762666c3a4bb2151"}, + {file = "orjson-3.8.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:5f856279872a4449fc629924e6a083b9821e366cf98b14c63c308269336f7c14"}, + {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1b1cd25acfa77935bb2e791b75211cec0cfc21227fe29387e553c545c3ff87e1"}, + {file = "orjson-3.8.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3e2459d441ab8fd8b161aa305a73d5269b3cda13b5a2a39eba58b4dd3e394f49"}, + {file = "orjson-3.8.0-cp37-none-win_amd64.whl", hash = 
"sha256:d2b5dafbe68237a792143137cba413447f60dd5df428e05d73dcba10c1ea6fcf"}, + {file = "orjson-3.8.0-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:5b072ef8520cfe7bd4db4e3c9972d94336763c2253f7c4718a49e8733bada7b8"}, + {file = "orjson-3.8.0-cp38-cp38-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e68c699471ea3e2dd1b35bfd71c6a0a0e4885b64abbe2d98fce1ef11e0afaff3"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7225e8b08996d1a0c804d3a641a53e796685e8c9a9fd52bd428980032cad9a"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8f687776a03c19f40b982fb5c414221b7f3d19097841571be2223d1569a59877"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7990a9caf3b34016ac30be5e6cfc4e7efd76aa85614a1215b0eae4f0c7e3db59"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:02d638d43951ba346a80f0abd5942a872cc87db443e073f6f6fc530fee81e19b"}, + {file = "orjson-3.8.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f4b46dbdda2f0bd6480c39db90b21340a19c3b0fcf34bc4c6e465332930ca539"}, + {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:655d7387a1634a9a477c545eea92a1ee902ab28626d701c6de4914e2ed0fecd2"}, + {file = "orjson-3.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5edb93cdd3eb32977633fa7aaa6a34b8ab54d9c49cdcc6b0d42c247a29091b22"}, + {file = "orjson-3.8.0-cp38-none-win_amd64.whl", hash = "sha256:03ed95814140ff09f550b3a42e6821f855d981c94d25b9cc83e8cca431525d70"}, + {file = "orjson-3.8.0-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:7b0e72974a5d3b101226899f111368ec2c9824d3e9804af0e5b31567f53ad98a"}, + {file = "orjson-3.8.0-cp39-cp39-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:6ea5fe20ef97545e14dd4d0263e4c5c3bc3d2248d39b4b0aed4b84d528dfc0af"}, + {file = 
"orjson-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6433c956f4a18112342a18281e0bec67fcd8b90be3a5271556c09226e045d805"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87462791dd57de2e3e53068bf4b7169c125c50960f1bdda08ed30c797cb42a56"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be02f6acee33bb63862eeff80548cd6b8a62e2d60ad2d8dfd5a8824cc43d8887"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:a709c2249c1f2955dbf879506fd43fa08c31fdb79add9aeb891e3338b648bf60"}, + {file = "orjson-3.8.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:2065b6d280dc58f131ffd93393737961ff68ae7eb6884b68879394074cc03c13"}, + {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5fd6cac83136e06e538a4d17117eaeabec848c1e86f5742d4811656ad7ee475f"}, + {file = "orjson-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:25b5e48fbb9f0b428a5e44cf740675c9281dd67816149fc33659803399adbbe8"}, + {file = "orjson-3.8.0-cp39-none-win_amd64.whl", hash = "sha256:2058653cc12b90e482beacb5c2d52dc3d7606f9e9f5a52c1c10ef49371e76f52"}, + {file = "orjson-3.8.0.tar.gz", hash = "sha256:fb42f7cf57d5804a9daa6b624e3490ec9e2631e042415f3aebe9f35a8492ba6c"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pathspec = [ + {file = "pathspec-0.10.1-py3-none-any.whl", hash = "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93"}, + {file = "pathspec-0.10.1.tar.gz", hash = "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d"}, +] +pbr = [ + {file = "pbr-5.10.0-py2.py3-none-any.whl", hash = "sha256:da3e18aac0a3c003e9eea1a81bd23e5a3a75d745670dcf736317b7d966887fdf"}, + {file = 
"pbr-5.10.0.tar.gz", hash = "sha256:cfcc4ff8e698256fc17ea3ff796478b050852585aa5bae79ecd05b2ab7b39b9a"}, +] +pep8-naming = [ + {file = "pep8-naming-0.13.2.tar.gz", hash = "sha256:93eef62f525fd12a6f8c98f4dcc17fa70baae2f37fa1f73bec00e3e44392fa48"}, + {file = "pep8_naming-0.13.2-py3-none-any.whl", hash = "sha256:59e29e55c478db69cffbe14ab24b5bd2cd615c0413edf790d47d3fb7ba9a4e23"}, +] +platformdirs = [ + {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, + {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +pre-commit = [ + {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, + {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, +] +pre-commit-hooks = [ + {file = "pre_commit_hooks-4.3.0-py2.py3-none-any.whl", hash = "sha256:9ccaf7c98794659d345080ee1ea0256a55ae059675045eebdbbc17c0be8c7e4b"}, + {file = "pre_commit_hooks-4.3.0.tar.gz", hash = "sha256:fda598a4c834d030727e6a615722718b47510f4bed72df4c949f95ba9f5aaf88"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.31-py3-none-any.whl", hash = "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d"}, + {file = "prompt_toolkit-3.0.31.tar.gz", hash = "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148"}, +] +protobuf = [ + {file = "protobuf-3.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3cc797c9d15d7689ed507b165cd05913acb992d78b379f6014e013f9ecb20996"}, + {file = "protobuf-3.20.1-cp310-cp310-manylinux2014_aarch64.whl", 
hash = "sha256:ff8d8fa42675249bb456f5db06c00de6c2f4c27a065955917b28c4f15978b9c3"}, + {file = "protobuf-3.20.1-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd68be2559e2a3b84f517fb029ee611546f7812b1fdd0aa2ecc9bc6ec0e4fdde"}, + {file = "protobuf-3.20.1-cp310-cp310-win32.whl", hash = "sha256:9016d01c91e8e625141d24ec1b20fed584703e527d28512aa8c8707f105a683c"}, + {file = "protobuf-3.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:32ca378605b41fd180dfe4e14d3226386d8d1b002ab31c969c366549e66a2bb7"}, + {file = "protobuf-3.20.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9be73ad47579abc26c12024239d3540e6b765182a91dbc88e23658ab71767153"}, + {file = "protobuf-3.20.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:097c5d8a9808302fb0da7e20edf0b8d4703274d140fd25c5edabddcde43e081f"}, + {file = "protobuf-3.20.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e250a42f15bf9d5b09fe1b293bdba2801cd520a9f5ea2d7fb7536d4441811d20"}, + {file = "protobuf-3.20.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cdee09140e1cd184ba9324ec1df410e7147242b94b5f8b0c64fc89e38a8ba531"}, + {file = "protobuf-3.20.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:af0ebadc74e281a517141daad9d0f2c5d93ab78e9d455113719a45a49da9db4e"}, + {file = "protobuf-3.20.1-cp37-cp37m-win32.whl", hash = "sha256:755f3aee41354ae395e104d62119cb223339a8f3276a0cd009ffabfcdd46bb0c"}, + {file = "protobuf-3.20.1-cp37-cp37m-win_amd64.whl", hash = "sha256:62f1b5c4cd6c5402b4e2d63804ba49a327e0c386c99b1675c8a0fefda23b2067"}, + {file = "protobuf-3.20.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:06059eb6953ff01e56a25cd02cca1a9649a75a7e65397b5b9b4e929ed71d10cf"}, + {file = "protobuf-3.20.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:cb29edb9eab15742d791e1025dd7b6a8f6fcb53802ad2f6e3adcb102051063ab"}, + {file = "protobuf-3.20.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:69ccfdf3657ba59569c64295b7d51325f91af586f8d5793b734260dfe2e94e2c"}, + {file = "protobuf-3.20.1-cp38-cp38-win32.whl", hash = "sha256:dd5789b2948ca702c17027c84c2accb552fc30f4622a98ab5c51fcfe8c50d3e7"}, + {file = "protobuf-3.20.1-cp38-cp38-win_amd64.whl", hash = "sha256:77053d28427a29987ca9caf7b72ccafee011257561259faba8dd308fda9a8739"}, + {file = "protobuf-3.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f50601512a3d23625d8a85b1638d914a0970f17920ff39cec63aaef80a93fb7"}, + {file = "protobuf-3.20.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:284f86a6207c897542d7e956eb243a36bb8f9564c1742b253462386e96c6b78f"}, + {file = "protobuf-3.20.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7403941f6d0992d40161aa8bb23e12575637008a5a02283a930addc0508982f9"}, + {file = "protobuf-3.20.1-cp39-cp39-win32.whl", hash = "sha256:db977c4ca738dd9ce508557d4fce0f5aebd105e158c725beec86feb1f6bc20d8"}, + {file = "protobuf-3.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:7e371f10abe57cee5021797126c93479f59fccc9693dafd6bd5633ab67808a91"}, + {file = "protobuf-3.20.1-py2.py3-none-any.whl", hash = "sha256:adfc6cf69c7f8c50fd24c793964eef18f0ac321315439d94945820612849c388"}, + {file = "protobuf-3.20.1.tar.gz", hash = "sha256:adc31566d027f45efe3f44eeb5b1f329da43891634d61c75a5944e9be6dd42c9"}, +] +psycopg2 = [ + {file = "psycopg2-2.9.3-cp310-cp310-win32.whl", hash = "sha256:083707a696e5e1c330af2508d8fab36f9700b26621ccbcb538abe22e15485362"}, + {file = "psycopg2-2.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:d3ca6421b942f60c008f81a3541e8faf6865a28d5a9b48544b0ee4f40cac7fca"}, + {file = "psycopg2-2.9.3-cp36-cp36m-win32.whl", hash = "sha256:9572e08b50aed176ef6d66f15a21d823bb6f6d23152d35e8451d7d2d18fdac56"}, + {file = "psycopg2-2.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a81e3866f99382dfe8c15a151f1ca5fde5815fde879348fe5a9884a7c092a305"}, + {file = "psycopg2-2.9.3-cp37-cp37m-win32.whl", hash = 
"sha256:cb10d44e6694d763fa1078a26f7f6137d69f555a78ec85dc2ef716c37447e4b2"}, + {file = "psycopg2-2.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4295093a6ae3434d33ec6baab4ca5512a5082cc43c0505293087b8a46d108461"}, + {file = "psycopg2-2.9.3-cp38-cp38-win32.whl", hash = "sha256:34b33e0162cfcaad151f249c2649fd1030010c16f4bbc40a604c1cb77173dcf7"}, + {file = "psycopg2-2.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:0762c27d018edbcb2d34d51596e4346c983bd27c330218c56c4dc25ef7e819bf"}, + {file = "psycopg2-2.9.3-cp39-cp39-win32.whl", hash = "sha256:8cf3878353cc04b053822896bc4922b194792df9df2f1ad8da01fb3043602126"}, + {file = "psycopg2-2.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:06f32425949bd5fe8f625c49f17ebb9784e1e4fe928b7cce72edc36fb68e4c0c"}, + {file = "psycopg2-2.9.3.tar.gz", hash = "sha256:8e841d1bf3434da985cc5ef13e6f75c8981ced601fd70cc6bf33351b91562981"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = "sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = 
"pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] +pycodestyle = [ + {file = "pycodestyle-2.8.0-py2.py3-none-any.whl", hash = "sha256:720f8b39dde8b293825e7ff02c475f3077124006db4f440dcbc9a20b76548a20"}, + {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, +] +pydocstyle = [ + {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, + {file = "pydocstyle-6.1.1.tar.gz", hash = "sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, +] +pyflakes = [ + {file = "pyflakes-2.4.0-py2.py3-none-any.whl", hash = "sha256:3bb3a3f256f4b7968c9c788781e4ff07dce46bdf12339dcda61053375426ee2e"}, + {file = "pyflakes-2.4.0.tar.gz", hash = "sha256:05a85c2872edf37a4ed30b0cce2f6093e1d0581f8c19d7393122da7e25b2b24c"}, +] +Pygments = [ + {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, + {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, +] +PyJWT = [ + {file = "PyJWT-2.5.0-py3-none-any.whl", hash = "sha256:8d82e7087868e94dd8d7d418e5088ce64f7daab4b36db654cbaedb46f9d1ca80"}, + {file = "PyJWT-2.5.0.tar.gz", hash = 
"sha256:e77ab89480905d86998442ac5788f35333fa85f65047a534adc38edf3c88fc3b"}, +] +pyparsing = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = 
"pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] +pytest = [ + {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, + {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, +] +pytest-flask = [ + {file = 
"pytest-flask-1.2.0.tar.gz", hash = "sha256:46fde652f77777bf02dc91205aec4ce20cdf2acbbbd66a918ab91f5c14693d3d"}, + {file = "pytest_flask-1.2.0-py3-none-any.whl", hash = "sha256:fe25b39ad0db09c3d1fe728edecf97ced85e774c775db259a6d25f0270a4e7c9"}, +] +pytest-flask-sqlalchemy = [ + {file = "pytest-flask-sqlalchemy-1.1.0.tar.gz", hash = "sha256:db71a57b90435e5d854b21c37a2584056d6fc3ddb28c09d8d0a2546bd6e390ff"}, + {file = "pytest_flask_sqlalchemy-1.1.0-py3-none-any.whl", hash = "sha256:b9f272d5c4092fcbe4a6284e402a37cad84f5b9be3c0bbe1a11927f24c99ff83"}, +] +pytest-mock = [ + {file = "pytest-mock-3.9.0.tar.gz", hash = "sha256:c899a0dcc8a5f22930acd020b500abd5f956911f326864a3b979e4866e14da82"}, + {file = "pytest_mock-3.9.0-py3-none-any.whl", hash = "sha256:1a1b9264224d026932d6685a0f9cef3b61d91563c3e74af9fe5afb2767e13812"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-jose = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] +python-keycloak = [ + {file = "python-keycloak-2.6.0.tar.gz", hash = "sha256:08c530ff86f631faccb8033d9d9345cc3148cb2cf132ff7564f025292e4dbd96"}, + {file = "python_keycloak-2.6.0-py3-none-any.whl", hash = "sha256:a1ce102b978beb56d385319b3ca20992b915c2c12d15a2d0c23f1104882f3fb6"}, +] +pytz = [ + {file = "pytz-2022.4-py2.py3-none-any.whl", hash = "sha256:2c0784747071402c6e99f0bafdb7da0fa22645f06554c7ae06bf6358897e9c91"}, + {file = "pytz-2022.4.tar.gz", hash = "sha256:48ce799d83b6f8aab2020e369b627446696619e79645419610b9facd909b3174"}, +] +pytz-deprecation-shim = [ + {file = 
"pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, + {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, +] +pyupgrade = [ + {file = "pyupgrade-2.38.4-py2.py3-none-any.whl", hash = "sha256:944ff993c396ddc2b9012eb3de4cda138eb4c149b22c6c560d4c8bfd0e180982"}, + {file = "pyupgrade-2.38.4.tar.gz", hash = "sha256:1eb43a49f416752929741ba4d706bf3f33593d3cac9bdc217fc1ef55c047c1f4"}, +] +PyYAML = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = 
"PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + 
{file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, 
+ {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +regex = [ + {file = "regex-2022.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ab69b4fe09e296261377d209068d52402fb85ef89dc78a9ac4a29a895f4e24a7"}, + {file = "regex-2022.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5bc5f921be39ccb65fdda741e04b2555917a4bced24b4df14eddc7569be3b493"}, + {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:43eba5c46208deedec833663201752e865feddc840433285fbadee07b84b464d"}, + {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c68d2c04f7701a418ec2e5631b7f3552efc32f6bcc1739369c6eeb1af55f62e0"}, + {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:caa2734ada16a44ae57b229d45091f06e30a9a52ace76d7574546ab23008c635"}, + {file = "regex-2022.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef806f684f17dbd6263d72a54ad4073af42b42effa3eb42b877e750c24c76f86"}, + {file = "regex-2022.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be319f4eb400ee567b722e9ea63d5b2bb31464e3cf1b016502e3ee2de4f86f5c"}, + {file = 
"regex-2022.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:42bb37e2b2d25d958c25903f6125a41aaaa1ed49ca62c103331f24b8a459142f"}, + {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fbc88d3ba402b5d041d204ec2449c4078898f89c4a6e6f0ed1c1a510ef1e221d"}, + {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:91e0f7e7be77250b808a5f46d90bf0032527d3c032b2131b63dee54753a4d729"}, + {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3652bbe6720786b9137862205986f3ae54a09dec8499a995ed58292bdf77c2"}, + {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:878c626cbca3b649e14e972c14539a01191d79e58934e3f3ef4a9e17f90277f8"}, + {file = "regex-2022.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6df070a986fc064d865c381aecf0aaff914178fdf6874da2f2387e82d93cc5bd"}, + {file = "regex-2022.3.2-cp310-cp310-win32.whl", hash = "sha256:b549d851f91a4efb3e65498bd4249b1447ab6035a9972f7fc215eb1f59328834"}, + {file = "regex-2022.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:8babb2b5751105dc0aef2a2e539f4ba391e738c62038d8cb331c710f6b0f3da7"}, + {file = "regex-2022.3.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1977bb64264815d3ef016625adc9df90e6d0e27e76260280c63eca993e3f455f"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e73652057473ad3e6934944af090852a02590c349357b79182c1b681da2c772"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b22ff939a8856a44f4822da38ef4868bd3a9ade22bb6d9062b36957c850e404f"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:878f5d649ba1db9f52cc4ef491f7dba2d061cdc48dd444c54260eebc0b1729b9"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0008650041531d0eadecc96a73d37c2dc4821cf51b0766e374cb4f1ddc4e1c14"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06b1df01cf2aef3a9790858af524ae2588762c8a90e784ba00d003f045306204"}, + {file = "regex-2022.3.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57484d39447f94967e83e56db1b1108c68918c44ab519b8ecfc34b790ca52bf7"}, + {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:74d86e8924835f863c34e646392ef39039405f6ce52956d8af16497af4064a30"}, + {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:ae17fc8103f3b63345709d3e9654a274eee1c6072592aec32b026efd401931d0"}, + {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:5f92a7cdc6a0ae2abd184e8dfd6ef2279989d24c85d2c85d0423206284103ede"}, + {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:5dcc4168536c8f68654f014a3db49b6b4a26b226f735708be2054314ed4964f4"}, + {file = "regex-2022.3.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:1e30762ddddb22f7f14c4f59c34d3addabc789216d813b0f3e2788d7bcf0cf29"}, + {file = "regex-2022.3.2-cp36-cp36m-win32.whl", hash = "sha256:286ff9ec2709d56ae7517040be0d6c502642517ce9937ab6d89b1e7d0904f863"}, + {file = "regex-2022.3.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d326ff80ed531bf2507cba93011c30fff2dd51454c85f55df0f59f2030b1687b"}, + {file = "regex-2022.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:9d828c5987d543d052b53c579a01a52d96b86f937b1777bbfe11ef2728929357"}, + {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c87ac58b9baaf50b6c1b81a18d20eda7e2883aa9a4fb4f1ca70f2e443bfcdc57"}, + {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d6c2441538e4fadd4291c8420853431a229fcbefc1bf521810fbc2629d8ae8c2"}, + {file = 
"regex-2022.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3356afbb301ec34a500b8ba8b47cba0b44ed4641c306e1dd981a08b416170b5"}, + {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d96eec8550fd2fd26f8e675f6d8b61b159482ad8ffa26991b894ed5ee19038b"}, + {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf668f26604e9f7aee9f8eaae4ca07a948168af90b96be97a4b7fa902a6d2ac1"}, + {file = "regex-2022.3.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb0e2845e81bdea92b8281a3969632686502565abf4a0b9e4ab1471c863d8f3"}, + {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:87bc01226cd288f0bd9a4f9f07bf6827134dc97a96c22e2d28628e824c8de231"}, + {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:09b4b6ccc61d4119342b26246ddd5a04accdeebe36bdfe865ad87a0784efd77f"}, + {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:9557545c10d52c845f270b665b52a6a972884725aa5cf12777374e18f2ea8960"}, + {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:0be0c34a39e5d04a62fd5342f0886d0e57592a4f4993b3f9d257c1f688b19737"}, + {file = "regex-2022.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7b103dffb9f6a47ed7ffdf352b78cfe058b1777617371226c1894e1be443afec"}, + {file = "regex-2022.3.2-cp37-cp37m-win32.whl", hash = "sha256:f8169ec628880bdbca67082a9196e2106060a4a5cbd486ac51881a4df805a36f"}, + {file = "regex-2022.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:4b9c16a807b17b17c4fa3a1d8c242467237be67ba92ad24ff51425329e7ae3d0"}, + {file = "regex-2022.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:67250b36edfa714ba62dc62d3f238e86db1065fccb538278804790f578253640"}, + {file = "regex-2022.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:5510932596a0f33399b7fff1bd61c59c977f2b8ee987b36539ba97eb3513584a"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f7ee2289176cb1d2c59a24f50900f8b9580259fa9f1a739432242e7d254f93"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d7a68fa53688e1f612c3246044157117403c7ce19ebab7d02daf45bd63913e"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaf5317c961d93c1a200b9370fb1c6b6836cc7144fef3e5a951326912bf1f5a3"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad397bc7d51d69cb07ef89e44243f971a04ce1dca9bf24c992c362406c0c6573"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:297c42ede2c81f0cb6f34ea60b5cf6dc965d97fa6936c11fc3286019231f0d66"}, + {file = "regex-2022.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:af4d8cc28e4c7a2f6a9fed544228c567340f8258b6d7ea815b62a72817bbd178"}, + {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:452519bc4c973e961b1620c815ea6dd8944a12d68e71002be5a7aff0a8361571"}, + {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cb34c2d66355fb70ae47b5595aafd7218e59bb9c00ad8cc3abd1406ca5874f07"}, + {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d146e5591cb67c5e836229a04723a30af795ef9b70a0bbd913572e14b7b940f"}, + {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:03299b0bcaa7824eb7c0ebd7ef1e3663302d1b533653bfe9dc7e595d453e2ae9"}, + {file = "regex-2022.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9ccb0a4ab926016867260c24c192d9df9586e834f5db83dfa2c8fffb3a6e5056"}, + {file = "regex-2022.3.2-cp38-cp38-win32.whl", hash = 
"sha256:f7e8f1ee28e0a05831c92dc1c0c1c94af5289963b7cf09eca5b5e3ce4f8c91b0"}, + {file = "regex-2022.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:35ed2f3c918a00b109157428abfc4e8d1ffabc37c8f9abc5939ebd1e95dabc47"}, + {file = "regex-2022.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:55820bc631684172b9b56a991d217ec7c2e580d956591dc2144985113980f5a3"}, + {file = "regex-2022.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:83f03f0bd88c12e63ca2d024adeee75234d69808b341e88343b0232329e1f1a1"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42d6007722d46bd2c95cce700181570b56edc0dcbadbfe7855ec26c3f2d7e008"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:320c2f4106962ecea0f33d8d31b985d3c185757c49c1fb735501515f963715ed"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbd3fe37353c62fd0eb19fb76f78aa693716262bcd5f9c14bb9e5aca4b3f0dc4"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e51ad1e6131c496b58d317bc9abec71f44eb1957d32629d06013a21bc99cac"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72bc3a5effa5974be6d965ed8301ac1e869bc18425c8a8fac179fbe7876e3aee"}, + {file = "regex-2022.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e5602a9b5074dcacc113bba4d2f011d2748f50e3201c8139ac5b68cf2a76bd8b"}, + {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:729aa8ca624c42f309397c5fc9e21db90bf7e2fdd872461aabdbada33de9063c"}, + {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d6ecfd1970b3380a569d7b3ecc5dd70dba295897418ed9e31ec3c16a5ab099a5"}, + {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:13bbf0c9453c6d16e5867bda7f6c0c7cff1decf96c5498318bb87f8136d2abd4"}, + {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:58ba41e462653eaf68fc4a84ec4d350b26a98d030be1ab24aba1adcc78ffe447"}, + {file = "regex-2022.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c0446b2871335d5a5e9fcf1462f954586b09a845832263db95059dcd01442015"}, + {file = "regex-2022.3.2-cp39-cp39-win32.whl", hash = "sha256:20e6a27959f162f979165e496add0d7d56d7038237092d1aba20b46de79158f1"}, + {file = "regex-2022.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:9efa41d1527b366c88f265a227b20bcec65bda879962e3fc8a2aee11e81266d7"}, + {file = "regex-2022.3.2.tar.gz", hash = "sha256:79e5af1ff258bc0fe0bdd6f69bc4ae33935a898e3cbefbbccf22e88a27fa053b"}, +] +reorder-python-imports = [ + {file = "reorder_python_imports-3.8.3-py2.py3-none-any.whl", hash = "sha256:783f9575f76dd21ae5de974edf514cebc2b6904bbfe7cda515c24f1815fa22bf"}, + {file = "reorder_python_imports-3.8.3.tar.gz", hash = "sha256:cb622aa0ea505972b59cc01aa26c08fb17d39a8fc62ff488288908725927d968"}, +] +requests = [ + {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, + {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, +] +requests-toolbelt = [ + {file = "requests-toolbelt-0.9.1.tar.gz", hash = "sha256:968089d4584ad4ad7c171454f0a5c6dac23971e9472521ea3b6d49d610aa6fc0"}, + {file = "requests_toolbelt-0.9.1-py2.py3-none-any.whl", hash = "sha256:380606e1d10dc85c3bd47bf5a6095f815ec007be7a8b69c878507068df059e6f"}, +] +RestrictedPython = [ + {file = "RestrictedPython-5.2-py2.py3-none-any.whl", hash = "sha256:fdf8621034c5dcb990a2a198f232f66b2d48866dd16d848e00ac7d187ae452ba"}, + {file = "RestrictedPython-5.2.tar.gz", hash = "sha256:634da1f6c5c122a262f433b083ee3d17a9a039f8f1b3778597efb47461cd361b"}, +] +restructuredtext-lint = [ + {file = "restructuredtext_lint-1.4.0.tar.gz", hash = 
"sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45"}, +] +rsa = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] +"ruamel.yaml" = [ + {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, + {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, +] +"ruamel.yaml.clib" = [ + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6e7be2c5bcb297f5b82fee9c665eb2eb7001d1050deaba8471842979293a80b0"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:066f886bc90cc2ce44df8b5f7acfc6a7e2b2e672713f027136464492b0c34d7c"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:221eca6f35076c6ae472a531afa1c223b9c29377e62936f61bc8e6e8bdc5f9e7"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win32.whl", hash = "sha256:1070ba9dd7f9370d0513d649420c3b362ac2d687fe78c6e888f5b12bf8bc7bee"}, + {file = "ruamel.yaml.clib-0.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:77df077d32921ad46f34816a9a16e6356d8100374579bc35e15bab5d4e9377de"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:cfdb9389d888c5b74af297e51ce357b800dd844898af9d4a547ffc143fa56751"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7b2927e92feb51d830f531de4ccb11b320255ee95e791022555971c466af4527"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win32.whl", hash = "sha256:ada3f400d9923a190ea8b59c8f60680c4ef8a4b0dfae134d2f2ff68429adfab5"}, + {file = "ruamel.yaml.clib-0.2.6-cp35-cp35m-win_amd64.whl", hash = 
"sha256:de9c6b8a1ba52919ae919f3ae96abb72b994dd0350226e28f3686cb4f142165c"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d67f273097c368265a7b81e152e07fb90ed395df6e552b9fa858c6d2c9f42502"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:72a2b8b2ff0a627496aad76f37a652bcef400fd861721744201ef1b45199ab78"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d3c620a54748a3d4cf0bcfe623e388407c8e85a4b06b8188e126302bcab93ea8"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win32.whl", hash = "sha256:9efef4aab5353387b07f6b22ace0867032b900d8e91674b5d8ea9150db5cae94"}, + {file = "ruamel.yaml.clib-0.2.6-cp36-cp36m-win_amd64.whl", hash = "sha256:846fc8336443106fe23f9b6d6b8c14a53d38cef9a375149d61f99d78782ea468"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0847201b767447fc33b9c235780d3aa90357d20dd6108b92be544427bea197dd"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:78988ed190206672da0f5d50c61afef8f67daa718d614377dcd5e3ed85ab4a99"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:210c8fcfeff90514b7133010bf14e3bad652c8efde6b20e00c43854bf94fa5a6"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win32.whl", hash = "sha256:a49e0161897901d1ac9c4a79984b8410f450565bbad64dbfcbf76152743a0cdb"}, + {file = "ruamel.yaml.clib-0.2.6-cp37-cp37m-win_amd64.whl", hash = "sha256:bf75d28fa071645c529b5474a550a44686821decebdd00e21127ef1fd566eabe"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a32f8d81ea0c6173ab1b3da956869114cae53ba1e9f72374032e33ba3118c233"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7f7ecb53ae6848f959db6ae93bdff1740e651809780822270eab111500842a84"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:61bc5e5ca632d95925907c569daa559ea194a4d16084ba86084be98ab1cec1c6"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win32.whl", hash = "sha256:89221ec6d6026f8ae859c09b9718799fea22c0e8da8b766b0b2c9a9ba2db326b"}, + {file = "ruamel.yaml.clib-0.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:31ea73e564a7b5fbbe8188ab8b334393e06d997914a4e184975348f204790277"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc6a613d6c74eef5a14a214d433d06291526145431c3b964f5e16529b1842bed"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1866cf2c284a03b9524a5cc00daca56d80057c5ce3cdc86a52020f4c720856f0"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1b4139a6ffbca8ef60fdaf9b33dec05143ba746a6f0ae0f9d11d38239211d335"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win32.whl", hash = "sha256:3fb9575a5acd13031c57a62cc7823e5d2ff8bc3835ba4d94b921b4e6ee664104"}, + {file = "ruamel.yaml.clib-0.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:825d5fccef6da42f3c8eccd4281af399f21c02b32d98e113dbc631ea6a6ecbc7"}, + {file = "ruamel.yaml.clib-0.2.6.tar.gz", hash = "sha256:4ff604ce439abb20794f05613c374759ce10e3595d1867764dd1ae675b85acbd"}, +] +safety = [ + {file = "safety-2.3.1-py3-none-any.whl", hash = "sha256:8f098d12b607db2756886280e85c28ece8db1bba4f45fc5f981f4663217bd619"}, + {file = "safety-2.3.1.tar.gz", hash = "sha256:6e6fcb7d4e8321098cf289f59b65051cafd3467f089c6e57c9f894ae32c23b71"}, +] +sentry-sdk = [ + {file = "sentry-sdk-1.9.10.tar.gz", hash = "sha256:4fbace9a763285b608c06f01a807b51acb35f6059da6a01236654e08b0ee81ff"}, + {file = "sentry_sdk-1.9.10-py2.py3-none-any.whl", hash = "sha256:2469240f6190aaebcb453033519eae69cfe8cc602065b4667e18ee14fc1e35dc"}, +] +setuptools = [ + {file = "setuptools-65.4.1-py3-none-any.whl", hash = "sha256:1b6bdc6161661409c5f21508763dc63ab20a9ac2f8ba20029aaaa7fdb9118012"}, + {file = "setuptools-65.4.1.tar.gz", hash = 
"sha256:3050e338e5871e70c72983072fe34f6032ae1cdeeeb67338199c2f74e083a80e"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +smmap = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] +snowballstemmer = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] +soupsieve = [ + {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, + {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, +] +Sphinx = [ + {file = "Sphinx-5.2.3.tar.gz", hash = "sha256:5b10cb1022dac8c035f75767799c39217a05fc0fe2d6fe5597560d38e44f0363"}, + {file = "sphinx-5.2.3-py3-none-any.whl", hash = "sha256:7abf6fabd7b58d0727b7317d5e2650ef68765bbe0ccb63c8795fa8683477eaa2"}, +] +sphinx-autoapi = [ + {file = "sphinx-autoapi-1.9.0.tar.gz", hash = "sha256:c897ea337df16ad0cde307cbdfe2bece207788dde1587fa4fc8b857d1fc5dcba"}, + {file = "sphinx_autoapi-1.9.0-py2.py3-none-any.whl", hash = "sha256:d217953273b359b699d8cb81a5a72985a3e6e15cfe3f703d9a3c201ffc30849b"}, +] +sphinx-autobuild = [ + {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, + {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, +] 
+sphinx-basic-ng = [ + {file = "sphinx_basic_ng-1.0.0b1-py3-none-any.whl", hash = "sha256:ade597a3029c7865b24ad0eda88318766bcc2f9f4cef60df7e28126fde94db2a"}, + {file = "sphinx_basic_ng-1.0.0b1.tar.gz", hash = "sha256:89374bd3ccd9452a301786781e28c8718e99960f2d4f411845ea75fc7bb5a9b0"}, +] +sphinx-click = [ + {file = "sphinx-click-4.3.0.tar.gz", hash = "sha256:bd4db5d3c1bec345f07af07b8e28a76cfc5006d997984e38ae246bbf8b9a3b38"}, + {file = "sphinx_click-4.3.0-py3-none-any.whl", hash = "sha256:23e85a3cb0b728a421ea773699f6acadefae171d1a764a51dd8ec5981503ccbe"}, +] +sphinxcontrib-applehelp = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] +sphinxcontrib-devhelp = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] +sphinxcontrib-htmlhelp = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] +sphinxcontrib-jsmath = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] +sphinxcontrib-qthelp = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = 
"sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] +sphinxcontrib-serializinghtml = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] +SpiffWorkflow = [] +SQLAlchemy = [ + {file = "SQLAlchemy-1.4.41-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:13e397a9371ecd25573a7b90bd037db604331cf403f5318038c46ee44908c44d"}, + {file = "SQLAlchemy-1.4.41-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2d6495f84c4fd11584f34e62f9feec81bf373787b3942270487074e35cbe5330"}, + {file = "SQLAlchemy-1.4.41-cp27-cp27m-win32.whl", hash = "sha256:e570cfc40a29d6ad46c9aeaddbdcee687880940a3a327f2c668dd0e4ef0a441d"}, + {file = "SQLAlchemy-1.4.41-cp27-cp27m-win_amd64.whl", hash = "sha256:5facb7fd6fa8a7353bbe88b95695e555338fb038ad19ceb29c82d94f62775a05"}, + {file = "SQLAlchemy-1.4.41-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f37fa70d95658763254941ddd30ecb23fc4ec0c5a788a7c21034fc2305dab7cc"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:361f6b5e3f659e3c56ea3518cf85fbdae1b9e788ade0219a67eeaaea8a4e4d2a"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0990932f7cca97fece8017414f57fdd80db506a045869d7ddf2dda1d7cf69ecc"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd767cf5d7252b1c88fcfb58426a32d7bd14a7e4942497e15b68ff5d822b41ad"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5102fb9ee2c258a2218281adcb3e1918b793c51d6c2b4666ce38c35101bb940e"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-win32.whl", hash = "sha256:2082a2d2fca363a3ce21cfa3d068c5a1ce4bf720cf6497fb3a9fc643a8ee4ddd"}, + {file = "SQLAlchemy-1.4.41-cp310-cp310-win_amd64.whl", hash = "sha256:e4b12e3d88a8fffd0b4ca559f6d4957ed91bd4c0613a4e13846ab8729dc5c251"}, + {file = "SQLAlchemy-1.4.41-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:90484a2b00baedad361402c257895b13faa3f01780f18f4a104a2f5c413e4536"}, + {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b67fc780cfe2b306180e56daaa411dd3186bf979d50a6a7c2a5b5036575cbdbb"}, + {file = "SQLAlchemy-1.4.41-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad2b727fc41c7f8757098903f85fafb4bf587ca6605f82d9bf5604bd9c7cded"}, + {file = "SQLAlchemy-1.4.41-cp311-cp311-win32.whl", hash = "sha256:59bdc291165b6119fc6cdbc287c36f7f2859e6051dd923bdf47b4c55fd2f8bd0"}, + {file = "SQLAlchemy-1.4.41-cp311-cp311-win_amd64.whl", hash = "sha256:d2e054aed4645f9b755db85bc69fc4ed2c9020c19c8027976f66576b906a74f1"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:4ba7e122510bbc07258dc42be6ed45997efdf38129bde3e3f12649be70683546"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0dcf127bb99458a9d211e6e1f0f3edb96c874dd12f2503d4d8e4f1fd103790b"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e16c2be5cb19e2c08da7bd3a87fed2a0d4e90065ee553a940c4fc1a0fb1ab72b"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5ebeeec5c14533221eb30bad716bc1fd32f509196318fb9caa7002c4a364e4c"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-win32.whl", hash = 
"sha256:3e2ef592ac3693c65210f8b53d0edcf9f4405925adcfc031ff495e8d18169682"}, + {file = "SQLAlchemy-1.4.41-cp36-cp36m-win_amd64.whl", hash = "sha256:eb30cf008850c0a26b72bd1b9be6730830165ce049d239cfdccd906f2685f892"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c23d64a0b28fc78c96289ffbd0d9d1abd48d267269b27f2d34e430ea73ce4b26"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8eb8897367a21b578b26f5713833836f886817ee2ffba1177d446fa3f77e67c8"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:14576238a5f89bcf504c5f0a388d0ca78df61fb42cb2af0efe239dc965d4f5c9"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639e1ae8d48b3c86ffe59c0daa9a02e2bfe17ca3d2b41611b30a0073937d4497"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-win32.whl", hash = "sha256:0005bd73026cd239fc1e8ccdf54db58b6193be9a02b3f0c5983808f84862c767"}, + {file = "SQLAlchemy-1.4.41-cp37-cp37m-win_amd64.whl", hash = "sha256:5323252be2bd261e0aa3f33cb3a64c45d76829989fa3ce90652838397d84197d"}, + {file = "SQLAlchemy-1.4.41-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:05f0de3a1dc3810a776275763764bb0015a02ae0f698a794646ebc5fb06fad33"}, + {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0002e829142b2af00b4eaa26c51728f3ea68235f232a2e72a9508a3116bd6ed0"}, + {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:22ff16cedab5b16a0db79f1bc99e46a6ddececb60c396562e50aab58ddb2871c"}, + {file = "SQLAlchemy-1.4.41-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccfd238f766a5bb5ee5545a62dd03f316ac67966a6a658efb63eeff8158a4bbf"}, + {file = 
"SQLAlchemy-1.4.41-cp38-cp38-win32.whl", hash = "sha256:58bb65b3274b0c8a02cea9f91d6f44d0da79abc993b33bdedbfec98c8440175a"}, + {file = "SQLAlchemy-1.4.41-cp38-cp38-win_amd64.whl", hash = "sha256:ce8feaa52c1640de9541eeaaa8b5fb632d9d66249c947bb0d89dd01f87c7c288"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:199a73c31ac8ea59937cc0bf3dfc04392e81afe2ec8a74f26f489d268867846c"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676d51c9f6f6226ae8f26dc83ec291c088fe7633269757d333978df78d931ab"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:036d8472356e1d5f096c5e0e1a7e0f9182140ada3602f8fff6b7329e9e7cfbcd"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2307495d9e0ea00d0c726be97a5b96615035854972cc538f6e7eaed23a35886c"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-win32.whl", hash = "sha256:9c56e19780cd1344fcd362fd6265a15f48aa8d365996a37fab1495cae8fcd97d"}, + {file = "SQLAlchemy-1.4.41-cp39-cp39-win_amd64.whl", hash = "sha256:f5fa526d027d804b1f85cdda1eb091f70bde6fb7d87892f6dd5a48925bc88898"}, + {file = "SQLAlchemy-1.4.41.tar.gz", hash = "sha256:0292f70d1797e3c54e862e6f30ae474014648bc9c723e14a2fda730adb0a9791"}, +] +sqlalchemy-stubs = [ + {file = "sqlalchemy-stubs-0.4.tar.gz", hash = "sha256:c665d6dd4482ef642f01027fa06c3d5e91befabb219dc71fc2a09e7d7695f7ae"}, + {file = "sqlalchemy_stubs-0.4-py3-none-any.whl", hash = "sha256:5eec7aa110adf9b957b631799a72fef396b23ff99fe296df726645d01e312aa5"}, +] +stevedore = [ + {file = "stevedore-4.0.0-py3-none-any.whl", hash = "sha256:87e4d27fe96d0d7e4fc24f0cbe3463baae4ec51e81d95fbe60d2474636e0c7d8"}, + {file = "stevedore-4.0.0.tar.gz", hash = "sha256:f82cc99a1ff552310d19c379827c2c64dd9f85a38bcd5559db2470161867b786"}, +] +swagger-ui-bundle = [ + {file = 
"swagger_ui_bundle-0.0.9-py3-none-any.whl", hash = "sha256:cea116ed81147c345001027325c1ddc9ca78c1ee7319935c3c75d3669279d575"}, + {file = "swagger_ui_bundle-0.0.9.tar.gz", hash = "sha256:b462aa1460261796ab78fd4663961a7f6f347ce01760f1303bbbdf630f11f516"}, +] +tokenize-rt = [ + {file = "tokenize_rt-4.2.1-py2.py3-none-any.whl", hash = "sha256:08a27fa032a81cf45e8858d0ac706004fcd523e8463415ddf1442be38e204ea8"}, + {file = "tokenize_rt-4.2.1.tar.gz", hash = "sha256:0d4f69026fed520f8a1e0103aa36c406ef4661417f20ca643f913e33531b3b94"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +tornado = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] +typeguard = [ + {file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"}, + {file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"}, +] +types-pytz = [ + {file = "types-pytz-2022.4.0.0.tar.gz", hash = "sha256:17d66e4b16e80ceae0787726f3a22288df7d3f9fdebeb091dc64b92c0e4ea09d"}, + {file = "types_pytz-2022.4.0.0-py3-none-any.whl", hash = "sha256:950b0f3d64ed5b03a3e29c1e38fe2be8371c933c8e97922d0352345336eb8af4"}, +] +types-requests = [ + {file = "types-requests-2.28.11.1.tar.gz", hash = "sha256:02b1806c5b9904edcd87fa29236164aea0e6cdc4d93ea020cd615ef65cb43d65"}, + {file = "types_requests-2.28.11.1-py3-none-any.whl", hash = "sha256:1ff2c1301f6fe58b5d1c66cdf631ca19734cb3b1a4bbadc878d75557d183291a"}, +] +types-urllib3 = [ + {file = "types-urllib3-1.26.25.tar.gz", hash = "sha256:5aef0e663724eef924afa8b320b62ffef2c1736c1fa6caecfc9bc6c8ae2c3def"}, + {file = "types_urllib3-1.26.25-py3-none-any.whl", hash = "sha256:c1d78cef7bd581e162e46c20a57b2e1aa6ebecdcf01fd0713bb90978ff3e3427"}, +] +typing-extensions = [ + {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = 
"sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, + {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, +] +typing-inspect = [ + {file = "typing_inspect-0.8.0-py3-none-any.whl", hash = "sha256:5fbf9c1e65d4fa01e701fe12a5bca6c6e08a4ffd5bc60bfac028253a447c5188"}, + {file = "typing_inspect-0.8.0.tar.gz", hash = "sha256:8b1ff0c400943b6145df8119c41c244ca8207f1f10c9c057aeed1560e4806e3d"}, +] +tzdata = [ + {file = "tzdata-2022.4-py2.py3-none-any.whl", hash = "sha256:74da81ecf2b3887c94e53fc1d466d4362aaf8b26fc87cda18f22004544694583"}, + {file = "tzdata-2022.4.tar.gz", hash = "sha256:ada9133fbd561e6ec3d1674d3fba50251636e918aa97bd59d63735bef5a513bb"}, +] +tzlocal = [ + {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, + {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, +] +Unidecode = [ + {file = "Unidecode-1.3.6-py3-none-any.whl", hash = "sha256:547d7c479e4f377b430dd91ac1275d593308dce0fc464fb2ab7d41f82ec653be"}, + {file = "Unidecode-1.3.6.tar.gz", hash = "sha256:fed09cf0be8cf415b391642c2a5addfc72194407caee4f98719e40ec2a72b830"}, +] +urllib3 = [ + {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, + {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, +] +vine = [ + {file = "vine-5.0.0-py2.py3-none-any.whl", hash = "sha256:4c9dceab6f76ed92105027c49c823800dd33cacce13bdedc5b914e3514b7fb30"}, + {file = "vine-5.0.0.tar.gz", hash = "sha256:7d3b1624a953da82ef63462013bbd271d3eb75751489f9807598e8f340bd637e"}, +] +virtualenv = [ + {file = "virtualenv-20.16.5-py3-none-any.whl", hash = "sha256:d07dfc5df5e4e0dbc92862350ad87a36ed505b978f6c39609dc489eadd5b0d27"}, + {file = "virtualenv-20.16.5.tar.gz", hash = 
"sha256:227ea1b9994fdc5ea31977ba3383ef296d7472ea85be9d6732e42a91c04e80da"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +Werkzeug = [ + {file = "Werkzeug-2.2.2-py3-none-any.whl", hash = "sha256:f979ab81f58d7318e064e99c4506445d60135ac5cd2e177a2de0089bfd4c9bd5"}, + {file = "Werkzeug-2.2.2.tar.gz", hash = "sha256:7ea2d48322cc7c0f8b3a215ed73eabd7b5d75d0b50e31ab006286ccff9e00b8f"}, +] +wrapt = [ + {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:903500616422a40a98a5a3c4ff4ed9d0066f3b4c951fa286018ecdf0750194ef"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:5a9a0d155deafd9448baff28c08e150d9b24ff010e899311ddd63c45c2445e28"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ddaea91abf8b0d13443f6dac52e89051a5063c7d014710dcb4d4abb2ff811a59"}, + {file = "wrapt-1.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:36f582d0c6bc99d5f39cd3ac2a9062e57f3cf606ade29a0a0d6b323462f4dd87"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7ef58fb89674095bfc57c4069e95d7a31cfdc0939e2a579882ac7d55aadfd2a1"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:e2f83e18fe2f4c9e7db597e988f72712c0c3676d337d8b101f6758107c42425b"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ee2b1b1769f6707a8a445162ea16dddf74285c3964f605877a20e38545c3c462"}, + {file = "wrapt-1.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:833b58d5d0b7e5b9832869f039203389ac7cbf01765639c7309fd50ef619e0b1"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:80bb5c256f1415f747011dc3604b59bc1f91c6e7150bd7db03b19170ee06b320"}, + {file = "wrapt-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:07f7a7d0f388028b2df1d916e94bbb40624c59b48ecc6cbc232546706fac74c2"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02b41b633c6261feff8ddd8d11c711df6842aba629fdd3da10249a53211a72c4"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fe803deacd09a233e4762a1adcea5db5d31e6be577a43352936179d14d90069"}, + {file = "wrapt-1.14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:257fd78c513e0fb5cdbe058c27a0624c9884e735bbd131935fd49e9fe719d310"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4fcc4649dc762cddacd193e6b55bc02edca674067f5f98166d7713b193932b7f"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11871514607b15cfeb87c547a49bca19fde402f32e2b1c24a632506c0a756656"}, + {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, + {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, + {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, + {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = 
"sha256:a85d2b46be66a71bedde836d9e41859879cc54a2a04fad1191eb50c2066f6e9d"}, + {file = "wrapt-1.14.1-cp35-cp35m-win32.whl", hash = "sha256:dbcda74c67263139358f4d188ae5faae95c30929281bc6866d00573783c422b7"}, + {file = "wrapt-1.14.1-cp35-cp35m-win_amd64.whl", hash = "sha256:b21bb4c09ffabfa0e85e3a6b623e19b80e7acd709b9f91452b8297ace2a8ab00"}, + {file = "wrapt-1.14.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9e0fd32e0148dd5dea6af5fee42beb949098564cc23211a88d799e434255a1f4"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9736af4641846491aedb3c3f56b9bc5568d92b0692303b5a305301a95dfd38b1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b02d65b9ccf0ef6c34cba6cf5bf2aab1bb2f49c6090bafeecc9cd81ad4ea1c1"}, + {file = "wrapt-1.14.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21ac0156c4b089b330b7666db40feee30a5d52634cc4560e1905d6529a3897ff"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:9f3e6f9e05148ff90002b884fbc2a86bd303ae847e472f44ecc06c2cd2fcdb2d"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:6e743de5e9c3d1b7185870f480587b75b1cb604832e380d64f9504a0535912d1"}, + {file = "wrapt-1.14.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d79d7d5dc8a32b7093e81e97dad755127ff77bcc899e845f41bf71747af0c569"}, + {file = "wrapt-1.14.1-cp36-cp36m-win32.whl", hash = "sha256:81b19725065dcb43df02b37e03278c011a09e49757287dca60c5aecdd5a0b8ed"}, + {file = "wrapt-1.14.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b014c23646a467558be7da3d6b9fa409b2c567d2110599b7cf9a0c5992b3b471"}, + {file = "wrapt-1.14.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88bd7b6bd70a5b6803c1abf6bca012f7ed963e58c68d76ee20b9d751c74a3248"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b5901a312f4d14c59918c221323068fad0540e34324925c8475263841dbdfe68"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d77c85fedff92cf788face9bfa3ebaa364448ebb1d765302e9af11bf449ca36d"}, + {file = "wrapt-1.14.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d649d616e5c6a678b26d15ece345354f7c2286acd6db868e65fcc5ff7c24a77"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7d2872609603cb35ca513d7404a94d6d608fc13211563571117046c9d2bcc3d7"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ee6acae74a2b91865910eef5e7de37dc6895ad96fa23603d1d27ea69df545015"}, + {file = "wrapt-1.14.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2b39d38039a1fdad98c87279b48bc5dce2c0ca0d73483b12cb72aa9609278e8a"}, + {file = "wrapt-1.14.1-cp37-cp37m-win32.whl", hash = "sha256:60db23fa423575eeb65ea430cee741acb7c26a1365d103f7b0f6ec412b893853"}, + {file = "wrapt-1.14.1-cp37-cp37m-win_amd64.whl", hash = "sha256:709fe01086a55cf79d20f741f39325018f4df051ef39fe921b1ebe780a66184c"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c0ce1e99116d5ab21355d8ebe53d9460366704ea38ae4d9f6933188f327b456"}, + {file = "wrapt-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e3fb1677c720409d5f671e39bac6c9e0e422584e5f518bfd50aa4cbbea02433f"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642c2e7a804fcf18c222e1060df25fc210b9c58db7c91416fb055897fc27e8cc"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b7c050ae976e286906dd3f26009e117eb000fb2cf3533398c5ad9ccc86867b1"}, + {file = "wrapt-1.14.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ef3f72c9666bba2bab70d2a8b79f2c6d2c1a42a7f7e2b0ec83bb2f9e383950af"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:01c205616a89d09827986bc4e859bcabd64f5a0662a7fe95e0d359424e0e071b"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5a0f54ce2c092aaf439813735584b9537cad479575a09892b8352fea5e988dc0"}, + {file = "wrapt-1.14.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2cf71233a0ed05ccdabe209c606fe0bac7379fdcf687f39b944420d2a09fdb57"}, + {file = "wrapt-1.14.1-cp38-cp38-win32.whl", hash = "sha256:aa31fdcc33fef9eb2552cbcbfee7773d5a6792c137b359e82879c101e98584c5"}, + {file = "wrapt-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1967f46ea8f2db647c786e78d8cc7e4313dbd1b0aca360592d8027b8508e24d"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3232822c7d98d23895ccc443bbdf57c7412c5a65996c30442ebe6ed3df335383"}, + {file = "wrapt-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:988635d122aaf2bdcef9e795435662bcd65b02f4f4c1ae37fbee7401c440b3a7"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cca3c2cdadb362116235fdbd411735de4328c61425b0aa9f872fd76d02c4e86"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d52a25136894c63de15a35bc0bdc5adb4b0e173b9c0d07a2be9d3ca64a332735"}, + {file = "wrapt-1.14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e7bc81c9e2b2734ea4bc1aceb8a8f0ceaac7c5299bc5d69e37c44d9081d43b"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b9b7a708dd92306328117d8c4b62e2194d00c365f18eff11a9b53c6f923b01e3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6a9a25751acb379b466ff6be78a315e2b439d4c94c1e99cb7266d40a537995d3"}, + {file = "wrapt-1.14.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:34aa51c45f28ba7f12accd624225e2b1e5a3a45206aa191f6f9aac931d9d56fe"}, + {file = "wrapt-1.14.1-cp39-cp39-win32.whl", hash = "sha256:dee0ce50c6a2dd9056c20db781e9c1cfd33e77d2d569f5d1d9321c641bb903d5"}, + {file = "wrapt-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:dee60e1de1898bde3b238f18340eec6148986da0455d8ba7848d50470a7a32fb"}, + {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, +] +WTForms = [ + {file = "WTForms-3.0.1-py3-none-any.whl", hash = "sha256:837f2f0e0ca79481b92884962b914eba4e72b7a2daaf1f939c890ed0124b834b"}, + {file = "WTForms-3.0.1.tar.gz", hash = "sha256:6b351bbb12dd58af57ffef05bc78425d08d1914e0fd68ee14143b7ade023c5bc"}, +] +xdoctest = [ + {file = "xdoctest-1.1.0-py3-none-any.whl", hash = "sha256:da330c4dacee51f3c785820bc743188fb6f7c64c5fa1c54bff8836b3cf23d69b"}, + {file = "xdoctest-1.1.0.tar.gz", hash = "sha256:0fd4fad7932f0a2f082dfdfb857dd6ca41603757586c39b1e5b4d333fc389f8a"}, +] +zipp = [ + {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, + {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..3ff12de8 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,146 @@ +[tool.poetry] +name = "spiffworkflow-backend" +version = "0.0.0" +description = "Spiffworkflow Backend" +authors = ["Jason Lantz "] +license = "MIT" +readme = "README.rst" +homepage = "https://github.com/sartography/spiffworkflow-backend" +repository = "https://github.com/sartography/spiffworkflow-backend" +documentation = "https://spiffworkflow-backend.readthedocs.io" +classifiers = [ + "Development Status :: 1 - Planning", +] + +[tool.poetry.urls] +Changelog = "https://github.com/sartography/spiffworkflow-backend/releases" + +[tool.poetry.dependencies] +python = ">=3.9,<3.11" +click = "^8.0.1" +flask = "2.2.2" 
+flask-admin = "*" +flask-bcrypt = "*" +flask-cors = "*" +flask-mail = "*" +flask-marshmallow = "*" +flask-migrate = "*" +flask-restful = "*" +werkzeug = "*" +# go back to main once https://github.com/sartography/SpiffWorkflow/pull/241 is merged +SpiffWorkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "main"} +# SpiffWorkflow = {develop = true, path = "/Users/kevin/projects/github/sartography/SpiffWorkflow"} +# SpiffWorkflow = {develop = true, path = "/home/jason/projects/github/sartography/SpiffWorkflow"} +sentry-sdk = "^1.9.10" +sphinx-autoapi = "^1.8.4" +# flask-bpmn = {develop = true, path = "/home/jason/projects/github/sartography/flask-bpmn"} +# flask-bpmn = {develop = true, path = "/Users/kevin/projects/github/sartography/flask-bpmn"} +flask-bpmn = {git = "https://github.com/sartography/flask-bpmn", rev = "main"} +mysql-connector-python = "^8.0.29" +pytest-flask = "^1.2.0" +pytest-flask-sqlalchemy = "^1.1.0" +psycopg2 = "^2.9.3" +typing-extensions = "^4.3.0" +connexion = {extras = [ "swagger-ui",], version = "^2"} +lxml = "^4.9.1" +marshmallow-enum = "^1.5.1" +marshmallow-sqlalchemy = "^0.28.0" +PyJWT = "^2.4.0" +gunicorn = "^20.1.0" +types-pytz = "^2022.1.1" +python-keycloak = "^2.5.0" +APScheduler = "^3.9.1" +types-requests = "^2.28.6" +Jinja2 = "^3.1.2" +RestrictedPython = "^5.2" +Flask-SQLAlchemy = "^3" +orjson = "^3.8.0" + + +[tool.poetry.dev-dependencies] +pytest = "^7.1.2" +coverage = {extras = ["toml"], version = "^6.1"} +safety = "^2.3.1" +mypy = ">=0.961" +typeguard = "^2.13.2" +xdoctest = {extras = ["colors"], version = "^1.0.1"} +sphinx = "^5.0.2" +sphinx-autobuild = ">=2021.3.14" +pre-commit = "^2.20.0" +flake8 = "^4.0.1" +black = ">=21.10b0" +flake8-bandit = "^2.1.2" + +# 1.7.3 broke us. 
https://github.com/PyCQA/bandit/issues/841 +bandit = "1.7.2" + +flake8-bugbear = "^22.7.1" +flake8-docstrings = "^1.6.0" +flake8-rst-docstrings = "^0.2.7" +# flask-sqlalchemy-stubs = "^0.2" +pep8-naming = "^0.13.2" +darglint = "^1.8.1" +reorder-python-imports = "^3.8.1" +pre-commit-hooks = "^4.0.1" +sphinx-click = "^4.3.0" +Pygments = "^2.10.0" +pyupgrade = "^2.37.1" +furo = ">=2021.11.12" +MonkeyType = "^22.2.0" +sqlalchemy-stubs = "^0.4" + +[tool.poetry.scripts] +spiffworkflow-backend = "spiffworkflow_backend.__main__:main" + +[tool.pytest.ini_options] +# ignore deprecation warnings from various packages that we don't control +filterwarnings = [ + # note the use of single quote below to denote "raw" strings in TOML + # kombu/utils/compat.py:82 + 'ignore:SelectableGroups dict interface is deprecated. Use select.', + # flask_marshmallow/__init__.py:34 + # marshmallow_sqlalchemy/convert.py:17 + 'ignore:distutils Version classes are deprecated. Use packaging.version instead.', + # connexion/spec.py:50 + 'ignore:Passing a schema to Validator.iter_errors is deprecated and will be removed in a future release', + # connexion/decorators/validation.py:16 + 'ignore:Accessing jsonschema.draft4_format_checker is deprecated and will be removed in a future release.', + # connexion/apis/flask_api.py:236 + "ignore:'_request_ctx_stack' is deprecated and will be removed in Flask 2.3", + "ignore:Setting 'json_encoder' on the app or a blueprint is deprecated and will be removed in Flask 2.3", + "ignore:'JSONEncoder' is deprecated and will be removed in Flask 2.3", + "ignore:'app.json_encoder' is deprecated and will be removed in Flask 2.3" +] + +[tool.coverage.paths] +source = ["src", "*/site-packages"] +tests = ["tests", "*/tests"] + +[tool.coverage.run] +branch = true +source = ["spiffworkflow_backend", "tests"] + +[tool.coverage.report] +show_missing = true +fail_under = 50 + +[tool.mypy] +strict = true +disallow_any_generics = false +warn_unreachable = true +pretty = true 
+show_column_numbers = true +show_error_codes = true +show_error_context = true +plugins = "sqlmypy" + +# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option +implicit_reexport = true + +# allow for subdirs to NOT require __init__.py +namespace_packages = true +explicit_package_bases = false + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/sonar-project.properties b/sonar-project.properties new file mode 100644 index 00000000..cf1276ab --- /dev/null +++ b/sonar-project.properties @@ -0,0 +1,13 @@ +sonar.organization=sartography +sonar.projectKey=sartography_spiffworkflow-backend +sonar.host.url=https://sonarcloud.io +sonar.python.version=3.9,3.10 +sonar.python.coverage.reportPaths=coverage.xml +sonar.test.inclusions=tests + +# it's finding "bugs" we don't care about in the deprecated UI +sonar.exclusions=migrations/**,bin/keycloak_test_server.py,src/spiffworkflow_backend/routes/admin_blueprint/templates/*.html + +sonar.coverage.exclusions=noxfile.py,conftest.py +# sonar.exclusions=crc/templates/*.html,docs/**,config/**,instance/**,migrations/**,postgres/**,readme_images/**,schema/**,templates/** +# sonar.sources=crc diff --git a/src/spiffworkflow_backend/__init__.py b/src/spiffworkflow_backend/__init__.py new file mode 100644 index 00000000..4960abc9 --- /dev/null +++ b/src/spiffworkflow_backend/__init__.py @@ -0,0 +1,139 @@ +"""__init__.""" +import os +from typing import Any + +import connexion # type: ignore +import flask.app +import flask.json +import sqlalchemy +from apscheduler.schedulers.background import BackgroundScheduler # type: ignore +from flask.json.provider import DefaultJSONProvider +from flask_bpmn.api.api_error import api_error_blueprint +from flask_bpmn.models.db import db +from flask_bpmn.models.db import migrate +from flask_cors import CORS # type: ignore +from flask_mail import Mail # type: ignore + +import spiffworkflow_backend.load_database_models 
# noqa: F401 +from spiffworkflow_backend.config import setup_config +from spiffworkflow_backend.routes.admin_blueprint.admin_blueprint import admin_blueprint +from spiffworkflow_backend.routes.process_api_blueprint import process_api_blueprint +from spiffworkflow_backend.routes.user_blueprint import user_blueprint +from spiffworkflow_backend.services.background_processing_service import ( + BackgroundProcessingService, +) + + +class MyJSONEncoder(DefaultJSONProvider): + """MyJSONEncoder.""" + + def default(self, obj: Any) -> Any: + """Default.""" + if hasattr(obj, "serialized"): + return obj.serialized + elif isinstance(obj, sqlalchemy.engine.row.Row): # type: ignore + return_dict = {} + for row_key in obj.keys(): + row_value = obj[row_key] + if hasattr(row_value, "__dict__"): + return_dict.update(row_value.__dict__) + else: + return_dict.update({row_key: row_value}) + return_dict.pop("_sa_instance_state") + return return_dict + return super().default(obj) + + def dumps(self, obj: Any, **kwargs: Any) -> Any: + """Dumps.""" + kwargs.setdefault("default", self.default) + return super().dumps(obj, **kwargs) + + +def start_scheduler(app: flask.app.Flask) -> None: + """Start_scheduler.""" + scheduler = BackgroundScheduler() + scheduler.add_job( + BackgroundProcessingService(app).process_message_instances_with_app_context, + "interval", + seconds=10, + ) + scheduler.add_job( + BackgroundProcessingService(app).run, + "interval", + seconds=30, + ) + scheduler.start() + + +def create_app() -> flask.app.Flask: + """Create_app.""" + # We need to create the sqlite database in a known location. + # If we rely on the app.instance_path without setting an environment + # variable, it will be one thing when we run flask db upgrade in the + # noxfile and another thing when the tests actually run. 
+ # instance_path is described more at https://flask.palletsprojects.com/en/2.1.x/config/ + connexion_app = connexion.FlaskApp( + __name__, server_args={"instance_path": os.environ.get("FLASK_INSTANCE_PATH")} + ) + app = connexion_app.app + app.config["CONNEXION_APP"] = connexion_app + app.config["SESSION_TYPE"] = "filesystem" + + if os.environ.get("FLASK_SESSION_SECRET_KEY") is None: + raise KeyError( + "Cannot find the secret_key from the environment. Please set FLASK_SESSION_SECRET_KEY" + ) + + app.secret_key = os.environ.get("FLASK_SESSION_SECRET_KEY") + + setup_config(app) + db.init_app(app) + migrate.init_app(app, db) + + app.register_blueprint(user_blueprint) + app.register_blueprint(process_api_blueprint) + app.register_blueprint(api_error_blueprint) + app.register_blueprint(admin_blueprint, url_prefix="/admin") + + origins_re = [ + r"^https?:\/\/%s(.*)" % o.replace(".", r"\.") + for o in app.config["CORS_ALLOW_ORIGINS"] + ] + CORS(app, origins=origins_re) + + connexion_app.add_api("api.yml", base_path="/v1.0") + + mail = Mail(app) + app.config["MAIL_APP"] = mail + + app.json = MyJSONEncoder(app) + + if app.config["PROCESS_WAITING_MESSAGES"]: + start_scheduler(app) + + configure_sentry(app) + + return app # type: ignore + + +def configure_sentry(app: flask.app.Flask) -> None: + """Configure_sentry.""" + import sentry_sdk + from flask import Flask + from sentry_sdk.integrations.flask import FlaskIntegration + + sentry_sample_rate = app.config.get("SENTRY_SAMPLE_RATE") + if sentry_sample_rate is None: + return + sentry_sdk.init( + dsn=app.config.get("SENTRY_DSN"), + integrations=[ + FlaskIntegration(), + ], + # Set traces_sample_rate to 1.0 to capture 100% + # of transactions for performance monitoring. + # We recommend adjusting this value in production. 
+ traces_sample_rate=float(sentry_sample_rate), + ) + + app = Flask(__name__) diff --git a/src/spiffworkflow_backend/__main__.py b/src/spiffworkflow_backend/__main__.py new file mode 100644 index 00000000..ff873c42 --- /dev/null +++ b/src/spiffworkflow_backend/__main__.py @@ -0,0 +1,13 @@ +"""Command-line interface.""" +import click + + +@click.command() +@click.version_option() +def main() -> None: + """Spiffworkflow Backend.""" + print("This does nothing") + + +if __name__ == "__main__": + main(prog_name="spiffworkflow-backend") # pragma: no cover diff --git a/src/spiffworkflow_backend/api.yml b/src/spiffworkflow_backend/api.yml new file mode 100755 index 00000000..597e4c1d --- /dev/null +++ b/src/spiffworkflow_backend/api.yml @@ -0,0 +1,2049 @@ +openapi: "3.0.2" +info: + version: 1.0.0 + title: Workflow Microservice + license: + name: MIT +servers: + - url: http://localhost:5000/v1.0 +security: + - jwt: ["secret"] + - oAuth2AuthCode: + - read_email + - uid + +paths: + /login: + parameters: + - name: redirect_url + in: query + required: false + schema: + type: string + get: + security: [] + summary: redirect to open id authentication server + operationId: spiffworkflow_backend.routes.user.login + tags: + - Authentication + responses: + "200": + description: Redirects to authentication server + /login_return: + parameters: + - name: code + in: query + required: true + schema: + type: string + - name: state + in: query + required: true + schema: + type: string + - name: session_state + in: query + required: false + schema: + type: string + get: + security: [] + operationId: spiffworkflow_backend.routes.user.login_return + tags: + - Authentication + responses: + "200": + description: Test Return Response + /logout: + parameters: + - name: id_token + in: query + required: true + schema: + type: string + - name: redirect_url + in: query + required: false + schema: + type: string + get: + security: [] + operationId: spiffworkflow_backend.routes.user.logout + summary: 
Logout authenticated user + tags: + - Authentication + responses: + "200": + description: Logout Authenticated User + /logout_return: + get: + security: [] + operationId: spiffworkflow_backend.routes.user.logout_return + summary: Logout authenticated user + tags: + - Authentication + responses: + "200": + description: Logout Authenticated User + + /login_api: + get: + security: [] + operationId: spiffworkflow_backend.routes.user.login_api + summary: Authenticate user for API access + tags: + - Authentication + responses: + "200": + description: Redirects to authentication server + /login_api_return: + parameters: + - name: code + in: query + required: true + schema: + type: string + - name: state + in: query + required: true + schema: + type: string + - name: session_state + in: query + required: false + schema: + type: string + get: + security: [] + operationId: spiffworkflow_backend.routes.user.login_api_return + tags: + - Authentication + responses: + "200": + description: Test Return Response + + /status: + get: + security: [] + operationId: spiffworkflow_backend.routes.process_api_blueprint.status + summary: Returns 200 if the server is Responding + tags: + - Liveness + - Status + responses: + "200": + description: The server is running. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /process-groups: + parameters: + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The number of groups to show per page. Defaults to page 10. 
+ schema: + type: integer + # process_groups_list + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_groups_list + summary: get list + tags: + - Process Groups + responses: + "200": + description: An array of process groups + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/ProcessModelCategory" + # process_group_add + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_add + summary: Add process group + tags: + - Process Groups + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + responses: + "201": + description: Processs Group + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + + /process-groups/{process_group_id}: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group. + schema: + type: string + # process_group_show + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_show + summary: Returns a single process group + tags: + - Process Groups + responses: + "200": + description: Processs Group. + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_delete + summary: Deletes a single process group + tags: + - Process Groups + responses: + "200": + description: The process group was deleted. 
+ put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_group_update + summary: Updates a single process group + tags: + - Process Groups + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + responses: + "200": + description: Process group updated successfully + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + + /process-models: + parameters: + - name: process_group_identifier + in: query + required: false + description: The group containing the models we want to return + schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The number of models to show per page. Defaults to page 10. + schema: + type: integer + # process_model_list + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_list + summary: Return a list of process models for a given process group + tags: + - Process Models + responses: + "200": + description: Successfully return the requested models + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/ProcessModel" + # process_model_add + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_add + summary: Creates a new process model with the given parameters. + tags: + - Process Models + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + responses: + "201": + description: Process model created successfully. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + + /process-models/{process_group_id}/{process_model_id}/files: + parameters: + - name: process_group_id + in: path + required: true + description: The group containing the models we want to return + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model to validate. + schema: + type: string + # add_file + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.add_file + summary: Add a new workflow spec file + tags: + - Process Model Files + requestBody: + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + responses: + "201": + description: Metadata about the uploaded file, but not the file content. + content: + application/json: + schema: + $ref: "#/components/schemas/File" + # get: + # operationId: spiffworkflow_backend.api.process_api_blueprint.get_files + # summary: Provide a list of workflow spec files for the given workflow_spec_id. IMPORTANT, only includes metadata, not the file content. + # tags: + # - Process Model Files + # responses: + # '200': + # description: An array of file descriptions (not the file content) + # content: + # application/json: + # schema: + # type: array + # items: + # $ref: "#/components/schemas/File" + + /process-models/{process_group_id}/{process_model_id}: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + # process_model_show + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_show + summary: Returns a single process model + tags: + - Process Models + responses: + "200": + description: Workflow spec. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + # process_model_delete + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_delete + summary: Removes an existing process model + tags: + - Process Models + responses: + "200": + description: The process model has been removed. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + # process model update + put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_update + summary: Modifies an existing process mosel with the given parameters. + tags: + - Process Models + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + responses: + "200": + description: Process model updated successfully. + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModel" + + /process-instances: + parameters: + - name: process_group_identifier + in: query + required: false + description: The unique id of an existing process group + schema: + type: string + - name: process_model_identifier + in: query + required: false + description: The unique id of an existing process model. + schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. 
+ schema: + type: integer + - name: start_from + in: query + required: false + description: For filtering - beginning of start window - in seconds since epoch + schema: + type: integer + - name: start_till + in: query + required: false + description: For filtering - end of start window - in seconds since epoch + schema: + type: integer + - name: end_from + in: query + required: false + description: For filtering - beginning of end window - in seconds since epoch + schema: + type: integer + - name: end_till + in: query + required: false + description: For filtering - end of end window - in seconds since epoch + schema: + type: integer + - name: process_status + in: query + required: false + description: For filtering - not_started, user_input_required, waiting, complete, faulted, or suspended + schema: + type: string + # process_instance_list + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_list + summary: Returns a list of process instances for a given process model + tags: + - Process Instances + responses: + "200": + description: Workflow. + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/script-unit-tests: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. 
+ schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_create + summary: Create script unit test based on given criteria + tags: + - Script Unit Test + responses: + "200": + description: Script Unit Test Result + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/script-unit-tests/run: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.script_unit_test_run + summary: Run a given script unit test. + tags: + - Script Unit Test + responses: + "200": + description: Script Unit Test Result + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/process-instances: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. 
+ schema: + type: string + # process_instance_create + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_create + summary: Creates an process instance from a process model and returns the instance + tags: + - Process Instances + responses: + "201": + description: Workflow generated successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_show + summary: Show information about a process instance + responses: + "200": + description: One Process Instance + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + # process_instance_delete + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_delete + summary: Deletes a single process instance + tags: + - Process Instances + responses: + "200": + description: The process instance was deleted. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. 
+ schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: do_engine_steps + in: query + required: false + description: Defaults to true, can be set to false if you are just looking at the workflow not completeing it. + schema: + type: boolean + # process_instance_run + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_run + summary: Run a process instance + tags: + - Process Instances + responses: + "200": + description: Returns details about the workflows state and current task + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/terminate: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_terminate + summary: Terminate a process instance + tags: + - Process Instances + responses: + "200": + description: Empty ok true response on successful termination. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /process-models/{process_group_id}/{process_model_id}/process-instances/reports: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. 
+ schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_list + summary: Returns all process instance reports for process model + tags: + - Process Instances + responses: + "200": + description: Workflow. + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_create + summary: Returns all process instance reports for process model + tags: + - Process Instances + responses: + "201": + description: The process instance report was created. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /process-models/{process_group_id}/{process_model_id}/process-instances/reports/{report_identifier}: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + - name: report_identifier + in: path + required: true + description: The unique id of an existing report + schema: + type: string + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. 
+ schema: + type: integer + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_show + summary: Returns a report of process instances for a given process model + tags: + - Process Instances + responses: + "200": + description: Workflow. + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Workflow" + put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_update + summary: Updates a process instance report + tags: + - Process Instances + responses: + "200": + description: The process instance report was updated. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_report_delete + summary: Delete a process instance report + tags: + - Process Instances + responses: + "200": + description: The process instance report was delete. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /process-models/{process_group_id}/{process_model_id}/files/{file_name}: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model to validate. + schema: + type: string + - name: file_name + in: path + required: true + description: The id of the spec file + schema: + type: string + # get_file + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.get_file + summary: Returns metadata about the file + tags: + - Process Model Files + responses: + "200": + description: Returns the file information requested. 
+ content: + application/json: + schema: + $ref: "#/components/schemas/File" + # process_model_file_update + put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_update + summary: save the contents to the given file + tags: + - Process Model Files + requestBody: + description: Log Pagination Request + required: false + content: + multipart/form-data: + schema: + type: object + properties: + file: + type: string + format: binary + responses: + "200": + description: Metadata about the uploaded file, but not the file content. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_model_file_delete + summary: Removes an existing process model file + tags: + - Process Model Files + responses: + "200": + description: The process model has been removed. + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /tasks: + parameters: + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + get: + tags: + - Tasks + # security: [] + operationId: spiffworkflow_backend.routes.process_api_blueprint.task_list_my_tasks + summary: returns the list of ready or waiting tasks for a user + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + + /process-instance/{process_instance_id}/tasks: + parameters: + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. + schema: + type: integer + - name: all_tasks + in: query + required: false + description: If true, this wil return all tasks associated with the process instance and not just user tasks. 
+ schema: + type: boolean + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_task_list + summary: returns the list of all user tasks associated with process instance + responses: + "200": + description: list of tasks + content: + application/json: + schema: + type: array + items: + $ref: "#/components/schemas/Task" + + /service_tasks: + get: + tags: + - Service Tasks + operationId: spiffworkflow_backend.routes.process_api_blueprint.service_tasks_show + summary: Gets all available service task connectors + responses: + "200": + description: All service task connectors + content: + application/json: + schema: + $ref: "#/components/schemas/ServiceTask" + + /tasks/{process_instance_id}/{task_id}: + parameters: + - name: task_id + in: path + required: true + description: The unique id of an existing process group. + schema: + type: string + - name: process_instance_id + in: path + required: true + description: The unique id of an existing process instance. 
+ schema: + type: integer + - name: terminate_loop + in: query + required: false + description: Terminate the loop on a looping task + schema: + type: boolean + get: + tags: + - Tasks + operationId: spiffworkflow_backend.routes.process_api_blueprint.task_show + summary: Gets one task that a user wants to complete + responses: + "200": + description: One task + content: + application/json: + schema: + $ref: "#/components/schemas/Task" + put: + tags: + - Tasks + operationId: spiffworkflow_backend.routes.process_api_blueprint.task_submit + summary: Update the form data for a tasks + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessModelCategory" + responses: + "200": + description: One task + content: + application/json: + schema: + $ref: "#/components/schemas/Task" + "202": + description: "ok: true" + content: + application/json: + schema: + $ref: "#/components/schemas/OkTrue" + + /messages: + parameters: + - name: process_instance_id + in: query + required: false + description: the id of the process instance + schema: + type: integer + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The number of models to show per page. Defaults to page 10. + schema: + type: integer + get: + tags: + - Messages + operationId: spiffworkflow_backend.routes.process_api_blueprint.message_instance_list + summary: Get a list of message instances + responses: + "200": + description: One task + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /messages/{message_identifier}: + parameters: + - name: message_identifier + in: path + required: true + description: The unique identifier of the message model. 
+ schema: + type: string + post: + tags: + - Messages + operationId: spiffworkflow_backend.routes.process_api_blueprint.message_start + summary: Instantiate and run a given process model with a message start event matching given identifier + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + responses: + "200": + description: One task + content: + application/json: + schema: + $ref: "#/components/schemas/Workflow" + + /process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs: + parameters: + - name: process_group_id + in: path + required: true + description: The unique id of an existing process group + schema: + type: string + - name: process_model_id + in: path + required: true + description: The unique id of an existing process model. + schema: + type: string + - name: process_instance_id + in: path + required: true + description: the id of the process instance + schema: + type: integer + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The number of items to show per page. Defaults to page 10. + schema: + type: integer + get: + tags: + - Process Instances + operationId: spiffworkflow_backend.routes.process_api_blueprint.process_instance_log_list + summary: returns a list of logs associated with the process instance + responses: + "200": + description: list of logs + content: + application/json: + schema: + $ref: "#/components/schemas/ProcessInstanceLog" + + /secrets: + parameters: + - name: page + in: query + required: false + description: The page number to return. Defaults to page 1. + schema: + type: integer + - name: per_page + in: query + required: false + description: The number of items to show per page. Defaults to page 10. 
+ schema: + type: integer + post: + operationId: spiffworkflow_backend.routes.process_api_blueprint.add_secret + summary: Create a secret for a key and value + tags: + - Secrets + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Secret" + responses: + "201": + description: Secret created successfully + content: + application/json: + schema: + type: number + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.secret_list + summary: Return list of all secrets + tags: + - Secrets + responses: + "200": + description: list of secrets + content: + application/json: + schema: + $ref: "#/components/schemas/Secret" + + /secrets/{key}: + parameters: + - name: key + in: path + required: true + description: The key we are using + schema: + type: string + get: + operationId: spiffworkflow_backend.routes.process_api_blueprint.get_secret + summary: Return a secret value for a key + tags: + - Secrets + responses: + "200": + description: We return a secret + content: + application/json: + schema: + $ref: "#/components/schemas/Secret" + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_secret + summary: Delete an existing secret + tags: + - Secrets + responses: + "204": + description: The secret is deleted + "401": + description: Unauthorized to delete secret + "404": + description: Secret does not exist + put: + operationId: spiffworkflow_backend.routes.process_api_blueprint.update_secret + summary: Modify an existing secret + tags: + - Secrets + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Secret" + responses: + "200": + description: Secret updated successfully + content: + application/json: + schema: + $ref: "#/components/schemas/Secret" + "401": + description: Unauthorized to update secret + "404": + description: Secret does not exist + + /secrets/allowed_process_paths: + post: + operationId: 
spiffworkflow_backend.routes.process_api_blueprint.add_allowed_process_path + summary: Create an allowed process to a secret + tags: + - Secrets + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SecretAllowedProcessPath" + responses: + "201": + description: Allowed process created successfully + content: + application/json: + schema: + $ref: "#/components/schemas/SecretAllowedProcessPath" + /secrets/allowed_process_paths/{allowed_process_path_id}: + parameters: + - name: allowed_process_path_id + in: path + required: true + description: The id of the allowed process path to delete + schema: + type: integer + delete: + operationId: spiffworkflow_backend.routes.process_api_blueprint.delete_allowed_process_path + summary: Delete an existing allowed process for a secret + tags: + - Secrets + responses: + "204": + description: The allowed process is deleted. + +components: + securitySchemes: + jwt: + type: http + scheme: bearer + bearerFormat: JWT + x-bearerInfoFunc: spiffworkflow_backend.routes.user.verify_token + x-scopeValidateFunc: spiffworkflow_backend.routes.user.validate_scope + + oAuth2AuthCode: + type: oauth2 + description: authenticate with openid server + flows: + authorizationCode: + authorizationUrl: /v1.0/login_api + tokenUrl: /v1.0/login_api_return + scopes: + read_email: read email + x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope + # oAuth2AuthCode: + # type: oauth2 + # description: authenticate with openid server + # flows: + # implicit: + # authorizationUrl: /v1.0/login_api + # scopes: + # uid: uid + # x-tokenInfoUrl: localhost:7000/v1.0/login_api_return + # x-tokenInfoFunc: spiffworkflow_backend.routes.user.get_scope + + schemas: + OkTrue: + properties: + ok: + type: boolean + example: true + User: + properties: + uid: + type: string + email_address: + type: string + display_name: + type: string + affiliation: + type: string + eppn: + type: string + first_name: + type: string + last_name: + type: 
string + title: + type: string + DataModel: + properties: + id: + type: string + ProcessModelDiffList: + properties: + workflow_spec_id: + type: string + example: top_level_workflow + created_at_in_seconds: + type: integer + location: + type: string + example: remote + new: + type: boolean + example: false + ProcessModelFilesList: + properties: + file_model_id: + type: integer + example: 171 + workflow_spec_id: + type: string + example: top_level_workflow + filename: + type: string + example: data_security_plan.dmn + created_at_in_seconds: + type: integer + type: + type: string + example: dmn + primary: + type: boolean + example: false + content_type: + type: string + example: text/xml + primary_process_id: + type: string + example: null + md5_hash: + type: string + example: f12e2bbd-a20c-673b-ccb8-a8a1ea9c5b7b + + ProcessModelFilesDiff: + properties: + filename: + type: string + example: data_security_plan.dmn + created_at_in_seconds: + type: integer + type: + type: string + example: dmn + primary: + type: boolean + example: false + content_type: + type: string + example: text/xml + primary_process_id: + type: string + example: null + md5_hash: + type: string + example: f12e2bbd-a20c-673b-ccb8-a8a1ea9c5b7b + location: + type: string + example: remote + new: + type: boolean + example: false + ProcessModelAll: + properties: + workflow_spec_id: + type: string + example: acaf1258-43b4-437e-8846-f612afa66811 + created_at_in_seconds: + type: integer + md5_hash: + type: string + example: c30fd597f21715018eab12f97f9d4956 + Study: + properties: + id: + type: integer + example: 1234 + title: + type: string + example: The impact of fried pickles on beer consumption in bipedal software developers. 
+ updated_at_in_seconds: + type: string + format: date_time + example: "2019-12-25T09:12:33.001Z" + user_uid: + type: string + example: dhf8r + status: + type: string + enum: ["in_progress", "hold", "open_for_enrollment", "abandoned"] + example: done + sponsor: + type: string + x-nullable: true + example: "Sartography Pharmaceuticals" + ind_number: + type: string + x-nullable: true + example: "27b-6-42" + StudyAssociate: + properties: + uid: + type: string + example: "dhf8r" + access: + type: boolean + example: False + role: + type: string + example: "TODO" + DocumentDirectory: + properties: + level: + type: string + x-nullable: true + example: "Ancillary Document" + description: + type: string + x-nullable: true + example: "Documents that are ancillary to the study" + file: + $ref: "#/components/schemas/File" + x-nullable: true + expanded: + type: boolean + example: False + filecount: + type: integer + example: 1 + children: + type: array + items: + $ref: "#/components/schemas/File" + DataStore: + properties: + id: + type: integer + example: 1234 + key: + type: string + example: MyKey + workflow_id: + type: integer + x-nullable: true + example: 12 + study_id: + type: integer + x-nullable: true + example: 42 + user_id: + type: string + x-nullable: true + example: dhf8r + task_id: + type: string + x-nullable: true + example: MyTask + process_model_id: + type: string + x-nullable: true + example: My Spec Name + value: + type: string + x-nullable: true + example: Some Value + + ProcessModel: + properties: + id: + type: string + name: + type: string + display_name: + type: string + description: + type: string + primary_process_id: + type: string + nullable: true + category_id: + type: string + nullable: true + standalone: + type: boolean + example: false + default: false + workflow_spec_category: + $ref: "#/components/schemas/ProcessModelCategory" + is_status: + type: boolean + nullable: true + ProcessModelCategory: + properties: + id: + type: string + name: + type: 
string + display_name: + type: string + display_order: + type: integer + workflows: + type: array + items: + $ref: "#/components/schemas/Workflow" + File: + properties: + id: + type: number + name: + type: string + example: "random_fact.bpmn" + version: + type: integer + updated_at_in_seconds: + type: string + format: date_time + example: "2019-12-25T09:12:33.001Z" + type: + type: string + primary: + type: boolean + content_type: + type: string + example: "application/xml" + workflow_spec_id: + type: string + example: "random_fact" + x-nullable: true + file: + type: string + Workflow: + properties: + id: + readOnly: true + type: integer + format: int64 + status: + type: string + enum: ["new", "user_input_required", "waiting", "complete"] + navigation: + type: array + items: + $ref: "#/components/schemas/NavigationItem" + next_task: + $ref: "#/components/schemas/Task" + workflow_spec_id: + type: string + spec_version: + type: string + is_latest_spec: + type: boolean + num_tasks_total: + type: integer + num_tasks_complete: + type: integer + num_tasks_incomplete: + type: integer + study_id: + type: integer + + example: + id: 291234 + status: "user_input_required" + workflow_spec_id: "random_fact" + spec_version: "v1.1 [22,23]" + is_latest_spec: True + next_task: + id: study_identification + name: Study Identification + title: IRB Review + documentation: "# Heading 1\n\nMarkdown documentation text goes here" + type: form + state: ready + Task: + properties: + id: + readOnly: true + type: string + name: + type: string + title: + type: string + type: + type: string + state: + type: string + form: + $ref: "#/components/schemas/Form" + documentation: + type: string + data: + type: object + multi_instance_type: + type: string + enum: ["none", "looping", "parallel", "sequential"] + multi_instance_count: + type: number + multi_instance_index: + type: number + process_name: + type: string + properties: + type: object + example: + id: study_identification + name: Study 
Identification + title: IRB Review + documentation: "# Heading 1\n\nMarkdown documentation text goes here" + type: form + state: ready + form: + "key": "irb_review_form" + "fields": + - "id": "irb_review_type" + "type": "enum" + "label": "Select IRB Review Type" + "options": + - id: "emergency_use" + name: "Emergency Use" + - id: "humanitarian_device" + name: "Humanitarian Device" + - id: "non_human" + name: "Non-Human" + - id: "non_uva_agent" + name: "Non-UVA Agent" + - id: "exempt" + name: "Exempt" + - id: "non_engaged" + name: "Non-Engaged" + - id: "expedited" + name: "Expedited" + - id: "full_board" + name: "Full Board" + "default_value": "Full Board" + "validation": + - name: "required" + config: "true" + "properties": + - id: "description" + value: "Description text goes here" + - id: "help" + value: "# Heading 1\n\nMarkdown help text goes here" + - id: "required_expression" + value: "model.my_boolean_field_id && model.my_enum_field_value !== 'something'" + - id: "hide_expression" + value: "model.my_enum_field_value === 'something'" + PaginatedTaskLog: + properties: + code: + example: "email_sent" + type: string + level: + example: "warning" + type: string + user: + example: "email_sent" + type: string + page: + type: integer + example: 0 + per_page: + type: integer + example: 10 + sort_column: + type: string + example: "timestamp" + sort_reverse: + type: boolean + example: false + items: + type: array + items: + $ref: "#/components/schemas/TaskLog" + has_next: + type: boolean + example: true + has_prev: + type: boolean + example: false + TaskLog: + properties: + level: + type: string + example: "info" + code: + example: "email_sent" + type: string + message: + example: "Approval email set to Jake in Accounting" + type: string + workflow_id: + example: 42 + type: integer + study_id: + example: 187 + type: integer + user_uid: + example: "dhf8r" + type: string + timestamp: + type: string + format: date_time + example: "2021-01-07T11:36:40.001Z" + TaskEvent: + 
properties: + workflow: + $ref: "#/components/schemas/Workflow" + study: + $ref: "#/components/schemas/Study" + workflow_sec: + $ref: "#/components/schemas/ProcessModel" + spec_version: + type: string + action: + type: string + task_id: + type: string + task_type: + type: string + task_lane: + type: string + form_data: + type: object + mi_type: + type: string + mi_count: + type: integer + mi_index: + type: integer + process_name: + type: string + date: + type: string + Form: + properties: + key: + type: string + fields: + type: array + items: + $ref: "#/components/schemas/Field" + example: + "key": "irb_review_form" + "fields": + - "id": "irb_review_type" + "type": "enum" + "label": "Select IRB Review Type" + "options": + - id: "emergency_use" + name: "Emergency Use" + - id: "humanitarian_device" + name: "Humanitarian Device" + - id: "non_human" + name: "Non-Human" + - id: "non_uva_agent" + name: "Non-UVA Agent" + - id: "exempt" + name: "Exempt" + - id: "non_engaged" + name: "Non-Engaged" + - id: "expedited" + name: "Expedited" + - id: "full_board" + name: "Full Board" + "default_value": "Full Board" + "validation": + - name: "required" + config: "true" + "properties": + - id: "description" + value: "Description text goes here" + - id: "help" + value: "# Heading 1\n\nMarkdown help text goes here" + - id: "required_expression" + value: "model.my_boolean_field_id && model.my_enum_field_value !== 'something'" + - id: "hide_expression" + value: "model.my_enum_field_value === 'something'" + Field: + properties: + id: + type: string + readOnly: true + type: + type: string + enum: ["string", "long", "boolean", "date", "enum"] + readOnly: true + label: + type: string + readOnly: true + options: + type: array + items: + $ref: "#/components/schemas/EnumFieldOption" + readOnly: true + default_value: + type: string + readOnly: true + validation: + type: array + items: + $ref: "#/components/schemas/FieldValidation" + readOnly: true + "properties": + type: array + items: + $ref: 
"#/components/schemas/FieldProperty" + readOnly: true + EnumFieldOption: + properties: + id: + type: string + name: + type: string + FieldValidation: + properties: + name: + type: string + config: + type: string + FieldProperty: + properties: + id: + type: string + value: + type: string + example: + id: "required_expression" + value: "model.should_require" + Error: + required: + - code + - message + properties: + code: + type: string + format: string + example: "access_denied" + message: + type: string + example: "You do not have permission to view the requested study." + Script: + properties: + name: + type: string + format: string + example: "random_fact" + description: + type: string + example: "Returns a random fact about a topic. Provide an argument of either 'cat', 'norris', or 'buzzword'" + LookupItem: + properties: + value: + type: string + format: string + example: "1000" + label: + type: string + example: "Chuck Norris" + data: + type: string + NavigationItem: + properties: + id: + type: number + format: integer + example: 5 + task_id: + type: string + format: uuid + example: "1234123uuid1234" + name: + type: string + example: "Task_Has_bananas" + description: + type: string + example: "Has Bananas?" 
+ backtracks: + type: boolean + example: false + level: + type: integer + example: 1 + indent: + type: integer + example: 2 + child_count: + type: integer + example: 4 + state: + type: string + enum: + [ + "FUTURE", + "WAITING", + "READY", + "CANCELLED", + "COMPLETED", + "LIKELY", + "MAYBE", + ] + readOnly: true + is_decision: + type: boolean + example: False + readOnly: true + task: + $ref: "#/components/schemas/Task" + Approval: + properties: + id: + type: number + format: integer + example: 5 + ApprovalCounts: + properties: + PENDING: + type: number + format: integer + example: 5 + APPROVED: + type: number + format: integer + example: 5 + DECLINED: + type: number + format: integer + example: 5 + CANCELED: + type: number + format: integer + example: 5 + AWAITING: + type: number + format: integer + example: 5 + ServiceTask: + properties: + items: + type: array + $ref: "#/components/schemas/ServiceTaskConnector" + readOnly: true + ServiceTaskConnector: + properties: + id: + type: string + example: xero/CreateInvoice + parameters: + type: array + $ref: "#/components/schemas/ServiceTaskOperatorParameter" + readOnly: true + ServiceTaskOperatorParameter: + properties: + id: + type: string + example: client_id + type: + type: string + example: str + required: + type: boolean + example: false + GitRepo: + properties: + # remote: + # type: string + # example: sartography/crconnect-workflow-specs + directory: + type: string + example: /home/cr-connect/sync_files + branch: + type: string + example: dev + merge_branch: + type: string + example: staging + changes: + type: array + example: ["file_1.txt", "file_2.txt"] + untracked: + type: array + example: ["a_file.txt", "b_file.txt"] + Secret: + properties: + key: + description: The key of the secret we want to use + type: string + example: my_secret_key + nullable: false + value: + description: The value associated with the key + type: string + example: my_super_secret_value + nullable: false + creator_user_id: + description: 
The id of the logged in user that created this secret + type: number + example: 1 + nullable: false + allowed_processes: + description: The processes allowed to access this secret + type: array + items: + $ref: "#/components/schemas/SecretAllowedProcessPath" + nullable: true + ProcessInstanceLog: + properties: + id: + description: The id of the log + type: number + example: 1 + nullable: false + process_instance_id: + description: The id of the associated process instance + type: number + example: 2 + nullable: false + bpmn_process_identifier: + description: The id of the bpmn process element + type: string + example: Process_SimpleProcess + nullable: false + task: + description: The task identifier + type: number + example: 1234567890 + nullable: false + message: + description: The msg returned in the log + type: string + example: Some message returned in the log + nullable: true + timestamp: + description: The timestamp returned in the log + type: number + example: 123456789.12345 + SecretAllowedProcessPath: + properties: + id: + description: The id of the allowed process path + type: number + example: 1 + nullable: true + secret_id: + description: The id of the secret associated with this allowed process path + type: number + example: 2 + allowed_relative_path: + description: The allowed process path + type: string + example: /group_one/group_two/model_a diff --git a/src/spiffworkflow_backend/config/__init__.py b/src/spiffworkflow_backend/config/__init__.py new file mode 100644 index 00000000..82dadd3d --- /dev/null +++ b/src/spiffworkflow_backend/config/__init__.py @@ -0,0 +1,73 @@ +"""__init__.py.""" +import os +import threading + +from flask.app import Flask +from werkzeug.utils import ImportStringError + +from spiffworkflow_backend.services.logging_service import setup_logger + + +def setup_database_uri(app: Flask) -> None: + """Setup_database_uri.""" + if os.environ.get("SPIFFWORKFLOW_BACKEND_DATABASE_URI") is None: + database_name = 
f"spiffworkflow_backend_{app.config['ENV_IDENTIFIER']}" + if os.environ.get("SPIFF_DATABASE_TYPE") == "sqlite": + app.config[ + "SQLALCHEMY_DATABASE_URI" + ] = f"sqlite:///{app.instance_path}/db_{app.config['ENV_IDENTIFIER']}.sqlite3" + elif os.environ.get("SPIFF_DATABASE_TYPE") == "postgres": + app.config[ + "SQLALCHEMY_DATABASE_URI" + ] = f"postgresql://spiffworkflow_backend:spiffworkflow_backend@localhost:5432/{database_name}" + else: + # use pswd to trick flake8 with hardcoded passwords + db_pswd = os.environ.get("DB_PASSWORD") + if db_pswd is None: + db_pswd = "" + app.config[ + "SQLALCHEMY_DATABASE_URI" + ] = f"mysql+mysqlconnector://root:{db_pswd}@localhost/{database_name}" + else: + app.config["SQLALCHEMY_DATABASE_URI"] = os.environ.get( + "SPIFFWORKFLOW_BACKEND_DATABASE_URI" + ) + + +def setup_config(app: Flask) -> None: + """Setup_config.""" + # ensure the instance folder exists + try: + os.makedirs(app.instance_path) + except OSError: + pass + + app.config["ENV_IDENTIFIER"] = os.environ.get( + "SPIFFWORKFLOW_BACKEND_ENV", "development" + ) + app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False + app.config.from_object("spiffworkflow_backend.config.default") + + # This allows config/testing.py or instance/config.py to override the default config + if "ENV_IDENTIFIER" in app.config and app.config["ENV_IDENTIFIER"] == "testing": + app.config.from_pyfile("config/testing.py", silent=True) + else: + app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True) + + setup_database_uri(app) + setup_logger(app) + + env_config_module = "spiffworkflow_backend.config." + app.config["ENV_IDENTIFIER"] + try: + app.config.from_object(env_config_module) + except ImportStringError as exception: + raise ModuleNotFoundError( + f"Cannot find config module: {env_config_module}" + ) from exception + + # unversioned (see .gitignore) config that can override everything and include secrets. 
+ # src/spiffworkflow_backend/config/secrets.py + app.config.from_pyfile(os.path.join("config", "secrets.py")) + + thread_local_data = threading.local() + app.config["THREAD_LOCAL_DATA"] = thread_local_data diff --git a/src/spiffworkflow_backend/config/default.py b/src/spiffworkflow_backend/config/default.py new file mode 100644 index 00000000..30459b4b --- /dev/null +++ b/src/spiffworkflow_backend/config/default.py @@ -0,0 +1,47 @@ +"""Default.""" +import re +from os import environ + +# Does the site allow self-registration of users +SELF_REGISTRATION = environ.get("SELF_REGISTRATION", default=False) + +DEVELOPMENT = False + +BPMN_SPEC_ABSOLUTE_DIR = environ.get("BPMN_SPEC_ABSOLUTE_DIR", default="") +CORS_DEFAULT = "*" +CORS_ALLOW_ORIGINS = re.split( + r",\s*", environ.get("CORS_ALLOW_ORIGINS", default=CORS_DEFAULT) +) + +PROCESS_WAITING_MESSAGES = ( + environ.get("PROCESS_WAITING_MESSAGES", default="false") == "true" +) +SPIFFWORKFLOW_FRONTEND_URL = environ.get( + "SPIFFWORKFLOW_FRONTEND_URL", default="http://localhost:7001" +) +SPIFFWORKFLOW_BACKEND_URL = environ.get( + "SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000" +) + +GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true" + +# Open ID server +OPEN_ID_SERVER_URL = environ.get("OPEN_ID_SERVER_URL", default="http://localhost:7002") +OPEN_ID_CLIENT_ID = environ.get("OPEN_ID_CLIENT_ID", default="spiffworkflow-backend") +OPEN_ID_REALM_NAME = environ.get("OPEN_ID_REALM_NAME", default="spiffworkflow") +OPEN_ID_CLIENT_SECRET_KEY = environ.get( + "OPEN_ID_CLIENT_SECRET_KEY", default="JXeQExm0JhQPLumgHtIIqf52bDalHz0q" +) # noqa: S105 + +SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = ( + environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="false") == "true" +) + +# service task connector proxy +CONNECTOR_PROXY_URL = environ.get( + "CONNECTOR_PROXY_URL", default="http://localhost:7004" +) + +# Sentry Configuration +SENTRY_DSN = environ.get("SENTRY_DSN", default="") +SENTRY_SAMPLE_RATE = 
environ.get("SENTRY_SAMPLE_RATE", default="1.0") diff --git a/src/spiffworkflow_backend/config/development.py b/src/spiffworkflow_backend/config/development.py new file mode 100644 index 00000000..5ddd1a28 --- /dev/null +++ b/src/spiffworkflow_backend/config/development.py @@ -0,0 +1 @@ +"""Development.""" diff --git a/src/spiffworkflow_backend/config/staging.py b/src/spiffworkflow_backend/config/staging.py new file mode 100644 index 00000000..55bb285b --- /dev/null +++ b/src/spiffworkflow_backend/config/staging.py @@ -0,0 +1,4 @@ +"""Staging.""" +GIT_COMMIT_ON_SAVE = True +GIT_COMMIT_USERNAME = "staging" +GIT_COMMIT_EMAIL = "staging@example.com" diff --git a/src/spiffworkflow_backend/config/testing.py b/src/spiffworkflow_backend/config/testing.py new file mode 100644 index 00000000..a9b04327 --- /dev/null +++ b/src/spiffworkflow_backend/config/testing.py @@ -0,0 +1,9 @@ +"""Testing.py.""" +from os import environ + + +TESTING = True +SECRET_KEY = "the_secret_key" +SPIFFWORKFLOW_BACKEND_LOG_TO_FILE = ( + environ.get("SPIFFWORKFLOW_BACKEND_LOG_TO_FILE", default="true") == "true" +) diff --git a/src/spiffworkflow_backend/exceptions/process_entity_not_found_error.py b/src/spiffworkflow_backend/exceptions/process_entity_not_found_error.py new file mode 100644 index 00000000..5ba6f737 --- /dev/null +++ b/src/spiffworkflow_backend/exceptions/process_entity_not_found_error.py @@ -0,0 +1,5 @@ +"""Process_entity_not_found.""" + + +class ProcessEntityNotFoundError(Exception): + """ProcessEntityNotFoundError.""" diff --git a/src/spiffworkflow_backend/helpers/fixture_data.py b/src/spiffworkflow_backend/helpers/fixture_data.py new file mode 100644 index 00000000..3c21b0f9 --- /dev/null +++ b/src/spiffworkflow_backend/helpers/fixture_data.py @@ -0,0 +1 @@ +"""Fixture_data.""" diff --git a/src/spiffworkflow_backend/helpers/spiff_enum.py b/src/spiffworkflow_backend/helpers/spiff_enum.py new file mode 100644 index 00000000..e3f65215 --- /dev/null +++ 
b/src/spiffworkflow_backend/helpers/spiff_enum.py @@ -0,0 +1,11 @@ +"""Spiff_enum.""" +import enum + + +class SpiffEnum(enum.Enum): + """SpiffEnum.""" + + @classmethod + def list(cls) -> list[str]: + """List.""" + return [el.value for el in cls] diff --git a/src/spiffworkflow_backend/load_database_models.py b/src/spiffworkflow_backend/load_database_models.py new file mode 100644 index 00000000..697e6772 --- /dev/null +++ b/src/spiffworkflow_backend/load_database_models.py @@ -0,0 +1,57 @@ +"""Loads and sets up all database models for SQLAlchemy. + +autoflake8 will remove these lines without the noqa comment + +NOTE: make sure this file is ignored by reorder-python-imports since +some models need to be loaded before others for relationships and to +avoid circular imports +""" + + +from flask_bpmn.models.db import add_listeners + +# must load this before UserModel and GroupModel for relationships +from spiffworkflow_backend.models.user_group_assignment import ( + UserGroupAssignmentModel, +) # noqa: F401 + + +from spiffworkflow_backend.models.active_task import ActiveTaskModel # noqa: F401 +from spiffworkflow_backend.models.bpmn_process_id_lookup import ( + BpmnProcessIdLookup, +) # noqa: F401 +from spiffworkflow_backend.models.data_store import DataStoreModel # noqa: F401 +from spiffworkflow_backend.models.file import FileModel # noqa: F401 +from spiffworkflow_backend.models.message_correlation_property import ( + MessageCorrelationPropertyModel, +) # noqa: F401 +from spiffworkflow_backend.models.message_instance import ( + MessageInstanceModel, +) # noqa: F401 +from spiffworkflow_backend.models.message_model import MessageModel # noqa: F401 +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) # noqa: F401 +from spiffworkflow_backend.models.permission_assignment import ( + PermissionAssignmentModel, +) # noqa: F401 +from spiffworkflow_backend.models.permission_target import ( + PermissionTargetModel, +) # 
noqa: F401 +from spiffworkflow_backend.models.principal import PrincipalModel # noqa: F401 +from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceModel, +) # noqa: F401 +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) # noqa: F401 +from spiffworkflow_backend.models.secret_model import ( + SecretAllowedProcessPathModel, +) # noqa: F401 +from spiffworkflow_backend.models.secret_model import SecretModel # noqa: F401 +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel # noqa: F401 +from spiffworkflow_backend.models.task_event import TaskEventModel # noqa: F401 +from spiffworkflow_backend.models.user import UserModel # noqa: F401 +from spiffworkflow_backend.models.group import GroupModel # noqa: F401 + +add_listeners() diff --git a/src/spiffworkflow_backend/models/__init__.py b/src/spiffworkflow_backend/models/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/src/spiffworkflow_backend/models/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/src/spiffworkflow_backend/models/active_task.py b/src/spiffworkflow_backend/models/active_task.py new file mode 100644 index 00000000..b6041b83 --- /dev/null +++ b/src/spiffworkflow_backend/models/active_task.py @@ -0,0 +1,72 @@ +"""Active_task.""" +from __future__ import annotations + +import json +from dataclasses import dataclass + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship +from sqlalchemy.orm import RelationshipProperty + +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.task import Task + + +@dataclass +class ActiveTaskModel(SpiffworkflowBaseDBModel): + """ActiveTaskModel.""" + + __tablename__ = "active_task" + __table_args__ = ( + 
db.UniqueConstraint( + "task_id", "process_instance_id", name="active_task_unique" + ), + ) + + assigned_principal: RelationshipProperty[PrincipalModel] = relationship( + PrincipalModel + ) + id: int = db.Column(db.Integer, primary_key=True) + process_instance_id: int = db.Column( + ForeignKey(ProcessInstanceModel.id), nullable=False # type: ignore + ) + assigned_principal_id: int = db.Column(ForeignKey(PrincipalModel.id)) + form_file_name: str | None = db.Column(db.String(50)) + ui_form_file_name: str | None = db.Column(db.String(50)) + + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) + + task_id = db.Column(db.String(50)) + task_name = db.Column(db.String(50)) + task_title = db.Column(db.String(50)) + task_type = db.Column(db.String(50)) + task_status = db.Column(db.String(50)) + process_model_display_name = db.Column(db.String(255)) + task_data: str = db.Column(db.Text) + + @classmethod + def to_task(cls, task: ActiveTaskModel) -> Task: + """To_task.""" + task_data = json.loads(task.task_data) + + new_task = Task( + task.task_id, + task.task_name, + task.task_title, + task.task_type, + task.task_status, + data=task_data, + process_instance_id=task.process_instance_id, + ) + if hasattr(task, "process_model_display_name"): + new_task.process_model_display_name = task.process_model_display_name + if hasattr(task, "process_group_identifier"): + new_task.process_group_identifier = task.process_group_identifier + if hasattr(task, "process_model_identifier"): + new_task.process_model_identifier = task.process_model_identifier + + return new_task diff --git a/src/spiffworkflow_backend/models/bpmn_process_id_lookup.py b/src/spiffworkflow_backend/models/bpmn_process_id_lookup.py new file mode 100644 index 00000000..0c71817c --- /dev/null +++ b/src/spiffworkflow_backend/models/bpmn_process_id_lookup.py @@ -0,0 +1,13 @@ +"""Message_model.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import 
SpiffworkflowBaseDBModel + + +class BpmnProcessIdLookup(SpiffworkflowBaseDBModel): + """BpmnProcessIdLookup.""" + + __tablename__ = "bpmn_process_id_lookup" + + id = db.Column(db.Integer, primary_key=True) + bpmn_process_identifier = db.Column(db.String(255), unique=True, index=True) + bpmn_file_relative_path = db.Column(db.String(255)) diff --git a/src/spiffworkflow_backend/models/data_store.py b/src/spiffworkflow_backend/models/data_store.py new file mode 100644 index 00000000..abbfcf34 --- /dev/null +++ b/src/spiffworkflow_backend/models/data_store.py @@ -0,0 +1,31 @@ +"""Data_store.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from flask_marshmallow.sqla import SQLAlchemyAutoSchema # type: ignore + + +class DataStoreModel(SpiffworkflowBaseDBModel): + """DataStoreModel.""" + + __tablename__ = "data_store" + id = db.Column(db.Integer, primary_key=True) + updated_at_in_seconds = db.Column(db.Integer) + key = db.Column(db.String(50), nullable=False) + process_instance_id = db.Column(db.Integer) + task_spec = db.Column(db.String(50)) + spec_id = db.Column(db.String(50)) + user_id = db.Column(db.String(50), nullable=True) + file_id = db.Column(db.Integer, db.ForeignKey("file.id"), nullable=True) + value = db.Column(db.String(50)) + + +class DataStoreSchema(SQLAlchemyAutoSchema): # type: ignore + """DataStoreSchema.""" + + class Meta: + """Meta.""" + + model = DataStoreModel + load_instance = True + include_fk = True + sqla_session = db.session diff --git a/src/spiffworkflow_backend/models/file.py b/src/spiffworkflow_backend/models/file.py new file mode 100644 index 00000000..eb49b873 --- /dev/null +++ b/src/spiffworkflow_backend/models/file.py @@ -0,0 +1,179 @@ +"""File.""" +from dataclasses import dataclass +from dataclasses import field +from datetime import datetime +from typing import Optional + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from marshmallow 
import INCLUDE +from marshmallow import Schema +from sqlalchemy.orm import deferred +from sqlalchemy.orm import relationship + +from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum +from spiffworkflow_backend.models.data_store import DataStoreModel + + +class FileModel(SpiffworkflowBaseDBModel): + """FileModel.""" + + __tablename__ = "file" + id = db.Column(db.Integer, primary_key=True) + name = db.Column(db.String(50), nullable=False) + type = db.Column(db.String(50), nullable=False) + content_type = db.Column(db.String(50), nullable=False) + process_instance_id = db.Column( + db.Integer, db.ForeignKey("process_instance.id"), nullable=True + ) + task_spec = db.Column(db.String(50), nullable=True) + irb_doc_code = db.Column( + db.String(50), nullable=False + ) # Code reference to the documents.xlsx reference file. + data_stores = relationship(DataStoreModel, cascade="all,delete", backref="file") + md5_hash = db.Column(db.String(50), unique=False, nullable=False) + data = deferred(db.Column(db.LargeBinary)) # type: ignore + size = db.Column(db.Integer, default=0) + updated_at_in_seconds = db.Column(db.Integer) + created_at_in_seconds = db.Column(db.Integer) + user_uid = db.Column(db.String(50), db.ForeignKey("user.uid"), nullable=True) + archived = db.Column(db.Boolean, default=False) + + +class FileType(SpiffEnum): + """FileType.""" + + bpmn = "bpmn" + csv = "csv" + dmn = "dmn" + doc = "doc" + docx = "docx" + gif = "gif" + jpg = "jpg" + json = "json" + md = "md" + pdf = "pdf" + png = "png" + ppt = "ppt" + pptx = "pptx" + rtf = "rtf" + svg = "svg" + svg_xml = "svg+xml" + txt = "txt" + xls = "xls" + xlsx = "xlsx" + xml = "xml" + zip = "zip" + + +CONTENT_TYPES = { + "bpmn": "text/xml", + "csv": "text/csv", + "dmn": "text/xml", + "doc": "application/msword", + "docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + "gif": "image/gif", + "jpg": "image/jpeg", + "json": "application/json", + "md": "text/plain", + "pdf": 
"application/pdf", + "png": "image/png", + "ppt": "application/vnd.ms-powerpoint", + "pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation", + "rtf": "application/rtf", + "svg": "image/svg+xml", + "svg_xml": "image/svg+xml", + "txt": "text/plain", + "xls": "application/vnd.ms-excel", + "xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + "xml": "application/xml", + "zip": "application/zip", +} + + +@dataclass(order=True) +class File: + """File.""" + + sort_index: str = field(init=False) + + content_type: str + name: str + type: str + document: dict + last_modified: datetime + size: int + process_instance_id: Optional[int] = None + irb_doc_code: Optional[str] = None + data_store: Optional[dict] = field(default_factory=dict) + user_uid: Optional[str] = None + file_contents: Optional[bytes] = None + process_model_id: Optional[str] = None + process_group_id: Optional[str] = None + archived: bool = False + + def __post_init__(self) -> None: + """__post_init__.""" + self.sort_index = f"{self.type}:{self.name}" + + @classmethod + def from_file_system( + cls, + file_name: str, + file_type: FileType, + content_type: str, + last_modified: datetime, + file_size: int, + ) -> "File": + """From_file_system.""" + instance = cls( + name=file_name, + content_type=content_type, + type=file_type.value, + document={}, + last_modified=last_modified, + size=file_size, + ) + return instance + + +class FileSchema(Schema): + """FileSchema.""" + + class Meta: + """Meta.""" + + model = File + fields = [ + "id", + "name", + "content_type", + "process_instance_id", + "irb_doc_code", + "last_modified", + "type", + "archived", + "size", + "data_store", + "document", + "user_uid", + "url", + "file_contents", + "process_model_id", + "process_group_id", + ] + unknown = INCLUDE + + # url = Method("get_url") + # + # def get_url(self, obj): + # token = 'not_available' + # if hasattr(obj, 'id') and obj.id is not None: + # file_url = 
url_for("/v1_0.crc_api_file_get_file_data_link", file_id=obj.id, _external=True) + # if hasattr(flask.g, 'user'): + # token = flask.g.user.encode_auth_token() + # url = file_url + '?auth_token=' + urllib.parse.quote_plus(token) + # return url + # else: + # return "" + # diff --git a/src/spiffworkflow_backend/models/group.py b/src/spiffworkflow_backend/models/group.py new file mode 100644 index 00000000..8287c40c --- /dev/null +++ b/src/spiffworkflow_backend/models/group.py @@ -0,0 +1,32 @@ +"""Group.""" +from __future__ import annotations + +from typing import TYPE_CHECKING + +from flask_bpmn.models.db import db +from flask_bpmn.models.group import FlaskBpmnGroupModel +from sqlalchemy.orm import relationship + +if TYPE_CHECKING: + from spiffworkflow_backend.models.user_group_assignment import ( # noqa: F401 + UserGroupAssignmentModel, + ) # noqa: F401 + from spiffworkflow_backend.models.user import UserModel # noqa: F401 + + +class GroupModel(FlaskBpmnGroupModel): + """GroupModel.""" + + __tablename__ = "group" + __table_args__ = {"extend_existing": True} + + identifier = db.Column(db.String(255)) + + user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") + users = relationship( # type: ignore + "UserModel", + viewonly=True, + secondary="user_group_assignment", + overlaps="user_group_assignments,users", + ) + principal = relationship("PrincipalModel", uselist=False) # type: ignore diff --git a/src/spiffworkflow_backend/models/message_correlation.py b/src/spiffworkflow_backend/models/message_correlation.py new file mode 100644 index 00000000..baec8270 --- /dev/null +++ b/src/spiffworkflow_backend/models/message_correlation.py @@ -0,0 +1,49 @@ +"""Message_correlation.""" +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +from 
spiffworkflow_backend.models.message_correlation_property import ( + MessageCorrelationPropertyModel, +) +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + +if TYPE_CHECKING: + from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401 + MessageCorrelationMessageInstanceModel, + ) + + +@dataclass +class MessageCorrelationModel(SpiffworkflowBaseDBModel): + """Message Correlations to relate queued messages together.""" + + __tablename__ = "message_correlation" + __table_args__ = ( + db.UniqueConstraint( + "process_instance_id", + "message_correlation_property_id", + "name", + name="message_instance_id_name_unique", + ), + ) + + id = db.Column(db.Integer, primary_key=True) + process_instance_id = db.Column( + ForeignKey(ProcessInstanceModel.id), nullable=False, index=True # type: ignore + ) + message_correlation_property_id = db.Column( + ForeignKey(MessageCorrelationPropertyModel.id), nullable=False, index=True + ) + name = db.Column(db.String(255), nullable=False, index=True) + value = db.Column(db.String(255), nullable=False, index=True) + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) + + message_correlations_message_instances = relationship( + "MessageCorrelationMessageInstanceModel", cascade="delete" + ) diff --git a/src/spiffworkflow_backend/models/message_correlation_message_instance.py b/src/spiffworkflow_backend/models/message_correlation_message_instance.py new file mode 100644 index 00000000..320dfba3 --- /dev/null +++ b/src/spiffworkflow_backend/models/message_correlation_message_instance.py @@ -0,0 +1,32 @@ +"""Message_correlation_message_instance.""" +from dataclasses import dataclass + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from 
spiffworkflow_backend.models.message_instance import MessageInstanceModel + + +@dataclass +class MessageCorrelationMessageInstanceModel(SpiffworkflowBaseDBModel): + """MessageCorrelationMessageInstanceModel.""" + + __tablename__ = "message_correlation_message_instance" + + __table_args__ = ( + db.UniqueConstraint( + "message_instance_id", + "message_correlation_id", + name="message_correlation_message_instance_unique", + ), + ) + + id = db.Column(db.Integer, primary_key=True) + message_instance_id = db.Column( + ForeignKey(MessageInstanceModel.id), nullable=False, index=True # type: ignore + ) + message_correlation_id = db.Column( + ForeignKey(MessageCorrelationModel.id), nullable=False, index=True + ) diff --git a/src/spiffworkflow_backend/models/message_correlation_property.py b/src/spiffworkflow_backend/models/message_correlation_property.py new file mode 100644 index 00000000..b84b7140 --- /dev/null +++ b/src/spiffworkflow_backend/models/message_correlation_property.py @@ -0,0 +1,25 @@ +"""Message_correlation_property.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.message_model import MessageModel + + +class MessageCorrelationPropertyModel(SpiffworkflowBaseDBModel): + """MessageCorrelationPropertyModel.""" + + __tablename__ = "message_correlation_property" + __table_args__ = ( + db.UniqueConstraint( + "identifier", + "message_model_id", + name="message_correlation_property_unique", + ), + ) + + id = db.Column(db.Integer, primary_key=True) + identifier = db.Column(db.String(50), index=True) + message_model_id = db.Column(ForeignKey(MessageModel.id), nullable=False) + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) diff --git a/src/spiffworkflow_backend/models/message_instance.py b/src/spiffworkflow_backend/models/message_instance.py new file mode 100644 index 00000000..61dd12b2 --- 
/dev/null +++ b/src/spiffworkflow_backend/models/message_instance.py @@ -0,0 +1,88 @@ +"""Message_instance.""" +import enum +from dataclasses import dataclass +from typing import Any +from typing import Optional +from typing import TYPE_CHECKING + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.event import listens_for +from sqlalchemy.orm import relationship +from sqlalchemy.orm import Session +from sqlalchemy.orm import validates + +from spiffworkflow_backend.models.message_model import MessageModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel + +if TYPE_CHECKING: + from spiffworkflow_backend.models.message_correlation_message_instance import ( # noqa: F401 + MessageCorrelationMessageInstanceModel, + ) + + +class MessageTypes(enum.Enum): + """MessageTypes.""" + + send = "send" + receive = "receive" + + +class MessageStatuses(enum.Enum): + """MessageStatuses.""" + + ready = "ready" + running = "running" + completed = "completed" + failed = "failed" + + +@dataclass +class MessageInstanceModel(SpiffworkflowBaseDBModel): + """Messages from a process instance that are ready to send to a receiving task.""" + + __tablename__ = "message_instance" + + id: int = db.Column(db.Integer, primary_key=True) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore + message_model_id: int = db.Column(ForeignKey(MessageModel.id), nullable=False) + message_model = relationship("MessageModel") + message_correlations_message_instances = relationship( + "MessageCorrelationMessageInstanceModel", cascade="delete" + ) + + message_type: str = db.Column(db.String(20), nullable=False) + payload: str = db.Column(db.JSON) + status: str = db.Column(db.String(20), nullable=False, default="ready") + failure_cause: str = db.Column(db.Text()) + updated_at_in_seconds: int = db.Column(db.Integer) + 
created_at_in_seconds: int = db.Column(db.Integer) + + @validates("message_type") + def validate_message_type(self, key: str, value: Any) -> Any: + """Validate_message_type.""" + return self.validate_enum_field(key, value, MessageTypes) + + @validates("status") + def validate_status(self, key: str, value: Any) -> Any: + """Validate_status.""" + return self.validate_enum_field(key, value, MessageStatuses) + + +# This runs for ALL db flushes for ANY model, not just this one even if it's in the MessageInstanceModel class +# so this may not be worth it or there may be a better way to do it +# +# https://stackoverflow.com/questions/32555829/flask-validates-decorator-multiple-fields-simultaneously/33025472#33025472 +# https://docs.sqlalchemy.org/en/14/orm/session_events.html#before-flush +@listens_for(Session, "before_flush") # type: ignore +def ensure_failure_cause_is_set_if_message_instance_failed( + session: Any, _flush_context: Optional[Any], _instances: Optional[Any] +) -> None: + """Ensure_failure_cause_is_set_if_message_instance_failed.""" + for instance in session.new: + if isinstance(instance, MessageInstanceModel): + if instance.status == "failed" and instance.failure_cause is None: + raise ValueError( + f"{instance.__class__.__name__}: failure_cause must be set if status is failed" + ) diff --git a/src/spiffworkflow_backend/models/message_model.py b/src/spiffworkflow_backend/models/message_model.py new file mode 100644 index 00000000..601b6d40 --- /dev/null +++ b/src/spiffworkflow_backend/models/message_model.py @@ -0,0 +1,13 @@ +"""Message_model.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel + + +class MessageModel(SpiffworkflowBaseDBModel): + """MessageModel.""" + + __tablename__ = "message_model" + + id = db.Column(db.Integer, primary_key=True) + identifier = db.Column(db.String(50), unique=True, index=True) + name = db.Column(db.String(50), unique=True, index=True) diff --git 
a/src/spiffworkflow_backend/models/message_triggerable_process_model.py b/src/spiffworkflow_backend/models/message_triggerable_process_model.py new file mode 100644 index 00000000..97d54aa7 --- /dev/null +++ b/src/spiffworkflow_backend/models/message_triggerable_process_model.py @@ -0,0 +1,22 @@ +"""Message_correlation_property.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.message_model import MessageModel + + +class MessageTriggerableProcessModel(SpiffworkflowBaseDBModel): + """MessageTriggerableProcessModel.""" + + __tablename__ = "message_triggerable_process_model" + + id = db.Column(db.Integer, primary_key=True) + message_model_id = db.Column( + ForeignKey(MessageModel.id), nullable=False, unique=True + ) + process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True) + process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True) + + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) diff --git a/src/spiffworkflow_backend/models/permission_assignment.py b/src/spiffworkflow_backend/models/permission_assignment.py new file mode 100644 index 00000000..006d63ce --- /dev/null +++ b/src/spiffworkflow_backend/models/permission_assignment.py @@ -0,0 +1,55 @@ +"""PermissionAssignment.""" +import enum + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import Enum +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.permission_target import PermissionTargetModel +from spiffworkflow_backend.models.principal import PrincipalModel + + +class PermitDeny(enum.Enum): + """PermitDeny.""" + + # permit, aka grant + permit = "permit" + deny = "deny" + + +class Permission(enum.Enum): + """Permission.""" + + # from original requirements + # instantiate = 1 + # administer = 2 + # 
view_instance = 3 + + create = 1 + read = 2 + update = 3 + delete = 4 + list = 5 + instantiate = 6 # this is something you do to a process model + + +class PermissionAssignmentModel(SpiffworkflowBaseDBModel): + """PermissionAssignmentModel.""" + + __tablename__ = "permission_assignment" + __table_args__ = ( + db.UniqueConstraint( + "principal_id", + "permission_target_id", + "permission", + name="permission_assignment_uniq", + ), + ) + id = db.Column(db.Integer, primary_key=True) + principal_id = db.Column(ForeignKey(PrincipalModel.id), nullable=False) + permission_target_id = db.Column( + ForeignKey(PermissionTargetModel.id), nullable=False + ) + grant_type = db.Column(Enum(PermitDeny)) + permission = db.Column(Enum(Permission)) diff --git a/src/spiffworkflow_backend/models/permission_target.py b/src/spiffworkflow_backend/models/permission_target.py new file mode 100644 index 00000000..0e576cf8 --- /dev/null +++ b/src/spiffworkflow_backend/models/permission_target.py @@ -0,0 +1,26 @@ +"""PermissionTarget.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel + +# process groups and models are not in the db +# from sqlalchemy import ForeignKey # type: ignore +# +# from spiffworkflow_backend.models.process_group import ProcessGroupModel +# from spiffworkflow_backend.models.process_model import ProcessModel + + +class PermissionTargetModel(SpiffworkflowBaseDBModel): + """PermissionTargetModel.""" + + __tablename__ = "permission_target" + # __table_args__ = ( + # CheckConstraint( + # "NOT(process_group_id IS NULL AND process_model_identifier IS NULL AND process_instance_id IS NULL)" + # ), + # ) + + id = db.Column(db.Integer, primary_key=True) + uri = db.Column(db.String(255), unique=True, nullable=False) + # process_group_id = db.Column(ForeignKey(ProcessGroupModel.id), nullable=True) # type: ignore + # process_model_identifier = db.Column(ForeignKey(ProcessModel.id), nullable=True) # type: ignore + # process_instance_id = 
db.Column(ForeignKey(ProcessInstanceModel.id), nullable=True) # type: ignore diff --git a/src/spiffworkflow_backend/models/principal.py b/src/spiffworkflow_backend/models/principal.py new file mode 100644 index 00000000..fbe05930 --- /dev/null +++ b/src/spiffworkflow_backend/models/principal.py @@ -0,0 +1,30 @@ +"""Principal.""" +from dataclasses import dataclass + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.schema import CheckConstraint + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.user import UserModel + + +class DataValidityError(Exception): + """DataValidityError.""" + + +class MissingPrincipalError(DataValidityError): + """MissingPrincipalError.""" + + +@dataclass +class PrincipalModel(SpiffworkflowBaseDBModel): + """PrincipalModel.""" + + __tablename__ = "principal" + __table_args__ = (CheckConstraint("NOT(user_id IS NULL AND group_id IS NULL)"),) + + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(ForeignKey(UserModel.id), nullable=True, unique=True) + group_id = db.Column(ForeignKey(GroupModel.id), nullable=True, unique=True) diff --git a/src/spiffworkflow_backend/models/process_group.py b/src/spiffworkflow_backend/models/process_group.py new file mode 100644 index 00000000..0b100ed4 --- /dev/null +++ b/src/spiffworkflow_backend/models/process_group.py @@ -0,0 +1,62 @@ +"""Process_group.""" +from __future__ import annotations + +from dataclasses import dataclass +from dataclasses import field +from typing import Any + +import marshmallow +from marshmallow import post_load +from marshmallow import Schema + +from spiffworkflow_backend.models.process_model import ProcessModelInfo + + +@dataclass(order=True) +class ProcessGroup: + """ProcessGroup.""" + + sort_index: str = field(init=False) + + id: str # A unique string name, lower case, under scores (ie, 'my_group') + display_name: str + 
display_order: int | None = 0 + admin: bool | None = False + process_models: list[ProcessModelInfo] = field( + default_factory=list[ProcessModelInfo] + ) + + def __post_init__(self) -> None: + """__post_init__.""" + self.sort_index = self.id + + def __eq__(self, other: Any) -> bool: + """__eq__.""" + if not isinstance(other, ProcessGroup): + return False + if other.id == self.id: + return True + return False + + +class ProcessGroupSchema(Schema): + """ProcessGroupSchema.""" + + class Meta: + """Meta.""" + + model = ProcessGroup + fields = ["id", "display_name", "display_order", "admin", "process_models"] + + process_models = marshmallow.fields.List( + marshmallow.fields.Nested( + "ProcessModelInfoSchema", dump_only=True, required=False + ) + ) + + @post_load + def make_process_group( + self, data: dict[str, str | bool | int], **kwargs: dict + ) -> ProcessGroup: + """Make_process_group.""" + return ProcessGroup(**data) # type: ignore diff --git a/src/spiffworkflow_backend/models/process_instance.py b/src/spiffworkflow_backend/models/process_instance.py new file mode 100644 index 00000000..d1d117c8 --- /dev/null +++ b/src/spiffworkflow_backend/models/process_instance.py @@ -0,0 +1,295 @@ +"""Process_instance.""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any +from typing import cast + +import marshmallow +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from marshmallow import INCLUDE +from marshmallow import Schema +from marshmallow_enum import EnumField # type: ignore +from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore +from sqlalchemy import ForeignKey +from sqlalchemy.orm import deferred +from sqlalchemy.orm import relationship +from sqlalchemy.orm import validates + +from spiffworkflow_backend.helpers.spiff_enum import SpiffEnum +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.task import Task 
+from spiffworkflow_backend.models.task import TaskSchema +from spiffworkflow_backend.models.user import UserModel + + +class NavigationItemSchema(Schema): + """NavigationItemSchema.""" + + class Meta: + """Meta.""" + + fields = [ + "spec_id", + "name", + "spec_type", + "task_id", + "description", + "backtracks", + "indent", + "lane", + "state", + "children", + ] + unknown = INCLUDE + + state = marshmallow.fields.String(required=False, allow_none=True) + description = marshmallow.fields.String(required=False, allow_none=True) + backtracks = marshmallow.fields.String(required=False, allow_none=True) + lane = marshmallow.fields.String(required=False, allow_none=True) + task_id = marshmallow.fields.String(required=False, allow_none=True) + children = marshmallow.fields.List( + marshmallow.fields.Nested(lambda: NavigationItemSchema()) + ) + + +class ProcessInstanceStatus(SpiffEnum): + """ProcessInstanceStatus.""" + + not_started = "not_started" + user_input_required = "user_input_required" + waiting = "waiting" + complete = "complete" + faulted = "faulted" + suspended = "suspended" + terminated = "terminated" + erroring = "erroring" + + +class ProcessInstanceModel(SpiffworkflowBaseDBModel): + """ProcessInstanceModel.""" + + __tablename__ = "process_instance" + id: int = db.Column(db.Integer, primary_key=True) + process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True) + process_group_identifier: str = db.Column(db.String(50), nullable=False, index=True) + process_initiator_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) + process_initiator = relationship("UserModel") + + active_tasks = relationship("ActiveTaskModel", cascade="delete") # type: ignore + task_events = relationship("TaskEventModel", cascade="delete") # type: ignore + spiff_logs = relationship("SpiffLoggingModel", cascade="delete") # type: ignore + message_instances = relationship("MessageInstanceModel", cascade="delete") # type: ignore + message_correlations = 
relationship("MessageCorrelationModel", cascade="delete") # type: ignore + + bpmn_json: str | None = deferred(db.Column(db.JSON)) # type: ignore + start_in_seconds: int | None = db.Column(db.Integer) + end_in_seconds: int | None = db.Column(db.Integer) + updated_at_in_seconds: int = db.Column(db.Integer) + created_at_in_seconds: int = db.Column(db.Integer) + status: str = db.Column(db.String(50)) + + bpmn_xml_file_contents: bytes | None = None + bpmn_version_control_type: str = db.Column(db.String(50)) + bpmn_version_control_identifier: str = db.Column(db.String(255)) + + @property + def serialized(self) -> dict[str, Any]: + """Return object data in serializeable format.""" + local_bpmn_xml_file_contents = "" + if self.bpmn_xml_file_contents: + local_bpmn_xml_file_contents = self.bpmn_xml_file_contents.decode("utf-8") + + return { + "id": self.id, + "process_model_identifier": self.process_model_identifier, + "process_group_identifier": self.process_group_identifier, + "status": self.status, + "bpmn_json": self.bpmn_json, + "start_in_seconds": self.start_in_seconds, + "end_in_seconds": self.end_in_seconds, + "process_initiator_id": self.process_initiator_id, + "bpmn_xml_file_contents": local_bpmn_xml_file_contents, + } + + @property + def serialized_flat(self) -> dict: + """Return object in serializeable format with data merged together with top-level attributes. + + Top-level attributes like process_model_identifier and status win over data attributes. 
+ """ + serialized_top_level_attributes = self.serialized + serialized_top_level_attributes.pop("data", None) + return cast(dict, DeepMerge.merge(self.data, serialized_top_level_attributes)) + + @validates("status") + def validate_status(self, key: str, value: Any) -> Any: + """Validate_status.""" + return self.validate_enum_field(key, value, ProcessInstanceStatus) + + +class ProcessInstanceModelSchema(Schema): + """ProcessInstanceModelSchema.""" + + class Meta: + """Meta.""" + + model = ProcessInstanceModel + fields = [ + "id", + "process_model_identifier", + "process_group_identifier", + "process_initiator_id", + "start_in_seconds", + "end_in_seconds", + "updated_at_in_seconds", + "created_at_in_seconds", + "status", + "bpmn_version_control_identifier", + ] + + status = marshmallow.fields.Method("get_status", dump_only=True) + + def get_status(self, obj: ProcessInstanceModel) -> str: + """Get_status.""" + return obj.status + + +class ProcessInstanceApi: + """ProcessInstanceApi.""" + + def __init__( + self, + id: int, + status: ProcessInstanceStatus, + next_task: Task | None, + process_model_identifier: str, + process_group_identifier: str, + completed_tasks: int, + updated_at_in_seconds: int, + is_review: bool, + title: str, + ) -> None: + """__init__.""" + self.id = id + self.status = status + self.next_task = next_task # The next task that requires user input. + # self.navigation = navigation fixme: would be a hotness. 
+ self.process_model_identifier = process_model_identifier + self.process_group_identifier = process_group_identifier + self.completed_tasks = completed_tasks + self.updated_at_in_seconds = updated_at_in_seconds + self.title = title + self.is_review = is_review + + +class ProcessInstanceApiSchema(Schema): + """ProcessInstanceApiSchema.""" + + class Meta: + """Meta.""" + + model = ProcessInstanceApi + fields = [ + "id", + "status", + "next_task", + "navigation", + "process_model_identifier", + "process_group_identifier", + "completed_tasks", + "updated_at_in_seconds", + "is_review", + "title", + "study_id", + "state", + ] + unknown = INCLUDE + + status = EnumField(ProcessInstanceStatus) + next_task = marshmallow.fields.Nested(TaskSchema, dump_only=True, required=False) + navigation = marshmallow.fields.List( + marshmallow.fields.Nested(NavigationItemSchema, dump_only=True) + ) + state = marshmallow.fields.String(allow_none=True) + + @marshmallow.post_load + def make_process_instance( + self, data: dict[str, Any], **kwargs: dict + ) -> ProcessInstanceApi: + """Make_process_instance.""" + keys = [ + "id", + "status", + "next_task", + "navigation", + "process_model_identifier", + "process_group_identifier", + "completed_tasks", + "updated_at_in_seconds", + "is_review", + "title", + "study_id", + "state", + ] + filtered_fields = {key: data[key] for key in keys} + filtered_fields["next_task"] = TaskSchema().make_task(data["next_task"]) + return ProcessInstanceApi(**filtered_fields) + + +@dataclass +class ProcessInstanceMetadata: + """ProcessInstanceMetadata.""" + + id: int + display_name: str | None = None + description: str | None = None + spec_version: str | None = None + state: str | None = None + status: str | None = None + completed_tasks: int | None = None + is_review: bool | None = None + state_message: str | None = None + process_model_identifier: str | None = None + process_group_id: str | None = None + + @classmethod + def from_process_instance( + cls, 
process_instance: ProcessInstanceModel, process_model: ProcessModelInfo + ) -> ProcessInstanceMetadata: + """From_process_instance.""" + instance = cls( + id=process_instance.id, + display_name=process_model.display_name, + description=process_model.description, + process_group_id=process_model.process_group_id, + state_message=process_instance.state_message, + status=process_instance.status, + completed_tasks=process_instance.completed_tasks, + is_review=process_model.is_review, + process_model_identifier=process_instance.process_model_identifier, + ) + return instance + + +class ProcessInstanceMetadataSchema(Schema): + """ProcessInstanceMetadataSchema.""" + + status = EnumField(ProcessInstanceStatus) + + class Meta: + """Meta.""" + + model = ProcessInstanceMetadata + additional = [ + "id", + "display_name", + "description", + "state", + "completed_tasks", + "process_group_id", + "is_review", + "state_message", + ] + unknown = INCLUDE diff --git a/src/spiffworkflow_backend/models/process_instance_report.py b/src/spiffworkflow_backend/models/process_instance_report.py new file mode 100644 index 00000000..8f8886bf --- /dev/null +++ b/src/spiffworkflow_backend/models/process_instance_report.py @@ -0,0 +1,335 @@ +"""Process_instance.""" +from __future__ import annotations + +from dataclasses import dataclass +from typing import Any +from typing import cast +from typing import Optional +from typing import TypedDict + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import deferred +from sqlalchemy.orm import relationship + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, 
+) +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +ReportMetadata = dict[str, Any] + + +class ProcessInstanceReportResult(TypedDict): + """ProcessInstanceReportResult.""" + + report_metadata: ReportMetadata + results: list[dict] + + +# https://stackoverflow.com/a/56842689/6090676 +class Reversor: + """Reversor.""" + + def __init__(self, obj: Any): + """__init__.""" + self.obj = obj + + def __eq__(self, other: Any) -> Any: + """__eq__.""" + return other.obj == self.obj + + def __lt__(self, other: Any) -> Any: + """__lt__.""" + return other.obj < self.obj + + +@dataclass +class ProcessInstanceReportModel(SpiffworkflowBaseDBModel): + """ProcessInstanceReportModel.""" + + __tablename__ = "process_instance_report" + __table_args__ = ( + db.UniqueConstraint( + "process_group_identifier", + "process_model_identifier", + "identifier", + name="process_instance_report_unique", + ), + ) + + id = db.Column(db.Integer, primary_key=True) + identifier: str = db.Column(db.String(50), nullable=False, index=True) + process_model_identifier: str = db.Column(db.String(50), nullable=False, index=True) + process_group_identifier = db.Column(db.String(50), nullable=False, index=True) + report_metadata: dict = deferred(db.Column(db.JSON)) # type: ignore + created_by_id = db.Column(ForeignKey(UserModel.id), nullable=False) + created_by = relationship("UserModel") + created_at_in_seconds = db.Column(db.Integer) + updated_at_in_seconds = db.Column(db.Integer) + + @classmethod + def add_fixtures(cls) -> None: + """Add_fixtures.""" + try: + process_model = ProcessModelService().get_process_model( + group_id="sartography-admin", process_model_id="ticket" + ) + user = UserModel.query.first() + columns = [ + {"Header": "id", "accessor": "id"}, + {"Header": "month", "accessor": "month"}, + {"Header": "milestone", "accessor": "milestone"}, + {"Header": "req_id", "accessor": "req_id"}, + {"Header": "feature", "accessor": "feature"}, + {"Header": "dev_days", 
"accessor": "dev_days"}, + {"Header": "priority", "accessor": "priority"}, + ] + json = {"order": "month asc", "columns": columns} + + cls.create_report( + identifier="standard", + process_group_identifier=process_model.process_group_id, + process_model_identifier=process_model.id, + user=user, + report_metadata=json, + ) + cls.create_report( + identifier="for-month", + process_group_identifier="sartography-admin", + process_model_identifier="ticket", + user=user, + report_metadata=cls.ticket_for_month_report(), + ) + cls.create_report( + identifier="for-month-3", + process_group_identifier="sartography-admin", + process_model_identifier="ticket", + user=user, + report_metadata=cls.ticket_for_month_3_report(), + ) + cls.create_report( + identifier="hot-report", + process_group_identifier="category_number_one", + process_model_identifier="process-model-with-form", + user=user, + report_metadata=cls.process_model_with_form_report_fixture(), + ) + + except ProcessEntityNotFoundError: + print("Did not find process models so not adding report fixtures for them") + + @classmethod + def create_report( + cls, + identifier: str, + process_group_identifier: str, + process_model_identifier: str, + user: UserModel, + report_metadata: ReportMetadata, + ) -> None: + """Make_fixture_report.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=identifier, + process_group_identifier=process_group_identifier, + process_model_identifier=process_model_identifier, + ).first() + + if process_instance_report is None: + process_instance_report = cls( + identifier=identifier, + process_group_identifier=process_group_identifier, + process_model_identifier=process_model_identifier, + created_by_id=user.id, + report_metadata=report_metadata, + ) + db.session.add(process_instance_report) + db.session.commit() + + @classmethod + def ticket_for_month_report(cls) -> dict: + """Ticket_for_month_report.""" + return { + "columns": [ + {"Header": "id", "accessor": 
"id"}, + {"Header": "month", "accessor": "month"}, + {"Header": "milestone", "accessor": "milestone"}, + {"Header": "req_id", "accessor": "req_id"}, + {"Header": "feature", "accessor": "feature"}, + {"Header": "priority", "accessor": "priority"}, + ], + "order": "month asc", + "filter_by": [ + { + "field_name": "month", + "operator": "equals", + "field_value": "{{month}}", + } + ], + } + + @classmethod + def ticket_for_month_3_report(cls) -> dict: + """Ticket_for_month_report.""" + return { + "columns": [ + {"Header": "id", "accessor": "id"}, + {"Header": "month", "accessor": "month"}, + {"Header": "milestone", "accessor": "milestone"}, + {"Header": "req_id", "accessor": "req_id"}, + {"Header": "feature", "accessor": "feature"}, + {"Header": "dev_days", "accessor": "dev_days"}, + {"Header": "priority", "accessor": "priority"}, + ], + "order": "month asc", + "filter_by": [ + {"field_name": "month", "operator": "equals", "field_value": "3"} + ], + } + + @classmethod + def process_model_with_form_report_fixture(cls) -> dict: + """Process_model_with_form_report_fixture.""" + return { + "columns": [ + {"Header": "id", "accessor": "id"}, + { + "Header": "system_generated_number", + "accessor": "system_generated_number", + }, + { + "Header": "user_generated_number", + "accessor": "user_generated_number", + }, + {"Header": "product", "accessor": "product"}, + ], + "order": "-id", + } + + @classmethod + def create_with_attributes( + cls, + identifier: str, + process_group_identifier: str, + process_model_identifier: str, + report_metadata: dict, + user: UserModel, + ) -> ProcessInstanceReportModel: + """Create_with_attributes.""" + process_model = ProcessModelService().get_process_model( + group_id=process_group_identifier, process_model_id=process_model_identifier + ) + process_instance_report = cls( + identifier=identifier, + process_group_identifier=process_model.process_group_id, + process_model_identifier=process_model.id, + created_by_id=user.id, + 
report_metadata=report_metadata, + ) + db.session.add(process_instance_report) + db.session.commit() + return process_instance_report + + def with_substitutions(self, field_value: Any, substitution_variables: dict) -> Any: + """With_substitutions.""" + if substitution_variables is not None: + for key, value in substitution_variables.items(): + if isinstance(value, str) or isinstance(value, int): + field_value = str(field_value).replace( + "{{" + key + "}}", str(value) + ) + return field_value + + # modeled after https://github.com/suyash248/sqlalchemy-json-querybuilder + # just supports "equals" operator for now. + # perhaps we will use the database instead of filtering in memory in the future and then we might use this lib directly. + def passes_filter( + self, process_instance_dict: dict, substitution_variables: dict + ) -> bool: + """Passes_filter.""" + if "filter_by" in self.report_metadata: + for filter_by in self.report_metadata["filter_by"]: + field_name = filter_by["field_name"] + operator = filter_by["operator"] + field_value = self.with_substitutions( + filter_by["field_value"], substitution_variables + ) + if operator == "equals": + if str(process_instance_dict.get(field_name)) != str(field_value): + return False + + return True + + def order_things(self, process_instance_dicts: list) -> list: + """Order_things.""" + order_by = self.report_metadata["order_by"] + + def order_by_function_for_lambda( + process_instance_dict: dict, + ) -> list[Reversor | str | None]: + """Order_by_function_for_lambda.""" + comparison_values: list[Reversor | str | None] = [] + for order_by_item in order_by: + if order_by_item.startswith("-"): + # remove leading - from order_by_item + order_by_item = order_by_item[1:] + sort_value = process_instance_dict.get(order_by_item) + comparison_values.append(Reversor(sort_value)) + else: + sort_value = cast( + Optional[str], process_instance_dict.get(order_by_item) + ) + comparison_values.append(sort_value) + return comparison_values + 
+ return sorted(process_instance_dicts, key=order_by_function_for_lambda) + + def generate_report( + self, + process_instances: list[ProcessInstanceModel], + substitution_variables: dict | None, + ) -> ProcessInstanceReportResult: + """Generate_report.""" + if substitution_variables is None: + substitution_variables = {} + + def to_serialized(process_instance: ProcessInstanceModel) -> dict: + """To_serialized.""" + processor = ProcessInstanceProcessor(process_instance) + process_instance.data = processor.get_current_data() + return process_instance.serialized_flat + + process_instance_dicts = map(to_serialized, process_instances) + results = [] + for process_instance_dict in process_instance_dicts: + if self.passes_filter(process_instance_dict, substitution_variables): + results.append(process_instance_dict) + + if "order_by" in self.report_metadata: + results = self.order_things(results) + + if "columns" in self.report_metadata: + column_keys_to_keep = [ + c["accessor"] for c in self.report_metadata["columns"] + ] + + pruned_results = [] + for result in results: + dict_you_want = { + your_key: result[your_key] + for your_key in column_keys_to_keep + if result.get(your_key) + } + pruned_results.append(dict_you_want) + results = pruned_results + + return ProcessInstanceReportResult( + report_metadata=self.report_metadata, results=results + ) diff --git a/src/spiffworkflow_backend/models/process_model.py b/src/spiffworkflow_backend/models/process_model.py new file mode 100644 index 00000000..9fdc8077 --- /dev/null +++ b/src/spiffworkflow_backend/models/process_model.py @@ -0,0 +1,90 @@ +"""Process_model.""" +from __future__ import annotations + +import enum +from dataclasses import dataclass +from dataclasses import field +from typing import Any + +import marshmallow +from marshmallow import Schema +from marshmallow.decorators import post_load + +from spiffworkflow_backend.models.file import File + + +class NotificationType(enum.Enum): + """NotificationType.""" + + 
fault = "fault" + suspend = "suspend" + + +@dataclass(order=True) +class ProcessModelInfo: + """ProcessModelInfo.""" + + sort_index: str = field(init=False) + + id: str + display_name: str + description: str + process_group_id: str = "" + process_group: Any | None = None + is_master_spec: bool | None = False + standalone: bool | None = False + library: bool | None = False + primary_file_name: str | None = None + primary_process_id: str | None = None + libraries: list[str] = field(default_factory=list) + display_order: int | None = 0 + is_review: bool = False + files: list[File] | None = field(default_factory=list[File]) + fault_or_suspend_on_exception: str = NotificationType.fault.value + exception_notification_addresses: list[str] = field(default_factory=list) + + def __post_init__(self) -> None: + """__post_init__.""" + self.sort_index = f"{self.process_group_id}:{self.id}" + + def __eq__(self, other: Any) -> bool: + """__eq__.""" + if not isinstance(other, ProcessModelInfo): + return False + if other.id == self.id: + return True + return False + + +class ProcessModelInfoSchema(Schema): + """ProcessModelInfoSchema.""" + + class Meta: + """Meta.""" + + model = ProcessModelInfo + + id = marshmallow.fields.String(required=True) + display_name = marshmallow.fields.String(required=True) + description = marshmallow.fields.String() + is_master_spec = marshmallow.fields.Boolean(required=True) + standalone = marshmallow.fields.Boolean(required=True) + library = marshmallow.fields.Boolean(required=True) + display_order = marshmallow.fields.Integer(allow_none=True) + primary_file_name = marshmallow.fields.String(allow_none=True) + primary_process_id = marshmallow.fields.String(allow_none=True) + is_review = marshmallow.fields.Boolean(allow_none=True) + process_group_id = marshmallow.fields.String(allow_none=True) + libraries = marshmallow.fields.List(marshmallow.fields.String(), allow_none=True) + files = marshmallow.fields.List(marshmallow.fields.Nested("FileSchema")) + 
fault_or_suspend_on_exception = marshmallow.fields.String() + exception_notification_addresses = marshmallow.fields.List( + marshmallow.fields.String + ) + + @post_load + def make_spec( + self, data: dict[str, str | bool | int | NotificationType], **_: Any + ) -> ProcessModelInfo: + """Make_spec.""" + return ProcessModelInfo(**data) # type: ignore diff --git a/src/spiffworkflow_backend/models/secret_model.py b/src/spiffworkflow_backend/models/secret_model.py new file mode 100644 index 00000000..9eab36b4 --- /dev/null +++ b/src/spiffworkflow_backend/models/secret_model.py @@ -0,0 +1,65 @@ +"""Secret_model.""" +from dataclasses import dataclass + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from marshmallow import Schema +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship +from sqlalchemy.orm import RelationshipProperty + +from spiffworkflow_backend.models.user import UserModel + + +@dataclass() +class SecretModel(SpiffworkflowBaseDBModel): + """SecretModel.""" + + __tablename__ = "secret" + id: int = db.Column(db.Integer, primary_key=True) + key: str = db.Column(db.String(50), unique=True, nullable=False) + value: str = db.Column(db.String(255), nullable=False) + creator_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=False) + + allowed_processes: RelationshipProperty = relationship( + "SecretAllowedProcessPathModel", cascade="delete" + ) + + +@dataclass() +class SecretAllowedProcessPathModel(SpiffworkflowBaseDBModel): + """Allowed processes can be Process Groups or Process Models. + + We store the path in either case. 
+ """ + + __tablename__ = "secret_allowed_process" + __table_args__ = ( + db.UniqueConstraint( + "secret_id", "allowed_relative_path", name="unique_secret_path" + ), + ) + + id: int = db.Column(db.Integer, primary_key=True) + secret_id: int = db.Column(ForeignKey(SecretModel.id), nullable=False) # type: ignore + allowed_relative_path: str = db.Column(db.String(500), nullable=False) + + +class SecretModelSchema(Schema): + """SecretModelSchema.""" + + class Meta: + """Meta.""" + + model = SecretModel + fields = ["key", "value", "creator_user_id", "allowed_processes"] + + +class SecretAllowedProcessSchema(Schema): + """SecretAllowedProcessSchema.""" + + class Meta: + """Meta.""" + + model = SecretAllowedProcessPathModel + fields = ["secret_id", "allowed_relative_path"] diff --git a/src/spiffworkflow_backend/models/spiff_logging.py b/src/spiffworkflow_backend/models/spiff_logging.py new file mode 100644 index 00000000..a655ec51 --- /dev/null +++ b/src/spiffworkflow_backend/models/spiff_logging.py @@ -0,0 +1,27 @@ +"""Spiff_logging.""" +from dataclasses import dataclass +from typing import Optional + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.user import UserModel + + +@dataclass +class SpiffLoggingModel(SpiffworkflowBaseDBModel): + """LoggingModel.""" + + __tablename__ = "spiff_logging" + id: int = db.Column(db.Integer, primary_key=True) + process_instance_id: int = db.Column(ForeignKey(ProcessInstanceModel.id), nullable=False) # type: ignore + bpmn_process_identifier: str = db.Column(db.String(255), nullable=False) + bpmn_task_identifier: str = db.Column(db.String(255), nullable=False) + bpmn_task_name: str = db.Column(db.String(255), nullable=True) + bpmn_task_type: str = db.Column(db.String(255), nullable=True) + spiff_task_guid: str = db.Column(db.String(50), 
nullable=False) + timestamp: float = db.Column(db.DECIMAL(17, 6), nullable=False) + message: Optional[str] = db.Column(db.String(255), nullable=True) + current_user_id: int = db.Column(ForeignKey(UserModel.id), nullable=True) diff --git a/src/spiffworkflow_backend/models/task.py b/src/spiffworkflow_backend/models/task.py new file mode 100644 index 00000000..42eff1f4 --- /dev/null +++ b/src/spiffworkflow_backend/models/task.py @@ -0,0 +1,299 @@ +"""Task.""" +import enum +from typing import Any +from typing import Optional +from typing import Union + +import marshmallow +from marshmallow import Schema +from marshmallow_enum import EnumField # type: ignore + + +class MultiInstanceType(enum.Enum): + """MultiInstanceType.""" + + none = "none" + looping = "looping" + parallel = "parallel" + sequential = "sequential" + + +class Task: + """Task.""" + + ########################################################################## + # Custom properties and validations defined in Camunda form fields # + ########################################################################## + + # Custom task title + PROP_EXTENSIONS_TITLE = "display_name" + PROP_EXTENSIONS_CLEAR_DATA = "clear_data" + + # Field Types + FIELD_TYPE_STRING = "string" + FIELD_TYPE_LONG = "long" + FIELD_TYPE_BOOLEAN = "boolean" + FIELD_TYPE_DATE = "date" + FIELD_TYPE_ENUM = "enum" + FIELD_TYPE_TEXTAREA = "textarea" # textarea: Multiple lines of text + FIELD_TYPE_AUTO_COMPLETE = "autocomplete" + FIELD_TYPE_FILE = "file" + FIELD_TYPE_FILES = "files" # files: Multiple files + FIELD_TYPE_TEL = "tel" # tel: Phone number + FIELD_TYPE_EMAIL = "email" # email: Email address + FIELD_TYPE_URL = "url" # url: Website address + + FIELD_PROP_AUTO_COMPLETE_MAX = ( + "autocomplete_num" # Not used directly, passed in from the front end. 
+ ) + + # Required field + FIELD_CONSTRAINT_REQUIRED = "required" + + # Field properties and expressions Expressions + FIELD_PROP_REPEAT = "repeat" + FIELD_PROP_READ_ONLY = "read_only" + FIELD_PROP_LDAP_LOOKUP = "ldap.lookup" + FIELD_PROP_READ_ONLY_EXPRESSION = "read_only_expression" + FIELD_PROP_HIDE_EXPRESSION = "hide_expression" + FIELD_PROP_REQUIRED_EXPRESSION = "required_expression" + FIELD_PROP_LABEL_EXPRESSION = "label_expression" + FIELD_PROP_REPEAT_HIDE_EXPRESSION = "repeat_hide_expression" + FIELD_PROP_VALUE_EXPRESSION = "value_expression" + + # Enum field options + FIELD_PROP_SPREADSHEET_NAME = "spreadsheet.name" + FIELD_PROP_DATA_NAME = "data.name" + FIELD_PROP_VALUE_COLUMN = "value.column" + FIELD_PROP_LABEL_COLUMN = "label.column" + + # Enum field options values pulled from task data + + # Group and Repeat functions + FIELD_PROP_GROUP = "group" + FIELD_PROP_REPLEAT = "repeat" + FIELD_PROP_REPLEAT_TITLE = "repeat_title" + FIELD_PROP_REPLEAT_BUTTON = "repeat_button_label" + + # File specific field properties + FIELD_PROP_DOC_CODE = "doc_code" # to associate a file upload field with a doc code + FIELD_PROP_FILE_DATA = ( + "file_data" # to associate a bit of data with a specific file upload file. 
+ ) + + # Additional properties + FIELD_PROP_ENUM_TYPE = "enum_type" + FIELD_PROP_BOOLEAN_TYPE = "boolean_type" + FIELD_PROP_TEXT_AREA_ROWS = "rows" + FIELD_PROP_TEXT_AREA_COLS = "cols" + FIELD_PROP_TEXT_AREA_AUTO = "autosize" + FIELD_PROP_PLACEHOLDER = "placeholder" + FIELD_PROP_DESCRIPTION = "description" + FIELD_PROP_MARKDOWN_DESCRIPTION = "markdown_description" + FIELD_PROP_HELP = "help" + + ########################################################################## + + def __init__( + self, + id: str, + name: str, + title: str, + type: str, + state: str, + lane: Union[str, None] = None, + form: None = None, + documentation: str = "", + data: Union[dict[str, Any], None] = None, + multi_instance_type: Union[MultiInstanceType, None] = None, + multi_instance_count: str = "", + multi_instance_index: str = "", + process_name: str = "", + properties: Union[dict, None] = None, + process_instance_id: Union[int, None] = None, + process_model_display_name: Union[str, None] = None, + process_group_identifier: Union[str, None] = None, + process_model_identifier: Union[str, None] = None, + form_schema: Union[str, None] = None, + form_ui_schema: Union[str, None] = None, + parent: Optional[str] = None, + ): + """__init__.""" + self.id = id + self.name = name + self.title = title + self.type = type + self.state = state + self.form = form + self.documentation = documentation + self.lane = lane + self.parent = parent + + self.data = data + if self.data is None: + self.data = {} + + self.process_instance_id = process_instance_id + self.process_group_identifier = process_group_identifier + self.process_model_identifier = process_model_identifier + self.process_model_display_name = process_model_display_name + self.form_schema = form_schema + self.form_ui_schema = form_ui_schema + + self.multi_instance_type = ( + multi_instance_type # Some tasks have a repeat behavior. + ) + self.multi_instance_count = ( + multi_instance_count # This is the number of times the task could repeat. 
+ ) + self.multi_instance_index = ( + multi_instance_index # And the index of the currently repeating task. + ) + self.process_name = process_name + + self.properties = properties # Arbitrary extension properties from BPMN editor. + if self.properties is None: + self.properties = {} + + @property + def serialized(self) -> dict[str, Any]: + """Return object data in serializeable format.""" + multi_instance_type = None + if self.multi_instance_type: + MultiInstanceType(self.multi_instance_type) + + return { + "id": self.id, + "name": self.name, + "title": self.title, + "type": self.type, + "state": self.state, + "lane": self.lane, + "form": self.form, + "documentation": self.documentation, + "data": self.data, + "multi_instance_type": multi_instance_type, + "multi_instance_count": self.multi_instance_count, + "multi_instance_index": self.multi_instance_index, + "process_name": self.process_name, + "properties": self.properties, + "process_instance_id": self.process_instance_id, + "process_model_display_name": self.process_model_display_name, + "process_group_identifier": self.process_group_identifier, + "process_model_identifier": self.process_model_identifier, + "form_schema": self.form_schema, + "form_ui_schema": self.form_ui_schema, + "parent": self.parent, + } + + @classmethod + def valid_property_names(cls) -> list[str]: + """Valid_property_names.""" + return [ + value for name, value in vars(cls).items() if name.startswith("FIELD_PROP") + ] + + @classmethod + def valid_field_types(cls) -> list[str]: + """Valid_field_types.""" + return [ + value for name, value in vars(cls).items() if name.startswith("FIELD_TYPE") + ] + + +class OptionSchema(Schema): + """OptionSchema.""" + + class Meta: + """Meta.""" + + fields = ["id", "name", "data"] + + +class ValidationSchema(Schema): + """ValidationSchema.""" + + class Meta: + """Meta.""" + + fields = ["name", "config"] + + +class FormFieldPropertySchema(Schema): + """FormFieldPropertySchema.""" + + class Meta: + 
"""Meta.""" + + fields = ["id", "value"] + + +class FormFieldSchema(Schema): + """FormFieldSchema.""" + + class Meta: + """Meta.""" + + fields = [ + "id", + "type", + "label", + "default_value", + "options", + "validation", + "properties", + "value", + ] + + default_value = marshmallow.fields.String(required=False, allow_none=True) + options = marshmallow.fields.List(marshmallow.fields.Nested(OptionSchema)) + validation = marshmallow.fields.List(marshmallow.fields.Nested(ValidationSchema)) + properties = marshmallow.fields.List( + marshmallow.fields.Nested(FormFieldPropertySchema) + ) + + +# class FormSchema(Schema): +# """FormSchema.""" +# +# key = marshmallow.fields.String(required=True, allow_none=False) +# fields = marshmallow.fields.List(marshmallow.fields.Nested(FormFieldSchema)) + + +class TaskSchema(Schema): + """TaskSchema.""" + + class Meta: + """Meta.""" + + fields = [ + "id", + "name", + "title", + "type", + "state", + "lane", + "form", + "documentation", + "data", + "multi_instance_type", + "multi_instance_count", + "multi_instance_index", + "process_name", + "properties", + "process_instance_id", + "form_schema", + "form_ui_schema", + ] + + multi_instance_type = EnumField(MultiInstanceType) + documentation = marshmallow.fields.String(required=False, allow_none=True) + # form = marshmallow.fields.Nested(FormSchema, required=False, allow_none=True) + title = marshmallow.fields.String(required=False, allow_none=True) + process_name = marshmallow.fields.String(required=False, allow_none=True) + lane = marshmallow.fields.String(required=False, allow_none=True) + + @marshmallow.post_load + def make_task(self, data: dict[str, Any], **kwargs: dict) -> Task: + """Make_task.""" + return Task(**data) diff --git a/src/spiffworkflow_backend/models/task_event.py b/src/spiffworkflow_backend/models/task_event.py new file mode 100644 index 00000000..5bb668b4 --- /dev/null +++ b/src/spiffworkflow_backend/models/task_event.py @@ -0,0 +1,100 @@ +"""Task_event.""" +from 
__future__ import annotations + +import enum +from typing import TYPE_CHECKING + +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from marshmallow import fields +from marshmallow import INCLUDE +from marshmallow import Schema +from sqlalchemy import func + + +if TYPE_CHECKING: + from spiffworkflow_backend.models.process_instance import ( + ProcessInstanceModel, + ) # noqa: F401 + + +class TaskAction(enum.Enum): + """TaskAction.""" + + COMPLETE = "COMPLETE" + TOKEN_RESET = "TOKEN_RESET" # noqa: S105 + HARD_RESET = "HARD_RESET" + SOFT_RESET = "SOFT_RESET" + ASSIGNMENT = "ASSIGNMENT" # Whenever the lane changes between tasks we assign the task to specific user. + + +class TaskEventModel(SpiffworkflowBaseDBModel): + """TaskEventModel.""" + + __tablename__ = "task_event" + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column( + db.Integer, db.ForeignKey("user.id"), nullable=False + ) # In some cases the unique user id may not exist in the db yet. + process_instance_id = db.Column( + db.Integer, db.ForeignKey("process_instance.id"), nullable=False + ) + spec_version = db.Column(db.String(50)) + action = db.Column(db.String(50)) + task_id = db.Column(db.String(50)) + task_name = db.Column(db.String(50)) + task_title = db.Column(db.String(50)) + task_type = db.Column(db.String(50)) + task_state = db.Column(db.String(50)) + task_lane = db.Column(db.String(50)) + form_data = db.Column( + db.JSON + ) # And form data submitted when the task was completed. 
+ mi_type = db.Column(db.String(50)) + mi_count = db.Column(db.Integer) + mi_index = db.Column(db.Integer) + process_name = db.Column(db.String(50)) + date = db.Column(db.DateTime(timezone=True), default=func.now()) + + +class TaskEvent: + """TaskEvent.""" + + def __init__(self, model: TaskEventModel, process_instance: ProcessInstanceModel): + """__init__.""" + self.id = model.id + self.process_instance = process_instance + self.user_id = model.user_id + self.action = model.action + self.task_id = model.task_id + self.task_title = model.task_title + self.task_name = model.task_name + self.task_type = model.task_type + self.task_state = model.task_state + self.task_lane = model.task_lane + self.date = model.date + + +class TaskEventSchema(Schema): + """TaskEventSchema.""" + + process_instance = fields.Nested("ProcessInstanceMetadataSchema", dump_only=True) + task_lane = fields.String(allow_none=True, required=False) + + class Meta: + """Meta.""" + + model = TaskEvent + additional = [ + "id", + "user_id", + "action", + "task_id", + "task_title", + "task_name", + "task_type", + "task_state", + "task_lane", + "date", + ] + unknown = INCLUDE diff --git a/src/spiffworkflow_backend/models/user.py b/src/spiffworkflow_backend/models/user.py new file mode 100644 index 00000000..47711c33 --- /dev/null +++ b/src/spiffworkflow_backend/models/user.py @@ -0,0 +1,117 @@ +"""User.""" +from typing import Any + +import jwt +import marshmallow +from flask import current_app +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from marshmallow import Schema +from sqlalchemy.orm import relationship +from sqlalchemy.orm import validates + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.services.authentication_service import ( + AuthenticationProviderTypes, +) + + +class UserModel(SpiffworkflowBaseDBModel): + """UserModel.""" + + __tablename__ = "user" + 
__table_args__ = (db.UniqueConstraint("service", "service_id", name="service_key"),) + + id = db.Column(db.Integer, primary_key=True) + username = db.Column(db.String(255), nullable=False, unique=True) + uid = db.Column(db.String(50), unique=True) + service = db.Column(db.String(50), nullable=False, unique=False) + service_id = db.Column(db.String(255), nullable=False, unique=False) + name = db.Column(db.String(255)) + email = db.Column(db.String(255)) + + user_group_assignments = relationship("UserGroupAssignmentModel", cascade="delete") # type: ignore + groups = relationship( # type: ignore + GroupModel, + viewonly=True, + secondary="user_group_assignment", + overlaps="user_group_assignments,users", + ) + principal = relationship("PrincipalModel", uselist=False) # type: ignore + + @validates("service") + def validate_service(self, key: str, value: Any) -> str: + """Validate_service.""" + try: + ap_type = getattr(AuthenticationProviderTypes, value, None) + except Exception as e: + raise ValueError(f"invalid service type: {value}") from e + if ap_type is not None: + ap_value: str = ap_type.value + return ap_value + raise ApiError( + error_code="invalid_service", + message=f"Could not validate service with value: {value}", + ) + + def encode_auth_token(self) -> str: + """Generate the Auth Token. 
+ + :return: string + """ + secret_key = current_app.config.get("SECRET_KEY") + if secret_key is None: + raise KeyError("we need current_app.config to have a SECRET_KEY") + + # hours = float(app.config['TOKEN_AUTH_TTL_HOURS']) + payload = { + # 'exp': datetime.datetime.utcnow() + datetime.timedelta(hours=hours, minutes=0, seconds=0), + # 'iat': datetime.datetime.utcnow(), + "sub": f"service:{self.service}::service_id:{self.service_id}", + "token_type": "internal", + } + return jwt.encode( + payload, + secret_key, + algorithm="HS256", + ) + + def is_admin(self) -> bool: + """Is_admin.""" + return True + + # @classmethod + # def from_open_id_user_info(cls, user_info: dict) -> Any: + # """From_open_id_user_info.""" + # instance = cls() + # instance.service = "keycloak" + # instance.service_id = user_info["sub"] + # instance.name = user_info["preferred_username"] + # instance.username = user_info["sub"] + # + # return instance + + +class UserModelSchema(Schema): + """UserModelSchema.""" + + class Meta: + """Meta.""" + + model = UserModel + # load_instance = True + # include_relationships = False + # exclude = ("UserGroupAssignment",) + + id = marshmallow.fields.String(required=True) + username = marshmallow.fields.String(required=True) + + +class AdminSessionModel(SpiffworkflowBaseDBModel): + """AdminSessionModel.""" + + __tablename__ = "admin_session" + id = db.Column(db.Integer, primary_key=True) + token = db.Column(db.String(50), unique=True) + admin_impersonate_uid = db.Column(db.String(50)) diff --git a/src/spiffworkflow_backend/models/user_group_assignment.py b/src/spiffworkflow_backend/models/user_group_assignment.py new file mode 100644 index 00000000..fa5b620c --- /dev/null +++ b/src/spiffworkflow_backend/models/user_group_assignment.py @@ -0,0 +1,24 @@ +"""UserGroupAssignment.""" +from flask_bpmn.models.db import db +from flask_bpmn.models.db import SpiffworkflowBaseDBModel +from sqlalchemy import ForeignKey +from sqlalchemy.orm import relationship + +from 
spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.user import UserModel + + +class UserGroupAssignmentModel(SpiffworkflowBaseDBModel): + """UserGroupAssignmentModel.""" + + __tablename__ = "user_group_assignment" + __table_args__ = ( + db.UniqueConstraint("user_id", "group_id", name="user_group_assignment_unique"), + ) + + id = db.Column(db.Integer, primary_key=True) + user_id = db.Column(ForeignKey(UserModel.id), nullable=False) + group_id = db.Column(ForeignKey(GroupModel.id), nullable=False) + + group = relationship("GroupModel", overlaps="groups,user_group_assignments,users") # type: ignore + user = relationship("UserModel", overlaps="groups,user_group_assignments,users") # type: ignore diff --git a/src/spiffworkflow_backend/py.typed b/src/spiffworkflow_backend/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/src/spiffworkflow_backend/routes/__init__.py b/src/spiffworkflow_backend/routes/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/src/spiffworkflow_backend/routes/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/__init__.py b/src/spiffworkflow_backend/routes/admin_blueprint/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py b/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py new file mode 100644 index 00000000..8a74c547 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/admin_blueprint.py @@ -0,0 +1,186 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +from typing import Union + +from flask import Blueprint +from flask import flash +from flask import redirect +from flask import render_template +from flask import request +from flask import url_for 
from werkzeug.wrappers.response import Response

from spiffworkflow_backend.services.process_instance_processor import (
    ProcessInstanceProcessor,
)
from spiffworkflow_backend.services.process_instance_service import (
    ProcessInstanceService,
)
from spiffworkflow_backend.services.process_model_service import ProcessModelService
from spiffworkflow_backend.services.spec_file_service import SpecFileService
from spiffworkflow_backend.services.user_service import UserService

admin_blueprint = Blueprint(
    "admin", __name__, template_folder="templates", static_folder="static"
)

ALLOWED_BPMN_EXTENSIONS = {"bpmn", "dmn"}


@admin_blueprint.route("/process-groups", methods=["GET"])
def process_groups_list() -> str:
    """Render the list of all process groups."""
    process_groups = ProcessModelService().get_process_groups()
    return render_template("process_groups_list.html", process_groups=process_groups)


# BUGFIX: the route variable placeholders (<...>) were missing from these route
# strings although the handlers take the matching parameters; without them Flask
# would never pass process_group_id/process_model_id/file_name to the views.
@admin_blueprint.route("/process-groups/<process_group_id>", methods=["GET"])
def process_group_show(process_group_id: str) -> str:
    """Render a single process group."""
    process_group = ProcessModelService().get_process_group(process_group_id)
    return render_template("process_group_show.html", process_group=process_group)


@admin_blueprint.route("/process-models/<process_model_id>", methods=["GET"])
def process_model_show(process_model_id: str) -> Union[str, Response]:
    """Render a process model with its primary BPMN file, or bounce to the group list."""
    process_model = ProcessModelService().get_process_model(process_model_id)
    files = SpecFileService.get_files(process_model, extension_filter="bpmn")
    current_file_name = process_model.primary_file_name
    if current_file_name is None:
        flash("No primary_file_name", "error")
        return redirect(url_for("admin.process_groups_list"))
    bpmn_xml = SpecFileService.get_data(process_model, current_file_name)
    return render_template(
        "process_model_show.html",
        process_model=process_model,
        bpmn_xml=bpmn_xml,
        files=files,
        current_file_name=current_file_name,
    )


@admin_blueprint.route(
    "/process-models/<process_model_id>/<file_name>", methods=["GET"]
)
def process_model_show_file(process_model_id: str, file_name: str) -> str:
    """Render a process model with a specific file selected."""
    process_model = ProcessModelService().get_process_model(process_model_id)
    bpmn_xml = SpecFileService.get_data(process_model, file_name)
    files = SpecFileService.get_files(process_model, extension_filter="bpmn")
    return render_template(
        "process_model_show.html",
        process_model=process_model,
        bpmn_xml=bpmn_xml,
        files=files,
        current_file_name=file_name,
    )


@admin_blueprint.route(
    "/process-models/<process_model_id>/upload-file", methods=["POST"]
)
def process_model_upload_file(process_model_id: str) -> Response:
    """Accept a BPMN/DMN file upload for a process model and save it."""
    process_model_service = ProcessModelService()
    process_model = process_model_service.get_process_model(process_model_id)

    if "file" not in request.files:
        flash("No file part", "error")
    request_file = request.files["file"]
    # If the user does not select a file, the browser submits an
    # empty file without a filename.
+ if request_file.filename == "" or request_file.filename is None: + flash("No selected file", "error") + else: + if request_file and _allowed_file(request_file.filename): + if request_file.filename is not None: + SpecFileService.add_file( + process_model, request_file.filename, request_file.stream.read() + ) + process_model_service.save_process_model(process_model) + + return redirect( + url_for("admin.process_model_show", process_model_id=process_model.id) + ) + + +@admin_blueprint.route( + "/process_models//edit/", methods=["GET"] +) +def process_model_edit(process_model_id: str, file_name: str) -> str: + """Edit_bpmn.""" + process_model = ProcessModelService().get_process_model(process_model_id) + bpmn_xml = SpecFileService.get_data(process_model, file_name) + + return render_template( + "process_model_edit.html", + bpmn_xml=bpmn_xml.decode("utf-8"), + process_model=process_model, + file_name=file_name, + ) + + +@admin_blueprint.route( + "/process-models//save/", methods=["POST"] +) +def process_model_save(process_model_id: str, file_name: str) -> Union[str, Response]: + """Process_model_save.""" + process_model = ProcessModelService().get_process_model(process_model_id) + SpecFileService.update_file(process_model, file_name, request.get_data()) + if process_model.primary_file_name is None: + flash("No primary_file_name", "error") + return redirect(url_for("admin.process_groups_list")) + bpmn_xml = SpecFileService.get_data(process_model, process_model.primary_file_name) + return render_template( + "process_model_edit.html", + bpmn_xml=bpmn_xml.decode("utf-8"), + process_model=process_model, + file_name=file_name, + ) + + +@admin_blueprint.route("/process-models//run", methods=["GET"]) +def process_model_run(process_model_id: str) -> Union[str, Response]: + """Process_model_run.""" + user = UserService.create_user("internal", "Mr. Test", username="Mr. 
Test") + process_instance = ProcessInstanceService.create_process_instance( + process_model_id, user + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps() + result = processor.get_data() + + process_model = ProcessModelService().get_process_model(process_model_id) + files = SpecFileService.get_files(process_model, extension_filter="bpmn") + current_file_name = process_model.primary_file_name + if current_file_name is None: + flash("No primary_file_name", "error") + return redirect(url_for("admin.process_groups_list")) + bpmn_xml = SpecFileService.get_data(process_model, current_file_name) + + return render_template( + "process_model_show.html", + process_model=process_model, + bpmn_xml=bpmn_xml, + result=result, + files=files, + current_file_name=current_file_name, + ) + + +# def _find_or_create_user(username: str = "test_user1") -> Any: +# """Find_or_create_user.""" +# user = UserModel.query.filter_by(username=username).first() +# if user is None: +# user = UserModel(username=username) +# db.session.add(user) +# db.session.commit() +# return user + + +def _allowed_file(filename: str) -> bool: + """_allowed_file.""" + return ( + "." 
in filename + and filename.rsplit(".", 1)[1].lower() in ALLOWED_BPMN_EXTENSIONS + ) diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/static/app.js b/src/spiffworkflow_backend/routes/admin_blueprint/static/app.js new file mode 100644 index 00000000..5fb392dc --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/static/app.js @@ -0,0 +1,26 @@ +import BpmnViewer from "bpmn-js"; + +var viewer = new BpmnViewer({ + container: "#canvas", +}); + +viewer + .importXML(pizzaDiagram) + .then(function (result) { + const { warnings } = result; + + console.log("success !", warnings); + + viewer.get("canvas").zoom("fit-viewport"); + }) + .catch(function (err) { + const { warnings, message } = err; + + console.log("something went wrong:", warnings, message); + }); + +export function sayHello() { + console.log("hello"); +} + +window.foo = "bar"; diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/static/package-lock.json b/src/spiffworkflow_backend/routes/admin_blueprint/static/package-lock.json new file mode 100644 index 00000000..b7367928 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/static/package-lock.json @@ -0,0 +1,3172 @@ +{ + "name": "spiffworkflow-backend", + "version": "0.0.0", + "lockfileVersion": 2, + "requires": true, + "packages": { + "": { + "name": "spiffworkflow-backend", + "version": "0.0.0", + "license": "ISC", + "dependencies": { + "bpmn-js": "^9.1.0", + "bpmn-js-properties-panel": "^1.1.1" + }, + "devDependencies": { + "webpack-cli": "^4.9.2" + } + }, + "node_modules/@bpmn-io/element-templates-validator": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@bpmn-io/element-templates-validator/-/element-templates-validator-0.8.1.tgz", + "integrity": "sha512-cJMVYXxQAkntBZ2Brr76AI8D8xXWNS9GI8YM0h5kjkTihfYC+7FfN744RM1RVx8zJqTzOMf8nkS37t95Re4wvA==", + "dependencies": { + "@camunda/element-templates-json-schema": "^0.9.1", + "@camunda/zeebe-element-templates-json-schema": "^0.4.1", + 
"json-source-map": "^0.6.1", + "min-dash": "^3.8.1" + } + }, + "node_modules/@bpmn-io/extract-process-variables": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/@bpmn-io/extract-process-variables/-/extract-process-variables-0.4.5.tgz", + "integrity": "sha512-LtHx5b9xqS8avRLrq/uTlKhWzMeV3bWQKIdDic2bdo5n9roitX13GRb01u2S0hSsKDWEhXQtydFYN2b6G7bqfw==", + "dependencies": { + "min-dash": "^3.8.1" + } + }, + "node_modules/@bpmn-io/properties-panel": { + "version": "0.13.2", + "resolved": "https://registry.npmjs.org/@bpmn-io/properties-panel/-/properties-panel-0.13.2.tgz", + "integrity": "sha512-S0FUjXApQ8V1tW3TkrmuxXkfiMv6WPdeKkc7DD9tzKTHHnT634GY4pafKPPknxYsLGthUiJghqWbuQahqQjz+g==", + "peer": true, + "dependencies": { + "classnames": "^2.3.1", + "diagram-js": "^8.1.2", + "min-dash": "^3.7.0", + "min-dom": "^3.1.3" + } + }, + "node_modules/@camunda/element-templates-json-schema": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@camunda/element-templates-json-schema/-/element-templates-json-schema-0.9.1.tgz", + "integrity": "sha512-hqAOdwf0EdEDughDAfsOWtQQaKx/7m3srVbrUfVZy2Nh2mUc3hyBbkODO4tkMjTKv6I4bw36cyMchzjIEaz4CA==" + }, + "node_modules/@camunda/zeebe-element-templates-json-schema": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@camunda/zeebe-element-templates-json-schema/-/zeebe-element-templates-json-schema-0.4.1.tgz", + "integrity": "sha512-FAe7auxm+IJiRB0W68VOjBxih6aOJB/0K3nvjO0TtRdyS+a2X1DIDBDtsQO6g+pJDtW6oij0kC1LiBUvm6FmLw==" + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", + "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz", + 
"integrity": "sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/set-array": "^1.0.0", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz", + "integrity": "sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.1.tgz", + "integrity": "sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.13", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz", + "integrity": "sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==", + "dev": true, + "peer": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz", + "integrity": 
"sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "node_modules/@philippfromme/moddle-helpers": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@philippfromme/moddle-helpers/-/moddle-helpers-0.4.1.tgz", + "integrity": "sha512-6ST9WdafFGh/vxeQP4pwFkcGcqIQJ0mtQSrXwoetTLigCXCcP4UXdXxjcIEwWKoXuexXV/2CgFS0CPENSVcwdg==", + "dependencies": { + "min-dash": "^3.8.1" + } + }, + "node_modules/@types/eslint": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.2.tgz", + "integrity": "sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA==", + "dev": true, + "peer": true, + "dependencies": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "node_modules/@types/eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", + "dev": true, + "peer": true, + "dependencies": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "node_modules/@types/estree": { + "version": "0.0.51", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", + "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==", + "dev": true, + "peer": true + }, + "node_modules/@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true, + "peer": true + }, + "node_modules/@types/node": { + "version": "17.0.38", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-17.0.38.tgz", + "integrity": "sha512-5jY9RhV7c0Z4Jy09G+NIDTsCZ5G0L5n+Z+p+Y7t5VJHM30bgwzSjVtlcBxqAj+6L/swIlvtOSzr8rBk/aNyV2g==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/ast": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": "sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", + "dev": true, + "peer": true, + "dependencies": { + 
"@webassemblyjs/floating-point-hex-parser": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==", + "dev": true, + "peer": true + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", + "dev": true, + "peer": true, + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", + "dev": true, + "peer": true, + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": "sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==", + "dev": true, + "peer": 
true + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/helper-wasm-section": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-opt": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "@webassemblyjs/wast-printer": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": "sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", + "dev": true, + 
"peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", + "dev": true, + "peer": true, + "dependencies": { + "@webassemblyjs/ast": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webpack-cli/configtest": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": "sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", + "dev": true, + "peerDependencies": { + "webpack": "4.x.x || 5.x.x", + "webpack-cli": "4.x.x" + } + }, + "node_modules/@webpack-cli/info": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", + "dev": true, + "dependencies": { + "envinfo": "^7.7.3" + }, + "peerDependencies": { + "webpack-cli": "4.x.x" + } + }, + "node_modules/@webpack-cli/serve": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", + "dev": true, + "peerDependencies": { + "webpack-cli": "4.x.x" + }, + "peerDependenciesMeta": { + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": 
"sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "peer": true + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "resolved": "https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "peer": true + }, + "node_modules/acorn": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", + "dev": true, + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-assertions": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", + "dev": true, + "peer": true, + "peerDependencies": { + "acorn": "^8" + } + }, + "node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "peer": true, + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/array-move": { + "version": 
"3.0.1", + "resolved": "https://registry.npmjs.org/array-move/-/array-move-3.0.1.tgz", + "integrity": "sha512-H3Of6NIn2nNU1gsVDqDnYKY/LCdWvCMMOWifNGhKcVQgiZ6nOek39aESOvro6zmueP07exSl93YLvkN4fZOkSg==", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bpmn-js": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/bpmn-js/-/bpmn-js-9.1.0.tgz", + "integrity": "sha512-LFrNVt15hCvTJ7RrdshJeNYyLPAJQKC8sBCXvnFoLuwfuBwNbxkDtaripzrkgCj7X5wyduh+ogZ4KaE5xwsTbA==", + "dependencies": { + "bpmn-moddle": "^7.1.2", + "css.escape": "^1.5.1", + "diagram-js": "^8.3.0", + "diagram-js-direct-editing": "^1.6.3", + "ids": "^1.0.0", + "inherits": "^2.0.4", + "min-dash": "^3.5.2", + "min-dom": "^3.2.0", + "object-refs": "^0.3.0", + "tiny-svg": "^2.2.2" + } + }, + "node_modules/bpmn-js-properties-panel": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/bpmn-js-properties-panel/-/bpmn-js-properties-panel-1.1.1.tgz", + "integrity": "sha512-FIxg3yIeKtYzDFLqI9GzhYBYDh12WtyvHFOSl4bSlDUQJIs31Xe7yobECEZNQ+00XtmtKUK96ikHUti5YydgDQ==", + "dependencies": { + "@bpmn-io/element-templates-validator": "^0.8.1", + "@bpmn-io/extract-process-variables": "^0.4.5", + "@philippfromme/moddle-helpers": "^0.4.1", + "array-move": "^3.0.1", + "classnames": "^2.3.1", + "ids": "^1.0.0", + "min-dash": "^3.8.1", + "min-dom": "^3.1.3", + "preact-markup": "^2.1.1", + "semver-compare": "^1.0.0" + }, + "peerDependencies": { + "@bpmn-io/properties-panel": "0.13.x", + "bpmn-js": "8.x || 9.x", + "camunda-bpmn-js-behaviors": "0.1.x", + "diagram-js": "7.x || 8.x" + } + }, + "node_modules/bpmn-moddle": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/bpmn-moddle/-/bpmn-moddle-7.1.2.tgz", + "integrity": "sha512-Sax4LokRCTqlg26njjULN3ZGtCmwH5gZVUZTRF0jwJk+YpMQhSfSoUECxjNv8OROoLxu8Z+MjdOHIxgvJf7KwA==", + "dependencies": { + "min-dash": "^3.5.2", + "moddle": "^5.0.2", + "moddle-xml": "^9.0.5" + } + }, + 
"node_modules/browserslist": { + "version": "4.20.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz", + "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + } + ], + "peer": true, + "dependencies": { + "caniuse-lite": "^1.0.30001332", + "electron-to-chromium": "^1.4.118", + "escalade": "^3.1.1", + "node-releases": "^2.0.3", + "picocolors": "^1.0.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "peer": true + }, + "node_modules/camunda-bpmn-js-behaviors": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/camunda-bpmn-js-behaviors/-/camunda-bpmn-js-behaviors-0.1.0.tgz", + "integrity": "sha512-YJs4kAkRhZ1GyE4VVPTJlZ/GjuDHnSGvzuLTa87HIfpEonVMHsmRrQL0Gr/bkSVcQaA4s6XB0XKV6rz32LHNUA==", + "peer": true, + "dependencies": { + "ids": "^1.0.0", + "min-dash": "^3.7.0" + }, + "peerDependencies": { + "bpmn-js": "9.x", + "camunda-bpmn-moddle": "6.x", + "zeebe-bpmn-moddle": "0.12.x" + } + }, + "node_modules/camunda-bpmn-moddle": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/camunda-bpmn-moddle/-/camunda-bpmn-moddle-6.1.2.tgz", + "integrity": "sha512-DfhOTeq8oN01cB5sLE6Rq34/9xGD15/Y14pEM+YBIjgvV6Rclh+BgIa/2aRMm8An4Kc/itm2tECYiDr8p/FyTQ==", + "peer": true, + "dependencies": { + "min-dash": "^3.8.1" + }, + "peerDependencies": { + "bpmn-js": "^6.x || ^7.x || ^8.x || ^9.x", + "diagram-js": "^5.x || ^6.x 
|| ^7.x || ^8.x" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001344", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001344.tgz", + "integrity": "sha512-0ZFjnlCaXNOAYcV7i+TtdKBp0L/3XEU2MF/x6Du1lrh+SRX4IfzIVL4HNJg5pB2PmFb8rszIGyOvsZnqqRoc2g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + } + ], + "peer": true + }, + "node_modules/chrome-trace-event": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.0" + } + }, + "node_modules/classnames": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.1.tgz", + "integrity": "sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA==" + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": "sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", + "dev": true + }, + "node_modules/commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": 
"sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "peer": true + }, + "node_modules/component-event": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/component-event/-/component-event-0.1.4.tgz", + "integrity": "sha512-GMwOG8MnUHP1l8DZx1ztFO0SJTFnIzZnBDkXAj8RM2ntV2A6ALlDxgbMY1Fvxlg6WPQ+5IM/a6vg4PEYbjg/Rw==" + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s=" + }, + "node_modules/diagram-js": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/diagram-js/-/diagram-js-8.5.0.tgz", + "integrity": "sha512-UHA/Zfs7kG22M9wXAifAyPb2OZ4lG4lFi0CZ0GC6/lXmOsSHwHVZ1s/h9UqaIXnzIKW8SnZoP3Rwqel1ZhZLzg==", + "dependencies": { + "css.escape": "^1.5.1", + "didi": "^8.0.0", + "hammerjs": "^2.0.1", + "inherits-browser": "0.0.1", + "min-dash": "^3.5.2", + "min-dom": "^3.2.0", + "object-refs": "^0.3.0", + "path-intersection": "^2.2.1", + "tiny-svg": "^2.2.2" + } + }, + "node_modules/diagram-js-direct-editing": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/diagram-js-direct-editing/-/diagram-js-direct-editing-1.7.0.tgz", + "integrity": "sha512-ZfTLF4hdWr7NSoruwxGvVmu7aVaUjWRXjwgK5dx58LbXAsNjBS3Ap7zjVuGxjWUpCZ/MMwyZ00lpTHPH2P7BFQ==", + "dependencies": { + "min-dash": "^3.5.2", + "min-dom": "^3.1.3" + }, + "peerDependencies": { + "diagram-js": "^0.x || ^1.x || ^2.x || ^3.x || ^4.x || ^5.x || ^6.x || ^7.x || ^8.x" + } + }, + "node_modules/didi": { 
+ "version": "8.0.0", + "resolved": "https://registry.npmjs.org/didi/-/didi-8.0.0.tgz", + "integrity": "sha512-PwqTBaYzzfJSyxvpXPcTWF6nDdCKx2mFAU5eup1ZSb5wbaAS9a/HiKdtcAUdie/VMLHoFI50jkYZcA+bhUOugw==" + }, + "node_modules/domify": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/domify/-/domify-1.4.1.tgz", + "integrity": "sha512-x18nuiDHMCZGXr4KJSRMf/TWYtiaRo6RX8KN9fEbW54mvbQ6pieUuerC2ahBg+kEp1wycFj8MPUI0WkIOw5E9w==" + }, + "node_modules/electron-to-chromium": { + "version": "1.4.143", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.143.tgz", + "integrity": "sha512-2hIgvu0+pDfXIqmVmV5X6iwMjQ2KxDsWKwM+oI1fABEOy/Dqmll0QJRmIQ3rm+XaoUa/qKrmy5h7LSTFQ6Ldzg==", + "dev": true, + "peer": true + }, + "node_modules/enhanced-resolve": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz", + "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==", + "dev": true, + "peer": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/envinfo": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", + "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "dev": true, + "bin": { + "envinfo": "dist/cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/es-module-lexer": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", + "dev": true, + "peer": true + }, + "node_modules/escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": 
"sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "peer": true, + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "peer": true, + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "peer": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "peer": true, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "peer": true + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "peer": true + }, + "node_modules/fastest-levenshtein": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz", + "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==", + "dev": true + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", 
+ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true, + "peer": true + }, + "node_modules/graceful-fs": { + "version": "4.2.10", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true, + "peer": true + }, + "node_modules/hammerjs": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/hammerjs/-/hammerjs-2.0.8.tgz", + "integrity": "sha1-BO93hiz/K7edMPdpIJWTAiK/YPE=", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "dependencies": { + "function-bind": "^1.1.1" + }, + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/human-signals": { + "version": 
"2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true, + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/ids": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ids/-/ids-1.0.0.tgz", + "integrity": "sha512-Zvtq1xUto4LttpstyOlFum8lKx+i1OmRfg+6A9drFS9iSZsDPMHG4Sof/qwNR4kCU7jBeWFPrY2ocHxiz7cCRw==" + }, + "node_modules/import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/inherits-browser": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/inherits-browser/-/inherits-browser-0.0.1.tgz", + "integrity": "sha512-kaDA3DkCdCpvrKIo/1T/3yVn+qpFUHLjYtSHmTYewb+QfjfaQy6FGQ7LwBu7st0tG9UvYad/XAlqQmdIh6CICw==" + }, + "node_modules/interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "dev": true, + "engines": { + "node": ">= 0.10" + } + }, + 
"node_modules/is-core-module": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz", + "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==", + "dev": true, + "dependencies": { + "has": "^1.0.3" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": "sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/node": "*", + 
"merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "peer": true + }, + "node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "peer": true + }, + "node_modules/json-source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/json-source-map/-/json-source-map-0.6.1.tgz", + "integrity": "sha512-1QoztHPsMQqhDq0hlXY5ZqcEdUzxQEIxgFkKl4WUp2pgShObl+9ovi4kRh2TfvAfxAoHOJ9vIMEqk3k4iex7tg==" + }, + "node_modules/kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/loader-runner": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6.11.5" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/matches-selector": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/matches-selector/-/matches-selector-1.2.0.tgz", + "integrity": "sha512-c4vLwYWyl+Ji+U43eU/G5FwxWd4ZH0ePUsFs5y0uwD9HUEFBXUQ1zUUan+78IpRD+y4pUfG0nAzNM292K7ItvA==" + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "peer": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "peer": true, + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/min-dash": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/min-dash/-/min-dash-3.8.1.tgz", + "integrity": "sha512-evumdlmIlg9mbRVPbC4F5FuRhNmcMS5pvuBUbqb1G9v09Ro0ImPEgz5n3khir83lFok1inKqVDjnKEg3GpDxQg==" + }, + "node_modules/min-dom": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/min-dom/-/min-dom-3.2.1.tgz", + "integrity": "sha512-v6YCmnDzxk4rRJntWTUiwggLupPw/8ZSRqUq0PDaBwVZEO/wYzCH4SKVBV+KkEvf3u0XaWHly5JEosPtqRATZA==", + 
"dependencies": { + "component-event": "^0.1.4", + "domify": "^1.3.1", + "indexof": "0.0.1", + "matches-selector": "^1.2.0", + "min-dash": "^3.8.1" + } + }, + "node_modules/moddle": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/moddle/-/moddle-5.0.3.tgz", + "integrity": "sha512-EjnQkSaZClHMsM3H/guBy9h7AmHUICH0Pf8H1VnnYGUXy2hkZQU4gqEAyHywJzMRAhYX87pXjH2NtyigF7evkA==", + "dependencies": { + "min-dash": "^3.0.0" + } + }, + "node_modules/moddle-xml": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/moddle-xml/-/moddle-xml-9.0.5.tgz", + "integrity": "sha512-1t9N35ZMQZTYZmRDoh1mBVd0XwLB34BkBywNJ0+YlLLYxaDBjFR/I+fqwsY746ayYPBz6yNRg8JpLyFgNF+eHg==", + "dependencies": { + "min-dash": "^3.5.2", + "moddle": "^5.0.2", + "saxen": "^8.1.2" + } + }, + "node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "peer": true + }, + "node_modules/node-releases": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.5.tgz", + "integrity": "sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q==", + "dev": true, + "peer": true + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/object-refs": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/object-refs/-/object-refs-0.3.0.tgz", + "integrity": "sha512-eP0ywuoWOaDoiake/6kTJlPJhs+k0qNm4nYRzXLNHj6vh+5M3i9R1epJTdxIPGlhWc4fNRQ7a6XJNCX+/L4FOQ==" + }, + "node_modules/onetime": { + "version": 
"5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-intersection": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/path-intersection/-/path-intersection-2.2.1.tgz", + "integrity": "sha512-9u8xvMcSfuOiStv9bPdnRJQhGQXLKurew94n4GPQCdH1nj9QKC9ObbNoIpiRq8skiOBxKkt277PgOoFgAt3/rA==" + }, + 
"node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true, + "peer": true + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/preact": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.7.3.tgz", + "integrity": "sha512-giqJXP8VbtA1tyGa3f1n9wiN7PrHtONrDyE3T+ifjr/tTkg+2N4d/6sjC9WyJKv8wM7rOYDveqy5ZoFmYlwo4w==", + "peer": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, + "node_modules/preact-markup": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/preact-markup/-/preact-markup-2.1.1.tgz", + "integrity": "sha512-8JL2p36mzK8XkspOyhBxUSPjYwMxDM0L5BWBZWxsZMVW8WsGQrYQDgVuDKkRspt2hwrle+Cxr/053hpc9BJwfw==", + "peerDependencies": { + "preact": ">=10" + } + }, + "node_modules/punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": 
"sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "peer": true, + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/rechoir": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz", + "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==", + "dev": true, + "dependencies": { + "resolve": "^1.9.0" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, 
+ "node_modules/safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "peer": true + }, + "node_modules/saxen": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/saxen/-/saxen-8.1.2.tgz", + "integrity": "sha512-xUOiiFbc3Ow7p8KMxwsGICPx46ZQvy3+qfNVhrkwfz3Vvq45eGt98Ft5IQaA1R/7Tb5B5MKh9fUR9x3c3nDTxw==" + }, + "node_modules/schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "peer": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/semver-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", + "integrity": "sha1-De4hahyUGrN+nvsXiPavxf9VN/w=" + }, + "node_modules/serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "peer": true, + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": 
"sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "peer": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "peer": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "peer": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "peer": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/terser": { + "version": "5.14.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.0.tgz", + "integrity": "sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": 
"5.3.1", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz", + "integrity": "sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==", + "dev": true, + "peer": true, + "dependencies": { + "jest-worker": "^27.4.5", + "schema-utils": "^3.1.1", + "serialize-javascript": "^6.0.0", + "source-map": "^0.6.1", + "terser": "^5.7.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/tiny-svg": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/tiny-svg/-/tiny-svg-2.2.3.tgz", + "integrity": "sha512-u5KGg889pD1W2c9GlLrTnAGzIkAO00/VXZGyzeiGHw+b9er8McLO0SnhxPQQDwDqFO0MrJ825AEsRUoTiDZFuQ==" + }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "peer": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/watchpack": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", + "dev": true, + "peer": true, + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack": { + "version": "5.72.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.72.1.tgz", + "integrity": "sha512-dXG5zXCLspQR4krZVR6QgajnZOjW2K/djHvdcRaDQvsjV9z9vaW6+ja5dZOYbqBBjF6kGXka/2ZyxNdc+8Jung==", + "dev": true, + "peer": true, + 
"dependencies": { + "@types/eslint-scope": "^3.7.3", + "@types/estree": "^0.0.51", + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/wasm-edit": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "acorn": "^8.4.1", + "acorn-import-assertions": "^1.7.6", + "browserslist": "^4.14.5", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.9.3", + "es-module-lexer": "^0.9.0", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.9", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.1.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.1.3", + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-cli": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", + "dev": true, + "dependencies": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", + "commander": "^7.0.0", + "execa": "^5.0.0", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^2.2.0", + "rechoir": "^0.7.0", + "webpack-merge": "^5.7.3" + }, + "bin": { + "webpack-cli": "bin/cli.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "peerDependencies": { + "webpack": "4.x.x || 5.x.x" + }, + "peerDependenciesMeta": { + "@webpack-cli/generators": { + "optional": true + }, + "@webpack-cli/migrate": { + "optional": true + }, + "webpack-bundle-analyzer": { + "optional": 
true + }, + "webpack-dev-server": { + "optional": true + } + } + }, + "node_modules/webpack-cli/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "dependencies": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "peer": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wildcard": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, + "node_modules/zeebe-bpmn-moddle": { + "version": "0.12.1", + "resolved": "https://registry.npmjs.org/zeebe-bpmn-moddle/-/zeebe-bpmn-moddle-0.12.1.tgz", + "integrity": 
"sha512-rnUoK+A/gzinOGUlmJKeXmnjorgEm4yf7qgeaowXGZOFtFqtM2lvJ7XYTJNsKClaNfFG245JtKHH3G/caJxE6g==", + "peer": true + } + }, + "dependencies": { + "@bpmn-io/element-templates-validator": { + "version": "0.8.1", + "resolved": "https://registry.npmjs.org/@bpmn-io/element-templates-validator/-/element-templates-validator-0.8.1.tgz", + "integrity": "sha512-cJMVYXxQAkntBZ2Brr76AI8D8xXWNS9GI8YM0h5kjkTihfYC+7FfN744RM1RVx8zJqTzOMf8nkS37t95Re4wvA==", + "requires": { + "@camunda/element-templates-json-schema": "^0.9.1", + "@camunda/zeebe-element-templates-json-schema": "^0.4.1", + "json-source-map": "^0.6.1", + "min-dash": "^3.8.1" + } + }, + "@bpmn-io/extract-process-variables": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/@bpmn-io/extract-process-variables/-/extract-process-variables-0.4.5.tgz", + "integrity": "sha512-LtHx5b9xqS8avRLrq/uTlKhWzMeV3bWQKIdDic2bdo5n9roitX13GRb01u2S0hSsKDWEhXQtydFYN2b6G7bqfw==", + "requires": { + "min-dash": "^3.8.1" + } + }, + "@bpmn-io/properties-panel": { + "version": "0.13.2", + "resolved": "https://registry.npmjs.org/@bpmn-io/properties-panel/-/properties-panel-0.13.2.tgz", + "integrity": "sha512-S0FUjXApQ8V1tW3TkrmuxXkfiMv6WPdeKkc7DD9tzKTHHnT634GY4pafKPPknxYsLGthUiJghqWbuQahqQjz+g==", + "peer": true, + "requires": { + "classnames": "^2.3.1", + "diagram-js": "^8.1.2", + "min-dash": "^3.7.0", + "min-dom": "^3.1.3" + } + }, + "@camunda/element-templates-json-schema": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@camunda/element-templates-json-schema/-/element-templates-json-schema-0.9.1.tgz", + "integrity": "sha512-hqAOdwf0EdEDughDAfsOWtQQaKx/7m3srVbrUfVZy2Nh2mUc3hyBbkODO4tkMjTKv6I4bw36cyMchzjIEaz4CA==" + }, + "@camunda/zeebe-element-templates-json-schema": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@camunda/zeebe-element-templates-json-schema/-/zeebe-element-templates-json-schema-0.4.1.tgz", + "integrity": 
"sha512-FAe7auxm+IJiRB0W68VOjBxih6aOJB/0K3nvjO0TtRdyS+a2X1DIDBDtsQO6g+pJDtW6oij0kC1LiBUvm6FmLw==" + }, + "@discoveryjs/json-ext": { + "version": "0.5.7", + "resolved": "https://registry.npmjs.org/@discoveryjs/json-ext/-/json-ext-0.5.7.tgz", + "integrity": "sha512-dBVuXR082gk3jsFp7Rd/JI4kytwGHecnCoTtXFb7DB6CNHp4rg5k1bhg0nWdLGLnOV71lmDzGQaLMy8iPLY0pw==", + "dev": true + }, + "@jridgewell/gen-mapping": { + "version": "0.3.1", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.1.tgz", + "integrity": "sha512-GcHwniMlA2z+WFPWuY8lp3fsza0I8xPFMWL5+n8LYyP6PSvPrXf4+n8stDHZY2DM0zy9sVkRDy1jDI4XGzYVqg==", + "dev": true, + "peer": true, + "requires": { + "@jridgewell/set-array": "^1.0.0", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@jridgewell/resolve-uri": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.0.7.tgz", + "integrity": "sha512-8cXDaBBHOr2pQ7j77Y6Vp5VDT2sIqWyWQ56TjEq4ih/a4iST3dItRe8Q9fp0rrIl9DoKhWQtUQz/YpOxLkXbNA==", + "dev": true, + "peer": true + }, + "@jridgewell/set-array": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.1.1.tgz", + "integrity": "sha512-Ct5MqZkLGEXTVmQYbGtx9SVqD2fqwvdubdps5D3djjAkgkKwT918VNOz65pEHFaYTeWcukmJmH5SwsA9Tn2ObQ==", + "dev": true, + "peer": true + }, + "@jridgewell/source-map": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/@jridgewell/source-map/-/source-map-0.3.2.tgz", + "integrity": "sha512-m7O9o2uR8k2ObDysZYzdfhb08VuEml5oWGiosa1VdaPZ/A6QyPkAJuwN0Q1lhULOf6B7MtQmHENS743hWtCrgw==", + "dev": true, + "peer": true, + "requires": { + "@jridgewell/gen-mapping": "^0.3.0", + "@jridgewell/trace-mapping": "^0.3.9" + } + }, + "@jridgewell/sourcemap-codec": { + "version": "1.4.13", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.13.tgz", + "integrity": 
"sha512-GryiOJmNcWbovBxTfZSF71V/mXbgcV3MewDe3kIMCLyIh5e7SKAeUZs+rMnJ8jkMolZ/4/VsdBmMrw3l+VdZ3w==", + "dev": true, + "peer": true + }, + "@jridgewell/trace-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.13.tgz", + "integrity": "sha512-o1xbKhp9qnIAoHJSWd6KlCZfqslL4valSF81H8ImioOAxluWYWOpWkpyktY2vnt4tbrX9XYaxovq6cgowaJp2w==", + "dev": true, + "peer": true, + "requires": { + "@jridgewell/resolve-uri": "^3.0.3", + "@jridgewell/sourcemap-codec": "^1.4.10" + } + }, + "@philippfromme/moddle-helpers": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/@philippfromme/moddle-helpers/-/moddle-helpers-0.4.1.tgz", + "integrity": "sha512-6ST9WdafFGh/vxeQP4pwFkcGcqIQJ0mtQSrXwoetTLigCXCcP4UXdXxjcIEwWKoXuexXV/2CgFS0CPENSVcwdg==", + "requires": { + "min-dash": "^3.8.1" + } + }, + "@types/eslint": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.4.2.tgz", + "integrity": "sha512-Z1nseZON+GEnFjJc04sv4NSALGjhFwy6K0HXt7qsn5ArfAKtb63dXNJHf+1YW6IpOIYRBGUbu3GwJdj8DGnCjA==", + "dev": true, + "peer": true, + "requires": { + "@types/estree": "*", + "@types/json-schema": "*" + } + }, + "@types/eslint-scope": { + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/@types/eslint-scope/-/eslint-scope-3.7.3.tgz", + "integrity": "sha512-PB3ldyrcnAicT35TWPs5IcwKD8S333HMaa2VVv4+wdvebJkjWuW/xESoB8IwRcog8HYVYamb1g/R31Qv5Bx03g==", + "dev": true, + "peer": true, + "requires": { + "@types/eslint": "*", + "@types/estree": "*" + } + }, + "@types/estree": { + "version": "0.0.51", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-0.0.51.tgz", + "integrity": "sha512-CuPgU6f3eT/XgKKPqKd/gLZV1Xmvf1a2R5POBOGQa6uv82xpls89HU5zKeVoyR8XzHd1RGNOlQlvUe3CFkjWNQ==", + "dev": true, + "peer": true + }, + "@types/json-schema": { + "version": "7.0.11", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz", + "integrity": 
"sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ==", + "dev": true, + "peer": true + }, + "@types/node": { + "version": "17.0.38", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.38.tgz", + "integrity": "sha512-5jY9RhV7c0Z4Jy09G+NIDTsCZ5G0L5n+Z+p+Y7t5VJHM30bgwzSjVtlcBxqAj+6L/swIlvtOSzr8rBk/aNyV2g==", + "dev": true, + "peer": true + }, + "@webassemblyjs/ast": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.11.1.tgz", + "integrity": "sha512-ukBh14qFLjxTQNTXocdyksN5QdM28S1CxHt2rdskFyL+xFV7VremuBLVbmCePj+URalXBENx/9Lm7lnhihtCSw==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/helper-numbers": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1" + } + }, + "@webassemblyjs/floating-point-hex-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/floating-point-hex-parser/-/floating-point-hex-parser-1.11.1.tgz", + "integrity": "sha512-iGRfyc5Bq+NnNuX8b5hwBrRjzf0ocrJPI6GWFodBFzmFnyvrQ83SHKhmilCU/8Jv67i4GJZBMhEzltxzcNagtQ==", + "dev": true, + "peer": true + }, + "@webassemblyjs/helper-api-error": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-api-error/-/helper-api-error-1.11.1.tgz", + "integrity": "sha512-RlhS8CBCXfRUR/cwo2ho9bkheSXG0+NwooXcc3PAILALf2QLdFyj7KGsKRbVc95hZnhnERon4kW/D3SZpp6Tcg==", + "dev": true, + "peer": true + }, + "@webassemblyjs/helper-buffer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-buffer/-/helper-buffer-1.11.1.tgz", + "integrity": "sha512-gwikF65aDNeeXa8JxXa2BAk+REjSyhrNC9ZwdT0f8jc4dQQeDQ7G4m0f2QCLPJiMTTO6wfDmRmj/pW0PsUvIcA==", + "dev": true, + "peer": true + }, + "@webassemblyjs/helper-numbers": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-numbers/-/helper-numbers-1.11.1.tgz", + "integrity": 
"sha512-vDkbxiB8zfnPdNK9Rajcey5C0w+QJugEglN0of+kmO8l7lDb77AnlKYQF7aarZuCrv+l0UvqL+68gSDr3k9LPQ==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/floating-point-hex-parser": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/helper-wasm-bytecode": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-bytecode/-/helper-wasm-bytecode-1.11.1.tgz", + "integrity": "sha512-PvpoOGiJwXeTrSf/qfudJhwlvDQxFgelbMqtq52WWiXC6Xgg1IREdngmPN3bs4RoO83PnL/nFrxucXj1+BX62Q==", + "dev": true, + "peer": true + }, + "@webassemblyjs/helper-wasm-section": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/helper-wasm-section/-/helper-wasm-section-1.11.1.tgz", + "integrity": "sha512-10P9No29rYX1j7F3EVPX3JvGPQPae+AomuSTPiF9eBQeChHI6iqjMIwR9JmOJXwpnn/oVGDk7I5IlskuMwU/pg==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1" + } + }, + "@webassemblyjs/ieee754": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ieee754/-/ieee754-1.11.1.tgz", + "integrity": "sha512-hJ87QIPtAMKbFq6CGTkZYJivEwZDbQUgYd3qKSadTNOhVY7p+gfP6Sr0lLRVTaG1JjFj+r3YchoqRYxNH3M0GQ==", + "dev": true, + "peer": true, + "requires": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "@webassemblyjs/leb128": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/leb128/-/leb128-1.11.1.tgz", + "integrity": "sha512-BJ2P0hNZ0u+Th1YZXJpzW6miwqQUGcIHT1G/sf72gLVD9DZ5AdYTqPNbHZh6K1M5VmKvFXwGSWZADz+qBWxeRw==", + "dev": true, + "peer": true, + "requires": { + "@xtuc/long": "4.2.2" + } + }, + "@webassemblyjs/utf8": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/utf8/-/utf8-1.11.1.tgz", + "integrity": 
"sha512-9kqcxAEdMhiwQkHpkNiorZzqpGrodQQ2IGrHHxCy+Ozng0ofyMA0lTqiLkVs1uzTRejX+/O0EOT7KxqVPuXosQ==", + "dev": true, + "peer": true + }, + "@webassemblyjs/wasm-edit": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-edit/-/wasm-edit-1.11.1.tgz", + "integrity": "sha512-g+RsupUC1aTHfR8CDgnsVRVZFJqdkFHpsHMfJuWQzWU3tvnLC07UqHICfP+4XyL2tnr1amvl1Sdp06TnYCmVkA==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/helper-wasm-section": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-opt": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "@webassemblyjs/wast-printer": "1.11.1" + } + }, + "@webassemblyjs/wasm-gen": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-gen/-/wasm-gen-1.11.1.tgz", + "integrity": "sha512-F7QqKXwwNlMmsulj6+O7r4mmtAlCWfO/0HdgOxSklZfQcDu0TpLiD1mRt/zF25Bk59FIjEuGAIyn5ei4yMfLhA==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "@webassemblyjs/wasm-opt": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-opt/-/wasm-opt-1.11.1.tgz", + "integrity": "sha512-VqnkNqnZlU5EB64pp1l7hdm3hmQw7Vgqa0KF/KCNO9sIpI6Fk6brDEiX+iCOYrvMuBWDws0NkTOxYEb85XQHHw==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-buffer": "1.11.1", + "@webassemblyjs/wasm-gen": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1" + } + }, + "@webassemblyjs/wasm-parser": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wasm-parser/-/wasm-parser-1.11.1.tgz", + "integrity": 
"sha512-rrBujw+dJu32gYB7/Lup6UhdkPx9S9SnobZzRVL7VcBH9Bt9bCBLEuX/YXOOtBsOZ4NQrRykKhffRWHvigQvOA==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/helper-api-error": "1.11.1", + "@webassemblyjs/helper-wasm-bytecode": "1.11.1", + "@webassemblyjs/ieee754": "1.11.1", + "@webassemblyjs/leb128": "1.11.1", + "@webassemblyjs/utf8": "1.11.1" + } + }, + "@webassemblyjs/wast-printer": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/wast-printer/-/wast-printer-1.11.1.tgz", + "integrity": "sha512-IQboUWM4eKzWW+N/jij2sRatKMh99QEelo3Eb2q0qXkvPRISAj8Qxtmw5itwqK+TTkBuUIE45AxYPToqPtL5gg==", + "dev": true, + "peer": true, + "requires": { + "@webassemblyjs/ast": "1.11.1", + "@xtuc/long": "4.2.2" + } + }, + "@webpack-cli/configtest": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/configtest/-/configtest-1.1.1.tgz", + "integrity": "sha512-1FBc1f9G4P/AxMqIgfZgeOTuRnwZMten8E7zap5zgpPInnCrP8D4Q81+4CWIch8i/Nf7nXjP0v6CjjbHOrXhKg==", + "dev": true, + "requires": {} + }, + "@webpack-cli/info": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/info/-/info-1.4.1.tgz", + "integrity": "sha512-PKVGmazEq3oAo46Q63tpMr4HipI3OPfP7LiNOEJg963RMgT0rqheag28NCML0o3GIzA3DmxP1ZIAv9oTX1CUIA==", + "dev": true, + "requires": { + "envinfo": "^7.7.3" + } + }, + "@webpack-cli/serve": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@webpack-cli/serve/-/serve-1.6.1.tgz", + "integrity": "sha512-gNGTiTrjEVQ0OcVnzsRSqTxaBSr+dmTfm+qJsCDluky8uhdLWep7Gcr62QsAKHTMxjCS/8nEITsmFAhfIx+QSw==", + "dev": true, + "requires": {} + }, + "@xtuc/ieee754": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@xtuc/ieee754/-/ieee754-1.2.0.tgz", + "integrity": "sha512-DX8nKgqcGwsc0eJSqYt5lwP4DH5FlHnmuWWBRy7X0NcaGR0ZtuyeESgMwTYVEtxmsNGY+qit4QYT/MIYTOTPeA==", + "dev": true, + "peer": true + }, + "@xtuc/long": { + "version": "4.2.2", + "resolved": 
"https://registry.npmjs.org/@xtuc/long/-/long-4.2.2.tgz", + "integrity": "sha512-NuHqBY1PB/D8xU6s/thBgOAiAP7HOYDQ32+BFZILJ8ivkUkAHQnWfn6WhL79Owj1qmUnoN/YPhktdIoucipkAQ==", + "dev": true, + "peer": true + }, + "acorn": { + "version": "8.7.1", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.7.1.tgz", + "integrity": "sha512-Xx54uLJQZ19lKygFXOWsscKUbsBZW0CPykPhVQdhIeIwrbPmJzqeASDInc8nKBnp/JT6igTs82qPXz069H8I/A==", + "dev": true, + "peer": true + }, + "acorn-import-assertions": { + "version": "1.8.0", + "resolved": "https://registry.npmjs.org/acorn-import-assertions/-/acorn-import-assertions-1.8.0.tgz", + "integrity": "sha512-m7VZ3jwz4eK6A4Vtt8Ew1/mNbP24u0FhdyfA7fSvnJR6LMdfOYnmuIrrJAgrYfYJ10F/otaHTtrtrtmHdMNzEw==", + "dev": true, + "peer": true, + "requires": {} + }, + "ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, + "peer": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + } + }, + "ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "peer": true, + "requires": {} + }, + "array-move": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/array-move/-/array-move-3.0.1.tgz", + "integrity": "sha512-H3Of6NIn2nNU1gsVDqDnYKY/LCdWvCMMOWifNGhKcVQgiZ6nOek39aESOvro6zmueP07exSl93YLvkN4fZOkSg==" + }, + "bpmn-js": { + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/bpmn-js/-/bpmn-js-9.1.0.tgz", + "integrity": "sha512-LFrNVt15hCvTJ7RrdshJeNYyLPAJQKC8sBCXvnFoLuwfuBwNbxkDtaripzrkgCj7X5wyduh+ogZ4KaE5xwsTbA==", + "requires": { + "bpmn-moddle": "^7.1.2", + "css.escape": 
"^1.5.1", + "diagram-js": "^8.3.0", + "diagram-js-direct-editing": "^1.6.3", + "ids": "^1.0.0", + "inherits": "^2.0.4", + "min-dash": "^3.5.2", + "min-dom": "^3.2.0", + "object-refs": "^0.3.0", + "tiny-svg": "^2.2.2" + } + }, + "bpmn-js-properties-panel": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/bpmn-js-properties-panel/-/bpmn-js-properties-panel-1.1.1.tgz", + "integrity": "sha512-FIxg3yIeKtYzDFLqI9GzhYBYDh12WtyvHFOSl4bSlDUQJIs31Xe7yobECEZNQ+00XtmtKUK96ikHUti5YydgDQ==", + "requires": { + "@bpmn-io/element-templates-validator": "^0.8.1", + "@bpmn-io/extract-process-variables": "^0.4.5", + "@philippfromme/moddle-helpers": "^0.4.1", + "array-move": "^3.0.1", + "classnames": "^2.3.1", + "ids": "^1.0.0", + "min-dash": "^3.8.1", + "min-dom": "^3.1.3", + "preact-markup": "^2.1.1", + "semver-compare": "^1.0.0" + } + }, + "bpmn-moddle": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/bpmn-moddle/-/bpmn-moddle-7.1.2.tgz", + "integrity": "sha512-Sax4LokRCTqlg26njjULN3ZGtCmwH5gZVUZTRF0jwJk+YpMQhSfSoUECxjNv8OROoLxu8Z+MjdOHIxgvJf7KwA==", + "requires": { + "min-dash": "^3.5.2", + "moddle": "^5.0.2", + "moddle-xml": "^9.0.5" + } + }, + "browserslist": { + "version": "4.20.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.20.3.tgz", + "integrity": "sha512-NBhymBQl1zM0Y5dQT/O+xiLP9/rzOIQdKM/eMJBAq7yBgaB6krIYLGejrwVYnSHZdqjscB1SPuAjHwxjvN6Wdg==", + "dev": true, + "peer": true, + "requires": { + "caniuse-lite": "^1.0.30001332", + "electron-to-chromium": "^1.4.118", + "escalade": "^3.1.1", + "node-releases": "^2.0.3", + "picocolors": "^1.0.0" + } + }, + "buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true, + "peer": true + }, + "camunda-bpmn-js-behaviors": { + "version": "0.1.0", + "resolved": 
"https://registry.npmjs.org/camunda-bpmn-js-behaviors/-/camunda-bpmn-js-behaviors-0.1.0.tgz", + "integrity": "sha512-YJs4kAkRhZ1GyE4VVPTJlZ/GjuDHnSGvzuLTa87HIfpEonVMHsmRrQL0Gr/bkSVcQaA4s6XB0XKV6rz32LHNUA==", + "peer": true, + "requires": { + "ids": "^1.0.0", + "min-dash": "^3.7.0" + } + }, + "camunda-bpmn-moddle": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/camunda-bpmn-moddle/-/camunda-bpmn-moddle-6.1.2.tgz", + "integrity": "sha512-DfhOTeq8oN01cB5sLE6Rq34/9xGD15/Y14pEM+YBIjgvV6Rclh+BgIa/2aRMm8An4Kc/itm2tECYiDr8p/FyTQ==", + "peer": true, + "requires": { + "min-dash": "^3.8.1" + } + }, + "caniuse-lite": { + "version": "1.0.30001344", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001344.tgz", + "integrity": "sha512-0ZFjnlCaXNOAYcV7i+TtdKBp0L/3XEU2MF/x6Du1lrh+SRX4IfzIVL4HNJg5pB2PmFb8rszIGyOvsZnqqRoc2g==", + "dev": true, + "peer": true + }, + "chrome-trace-event": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/chrome-trace-event/-/chrome-trace-event-1.0.3.tgz", + "integrity": "sha512-p3KULyQg4S7NIHixdwbGX+nFHkoBiA4YQmyWtjb8XngSKV124nJmRysgAeujbUVb15vh+RvFUfCPqU7rXk+hZg==", + "dev": true, + "peer": true + }, + "classnames": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/classnames/-/classnames-2.3.1.tgz", + "integrity": "sha512-OlQdbZ7gLfGarSqxesMesDa5uz7KFbID8Kpq/SxIoNGDqY8lSYs0D+hhtBXhcdB3rcbXArFr7vlHheLk1voeNA==" + }, + "clone-deep": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/clone-deep/-/clone-deep-4.0.1.tgz", + "integrity": "sha512-neHB9xuzh/wk0dIHweyAXv2aPGZIVk3pLMe+/RNzINf17fe0OG96QroktYAUm7SM1PBnzTabaLboqqxDyMU+SQ==", + "dev": true, + "requires": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + } + }, + "colorette": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.16.tgz", + "integrity": 
"sha512-hUewv7oMjCp+wkBv5Rm0v87eJhq4woh5rSR+42YSQJKecCqgIqNkZ6lAlQms/BwHPJA5NKMRlpxPRv0n8HQW6g==", + "dev": true + }, + "commander": { + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", + "dev": true, + "peer": true + }, + "component-event": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/component-event/-/component-event-0.1.4.tgz", + "integrity": "sha512-GMwOG8MnUHP1l8DZx1ztFO0SJTFnIzZnBDkXAj8RM2ntV2A6ALlDxgbMY1Fvxlg6WPQ+5IM/a6vg4PEYbjg/Rw==" + }, + "cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "requires": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + } + }, + "css.escape": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/css.escape/-/css.escape-1.5.1.tgz", + "integrity": "sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s=" + }, + "diagram-js": { + "version": "8.5.0", + "resolved": "https://registry.npmjs.org/diagram-js/-/diagram-js-8.5.0.tgz", + "integrity": "sha512-UHA/Zfs7kG22M9wXAifAyPb2OZ4lG4lFi0CZ0GC6/lXmOsSHwHVZ1s/h9UqaIXnzIKW8SnZoP3Rwqel1ZhZLzg==", + "requires": { + "css.escape": "^1.5.1", + "didi": "^8.0.0", + "hammerjs": "^2.0.1", + "inherits-browser": "0.0.1", + "min-dash": "^3.5.2", + "min-dom": "^3.2.0", + "object-refs": "^0.3.0", + "path-intersection": "^2.2.1", + "tiny-svg": "^2.2.2" + } + }, + "diagram-js-direct-editing": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/diagram-js-direct-editing/-/diagram-js-direct-editing-1.7.0.tgz", + "integrity": "sha512-ZfTLF4hdWr7NSoruwxGvVmu7aVaUjWRXjwgK5dx58LbXAsNjBS3Ap7zjVuGxjWUpCZ/MMwyZ00lpTHPH2P7BFQ==", + "requires": { + "min-dash": "^3.5.2", + "min-dom": "^3.1.3" + } + }, + 
"didi": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/didi/-/didi-8.0.0.tgz", + "integrity": "sha512-PwqTBaYzzfJSyxvpXPcTWF6nDdCKx2mFAU5eup1ZSb5wbaAS9a/HiKdtcAUdie/VMLHoFI50jkYZcA+bhUOugw==" + }, + "domify": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/domify/-/domify-1.4.1.tgz", + "integrity": "sha512-x18nuiDHMCZGXr4KJSRMf/TWYtiaRo6RX8KN9fEbW54mvbQ6pieUuerC2ahBg+kEp1wycFj8MPUI0WkIOw5E9w==" + }, + "electron-to-chromium": { + "version": "1.4.143", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.143.tgz", + "integrity": "sha512-2hIgvu0+pDfXIqmVmV5X6iwMjQ2KxDsWKwM+oI1fABEOy/Dqmll0QJRmIQ3rm+XaoUa/qKrmy5h7LSTFQ6Ldzg==", + "dev": true, + "peer": true + }, + "enhanced-resolve": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.9.3.tgz", + "integrity": "sha512-Bq9VSor+kjvW3f9/MiiR4eE3XYgOl7/rS8lnSxbRbF3kS0B2r+Y9w5krBWxZgDxASVZbdYrn5wT4j/Wb0J9qow==", + "dev": true, + "peer": true, + "requires": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + } + }, + "envinfo": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/envinfo/-/envinfo-7.8.1.tgz", + "integrity": "sha512-/o+BXHmB7ocbHEAs6F2EnG0ogybVVUdkRunTT2glZU9XAaGmhqskrvKwqXuDfNjEO0LZKWdejEEpnq8aM0tOaw==", + "dev": true + }, + "es-module-lexer": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz", + "integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==", + "dev": true, + "peer": true + }, + "escalade": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz", + "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==", + "dev": true, + "peer": true + }, + "eslint-scope": { + "version": "5.1.1", + "resolved": 
"https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.1.1.tgz", + "integrity": "sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw==", + "dev": true, + "peer": true, + "requires": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + } + }, + "esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + "dev": true, + "peer": true, + "requires": { + "estraverse": "^5.2.0" + }, + "dependencies": { + "estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "peer": true + } + } + }, + "estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "dev": true, + "peer": true + }, + "events": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", + "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", + "dev": true, + "peer": true + }, + "execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "requires": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + } + }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "dev": true, + "peer": true + }, + "fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "peer": true + }, + "fastest-levenshtein": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.12.tgz", + "integrity": "sha512-On2N+BpYJ15xIC974QNVuYGMOlEVt4s0EOI3wwMqOmK1fdDY+FN/zltPV8vosq4ad4c/gJ1KHScUn/6AWIgiow==", + "dev": true + }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, + "function-bind": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", + "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", + "dev": true + }, + "get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true + }, + "glob-to-regexp": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", + "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==", + "dev": true, + "peer": true + }, + "graceful-fs": { + "version": "4.2.10", + "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.10.tgz", + "integrity": "sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA==", + "dev": true, + "peer": true + }, + "hammerjs": { + "version": "2.0.8", + "resolved": "https://registry.npmjs.org/hammerjs/-/hammerjs-2.0.8.tgz", + "integrity": "sha1-BO93hiz/K7edMPdpIJWTAiK/YPE=" + }, + "has": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", + "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", + "dev": true, + "requires": { + "function-bind": "^1.1.1" + } + }, + "has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "peer": true + }, + "human-signals": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", + "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", + "dev": true + }, + "ids": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/ids/-/ids-1.0.0.tgz", + "integrity": "sha512-Zvtq1xUto4LttpstyOlFum8lKx+i1OmRfg+6A9drFS9iSZsDPMHG4Sof/qwNR4kCU7jBeWFPrY2ocHxiz7cCRw==" + }, + "import-local": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", + "integrity": "sha512-ASB07uLtnDs1o6EHjKpX34BKYDSqnFerfTOJL2HvMqF70LnxpjkzDB8J44oT9pu4AMPkQwf8jl6szgvNd2tRIg==", + "dev": true, + "requires": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + } + }, + "indexof": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/indexof/-/indexof-0.0.1.tgz", + "integrity": "sha1-gtwzbSMrkGIXnQWrMpOmYFn9Q10=" + }, + "inherits": { + "version": "2.0.4", + "resolved": 
"https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "inherits-browser": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/inherits-browser/-/inherits-browser-0.0.1.tgz", + "integrity": "sha512-kaDA3DkCdCpvrKIo/1T/3yVn+qpFUHLjYtSHmTYewb+QfjfaQy6FGQ7LwBu7st0tG9UvYad/XAlqQmdIh6CICw==" + }, + "interpret": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/interpret/-/interpret-2.2.0.tgz", + "integrity": "sha512-Ju0Bz/cEia55xDwUWEa8+olFpCiQoypjnQySseKtmjNrnps3P+xfpUmGr90T7yjlVJmOtybRvPXhKMbHr+fWnw==", + "dev": true + }, + "is-core-module": { + "version": "2.9.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.9.0.tgz", + "integrity": "sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A==", + "dev": true, + "requires": { + "has": "^1.0.3" + } + }, + "is-plain-object": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-2.0.4.tgz", + "integrity": "sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==", + "dev": true, + "requires": { + "isobject": "^3.0.1" + } + }, + "is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true + }, + "isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "isobject": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/isobject/-/isobject-3.0.1.tgz", + "integrity": 
"sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==", + "dev": true + }, + "jest-worker": { + "version": "27.5.1", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-27.5.1.tgz", + "integrity": "sha512-7vuh85V5cdDofPyxn58nrPjBktZo0u9x1g8WtjQol+jZDaE+fhN+cIvTj11GndBnMnyfrUOG1sZQxCdjKh+DKg==", + "dev": true, + "peer": true, + "requires": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + } + }, + "json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true, + "peer": true + }, + "json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "peer": true + }, + "json-source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/json-source-map/-/json-source-map-0.6.1.tgz", + "integrity": "sha512-1QoztHPsMQqhDq0hlXY5ZqcEdUzxQEIxgFkKl4WUp2pgShObl+9ovi4kRh2TfvAfxAoHOJ9vIMEqk3k4iex7tg==" + }, + "kind-of": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz", + "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==", + "dev": true + }, + "loader-runner": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/loader-runner/-/loader-runner-4.3.0.tgz", + "integrity": "sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg==", + "dev": true, + "peer": true + }, + "locate-path": { + "version": "5.0.0", + "resolved": 
"https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "requires": { + "p-locate": "^4.1.0" + } + }, + "matches-selector": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/matches-selector/-/matches-selector-1.2.0.tgz", + "integrity": "sha512-c4vLwYWyl+Ji+U43eU/G5FwxWd4ZH0ePUsFs5y0uwD9HUEFBXUQ1zUUan+78IpRD+y4pUfG0nAzNM292K7ItvA==" + }, + "merge-stream": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", + "dev": true + }, + "mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "dev": true, + "peer": true + }, + "mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dev": true, + "peer": true, + "requires": { + "mime-db": "1.52.0" + } + }, + "mimic-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", + "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", + "dev": true + }, + "min-dash": { + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/min-dash/-/min-dash-3.8.1.tgz", + "integrity": "sha512-evumdlmIlg9mbRVPbC4F5FuRhNmcMS5pvuBUbqb1G9v09Ro0ImPEgz5n3khir83lFok1inKqVDjnKEg3GpDxQg==" + }, + "min-dom": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/min-dom/-/min-dom-3.2.1.tgz", + "integrity": 
"sha512-v6YCmnDzxk4rRJntWTUiwggLupPw/8ZSRqUq0PDaBwVZEO/wYzCH4SKVBV+KkEvf3u0XaWHly5JEosPtqRATZA==", + "requires": { + "component-event": "^0.1.4", + "domify": "^1.3.1", + "indexof": "0.0.1", + "matches-selector": "^1.2.0", + "min-dash": "^3.8.1" + } + }, + "moddle": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/moddle/-/moddle-5.0.3.tgz", + "integrity": "sha512-EjnQkSaZClHMsM3H/guBy9h7AmHUICH0Pf8H1VnnYGUXy2hkZQU4gqEAyHywJzMRAhYX87pXjH2NtyigF7evkA==", + "requires": { + "min-dash": "^3.0.0" + } + }, + "moddle-xml": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/moddle-xml/-/moddle-xml-9.0.5.tgz", + "integrity": "sha512-1t9N35ZMQZTYZmRDoh1mBVd0XwLB34BkBywNJ0+YlLLYxaDBjFR/I+fqwsY746ayYPBz6yNRg8JpLyFgNF+eHg==", + "requires": { + "min-dash": "^3.5.2", + "moddle": "^5.0.2", + "saxen": "^8.1.2" + } + }, + "neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true, + "peer": true + }, + "node-releases": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.5.tgz", + "integrity": "sha512-U9h1NLROZTq9uE1SNffn6WuPDg8icmi3ns4rEl/oTfIle4iLjTliCzgTsbaIFMq/Xn078/lfY/BL0GWZ+psK4Q==", + "dev": true, + "peer": true + }, + "npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "requires": { + "path-key": "^3.0.0" + } + }, + "object-refs": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/object-refs/-/object-refs-0.3.0.tgz", + "integrity": "sha512-eP0ywuoWOaDoiake/6kTJlPJhs+k0qNm4nYRzXLNHj6vh+5M3i9R1epJTdxIPGlhWc4fNRQ7a6XJNCX+/L4FOQ==" + }, + "onetime": { + "version": "5.1.2", + "resolved": 
"https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, + "p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "requires": { + "p-try": "^2.0.0" + } + }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "requires": { + "p-limit": "^2.2.0" + } + }, + "p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true + }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true + }, + "path-intersection": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/path-intersection/-/path-intersection-2.2.1.tgz", + "integrity": "sha512-9u8xvMcSfuOiStv9bPdnRJQhGQXLKurew94n4GPQCdH1nj9QKC9ObbNoIpiRq8skiOBxKkt277PgOoFgAt3/rA==" + }, + "path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true + }, + "path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": 
"sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, + "picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==", + "dev": true, + "peer": true + }, + "pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "requires": { + "find-up": "^4.0.0" + } + }, + "preact": { + "version": "10.7.3", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.7.3.tgz", + "integrity": "sha512-giqJXP8VbtA1tyGa3f1n9wiN7PrHtONrDyE3T+ifjr/tTkg+2N4d/6sjC9WyJKv8wM7rOYDveqy5ZoFmYlwo4w==", + "peer": true + }, + "preact-markup": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/preact-markup/-/preact-markup-2.1.1.tgz", + "integrity": "sha512-8JL2p36mzK8XkspOyhBxUSPjYwMxDM0L5BWBZWxsZMVW8WsGQrYQDgVuDKkRspt2hwrle+Cxr/053hpc9BJwfw==", + "requires": {} + }, + "punycode": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", + "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==", + "dev": true, + "peer": true + }, + "randombytes": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz", + "integrity": "sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ==", + "dev": true, + "peer": true, + "requires": { + "safe-buffer": "^5.1.0" + } + }, + "rechoir": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.7.1.tgz", + "integrity": "sha512-/njmZ8s1wVeR6pjTZ+0nCnv8SpZNRMT2D1RLOJQESlYFDBvwpTA4KWJpZ+sBJ4+vhjILRcK7JIFdGCdxEAAitg==", + "dev": true, + 
"requires": { + "resolve": "^1.9.0" + } + }, + "resolve": { + "version": "1.22.0", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.0.tgz", + "integrity": "sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw==", + "dev": true, + "requires": { + "is-core-module": "^2.8.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + } + }, + "resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "requires": { + "resolve-from": "^5.0.0" + } + }, + "resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true + }, + "safe-buffer": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", + "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "dev": true, + "peer": true + }, + "saxen": { + "version": "8.1.2", + "resolved": "https://registry.npmjs.org/saxen/-/saxen-8.1.2.tgz", + "integrity": "sha512-xUOiiFbc3Ow7p8KMxwsGICPx46ZQvy3+qfNVhrkwfz3Vvq45eGt98Ft5IQaA1R/7Tb5B5MKh9fUR9x3c3nDTxw==" + }, + "schema-utils": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.1.1.tgz", + "integrity": "sha512-Y5PQxS4ITlC+EahLuXaY86TXfR7Dc5lw294alXOq86JAHCihAIZfqv8nNCWvaEJvaC51uN9hbLGeV0cFBdH+Fw==", + "dev": true, + "peer": true, + "requires": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + } + }, + "semver-compare": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/semver-compare/-/semver-compare-1.0.0.tgz", + "integrity": 
"sha1-De4hahyUGrN+nvsXiPavxf9VN/w=" + }, + "serialize-javascript": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.0.tgz", + "integrity": "sha512-Qr3TosvguFt8ePWqsvRfrKyQXIiW+nGbYpy8XK24NQHE83caxWt+mIymTT19DGFbNWNLfEwsrkSmN64lVWB9ag==", + "dev": true, + "peer": true, + "requires": { + "randombytes": "^2.1.0" + } + }, + "shallow-clone": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/shallow-clone/-/shallow-clone-3.0.1.tgz", + "integrity": "sha512-/6KqX+GVUdqPuPPd2LxDDxzX6CAbjJehAAOKlNpqqUpAqPM6HeL8f+o3a+JsyGjn2lv0WY8UsTgUJjU9Ok55NA==", + "dev": true, + "requires": { + "kind-of": "^6.0.2" + } + }, + "shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "requires": { + "shebang-regex": "^3.0.0" + } + }, + "shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true + }, + "signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true + }, + "source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "peer": true + }, + "source-map-support": { + "version": "0.5.21", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz", + "integrity": 
"sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==", + "dev": true, + "peer": true, + "requires": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "strip-final-newline": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", + "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", + "dev": true + }, + "supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "peer": true, + "requires": { + "has-flag": "^4.0.0" + } + }, + "supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true + }, + "tapable": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/tapable/-/tapable-2.2.1.tgz", + "integrity": "sha512-GNzQvQTOIP6RyTfE2Qxb8ZVlNmw0n88vp1szwWRimP02mnTsx3Wtn5qRdqY9w2XduFNUgvOwhNnQsjwCp+kqaQ==", + "dev": true, + "peer": true + }, + "terser": { + "version": "5.14.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.14.0.tgz", + "integrity": "sha512-JC6qfIEkPBd9j1SMO3Pfn+A6w2kQV54tv+ABQLgZr7dA3k/DL/OBoYSWxzVpZev3J+bUHXfr55L8Mox7AaNo6g==", + "dev": true, + "peer": true, + "requires": { + "@jridgewell/source-map": "^0.3.2", + "acorn": "^8.5.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + } + }, + "terser-webpack-plugin": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/terser-webpack-plugin/-/terser-webpack-plugin-5.3.1.tgz", + "integrity": 
"sha512-GvlZdT6wPQKbDNW/GDQzZFg/j4vKU96yl2q6mcUkzKOgW4gwf1Z8cZToUCrz31XHlPWH8MVb1r2tFtdDtTGJ7g==", + "dev": true, + "peer": true, + "requires": { + "jest-worker": "^27.4.5", + "schema-utils": "^3.1.1", + "serialize-javascript": "^6.0.0", + "source-map": "^0.6.1", + "terser": "^5.7.2" + } + }, + "tiny-svg": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/tiny-svg/-/tiny-svg-2.2.3.tgz", + "integrity": "sha512-u5KGg889pD1W2c9GlLrTnAGzIkAO00/VXZGyzeiGHw+b9er8McLO0SnhxPQQDwDqFO0MrJ825AEsRUoTiDZFuQ==" + }, + "uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "peer": true, + "requires": { + "punycode": "^2.1.0" + } + }, + "watchpack": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.3.1.tgz", + "integrity": "sha512-x0t0JuydIo8qCNctdDrn1OzH/qDzk2+rdCOC3YzumZ42fiMqmQ7T3xQurykYMhYfHaPHTp4ZxAx2NfUo1K6QaA==", + "dev": true, + "peer": true, + "requires": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + } + }, + "webpack": { + "version": "5.72.1", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.72.1.tgz", + "integrity": "sha512-dXG5zXCLspQR4krZVR6QgajnZOjW2K/djHvdcRaDQvsjV9z9vaW6+ja5dZOYbqBBjF6kGXka/2ZyxNdc+8Jung==", + "dev": true, + "peer": true, + "requires": { + "@types/eslint-scope": "^3.7.3", + "@types/estree": "^0.0.51", + "@webassemblyjs/ast": "1.11.1", + "@webassemblyjs/wasm-edit": "1.11.1", + "@webassemblyjs/wasm-parser": "1.11.1", + "acorn": "^8.4.1", + "acorn-import-assertions": "^1.7.6", + "browserslist": "^4.14.5", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.9.3", + "es-module-lexer": "^0.9.0", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.9", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": 
"^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.1.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.1.3", + "watchpack": "^2.3.1", + "webpack-sources": "^3.2.3" + } + }, + "webpack-cli": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/webpack-cli/-/webpack-cli-4.9.2.tgz", + "integrity": "sha512-m3/AACnBBzK/kMTcxWHcZFPrw/eQuY4Df1TxvIWfWM2x7mRqBQCqKEd96oCUa9jkapLBaFfRce33eGDb4Pr7YQ==", + "dev": true, + "requires": { + "@discoveryjs/json-ext": "^0.5.0", + "@webpack-cli/configtest": "^1.1.1", + "@webpack-cli/info": "^1.4.1", + "@webpack-cli/serve": "^1.6.1", + "colorette": "^2.0.14", + "commander": "^7.0.0", + "execa": "^5.0.0", + "fastest-levenshtein": "^1.0.12", + "import-local": "^3.0.2", + "interpret": "^2.2.0", + "rechoir": "^0.7.0", + "webpack-merge": "^5.7.3" + }, + "dependencies": { + "commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true + } + } + }, + "webpack-merge": { + "version": "5.8.0", + "resolved": "https://registry.npmjs.org/webpack-merge/-/webpack-merge-5.8.0.tgz", + "integrity": "sha512-/SaI7xY0831XwP6kzuwhKWVKDP9t1QY1h65lAFLbZqMPIuYcD9QAW4u9STIbU9kaJbPBB/geU/gLr1wDjOhQ+Q==", + "dev": true, + "requires": { + "clone-deep": "^4.0.1", + "wildcard": "^2.0.0" + } + }, + "webpack-sources": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.2.3.tgz", + "integrity": "sha512-/DyMEOrDgLKKIG0fmvtz+4dUX/3Ghozwgm6iPp8KRhvn+eQf9+Q7GWxVNMk3+uCPWfdXYC4ExGBckIXdFEfH1w==", + "dev": true, + "peer": true + }, + "which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "requires": { + "isexe": "^2.0.0" + } + }, + "wildcard": { + 
"version": "2.0.0", + "resolved": "https://registry.npmjs.org/wildcard/-/wildcard-2.0.0.tgz", + "integrity": "sha512-JcKqAHLPxcdb9KM49dufGXn2x3ssnfjbcaQdLlfZsL9rH9wgDQjUtDxbo8NE0F6SFvydeu1VhZe7hZuHsB2/pw==", + "dev": true + }, + "zeebe-bpmn-moddle": { + "version": "0.12.1", + "resolved": "https://registry.npmjs.org/zeebe-bpmn-moddle/-/zeebe-bpmn-moddle-0.12.1.tgz", + "integrity": "sha512-rnUoK+A/gzinOGUlmJKeXmnjorgEm4yf7qgeaowXGZOFtFqtM2lvJ7XYTJNsKClaNfFG245JtKHH3G/caJxE6g==", + "peer": true + } + } +} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/static/package.json b/src/spiffworkflow_backend/routes/admin_blueprint/static/package.json new file mode 100644 index 00000000..bee0dcf5 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/static/package.json @@ -0,0 +1,18 @@ +{ + "name": "spiffworkflow-backend", + "version": "0.0.0", + "description": "Serve up Spiff Workflows to the World!", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "bpmn-js": "^9.1.0", + "bpmn-js-properties-panel": "^1.1.1" + }, + "devDependencies": { + "webpack-cli": "^4.9.2" + } +} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/static/style.css b/src/spiffworkflow_backend/routes/admin_blueprint/static/style.css new file mode 100644 index 00000000..b534fcd5 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/static/style.css @@ -0,0 +1,2 @@ +.example { +} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/templates/layout.html b/src/spiffworkflow_backend/routes/admin_blueprint/templates/layout.html new file mode 100644 index 00000000..d1521fdd --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/templates/layout.html @@ -0,0 +1,23 @@ + + + + {% block head %} + + {% block title %}{% endblock %} + {% endblock %} + + +

{{ self.title() }}

+ {% with messages = get_flashed_messages(with_categories=true) %} {% if + messages %} +
    + {% for category, message in messages %} +
  • {{ message }}
  • + {% endfor %} +
+ {% endif %} {% endwith %} {% block content %}{% endblock %} + + diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_group_show.html b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_group_show.html new file mode 100644 index 00000000..a5fbab88 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_group_show.html @@ -0,0 +1,25 @@ +{% extends "layout.html" %} +{% block title %}Process Group: {{ process_group.id }}{% endblock %} +{% block content %} + + + + {# here we iterate over every item in our list#} + {% for process_model in process_group.process_models %} + + + + {% endfor %} + +
+ {{ process_model.display_name }} +
+{% endblock %} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_groups_list.html b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_groups_list.html new file mode 100644 index 00000000..f6eb8f58 --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_groups_list.html @@ -0,0 +1,18 @@ +{% extends "layout.html" %} {% block title %}Process Groups{% endblock %} {% +block content %} + + + {# here we iterate over every item in our list#} {% for process_group in + process_groups %} + + + + {% endfor %} + +
+ {{ process_group.display_name }} +
+{% endblock %} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_edit.html b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_edit.html new file mode 100644 index 00000000..8dca623a --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_edit.html @@ -0,0 +1,167 @@ +{% extends "layout.html" %} {% block title %} + Process Model Edit: {{ process_model.id }} +{% endblock %} + +{% block head %} + + + + + + + + + + + + + + + + + + + +{% endblock %} + +{% block content %} +
{{ result }}
+ + + +
+
+ + + + +{% endblock %} diff --git a/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_show.html b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_show.html new file mode 100644 index 00000000..5eedf1de --- /dev/null +++ b/src/spiffworkflow_backend/routes/admin_blueprint/templates/process_model_show.html @@ -0,0 +1,159 @@ +{% extends "layout.html" %} {% block title %}Process Model: {{ process_model.id +}}{% endblock %} {% block head %} {{ super() }} + + + + + + + + + + + + + + + + + + +{% endblock %} {% block content %} +
{{ result }}
+ + + + +{% if files %} +

BPMN Files

+
    + {% for file in files %} +
  • + {{ file.name }} + {% if file.name == current_file_name %} (current) {% endif %} +
  • + {% endfor %} +
+{% endif %} + +
+ + +
+ +
+ + + + +{% endblock %} diff --git a/src/spiffworkflow_backend/routes/process_api_blueprint.py b/src/spiffworkflow_backend/routes/process_api_blueprint.py new file mode 100644 index 00000000..95e4805f --- /dev/null +++ b/src/spiffworkflow_backend/routes/process_api_blueprint.py @@ -0,0 +1,1432 @@ +"""APIs for dealing with process groups, process models, and process instances.""" +import json +import os +import random +import string +import uuid +from typing import Any +from typing import Dict +from typing import Optional +from typing import TypedDict +from typing import Union + +import connexion # type: ignore +import flask.wrappers +import jinja2 +from flask import Blueprint +from flask import current_app +from flask import g +from flask import jsonify +from flask import make_response +from flask import request +from flask.wrappers import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from lxml import etree # type: ignore +from lxml.builder import ElementMaker # type: ignore +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState +from sqlalchemy import desc + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.file import FileSchema +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model import MessageModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceApiSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from 
spiffworkflow_backend.models.process_instance import ProcessInstanceModelSchema +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.models.secret_model import SecretAllowedProcessSchema +from spiffworkflow_backend.models.secret_model import SecretModel +from spiffworkflow_backend.models.secret_model import SecretModelSchema +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.error_handling_service import ErrorHandlingService +from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner +from spiffworkflow_backend.services.secret_service import SecretService +from spiffworkflow_backend.services.service_task_service import ServiceTaskService +from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.user_service import UserService + + +class TaskDataSelectOption(TypedDict): + """TaskDataSelectOption.""" + + value: str + label: str + + +class ReactJsonSchemaSelectOption(TypedDict): + """ReactJsonSchemaSelectOption.""" + + type: str + title: str + enum: list[str] + + +process_api_blueprint = Blueprint("process_api", __name__) + + +def status() 
-> flask.wrappers.Response: + """Status.""" + ProcessInstanceModel.query.filter().first() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_group_add( + body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Add_process_group.""" + process_model_service = ProcessModelService() + process_group = ProcessGroupSchema().load(body) + process_model_service.add_process_group(process_group) + return Response( + json.dumps(ProcessGroupSchema().dump(process_group)), + status=201, + mimetype="application/json", + ) + + +def process_group_delete(process_group_id: str) -> flask.wrappers.Response: + """Process_group_delete.""" + ProcessModelService().process_group_delete(process_group_id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_group_update( + process_group_id: str, body: Dict[str, Union[str, bool, int]] +) -> Dict[str, Union[str, bool, int]]: + """Process Group Update.""" + process_group = ProcessGroupSchema().load(body) + ProcessModelService().update_process_group(process_group) + return ProcessGroupSchema().dump(process_group) # type: ignore + + +def process_groups_list(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Process_groups_list.""" + process_groups = ProcessModelService().get_process_groups() + batch = ProcessModelService().get_batch( + items=process_groups, page=page, per_page=per_page + ) + pages = len(process_groups) // per_page + remainder = len(process_groups) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": ProcessGroupSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_groups), + "pages": pages, + }, + } + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def process_group_show( + process_group_id: str, +) -> Any: + """Process_group_show.""" + try: + process_group = 
ProcessModelService().get_process_group(process_group_id) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_group_cannot_be_found", + message=f"Process group cannot be found: {process_group_id}", + status_code=400, + ) + ) from exception + return ProcessGroupSchema().dump(process_group) + + +def process_model_add( + body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Add_process_model.""" + process_model_info = ProcessModelInfoSchema().load(body) + if process_model_info is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body: {body}", + status_code=400, + ) + + process_model_service = ProcessModelService() + process_group = process_model_service.get_process_group( + process_model_info.process_group_id + ) + if process_group is None: + raise ApiError( + error_code="process_model_could_not_be_created", + message=f"Process Model could not be created from given body because Process Group could not be found: {body}", + status_code=400, + ) + + process_model_info.process_group = process_group + process_model_service.add_spec(process_model_info) + return Response( + json.dumps(ProcessModelInfoSchema().dump(process_model_info)), + status=201, + mimetype="application/json", + ) + + +def process_model_delete( + process_group_id: str, process_model_id: str +) -> flask.wrappers.Response: + """Process_model_delete.""" + ProcessModelService().process_model_delete(process_model_id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_update( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> Any: + """Process_model_update.""" + body_include_list = ["display_name", "primary_file_name", "primary_process_id"] + body_filtered = { + include_item: body[include_item] + for include_item in body_include_list + if include_item in body + } + + 
process_model = get_process_model(process_model_id, process_group_id) + ProcessModelService().update_spec(process_model, body_filtered) + return ProcessModelInfoSchema().dump(process_model) + + +def process_model_show(process_group_id: str, process_model_id: str) -> Any: + """Process_model_show.""" + process_model = get_process_model(process_model_id, process_group_id) + files = sorted(SpecFileService.get_files(process_model)) + process_model.files = files + process_model_json = ProcessModelInfoSchema().dump(process_model) + return process_model_json + + +def process_model_list( + process_group_identifier: Optional[str] = None, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process model list!""" + process_models = ProcessModelService().get_process_models( + process_group_id=process_group_identifier + ) + batch = ProcessModelService().get_batch( + process_models, page=page, per_page=per_page + ) + pages = len(process_models) // per_page + remainder = len(process_models) % per_page + if remainder > 0: + pages += 1 + response_json = { + "results": ProcessModelInfoSchema(many=True).dump(batch), + "pagination": { + "count": len(batch), + "total": len(process_models), + "pages": pages, + }, + } + + return Response(json.dumps(response_json), status=200, mimetype="application/json") + + +def get_file(process_group_id: str, process_model_id: str, file_name: str) -> Any: + """Get_file.""" + process_model = get_process_model(process_model_id, process_group_id) + files = SpecFileService.get_files(process_model, file_name) + if len(files) == 0: + raise ApiError( + error_code="unknown file", + message=f"No information exists for file {file_name}" + f" it does not exist in workflow {process_model_id}.", + status_code=404, + ) + + file = files[0] + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + file.process_group_id = process_model.process_group_id + 
return FileSchema().dump(file) + + +def process_model_file_update( + process_group_id: str, process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_update.""" + process_model = get_process_model(process_model_id, process_group_id) + + request_file = get_file_from_request() + request_file_contents = request_file.stream.read() + if not request_file_contents: + raise ApiError( + error_code="file_contents_empty", + message="Given request file does not have any content", + status_code=400, + ) + + SpecFileService.update_file(process_model, file_name, request_file_contents) + + if current_app.config["GIT_COMMIT_ON_SAVE"]: + git_output = GitService.commit( + message=f"User: {g.user.username} clicked save for {process_group_id}/{process_model_id}/{file_name}" + ) + current_app.logger.info(f"git output: {git_output}") + else: + current_app.logger.info("Git commit on save is disabled") + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_model_file_delete( + process_group_id: str, process_model_id: str, file_name: str +) -> flask.wrappers.Response: + """Process_model_file_delete.""" + process_model = get_process_model(process_model_id, process_group_id) + try: + SpecFileService.delete_file(process_model, file_name) + except FileNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_file_cannot_be_found", + message=f"Process model file cannot be found: {file_name}", + status_code=400, + ) + ) from exception + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def add_file(process_group_id: str, process_model_id: str) -> flask.wrappers.Response: + """Add_file.""" + process_model = get_process_model(process_model_id, process_group_id) + request_file = get_file_from_request() + if not request_file.filename: + raise ApiError( + error_code="could_not_get_filename", + message="Could not get filename from request", + status_code=400, + 
) + + file = SpecFileService.add_file( + process_model, request_file.filename, request_file.stream.read() + ) + file_contents = SpecFileService.get_data(process_model, file.name) + file.file_contents = file_contents + file.process_model_id = process_model.id + file.process_group_id = process_model.process_group_id + return Response( + json.dumps(FileSchema().dump(file)), status=201, mimetype="application/json" + ) + + +def process_instance_create( + process_group_id: str, process_model_id: str +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_instance = ProcessInstanceService.create_process_instance( + process_model_id, g.user, process_group_identifier=process_group_id + ) + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=201, + mimetype="application/json", + ) + + +def process_instance_run( + process_group_id: str, + process_model_id: str, + process_instance_id: int, + do_engine_steps: bool = True, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + + if do_engine_steps: + try: + processor.do_engine_steps() + except ApiError as e: + ErrorHandlingService().handle_error(processor, e) + raise e + except Exception as e: + ErrorHandlingService().handle_error(processor, e) + task = processor.bpmn_process_instance.last_task + raise ApiError.from_task( + error_code="unknown_exception", + message=f"An unknown error occurred. 
Original error: {e}", + status_code=400, + task=task, + ) from e + processor.save() + ProcessInstanceService.update_task_assignments(processor) + + if not current_app.config["PROCESS_WAITING_MESSAGES"]: + MessageService.process_message_instances() + + process_instance_api = ProcessInstanceService.processor_to_process_instance_api( + processor + ) + process_instance_data = processor.get_data() + process_instance_metadata = ProcessInstanceApiSchema().dump(process_instance_api) + process_instance_metadata["data"] = process_instance_data + return Response( + json.dumps(process_instance_metadata), status=200, mimetype="application/json" + ) + + +def process_instance_terminate( + process_group_id: str, + process_model_id: str, + process_instance_id: int, + do_engine_steps: bool = True, +) -> flask.wrappers.Response: + """Process_instance_run.""" + process_instance = ProcessInstanceService().get_process_instance( + process_instance_id + ) + processor = ProcessInstanceProcessor(process_instance) + processor.terminate() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_log_list( + process_group_id: str, + process_model_id: str, + process_instance_id: int, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_log_list.""" + # to make sure the process instance exists + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + logs = ( + SpiffLoggingModel.query.filter( + SpiffLoggingModel.process_instance_id == process_instance.id + ) + .order_by(SpiffLoggingModel.timestamp.desc()) # type: ignore + .join( + UserModel, isouter=True + ) # isouter since if we don't have a user, we still want the log + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + response_json = { + "results": logs.items, + "pagination": { + "count": len(logs.items), + "total": logs.total, + "pages": logs.pages, + }, + } + + return 
make_response(jsonify(response_json), 200) + + +def message_instance_list( + process_instance_id: Optional[int] = None, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Message_instance_list.""" + # to make sure the process instance exists + message_instances_query = MessageInstanceModel.query + + if process_instance_id: + message_instances_query = message_instances_query.filter_by( + process_instance_id=process_instance_id + ) + + message_instances = ( + message_instances_query.order_by( + MessageInstanceModel.created_at_in_seconds.desc(), # type: ignore + MessageInstanceModel.id.desc(), # type: ignore + ) + .join(MessageModel) + .join(ProcessInstanceModel) + .add_columns( + MessageModel.identifier.label("message_identifier"), + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_group_identifier, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + response_json = { + "results": message_instances.items, + "pagination": { + "count": len(message_instances.items), + "total": message_instances.total, + "pages": message_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +# body: { +# payload: dict, +# process_instance_id: Optional[int], +# } +def message_start( + message_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Message_start.""" + message_model = MessageModel.query.filter_by(identifier=message_identifier).first() + if message_model is None: + raise ( + ApiError( + error_code="unknown_message", + message=f"Could not find message with identifier: {message_identifier}", + status_code=404, + ) + ) + + if "payload" not in body: + raise ( + ApiError( + error_code="missing_payload", + message="Body is missing payload.", + status_code=400, + ) + ) + + process_instance = None + if "process_instance_id" in body: + # to make sure we have a valid process_instance_id + process_instance = find_process_instance_by_id_or_raise( + 
body["process_instance_id"] + ) + + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=process_instance.id, + message_model_id=message_model.id, + message_type="receive", + status="ready", + ).first() + if message_instance is None: + raise ( + ApiError( + error_code="cannot_find_waiting_message", + message=f"Could not find waiting message for identifier {message_identifier} " + f"and process instance {process_instance.id}", + status_code=400, + ) + ) + MessageService.process_message_receive( + message_instance, message_model.name, body["payload"] + ) + + else: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id + ).first() + ) + + if message_triggerable_process_model is None: + raise ( + ApiError( + error_code="cannot_start_message", + message=f"Message with identifier cannot be start with message: {message_identifier}", + status_code=400, + ) + ) + + process_instance = MessageService.process_message_triggerable_process_model( + message_triggerable_process_model, + message_model.name, + body["payload"], + g.user, + ) + + return Response( + json.dumps(ProcessInstanceModelSchema().dump(process_instance)), + status=200, + mimetype="application/json", + ) + + +def process_instance_list( + process_group_identifier: Optional[str] = None, + process_model_identifier: Optional[str] = None, + page: int = 1, + per_page: int = 100, + start_from: Optional[int] = None, + start_till: Optional[int] = None, + end_from: Optional[int] = None, + end_till: Optional[int] = None, + process_status: Optional[str] = None, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_instance_query = ProcessInstanceModel.query + if process_model_identifier is not None and process_group_identifier is not None: + process_model = get_process_model( + process_model_identifier, process_group_identifier + ) + + process_instance_query = process_instance_query.filter_by( + 
process_model_identifier=process_model.id + ) + + # this can never happen. obviously the class has the columns it defines. this is just to appease mypy. + if ( + ProcessInstanceModel.start_in_seconds is None + or ProcessInstanceModel.end_in_seconds is None + ): + raise ( + ApiError( + error_code="unexpected_condition", + message="Something went very wrong", + status_code=500, + ) + ) + + if start_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds >= start_from + ) + if start_till is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.start_in_seconds <= start_till + ) + if end_from is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds >= end_from + ) + if end_till is not None: + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.end_in_seconds <= end_till + ) + if process_status is not None: + process_status_array = process_status.split(",") + process_instance_query = process_instance_query.filter( + ProcessInstanceModel.status.in_(process_status_array) # type: ignore + ) + + process_instances = process_instance_query.order_by( + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ).paginate(page=page, per_page=per_page, error_out=False) + + response_json = { + "results": process_instances.items, + "pagination": { + "count": len(process_instances.items), + "total": process_instances.total, + "pages": process_instances.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def process_instance_show( + process_group_id: str, process_model_id: str, process_instance_id: int +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + current_version_control_revision = GitService.get_current_revision() + process_model = 
get_process_model(process_model_id, process_group_id) + + if process_model.primary_file_name: + if ( + process_instance.bpmn_version_control_identifier + == current_version_control_revision + ): + bpmn_xml_file_contents = SpecFileService.get_data( + process_model, process_model.primary_file_name + ) + else: + bpmn_xml_file_contents = GitService.get_instance_file_contents_for_revision( + process_model, process_instance.bpmn_version_control_identifier + ) + process_instance.bpmn_xml_file_contents = bpmn_xml_file_contents + + return make_response(jsonify(process_instance), 200) + + +def process_instance_delete( + process_group_id: str, process_model_id: str, process_instance_id: int +) -> flask.wrappers.Response: + """Create_process_instance.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + + db.session.delete(process_instance) + db.session.commit() + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_list( + process_group_id: str, process_model_id: str, page: int = 1, per_page: int = 100 +) -> flask.wrappers.Response: + """Process_instance_report_list.""" + process_model = get_process_model(process_model_id, process_group_id) + + process_instance_reports = ProcessInstanceReportModel.query.filter_by( + process_group_identifier=process_group_id, + process_model_identifier=process_model.id, + ).all() + + return make_response(jsonify(process_instance_reports), 200) + + +def process_instance_report_create( + process_group_id: str, process_model_id: str, body: Dict[str, Any] +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + ProcessInstanceReportModel.create_report( + identifier=body["identifier"], + process_group_identifier=process_group_id, + process_model_identifier=process_model_id, + user=g.user, + report_metadata=body["report_metadata"], + ) + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def 
process_instance_report_update( + process_group_id: str, + process_model_id: str, + report_identifier: str, + body: Dict[str, Any], +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + process_group_identifier=process_group_id, + process_model_identifier=process_model_id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + process_instance_report.report_metadata = body["report_metadata"] + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def process_instance_report_delete( + process_group_id: str, + process_model_id: str, + report_identifier: str, +) -> flask.wrappers.Response: + """Process_instance_report_create.""" + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier, + process_group_identifier=process_group_id, + process_model_identifier=process_model_id, + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + db.session.delete(process_instance_report) + db.session.commit() + + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def service_tasks_show() -> flask.wrappers.Response: + """Service_tasks_show.""" + available_connectors = ServiceTaskService.available_connectors() + print(available_connectors) + + return Response( + json.dumps(available_connectors), status=200, mimetype="application/json" + ) + + +def process_instance_report_show( + process_group_id: str, + process_model_id: str, + report_identifier: str, + page: int = 1, + per_page: int = 100, +) -> flask.wrappers.Response: + """Process_instance_list.""" + process_model 
= get_process_model(process_model_id, process_group_id) + + process_instances = ( + ProcessInstanceModel.query.filter_by(process_model_identifier=process_model.id) + .order_by( + ProcessInstanceModel.start_in_seconds.desc(), ProcessInstanceModel.id.desc() # type: ignore + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + process_instance_report = ProcessInstanceReportModel.query.filter_by( + identifier=report_identifier + ).first() + if process_instance_report is None: + raise ApiError( + error_code="unknown_process_instance_report", + message="Unknown process instance report", + status_code=404, + ) + + substitution_variables = request.args.to_dict() + result_dict = process_instance_report.generate_report( + process_instances.items, substitution_variables + ) + + # update this if we go back to a database query instead of filtering in memory + result_dict["pagination"] = { + "count": len(result_dict["results"]), + "total": len(result_dict["results"]), + "pages": 1, + } + + return Response(json.dumps(result_dict), status=200, mimetype="application/json") + + +def task_list_my_tasks(page: int = 1, per_page: int = 100) -> flask.wrappers.Response: + """Task_list_my_tasks.""" + principal = find_principal_or_raise() + + active_tasks = ( + ActiveTaskModel.query.filter_by(assigned_principal_id=principal.id) + .order_by(desc(ActiveTaskModel.id)) # type: ignore + .join(ProcessInstanceModel) + # just need this add_columns to add the process_model_identifier. Then add everything back that was removed. 
+ .add_columns( + ProcessInstanceModel.process_model_identifier, + ProcessInstanceModel.process_group_identifier, + ActiveTaskModel.task_data, + ActiveTaskModel.task_name, + ActiveTaskModel.task_title, + ActiveTaskModel.task_type, + ActiveTaskModel.task_status, + ActiveTaskModel.task_id, + ActiveTaskModel.id, + ActiveTaskModel.process_model_display_name, + ActiveTaskModel.process_instance_id, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + + tasks = [ActiveTaskModel.to_task(active_task) for active_task in active_tasks.items] + + response_json = { + "results": tasks, + "pagination": { + "count": len(active_tasks.items), + "total": active_tasks.total, + "pages": active_tasks.pages, + }, + } + + return make_response(jsonify(response_json), 200) + + +def process_instance_task_list( + process_instance_id: int, all_tasks: bool = False +) -> flask.wrappers.Response: + """Process_instance_task_list.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + processor = ProcessInstanceProcessor(process_instance) + + spiff_tasks = None + if all_tasks: + spiff_tasks = processor.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + else: + spiff_tasks = processor.get_all_user_tasks() + + tasks = [] + for spiff_task in spiff_tasks: + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + tasks.append(task) + + return make_response(jsonify(tasks), 200) + + +def task_show(process_instance_id: int, task_id: str) -> flask.wrappers.Response: + """Task_show.""" + process_instance = find_process_instance_by_id_or_raise(process_instance_id) + process_model = get_process_model( + process_instance.process_model_identifier, + process_instance.process_group_identifier, + ) + + form_schema_file_name = "" + form_ui_schema_file_name = "" + spiff_task = get_spiff_task_from_process_instance(task_id, process_instance) + extensions = spiff_task.task_spec.extensions + + if "properties" in extensions: + 
properties = extensions["properties"] + if "formJsonSchemaFilename" in properties: + form_schema_file_name = properties["formJsonSchemaFilename"] + if "formUiSchemaFilename" in properties: + form_ui_schema_file_name = properties["formUiSchemaFilename"] + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + task.data = spiff_task.data + task.process_model_display_name = process_model.display_name + + process_model_with_form = process_model + all_processes = SpecFileService.get_all_bpmn_process_identifiers_for_process_model( + process_model + ) + if task.process_name not in all_processes: + bpmn_file_full_path = ( + ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( + task.process_name + ) + ) + relative_path = os.path.relpath( + bpmn_file_full_path, start=FileSystemService.root_path() + ) + process_model_relative_path = os.path.dirname(relative_path) + process_model_with_form = ( + ProcessModelService.get_process_model_from_relative_path( + process_model_relative_path + ) + ) + + if task.type == "User Task": + if not form_schema_file_name: + raise ( + ApiError( + error_code="missing_form_file", + message=f"Cannot find a form file for process_instance_id: {process_instance_id}, task_id: {task_id}", + status_code=400, + ) + ) + + form_contents = prepare_form_data( + form_schema_file_name, + task.data, + process_model_with_form, + ) + + try: + # form_contents is a str + form_dict = json.loads(form_contents) + except Exception as exception: + raise ( + ApiError( + error_code="error_loading_form", + message=f"Could not load form schema from: {form_schema_file_name}. 
Error was: {str(exception)}", + status_code=400, + ) + ) from exception + + if task.data: + _update_form_schema_with_task_data_as_needed(form_dict, task.data) + + if form_contents: + task.form_schema = form_dict + + if form_ui_schema_file_name: + ui_form_contents = prepare_form_data( + form_ui_schema_file_name, + task.data, + process_model_with_form, + ) + if ui_form_contents: + task.form_ui_schema = ui_form_contents + elif task.type == "Manual Task": + if task.properties and task.data: + if task.properties["instructionsForEndUser"]: + task.properties["instructionsForEndUser"] = render_jinja_template( + task.properties["instructionsForEndUser"], task.data + ) + return make_response(jsonify(task), 200) + + +def task_submit( + process_instance_id: int, + task_id: str, + body: Dict[str, Any], + terminate_loop: bool = False, +) -> flask.wrappers.Response: + """Task_submit_user_data.""" + principal = find_principal_or_raise() + active_task_assigned_to_me = find_active_task_by_id_or_raise( + process_instance_id, task_id, principal.id + ) + + process_instance = find_process_instance_by_id_or_raise( + active_task_assigned_to_me.process_instance_id + ) + + processor = ProcessInstanceProcessor(process_instance) + spiff_task = get_spiff_task_from_process_instance( + task_id, process_instance, processor=processor + ) + + if spiff_task.state != TaskState.READY: + raise ( + ApiError( + error_code="invalid_state", + message="You may not update a task unless it is in the READY state.", + status_code=400, + ) + ) + + if terminate_loop and spiff_task.is_looping(): + spiff_task.terminate_loop() + + # TODO: support repeating fields + # Extract the details specific to the form submitted + # form_data = WorkflowService().extract_form_data(body, spiff_task) + + ProcessInstanceService.complete_form_task(processor, spiff_task, body, g.user) + + # If we need to update all tasks, then get the next ready task and if it a multi-instance with the same + # task spec, complete that form as well. 
+ # if update_all: + # last_index = spiff_task.task_info()["mi_index"] + # next_task = processor.next_task() + # while next_task and next_task.task_info()["mi_index"] > last_index: + # __update_task(processor, next_task, form_data, user) + # last_index = next_task.task_info()["mi_index"] + # next_task = processor.next_task() + + ProcessInstanceService.update_task_assignments(processor) + + next_active_task_assigned_to_me = ActiveTaskModel.query.filter_by( + assigned_principal_id=principal.id, process_instance_id=process_instance.id + ).first() + if next_active_task_assigned_to_me: + return make_response( + jsonify(ActiveTaskModel.to_task(next_active_task_assigned_to_me)), 200 + ) + + return Response(json.dumps({"ok": True}), status=202, mimetype="application/json") + + +def script_unit_test_create( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_run.""" + bpmn_task_identifier = _get_required_parameter_or_raise( + "bpmn_task_identifier", body + ) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + process_model = get_process_model(process_model_id, process_group_id) + file = SpecFileService.get_files(process_model, process_model.primary_file_name)[0] + if file is None: + raise ApiError( + error_code="cannot_find_file", + message=f"Could not find the primary bpmn file for process_model: {process_model.id}", + status_code=404, + ) + + # TODO: move this to an xml service or something + file_contents = SpecFileService.get_data(process_model, file.name) + bpmn_etree_element = SpecFileService.get_etree_element_from_binary_data( + file_contents, file.name + ) + + nsmap = bpmn_etree_element.nsmap + spiff_element_maker = ElementMaker( + namespace="http://spiffworkflow.org/bpmn/schema/1.0/core", nsmap=nsmap + ) + + script_task_elements = bpmn_etree_element.xpath( + 
f"//bpmn:scriptTask[@id='{bpmn_task_identifier}']", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(script_task_elements) == 0: + raise ApiError( + error_code="missing_script_task", + message=f"Cannot find a script task with id: {bpmn_task_identifier}", + status_code=404, + ) + script_task_element = script_task_elements[0] + + extension_elements = None + extension_elements_array = script_task_element.xpath( + "//bpmn:extensionElements", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if len(extension_elements_array) == 0: + bpmn_element_maker = ElementMaker( + namespace="http://www.omg.org/spec/BPMN/20100524/MODEL", nsmap=nsmap + ) + extension_elements = bpmn_element_maker("extensionElements") + script_task_element.append(extension_elements) + else: + extension_elements = extension_elements_array[0] + + unit_test_elements = None + unit_test_elements_array = extension_elements.xpath( + "//spiffworkflow:unitTests", + namespaces={"spiffworkflow": "http://spiffworkflow.org/bpmn/schema/1.0/core"}, + ) + if len(unit_test_elements_array) == 0: + unit_test_elements = spiff_element_maker("unitTests") + extension_elements.append(unit_test_elements) + else: + unit_test_elements = unit_test_elements_array[0] + + fuzz = "".join( + random.choice(string.ascii_uppercase + string.digits) # noqa: S311 + for _ in range(7) + ) + unit_test_id = f"unit_test_{fuzz}" + + input_json_element = spiff_element_maker("inputJson", json.dumps(input_json)) + expected_output_json_element = spiff_element_maker( + "expectedOutputJson", json.dumps(expected_output_json) + ) + unit_test_element = spiff_element_maker("unitTest", id=unit_test_id) + unit_test_element.append(input_json_element) + unit_test_element.append(expected_output_json_element) + unit_test_elements.append(unit_test_element) + SpecFileService.update_file( + process_model, file.name, etree.tostring(bpmn_etree_element) + ) + + return Response(json.dumps({"ok": True}), 
status=202, mimetype="application/json") + + +def script_unit_test_run( + process_group_id: str, process_model_id: str, body: Dict[str, Union[str, bool, int]] +) -> flask.wrappers.Response: + """Script_unit_test_run.""" + # FIXME: We should probably clear this somewhere else but this works + current_app.config["THREAD_LOCAL_DATA"].process_instance_id = None + + python_script = _get_required_parameter_or_raise("python_script", body) + input_json = _get_required_parameter_or_raise("input_json", body) + expected_output_json = _get_required_parameter_or_raise( + "expected_output_json", body + ) + + result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + python_script, input_json, expected_output_json + ) + return make_response(jsonify(result), 200) + + +def get_file_from_request() -> Any: + """Get_file_from_request.""" + request_file = connexion.request.files.get("file") + if not request_file: + raise ApiError( + error_code="no_file_given", + message="Given request does not contain a file", + status_code=400, + ) + return request_file + + +def get_process_model(process_model_id: str, process_group_id: str) -> ProcessModelInfo: + """Get_process_model.""" + process_model = None + try: + process_model = ProcessModelService().get_process_model( + process_model_id, group_id=process_group_id + ) + except ProcessEntityNotFoundError as exception: + raise ( + ApiError( + error_code="process_model_cannot_be_found", + message=f"Process model cannot be found: {process_model_id}", + status_code=400, + ) + ) from exception + + return process_model + + +def find_principal_or_raise() -> PrincipalModel: + """Find_principal_or_raise.""" + principal = PrincipalModel.query.filter_by(user_id=g.user.id).first() + if principal is None: + raise ( + ApiError( + error_code="principal_not_found", + message=f"Principal not found from user id: {g.user.id}", + status_code=400, + ) + ) + return principal # type: ignore + + +def find_active_task_by_id_or_raise( + process_instance_id: 
int, task_id: str, principal_id: PrincipalModel +) -> ActiveTaskModel: + """Find_active_task_by_id_or_raise.""" + active_task_assigned_to_me = ActiveTaskModel.query.filter_by( + process_instance_id=process_instance_id, + task_id=task_id, + assigned_principal_id=principal_id, + ).first() + if active_task_assigned_to_me is None: + message = ( + f"Task not found for principal user {principal_id} " + f"process_instance_id: {process_instance_id}, task_id: {task_id}" + ) + raise ( + ApiError( + error_code="task_not_found", + message=message, + status_code=400, + ) + ) + return active_task_assigned_to_me # type: ignore + + +def find_process_instance_by_id_or_raise( + process_instance_id: int, +) -> ProcessInstanceModel: + """Find_process_instance_by_id_or_raise.""" + process_instance = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ).first() + if process_instance is None: + raise ( + ApiError( + error_code="process_instance_cannot_be_found", + message=f"Process instance cannot be found: {process_instance_id}", + status_code=400, + ) + ) + return process_instance # type: ignore + + +def get_value_from_array_with_index(array: list, index: int) -> Any: + """Get_value_from_array_with_index.""" + if index < 0: + return None + + if index >= len(array): + return None + + return array[index] + + +def prepare_form_data( + form_file: str, task_data: Union[dict, None], process_model: ProcessModelInfo +) -> str: + """Prepare_form_data.""" + if task_data is None: + return "" + + file_contents = SpecFileService.get_data(process_model, form_file).decode("utf-8") + return render_jinja_template(file_contents, task_data) + + +def render_jinja_template(unprocessed_template: str, data: dict[str, Any]) -> str: + """Render_jinja_template.""" + jinja_environment = jinja2.Environment(autoescape=True) + template = jinja_environment.from_string(unprocessed_template) + return template.render(**data) + + +def get_spiff_task_from_process_instance( + task_id: str, + process_instance: 
ProcessInstanceModel, + processor: Union[ProcessInstanceProcessor, None] = None, +) -> SpiffTask: + """Get_spiff_task_from_process_instance.""" + if processor is None: + processor = ProcessInstanceProcessor(process_instance) + task_uuid = uuid.UUID(task_id) + spiff_task = processor.bpmn_process_instance.get_task(task_uuid) + + if spiff_task is None: + raise ( + ApiError( + error_code="empty_task", + message="Processor failed to obtain task.", + status_code=500, + ) + ) + return spiff_task + + +# +# Methods for secrets CRUD - maybe move somewhere else: +# +def get_secret(key: str) -> Optional[str]: + """Get_secret.""" + return SecretService.get_secret(key) + + +def secret_list( + page: int = 1, + per_page: int = 100, +) -> Response: + """Secret_list.""" + secrets = ( + SecretModel.query.order_by(SecretModel.key) + .join(UserModel) + .add_columns( + UserModel.username, + ) + .paginate(page=page, per_page=per_page, error_out=False) + ) + response_json = { + "results": secrets.items, + "pagination": { + "count": len(secrets.items), + "total": secrets.total, + "pages": secrets.pages, + }, + } + return make_response(jsonify(response_json), 200) + + +def add_secret(body: Dict) -> Response: + """Add secret.""" + secret_model = SecretService().add_secret(body["key"], body["value"], g.user.id) + assert secret_model # noqa: S101 + return Response( + json.dumps(SecretModelSchema().dump(secret_model)), + status=201, + mimetype="application/json", + ) + + +def update_secret(key: str, body: dict) -> Response: + """Update secret.""" + SecretService().update_secret(key, body["value"], body["creator_user_id"]) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def delete_secret(key: str) -> Response: + """Delete secret.""" + current_user = UserService.current_user() + SecretService.delete_secret(key, current_user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def add_allowed_process_path(body: 
dict) -> Response: + """Get allowed process paths.""" + allowed_process_path = SecretService.add_allowed_process( + body["secret_id"], g.user.id, body["allowed_relative_path"] + ) + return Response( + json.dumps(SecretAllowedProcessSchema().dump(allowed_process_path)), + status=201, + mimetype="application/json", + ) + + +def delete_allowed_process_path(allowed_process_path_id: int) -> Response: + """Get allowed process paths.""" + SecretService().delete_allowed_process(allowed_process_path_id, g.user.id) + return Response(json.dumps({"ok": True}), status=200, mimetype="application/json") + + +def _get_required_parameter_or_raise(parameter: str, post_body: dict[str, Any]) -> Any: + """Get_required_parameter_or_raise.""" + return_value = None + if parameter in post_body: + return_value = post_body[parameter] + + if return_value is None or return_value == "": + raise ( + ApiError( + error_code="missing_required_parameter", + message=f"Parameter is missing from json request body: {parameter}", + status_code=400, + ) + ) + + return return_value + + +# originally from: https://bitcoden.com/answers/python-nested-dictionary-update-value-where-any-nested-key-matches +def _update_form_schema_with_task_data_as_needed( + in_dict: dict, task_data: dict +) -> None: + """Update_nested.""" + for k, value in in_dict.items(): + if "anyOf" == k: + # value will look like the array on the right of "anyOf": ["options_from_task_data_var:awesome_options"] + if isinstance(value, list): + if len(value) == 1: + first_element_in_value_list = value[0] + if isinstance(first_element_in_value_list, str): + if first_element_in_value_list.startswith( + "options_from_task_data_var:" + ): + task_data_var = first_element_in_value_list.replace( + "options_from_task_data_var:", "" + ) + + if task_data_var not in task_data: + raise ( + ApiError( + error_code="missing_task_data_var", + message=f"Task data is missing variable: {task_data_var}", + status_code=500, + ) + ) + + select_options_from_task_data 
= task_data.get(task_data_var) + if isinstance(select_options_from_task_data, list): + if all( + "value" in d and "label" in d + for d in select_options_from_task_data + ): + + def map_function( + task_data_select_option: TaskDataSelectOption, + ) -> ReactJsonSchemaSelectOption: + """Map_function.""" + return { + "type": "string", + "enum": [task_data_select_option["value"]], + "title": task_data_select_option["label"], + } + + options_for_react_json_schema_form = list( + map(map_function, select_options_from_task_data) + ) + + in_dict[k] = options_for_react_json_schema_form + elif isinstance(value, dict): + _update_form_schema_with_task_data_as_needed(value, task_data) + elif isinstance(value, list): + for o in value: + if isinstance(o, dict): + _update_form_schema_with_task_data_as_needed(o, task_data) diff --git a/src/spiffworkflow_backend/routes/user.py b/src/spiffworkflow_backend/routes/user.py new file mode 100644 index 00000000..e5c08d41 --- /dev/null +++ b/src/spiffworkflow_backend/routes/user.py @@ -0,0 +1,376 @@ +"""User.""" +import ast +import base64 +from typing import Any +from typing import Dict +from typing import Optional +from typing import Union + +import jwt +from flask import current_app +from flask import g +from flask import redirect +from flask_bpmn.api.api_error import ApiError +from werkzeug.wrappers.response import Response + +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.authentication_service import ( + PublicAuthenticationService, +) +from spiffworkflow_backend.services.user_service import UserService + +""" +.. module:: crc.api.user + :synopsis: Single Sign On (SSO) user login and session handlers +""" + + +def verify_token(token: Optional[str] = None) -> Dict[str, Optional[Union[str, int]]]: + """Verify the token for the user (if provided). + + If in production environment and token is not provided, gets user from the SSO headers and returns their token. 
+ + Args: + token: Optional[str] + + Returns: + token: str + + Raises: # noqa: DAR401 + ApiError: If not on production and token is not valid, returns an 'invalid_token' 403 error. + If on production and user is not authenticated, returns a 'no_user' 403 error. + """ + if token: + user_model = None + decoded_token = get_decoded_token(token) + + if decoded_token is not None: + + if "token_type" in decoded_token: + token_type = decoded_token["token_type"] + if token_type == "internal": # noqa: S105 + try: + user_model = get_user_from_decoded_internal_token(decoded_token) + except Exception as e: + current_app.logger.error( + f"Exception in verify_token getting user from decoded internal token. {e}" + ) + + elif "iss" in decoded_token.keys(): + try: + user_info = PublicAuthenticationService.get_user_info_from_id_token( + token + ) + except ApiError as ae: + raise ae + except Exception as e: + current_app.logger.error(f"Exception raised in get_token: {e}") + raise ApiError( + error_code="fail_get_user_info", + message="Cannot get user info from token", + ) from e + + if ( + user_info is not None and "error" not in user_info + ): # not sure what to test yet + user_model = ( + UserModel.query.filter(UserModel.service == "open_id") + .filter(UserModel.service_id == user_info["sub"]) + .first() + ) + if user_model is None: + raise ApiError( + error_code="invalid_user", + message="Invalid user. Please log in.", + status_code=401, + ) + # no user_info + else: + raise ApiError( + error_code="no_user_info", message="Cannot retrieve user info" + ) + + else: + current_app.logger.debug( + "token_type not in decode_token in verify_token" + ) + raise ApiError( + error_code="invalid_token", + message="Invalid token. 
Please log in.", + status_code=401, + ) + + if user_model: + g.user = user_model + + # If the user is valid, store the token for this session + if g.user: + g.token = token + scope = get_scope(token) + return {"uid": g.user.id, "sub": g.user.id, "scope": scope} + # return validate_scope(token, user_info, user_model) + else: + raise ApiError(error_code="no_user_id", message="Cannot get a user id") + + raise ApiError( + error_code="invalid_token", message="Cannot validate token.", status_code=401 + ) + # no token -- do we ever get here? + # else: + # ... + # if current_app.config.get("DEVELOPMENT"): + # # Fall back to a default user if this is not production. + # g.user = UserModel.query.first() + # if not g.user: + # raise ApiError( + # "no_user", + # "You are in development mode, but there are no users in the database. Add one, and it will use it.", + # ) + # token_from_user = g.user.encode_auth_token() + # token_info = UserModel.decode_auth_token(token_from_user) + # return token_info + # + # else: + # raise ApiError( + # error_code="no_auth_token", + # message="No authorization token was available.", + # status_code=401, + # ) + + +def validate_scope(token: Any) -> bool: + """Validate_scope.""" + print("validate_scope") + # token = PublicAuthenticationService.refresh_token(token) + # user_info = PublicAuthenticationService.get_user_info_from_public_access_token(token) + # bearer_token = PublicAuthenticationService.get_bearer_token(token) + # permission = PublicAuthenticationService.get_permission_by_basic_token(token) + # permissions = PublicAuthenticationService.get_permissions_by_token_for_resource_and_scope(token) + # introspection = PublicAuthenticationService.introspect_token(basic_token) + return True + + +# def login_api(redirect_url: str = "/v1.0/ui") -> Response: +# """Api_login.""" +# # TODO: Fix this! 
mac 20220801 +# # token:dict = PublicAuthenticationService().get_public_access_token(uid, password) +# # +# # return token +# # if uid: +# # sub = f"service:internal::service_id:{uid}" +# # token = encode_auth_token(sub) +# # user_model = UserModel(username=uid, +# # uid=uid, +# # service='internal', +# # name="API User") +# # g.user = user_model +# # +# # g.token = token +# # scope = get_scope(token) +# # return token +# # return {"uid": uid, "sub": uid, "scope": scope} +# return login(redirect_url) + + +# def login_api_return(code: str, state: str, session_state: str) -> Optional[Response]: +# print("login_api_return") + + +def encode_auth_token(sub: str, token_type: Optional[str] = None) -> str: + """Generates the Auth Token. + + :return: string + """ + payload = {"sub": sub} + if token_type is None: + token_type = "internal" # noqa: S105 + payload["token_type"] = token_type + if "SECRET_KEY" in current_app.config: + secret_key = current_app.config.get("SECRET_KEY") + else: + current_app.logger.error("Missing SECRET_KEY in encode_auth_token") + raise ApiError( + error_code="encode_error", message="Missing SECRET_KEY in encode_auth_token" + ) + return jwt.encode( + payload, + str(secret_key), + algorithm="HS256", + ) + + +def login(redirect_url: str = "/") -> Response: + """Login.""" + state = PublicAuthenticationService.generate_state(redirect_url) + login_redirect_url = PublicAuthenticationService().get_login_redirect_url( + state.decode("UTF-8") + ) + return redirect(login_redirect_url) + + +def login_return(code: str, state: str, session_state: str) -> Optional[Response]: + """Login_return.""" + state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) + state_redirect_url = state_dict["redirect_url"] + + id_token_object = PublicAuthenticationService().get_id_token_object(code) + if "id_token" in id_token_object: + id_token = id_token_object["id_token"] + + if PublicAuthenticationService.validate_id_token(id_token): + user_info = 
PublicAuthenticationService.get_user_info_from_id_token( + id_token_object["access_token"] + ) + if user_info and "error" not in user_info: + user_model = ( + UserModel.query.filter(UserModel.service == "open_id") + .filter(UserModel.service_id == user_info["sub"]) + .first() + ) + + if user_model is None: + current_app.logger.debug("create_user in login_return") + name = username = email = "" + if "name" in user_info: + name = user_info["name"] + if "username" in user_info: + username = user_info["username"] + elif "preferred_username" in user_info: + username = user_info["preferred_username"] + if "email" in user_info: + email = user_info["email"] + user_model = UserService().create_user( + service="open_id", + service_id=user_info["sub"], + name=name, + username=username, + email=email, + ) + + if user_model: + g.user = user_model.id + + redirect_url = ( + f"{state_redirect_url}?" + + f"access_token={id_token_object['access_token']}&" + + f"id_token={id_token}" + ) + return redirect(redirect_url) + + raise ApiError( + error_code="invalid_login", + message="Login failed. Please try again", + status_code=401, + ) + + else: + raise ApiError( + error_code="invalid_token", + message="Login failed. 
Please try again", + status_code=401, + ) + + +def login_api() -> Response: + """Login_api.""" + redirect_url = "/v1.0/login_api_return" + state = PublicAuthenticationService.generate_state(redirect_url) + login_redirect_url = PublicAuthenticationService().get_login_redirect_url( + state.decode("UTF-8"), redirect_url + ) + return redirect(login_redirect_url) + + +def login_api_return(code: str, state: str, session_state: str) -> str: + """Login_api_return.""" + state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) + state_dict["redirect_url"] + + id_token_object = PublicAuthenticationService().get_id_token_object( + code, "/v1.0/login_api_return" + ) + access_token: str = id_token_object["access_token"] + assert access_token # noqa: S101 + return access_token + # return redirect("localhost:7000/v1.0/ui") + # return {'uid': 'user_1'} + + +def logout(id_token: str, redirect_url: Optional[str]) -> Response: + """Logout.""" + if redirect_url is None: + redirect_url = "" + return PublicAuthenticationService().logout( + redirect_url=redirect_url, id_token=id_token + ) + + +def logout_return() -> Response: + """Logout_return.""" + frontend_url = str(current_app.config["SPIFFWORKFLOW_FRONTEND_URL"]) + return redirect(f"{frontend_url}/") + + +def get_decoded_token(token: str) -> Optional[Dict]: + """Get_token_type.""" + try: + decoded_token = jwt.decode(token, options={"verify_signature": False}) + except Exception as e: + print(f"Exception in get_token_type: {e}") + raise ApiError( + error_code="invalid_token", message="Cannot decode token." 
+ ) from e + else: + if "token_type" in decoded_token or "iss" in decoded_token: + return decoded_token + else: + current_app.logger.error( + f"Unknown token type in get_decoded_token: token: {token}" + ) + raise ApiError( + error_code="unknown_token", + message="Unknown token type in get_decoded_token", + ) + # try: + # # see if we have an open_id token + # decoded_token = AuthorizationService.decode_auth_token(token) + # else: + # if 'sub' in decoded_token and 'iss' in decoded_token and 'aud' in decoded_token: + # token_type = 'id_token' + + # if 'token_type' in decoded_token and 'sub' in decoded_token: + # return True + + +def get_scope(token: str) -> str: + """Get_scope.""" + scope = "" + decoded_token = jwt.decode(token, options={"verify_signature": False}) + if "scope" in decoded_token: + scope = decoded_token["scope"] + return scope + + +def get_user_from_decoded_internal_token(decoded_token: dict) -> Optional[UserModel]: + """Get_user_from_decoded_internal_token.""" + sub = decoded_token["sub"] + parts = sub.split("::") + service = parts[0].split(":")[1] + service_id = parts[1].split(":")[1] + user: UserModel = ( + UserModel.query.filter(UserModel.service == service) + .filter(UserModel.service_id == service_id) + .first() + ) + # user: UserModel = UserModel.query.filter() + if user: + return user + user = UserModel( + username=service_id, + uid=service_id, + service=service, + service_id=service_id, + name="API User", + ) + + return user diff --git a/src/spiffworkflow_backend/routes/user_blueprint.py b/src/spiffworkflow_backend/routes/user_blueprint.py new file mode 100644 index 00000000..29bbddcd --- /dev/null +++ b/src/spiffworkflow_backend/routes/user_blueprint.py @@ -0,0 +1,240 @@ +"""Main.""" +import json +from typing import Any +from typing import Final + +import flask.wrappers +from flask import Blueprint +from flask import request +from flask import Response +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from 
sqlalchemy.exc import IntegrityError + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel + +APPLICATION_JSON: Final = "application/json" + +user_blueprint = Blueprint("main", __name__) + + +# @user_blueprint.route("/user/", methods=["GET"]) +# def create_user(username: str) -> flask.wrappers.Response: +# """Create_user.""" +# user = UserService.create_user('internal', username) +# return Response(json.dumps({"id": user.id}), status=201, mimetype=APPLICATION_JSON) + +# def _create_user(username): +# user = UserModel.query.filter_by(username=username).first() +# if user is not None: +# raise ( +# ApiError( +# error_code="user_already_exists", +# message=f"User already exists: {username}", +# status_code=409, +# ) +# ) +# +# user = UserModel(username=username, +# service='internal', +# service_id=username, +# name=username) +# try: +# db.session.add(user) +# except IntegrityError as exception: +# raise ( +# ApiError(error_code="integrity_error", message=repr(exception), status_code=500) +# ) from exception +# +# try: +# db.session.commit() +# except Exception as e: +# db.session.rollback() +# raise ApiError(code='add_user_error', +# message=f'Could not add user {username}') from e +# try: +# create_principal(user.id) +# except ApiError as ae: +# # TODO: What is the right way to do this +# raise ae +# return user +# +@user_blueprint.route("/user/", methods=["DELETE"]) +def delete_user(username: str) -> flask.wrappers.Response: + """Delete_user.""" + user = UserModel.query.filter_by(username=username).first() + if user is None: + raise ( + ApiError( + error_code="user_cannot_be_found", + message=f"User cannot be found: {username}", + status_code=400, + ) + ) + + db.session.delete(user) + db.session.commit() + + return Response(json.dumps({"ok": True}), status=204, mimetype=APPLICATION_JSON) + + 
+@user_blueprint.route("/group/", methods=["GET"]) +def create_group(group_name: str) -> flask.wrappers.Response: + """Create_group.""" + group = GroupModel.query.filter_by(name=group_name).first() + if group is not None: + raise ( + ApiError( + error_code="group_already_exists", + message=f"Group already exists: {group_name}", + status_code=409, + ) + ) + + group = GroupModel(name=group_name) + try: + db.session.add(group) + except IntegrityError as exception: + raise ( + ApiError( + error_code="integrity_error", message=repr(exception), status_code=500 + ) + ) from exception + db.session.commit() + + return Response(json.dumps({"id": group.id}), status=201, mimetype=APPLICATION_JSON) + + +@user_blueprint.route("/group/", methods=["DELETE"]) +def delete_group(group_name: str) -> flask.wrappers.Response: + """Delete_group.""" + group = GroupModel.query.filter_by(name=group_name).first() + if group is None: + raise ( + ApiError( + error_code="group_cannot_be_found", + message=f"Group cannot be found: {group_name}", + status_code=400, + ) + ) + + db.session.delete(group) + db.session.commit() + + return Response(json.dumps({"ok": True}), status=204, mimetype=APPLICATION_JSON) + + +@user_blueprint.route("/assign_user_to_group", methods=["POST"]) +def assign_user_to_group() -> flask.wrappers.Response: + """Assign_user_to_group.""" + user = get_user_from_request() + group = get_group_from_request() + + user_group_assignment = UserGroupAssignmentModel.query.filter_by( + user_id=user.id, group_id=group.id + ).first() + if user_group_assignment is not None: + raise ( + ApiError( + error_code="user_is_already_in_group", + message=f"User ({user.id}) is already in group ({group.id})", + status_code=409, + ) + ) + + user_group_assignment = UserGroupAssignmentModel(user_id=user.id, group_id=group.id) + db.session.add(user_group_assignment) + db.session.commit() + + return Response( + json.dumps({"id": user_group_assignment.id}), + status=201, + mimetype=APPLICATION_JSON, + ) + 
+ +@user_blueprint.route("/remove_user_from_group", methods=["POST"]) +def remove_user_from_group() -> flask.wrappers.Response: + """Remove_user_from_group.""" + user = get_user_from_request() + group = get_group_from_request() + + user_group_assignment = UserGroupAssignmentModel.query.filter_by( + user_id=user.id, group_id=group.id + ).first() + if user_group_assignment is None: + raise ( + ApiError( + error_code="user_not_in_group", + message=f"User ({user.id}) is not in group ({group.id})", + status_code=400, + ) + ) + + db.session.delete(user_group_assignment) + db.session.commit() + + return Response( + json.dumps({"ok": True}), + status=204, + mimetype=APPLICATION_JSON, + ) + + +def get_value_from_request_json(key: str) -> Any: + """Get_value_from_request_json.""" + if request.json is None: + return None + return request.json.get(key) + + +def get_user_from_request() -> Any: + """Get_user_from_request.""" + user_id = get_value_from_request_json("user_id") + + if user_id is None: + raise ( + ApiError( + error_code="user_id_is_required", + message="Attribute user_id is required", + status_code=400, + ) + ) + + user = UserModel.query.filter_by(id=user_id).first() + if user is None: + raise ( + ApiError( + error_code="user_cannot_be_found", + message=f"User cannot be found: {user_id}", + status_code=400, + ) + ) + return user + + +def get_group_from_request() -> Any: + """Get_group_from_request.""" + group_id = get_value_from_request_json("group_id") + + if group_id is None: + raise ( + ApiError( + error_code="group_id_is_required", + message="Attribute group_id is required", + status_code=400, + ) + ) + + group = GroupModel.query.filter_by(id=group_id).first() + if group is None: + raise ( + ApiError( + error_code="group_cannot_be_found", + message=f"Group cannot be found: {group_id}", + status_code=400, + ) + ) + return group diff --git a/src/spiffworkflow_backend/scripts/fact_service.py b/src/spiffworkflow_backend/scripts/fact_service.py new file mode 100644 
index 00000000..ea585d2a --- /dev/null +++ b/src/spiffworkflow_backend/scripts/fact_service.py @@ -0,0 +1,41 @@ +"""Fact_service.""" +from typing import Any +from typing import Optional + +from SpiffWorkflow.task import Task as SpiffTask # type: ignore + +from spiffworkflow_backend.scripts.script import Script + + +class FactService(Script): + """FactService.""" + + def get_description(self) -> str: + """Get_description.""" + return """Just your basic class that can pull in data from a few api endpoints and + do a basic task.""" + + def run( + self, + task: Optional[SpiffTask], + environment_identifier: str, + *args: Any, + **kwargs: Any + ) -> Any: + """Run.""" + if "type" not in kwargs: + raise Exception("Please specify a 'type' of fact as a keyword argument.") + else: + fact = kwargs["type"] + + if fact == "cat": + details = "The cat in the hat" # self.get_cat() + elif fact == "norris": + details = "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." + elif fact == "buzzword": + details = "Move the Needle." # self.get_buzzword() + else: + details = "unknown fact type." 
+ + # self.add_data_to_task(task, details) + return details diff --git a/src/spiffworkflow_backend/scripts/get_env.py b/src/spiffworkflow_backend/scripts/get_env.py new file mode 100644 index 00000000..310bcdec --- /dev/null +++ b/src/spiffworkflow_backend/scripts/get_env.py @@ -0,0 +1,25 @@ +"""Get_env.""" +from typing import Any +from typing import Optional + +from SpiffWorkflow.task import Task as SpiffTask # type: ignore + +from spiffworkflow_backend.scripts.script import Script + + +class GetEnv(Script): + """GetEnv.""" + + def get_description(self) -> str: + """Get_description.""" + return """Returns the current environment - ie testing, staging, production.""" + + def run( + self, + task: Optional[SpiffTask], + environment_identifier: str, + *_args: Any, + **kwargs: Any + ) -> Any: + """Run.""" + return environment_identifier diff --git a/src/spiffworkflow_backend/scripts/script.py b/src/spiffworkflow_backend/scripts/script.py new file mode 100644 index 00000000..f36e4ace --- /dev/null +++ b/src/spiffworkflow_backend/scripts/script.py @@ -0,0 +1,112 @@ +"""Script.""" +from __future__ import annotations + +import importlib +import os +import pkgutil +from abc import abstractmethod +from typing import Any +from typing import Callable + +from flask_bpmn.api.api_error import ApiError +from SpiffWorkflow.task import Task as SpiffTask # type: ignore + +# Generally speaking, having some global in a flask app is TERRIBLE. +# This is here, because after loading the application this will never change under +# any known condition, and it is expensive to calculate it everytime. 
+SCRIPT_SUB_CLASSES = None + + +class Script: + """Provides an abstract class that defines how scripts should work, this must be extended in all Script Tasks.""" + + @abstractmethod + def get_description(self) -> str: + """Get_description.""" + raise ApiError("invalid_script", "This script does not supply a description.") + + @abstractmethod + def run( + self, + task: SpiffTask, + environment_identifier: str, + *args: Any, + **kwargs: Any, + ) -> Any: + """Run.""" + raise ApiError( + "invalid_script", + "This is an internal error. The script you are trying to execute '%s' " + % self.__class__.__name__ + + "does not properly implement the run function.", + ) + + @staticmethod + def generate_augmented_list( + task: SpiffTask, environment_identifier: str + ) -> dict[str, Callable]: + """This makes a dictionary of lambda functions that are closed over the class instance that they represent. + + This is passed into PythonScriptParser as a list of helper functions that are + available for running. In general, they maintain the do_task call structure that they had, but + they always return a value rather than updating the task data. + + We may be able to remove the task for each of these calls if we are not using it other than potentially + updating the task data. + """ + + def make_closure( + subclass: type[Script], task: SpiffTask, environment_identifier: str + ) -> Callable: + """Yes - this is black magic. + + Essentially, we want to build a list of all of the submodules (i.e. email, user_data_get, etc) + and a function that is assocated with them. + This basically creates an Instance of the class and returns a function that calls do_task + on the instance of that class. + the next for x in range, then grabs the name of the module and associates it with the function + that we created. 
+ """ + instance = subclass() + return lambda *ar, **kw: subclass.run( + instance, + task, + environment_identifier, + *ar, + **kw, + ) + + execlist = {} + subclasses = Script.get_all_subclasses() + for x in range(len(subclasses)): + subclass = subclasses[x] + execlist[subclass.__module__.split(".")[-1]] = make_closure( + subclass, task=task, environment_identifier=environment_identifier + ) + return execlist + + @classmethod + def get_all_subclasses(cls) -> list[type[Script]]: + """Get_all_subclasses.""" + # This is expensive to generate, never changes after we load up. + global SCRIPT_SUB_CLASSES + if not SCRIPT_SUB_CLASSES: + SCRIPT_SUB_CLASSES = Script._get_all_subclasses(Script) + return SCRIPT_SUB_CLASSES + + @staticmethod + def _get_all_subclasses(script_class: Any) -> list[type[Script]]: + """_get_all_subclasses.""" + # hackish mess to make sure we have all the modules loaded for the scripts + pkg_dir = os.path.dirname(__file__) + for (_module_loader, name, _ispkg) in pkgutil.iter_modules([pkg_dir]): + importlib.import_module("." 
+ name, __package__) + + """Returns a list of all classes that extend this class.""" + all_subclasses = [] + + for subclass in script_class.__subclasses__(): + all_subclasses.append(subclass) + all_subclasses.extend(Script._get_all_subclasses(subclass)) + + return all_subclasses diff --git a/src/spiffworkflow_backend/services/acceptance_test_fixtures.py b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py new file mode 100644 index 00000000..4b8c73c1 --- /dev/null +++ b/src/spiffworkflow_backend/services/acceptance_test_fixtures.py @@ -0,0 +1,43 @@ +"""Acceptance_test_fixtures.""" +import json +import time + +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus + + +def load_fixtures() -> list[ProcessInstanceModel]: + """Load_fixtures.""" + test_process_group_id = "acceptance-tests-group-one" + test_process_model_id = "acceptance-tests-model-1" + user = BaseTest.find_or_create_user() + statuses = ProcessInstanceStatus.list() + current_time = round(time.time()) + + # as of 2022-06-24 + # not_started - 1 hour ago + # user_input_required - 2 hours ago + # waiting - 3 hourse ago + # complete - 4 hours ago + # faulted - 5 hours ago + # suspended - 6 hours ago + process_instances = [] + for i in range(len(statuses)): + process_instance = ProcessInstanceModel( + status=statuses[i], + process_initiator=user, + process_model_identifier=test_process_model_id, + process_group_identifier=test_process_group_id, + updated_at_in_seconds=round(time.time()), + start_in_seconds=current_time - (3600 * i), + end_in_seconds=current_time - (3600 * i - 20), + bpmn_json=json.dumps({"i": i}), + ) + db.session.add(process_instance) + process_instances.append(process_instance) + + db.session.commit() + return process_instances diff --git 
a/src/spiffworkflow_backend/services/authentication_service.py b/src/spiffworkflow_backend/services/authentication_service.py new file mode 100644 index 00000000..73559590 --- /dev/null +++ b/src/spiffworkflow_backend/services/authentication_service.py @@ -0,0 +1,213 @@ +"""Authentication_service.""" +import base64 +import enum +import json +import time +from typing import Optional + +import jwt +import requests +from flask import current_app +from flask import redirect +from flask_bpmn.api.api_error import ApiError +from werkzeug.wrappers.response import Response + + +class AuthenticationProviderTypes(enum.Enum): + """AuthenticationServiceProviders.""" + + open_id = "open_id" + internal = "internal" + + +class PublicAuthenticationService: + """PublicAuthenticationService.""" + + """Not sure where/if this ultimately lives. + It uses a separate public open_id client: spiffworkflow-frontend + Used during development to make testing easy. + """ + + @staticmethod + def get_open_id_args() -> tuple: + """Get_open_id_args.""" + open_id_server_url = current_app.config["OPEN_ID_SERVER_URL"] + open_id_client_id = current_app.config["OPEN_ID_CLIENT_ID"] + open_id_realm_name = current_app.config["OPEN_ID_REALM_NAME"] + open_id_client_secret_key = current_app.config[ + "OPEN_ID_CLIENT_SECRET_KEY" + ] # noqa: S105 + return ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) + + @classmethod + def get_user_info_from_id_token(cls, token: str) -> dict: + """This seems to work with basic tokens too.""" + ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) = cls.get_open_id_args() + + # backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + # backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + # backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) + + headers = {"Authorization": f"Bearer {token}"} + + request_url = 
f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/userinfo" + try: + request_response = requests.get(request_url, headers=headers) + except Exception as e: + current_app.logger.error(f"Exception in get_user_info_from_id_token: {e}") + raise ApiError( + error_code="token_error", + message=f"Exception in get_user_info_from_id_token: {e}", + status_code=401, + ) from e + + if request_response.status_code == 401: + raise ApiError( + error_code="invalid_token", message="Please login", status_code=401 + ) + elif request_response.status_code == 200: + user_info: dict = json.loads(request_response.text) + return user_info + + raise ApiError( + error_code="user_info_error", + message="Cannot get user info in get_user_info_from_id_token", + status_code=401, + ) + + def get_backend_url(self) -> str: + """Get_backend_url.""" + return str(current_app.config["SPIFFWORKFLOW_BACKEND_URL"]) + + def logout(self, id_token: str, redirect_url: Optional[str] = None) -> Response: + """Logout.""" + if redirect_url is None: + redirect_url = "/" + return_redirect_url = f"{self.get_backend_url()}/v1.0/logout_return" + ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) = PublicAuthenticationService.get_open_id_args() + request_url = ( + f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/logout?" 
+ + f"post_logout_redirect_uri={return_redirect_url}&" + + f"id_token_hint={id_token}" + ) + + return redirect(request_url) + + @staticmethod + def generate_state(redirect_url: str) -> bytes: + """Generate_state.""" + state = base64.b64encode(bytes(str({"redirect_url": redirect_url}), "UTF-8")) + return state + + def get_login_redirect_url( + self, state: str, redirect_url: str = "/v1.0/login_return" + ) -> str: + """Get_login_redirect_url.""" + ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) = PublicAuthenticationService.get_open_id_args() + return_redirect_url = f"{self.get_backend_url()}{redirect_url}" + login_redirect_url = ( + f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/auth?" + + f"state={state}&" + + "response_type=code&" + + f"client_id={open_id_client_id}&" + + "scope=openid&" + + f"redirect_uri={return_redirect_url}" + ) + return login_redirect_url + + def get_id_token_object( + self, code: str, redirect_url: str = "/v1.0/login_return" + ) -> dict: + """Get_id_token_object.""" + ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) = PublicAuthenticationService.get_open_id_args() + + backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) + headers = { + "Content-Type": "application/x-www-form-urlencoded", + "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", + } + data = { + "grant_type": "authorization_code", + "code": code, + "redirect_uri": f"{self.get_backend_url()}{redirect_url}", + } + + request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + + response = requests.post(request_url, data=data, headers=headers) + id_token_object: dict = json.loads(response.text) + return id_token_object + + @classmethod + 
def validate_id_token(cls, id_token: str) -> bool: + """Https://openid.net/specs/openid-connect-core-1_0.html#IDTokenValidation.""" + valid = True + now = time.time() + ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) = cls.get_open_id_args() + try: + decoded_token = jwt.decode(id_token, options={"verify_signature": False}) + except Exception as e: + raise ApiError( + error_code="bad_id_token", + message="Cannot decode id_token", + status_code=401, + ) from e + if decoded_token["iss"] != f"{open_id_server_url}/realms/{open_id_realm_name}": + valid = False + elif ( + open_id_client_id not in decoded_token["aud"] + and "account" not in decoded_token["aud"] + ): + valid = False + elif "azp" in decoded_token and decoded_token["azp"] not in ( + open_id_client_id, + "account", + ): + valid = False + elif now < decoded_token["iat"]: + valid = False + + if not valid: + current_app.logger.error(f"Invalid token in validate_id_token: {id_token}") + return False + + if now > decoded_token["exp"]: + raise ApiError( + error_code="invalid_token", + message="Your token is expired. 
Please Login", + status_code=401, + ) + + return True diff --git a/src/spiffworkflow_backend/services/authorization_service.py b/src/spiffworkflow_backend/services/authorization_service.py new file mode 100644 index 00000000..0209e3f9 --- /dev/null +++ b/src/spiffworkflow_backend/services/authorization_service.py @@ -0,0 +1,340 @@ +"""Authorization_service.""" +from typing import Union + +import jwt +from flask import current_app +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.permission_target import PermissionTargetModel +from spiffworkflow_backend.models.principal import MissingPrincipalError +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.user import UserModel + + +class AuthorizationService: + """Determine whether a user has permission to perform their request.""" + + @classmethod + def has_permission( + cls, principals: list[PrincipalModel], permission: str, target_uri: str + ) -> bool: + """Has_permission.""" + principal_ids = [p.id for p in principals] + permission_assignments = ( + PermissionAssignmentModel.query.filter( + PermissionAssignmentModel.principal_id.in_(principal_ids) + ) + .filter_by(permission=permission) + .join(PermissionTargetModel) + .filter_by(uri=target_uri) + .all() + ) + + for permission_assignment in permission_assignments: + if permission_assignment.grant_type.value == "permit": + return True + elif permission_assignment.grant_type.value == "deny": + return False + else: + raise Exception("Unknown grant type") + + return False + + @classmethod + def user_has_permission( + cls, user: UserModel, permission: str, target_uri: str + ) -> bool: + """User_has_permission.""" + if user.principal is None: + raise MissingPrincipalError( + f"Missing principal for user with id: {user.id}" + ) + + principals = [user.principal] + + for group in user.groups: + if 
group.principal is None: + raise MissingPrincipalError( + f"Missing principal for group with id: {group.id}" + ) + principals.append(group.principal) + + return cls.has_permission(principals, permission, target_uri) + # return False + + # def refresh_token(self, token: str) -> str: + # """Refresh_token.""" + # # if isinstance(token, str): + # # token = eval(token) + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # headers = {"Content-Type": "application/x-www-form-urlencoded"} + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # data = { + # "grant_type": "refresh_token", + # "client_id": "spiffworkflow-frontend", + # "subject_token": token, + # "refresh_token": token, + # } + # refresh_response = requests.post(request_url, headers=headers, data=data) + # refresh_token = json.loads(refresh_response.text) + # return refresh_token + + # def get_bearer_token(self, basic_token: str) -> dict: + # """Get_bearer_token.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # + # backend_basic_auth_string = f"{open_id_client_id}:{open_id_client_secret_key}" + # backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + # backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) + # + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", + # } + # data = { + # "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange", + # "client_id": open_id_client_id, + # "subject_token": basic_token, + # "audience": open_id_client_id, + # } + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # + # backend_response = requests.post(request_url, headers=headers, 
data=data) + # # json_data = json.loads(backend_response.text) + # # bearer_token = json_data['access_token'] + # bearer_token: dict = json.loads(backend_response.text) + # return bearer_token + + @staticmethod + def decode_auth_token(auth_token: str) -> dict[str, Union[str, None]]: + """Decode the auth token. + + :param auth_token: + :return: integer|string + """ + secret_key = current_app.config.get("SECRET_KEY") + if secret_key is None: + raise KeyError("we need current_app.config to have a SECRET_KEY") + + try: + payload = jwt.decode(auth_token, options={"verify_signature": False}) + return payload + except jwt.ExpiredSignatureError as exception: + raise ApiError( + "token_expired", + "The Authentication token you provided expired and must be renewed.", + ) from exception + except jwt.InvalidTokenError as exception: + raise ApiError( + "token_invalid", + "The Authentication token you provided is invalid. You need a new token. ", + ) from exception + + # def get_bearer_token_from_internal_token(self, internal_token): + # """Get_bearer_token_from_internal_token.""" + # self.decode_auth_token(internal_token) + # print(f"get_user_by_internal_token: {internal_token}") + + # def introspect_token(self, basic_token: str) -> dict: + # """Introspect_token.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # + # bearer_token = AuthorizationService().get_bearer_token(basic_token) + # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": auth_bearer_string, + # } + # data = { + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # "token": basic_token, + # } + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token/introspect" + # + # introspect_response = requests.post(request_url, 
headers=headers, data=data) + # introspection = json.loads(introspect_response.text) + # + # return introspection + + # def get_permission_by_basic_token(self, basic_token: dict) -> list: + # """Get_permission_by_basic_token.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # + # # basic_token = AuthorizationService().refresh_token(basic_token) + # # bearer_token = AuthorizationService().get_bearer_token(basic_token['access_token']) + # bearer_token = AuthorizationService().get_bearer_token(basic_token) + # # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # auth_bearer_string = f"Bearer {bearer_token}" + # + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": auth_bearer_string, + # } + # data = { + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + # "response_mode": "permissions", + # "audience": open_id_client_id, + # "response_include_resource_name": True, + # } + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # permission_response = requests.post(request_url, headers=headers, data=data) + # permission = json.loads(permission_response.text) + # return permission + + # def get_auth_status_for_resource_and_scope_by_token( + # self, basic_token: dict, resource: str, scope: str + # ) -> str: + # """Get_auth_status_for_resource_and_scope_by_token.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # + # # basic_token = AuthorizationService().refresh_token(basic_token) + # bearer_token = AuthorizationService().get_bearer_token(basic_token) + # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # + # headers = { + # "Content-Type": 
"application/x-www-form-urlencoded", + # "Authorization": auth_bearer_string, + # } + # data = { + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + # "permission": f"{resource}#{scope}", + # "response_mode": "permissions", + # "audience": open_id_client_id, + # } + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # auth_response = requests.post(request_url, headers=headers, data=data) + # + # print("get_auth_status_for_resource_and_scope_by_token") + # auth_status: str = json.loads(auth_response.text) + # return auth_status + + # def get_permissions_by_token_for_resource_and_scope( + # self, basic_token: str, resource: str|None=None, scope: str|None=None + # ) -> str: + # """Get_permissions_by_token_for_resource_and_scope.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # + # # basic_token = AuthorizationService().refresh_token(basic_token) + # # bearer_token = AuthorizationService().get_bearer_token(basic_token['access_token']) + # bearer_token = AuthorizationService().get_bearer_token(basic_token) + # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": auth_bearer_string, + # } + # permision = "" + # if resource is not None and resource != '': + # permision += resource + # if scope is not None and scope != '': + # permision += "#" + scope + # data = { + # "client_id": open_id_client_id, + # "client_secret": open_id_client_secret_key, + # "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + # "response_mode": "permissions", + # "permission": permision, + # "audience": open_id_client_id, + # "response_include_resource_name": True, + # } + # request_url = 
f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # permission_response = requests.post(request_url, headers=headers, data=data) + # permission: str = json.loads(permission_response.text) + # return permission + + # def get_resource_set(self, public_access_token, uri): + # """Get_resource_set.""" + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # bearer_token = AuthorizationService().get_bearer_token(public_access_token) + # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # headers = { + # "Content-Type": "application/json", + # "Authorization": auth_bearer_string, + # } + # data = { + # "matchingUri": "true", + # "deep": "true", + # "max": "-1", + # "exactName": "false", + # "uri": uri, + # } + # + # # f"matchingUri=true&deep=true&max=-1&exactName=false&uri={URI_TO_TEST_AGAINST}" + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/authz/protection/resource_set" + # response = requests.get(request_url, headers=headers, data=data) + # + # print("get_resource_set") + + # def get_permission_by_token(self, public_access_token: str) -> dict: + # """Get_permission_by_token.""" + # # TODO: Write a test for this + # ( + # open_id_server_url, + # open_id_client_id, + # open_id_realm_name, + # open_id_client_secret_key, + # ) = AuthorizationService.get_open_id_args() + # bearer_token = AuthorizationService().get_bearer_token(public_access_token) + # auth_bearer_string = f"Bearer {bearer_token['access_token']}" + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": auth_bearer_string, + # } + # data = { + # "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + # "audience": open_id_client_id, + # } + # request_url = f"{open_id_server_url}/realms/{open_id_realm_name}/protocol/openid-connect/token" + # permission_response = requests.post(request_url, 
headers=headers, data=data) + # permission: dict = json.loads(permission_response.text) + # + # return permission + + +class KeycloakAuthorization: + """Interface with Keycloak server.""" + + +# class KeycloakClient: diff --git a/src/spiffworkflow_backend/services/background_processing_service.py b/src/spiffworkflow_backend/services/background_processing_service.py new file mode 100644 index 00000000..08a2b02d --- /dev/null +++ b/src/spiffworkflow_backend/services/background_processing_service.py @@ -0,0 +1,25 @@ +"""Background_processing_service.""" +import flask + +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) + + +class BackgroundProcessingService: + """Used to facilitate doing work outside of an HTTP request/response.""" + + def __init__(self, app: flask.app.Flask): + """__init__.""" + self.app = app + + def run(self) -> None: + """Since this runs in a scheduler, we need to specify the app context as well.""" + with self.app.app_context(): + ProcessInstanceService.do_waiting() + + def process_message_instances_with_app_context(self) -> None: + """Since this runs in a scheduler, we need to specify the app context as well.""" + with self.app.app_context(): + MessageService.process_message_instances() diff --git a/src/spiffworkflow_backend/services/email_service.py b/src/spiffworkflow_backend/services/email_service.py new file mode 100644 index 00000000..461c9fd2 --- /dev/null +++ b/src/spiffworkflow_backend/services/email_service.py @@ -0,0 +1,49 @@ +"""Email_service.""" +from typing import List +from typing import Optional + +from flask import current_app +from flask_mail import Message # type: ignore + + +class EmailService: + """Provides common interface for working with an Email.""" + + @staticmethod + def add_email( + subject: str, + sender: str, + recipients: List[str], + content: str, + content_html: str, + cc: Optional[str] = 
None, + bcc: Optional[str] = None, + reply_to: Optional[str] = None, + attachment_files: Optional[dict] = None, + ) -> None: + """We will receive all data related to an email and send it.""" + mail = current_app.config["MAIL_APP"] + + # Send mail + try: + msg = Message( + subject, + sender=sender, + recipients=recipients, + body=content, + html=content_html, + cc=cc, + bcc=bcc, + reply_to=reply_to, + ) + + if attachment_files is not None: + for file in attachment_files: + msg.attach(file["name"], file["type"], file["data"]) + + mail.send(msg) + + except Exception as e: + # app.logger.error('An exception happened in EmailService', exc_info=True) + # app.logger.error(str(e)) + raise e diff --git a/src/spiffworkflow_backend/services/error_handling_service.py b/src/spiffworkflow_backend/services/error_handling_service.py new file mode 100644 index 00000000..36c66d93 --- /dev/null +++ b/src/spiffworkflow_backend/services/error_handling_service.py @@ -0,0 +1,113 @@ +"""Error_handling_service.""" +from typing import Any +from typing import List +from typing import Union + +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.services.email_service import EmailService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +class ErrorHandlingService: + """ErrorHandlingService.""" + + @staticmethod + def set_instance_status(instance_id: int, status: str) -> None: + """Set_instance_status.""" + instance = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == instance_id) + .first() + ) + if instance: + instance.status = status + db.session.commit() + + def handle_error( + self, _processor: 
ProcessInstanceProcessor, _error: Union[ApiError, Exception] + ) -> None: + """On unhandled exceptions, set instance.status based on model.fault_or_suspend_on_exception.""" + process_model = ProcessModelService().get_process_model( + _processor.process_model_identifier, _processor.process_group_identifier + ) + if process_model.fault_or_suspend_on_exception == "suspend": + self.set_instance_status( + _processor.process_instance_model.id, + ProcessInstanceStatus.suspended.value, + ) + else: + # fault is the default + self.set_instance_status( + _processor.process_instance_model.id, + ProcessInstanceStatus.faulted.value, + ) + + if len(process_model.exception_notification_addresses) > 0: + try: + # some notification method (waku?) + self.handle_email_notification( + _processor, _error, process_model.exception_notification_addresses + ) + except Exception as e: + # hmm... what to do if a notification method fails. Probably log, at least + print(e) + + @staticmethod + def hanle_sentry_notification(_error: ApiError, _recipients: List) -> None: + """SentryHandler.""" + ... 
+ + @staticmethod + def handle_email_notification( + processor: ProcessInstanceProcessor, + error: Union[ApiError, Exception], + recipients: List, + ) -> None: + """EmailHandler.""" + subject = "Unexpected error in app" + if isinstance(error, ApiError): + content = f"{error.message}" + else: + content = str(error) + content_html = content + + EmailService.add_email( + subject, + "sender@company.com", + recipients, + content, + content_html, + cc=None, + bcc=None, + reply_to=None, + attachment_files=None, + ) + + @staticmethod + def handle_waku_notification(_error: ApiError, _recipients: List) -> Any: + """WakuHandler.""" + # class WakuMessage: + # """WakuMessage.""" + # + # payload: str + # contentTopic: str # Optional + # version: int # Optional + # timestamp: int # Optional + + +class FailingService: + """FailingService.""" + + @staticmethod + def fail_as_service() -> None: + """It fails.""" + raise ApiError( + error_code="failing_service", message="This is my failing service" + ) diff --git a/src/spiffworkflow_backend/services/file_system_service.py b/src/spiffworkflow_backend/services/file_system_service.py new file mode 100644 index 00000000..735f13d7 --- /dev/null +++ b/src/spiffworkflow_backend/services/file_system_service.py @@ -0,0 +1,200 @@ +"""File_system_service.""" +import os +from datetime import datetime +from typing import List +from typing import Optional + +import pytz +from flask import current_app +from flask_bpmn.api.api_error import ApiError + +from spiffworkflow_backend.models.file import CONTENT_TYPES +from spiffworkflow_backend.models.file import File +from spiffworkflow_backend.models.file import FileType +from spiffworkflow_backend.models.process_model import ProcessModelInfo + + +class FileSystemService: + """FileSystemService.""" + + """ Simple Service meant for extension that provides some useful + methods for dealing with the File system. 
+ """ + LIBRARY_SPECS = "Library Specs" + STAND_ALONE_SPECS = "Stand Alone" + MASTER_SPECIFICATION = "Master Specification" + REFERENCE_FILES = "Reference Files" + SPECIAL_FOLDERS = [LIBRARY_SPECS, MASTER_SPECIFICATION, REFERENCE_FILES] + CAT_JSON_FILE = "process_group.json" + WF_JSON_FILE = "workflow.json" + + @staticmethod + def root_path() -> str: + """Root_path.""" + # fixme: allow absolute files + dir_name = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + app_root = current_app.root_path + return os.path.join(app_root, "..", dir_name) + + @staticmethod + def process_group_path(name: str) -> str: + """Category_path.""" + return os.path.join(FileSystemService.root_path(), name) + + @staticmethod + def library_path(name: str) -> str: + """Library_path.""" + return os.path.join( + FileSystemService.root_path(), FileSystemService.LIBRARY_SPECS, name + ) + + @staticmethod + def full_path_from_relative_path(relative_path: str) -> str: + """Full_path_from_relative_path.""" + return os.path.join(FileSystemService.root_path(), relative_path) + + @staticmethod + def process_model_relative_path(spec: ProcessModelInfo) -> str: + """Get the file path to a process model relative to BPMN_SPEC_ABSOLUTE_DIR. 
+ + If the full path is /path/to/process-group-a/group-b/process-model-a, it will return: + process-group-a/group-b/process-model-a + """ + workflow_path = FileSystemService.workflow_path(spec) + return os.path.relpath(workflow_path, start=FileSystemService.root_path()) + + @staticmethod + def process_group_path_for_spec(spec: ProcessModelInfo) -> str: + """Category_path_for_spec.""" + if spec.is_master_spec: + return os.path.join(FileSystemService.root_path()) + elif spec.library: + process_group_path = FileSystemService.process_group_path( + FileSystemService.LIBRARY_SPECS + ) + elif spec.standalone: + process_group_path = FileSystemService.process_group_path( + FileSystemService.STAND_ALONE_SPECS + ) + else: + process_group_path = FileSystemService.process_group_path( + spec.process_group_id + ) + return process_group_path + + @staticmethod + def workflow_path(spec: ProcessModelInfo) -> str: + """Workflow_path.""" + if spec.is_master_spec: + return os.path.join( + FileSystemService.root_path(), FileSystemService.MASTER_SPECIFICATION + ) + else: + process_group_path = FileSystemService.process_group_path_for_spec(spec) + return os.path.join(process_group_path, spec.id) + + @staticmethod + def full_path_to_process_model_file(spec: ProcessModelInfo, file_name: str) -> str: + """Full_path_to_process_model_file.""" + return os.path.join(FileSystemService.workflow_path(spec), file_name) + + def next_display_order(self, spec: ProcessModelInfo) -> int: + """Next_display_order.""" + path = self.process_group_path_for_spec(spec) + if os.path.exists(path): + return len(next(os.walk(path))[1]) + else: + return 0 + + @staticmethod + def write_file_data_to_system(file_path: str, file_data: bytes) -> None: + """Write_file_data_to_system.""" + os.makedirs(os.path.dirname(file_path), exist_ok=True) + with open(file_path, "wb") as f_handle: + f_handle.write(file_data) + + @staticmethod + def get_extension(file_name: str) -> str: + """Get_extension.""" + _, file_extension = 
os.path.splitext(file_name) + return file_extension.lower().strip()[1:] + + @staticmethod + def assert_valid_file_name(file_name: str) -> None: + """Assert_valid_file_name.""" + file_extension = FileSystemService.get_extension(file_name) + if file_extension not in FileType.list(): + raise ApiError( + "unknown_extension", + "The file you provided does not have an accepted extension:" + + file_extension, + status_code=404, + ) + + @staticmethod + def _timestamp(file_path: str) -> float: + """_timestamp.""" + return os.path.getmtime(file_path) + + @staticmethod + def _last_modified(file_path: str) -> datetime: + """_last_modified.""" + # Returns the last modified date of the given file. + timestamp = os.path.getmtime(file_path) + utc_dt = datetime.utcfromtimestamp(timestamp) + aware_utc_dt = utc_dt.replace(tzinfo=pytz.utc) + return aware_utc_dt + + @staticmethod + def file_type(file_name: str) -> FileType: + """File_type.""" + extension = FileSystemService.get_extension(file_name) + return FileType[extension] + + @staticmethod + def _get_files(file_path: str, file_name: Optional[str] = None) -> List[File]: + """Returns an array of File objects at the given path, can be restricted to just one file.""" + files = [] + items = os.scandir(file_path) + for item in items: + if item.is_file(): + if item.name.startswith("."): + continue # Ignore hidden files + if item.name == FileSystemService.WF_JSON_FILE: + continue # Ignore the json files. 
+ if file_name is not None and item.name != file_name: + continue + file = FileSystemService.to_file_object_from_dir_entry(item) + files.append(file) + return files + + @staticmethod + def to_file_object(file_name: str, file_path: str) -> File: + """To_file_object.""" + file_type = FileSystemService.file_type(file_name) + content_type = CONTENT_TYPES[file_type.name] + last_modified = FileSystemService._last_modified(file_path) + size = os.path.getsize(file_path) + file = File.from_file_system( + file_name, file_type, content_type, last_modified, size + ) + return file + + @staticmethod + def to_file_object_from_dir_entry(item: os.DirEntry) -> File: + """To_file_object_from_dir_entry.""" + extension = FileSystemService.get_extension(item.name) + try: + file_type = FileType[extension] + content_type = CONTENT_TYPES[file_type.name] + except KeyError as exception: + raise ApiError( + "invalid_type", + f"Invalid File Type: {extension}, for file {item.name}", + ) from exception + stats = item.stat() + file_size = stats.st_size + last_modified = FileSystemService._last_modified(item.path) + return File.from_file_system( + item.name, file_type, content_type, last_modified, file_size + ) diff --git a/src/spiffworkflow_backend/services/git_service.py b/src/spiffworkflow_backend/services/git_service.py new file mode 100644 index 00000000..815e4cad --- /dev/null +++ b/src/spiffworkflow_backend/services/git_service.py @@ -0,0 +1,56 @@ +"""Git_service.""" +import os + +from flask import current_app + +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.services.file_system_service import FileSystemService + + +class GitService: + """GitService.""" + + @staticmethod + def get_current_revision() -> str: + """Get_current_revision.""" + bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + # The value includes a carriage return character at the end, so we don't grab the last character + current_git_revision = 
os.popen( # noqa: S605 + f"cd {bpmn_spec_absolute_dir} && git rev-parse --short HEAD" + ).read()[ + :-1 + ] # noqa: S605 + return current_git_revision + + @staticmethod + def get_instance_file_contents_for_revision( + process_model: ProcessModelInfo, revision: str + ) -> bytes: + """Get_instance_file_contents_for_revision.""" + bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model + ) + shell_cd_command = f"cd {bpmn_spec_absolute_dir}" + shell_git_command = f"git show {revision}:{process_model_relative_path}/{process_model.primary_file_name}" + shell_command = f"{shell_cd_command} && {shell_git_command}" + # git show 78ae5eb:category_number_one/script-task/script-task.bpmn + file_contents: str = os.popen(shell_command).read()[:-1] # noqa: S605 + assert file_contents # noqa: S101 + return file_contents.encode("utf-8") + + @staticmethod + def commit(message: str) -> str: + """Commit.""" + bpmn_spec_absolute_dir = current_app.config["BPMN_SPEC_ABSOLUTE_DIR"] + git_username = "" + git_email = "" + if ( + current_app.config["GIT_COMMIT_USERNAME"] + and current_app.config["GIT_COMMIT_EMAIL"] + ): + git_username = current_app.config["GIT_COMMIT_USERNAME"] + git_email = current_app.config["GIT_COMMIT_EMAIL"] + shell_command = f"./bin/git_commit_bpmn_models_repo '{bpmn_spec_absolute_dir}' '{message}' '{git_username}' '{git_email}'" + output = os.popen(shell_command).read() # noqa: S605 + return output diff --git a/src/spiffworkflow_backend/services/logging_service.py b/src/spiffworkflow_backend/services/logging_service.py new file mode 100644 index 00000000..af919e92 --- /dev/null +++ b/src/spiffworkflow_backend/services/logging_service.py @@ -0,0 +1,204 @@ +"""Logging_service.""" +import json +import logging +import re +from typing import Any +from typing import Optional + +from flask import g +from flask.app import Flask +from flask_bpmn.models.db import db + 
+from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel + + +# flask logging formats: +# from: https://www.askpython.com/python-modules/flask/flask-logging +# %(asctime)s— The timestamp as a string. +# %(levelname)s—The logging level as a string. +# %(name)s—The logger name as a string. +# %(threadname)s—The thread name as a string. +# %(message)s—The log message. + +# full message list: +# {'name': 'gunicorn.error', 'msg': 'GET /admin/token', 'args': (), 'levelname': 'DEBUG', 'levelno': 10, 'pathname': '~/.cache/pypoetry/virtualenvs/spiffworkflow-backend-R_hdWfN1-py3.10/lib/python3.10/site-packages/gunicorn/glogging.py', 'filename': 'glogging.py', 'module': 'glogging', 'exc_info': None, 'exc_text': None, 'stack_info': None, 'lineno': 267, 'funcName': 'debug', 'created': 1657307111.4513023, 'msecs': 451.30228996276855, 'relativeCreated': 1730.785846710205, 'thread': 139945864087360, 'threadName': 'MainThread', 'processName': 'MainProcess', 'process': 2109561, 'message': 'GET /admin/token', 'asctime': '2022-07-08T15:05:11.451Z'} + +# originally from https://stackoverflow.com/a/70223539/6090676 +class JsonFormatter(logging.Formatter): + """Formatter that outputs JSON strings after parsing the LogRecord. + + @param dict fmt_dict: Key: logging format attribute pairs. Defaults to {"message": "message"}. + @param str time_format: time.strftime() format string. Default: "%Y-%m-%dT%H:%M:%S" + @param str msec_format: Microsecond formatting. Appended at the end. 
Default: "%s.%03dZ" + """ + + def __init__( + self, + fmt_dict: Optional[dict] = None, + time_format: str = "%Y-%m-%dT%H:%M:%S", + msec_format: str = "%s.%03dZ", + ): + """__init__.""" + self.fmt_dict = fmt_dict if fmt_dict is not None else {"message": "message"} + self.default_time_format = time_format + self.default_msec_format = msec_format + self.datefmt = None + + def usesTime(self) -> bool: + """Overwritten to look for the attribute in the format dict values instead of the fmt string.""" + return "asctime" in self.fmt_dict.values() + + # we are overriding a method that returns a string and returning a dict, hence the Any + def formatMessage(self, record: logging.LogRecord) -> Any: + """Overwritten to return a dictionary of the relevant LogRecord attributes instead of a string. + + KeyError is raised if an unknown attribute is provided in the fmt_dict. + """ + return { + fmt_key: record.__dict__[fmt_val] + for fmt_key, fmt_val in self.fmt_dict.items() + } + + def format(self, record: logging.LogRecord) -> str: + """Mostly the same as the parent's class method. + + The difference being that a dict is manipulated and dumped as JSON instead of a string. 
+ """ + record.message = record.getMessage() + + if self.usesTime(): + record.asctime = self.formatTime(record, self.datefmt) + + message_dict = self.formatMessage(record) + + if record.exc_info: + # Cache the traceback text to avoid converting it multiple times + # (it's constant anyway) + if not record.exc_text: + record.exc_text = self.formatException(record.exc_info) + + if record.exc_text: + message_dict["exc_info"] = record.exc_text + + if record.stack_info: + message_dict["stack_info"] = self.formatStack(record.stack_info) + + return json.dumps(message_dict, default=str) + + +class SpiffFilter(logging.Filter): + """SpiffFilter.""" + + def __init__(self, app: Flask): + """__init__.""" + self.app = app + super().__init__() + + def filter(self, record: logging.LogRecord) -> bool: + """Filter.""" + tld = self.app.config["THREAD_LOCAL_DATA"] + process_instance_id = "" + if hasattr(tld, "process_instance_id"): + process_instance_id = tld.process_instance_id + setattr(record, "process_instance_id", process_instance_id) # noqa: B010 + if hasattr(g, "user") and g.user: + setattr(record, "current_user_id", g.user.id) # noqa: B010 + return True + + +def setup_logger(app: Flask) -> None: + """Setup_logger.""" + log_level = logging.DEBUG + spiff_log_level = logging.DEBUG + log_formatter = logging.Formatter( + "%(asctime)s - %(name)s - %(levelname)s - %(message)s" + ) + + app.logger.debug("Printing log to create app logger") + + # the json formatter is nice for real environments but makes + # debugging locally a little more difficult + if app.config["ENV_IDENTIFIER"] != "development": + json_formatter = JsonFormatter( + { + "level": "levelname", + "message": "message", + "loggerName": "name", + "processName": "processName", + "processID": "process", + "threadName": "threadName", + "threadID": "thread", + "timestamp": "asctime", + } + ) + log_formatter = json_formatter + + spiff_logger_filehandler = None + if app.config["SPIFFWORKFLOW_BACKEND_LOG_TO_FILE"]: + 
spiff_logger_filehandler = logging.FileHandler( + f"{app.instance_path}/../../log/{app.config['ENV_IDENTIFIER']}.log" + ) + spiff_logger_filehandler.setLevel(spiff_log_level) + spiff_logger_filehandler.setFormatter(log_formatter) + + # make all loggers act the same + for name in logging.root.manager.loggerDict: + # use a regex so spiffworkflow_backend isn't filtered out + if not re.match(r"^spiff\b", name): + the_logger = logging.getLogger(name) + the_logger.setLevel(log_level) + if spiff_logger_filehandler: + the_logger.handlers = [] + the_logger.propagate = False + the_logger.addHandler(spiff_logger_filehandler) + else: + for the_handler in the_logger.handlers: + the_handler.setFormatter(log_formatter) + the_handler.setLevel(log_level) + + spiff_logger = logging.getLogger("spiff") + spiff_logger.setLevel(spiff_log_level) + spiff_formatter = logging.Formatter( + "%(asctime)s | %(levelname)s | %(message)s | %(action)s | %(task_type)s | %(process)s | %(processName)s | %(process_instance_id)s" + ) + + # if you add a handler to spiff, it will be used/inherited by spiff.metrics + # if you add a filter to the spiff logger directly (and not the handler), it will NOT be inherited by spiff.metrics + # so put filters on handlers. 
+ db_handler = DBHandler() + db_handler.setLevel(spiff_log_level) + db_handler.setFormatter(spiff_formatter) + db_handler.addFilter(SpiffFilter(app)) + spiff_logger.addHandler(db_handler) + + +# https://9to5answer.com/python-logging-to-database +class DBHandler(logging.Handler): + """DBHandler.""" + + def emit(self, record: logging.LogRecord) -> None: + """Emit.""" + # if we do not have a process instance id then do not log and assume we are running a script unit test + # that initializes a BpmnWorkflow without a process instance + if record and record.process_instance_id: # type: ignore + bpmn_process_identifier = record.workflow # type: ignore + spiff_task_guid = str(record.task_id) # type: ignore + bpmn_task_identifier = str(record.task_spec) # type: ignore + bpmn_task_name = record.task_name if hasattr(record, "task_name") else None # type: ignore + bpmn_task_type = record.task_type if hasattr(record, "task_type") else None # type: ignore + timestamp = record.created + message = record.msg if hasattr(record, "msg") else None + current_user_id = record.current_user_id if hasattr(record, "current_user_id") else None # type: ignore + spiff_log = SpiffLoggingModel( + process_instance_id=record.process_instance_id, # type: ignore + bpmn_process_identifier=bpmn_process_identifier, + spiff_task_guid=spiff_task_guid, + bpmn_task_name=bpmn_task_name, + bpmn_task_identifier=bpmn_task_identifier, + bpmn_task_type=bpmn_task_type, + message=message, + timestamp=timestamp, + current_user_id=current_user_id, + ) + db.session.add(spiff_log) + db.session.commit() diff --git a/src/spiffworkflow_backend/services/message_service.py b/src/spiffworkflow_backend/services/message_service.py new file mode 100644 index 00000000..da1e6224 --- /dev/null +++ b/src/spiffworkflow_backend/services/message_service.py @@ -0,0 +1,233 @@ +"""Message_service.""" +from typing import Any +from typing import Optional + +from flask_bpmn.models.db import db +from sqlalchemy import and_ +from sqlalchemy 
import or_ +from sqlalchemy import select + +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_correlation_message_instance import ( + MessageCorrelationMessageInstanceModel, +) +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_triggerable_process_model import ( + MessageTriggerableProcessModel, +) +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) + + +class MessageServiceError(Exception): + """MessageServiceError.""" + + +class MessageService: + """MessageService.""" + + @classmethod + def process_message_instances(cls) -> None: + """Process_message_instances.""" + message_instances_send = MessageInstanceModel.query.filter_by( + message_type="send", status="ready" + ).all() + message_instances_receive = MessageInstanceModel.query.filter_by( + message_type="receive", status="ready" + ).all() + for message_instance_send in message_instances_send: + # print(f"message_instance_send.id: {message_instance_send.id}") + # check again in case another background process picked up the message + # while the previous one was running + if message_instance_send.status != "ready": + continue + + message_instance_send.status = "running" + db.session.add(message_instance_send) + db.session.commit() + + message_instance_receive = None + try: + message_instance_receive = cls.get_message_instance_receive( + message_instance_send, message_instances_receive + ) + if message_instance_receive is None: + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_instance_send.message_model_id + 
).first() + ) + if message_triggerable_process_model: + process_instance_send = ProcessInstanceModel.query.filter_by( + id=message_instance_send.process_instance_id, + ).first() + # TODO: use the correct swimlane user when that is set up + cls.process_message_triggerable_process_model( + message_triggerable_process_model, + message_instance_send.message_model.name, + message_instance_send.payload, + process_instance_send.process_initiator, + ) + message_instance_send.status = "completed" + else: + # if we can't get a queued message then put it back in the queue + message_instance_send.status = "ready" + + else: + if message_instance_receive.status != "ready": + continue + message_instance_receive.status = "running" + + cls.process_message_receive( + message_instance_receive, + message_instance_send.message_model.name, + message_instance_send.payload, + ) + message_instance_receive.status = "completed" + db.session.add(message_instance_receive) + message_instance_send.status = "completed" + + db.session.add(message_instance_send) + db.session.commit() + except Exception as exception: + db.session.rollback() + message_instance_send.status = "failed" + message_instance_send.failure_cause = str(exception) + db.session.add(message_instance_send) + + if message_instance_receive: + message_instance_receive.status = "failed" + message_instance_receive.failure_cause = str(exception) + db.session.add(message_instance_receive) + + db.session.commit() + raise exception + + @staticmethod + def process_message_triggerable_process_model( + message_triggerable_process_model: MessageTriggerableProcessModel, + message_model_name: str, + message_payload: dict, + user: UserModel, + ) -> ProcessInstanceModel: + """Process_message_triggerable_process_model.""" + process_instance_receive = ProcessInstanceService.create_process_instance( + message_triggerable_process_model.process_model_identifier, + user, + 
process_group_identifier=message_triggerable_process_model.process_group_identifier, + ) + processor_receive = ProcessInstanceProcessor(process_instance_receive) + processor_receive.do_engine_steps(save=False) + processor_receive.bpmn_process_instance.catch_bpmn_message( + message_model_name, + message_payload, + correlations={}, + ) + processor_receive.do_engine_steps(save=True) + + return process_instance_receive + + @staticmethod + def process_message_receive( + message_instance_receive: MessageInstanceModel, + message_model_name: str, + message_payload: dict, + ) -> None: + """Process_message_receive.""" + process_instance_receive = ProcessInstanceModel.query.filter_by( + id=message_instance_receive.process_instance_id + ).first() + if process_instance_receive is None: + raise MessageServiceError( + ( + f"Process instance cannot be found for queued message: {message_instance_receive.id}." + f"Tried with id {message_instance_receive.process_instance_id}", + ) + ) + + processor_receive = ProcessInstanceProcessor(process_instance_receive) + processor_receive.bpmn_process_instance.catch_bpmn_message( + message_model_name, + message_payload, + correlations={}, + ) + processor_receive.do_engine_steps(save=True) + + @staticmethod + def get_message_instance_receive( + message_instance_send: MessageInstanceModel, + message_instances_receive: list[MessageInstanceModel], + ) -> Optional[MessageInstanceModel]: + """Get_message_instance_receive.""" + message_correlations_send = ( + MessageCorrelationModel.query.join(MessageCorrelationMessageInstanceModel) + .filter_by(message_instance_id=message_instance_send.id) + .all() + ) + + message_correlation_filter = [] + for message_correlation_send in message_correlations_send: + message_correlation_filter.append( + and_( + MessageCorrelationModel.name == message_correlation_send.name, + MessageCorrelationModel.value == message_correlation_send.value, + MessageCorrelationModel.message_correlation_property_id + == 
message_correlation_send.message_correlation_property_id, + ) + ) + + for message_instance_receive in message_instances_receive: + + # sqlalchemy supports select / where statements like active record apparantly + # https://docs.sqlalchemy.org/en/14/core/tutorial.html#conjunctions + message_correlation_select = ( + select([db.func.count()]) + .select_from(MessageCorrelationModel) # type: ignore + .where( + and_( + MessageCorrelationModel.process_instance_id + == message_instance_receive.process_instance_id, + or_(*message_correlation_filter), + ) + ) + .join(MessageCorrelationMessageInstanceModel) # type: ignore + .filter_by( + message_instance_id=message_instance_receive.id, + ) + ) + message_correlations_receive = db.session.execute( + message_correlation_select + ) + + # since the query matches on name, value, and message_instance_receive.id, if the counts + # message correlations found are the same, then this should be the relevant message + if ( + message_correlations_receive.scalar() == len(message_correlations_send) + and message_instance_receive.message_model_id + == message_instance_send.message_model_id + ): + return message_instance_receive + + return None + + @staticmethod + def get_process_instance_for_message_instance( + message_instance: MessageInstanceModel, + ) -> Any: + """Get_process_instance_for_message_instance.""" + process_instance = ProcessInstanceModel.query.filter_by( + id=message_instance.process_instance_id + ).first() + if process_instance is None: + raise MessageServiceError( + f"Process instance cannot be found for message: {message_instance.id}." 
+ f"Tried with id {message_instance.process_instance_id}" + ) + + return process_instance diff --git a/src/spiffworkflow_backend/services/process_instance_processor.py b/src/spiffworkflow_backend/services/process_instance_processor.py new file mode 100644 index 00000000..f8204ff2 --- /dev/null +++ b/src/spiffworkflow_backend/services/process_instance_processor.py @@ -0,0 +1,1134 @@ +"""Process_instance_processor.""" +import decimal +import json +import logging +import os +import time +from datetime import datetime +from typing import Any +from typing import Callable +from typing import Dict +from typing import List +from typing import NewType +from typing import Optional +from typing import Tuple +from typing import Union + +from flask import current_app +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from lxml import etree # type: ignore +from RestrictedPython import safe_globals # type: ignore +from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException # type: ignore +from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngine import Box # type: ignore +from SpiffWorkflow.bpmn.PythonScriptEngine import DEFAULT_GLOBALS +from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine +from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer # type: ignore +from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnProcessSpec # type: ignore +from SpiffWorkflow.bpmn.specs.events import CancelEventDefinition # type: ignore +from SpiffWorkflow.bpmn.specs.events import EndEvent +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow # type: ignore +from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser # type: ignore +from SpiffWorkflow.dmn.serializer import BusinessRuleTaskConverter # type: ignore +from SpiffWorkflow.exceptions import WorkflowException # type: ignore +from SpiffWorkflow.serializer.exceptions import 
MissingSpecError # type: ignore +from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser # type: ignore +from SpiffWorkflow.spiff.serializer import BoundaryEventConverter # type: ignore +from SpiffWorkflow.spiff.serializer import CallActivityTaskConverter +from SpiffWorkflow.spiff.serializer import EndEventConverter +from SpiffWorkflow.spiff.serializer import IntermediateCatchEventConverter +from SpiffWorkflow.spiff.serializer import IntermediateThrowEventConverter +from SpiffWorkflow.spiff.serializer import ManualTaskConverter +from SpiffWorkflow.spiff.serializer import NoneTaskConverter +from SpiffWorkflow.spiff.serializer import ReceiveTaskConverter +from SpiffWorkflow.spiff.serializer import ScriptTaskConverter +from SpiffWorkflow.spiff.serializer import SendTaskConverter +from SpiffWorkflow.spiff.serializer import ServiceTaskConverter +from SpiffWorkflow.spiff.serializer import StartEventConverter +from SpiffWorkflow.spiff.serializer import SubWorkflowTaskConverter +from SpiffWorkflow.spiff.serializer import TransactionSubprocessConverter +from SpiffWorkflow.spiff.serializer import UserTaskConverter +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.task import TaskState +from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore + +from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup +from spiffworkflow_backend.models.file import File +from spiffworkflow_backend.models.file import FileType +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_correlation_message_instance import ( + MessageCorrelationMessageInstanceModel, +) +from spiffworkflow_backend.models.message_correlation_property import ( + MessageCorrelationPropertyModel, +) +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from 
spiffworkflow_backend.models.message_instance import MessageModel +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.task_event import TaskAction +from spiffworkflow_backend.models.task_event import TaskEventModel +from spiffworkflow_backend.models.user import UserModelSchema +from spiffworkflow_backend.scripts.script import Script +from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.service_task_service import ServiceTaskDelegate +from spiffworkflow_backend.services.spec_file_service import SpecFileService +from spiffworkflow_backend.services.user_service import UserService + +# Sorry about all this crap. I wanted to move this thing to another file, but +# importing a bunch of types causes circular imports. + + +def _import(name: str, glbls: Dict[str, Any], *args: Any) -> None: + """_import.""" + if name not in glbls: + raise ImportError(f"Import not allowed: {name}", name=name) + + +DEFAULT_GLOBALS.update( + { + "datetime": datetime, + "time": time, + "decimal": decimal, + } +) +# This will overwrite the standard builtins +DEFAULT_GLOBALS.update(safe_globals) +DEFAULT_GLOBALS["__builtins__"]["__import__"] = _import + + +class ProcessInstanceProcessorError(Exception): + """ProcessInstanceProcessorError.""" + + +class CustomBpmnScriptEngine(PythonScriptEngine): # type: ignore + """This is a custom script processor that can be easily injected into Spiff Workflow. + + It will execute python code read in from the bpmn. It will also make any scripts in the + scripts directory available for execution. 
+ """ + + def __init__(self) -> None: + """__init__.""" + super().__init__(default_globals=DEFAULT_GLOBALS) + + def __get_augment_methods(self, task: SpiffTask) -> Dict[str, Callable]: + """__get_augment_methods.""" + return Script.generate_augmented_list( + task, current_app.config["ENV_IDENTIFIER"] + ) + + def evaluate(self, task: SpiffTask, expression: str) -> Any: + """Evaluate.""" + return self._evaluate(expression, task.data, task) + + def _evaluate( + self, + expression: str, + context: Dict[str, Union[Box, str]], + task: Optional[SpiffTask] = None, + external_methods: Optional[Dict[str, Any]] = None, + ) -> Any: + """_evaluate.""" + methods = self.__get_augment_methods(task) + if external_methods: + methods.update(external_methods) + + """Evaluate the given expression, within the context of the given task and return the result.""" + try: + return super()._evaluate(expression, context, external_methods=methods) + except Exception as exception: + if task is None: + raise ProcessInstanceProcessorError( + "Error evaluating expression: " + "'%s', exception: %s" % (expression, str(exception)), + ) from exception + else: + raise WorkflowTaskExecException( + task, + "Error evaluating expression " + "'%s', %s" % (expression, str(exception)), + ) from exception + + def execute( + self, task: SpiffTask, script: str, external_methods: Any = None + ) -> None: + """Execute.""" + try: + methods = self.__get_augment_methods(task) + if external_methods: + methods.update(external_methods) + super().execute(task, script, methods) + except WorkflowException as e: + raise e + except Exception as e: + raise WorkflowTaskExecException(task, f" {script}, {e}", e) from e + + def call_service( + self, + operation_name: str, + operation_params: Dict[str, Any], + task_data: Dict[str, Any], + ) -> Any: + """CallService.""" + return ServiceTaskDelegate.call_connector( + operation_name, operation_params, task_data + ) + + +class MyCustomParser(BpmnDmnParser): # type: ignore + """A BPMN 
and DMN parser that can also parse spiffworkflow-specific extensions.""" + + OVERRIDE_PARSER_CLASSES = BpmnDmnParser.OVERRIDE_PARSER_CLASSES + OVERRIDE_PARSER_CLASSES.update(SpiffBpmnParser.OVERRIDE_PARSER_CLASSES) + + +IdToBpmnProcessSpecMapping = NewType( + "IdToBpmnProcessSpecMapping", dict[str, BpmnProcessSpec] +) + + +class ProcessInstanceProcessor: + """ProcessInstanceProcessor.""" + + _script_engine = CustomBpmnScriptEngine() + SERIALIZER_VERSION = "1.0-spiffworkflow-backend" + wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter( + [ + BoundaryEventConverter, + BusinessRuleTaskConverter, + CallActivityTaskConverter, + EndEventConverter, + IntermediateCatchEventConverter, + IntermediateThrowEventConverter, + ManualTaskConverter, + NoneTaskConverter, + ReceiveTaskConverter, + ScriptTaskConverter, + SendTaskConverter, + ServiceTaskConverter, + StartEventConverter, + SubWorkflowTaskConverter, + TransactionSubprocessConverter, + UserTaskConverter, + ] + ) + _serializer = BpmnWorkflowSerializer(wf_spec_converter, version=SERIALIZER_VERSION) + + PROCESS_INSTANCE_ID_KEY = "process_instance_id" + VALIDATION_PROCESS_KEY = "validate_only" + + # __init__ calls these helpers: + # * get_spec, which returns a spec and any subprocesses (as IdToBpmnProcessSpecMapping dict) + # * __get_bpmn_process_instance, which takes spec and subprocesses and instantiates and returns a BpmnWorkflow + def __init__( + self, process_instance_model: ProcessInstanceModel, validate_only: bool = False + ) -> None: + """Create a Workflow Processor based on the serialized information available in the process_instance model.""" + current_app.config[ + "THREAD_LOCAL_DATA" + ].process_instance_id = process_instance_model.id + + self.process_instance_model = process_instance_model + self.process_model_service = ProcessModelService() + bpmn_process_spec = None + subprocesses: Optional[IdToBpmnProcessSpecMapping] = None + if process_instance_model.bpmn_json is None: + ( + 
bpmn_process_spec, + subprocesses, + ) = ProcessInstanceProcessor.get_process_model_and_subprocesses( + process_instance_model.process_model_identifier, + process_instance_model.process_group_identifier, + ) + else: + bpmn_json_length = len(process_instance_model.bpmn_json.encode("utf-8")) + megabyte = float(1024**2) + json_size = bpmn_json_length / megabyte + if json_size > 1: + wf_json = json.loads(process_instance_model.bpmn_json) + if "spec" in wf_json and "tasks" in wf_json: + task_tree = wf_json["tasks"] + test_spec = wf_json["spec"] + task_size = "{:.2f}".format( + len(json.dumps(task_tree).encode("utf-8")) / megabyte + ) + spec_size = "{:.2f}".format( + len(json.dumps(test_spec).encode("utf-8")) / megabyte + ) + message = ( + "Workflow " + + process_instance_model.process_model_identifier + + f" JSON Size is over 1MB:{json_size:.2f} MB" + ) + message += f"\n Task Size: {task_size}" + message += f"\n Spec Size: {spec_size}" + current_app.logger.warning(message) + + def check_sub_specs( + test_spec: dict, indent: int = 0, show_all: bool = False + ) -> None: + """Check_sub_specs.""" + for my_spec_name in test_spec["task_specs"]: + my_spec = test_spec["task_specs"][my_spec_name] + my_spec_size = ( + len(json.dumps(my_spec).encode("utf-8")) / megabyte + ) + if my_spec_size > 0.1 or show_all: + current_app.logger.warning( + (" " * indent) + + "Sub-Spec " + + my_spec["name"] + + " :" + + f"{my_spec_size:.2f}" + ) + if "spec" in my_spec: + if my_spec["name"] == "Call_Emails_Process_Email": + pass + check_sub_specs(my_spec["spec"], indent + 5) + + check_sub_specs(test_spec, 5) + + self.process_model_identifier = process_instance_model.process_model_identifier + self.process_group_identifier = process_instance_model.process_group_identifier + + try: + self.bpmn_process_instance = self.__get_bpmn_process_instance( + process_instance_model, + bpmn_process_spec, + validate_only, + subprocesses=subprocesses, + ) + self.bpmn_process_instance.script_engine = 
self._script_engine + + self.add_user_info_to_process_instance(self.bpmn_process_instance) + + if self.PROCESS_INSTANCE_ID_KEY not in self.bpmn_process_instance.data: + if not process_instance_model.id: + db.session.add(process_instance_model) + # If the model is new, and has no id, save it, write it into the process_instance model + # and save it again. In this way, the workflow process is always aware of the + # database model to which it is associated, and scripts running within the model + # can then load data as needed. + self.bpmn_process_instance.data[ + ProcessInstanceProcessor.PROCESS_INSTANCE_ID_KEY + ] = process_instance_model.id + self.save() + + except MissingSpecError as ke: + raise ApiError( + error_code="unexpected_process_instance_structure", + message="Failed to deserialize process_instance" + " '%s' due to a mis-placed or missing task '%s'" + % (self.process_model_identifier, str(ke)), + ) from ke + + @classmethod + def get_process_model_and_subprocesses( + cls, process_model_identifier: str, process_group_identifier: str + ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: + """Get_process_model_and_subprocesses.""" + process_model_info = ProcessModelService().get_process_model( + process_model_identifier, process_group_identifier + ) + if process_model_info is None: + raise ( + ApiError( + "process_model_not_found", + f"The given process model was not found: {process_group_identifier}/{process_model_identifier}.", + ) + ) + spec_files = SpecFileService.get_files( + process_model_info, include_libraries=True + ) + return cls.get_spec(spec_files, process_model_info) + + @classmethod + def get_bpmn_process_instance_from_process_model( + cls, process_model_identifier: str, process_group_identifier: str + ) -> BpmnWorkflow: + """Get_all_bpmn_process_identifiers_for_process_model.""" + (bpmn_process_spec, subprocesses) = cls.get_process_model_and_subprocesses( + process_model_identifier, + process_group_identifier, + ) + return 
cls.get_bpmn_process_instance_from_workflow_spec( + bpmn_process_spec, subprocesses + ) + + def add_user_info_to_process_instance( + self, bpmn_process_instance: BpmnWorkflow + ) -> None: + """Add_user_info_to_process_instance.""" + current_user = None + if UserService.has_user(): + current_user = UserService.current_user(allow_admin_impersonate=True) + + # fall back to initiator if g.user is not set + # this is for background processes when there will not be a user + # coming in from the api + elif self.process_instance_model.process_initiator_id: + current_user = self.process_instance_model.process_initiator + + if current_user: + current_user_data = UserModelSchema().dump(current_user) + tasks = bpmn_process_instance.get_tasks(TaskState.READY) + for task in tasks: + task.data["current_user"] = current_user_data + + @staticmethod + def reset( + process_instance_model: ProcessInstanceModel, clear_data: bool = False + ) -> None: + """Resets the process_instance back to an unstarted state - where nothing has happened yet. + + If clear_data is set to false, then the information + previously used in forms will be re-populated when the form is re- + displayed, and any files that were updated will remain in place, otherwise + files will also be cleared out. + """ + # Try to execute a cancel notify + try: + bpmn_process_instance = ( + ProcessInstanceProcessor.__get_bpmn_process_instance( + process_instance_model + ) + ) + ProcessInstanceProcessor.__cancel_notify(bpmn_process_instance) + except Exception as e: + db.session.rollback() # in case the above left the database with a bad transaction + current_app.logger.error( + "Unable to send a cancel notify for process_instance %s during a reset." + " Continuing with the reset anyway so we don't get in an unresolvable" + " state. 
An %s error occured with the following information: %s" + % (process_instance_model.id, e.__class__.__name__, str(e)) + ) + process_instance_model.bpmn_json = None + process_instance_model.status = ProcessInstanceStatus.not_started.value + + # clear out any task assignments + db.session.query(TaskEventModel).filter( + TaskEventModel.process_instance_id == process_instance_model.id + ).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete() + + if clear_data: + # Clear out data in previous task events + task_events = ( + db.session.query(TaskEventModel) + .filter(TaskEventModel.process_instance_id == process_instance_model.id) + .all() + ) + for task_event in task_events: + task_event.form_data = {} + db.session.add(task_event) + # Remove any uploaded files. + + # TODO: grab UserFileService + # files = FileModel.query.filter(FileModel.process_instance_id == process_instance_model.id).all() + # for file in files: + # UserFileService().delete_file(file.id) + db.session.commit() + + @staticmethod + def get_bpmn_process_instance_from_workflow_spec( + spec: BpmnProcessSpec, + subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, + ) -> BpmnWorkflow: + """Get_bpmn_process_instance_from_workflow_spec.""" + return BpmnWorkflow( + spec, + script_engine=ProcessInstanceProcessor._script_engine, + subprocess_specs=subprocesses, + ) + + @staticmethod + def __get_bpmn_process_instance( + process_instance_model: ProcessInstanceModel, + spec: Optional[BpmnProcessSpec] = None, + validate_only: bool = False, + subprocesses: Optional[IdToBpmnProcessSpecMapping] = None, + ) -> BpmnWorkflow: + """__get_bpmn_process_instance.""" + if process_instance_model.bpmn_json: + # turn off logging to avoid duplicated spiff logs + spiff_logger = logging.getLogger("spiff") + original_spiff_logger_log_level = spiff_logger.level + spiff_logger.setLevel(logging.WARNING) + + try: + bpmn_process_instance = ( + ProcessInstanceProcessor._serializer.deserialize_json( + 
process_instance_model.bpmn_json + ) + ) + except Exception as err: + raise (err) + finally: + spiff_logger.setLevel(original_spiff_logger_log_level) + + bpmn_process_instance.script_engine = ( + ProcessInstanceProcessor._script_engine + ) + else: + bpmn_process_instance = ( + ProcessInstanceProcessor.get_bpmn_process_instance_from_workflow_spec( + spec, subprocesses + ) + ) + bpmn_process_instance.data[ + ProcessInstanceProcessor.VALIDATION_PROCESS_KEY + ] = validate_only + return bpmn_process_instance + + def slam_in_data(self, data: dict) -> None: + """Slam_in_data.""" + self.bpmn_process_instance.data = DeepMerge.merge( + self.bpmn_process_instance.data, data + ) + + self.save() + + def save(self) -> None: + """Saves the current state of this processor to the database.""" + self.process_instance_model.bpmn_json = self.serialize() + complete_states = [TaskState.CANCELLED, TaskState.COMPLETED] + user_tasks = list(self.get_all_user_tasks()) + self.process_instance_model.status = self.get_status().value + self.process_instance_model.total_tasks = len(user_tasks) + self.process_instance_model.completed_tasks = sum( + 1 for t in user_tasks if t.state in complete_states + ) + + if self.process_instance_model.start_in_seconds is None: + self.process_instance_model.start_in_seconds = round(time.time()) + + if self.process_instance_model.end_in_seconds is None: + if self.bpmn_process_instance.is_completed(): + self.process_instance_model.end_in_seconds = round(time.time()) + + db.session.add(self.process_instance_model) + + ActiveTaskModel.query.filter_by( + process_instance_id=self.process_instance_model.id + ).delete() + + ready_or_waiting_tasks = self.get_all_ready_or_waiting_tasks() + for ready_or_waiting_task in ready_or_waiting_tasks: + # filter out non-usertasks + if not self.bpmn_process_instance._is_engine_task( + ready_or_waiting_task.task_spec + ): + user_id = ready_or_waiting_task.data["current_user"]["id"] + principal = 
PrincipalModel.query.filter_by(user_id=user_id).first() + if principal is None: + raise ( + ApiError( + error_code="principal_not_found", + message=f"Principal not found from user id: {user_id}", + status_code=400, + ) + ) + + extensions = ready_or_waiting_task.task_spec.extensions + + form_file_name = None + ui_form_file_name = None + if "properties" in extensions: + properties = extensions["properties"] + if "formJsonSchemaFilename" in properties: + form_file_name = properties["formJsonSchemaFilename"] + if "formUiSchemaFilename" in properties: + ui_form_file_name = properties["formUiSchemaFilename"] + + process_model_display_name = "" + process_model_info = self.process_model_service.get_process_model( + self.process_instance_model.process_model_identifier + ) + if process_model_info is not None: + process_model_display_name = process_model_info.display_name + + active_task = ActiveTaskModel( + process_instance_id=self.process_instance_model.id, + process_model_display_name=process_model_display_name, + assigned_principal_id=principal.id, + form_file_name=form_file_name, + ui_form_file_name=ui_form_file_name, + task_id=str(ready_or_waiting_task.id), + task_name=ready_or_waiting_task.task_spec.name, + task_title=ready_or_waiting_task.task_spec.description, + task_type=ready_or_waiting_task.task_spec.__class__.__name__, + task_status=ready_or_waiting_task.get_state_name(), + task_data=json.dumps(ready_or_waiting_task.data), + ) + db.session.add(active_task) + + db.session.commit() + + @staticmethod + def get_parser() -> MyCustomParser: + """Get_parser.""" + parser = MyCustomParser() + return parser + + @staticmethod + def backfill_missing_bpmn_process_id_lookup_records( + bpmn_process_identifier: str, + ) -> Optional[str]: + """Backfill_missing_bpmn_process_id_lookup_records.""" + process_models = ProcessModelService().get_process_models() + for process_model in process_models: + if process_model.primary_file_name: + etree_element = 
SpecFileService.get_etree_element_from_file_name( + process_model, process_model.primary_file_name + ) + bpmn_process_identifiers = [] + + try: + bpmn_process_identifiers = ( + SpecFileService.get_executable_bpmn_process_identifiers( + etree_element + ) + ) + except ValidationException: + # ignore validation errors here + pass + + if bpmn_process_identifier in bpmn_process_identifiers: + SpecFileService.store_bpmn_process_identifiers( + process_model, + process_model.primary_file_name, + etree_element, + ) + return FileSystemService.full_path_to_process_model_file( + process_model, process_model.primary_file_name + ) + return None + + @staticmethod + def bpmn_file_full_path_from_bpmn_process_identifier( + bpmn_process_identifier: str, + ) -> str: + """Bpmn_file_full_path_from_bpmn_process_identifier.""" + bpmn_process_id_lookup = BpmnProcessIdLookup.query.filter_by( + bpmn_process_identifier=bpmn_process_identifier + ).first() + bpmn_file_full_path = None + if bpmn_process_id_lookup is None: + bpmn_file_full_path = ProcessInstanceProcessor.backfill_missing_bpmn_process_id_lookup_records( + bpmn_process_identifier + ) + else: + bpmn_file_full_path = os.path.join( + FileSystemService.root_path(), + bpmn_process_id_lookup.bpmn_file_relative_path, + ) + if bpmn_file_full_path is None: + raise ( + ApiError( + error_code="could_not_find_bpmn_process_identifier", + message="Could not find the the given bpmn process identifier from any sources: %s" + % bpmn_process_identifier, + ) + ) + return os.path.abspath(bpmn_file_full_path) + + @staticmethod + def update_spiff_parser_with_all_process_dependency_files( + parser: BpmnDmnParser, + processed_identifiers: Optional[set[str]] = None, + ) -> None: + """Update_spiff_parser_with_all_process_dependency_files.""" + if processed_identifiers is None: + processed_identifiers = set() + processor_dependencies = parser.get_process_dependencies() + processor_dependencies_new = processor_dependencies - processed_identifiers + 
bpmn_process_identifiers_in_parser = parser.get_process_ids() + + new_bpmn_files = set() + for bpmn_process_identifier in processor_dependencies_new: + + # ignore identifiers that spiff already knows about + if bpmn_process_identifier in bpmn_process_identifiers_in_parser: + continue + + new_bpmn_file_full_path = ProcessInstanceProcessor.bpmn_file_full_path_from_bpmn_process_identifier( + bpmn_process_identifier + ) + new_bpmn_files.add(new_bpmn_file_full_path) + dmn_file_glob = os.path.join( + os.path.dirname(new_bpmn_file_full_path), "*.dmn" + ) + parser.add_dmn_files_by_glob(dmn_file_glob) + processed_identifiers.add(bpmn_process_identifier) + + if new_bpmn_files: + parser.add_bpmn_files(new_bpmn_files) + ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files( + parser, processed_identifiers + ) + + @staticmethod + def get_spec( + files: List[File], process_model_info: ProcessModelInfo + ) -> Tuple[BpmnProcessSpec, IdToBpmnProcessSpecMapping]: + """Returns a SpiffWorkflow specification for the given process_instance spec, using the files provided.""" + parser = ProcessInstanceProcessor.get_parser() + + for file in files: + data = SpecFileService.get_data(process_model_info, file.name) + if file.type == FileType.bpmn.value: + bpmn: etree.Element = etree.fromstring(data) + parser.add_bpmn_xml(bpmn, filename=file.name) + elif file.type == FileType.dmn.value: + dmn: etree.Element = etree.fromstring(data) + parser.add_dmn_xml(dmn, filename=file.name) + if ( + process_model_info.primary_process_id is None + or process_model_info.primary_process_id == "" + ): + raise ( + ApiError( + error_code="no_primary_bpmn_error", + message="There is no primary BPMN process id defined for process_model %s" + % process_model_info.id, + ) + ) + ProcessInstanceProcessor.update_spiff_parser_with_all_process_dependency_files( + parser + ) + + try: + bpmn_process_spec = parser.get_spec(process_model_info.primary_process_id) + + # returns a dict of {process_id: 
bpmn_process_spec}, otherwise known as an IdToBpmnProcessSpecMapping + subprocesses = parser.get_subprocess_specs( + process_model_info.primary_process_id + ) + except ValidationException as ve: + raise ApiError( + error_code="process_instance_validation_error", + message="Failed to parse the Workflow Specification. " + + "Error is '%s.'" % str(ve), + file_name=ve.filename, + task_id=ve.id, + tag=ve.tag, + ) from ve + return (bpmn_process_spec, subprocesses) + + @staticmethod + def status_of(bpmn_process_instance: BpmnWorkflow) -> ProcessInstanceStatus: + """Status_of.""" + if bpmn_process_instance.is_completed(): + return ProcessInstanceStatus.complete + user_tasks = bpmn_process_instance.get_ready_user_tasks() + waiting_tasks = bpmn_process_instance.get_tasks(TaskState.WAITING) + if len(waiting_tasks) > 0: + return ProcessInstanceStatus.waiting + if len(user_tasks) > 0: + return ProcessInstanceStatus.user_input_required + else: + return ProcessInstanceStatus.waiting + + def get_status(self) -> ProcessInstanceStatus: + """Get_status.""" + return self.status_of(self.bpmn_process_instance) + + # messages have one correlation key (possibly wrong) + # correlation keys may have many correlation properties + def process_bpmn_messages(self) -> None: + """Process_bpmn_messages.""" + bpmn_messages = self.bpmn_process_instance.get_bpmn_messages() + for bpmn_message in bpmn_messages: + # only message sends are in get_bpmn_messages + message_model = MessageModel.query.filter_by(name=bpmn_message.name).first() + if message_model is None: + raise ApiError( + "invalid_message_name", + f"Invalid message name: {bpmn_message.name}.", + ) + + if not bpmn_message.correlations: + raise ApiError( + "message_correlations_missing", + f"Could not find any message correlations bpmn_message: {bpmn_message.name}", + ) + + message_correlations = [] + for ( + message_correlation_key, + message_correlation_properties, + ) in bpmn_message.correlations.items(): + for ( + 
message_correlation_property_identifier, + message_correlation_property_value, + ) in message_correlation_properties.items(): + message_correlation_property = ( + MessageCorrelationPropertyModel.query.filter_by( + identifier=message_correlation_property_identifier, + ).first() + ) + if message_correlation_property is None: + raise ApiError( + "message_correlations_missing_from_process", + "Could not find a known message correlation with identifier:" + f"{message_correlation_property_identifier}", + ) + message_correlations.append( + { + "message_correlation_property": message_correlation_property, + "name": message_correlation_key, + "value": message_correlation_property_value, + } + ) + message_instance = MessageInstanceModel( + process_instance_id=self.process_instance_model.id, + message_type="send", + message_model_id=message_model.id, + payload=bpmn_message.payload, + ) + db.session.add(message_instance) + db.session.commit() + + for message_correlation in message_correlations: + message_correlation = MessageCorrelationModel( + process_instance_id=self.process_instance_model.id, + message_correlation_property_id=message_correlation[ + "message_correlation_property" + ].id, + name=message_correlation["name"], + value=message_correlation["value"], + ) + db.session.add(message_correlation) + db.session.commit() + message_correlation_message_instance = ( + MessageCorrelationMessageInstanceModel( + message_instance_id=message_instance.id, + message_correlation_id=message_correlation.id, + ) + ) + db.session.add(message_correlation_message_instance) + db.session.commit() + + def queue_waiting_receive_messages(self) -> None: + """Queue_waiting_receive_messages.""" + waiting_tasks = self.get_all_waiting_tasks() + for waiting_task in waiting_tasks: + # if it's not something that can wait for a message, skip it + if waiting_task.task_spec.__class__.__name__ not in [ + "IntermediateCatchEvent", + "ReceiveTask", + ]: + continue + + # timer events are not related to 
messaging, so ignore them for these purposes + if waiting_task.task_spec.event_definition.__class__.__name__ in [ + "TimerEventDefinition", + ]: + continue + + message_model = MessageModel.query.filter_by( + name=waiting_task.task_spec.event_definition.name + ).first() + if message_model is None: + raise ApiError( + "invalid_message_name", + f"Invalid message name: {waiting_task.task_spec.event_definition.name}.", + ) + + # Ensure we are only creating one message instance for each waiting message + message_instance = MessageInstanceModel.query.filter_by( + process_instance_id=self.process_instance_model.id, + message_type="receive", + message_model_id=message_model.id, + ).first() + if message_instance: + continue + + message_instance = MessageInstanceModel( + process_instance_id=self.process_instance_model.id, + message_type="receive", + message_model_id=message_model.id, + ) + db.session.add(message_instance) + + for ( + spiff_correlation_property + ) in waiting_task.task_spec.event_definition.correlation_properties: + # NOTE: we may have to cycle through keys here + # not sure yet if it's valid for a property to be associated with multiple keys + correlation_key_name = spiff_correlation_property.correlation_keys[0] + message_correlation = ( + MessageCorrelationModel.query.filter_by( + process_instance_id=self.process_instance_model.id, + name=correlation_key_name, + ) + .join(MessageCorrelationPropertyModel) + .filter_by(identifier=spiff_correlation_property.name) + .first() + ) + message_correlation_message_instance = ( + MessageCorrelationMessageInstanceModel( + message_instance_id=message_instance.id, + message_correlation_id=message_correlation.id, + ) + ) + db.session.add(message_correlation_message_instance) + + db.session.commit() + + def do_engine_steps(self, exit_at: None = None, save: bool = False) -> None: + """Do_engine_steps.""" + try: + self.bpmn_process_instance.refresh_waiting_tasks() + self.bpmn_process_instance.do_engine_steps(exit_at=exit_at) 
+ self.process_bpmn_messages() + self.queue_waiting_receive_messages() + + if save: + self.save() + + except WorkflowTaskExecException as we: + raise ApiError.from_workflow_exception("task_error", str(we), we) from we + + def cancel_notify(self) -> None: + """Cancel_notify.""" + self.__cancel_notify(self.bpmn_process_instance) + + @staticmethod + def __cancel_notify(bpmn_process_instance: BpmnWorkflow) -> None: + """__cancel_notify.""" + try: + # A little hackly, but make the bpmn_process_instance catch a cancel event. + bpmn_process_instance.signal("cancel") # generate a cancel signal. + bpmn_process_instance.catch(CancelEventDefinition()) + bpmn_process_instance.do_engine_steps() + except WorkflowTaskExecException as we: + raise ApiError.from_workflow_exception("task_error", str(we), we) from we + + def serialize(self) -> str: + """Serialize.""" + return self._serializer.serialize_json(self.bpmn_process_instance) # type: ignore + + def next_user_tasks(self) -> list[SpiffTask]: + """Next_user_tasks.""" + return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore + + def next_task(self) -> SpiffTask: + """Returns the next task that should be completed even if there are parallel tasks and multiple options are available. + + If the process_instance is complete + it will return the final end task. + """ + # If the whole blessed mess is done, return the end_event task in the tree + # This was failing in the case of a call activity where we have an intermediate EndEvent + # what we really want is the LAST EndEvent + + endtasks = [] + if self.bpmn_process_instance.is_completed(): + for task in SpiffTask.Iterator( + self.bpmn_process_instance.task_tree, TaskState.ANY_MASK + ): + # Assure that we find the end event for this process_instance, and not for any sub-process_instances. 
+ if ( + isinstance(task.task_spec, EndEvent) + and task.workflow == self.bpmn_process_instance + ): + endtasks.append(task) + if len(endtasks) > 0: + return endtasks[-1] + + # If there are ready tasks to complete, return the next ready task, but return the one + # in the active parallel path if possible. In some cases the active parallel path may itself be + # a parallel gateway with multiple tasks, so prefer ones that share a parent. + + # Get a list of all ready tasks + ready_tasks = self.bpmn_process_instance.get_tasks(TaskState.READY) + + if len(ready_tasks) == 0: + # If no ready tasks exist, check for a waiting task. + waiting_tasks = self.bpmn_process_instance.get_tasks(TaskState.WAITING) + if len(waiting_tasks) > 0: + return waiting_tasks[0] + else: + return # We have not tasks to return. + + # Get a list of all completed user tasks (Non engine tasks) + completed_user_tasks = self.completed_user_tasks() + + # If there are no completed user tasks, return the first ready task + if len(completed_user_tasks) == 0: + return ready_tasks[0] + + # Take the last completed task, find a child of it, and return that task + last_user_task = completed_user_tasks[0] + if len(ready_tasks) > 0: + for task in ready_tasks: + if task._is_descendant_of(last_user_task): + return task + for task in ready_tasks: + if ( + self.bpmn_process_instance.last_task + and task.parent == last_user_task.parent + ): + return task + + return ready_tasks[0] + + # If there are no ready tasks, but the thing isn't complete yet, find the first non-complete task + # and return that + next_task = None + for task in SpiffTask.Iterator( + self.bpmn_process_instance.task_tree, TaskState.NOT_FINISHED_MASK + ): + next_task = task + return next_task + + def completed_user_tasks(self) -> List[SpiffTask]: + """Completed_user_tasks.""" + user_tasks = self.bpmn_process_instance.get_tasks(TaskState.COMPLETED) + user_tasks.reverse() + user_tasks = list( + filter( + lambda task: not 
self.bpmn_process_instance._is_engine_task( + task.task_spec + ), + user_tasks, + ) + ) + return user_tasks # type: ignore + + def complete_task(self, task: SpiffTask) -> None: + """Complete_task.""" + self.bpmn_process_instance.complete_task_from_id(task.id) + + def get_data(self) -> dict[str, Any]: + """Get_data.""" + return self.bpmn_process_instance.data # type: ignore + + def get_current_data(self) -> dict[str, Any]: + """Get the current data for the process. + + Return either most recent task data or the process data + if the process instance is complete + """ + if self.process_instance_model.status == "complete": + return self.get_data() + + most_recent_task = None + for task in self.get_all_ready_or_waiting_tasks(): + if most_recent_task is None: + most_recent_task = task + elif most_recent_task.last_state_change < task.last_state_change: # type: ignore + most_recent_task = task + + if most_recent_task: + return most_recent_task.data # type: ignore + + return {} + + def get_process_instance_id(self) -> int: + """Get_process_instance_id.""" + return self.process_instance_model.id + + def get_ready_user_tasks(self) -> list[SpiffTask]: + """Get_ready_user_tasks.""" + return self.bpmn_process_instance.get_ready_user_tasks() # type: ignore + + def get_current_user_tasks(self) -> list[SpiffTask]: + """Return a list of all user tasks that are READY or COMPLETE and are parallel to the READY Task.""" + ready_tasks = self.bpmn_process_instance.get_ready_user_tasks() + additional_tasks = [] + if len(ready_tasks) > 0: + for child in ready_tasks[0].parent.children: + if child.state == TaskState.COMPLETED: + additional_tasks.append(child) + return ready_tasks + additional_tasks # type: ignore + + def get_all_user_tasks(self) -> List[SpiffTask]: + """Get_all_user_tasks.""" + all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + return [ + t + for t in all_tasks + if not self.bpmn_process_instance._is_engine_task(t.task_spec) + ] + + def 
get_all_completed_tasks(self) -> list[SpiffTask]: + """Get_all_completed_tasks.""" + all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + return [ + t + for t in all_tasks + if not self.bpmn_process_instance._is_engine_task(t.task_spec) + and t.state in [TaskState.COMPLETED, TaskState.CANCELLED] + ] + + def get_all_waiting_tasks(self) -> list[SpiffTask]: + """Get_all_ready_or_waiting_tasks.""" + all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + return [t for t in all_tasks if t.state in [TaskState.WAITING]] + + def get_all_ready_or_waiting_tasks(self) -> list[SpiffTask]: + """Get_all_ready_or_waiting_tasks.""" + all_tasks = self.bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + return [t for t in all_tasks if t.state in [TaskState.WAITING, TaskState.READY]] + + @classmethod + def get_task_by_bpmn_identifier( + cls, bpmn_task_identifier: str, bpmn_process_instance: BpmnWorkflow + ) -> Optional[SpiffTask]: + """Get_task_by_id.""" + all_tasks = bpmn_process_instance.get_tasks(TaskState.ANY_MASK) + for task in all_tasks: + if task.task_spec.name == bpmn_task_identifier: + return task + return None + + def get_nav_item(self, task: SpiffTask) -> Any: + """Get_nav_item.""" + for nav_item in self.bpmn_process_instance.get_nav_list(): + if nav_item["task_id"] == task.id: + return nav_item + return None + + def find_spec_and_field(self, spec_name: str, field_id: Union[str, int]) -> Any: + """Tracks down a form field by name in the process_instance spec(s), Returns a tuple of the task, and form.""" + process_instances = [self.bpmn_process_instance] + for task in self.bpmn_process_instance.get_ready_user_tasks(): + if task.process_instance not in process_instances: + process_instances.append(task.process_instance) + for process_instance in process_instances: + for spec in process_instance.spec.task_specs.values(): + if spec.name == spec_name: + if not hasattr(spec, "form"): + raise ApiError( + "invalid_spec", + "The spec name you 
provided does not contain a form.", + ) + + for field in spec.form.fields: + if field.id == field_id: + return spec, field + + raise ApiError( + "invalid_field", + f"The task '{spec_name}' has no field named '{field_id}'", + ) + + raise ApiError( + "invalid_spec", + f"Unable to find a task in the process_instance called '{spec_name}'", + ) + + def terminate(self) -> None: + """Terminate.""" + self.bpmn_process_instance.cancel() + self.save() + self.process_instance_model.status = "terminated" + db.session.add(self.process_instance_model) + db.session.commit() diff --git a/src/spiffworkflow_backend/services/process_instance_service.py b/src/spiffworkflow_backend/services/process_instance_service.py new file mode 100644 index 00000000..c15ed5b1 --- /dev/null +++ b/src/spiffworkflow_backend/services/process_instance_service.py @@ -0,0 +1,437 @@ +"""Process_instance_service.""" +import time +from typing import Any +from typing import Dict +from typing import List +from typing import Optional + +from flask import current_app +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from SpiffWorkflow.task import Task as SpiffTask # type: ignore +from SpiffWorkflow.util.deep_merge import DeepMerge # type: ignore + +from spiffworkflow_backend.models.process_instance import ProcessInstanceApi +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.task import MultiInstanceType +from spiffworkflow_backend.models.task import Task +from spiffworkflow_backend.models.task_event import TaskAction +from spiffworkflow_backend.models.task_event import TaskEventModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.git_service import GitService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from 
spiffworkflow_backend.services.process_model_service import ProcessModelService + +# from SpiffWorkflow.task import TaskState # type: ignore + + +class ProcessInstanceService: + """ProcessInstanceService.""" + + TASK_STATE_LOCKED = "locked" + + @staticmethod + def create_process_instance( + process_model_identifier: str, + user: UserModel, + process_group_identifier: Optional[str] = None, + ) -> ProcessInstanceModel: + """Get_process_instance_from_spec.""" + current_git_revision = GitService.get_current_revision() + process_instance_model = ProcessInstanceModel( + status=ProcessInstanceStatus.not_started.value, + process_initiator=user, + process_model_identifier=process_model_identifier, + process_group_identifier=process_group_identifier, + start_in_seconds=round(time.time()), + bpmn_version_control_type="git", + bpmn_version_control_identifier=current_git_revision, + ) + db.session.add(process_instance_model) + db.session.commit() + return process_instance_model + + @staticmethod + def do_waiting() -> None: + """Do_waiting.""" + records = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.status == ProcessInstanceStatus.waiting.value) + .all() + ) + for process_instance in records: + try: + current_app.logger.info( + f"Processing process_instance {process_instance.id}" + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + except Exception: + db.session.rollback() # in case the above left the database with a bad transaction + process_instance.status = ProcessInstanceStatus.erroring.value + db.session.add(process_instance) + db.session.commit() + error_message = f"Error running waiting task for process_instance {process_instance.id}" + "({process_instance.process_model_identifier}). 
{str(e)}" + current_app.logger.error(error_message) + + @staticmethod + def processor_to_process_instance_api( + processor: ProcessInstanceProcessor, next_task: None = None + ) -> ProcessInstanceApi: + """Returns an API model representing the state of the current process_instance. + + If requested, and possible, next_task is set to the current_task. + """ + # navigation = processor.bpmn_process_instance.get_deep_nav_list() + # ProcessInstanceService.update_navigation(navigation, processor) + process_model_service = ProcessModelService() + process_model = process_model_service.get_process_model( + processor.process_model_identifier + ) + is_review_value = process_model.is_review if process_model else False + title_value = process_model.display_name if process_model else "" + process_instance_api = ProcessInstanceApi( + id=processor.get_process_instance_id(), + status=processor.get_status(), + next_task=None, + # navigation=navigation, + process_model_identifier=processor.process_model_identifier, + process_group_identifier=processor.process_group_identifier, + # total_tasks=len(navigation), + completed_tasks=processor.process_instance_model.completed_tasks, + updated_at_in_seconds=processor.process_instance_model.updated_at_in_seconds, + is_review=is_review_value, + title=title_value, + ) + next_task_trying_again = next_task + if ( + not next_task + ): # The Next Task can be requested to be a certain task, useful for parallel tasks. + # This may or may not work, sometimes there is no next task to complete. 
+ next_task_trying_again = processor.next_task() + + if next_task_trying_again is not None: + previous_form_data = ProcessInstanceService.get_previously_submitted_data( + processor.process_instance_model.id, next_task_trying_again + ) + # DeepMerge.merge(next_task_trying_again.data, previous_form_data) + next_task_trying_again.data = DeepMerge.merge( + previous_form_data, next_task_trying_again.data + ) + + process_instance_api.next_task = ( + ProcessInstanceService.spiff_task_to_api_task( + next_task_trying_again, add_docs_and_forms=True + ) + ) + # TODO: Hack for now, until we decide how to implment forms + process_instance_api.next_task.form = None + + # Update the state of the task to locked if the current user does not own the task. + # user_uids = WorkflowService.get_users_assigned_to_task(processor, next_task) + # if not UserService.in_list(user_uids, allow_admin_impersonate=True): + # workflow_api.next_task.state = WorkflowService.TASK_STATE_LOCKED + + return process_instance_api + + @staticmethod + def get_previously_submitted_data( + process_instance_id: int, spiff_task: SpiffTask + ) -> Dict[Any, Any]: + """If the user has completed this task previously, find the form data for the last submission.""" + query = ( + db.session.query(TaskEventModel) + .filter_by(process_instance_id=process_instance_id) + .filter_by(task_name=spiff_task.task_spec.name) + .filter_by(action=TaskAction.COMPLETE.value) + ) + + if ( + hasattr(spiff_task, "internal_data") + and "runtimes" in spiff_task.internal_data + ): + query = query.filter_by(mi_index=spiff_task.internal_data["runtimes"]) + + latest_event = query.order_by(TaskEventModel.date.desc()).first() + if latest_event: + if latest_event.form_data is not None: + return latest_event.form_data # type: ignore + else: + missing_form_error = ( + f"We have lost data for workflow {process_instance_id}, " + f"task {spiff_task.task_spec.name}, it is not in the task event model, " + f"and it should be." 
+ ) + current_app.logger.error( + "missing_form_data", missing_form_error, exc_info=True + ) + return {} + else: + return {} + + def get_process_instance(self, process_instance_id: int) -> Any: + """Get_process_instance.""" + result = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + return result + + @staticmethod + def update_task_assignments(processor: ProcessInstanceProcessor) -> None: + """For every upcoming user task, log a task action that connects the assigned user(s) to that task. + + All existing assignment actions for this workflow are removed from the database, + so that only the current valid actions are available. update_task_assignments + should be called whenever progress is made on a workflow. + """ + db.session.query(TaskEventModel).filter( + TaskEventModel.process_instance_id == processor.process_instance_model.id + ).filter(TaskEventModel.action == TaskAction.ASSIGNMENT.value).delete() + db.session.commit() + + tasks = processor.get_current_user_tasks() + for task in tasks: + user_ids = ProcessInstanceService.get_users_assigned_to_task( + processor, task + ) + + for user_id in user_ids: + ProcessInstanceService().log_task_action( + user_id, processor, task, TaskAction.ASSIGNMENT.value + ) + + @staticmethod + def get_users_assigned_to_task( + processor: ProcessInstanceProcessor, spiff_task: SpiffTask + ) -> List[int]: + """Get_users_assigned_to_task.""" + if processor.process_instance_model.process_initiator_id is None: + raise ApiError.from_task( + error_code="invalid_workflow", + message="A process instance must have a user_id.", + task=spiff_task, + ) + # # Standalone workflow - we only care about the current user + # elif processor.workflow_model.study_id is None and processor.workflow_model.user_id is not None: + # return [processor.workflow_model.user_id] + + # Workflow associated with a study - get all the users + else: + if ( + not hasattr(spiff_task.task_spec, "lane") + 
or spiff_task.task_spec.lane is None + ): + current_user = spiff_task.data["current_user"] + return [ + current_user["id"], + ] + # return [processor.process_instance_model.process_initiator_id] + + if spiff_task.task_spec.lane not in spiff_task.data: + return [] # No users are assignable to the task at this moment + lane_users = spiff_task.data[spiff_task.task_spec.lane] + if not isinstance(lane_users, list): + lane_users = [lane_users] + + lane_uids = [] + for user in lane_users: + if isinstance(user, dict): + if user.get("value"): + lane_uids.append(user["value"]) + else: + raise ApiError.from_task( + error_code="task_lane_user_error", + message="Spiff Task %s lane user dict must have a key called 'value' with the user's uid in it." + % spiff_task.task_spec.name, + task=spiff_task, + ) + elif isinstance(user, str): + lane_uids.append(user) + else: + raise ApiError.from_task( + error_code="task_lane_user_error", + message="Spiff Task %s lane user is not a string or dict" + % spiff_task.task_spec.name, + task=spiff_task, + ) + + return lane_uids + + @staticmethod + def complete_form_task( + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + data: dict[str, Any], + user: UserModel, + ) -> None: + """All the things that need to happen when we complete a form. + + Abstracted here because we need to do it multiple times when completing all tasks in + a multi-instance task. + """ + dot_dct = ProcessInstanceService.create_dot_dict(data) + spiff_task.update_data(dot_dct) + # ProcessInstanceService.post_process_form(spiff_task) # some properties may update the data store. + processor.complete_task(spiff_task) + # Log the action before doing the engine steps, as doing so could effect the state of the task + # the workflow could wrap around in the ngine steps, and the task could jump from being completed to + # another state. What we are logging here is the completion. 
+ ProcessInstanceService.log_task_action( + user.id, processor, spiff_task, TaskAction.COMPLETE.value + ) + processor.do_engine_steps() + processor.save() + + @staticmethod + def log_task_action( + user_id: int, + processor: ProcessInstanceProcessor, + spiff_task: SpiffTask, + action: str, + ) -> None: + """Log_task_action.""" + task = ProcessInstanceService.spiff_task_to_api_task(spiff_task) + form_data = ProcessInstanceService.extract_form_data( + spiff_task.data, spiff_task + ) + multi_instance_type_value = "" + if task.multi_instance_type: + multi_instance_type_value = task.multi_instance_type.value + + task_event = TaskEventModel( + # study_id=processor.workflow_model.study_id, + user_id=user_id, + process_instance_id=processor.process_instance_model.id, + # workflow_spec_id=processor.workflow_model.workflow_spec_id, + action=action, + task_id=str(task.id), + task_name=task.name, + task_title=task.title, + task_type=str(task.type), + task_state=task.state, + task_lane=task.lane, + form_data=form_data, + mi_type=multi_instance_type_value, # Some tasks have a repeat behavior. + mi_count=task.multi_instance_count, # This is the number of times the task could repeat. + mi_index=task.multi_instance_index, # And the index of the currently repeating task. + process_name=task.process_name, + # date=datetime.utcnow(), <=== For future reference, NEVER do this. Let the database set the time. 
+ ) + db.session.add(task_event) + db.session.commit() + + @staticmethod + def extract_form_data(latest_data: dict, task: SpiffTask) -> dict: + """Extracts data from the latest_data that is directly related to the form that is being submitted.""" + data = {} + + if hasattr(task.task_spec, "form"): + for field in task.task_spec.form.fields: + if field.has_property(Task.FIELD_PROP_REPEAT): + group = field.get_property(Task.FIELD_PROP_REPEAT) + if group in latest_data: + data[group] = latest_data[group] + else: + value = ProcessInstanceService.get_dot_value(field.id, latest_data) + if value is not None: + ProcessInstanceService.set_dot_value(field.id, value, data) + return data + + @staticmethod + def create_dot_dict(data: dict) -> dict[str, Any]: + """Create_dot_dict.""" + dot_dict: dict[str, Any] = {} + for key, value in data.items(): + ProcessInstanceService.set_dot_value(key, value, dot_dict) + return dot_dict + + @staticmethod + def get_dot_value(path: str, source: dict) -> Any: + """Get_dot_value.""" + # Given a path in dot notation, uas as 'fruit.type' tries to find that value in + # the source, but looking deep in the dictionary. 
+ paths = path.split(".") # [a,b,c] + s = source + index = 0 + for p in paths: + index += 1 + if isinstance(s, dict) and p in s: + if index == len(paths): + return s[p] + else: + s = s[p] + if path in source: + return source[path] + return None + + @staticmethod + def set_dot_value(path: str, value: Any, target: dict) -> dict: + """Set_dot_value.""" + # Given a path in dot notation, such as "fruit.type", and a value "apple", will + # set the value in the target dictionary, as target["fruit"]["type"]="apple" + destination = target + paths = path.split(".") # [a,b,c] + index = 0 + for p in paths: + index += 1 + if p not in destination: + if index == len(paths): + destination[p] = value + else: + destination[p] = {} + destination = destination[p] + return target + + @staticmethod + def spiff_task_to_api_task( + spiff_task: SpiffTask, add_docs_and_forms: bool = False + ) -> Task: + """Spiff_task_to_api_task.""" + task_type = spiff_task.task_spec.spec_type + + info = spiff_task.task_info() + if info["is_looping"]: + mi_type = MultiInstanceType.looping + elif info["is_sequential_mi"]: + mi_type = MultiInstanceType.sequential + elif info["is_parallel_mi"]: + mi_type = MultiInstanceType.parallel + else: + mi_type = MultiInstanceType.none + + props = {} + if hasattr(spiff_task.task_spec, "extensions"): + for key, val in spiff_task.task_spec.extensions.items(): + props[key] = val + + if hasattr(spiff_task.task_spec, "lane"): + lane = spiff_task.task_spec.lane + else: + lane = None + + parent_id = None + if spiff_task.parent: + parent_id = spiff_task.parent.id + + task = Task( + spiff_task.id, + spiff_task.task_spec.name, + spiff_task.task_spec.description, + task_type, + spiff_task.get_state_name(), + lane=lane, + multi_instance_type=mi_type, + multi_instance_count=info["mi_count"], + multi_instance_index=info["mi_index"], + process_name=spiff_task.task_spec._wf_spec.description, + properties=props, + parent=parent_id, + ) + + return task diff --git 
"""Process_model_service."""
import json
import os
import shutil
from typing import Any
from typing import List
from typing import Optional
from typing import TypeVar

from flask_bpmn.api.api_error import ApiError

from spiffworkflow_backend.exceptions.process_entity_not_found_error import (
    ProcessEntityNotFoundError,
)
from spiffworkflow_backend.models.process_group import ProcessGroup
from spiffworkflow_backend.models.process_group import ProcessGroupSchema
from spiffworkflow_backend.models.process_instance import ProcessInstanceModel
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema
from spiffworkflow_backend.services.file_system_service import FileSystemService

T = TypeVar("T")


class ProcessModelService(FileSystemService):
    """ProcessModelService."""

    """This is a way of persisting json files to the file system in a way that mimics the data
    as it would have been stored in the database. This is specific to Workflow Specifications, and
    Workflow Specification process_groups.
    We do this, so we can easily drop in a new configuration on the file system, and change all
    the workflow process_models at once, or manage those file in a git repository. """

    GROUP_SCHEMA = ProcessGroupSchema()
    WF_SCHEMA = ProcessModelInfoSchema()

    @staticmethod
    def get_batch(
        items: list[T],
        page: int = 1,
        per_page: int = 10,
    ) -> list[T]:
        """Return the slice of *items* for the given 1-based page."""
        start = (page - 1) * per_page
        end = start + per_page
        return items[start:end]

    def add_spec(self, process_model: ProcessModelInfo) -> None:
        """Add a new process model, placing it at the end of the display order."""
        display_order = self.next_display_order(process_model)
        process_model.display_order = display_order
        self.save_process_model(process_model)

    def update_spec(
        self, process_model: ProcessModelInfo, attributes_to_update: dict
    ) -> None:
        """Apply *attributes_to_update* to the model (unknown keys ignored) and persist it."""
        for atu_key, atu_value in attributes_to_update.items():
            if hasattr(process_model, atu_key):
                setattr(process_model, atu_key, atu_value)
        self.save_process_model(process_model)

    def save_process_model(self, process_model: ProcessModelInfo) -> None:
        """Serialize the process model to its workflow.json file on disk."""
        spec_path = self.workflow_path(process_model)
        # Master, library, and standalone specs live outside any process group.
        if (
            process_model.is_master_spec
            or process_model.library
            or process_model.standalone
        ):
            process_model.process_group_id = ""
        os.makedirs(spec_path, exist_ok=True)
        json_path = os.path.join(spec_path, self.WF_JSON_FILE)
        with open(json_path, "w") as wf_json:
            json.dump(
                self.WF_SCHEMA.dump(process_model), wf_json, indent=4, sort_keys=True
            )

    def process_model_delete(self, process_model_id: str) -> None:
        """Delete Procecss Model.

        Raises:
            ApiError: if any process instance still references the model.
        """
        instances = ProcessInstanceModel.query.filter(
            ProcessInstanceModel.process_model_identifier == process_model_id
        ).all()
        if len(instances) > 0:
            raise ApiError(
                error_code="existing_instances",
                message=f"We cannot delete the model `{process_model_id}`, there are existing instances that depend on it.",
            )
        process_model = self.get_process_model(process_model_id)
        path = self.workflow_path(process_model)
        shutil.rmtree(path)

    @property
    def master_spec(self) -> Optional[ProcessModelInfo]:
        """The master specification, or None if it does not exist on disk."""
        return self.get_master_spec()

    def get_master_spec(self) -> Optional[ProcessModelInfo]:
        """Load the master specification from disk, or return None if absent."""
        path = os.path.join(
            FileSystemService.root_path(), FileSystemService.MASTER_SPECIFICATION
        )
        if os.path.exists(path):
            return self.__scan_spec(path, FileSystemService.MASTER_SPECIFICATION)
        return None

    @classmethod
    def get_process_model_from_relative_path(
        cls, relative_path: str
    ) -> ProcessModelInfo:
        """Load a process model given its path relative to the file-system root."""
        process_group_identifier = os.path.dirname(relative_path)
        process_group = cls().get_process_group(process_group_identifier)
        path = os.path.join(FileSystemService.root_path(), relative_path)
        return cls().__scan_spec(path, process_group=process_group)

    def get_process_model(
        self, process_model_id: str, group_id: Optional[str] = None
    ) -> ProcessModelInfo:
        """Get a process model from a model and group id.

        Raises:
            ProcessEntityNotFoundError: if no matching model exists.
        """
        if not os.path.exists(FileSystemService.root_path()):
            raise ProcessEntityNotFoundError("process_model_not_found")

        master_spec = self.get_master_spec()
        if master_spec and master_spec.id == process_model_id:
            return master_spec
        if group_id is not None:
            process_group = self.get_process_group(group_id)
            if process_group is not None:
                for process_model in process_group.process_models:
                    if process_model_id == process_model.id:
                        return process_model
        # No group hint (or not found there): scan every group directory.
        with os.scandir(FileSystemService.root_path()) as process_group_dirs:
            for item in process_group_dirs:
                if item.is_dir():
                    with os.scandir(item.path) as spec_dirs:
                        for sd in spec_dirs:
                            if sd.name == process_model_id:
                                # Now we have the process_group directory, and spec directory
                                process_group = self.__scan_process_group(item)
                                return self.__scan_spec(sd.path, sd.name, process_group)
        raise ProcessEntityNotFoundError("process_model_not_found")

    def get_process_models(
        self, process_group_id: Optional[str] = None
    ) -> List[ProcessModelInfo]:
        """Get process models, optionally restricted to one group, in sorted order."""
        process_groups = []
        if process_group_id is None:
            process_groups = self.get_process_groups()
        else:
            process_group = self.get_process_group(process_group_id)
            if process_group is not None:
                process_groups.append(process_group)

        process_models = []
        for process_group in process_groups:
            process_models.extend(process_group.process_models)
        process_models.sort()
        return process_models

    def get_process_groups(self) -> list[ProcessGroup]:
        """Returns the process_groups as a list in display order."""
        process_groups = self.__scan_process_groups()
        process_groups.sort()
        return process_groups

    def get_libraries(self) -> List[ProcessModelInfo]:
        """Return the models in the library group, or [] if the group is missing."""
        process_group = self.get_process_group(self.LIBRARY_SPECS)
        if not process_group:
            return []
        return process_group.process_models

    def get_standalones(self) -> List[ProcessModelInfo]:
        """Return the models in the standalone group, or [] if the group is missing."""
        process_group = self.get_process_group(self.STAND_ALONE_SPECS)
        if not process_group:
            return []
        return process_group.process_models

    def get_process_group(self, process_group_id: str) -> ProcessGroup:
        """Look for a given process_group, and return it.

        Raises:
            ProcessEntityNotFoundError: if no directory with that id exists.
        """
        if os.path.exists(FileSystemService.root_path()):
            with os.scandir(FileSystemService.root_path()) as directory_items:
                for item in directory_items:
                    if item.is_dir() and item.name == process_group_id:
                        return self.__scan_process_group(item)

        raise ProcessEntityNotFoundError(
            "process_group_not_found", f"Process Group Id: {process_group_id}"
        )

    def add_process_group(self, process_group: ProcessGroup) -> ProcessGroup:
        """Add a new process group at the end of the display order."""
        display_order = len(self.get_process_groups())
        process_group.display_order = display_order
        return self.update_process_group(process_group)

    def update_process_group(self, process_group: ProcessGroup) -> ProcessGroup:
        """Persist the group's process_group.json file and return the group."""
        cat_path = self.process_group_path(process_group.id)
        os.makedirs(cat_path, exist_ok=True)
        json_path = os.path.join(cat_path, self.CAT_JSON_FILE)
        with open(json_path, "w") as cat_json:
            json.dump(
                self.GROUP_SCHEMA.dump(process_group),
                cat_json,
                indent=4,
                sort_keys=True,
            )
        return process_group

    def process_group_delete(self, process_group_id: str) -> None:
        """Delete the group's directory and re-pack the remaining display order."""
        path = self.process_group_path(process_group_id)
        if os.path.exists(path):
            shutil.rmtree(path)
        self.cleanup_process_group_display_order()

    def cleanup_process_group_display_order(self) -> List[Any]:
        """Renumber group display_order to 0..n-1 in current sorted order."""
        process_groups = self.get_process_groups()  # Returns an ordered list
        index = 0
        for process_group in process_groups:
            process_group.display_order = index
            self.update_process_group(process_group)
            index += 1
        return process_groups

    def __scan_process_groups(self) -> list[ProcessGroup]:
        """Scan the root directory for process groups (hidden dirs skipped)."""
        if not os.path.exists(FileSystemService.root_path()):
            return []  # Nothing to scan yet. There are no files.

        with os.scandir(FileSystemService.root_path()) as directory_items:
            process_groups = []
            for item in directory_items:
                if item.is_dir() and not item.name[0] == ".":
                    process_groups.append(self.__scan_process_group(item))
            return process_groups

    def __scan_process_group(self, dir_item: os.DirEntry) -> ProcessGroup:
        """Reads the process_group.json file, and any workflow directories."""
        cat_path = os.path.join(dir_item.path, self.CAT_JSON_FILE)
        if os.path.exists(cat_path):
            with open(cat_path) as cat_json:
                data = json.load(cat_json)
                process_group = ProcessGroup(**data)
        else:
            # No json file yet: synthesize a group from the directory name and
            # write the file so subsequent scans find it.
            process_group = ProcessGroup(
                id=dir_item.name,
                display_name=dir_item.name,
                display_order=10000,
                admin=False,
            )
            with open(cat_path, "w") as wf_json:
                json.dump(self.GROUP_SCHEMA.dump(process_group), wf_json, indent=4)
        with os.scandir(dir_item.path) as workflow_dirs:
            process_group.process_models = []
            for item in workflow_dirs:
                if item.is_dir():
                    process_group.process_models.append(
                        self.__scan_spec(
                            item.path, item.name, process_group=process_group
                        )
                    )
            process_group.process_models.sort()
        return process_group

    def __scan_spec(
        self,
        path: str,
        name: Optional[str] = None,
        process_group: Optional[ProcessGroup] = None,
    ) -> ProcessModelInfo:
        """Load (or create) the workflow.json for one spec directory.

        Raises:
            ApiError: if the spec has no json file and no *name* was given.
        """
        spec_path = os.path.join(path, self.WF_JSON_FILE)
        is_master = FileSystemService.MASTER_SPECIFICATION in spec_path

        if os.path.exists(spec_path):
            with open(spec_path) as wf_json:
                data = json.load(wf_json)
                spec = ProcessModelInfo(**data)
        else:
            if name is None:
                raise ApiError(
                    error_code="missing_name_of_process_model",
                    message="Missing name of process model. It should be given",
                )

            spec = ProcessModelInfo(
                id=name,
                library=False,
                standalone=False,
                is_master_spec=is_master,
                display_name=name,
                description="",
                display_order=0,
                is_review=False,
                libraries=[],
            )
            with open(spec_path, "w") as wf_json:
                json.dump(self.WF_SCHEMA.dump(spec), wf_json, indent=4)
        if process_group:
            spec.process_group = process_group
            spec.process_group_id = process_group.id
        return spec
"""Script_unit_test_runner."""
import json
import sys
import traceback
from dataclasses import dataclass
from typing import Any
from typing import Optional

from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException  # type: ignore
from SpiffWorkflow.task import Task as SpiffTask  # type: ignore

from spiffworkflow_backend.services.process_instance_processor import (
    CustomBpmnScriptEngine,
)

PythonScriptContext = dict[str, Any]


@dataclass
class ScriptUnitTestResult:
    """Outcome of running one script unit test."""

    result: bool
    context: Optional[PythonScriptContext] = None
    error: Optional[str] = None
    line_number: Optional[int] = None
    offset: Optional[int] = None


class ScriptUnitTestRunner:
    """Runs a script task's script against its declared unit tests."""

    _script_engine = CustomBpmnScriptEngine()

    @classmethod
    def run_with_script_and_pre_post_contexts(
        cls,
        script: str,
        input_context: PythonScriptContext,
        expected_output_context: PythonScriptContext,
    ) -> ScriptUnitTestResult:
        """Execute *script* against a copy of *input_context* and compare the result.

        Returns a failed result (never raises) for syntax or runtime errors in
        the script; the test passes when the post-execution context equals
        *expected_output_context* exactly.
        """
        # make a new variable just for clarity, since we are going to update this dict in place
        # with the output variables from the script.
        context = input_context.copy()

        try:
            cls._script_engine._execute(context=context, script=script)
        except SyntaxError as ex:
            return ScriptUnitTestResult(
                result=False,
                error=f"Syntax error: {str(ex)}",
                line_number=ex.lineno,
                offset=ex.offset,
            )
        except Exception as ex:
            if isinstance(ex, WorkflowTaskExecException):
                # we never expect this to happen, so we want to know about it.
                # if indeed we are always getting WorkflowTaskExecException,
                # we can simplify this error handling and replace it with the
                # except block from revision cccd523ea39499c10f7f3c2e3f061852970973ac
                raise ex
            error_message = f"{ex.__class__.__name__}: {str(ex)}"
            line_number = 0
            _cl, _exc, tb = sys.exc_info()
            # Loop back through the stack trace to find the file called
            # 'string' - which is the script we are executing, then use that
            # to parse and pull out the offending line.
            # NOTE(review): exec'd code usually reports filename "<string>",
            # not "" — confirm against CustomBpmnScriptEngine; as written this
            # match may never fire and line_number stays 0.
            for frame_summary in traceback.extract_tb(tb):
                if frame_summary.filename == "":
                    if frame_summary.lineno is not None:
                        line_number = frame_summary.lineno

            return ScriptUnitTestResult(
                result=False,
                line_number=line_number,
                error=f"Failed to execute script: {error_message}",
            )

        result_as_boolean = context == expected_output_context

        script_unit_test_result = ScriptUnitTestResult(
            result=result_as_boolean, context=context
        )
        return script_unit_test_result

    @classmethod
    def run_test(
        cls,
        task: SpiffTask,
        test_identifier: str,
    ) -> ScriptUnitTestResult:
        """Run the unit test with id *test_identifier* declared on *task*.

        Returns a failed result (instead of raising IndexError) when no unit
        test with that identifier exists, or when either json payload is
        malformed.
        """
        # this is totally made up, but hopefully resembles what spiffworkflow ultimately does
        unit_tests = task.task_spec.extensions["unitTests"]
        unit_test = next(
            (ut for ut in unit_tests if ut["id"] == test_identifier), None
        )
        if unit_test is None:
            return ScriptUnitTestResult(
                result=False,
                error=f"No unit test found with identifier: {test_identifier}",
            )

        input_context = None
        expected_output_context = None
        try:
            input_context = json.loads(unit_test["inputJson"])
        except json.decoder.JSONDecodeError as ex:
            return ScriptUnitTestResult(
                result=False,
                error=f"Failed to parse inputJson: {unit_test['inputJson']}: {str(ex)}",
            )

        try:
            expected_output_context = json.loads(unit_test["expectedOutputJson"])
        except json.decoder.JSONDecodeError as ex:
            return ScriptUnitTestResult(
                result=False,
                error=f"Failed to parse expectedOutputJson: {unit_test['expectedOutputJson']}: {str(ex)}",
            )

        script = task.task_spec.script
        return cls.run_with_script_and_pre_post_contexts(
            script, input_context, expected_output_context
        )
from spiffworkflow_backend.models.secret_model import SecretAllowedProcessPathModel
from spiffworkflow_backend.models.secret_model import SecretModel

# from cryptography.fernet import Fernet
#
#
# class EncryptionService:
#     key = Fernet.generate_key()  # this is your "password"
#     cipher_suite = Fernet(key)
#     encoded_text = cipher_suite.encrypt(b"Hello stackoverflow!")
#     decoded_text = cipher_suite.decrypt(encoded_text)


class SecretService:
    """CRUD for secrets and the process paths allowed to read them."""

    def encrypt_key(self, plain_key: str) -> str:
        """Encrypt_key. Not yet implemented — currently returns None despite the annotation."""
        # flask_secret = current_app.secret_key
        # print("encrypt_key")
        ...

    def decrypt_key(self, encrypted_key: str) -> str:
        """Decrypt key. Not yet implemented — currently returns None despite the annotation."""
        ...

    @staticmethod
    def add_secret(
        key: str,
        value: str,
        creator_user_id: int,
    ) -> SecretModel:
        """Create and persist a new secret.

        Raises:
            ApiError: if the database commit fails.
        """
        # encrypted_key = self.encrypt_key(key)
        secret_model = SecretModel(
            key=key, value=value, creator_user_id=creator_user_id
        )
        db.session.add(secret_model)
        try:
            db.session.commit()
        except Exception as e:
            # SECURITY: only the last 4 characters of the value may appear in
            # the error message. (The previous value[:-4] leaked everything
            # BUT the last 4 characters.)
            raise ApiError(
                error_code="create_secret_error",
                message=f"There was an error creating a secret with key: {key} and value ending with: {value[-4:]}. "
                f"Original error is {e}",
            ) from e
        return secret_model

    @staticmethod
    def get_secret(key: str) -> Optional[SecretModel]:
        """Return the secret with the given key.

        Raises:
            ApiError: if no secret with that key exists.
        """
        secret: SecretModel = (
            db.session.query(SecretModel).filter(SecretModel.key == key).first()
        )
        if secret is not None:
            return secret
        else:
            raise ApiError(
                error_code="missing_secret_error",
                message=f"Unable to locate a secret with the name: {key}. ",
            )

    @staticmethod
    def update_secret(
        key: str,
        value: str,
        creator_user_id: Optional[int] = None,
    ) -> None:
        """Update a secret's value; only its creator may do so.

        Raises:
            ApiError: 401 when the caller is not the creator, 404 when the
                secret does not exist, or a generic error on commit failure.
        """
        secret_model = SecretModel.query.filter(SecretModel.key == key).first()
        if secret_model:
            if secret_model.creator_user_id == creator_user_id:
                secret_model.value = value
                db.session.add(secret_model)
                try:
                    db.session.commit()
                except Exception as e:
                    # SECURITY: do not echo the secret value into the error.
                    raise ApiError(
                        error_code="update_secret_error",
                        message=f"There was an error updating the secret with key: {key}",
                    ) from e
            else:
                raise ApiError(
                    error_code="update_secret_error",
                    message=f"User: {creator_user_id} cannot update the secret with key : {key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                error_code="update_secret_error",
                message=f"Cannot update secret with key: {key}. Resource does not exist.",
                status_code=404,
            )

    @staticmethod
    def delete_secret(key: str, user_id: int) -> None:
        """Delete a secret; only its creator may do so.

        Raises:
            ApiError: 401 when the caller is not the creator, 404 when the
                secret does not exist, or a generic error on commit failure.
        """
        secret_model = SecretModel.query.filter(SecretModel.key == key).first()
        if secret_model:
            if secret_model.creator_user_id == user_id:
                db.session.delete(secret_model)
                try:
                    db.session.commit()
                except Exception as e:
                    raise ApiError(
                        error_code="delete_secret_error",
                        message=f"Could not delete secret with key: {key}. Original error is: {e}",
                    ) from e
            else:
                raise ApiError(
                    error_code="delete_secret_error",
                    message=f"User: {user_id} cannot delete the secret with key : {key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                error_code="delete_secret_error",
                message=f"Cannot delete secret with key: {key}. Resource does not exist.",
                status_code=404,
            )

    @staticmethod
    def add_allowed_process(
        secret_id: int, user_id: str, allowed_relative_path: str
    ) -> SecretAllowedProcessPathModel:
        """Grant a process path access to a secret; only the secret's creator may.

        Raises:
            ApiError: 409 when the path is already allowed, 401 when the caller
                is not the creator, 404 when the secret does not exist.
        """
        secret_model = SecretModel.query.filter(SecretModel.id == secret_id).first()
        if secret_model:
            if secret_model.creator_user_id == user_id:
                secret_process_model = SecretAllowedProcessPathModel(
                    secret_id=secret_model.id,
                    allowed_relative_path=allowed_relative_path,
                )
                assert secret_process_model  # noqa: S101
                db.session.add(secret_process_model)
                try:
                    db.session.commit()
                except IntegrityError as ie:
                    db.session.rollback()
                    raise ApiError(
                        error_code="add_allowed_process_error",
                        message=f"Error adding allowed_process with secret {secret_model.id}, "
                        f"and path: {allowed_relative_path}. Resource already exists. "
                        f"Original error is {ie}",
                        status_code=409,
                    ) from ie
                except Exception as e:
                    # TODO: should we call db.session.rollback() here?
                    # db.session.rollback()
                    raise ApiError(
                        error_code="add_allowed_process_error",
                        message=f"Could not create an allowed process for secret with key: {secret_model.key} "
                        f"with path: {allowed_relative_path}. "
                        f"Original error is {e}",
                    ) from e
                return secret_process_model
            else:
                raise ApiError(
                    error_code="add_allowed_process_error",
                    message=f"User: {user_id} cannot modify the secret with key : {secret_model.key}",
                    status_code=401,
                )
        else:
            raise ApiError(
                error_code="add_allowed_process_error",
                message=f"Cannot add allowed process to secret with key: {secret_id}. Resource does not exist.",
                status_code=404,
            )
"""ServiceTask_service."""
import json
from typing import Any

import requests
from flask import current_app

from spiffworkflow_backend.services.file_system_service import FileSystemService
from spiffworkflow_backend.services.secret_service import SecretService


def connector_proxy_url() -> Any:
    """Returns the connector proxy url."""
    return current_app.config["CONNECTOR_PROXY_URL"]


class ServiceTaskDelegate:
    """Resolves connector parameter values and forwards calls to the proxy."""

    @staticmethod
    def normalize_value(value: Any) -> Any:
        """Normalize a connector parameter value.

        Dicts are json-encoded; string values with a "secret:" prefix are
        resolved via SecretService and "file:" prefixes are read from disk.
        Non-string values (ints, bools, None, ...) are passed through
        unchanged — previously they crashed on .startswith().
        """
        if isinstance(value, dict):
            value = json.dumps(value)

        # Prefix handling only applies to strings; everything else is opaque.
        if isinstance(value, str):
            secret_prefix = "secret:"  # noqa: S105
            if value.startswith(secret_prefix):
                key = value.removeprefix(secret_prefix)
                secret = SecretService().get_secret(key)
                assert secret  # noqa: S101
                return secret.value

            file_prefix = "file:"
            if value.startswith(file_prefix):
                file_name = value.removeprefix(file_prefix)
                full_path = FileSystemService.full_path_from_relative_path(file_name)
                with open(full_path) as f:
                    return f.read()

        return value

    @staticmethod
    def call_connector(name: str, bpmn_params: Any, task_data: Any) -> str:
        """Calls a connector via the configured proxy."""
        params = {
            k: ServiceTaskDelegate.normalize_value(v["value"])
            for k, v in bpmn_params.items()
        }
        params["spiff__task_data"] = json.dumps(task_data)

        proxied_response = requests.get(f"{connector_proxy_url()}/v1/do/{name}", params)

        if proxied_response.status_code != 200:
            print("got error from connector proxy")

        return proxied_response.text


class ServiceTaskService:
    """Queries the connector proxy for its capabilities."""

    @staticmethod
    def available_connectors() -> Any:
        """Returns a list of available connectors.

        Best-effort: any failure (proxy down, bad payload) yields [].
        """
        try:
            response = requests.get(f"{connector_proxy_url()}/v1/commands")

            if response.status_code != 200:
                return []

            parsed_response = json.loads(response.text)
            return parsed_response
        except Exception as e:
            print(e)
            return []
class SpecFileService(FileSystemService):
    """SpecFileService."""

    """We store spec files on the file system. This allows us to take advantage of Git for
    syncing and versioning.
    The files are stored in a directory whose path is determined by the category and spec names.
    """

    @staticmethod
    def get_files(
        process_model_info: ProcessModelInfo,
        file_name: Optional[str] = None,
        include_libraries: bool = False,
        extension_filter: str = "",
    ) -> List[File]:
        """Return all files associated with a workflow specification."""
        path = SpecFileService.workflow_path(process_model_info)
        files = SpecFileService._get_files(path, file_name)
        if include_libraries:
            for lib_name in process_model_info.libraries:
                lib_path = SpecFileService.library_path(lib_name)
                files.extend(SpecFileService._get_files(lib_path, file_name))

        if extension_filter != "":
            files = list(
                filter(lambda file: file.name.endswith(extension_filter), files)
            )

        return files

    @staticmethod
    def add_file(
        process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
    ) -> File:
        """Add a file to a workflow spec (same semantics as update_file)."""
        # Same as update
        return SpecFileService.update_file(process_model_info, file_name, binary_data)

    @staticmethod
    def update_file(
        process_model_info: ProcessModelInfo, file_name: str, binary_data: bytes
    ) -> File:
        """Write file contents to disk; BPMN files also get process-id bookkeeping."""
        SpecFileService.assert_valid_file_name(file_name)
        file_path = SpecFileService.file_path(process_model_info, file_name)
        SpecFileService.write_file_data_to_system(file_path, binary_data)
        file = SpecFileService.to_file_object(file_name, file_path)

        if file.type == FileType.bpmn.value:
            set_primary_file = False
            if (
                process_model_info.primary_file_name is None
                or file_name == process_model_info.primary_file_name
            ):
                # If no primary process exists, make this primary process.
                set_primary_file = True
            SpecFileService.process_bpmn_file(
                process_model_info,
                file_name,
                binary_data,
                set_primary_file=set_primary_file,
            )

        return file

    @staticmethod
    def get_data(process_model_info: ProcessModelInfo, file_name: str) -> bytes:
        """Read a spec file's bytes, falling back to the model's libraries.

        Raises:
            ApiError: if the file exists in neither the model nor its libraries.
        """
        file_path = SpecFileService.file_path(process_model_info, file_name)
        if not os.path.exists(file_path):
            # If the file isn't here, it may be in a library
            for lib in process_model_info.libraries:
                file_path = SpecFileService.library_path(lib)
                file_path = os.path.join(file_path, file_name)
                if os.path.exists(file_path):
                    break
        if not os.path.exists(file_path):
            raise ApiError(
                "unknown_file",
                f"No file found with name {file_name} in {process_model_info.display_name}",
            )
        with open(file_path, "rb") as f_handle:
            spec_file_data = f_handle.read()
        return spec_file_data

    @staticmethod
    def file_path(spec: ProcessModelInfo, file_name: str) -> str:
        """Absolute path of *file_name* inside the spec's workflow directory."""
        return os.path.join(SpecFileService.workflow_path(spec), file_name)

    @staticmethod
    def last_modified(spec: ProcessModelInfo, file_name: str) -> datetime:
        """Last-modified timestamp of a spec file as a datetime."""
        path = SpecFileService.file_path(spec, file_name)
        return FileSystemService._last_modified(path)

    @staticmethod
    def timestamp(spec: ProcessModelInfo, file_name: str) -> float:
        """Last-modified timestamp of a spec file as a float."""
        path = SpecFileService.file_path(spec, file_name)
        return FileSystemService._timestamp(path)

    @staticmethod
    def delete_file(spec: ProcessModelInfo, file_name: str) -> None:
        """Remove one file from the spec's directory."""
        # Fixme: Remember to remove the lookup files when the spec file is removed.
        # lookup_files = session.query(LookupFileModel).filter_by(file_model_id=file_id).all()
        # for lf in lookup_files:
        #     session.query(LookupDataModel).filter_by(lookup_file_model_id=lf.id).delete()
        #     session.query(LookupFileModel).filter_by(id=lf.id).delete()
        file_path = SpecFileService.file_path(spec, file_name)
        os.remove(file_path)

    @staticmethod
    def delete_all_files(spec: ProcessModelInfo) -> None:
        """Remove the spec's entire directory tree if it exists."""
        dir_path = SpecFileService.workflow_path(spec)
        if os.path.exists(dir_path):
            shutil.rmtree(dir_path)

    @staticmethod
    def get_etree_element_from_file_name(
        process_model_info: ProcessModelInfo, file_name: str
    ) -> EtreeElement:
        """Parse the named spec file into an lxml element tree root."""
        binary_data = SpecFileService.get_data(process_model_info, file_name)
        return SpecFileService.get_etree_element_from_binary_data(
            binary_data, file_name
        )

    @staticmethod
    def get_etree_element_from_binary_data(
        binary_data: bytes, file_name: str
    ) -> EtreeElement:
        """Parse raw XML bytes, converting parse failures into ApiError.

        Raises:
            ApiError: "invalid_xml" when the bytes are not well-formed XML.
        """
        try:
            return etree.fromstring(binary_data)
        except etree.XMLSyntaxError as xse:
            raise ApiError(
                "invalid_xml",
                "Failed to parse xml: " + str(xse),
                file_name=file_name,
            ) from xse

    @staticmethod
    def process_bpmn_file(
        process_model_info: ProcessModelInfo,
        file_name: str,
        binary_data: Optional[bytes] = None,
        set_primary_file: Optional[bool] = False,
    ) -> None:
        """Extract the process id from a BPMN file and record its metadata.

        Raises:
            ApiError: for non-BPMN files, missing executable process tags,
                or other validation failures.
        """
        # If this is a BPMN, extract the process id, and determine if it is contains swim lanes.
        extension = SpecFileService.get_extension(file_name)
        file_type = FileType[extension]
        if file_type == FileType.bpmn:
            if not binary_data:
                binary_data = SpecFileService.get_data(process_model_info, file_name)

            bpmn_etree_element: EtreeElement = (
                SpecFileService.get_etree_element_from_binary_data(
                    binary_data, file_name
                )
            )

            try:
                if set_primary_file:
                    attributes_to_update = {
                        "primary_process_id": (
                            SpecFileService.get_bpmn_process_identifier(
                                bpmn_etree_element
                            )
                        ),
                        "primary_file_name": file_name,
                        "is_review": SpecFileService.has_swimlane(bpmn_etree_element),
                    }
                    ProcessModelService().update_spec(
                        process_model_info, attributes_to_update
                    )

                SpecFileService.check_for_message_models(
                    bpmn_etree_element, process_model_info
                )
                SpecFileService.store_bpmn_process_identifiers(
                    process_model_info, file_name, bpmn_etree_element
                )

            except ValidationException as ve:
                if ve.args[0].find("No executable process tag found") >= 0:
                    raise ApiError(
                        error_code="missing_executable_option",
                        message="No executable process tag found. Please make sure the Executable option is set in the workflow.",
                    ) from ve
                else:
                    raise ApiError(
                        error_code="validation_error",
                        message=f"There was an error validating your workflow. Original message is: {ve}",
                    ) from ve
        else:
            raise ApiError(
                "invalid_xml",
                "Only a BPMN can be the primary file.",
                file_name=file_name,
            )

    @staticmethod
    def has_swimlane(et_root: _Element) -> bool:
        """Look through XML and determine if there are any lanes present that have a label."""
        elements = et_root.xpath(
            "//bpmn:lane",
            namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
        )
        retval = False
        for el in elements:
            if el.get("name"):
                retval = True
        return retval

    @staticmethod
    def _is_executable(process_element: _Element) -> bool:
        """True when the element's isExecutable attribute is the string "true".

        The attribute is an XML string: a naive truthiness test treats
        isExecutable="false" as executable, which was the previous bug.
        A missing attribute counts as not executable, as before.
        """
        return str(process_element.attrib.get("isExecutable", "false")).lower() == "true"

    @staticmethod
    def append_identifier_of_process_to_array(
        process_element: _Element, process_identifiers: list[str]
    ) -> None:
        """Append the process element's identifier to *process_identifiers*."""
        # NOTE(review): prefers the "name" attribute (the human label) over
        # "id" when present — confirm this is intentional; downstream lookups
        # appear to be keyed by id.
        process_id_key = "id"
        if "name" in process_element.attrib:
            process_id_key = "name"

        process_identifiers.append(process_element.attrib[process_id_key])

    @staticmethod
    def get_all_bpmn_process_identifiers_for_process_model(
        process_model_info: ProcessModelInfo,
    ) -> list[str]:
        """Collect identifiers of all executable processes (and their subprocesses).

        Raises:
            ValidationException: when no executable process tag is found.
        """
        if process_model_info.primary_file_name is None:
            return []

        binary_data = SpecFileService.get_data(
            process_model_info, process_model_info.primary_file_name
        )

        et_root: EtreeElement = SpecFileService.get_etree_element_from_binary_data(
            binary_data, process_model_info.primary_file_name
        )
        process_identifiers: list[str] = []
        for child in et_root:
            if child.tag.endswith("process") and SpecFileService._is_executable(child):
                subprocesses = child.xpath(
                    "//bpmn:subProcess",
                    namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"},
                )
                for subprocess in subprocesses:
                    SpecFileService.append_identifier_of_process_to_array(
                        subprocess, process_identifiers
                    )

                SpecFileService.append_identifier_of_process_to_array(
                    child, process_identifiers
                )

        if len(process_identifiers) == 0:
            raise ValidationException("No executable process tag found")
        return process_identifiers

    @staticmethod
    def get_executable_process_elements(et_root: _Element) -> list[_Element]:
        """Return the executable top-level process elements.

        Raises:
            ValidationException: when no executable process tag is found.
        """
        process_elements = []
        for child in et_root:
            if child.tag.endswith("process") and SpecFileService._is_executable(child):
                process_elements.append(child)

        if len(process_elements) == 0:
            raise ValidationException("No executable process tag found")
        return process_elements

    @staticmethod
    def get_executable_bpmn_process_identifiers(et_root: _Element) -> list[str]:
        """Return the "id" attribute of every executable process element."""
        process_elements = SpecFileService.get_executable_process_elements(et_root)
        bpmn_process_identifiers = [pe.attrib["id"] for pe in process_elements]
        return bpmn_process_identifiers

    @staticmethod
    def get_bpmn_process_identifier(et_root: _Element) -> str:
        """Return the id of the primary executable process.

        With multiple executable processes, the one containing a startEvent wins.

        Raises:
            ValidationException: no executable process, or none with a startEvent.
        """
        process_elements = SpecFileService.get_executable_process_elements(et_root)

        # There are multiple root elements
        if len(process_elements) > 1:

            # Look for the element that has the startEvent in it
            for e in process_elements:
                this_element: EtreeElement = e
                for child_element in list(this_element):
                    if child_element.tag.endswith("startEvent"):
                        # coorce Any to string
                        return str(this_element.attrib["id"])

            raise ValidationException(
                "No start event found in %s" % et_root.attrib["id"]
            )

        return str(process_elements[0].attrib["id"])

    @staticmethod
    def store_bpmn_process_identifiers(
        process_model_info: ProcessModelInfo, bpmn_file_name: str, et_root: _Element
    ) -> None:
        """Record each executable process id -> bpmn file path in the lookup table.

        Raises:
            ValidationException: when a process id is already claimed by a bpmn
                file that still exists on disk (duplicate process id).
        """
        relative_process_model_path = SpecFileService.process_model_relative_path(
            process_model_info
        )
        relative_bpmn_file_path = os.path.join(
            relative_process_model_path, bpmn_file_name
        )
        bpmn_process_identifiers = (
            SpecFileService.get_executable_bpmn_process_identifiers(et_root)
        )
        for bpmn_process_identifier in bpmn_process_identifiers:
            process_id_lookup = BpmnProcessIdLookup.query.filter_by(
                bpmn_process_identifier=bpmn_process_identifier
            ).first()
            if process_id_lookup is None:
                process_id_lookup = BpmnProcessIdLookup(
                    bpmn_process_identifier=bpmn_process_identifier,
                    bpmn_file_relative_path=relative_bpmn_file_path,
                )
                db.session.add(process_id_lookup)
                db.session.commit()
            else:
                if relative_bpmn_file_path != process_id_lookup.bpmn_file_relative_path:
                    full_bpmn_file_path = SpecFileService.full_path_from_relative_path(
                        process_id_lookup.bpmn_file_relative_path
                    )
                    # if the old relative bpmn file no longer exists, then assume things were moved around
                    # on the file system. Otherwise, assume it is a duplicate process id and error.
                    if os.path.isfile(full_bpmn_file_path):
                        raise ValidationException(
                            f"Process id ({bpmn_process_identifier}) has already been used for "
                            f"{process_id_lookup.bpmn_file_relative_path}. It cannot be reused."
                        )
                    else:
                        process_id_lookup.bpmn_file_relative_path = (
                            relative_bpmn_file_path
                        )
                        db.session.add(process_id_lookup)
                        db.session.commit()
"http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if message_event_definitions: + message_event_definition = message_event_definitions[0] + message_model_identifier = message_event_definition.attrib.get( + "messageRef" + ) + if message_model_identifier is None: + raise ValidationException( + "Could not find messageRef from message event definition: {message_event_definition}" + ) + + message_model = MessageModel.query.filter_by( + identifier=message_model_identifier + ).first() + if message_model is None: + raise ValidationException( + f"Could not find message model with identifier '{message_model_identifier}'" + f"specified by message event definition: {message_event_definition}" + ) + + message_triggerable_process_model = ( + MessageTriggerableProcessModel.query.filter_by( + message_model_id=message_model.id, + ).first() + ) + + if message_triggerable_process_model is None: + message_triggerable_process_model = MessageTriggerableProcessModel( + message_model_id=message_model.id, + process_model_identifier=process_model_info.id, + process_group_identifier=process_model_info.process_group_id, + ) + db.session.add(message_triggerable_process_model) + db.session.commit() + else: + if ( + message_triggerable_process_model.process_model_identifier + != process_model_info.id + or message_triggerable_process_model.process_group_identifier + != process_model_info.process_group_id + ): + raise ValidationException( + "Message model is already used to start process model" + f"'{process_model_info.process_group_id}/{process_model_info.id}'" + ) + + for child in et_root: + if child.tag.endswith("correlationProperty"): + correlation_identifier = child.attrib.get("id") + if correlation_identifier is None: + raise ValidationException( + "Correlation identifier is missing from bpmn xml" + ) + correlation_property_retrieval_expressions = child.xpath( + "//bpmn:correlationPropertyRetrievalExpression", + namespaces={"bpmn": "http://www.omg.org/spec/BPMN/20100524/MODEL"}, + ) + if 
not correlation_property_retrieval_expressions: + raise ValidationException( + f"Correlation is missing correlation property retrieval expressions: {correlation_identifier}" + ) + + for cpre in correlation_property_retrieval_expressions: + message_model_identifier = cpre.attrib.get("messageRef") + if message_model_identifier is None: + raise ValidationException( + f"Message identifier is missing from correlation property: {correlation_identifier}" + ) + message_model = MessageModel.query.filter_by( + identifier=message_model_identifier + ).first() + if message_model is None: + raise ValidationException( + f"Could not find message model with identifier '{message_model_identifier}'" + f"specified by correlation property: {cpre}" + ) + message_correlation_property = ( + MessageCorrelationPropertyModel.query.filter_by( + identifier=correlation_identifier, + message_model_id=message_model.id, + ).first() + ) + if message_correlation_property is None: + message_correlation_property = MessageCorrelationPropertyModel( + identifier=correlation_identifier, + message_model_id=message_model.id, + ) + db.session.add(message_correlation_property) + db.session.commit() diff --git a/src/spiffworkflow_backend/services/user_service.py b/src/spiffworkflow_backend/services/user_service.py new file mode 100644 index 00000000..b0dcec35 --- /dev/null +++ b/src/spiffworkflow_backend/services/user_service.py @@ -0,0 +1,297 @@ +"""User_service.""" +from typing import Any +from typing import Optional + +from flask import current_app +from flask import g +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.models.user import AdminSessionModel +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.models.user_group_assignment import UserGroupAssignmentModel + + +class UserService: 
+ """Provides common tools for working with users.""" + + @classmethod + def create_user( + cls, + service: str, + service_id: str, + name: Optional[str] = "", + username: Optional[str] = "", + email: Optional[str] = "", + ) -> UserModel: + """Create_user.""" + user_model: Optional[UserModel] = ( + UserModel.query.filter(UserModel.service == service) + .filter(UserModel.service_id == service_id) + .first() + ) + if user_model is None: + if username == "": + username = service_id + + user_model = UserModel( + username=username, + service=service, + service_id=service_id, + name=name, + email=email, + ) + db.session.add(user_model) + + try: + db.session.commit() + except Exception as e: + db.session.rollback() + raise ApiError( + error_code="add_user_error", + message=f"Could not add user {username}", + ) from e + cls.create_principal(user_model.id) + return user_model + + else: + # TODO: username may be ''. + # not sure what to send in error message. + # Don't really want to send service_id. + raise ( + ApiError( + error_code="user_already_exists", + message=f"User already exists: {username}", + status_code=409, + ) + ) + + @classmethod + def find_or_create_user( + cls, + service: str, + service_id: str, + name: Optional[str] = None, + username: Optional[str] = None, + email: Optional[str] = None, + ) -> UserModel: + """Find_or_create_user.""" + user_model: UserModel + try: + user_model = cls.create_user( + service=service, + service_id=service_id, + name=name, + username=username, + email=email, + ) + except ApiError: + user_model = ( + UserModel.query.filter(UserModel.service == service) + .filter(UserModel.service_id == service_id) + .first() + ) + return user_model + + # Returns true if the current user is logged in. + @staticmethod + def has_user() -> bool: + """Has_user.""" + return "token" in g and bool(g.token) and "user" in g and bool(g.user) + + # Returns true if the current user is an admin. 
+ @staticmethod + def user_is_admin() -> bool: + """User_is_admin.""" + return UserService.has_user() and g.user.is_admin() + + # Returns true if the current admin user is impersonating another user. + @staticmethod + def admin_is_impersonating() -> bool: + """Admin_is_impersonating.""" + if UserService.user_is_admin(): + admin_session = UserService.get_admin_session() + return admin_session is not None + + else: + raise ApiError( + "unauthorized", + "You do not have permissions to do this.", + status_code=403, + ) + + # Returns true if the given user uid is different from the current user's uid. + @staticmethod + def is_different_user(uid: str) -> bool: + """Is_different_user.""" + return UserService.has_user() and uid is not None and uid is not g.user.uid + + @staticmethod + def current_user(allow_admin_impersonate: bool = False) -> Any: + """Current_user.""" + if not UserService.has_user(): + raise ApiError( + "logged_out", "You are no longer logged in.", status_code=401 + ) + + # Admins can pretend to be different users and act on a user's behalf in + # some circumstances. + if ( + UserService.user_is_admin() + and allow_admin_impersonate + and UserService.admin_is_impersonating() + ): + return UserService.get_admin_session_user() + else: + return g.user + + # Admins can pretend to be different users and act on a user's behalf in some circumstances. + # This method allows an admin user to start impersonating another user with the given uid. + # Stops impersonating if the uid is None or invalid. 
+ @staticmethod + def start_impersonating(uid: Optional[str] = None) -> None: + """Start_impersonating.""" + if not UserService.has_user(): + raise ApiError( + "logged_out", "You are no longer logged in.", status_code=401 + ) + + if not UserService.user_is_admin(): + raise ApiError( + "unauthorized", + "You do not have permissions to do this.", + status_code=403, + ) + + if uid is None: + raise ApiError("invalid_uid", "Please provide a valid user uid.") + + if UserService.is_different_user(uid): + # Impersonate the user if the given uid is valid. + impersonate_user = ( + db.session.query(UserModel).filter(UserModel.uid == uid).first() + ) + + if impersonate_user is not None: + g.impersonate_user = impersonate_user + + # Store the uid and user session token. + db.session.query(AdminSessionModel).filter( + AdminSessionModel.token == g.token + ).delete() + db.session.add( + AdminSessionModel(token=g.token, admin_impersonate_uid=uid) + ) + db.session.commit() + else: + raise ApiError("invalid_uid", "The uid provided is not valid.") + + @staticmethod + def stop_impersonating() -> None: + """Stop_impersonating.""" + if not UserService.has_user(): + raise ApiError( + "logged_out", "You are no longer logged in.", status_code=401 + ) + + # Clear out the current impersonating user. + if "impersonate_user" in g: + del g.impersonate_user + + admin_session = UserService.get_admin_session() + if admin_session: + db.session.delete(admin_session) + db.session.commit() + + @staticmethod + def in_list(uids: list[str], allow_admin_impersonate: bool = False) -> bool: + """Returns true if the current user's id is in the given list of ids. + + False if there is no user, or the user is not in the list. + """ + if ( + UserService.has_user() + ): # If someone is logged in, lock tasks that don't belong to them. 
+ user = UserService.current_user(allow_admin_impersonate) + if user.uid in uids: + return True + return False + + @staticmethod + def get_admin_session() -> Any: + """Get_admin_session.""" + if UserService.user_is_admin(): + return ( + db.session.query(AdminSessionModel) + .filter(AdminSessionModel.token == g.token) + .first() + ) + else: + raise ApiError( + "unauthorized", + "You do not have permissions to do this.", + status_code=403, + ) + + @staticmethod + def get_admin_session_user() -> Any: + """Get_admin_session_user.""" + if UserService.user_is_admin(): + admin_session = UserService.get_admin_session() + + if admin_session is not None: + return ( + db.session.query(UserModel) + .filter(UserModel.uid == admin_session.admin_impersonate_uid) + .first() + ) + else: + raise ApiError( + "unauthorized", + "You do not have permissions to do this.", + status_code=403, + ) + + @staticmethod + def get_principal_by_user_id(user_id: int) -> PrincipalModel: + """Get_principal_by_user_id.""" + principal: PrincipalModel = ( + db.session.query(PrincipalModel) + .filter(PrincipalModel.user_id == user_id) + .first() + ) + if principal: + return principal + raise ApiError( + error_code="no_principal_found", + message=f"No principal was found for user_id: {user_id}", + ) + + @classmethod + def create_principal(cls, user_id: int) -> PrincipalModel: + """Create_principal.""" + principal: Optional[PrincipalModel] = PrincipalModel.query.filter_by( + user_id=user_id + ).first() + if principal is None: + principal = PrincipalModel(user_id=user_id) + db.session.add(principal) + try: + db.session.commit() + except Exception as e: + db.session.rollback() + current_app.logger.error(f"Exception in create_principal: {e}") + raise ApiError( + error_code="add_principal_error", + message=f"Could not create principal {user_id}", + ) from e + return principal + + @classmethod + def add_user_to_group(cls, user: UserModel, group: GroupModel) -> None: + """Add_user_to_group.""" + ugam = 
UserGroupAssignmentModel(user_id=user.id, group_id=group.id) + db.session.add(ugam) + db.session.commit() diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..218723b7 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Test suite for the spiffworkflow_backend package.""" diff --git a/tests/data/call_activity_duplicate/call_activity_nested_duplicate.bpmn b/tests/data/call_activity_duplicate/call_activity_nested_duplicate.bpmn new file mode 100644 index 00000000..5997fd9d --- /dev/null +++ b/tests/data/call_activity_duplicate/call_activity_nested_duplicate.bpmn @@ -0,0 +1,72 @@ + + + + + Flow_1g3dpd7 + + + + Flow_1g3dpd7 + Flow_0qdgvah + + + + # Main Workflow +Hello {{my_other_var}} + + + Flow_04o2npf + + + + Flow_0qdgvah + Flow_0upce00 + + + + + Level2c + + Flow_0upce00 + Flow_04o2npf + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_nested/call_activity_level_2.bpmn b/tests/data/call_activity_nested/call_activity_level_2.bpmn new file mode 100644 index 00000000..24da04a5 --- /dev/null +++ b/tests/data/call_activity_nested/call_activity_level_2.bpmn @@ -0,0 +1,55 @@ + + + + + Flow_1g3dpd7 + + + + Flow_1g3dpd7 + Flow_0qdgvah + + + + # Main Workflow +Hello {{my_other_var}} + + + Flow_1ll6j9j + + + + Flow_0qdgvah + Flow_1ll6j9j + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_nested/call_activity_level_2b.bpmn b/tests/data/call_activity_nested/call_activity_level_2b.bpmn new file mode 100644 index 00000000..9530bf35 --- /dev/null +++ b/tests/data/call_activity_nested/call_activity_level_2b.bpmn @@ -0,0 +1,44 @@ + + + + + Flow_1g3dpd7 + + + + # Main Workflow +Hello {{my_other_var}} + + + Flow_0l0w6u9 + + + + Flow_1g3dpd7 + Flow_0l0w6u9 + a = 1 + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_nested/call_activity_level_3.bpmn 
b/tests/data/call_activity_nested/call_activity_level_3.bpmn new file mode 100644 index 00000000..742407de --- /dev/null +++ b/tests/data/call_activity_nested/call_activity_level_3.bpmn @@ -0,0 +1,43 @@ + + + + + Flow_1g3dpd7 + + + + + # Main Workflow +Hello {{my_other_var}} + + + Flow_0qdgvah + + + Flow_1g3dpd7 + Flow_0qdgvah + a = 3 + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_nested/call_activity_nested.bpmn b/tests/data/call_activity_nested/call_activity_nested.bpmn new file mode 100644 index 00000000..72b9a183 --- /dev/null +++ b/tests/data/call_activity_nested/call_activity_nested.bpmn @@ -0,0 +1,55 @@ + + + + + Flow_1g3dpd7 + + + + Flow_1g3dpd7 + Flow_0qdgvah + + + + # Main Workflow + Hello {{my_other_var}} + + + Flow_0upce00 + + + + Flow_0qdgvah + Flow_0upce00 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_nested/level2c.dmn b/tests/data/call_activity_nested/level2c.dmn new file mode 100644 index 00000000..ff7d9368 --- /dev/null +++ b/tests/data/call_activity_nested/level2c.dmn @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_same_directory/call_activity_test.bpmn b/tests/data/call_activity_same_directory/call_activity_test.bpmn new file mode 100644 index 00000000..e193921d --- /dev/null +++ b/tests/data/call_activity_same_directory/call_activity_test.bpmn @@ -0,0 +1,39 @@ + + + + + Flow_1rcteeq + + + + Flow_1rcteeq + Flow_1rid3w7 + + + Flow_1rid3w7 + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/call_activity_same_directory/callable_process.bpmn b/tests/data/call_activity_same_directory/callable_process.bpmn new file mode 100644 index 00000000..7eb4f3d0 --- /dev/null +++ b/tests/data/call_activity_same_directory/callable_process.bpmn @@ -0,0 +1,26 @@ + + + + + Flow_0ogjqo9 + + + Flow_0ogjqo9 + + + + + + + + + + + + + + + + + + diff --git a/tests/data/dangerous-scripts/read_env.bpmn 
b/tests/data/dangerous-scripts/read_env.bpmn new file mode 100644 index 00000000..1c5449b5 --- /dev/null +++ b/tests/data/dangerous-scripts/read_env.bpmn @@ -0,0 +1,42 @@ + + + + + Flow_1oq5kne + + + + Flow_1r45j8e + + + + Flow_1oq5kne + Flow_1r45j8e + import os + +env = os.environ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/dangerous-scripts/read_etc_passwd.bpmn b/tests/data/dangerous-scripts/read_etc_passwd.bpmn new file mode 100644 index 00000000..40f5eda4 --- /dev/null +++ b/tests/data/dangerous-scripts/read_etc_passwd.bpmn @@ -0,0 +1,42 @@ + + + + + Flow_1oq5kne + + + + Flow_1r45j8e + + + + Flow_1oq5kne + Flow_1r45j8e + user_list = open('/etc/passwd').read() + +env = os.environ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/dot_notation/diagram.bpmn b/tests/data/dot_notation/diagram.bpmn new file mode 100644 index 00000000..cb623748 --- /dev/null +++ b/tests/data/dot_notation/diagram.bpmn @@ -0,0 +1,74 @@ + + + + + to + + + from.name + + + + + {"to": "the_recipient1" } + + + + + {"from": {"name": "the_sender"}} + + + + + new + + + + + Flow_0dbnzbi + + + + Flow_0nt355i + + + + + + + + + + Flow_0dbnzbi + Flow_0nt355i + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/dot_notation/json_schema.json b/tests/data/dot_notation/json_schema.json new file mode 100644 index 00000000..63c782c5 --- /dev/null +++ b/tests/data/dot_notation/json_schema.json @@ -0,0 +1,36 @@ +{ + "title": "Submit Invoice", + "description": "Information for submitting an invoice.", + "type": "object", + "required": [ + "invoice.contributorName", + "invoice.contributorId", + "invoice.invoiceId", + "invoice.invoiceAmount", + "invoice.dueDate" + ], + "properties": { + "invoice.contributorName": { + "type": "string", + "title": "Contributor Name" + }, + "invoice.contributorId": { + "type": "integer", + "title": "Contributor Id", + "minLength": 2 + }, + "invoice.invoiceId": { + "type": "integer", + "title": 
"Invoice Id", + "minLength": 4 + }, + "invoice.invoiceAmount": { + "type": "number", + "title": "Invoice Amount" + }, + "invoice.dueDate": { + "type": "string", + "title": "Due Date" + } + } +} diff --git a/tests/data/dot_notation/ui_schema.json b/tests/data/dot_notation/ui_schema.json new file mode 100644 index 00000000..49dc9920 --- /dev/null +++ b/tests/data/dot_notation/ui_schema.json @@ -0,0 +1,19 @@ +{ + "invoice.contributorName": { + "ui:autofocus": true, + "ui:emptyValue": "" + }, + "invoice.contributorId": { + "ui:emptyValue": "Enter your Status Contributor ID" + }, + "invoice.invoiceId": { + "ui:title": "Status Invoice ID", + "ui:description": "Enter the Status Invoice ID found in the upper left" + }, + "invoice.invoiceAmount": { + "ui:help": "Amount in Euros" + }, + "invoice.dueDate": { + "ui:widget": "date" + } +} diff --git a/tests/data/dynamic_enum_select_fields/color_question.json b/tests/data/dynamic_enum_select_fields/color_question.json new file mode 100644 index 00000000..20ea1c12 --- /dev/null +++ b/tests/data/dynamic_enum_select_fields/color_question.json @@ -0,0 +1,18 @@ +{ + "definitions": { + "Color": { + "title": "Color", + "type": "string", + "anyOf": ["options_from_task_data_var:awesome_color_options"] + } + }, + "title": "Select Color", + "type": "object", + "required": ["selectedColor"], + "properties": { + "selectedColor": { + "$ref": "#/definitions/Color", + "title": "Select color" + } + } +} diff --git a/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn b/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn new file mode 100644 index 00000000..7d21851b --- /dev/null +++ b/tests/data/dynamic_enum_select_fields/dynamic_enums_ask_for_color.bpmn @@ -0,0 +1,57 @@ + + + + + Flow_1my9ag5 + + + + + Flow_13mlau2 + + + + Flow_1my9ag5 + Flow_0b04rbg + awesome_color_options = [{"value": "blue", "label": "Blue"}, {"value": "green", "label": "Green"}] + + + + + + + + Flow_0b04rbg + Flow_13mlau2 + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/error/error.bpmn b/tests/data/error/error.bpmn new file mode 100644 index 00000000..14d1386a --- /dev/null +++ b/tests/data/error/error.bpmn @@ -0,0 +1,39 @@ + + + + + Flow_094epta + + + + Flow_1qfek48 + + + + Flow_094epta + Flow_1qfek48 + 'a' + 1 + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/hello_world/hello_world.bpmn b/tests/data/hello_world/hello_world.bpmn new file mode 100644 index 00000000..1e5bc853 --- /dev/null +++ b/tests/data/hello_world/hello_world.bpmn @@ -0,0 +1,91 @@ + + + + This workflow asks for a name and says hello + + SequenceFlow_0qyd2b7 + + + + Flow_1d27j6f + + + SequenceFlow_0qyd2b7 + Flow_1d27j6f + + Flow_1s17tt0 + + + + Flow_0bazl8x + Flow_1mcaszp + a = 1 + + + Flow_1mcaszp + + + + + Flow_1s17tt0 + Flow_0bazl8x + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/message_send_one_conversation/message_receiver.bpmn b/tests/data/message_send_one_conversation/message_receiver.bpmn new file mode 100644 index 00000000..c82aa363 --- /dev/null +++ b/tests/data/message_send_one_conversation/message_receiver.bpmn @@ -0,0 +1,99 @@ + + + + + + + + + message_correlation_property_topica + message_correlation_property_topicb + + + + + topica + + + the_payload.topica + + + + + topicb + + + the_payload.topicb + + + + + the_payload + + + + + { "the_payload": { +"topica": the_payload.topica, +"topicb": the_payload.topicb, +}} + + + + + + + Flow_11r9uiw + + + Flow_0fruoax + Flow_11r9uiw + + + Flow_0fruoax + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/message_send_one_conversation/message_sender.bpmn b/tests/data/message_send_one_conversation/message_sender.bpmn new file mode 100644 index 00000000..a735d1ad --- /dev/null +++ b/tests/data/message_send_one_conversation/message_sender.bpmn @@ -0,0 +1,136 
@@ + + + + + + + + + message_correlation_property_topica + message_correlation_property_topicb + + + + + topica + + + the_payload.topica + + + + + topicb + + + the_payload.topicb + + + + + Flow_10conab + + + + + + Flow_1qgz6p0 + + + Flow_037vpjk + Flow_1qgz6p0 + + + + + the_topic = "first_conversation" + + Flow_1ihr88m + Flow_037vpjk + + + + Flow_10conab + Flow_1ihr88m + +timestamp = time.time() +the_topica = f"first_conversation_a_{timestamp}" +the_topicb = f"first_conversation_b_{timestamp}" +del time + + + + + { +"topica": the_topica, +"topicb": the_topicb, +} + + + + + the_payload + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/message_send_two_conversations/message_receiver_one.bpmn b/tests/data/message_send_two_conversations/message_receiver_one.bpmn new file mode 100644 index 00000000..c8bb821a --- /dev/null +++ b/tests/data/message_send_two_conversations/message_receiver_one.bpmn @@ -0,0 +1,114 @@ + + + + + + + + + mcp_topica_one + mcp_topicb_one + + + + + topica_one + + + topica_one + + + + + topicb_one + + + topicb_one + + + + + payload_var_one + + + + + { +"topica_one": payload_var_one.topica_one, +"topicb_one": payload_var_one.topicb_one, +"second_var_one": second_var_one +} + + + + + + Flow_11r9uiw + + + Flow_197lbl3 + Flow_11r9uiw + + + + + Flow_0rx0bxv + Flow_197lbl3 + second_var_one = payload_var_one.initial_var_one + 1 + + + Flow_0rx0bxv + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/message_send_two_conversations/message_receiver_two.bpmn b/tests/data/message_send_two_conversations/message_receiver_two.bpmn new file mode 100644 index 00000000..3cb82942 --- /dev/null +++ b/tests/data/message_send_two_conversations/message_receiver_two.bpmn @@ -0,0 +1,114 @@ + + + + + + + + + mcp_topica_two + mcp_topicb_two + + + + + topica_two + + + topica_two + + + + + 
topicb_two + + + topicb_two + + + + + payload_var_two + + + + + { +"topica_two": payload_var_two.topica_two, +"topicb_two": payload_var_two.topicb_two, +"second_var_two": second_var_two +} + + + + + + Flow_11r9uiw + + + Flow_197lbl3 + Flow_11r9uiw + + + + + Flow_0rx0bxv + Flow_197lbl3 + second_var_two = payload_var_two.initial_var_two + 1 + + + Flow_0rx0bxv + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/message_send_two_conversations/message_sender.bpmn b/tests/data/message_send_two_conversations/message_sender.bpmn new file mode 100644 index 00000000..38359c92 --- /dev/null +++ b/tests/data/message_send_two_conversations/message_sender.bpmn @@ -0,0 +1,231 @@ + + + + + + + + + + + + mcp_topica_one + mcp_topicb_one + + + mcp_topica_two + mcp_topicb_two + + + + + topica_one + + + payload_var_one.topica_one + + + + + topicb_one + + + payload_var_one.topicb + + + + + + + + + + + Flow_0sxqx67 + + + Flow_01u8qkn + Flow_0sxqx67 + + + + Flow_1yt3owq + Flow_01u8qkn + + + Flow_10conab + + + Flow_10conab + Flow_1ihr88m + +timestamp = time.time() +topic_one_a = f"topic_one_a_conversation_{timestamp}" +topic_one_b = f"topic_one_b_conversation_{timestamp}" +del time + + + + the_topic = "first_conversation" + + Flow_1ihr88m + Flow_0n4m9ti + + + + Flow_0q3clix + Flow_1yt3owq + + + + Flow_0n4m9ti + Flow_0q3clix + +timestamp = time.time() +topic_two_a = f"topic_two_a_conversation_{timestamp}" +topic_two_b = f"topic_two_b_conversation_{timestamp}" +del time + + + + + { +"topica_one": topic_one_a, +"topicb_one": topic_one_b, +"initial_var_one": 3 +} + + + + + payload_var_one + + + + + payload_var_two + + + + + { +"topica_two": topic_two_a, +"topicb_two": topic_two_b, +"initial_var_two": 5 +} + + + + + topica_two + + + topica_two + + + + + topicb_two + + + topicb_two + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/random_fact/random_fact.bpmn b/tests/data/random_fact/random_fact.bpmn new file mode 100644 index 00000000..7e92bba4 --- /dev/null +++ b/tests/data/random_fact/random_fact.bpmn @@ -0,0 +1,200 @@ + + + + + SequenceFlow_0c7wlth + + + # h1 Heading 8-) +## h2 Heading +### h3 Heading +#### h4 Heading +##### h5 Heading +###### h6 Heading + + +## Horizontal Rules + +___ + +--- + +*** + + +## Typographic replacements + +"double quotes" and 'single quotes' + + +## Emphasis + +**This is bold text** + +__This is bold text__ + +*This is italic text* + +_This is italic text_ + +~~Strikethrough~~ + + +## Blockquotes + + +> Blockquotes can also be nested... +>> ...by using additional greater-than signs right next to each other... +> > > ...or with spaces between arrows. + + +## Lists + +Unordered + ++ Create a list by starting a line with `+`, `-`, or `*` ++ Sub-lists are made by indenting 2 spaces: + - Marker character change forces new list start: + * Ac tristique libero volutpat at + + Facilisis in pretium nisl aliquet + - Nulla volutpat aliquam velit ++ Very easy! + +Ordered + +1. Lorem ipsum dolor sit amet +2. Consectetur adipiscing elit +3. Integer molestie lorem at massa + + +1. You can use sequential numbers... +1. ...or keep all the numbers as `1.` + +Start numbering with offset: + +57. foo +1. bar + +## Tables + +| Option | Description | +| ------ | ----------- | +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. Handlebars is the default. | +| ext | extension to be used for dest files. | + +Right aligned columns + +| Option | Description | +| ------:| -----------:| +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. Handlebars is the default. | +| ext | extension to be used for dest files. 
| + + +## Links + +[link text](http://dev.nodeca.com) + +[link with title](http://nodeca.github.io/pica/demo/ "title text!") + +Autoconverted link https://github.com/nodeca/pica (enable linkify to see) + + +## Images + +![Minion](https://octodex.github.com/images/minion.png) +![Stormtroopocat](https://octodex.github.com/images/stormtroopocat.jpg "The Stormtroopocat") + + + + + + + + + + + + + + + + SequenceFlow_0c7wlth + SequenceFlow_0641sh6 + + + + + + + + + SequenceFlow_0641sh6 + SequenceFlow_0t29gjo + FactService = fact_service() + + + # Great Job! +You have completed the random fact generator. +You chose to receive a random fact of the type: "{{type}}" + +Your random fact is: +{{details}} + SequenceFlow_0t29gjo + + + + + + User sets the Fact.type to cat, norris, or buzzword + + + + Makes an API  call to get a fact of the required type. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/random_fact/random_fact2.bpmn b/tests/data/random_fact/random_fact2.bpmn new file mode 100644 index 00000000..22d580f2 --- /dev/null +++ b/tests/data/random_fact/random_fact2.bpmn @@ -0,0 +1,200 @@ + + + + + SequenceFlow_0c7wlth + + + # h1 Heading 8-) +## h2 Heading +### h3 Heading +#### h4 Heading +##### h5 Heading +###### h6 Heading + + +## Horizontal Rules + +___ + +--- + +*** + + +## Typographic replacements + +"double quotes" and 'single quotes' + + +## Emphasis + +**This is bold text** + +__This is bold text__ + +*This is italic text* + +_This is italic text_ + +~~Strikethrough~~ + + +## Blockquotes + + +> Blockquotes can also be nested... +>> ...by using additional greater-than signs right next to each other... +> > > ...or with spaces between arrows. 
+ + +## Lists + +Unordered + ++ Create a list by starting a line with `+`, `-`, or `*` ++ Sub-lists are made by indenting 2 spaces: + - Marker character change forces new list start: + * Ac tristique libero volutpat at + + Facilisis in pretium nisl aliquet + - Nulla volutpat aliquam velit ++ Very easy! + +Ordered + +1. Lorem ipsum dolor sit amet +2. Consectetur adipiscing elit +3. Integer molestie lorem at massa + + +1. You can use sequential numbers... +1. ...or keep all the numbers as `1.` + +Start numbering with offset: + +57. foo +1. bar + +## Tables + +| Option | Description | +| ------ | ----------- | +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. Handlebars is the default. | +| ext | extension to be used for dest files. | + +Right aligned columns + +| Option | Description | +| ------:| -----------:| +| data | path to data files to supply the data that will be passed into templates. | +| engine | engine to be used for processing templates. Handlebars is the default. | +| ext | extension to be used for dest files. | + + +## Links + +[link text](http://dev.nodeca.com) + +[link with title](http://nodeca.github.io/pica/demo/ "title text!") + +Autoconverted link https://github.com/nodeca/pica (enable linkify to see) + + +## Images + +![Minion](https://octodex.github.com/images/minion.png) +![Stormtroopocat](https://octodex.github.com/images/stormtroopocat.jpg "The Stormtroopocat") + + + + + + + + + + + + + + + + SequenceFlow_0c7wlth + SequenceFlow_0641sh6 + + + + + + + + + SequenceFlow_0641sh6 + SequenceFlow_0t29gjo + FactService = fact_service() + + + # Great Job! +You have completed the random fact generator. +You chose to receive a random fact of the type: "{{type}}" + +Your random fact is: +{{details}} + SequenceFlow_0t29gjo + + + + + + User sets the Fact.type to cat, norris, or buzzword + + + + Makes an API  call to get a fact of the required type. 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/sample/sample.bpmn b/tests/data/sample/sample.bpmn new file mode 100644 index 00000000..2081217b --- /dev/null +++ b/tests/data/sample/sample.bpmn @@ -0,0 +1,62 @@ + + + + + Flow_10jwwqy + + + + Flow_1hd6o66 + + + + Flow_10jwwqy + Flow_0htxke7 + my_var = "Hello World" +Mike = "Awesome" +person = "Kevin" + + + + + wonderful + + Flow_0htxke7 + Flow_1hd6o66 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/sample/wonderful.dmn b/tests/data/sample/wonderful.dmn new file mode 100644 index 00000000..2d6e13df --- /dev/null +++ b/tests/data/sample/wonderful.dmn @@ -0,0 +1,44 @@ + + + + + + + person + + + + + + "Dan" + + + "pretty wonderful" + + + + + "Kevin" + + + "Very wonderful" + + + + + "Mike" + + + "Powerful wonderful" + + + + + + + + + + + + diff --git a/tests/data/script_with_unit_tests/script_with_unit_tests.bpmn b/tests/data/script_with_unit_tests/script_with_unit_tests.bpmn new file mode 100644 index 00000000..0b93cf86 --- /dev/null +++ b/tests/data/script_with_unit_tests/script_with_unit_tests.bpmn @@ -0,0 +1,70 @@ + + + + + Flow_10jwwqy + + + + Flow_0htxke7 + + + + + + + {"hey": false} + {"hey": true} + + + {} + {"something_else": true} + + + + Flow_0niwe1y + Flow_0htxke7 + try: + if not hey: + hey = True +except: + something_else = True + + + + Flow_10jwwqy + Flow_0niwe1y + hey = False + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/simple_script/simple_script.bpmn b/tests/data/simple_script/simple_script.bpmn new file mode 100644 index 00000000..ab573ce4 --- /dev/null +++ b/tests/data/simple_script/simple_script.bpmn @@ -0,0 +1,67 @@ + + + + + Flow_1k9q28c + + + + + Flow_10610n2 + + + + Flow_1k9q28c + Flow_1fviiob + a = 1 +b = 2 +c = a + b +norris=fact_service(type='norris') + + + ## Display Data + + +### a +{{ a }} + + +### b +{{ b }} + + +### c +{{ c }} + 
Flow_1fviiob + Flow_10610n2 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/spiff_example/call_activity_multi.bpmn b/tests/data/spiff_example/call_activity_multi.bpmn new file mode 100644 index 00000000..cf50fd08 --- /dev/null +++ b/tests/data/spiff_example/call_activity_multi.bpmn @@ -0,0 +1,318 @@ + + + + + Flow_104dmrv + Flow_0ikn93z + Flow_1h8w6f7 + + + Flow_1r5bppm + Flow_0uy2bcm + Flow_1gj4orb + + + + + + + + + + + + + + + + + Flow_07gubde + Flow_0w8pc6v + Flow_104dmrv + + + Flow_06gb1zr + + + product_name in [ 'product_c', 'product_d', 'product_e', 'product_f' ] + + + + + + Flow_1h8w6f7 + Flow_16qjxga + Flow_0b4pvj2 + Flow_0apn5fw + + + + + + + + + + + + + + Flow_0ikn93z + Flow_16qjxga + + + + Flow_0b4pvj2 + Flow_1y8t5or + Flow_1r5bppm + Flow_043j5w0 + + + + product_name in [ 'product_d', 'product_e', 'product_f' ] + + + + + + + + + + + + + Flow_0apn5fw + Flow_1y8t5or + + + Flow_10k8ds7 + + + + product_name in [ 'product_e', 'product_f', 'product_g' ] + + + + + + + + + + + + Flow_043j5w0 + Flow_0uy2bcm + + + + + + + + + Flow_1gj4orb + Flow_1f3n00b + + + Flow_0xot77f + Flow_07gubde + Flow_10k8ds7 + + + + Flow_1f3n00b + Flow_0xot77f + products[f'item_{len(products) + 1}'] = { + 'product_name': product_name, + 'product_quantity': product_quantity, + 'product_color': product_color, + 'product_size': product_size, + 'product_style': product_style, + 'product_price': product_price, +} + +product_color = None +product_size = None +product_style = None + + + + Flow_06gb1zr + Flow_0w8pc6v + products = { } +product_color = None +product_size = None +product_style = None + + + continue_shopping == 'N' + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
diff --git a/tests/data/spiff_example/multiinstance.bpmn b/tests/data/spiff_example/multiinstance.bpmn new file mode 100644 index 00000000..6f20f560 --- /dev/null +++ b/tests/data/spiff_example/multiinstance.bpmn @@ -0,0 +1,914 @@ + + + + + + + + + Activity_19sdt5q + Event_1vajwed + Activity_1is8laj + Event_1pekp7a + Activity_0v73ge8 + Activity_1xyyogj + Gateway_0ag7o01 + Event_10yrr8z + Activity_0mi602x + Event_1xcx92w + + + Activity_1p0c9z2 + Gateway_1ohpjhg + Event_00gtyea + Activity_0gol5af + Event_0wdh5sd + Event_0p1446c + Event_05ljcvh + Event_0qk2468 + Event_136gk7h + + + StartEvent_1 + Activity_0t2klx8 + Activity_1f7q7oz + Gateway_1szx3lk + Event_02km2o9 + Activity_1ncg4ro + Activity_0ytnmfa + Event_1yahe0s + Event_1pd719w + Event_1ailogh + + + Event_0avgftb + Activity_1b9i014 + Activity_1mz873i + Event_143f6no + + + + Flow_10i21xl + + + + + + + + Flow_07c9v4j + Flow_1hno9d6 + + + Flow_1p871fh + Flow_0kipsup + + + Flow_1hno9d6 + Flow_1p871fh + Flow_1j0o5jz + + + Flow_0kipsup + Flow_0aei8oa + + + + + + + + + Flow_1pqw9yz + Flow_0aei8oa + Flow_0xe3ahu + + + Flow_10i21xl + Flow_07c9v4j + + Flow_0i0d3q2 + + + Flow_0i0d3q2 + Flow_1i1kc4t + + + Flow_1i1kc4t + Flow_16po4do + Flow_0emp5k2 + + + + + + + + + + + Flow_16po4do + Flow_0di3wyr + + + + + + + + Flow_0emp5k2 + Flow_0q6f23o + + + Flow_0q6f23o + Flow_0di3wyr + Flow_062tuue + + + Flow_062tuue + Flow_0uc6vnt + + + Flow_0uc6vnt + Flow_0rd1dlt + order_total = sum([ p['product_quantity'] * p['product_price'] for p in products.values() ]) + shipping_cost + + + + + + + + + + + + Flow_05m2g52 + Flow_10491an + Flow_1x6xt92 + + + + Order Summary +{% for product in products.values() %} +{{ product.product_name }} +Quantity: {{ product.product_quantity }} +Price: {{ product.product_price }} +{% endfor %} +Shipping Cost: {{ shipping_cost }} +Order Total: {{ order_total }} + + + + + + + + Flow_0rd1dlt + Flow_05m2g52 + + + + Flow_1x6xt92 + + + place_order == 'Y' + + + Flow_10491an + + + + + Flow_0scpwet + Flow_0ajg6w9 + + 
Flow_1k1bdpv + + + + + Flow_0b23bgx + + + + Flow_19v3u5b + Flow_0b23bgx + Flow_0z098re + + + + Flow_0266l2t + Flow_1iq0o3k + Flow_0ld0n9z + + + + any([ product['product_available'] == 'N' for product in products.values() ]) + + + Flow_0ld0n9z + + + + Retrieve {{ product.product_quantity }} of {{ product.product_name }} + + + + + + Flow_1k1bdpv + Flow_0266l2t + + + + + + + + + Flow_1iq0o3k + Flow_1570y49 + Flow_19v3u5b + + + product_shipped == 'N' + + + Flow_0z098re + Flow_08d8k7v + + + + + Flow_08d8k7v + Flow_1570y49 + + timedelta(seconds=10) + + + + + + Flow_1pqw9yz + + + + + + + + + customer_charged == 'Y' + + + + + + + + + + + customer_charged == 'Y' + + + + + + + + + + + + + Flow_1j0o5jz + Flow_0vsfpmk + + + + + + Flow_0vsfpmk + Flow_1p952p9 + Flow_03zlrvf + + + Flow_1p952p9 + Flow_0n7bq3w + Flow_0scpwet + + + + + + + Flow_1ibv0n0 + + + + + + + + + Flow_1ny6y69 + + timedelta(minutes=1) + + + + Flow_0xmh1bh + + + + + + + + + Flow_1ny6y69 + Flow_1dg8d2v + + + Flow_1dg8d2v + Flow_1ftgujt + + + + Flow_1ibv0n0 + Flow_1ftgujt + Flow_0ed065j + + + Flow_0ed065j + + + Flow_0xmh1bh + Flow_0obez9n + + + Flow_0obez9n + Flow_0evkswc + + + Flow_0hogek8 + Flow_1am1v3t + Flow_0n7bq3w + + + Flow_1am1v3t + Flow_0evkswc + + + + + + + + + Flow_03zlrvf + Flow_0hogek8 + + + + + + + + + Flow_1o45y3l + + + + Flow_1o45y3l + Flow_0rfdrxr + + + Flow_0rfdrxr + Flow_0y0t1it + + + Flow_0y0t1it + + + Flow_0ajg6w9 + + + + Flow_0xe3ahu + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/spiff_example/product_prices.dmn b/tests/data/spiff_example/product_prices.dmn new file mode 100644 index 00000000..be07629e --- /dev/null +++ b/tests/data/spiff_example/product_prices.dmn @@ -0,0 +1,83 @@ + + + + + + + product_name + + + + + Product A + + "product_a" + + + 15.00 + + + + Product B + + "product_b" + + + 15.00 + + + + Product C: color + + "product_c" + + + 25.00 + + + + Product D: color, size + + "product_d" + + + 20.00 + + + + Product E: color, size, style + + "product_e" + + + 25.00 + + + + Product F: color, size, style + + "product_f" + + + 30.00 + + + + Product G: style + + "product_g" + + + 25.00 + + + + + + + + + + + + diff --git a/tests/data/spiff_example/shipping_costs.dmn b/tests/data/spiff_example/shipping_costs.dmn new file mode 100644 index 00000000..1f37b7c9 --- /dev/null +++ b/tests/data/spiff_example/shipping_costs.dmn @@ -0,0 +1,38 @@ + + + + + + + shipping_method + + + + + Ground + + "standard" + + + 5.00 + + + + Express + + "overnight" + + + 25.00 + + + + + + + + + + + + diff --git a/tests/data/timer_intermediate_catch_event/timer_intermediate_catch_event.bpmn b/tests/data/timer_intermediate_catch_event/timer_intermediate_catch_event.bpmn new file mode 100644 index 00000000..28312f57 --- /dev/null +++ b/tests/data/timer_intermediate_catch_event/timer_intermediate_catch_event.bpmn @@ -0,0 +1,44 @@ + + + + + Flow_109wuuc + + + + Flow_0cy1fiy + + + + Flow_109wuuc + Flow_0cy1fiy + + timedelta(seconds=30) + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
diff --git a/tests/data/user_task/user_task.bpmn b/tests/data/user_task/user_task.bpmn new file mode 100644 index 00000000..27c15257 --- /dev/null +++ b/tests/data/user_task/user_task.bpmn @@ -0,0 +1,65 @@ + + + + + Flow_0vj0gx7 + + + + ## Name +{{ name }} + Flow_1y1c27e + + + + + + + + + + + + + Flow_0s4f93a + Flow_1y1c27e + + + + Flow_0vj0gx7 + Flow_0s4f93a + a = 1 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/files/.keep b/tests/files/.keep new file mode 100644 index 00000000..e69de29b diff --git a/tests/spiffworkflow_backend/__init__.py b/tests/spiffworkflow_backend/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/tests/spiffworkflow_backend/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/tests/spiffworkflow_backend/helpers/__init__.py b/tests/spiffworkflow_backend/helpers/__init__.py new file mode 100644 index 00000000..14e8999c --- /dev/null +++ b/tests/spiffworkflow_backend/helpers/__init__.py @@ -0,0 +1 @@ +"""Init.""" diff --git a/tests/spiffworkflow_backend/helpers/base_test.py b/tests/spiffworkflow_backend/helpers/base_test.py new file mode 100644 index 00000000..b7ca4dd0 --- /dev/null +++ b/tests/spiffworkflow_backend/helpers/base_test.py @@ -0,0 +1,264 @@ +"""Base_test.""" +import io +import json +import os +import time +from typing import Any +from typing import Dict +from typing import Optional + +from flask import current_app +from flask.app import Flask +from flask.testing import FlaskClient +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec +from werkzeug.test import TestResponse + +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_model import 
NotificationType +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.user_service import UserService + +# from tests.spiffworkflow_backend.helpers.test_data import logged_in_headers + + +class BaseTest: + """BaseTest.""" + + @staticmethod + def find_or_create_user(username: str = "test_user_1") -> UserModel: + """Find_or_create_user.""" + user = UserModel.query.filter_by(username=username).first() + if isinstance(user, UserModel): + return user + + user = UserService.create_user("internal", username, username=username) + if isinstance(user, UserModel): + return user + + raise ApiError( + error_code="create_user_error", + message=f"Cannot find or create user: {username}", + ) + + @staticmethod + def get_open_id_constants(app: Flask) -> tuple: + """Get_open_id_constants.""" + open_id_server_url = app.config["OPEN_ID_SERVER_URL"] + open_id_client_id = app.config["OPEN_ID_CLIENT_ID"] + open_id_realm_name = app.config["OPEN_ID_REALM_NAME"] + open_id_client_secret_key = app.config[ + "OPEN_ID_CLIENT_SECRET_KEY" + ] # noqa: S105 + + return ( + open_id_server_url, + open_id_client_id, + open_id_realm_name, + open_id_client_secret_key, + ) + + def create_process_instance( + self, + client: FlaskClient, + test_process_group_id: str, + test_process_model_id: str, + headers: Dict[str, str], + ) -> TestResponse: + """Create_process_instance.""" + load_test_spec(test_process_model_id, process_group_id=test_process_group_id) + response = client.post( + f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}/process-instances", + headers=headers, + ) + assert response.status_code == 201 + return response + + def create_process_model_with_api( + self, + client: FlaskClient, + 
process_group_id: Optional[str] = None, + process_model_id: str = "make_cookies", + process_model_display_name: str = "Cooooookies", + process_model_description: str = "Om nom nom delicious cookies", + fault_or_suspend_on_exception: str = NotificationType.suspend.value, + exception_notification_addresses: Optional[list] = None, + primary_process_id: Optional[str] = None, + primary_file_name: Optional[str] = None, + ) -> TestResponse: + """Create_process_model.""" + process_model_service = ProcessModelService() + + # make sure we have a group + if process_group_id is None: + process_group_tmp = ProcessGroup( + id="test_cat", + display_name="Test Category", + display_order=0, + admin=False, + ) + process_group = process_model_service.add_process_group(process_group_tmp) + else: + process_group = ProcessModelService().get_process_group(process_group_id) + + if exception_notification_addresses is None: + exception_notification_addresses = [] + model = ProcessModelInfo( + id=process_model_id, + display_name=process_model_display_name, + description=process_model_description, + process_group_id=process_group.id, + standalone=False, + is_review=False, + is_master_spec=False, + libraries=[], + library=False, + primary_process_id=primary_process_id, + primary_file_name=primary_file_name, + fault_or_suspend_on_exception=fault_or_suspend_on_exception, + exception_notification_addresses=exception_notification_addresses, + ) + user = self.find_or_create_user() + response = client.post( + "/v1.0/process-models", + content_type="application/json", + data=json.dumps(ProcessModelInfoSchema().dump(model)), + headers=self.logged_in_headers(user), + ) + assert response.status_code == 201 + return response + + def create_spec_file( + self, + client: FlaskClient, + process_group_id: str = "random_fact", + process_model_id: str = "random_fact", + process_model: Optional[ProcessModelInfo] = None, + file_name: str = "random_fact.svg", + file_data: bytes = b"abcdef", + ) -> Any: + 
"""Test_create_spec_file.""" + if process_model is None: + process_model = load_test_spec( + process_model_id, process_group_id=process_group_id + ) + data = {"file": (io.BytesIO(file_data), file_name)} + user = self.find_or_create_user() + response = client.post( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files", + data=data, + follow_redirects=True, + content_type="multipart/form-data", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 201 + assert response.get_data() is not None + file = json.loads(response.get_data(as_text=True)) + # assert FileType.svg.value == file["type"] + # assert "image/svg+xml" == file["content_type"] + + response = client.get( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{file_name}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + file2 = json.loads(response.get_data(as_text=True)) + assert file["file_contents"] == file2["file_contents"] + return file + + def create_process_group( + self, + client: FlaskClient, + user: Any, + process_group_id: str, + display_name: str = "", + ) -> str: + """Create_process_group.""" + process_group = ProcessGroup( + id=process_group_id, display_name=display_name, display_order=0, admin=False + ) + response = client.post( + "/v1.0/process-groups", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(ProcessGroupSchema().dump(process_group)), + ) + assert response.status_code == 201 + assert response.json is not None + assert response.json["id"] == process_group_id + return process_group_id + + # @staticmethod + # def get_public_access_token(username: str, password: str) -> dict: + # """Get_public_access_token.""" + # public_access_token = PublicAuthenticationService().get_public_access_token( + # username, password + # ) + # return public_access_token + + def create_process_instance_from_process_model( + self, process_model: 
ProcessModelInfo, status: Optional[str] = "not_started" + ) -> ProcessInstanceModel: + """Create_process_instance_from_process_model.""" + user = self.find_or_create_user() + current_time = round(time.time()) + process_instance = ProcessInstanceModel( + status=status, + process_initiator=user, + process_model_identifier=process_model.id, + process_group_identifier=process_model.process_group_id, + updated_at_in_seconds=round(time.time()), + start_in_seconds=current_time - (3600 * 1), + end_in_seconds=current_time - (3600 * 1 - 20), + ) + db.session.add(process_instance) + db.session.commit() + return process_instance + + @staticmethod + def logged_in_headers( + user: UserModel, _redirect_url: str = "http://some/frontend/url" + ) -> Dict[str, str]: + """Logged_in_headers.""" + # if user is None: + # uid = 'test_user' + # user_info = {'uid': 'test_user'} + # else: + # uid = user.uid + # user_info = {'uid': user.uid} + + # query_string = user_info_to_query_string(user_info, redirect_url) + # rv = self.app.get("/v1.0/login%s" % query_string, follow_redirects=False) + # self.assertTrue(rv.status_code == 302) + # self.assertTrue(str.startswith(rv.location, redirect_url)) + # + # user_model = session.query(UserModel).filter_by(uid=uid).first() + # self.assertIsNotNone(user_model.ldap_info.display_name) + # self.assertEqual(user_model.uid, uid) + # self.assertTrue('user' in g, 'User should be in Flask globals') + # user = UserService.current_user(allow_admin_impersonate=True) + # self.assertEqual(uid, user.uid, 'Logged in user should match given user uid') + + return dict(Authorization="Bearer " + user.encode_auth_token()) + + def get_test_data_file_contents( + self, file_name: str, process_model_test_data_dir: str + ) -> bytes: + """Get_test_data_file_contents.""" + file_full_path = os.path.join( + current_app.instance_path, + "..", + "..", + "tests", + "data", + process_model_test_data_dir, + file_name, + ) + with open(file_full_path, "rb") as file: + return file.read() 
diff --git a/tests/spiffworkflow_backend/helpers/example_data.py b/tests/spiffworkflow_backend/helpers/example_data.py new file mode 100644 index 00000000..811d51fe --- /dev/null +++ b/tests/spiffworkflow_backend/helpers/example_data.py @@ -0,0 +1,109 @@ +"""Example_data.""" +import glob +import os +from typing import Optional + +from flask import current_app + +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.spec_file_service import SpecFileService + + +class ExampleDataLoader: + """ExampleDataLoader.""" + + def create_spec( + self, + process_model_id: str, + display_name: str = "", + description: str = "", + master_spec: bool = False, + process_group_id: str = "", + display_order: int = 0, + from_tests: bool = False, + standalone: bool = False, + library: bool = False, + bpmn_file_name: Optional[str] = None, + process_model_source_directory: Optional[str] = None, + ) -> ProcessModelInfo: + """Assumes that a directory exists in static/bpmn with the same name as the given process_model_id. + + further assumes that the [process_model_id].bpmn is the primary file for the process model. + returns an array of data models to be added to the database. 
+ """ + spec = ProcessModelInfo( + id=process_model_id, + display_name=display_name, + description=description, + process_group_id=process_group_id, + display_order=display_order, + is_master_spec=master_spec, + standalone=standalone, + library=library, + is_review=False, + libraries=[], + ) + workflow_spec_service = ProcessModelService() + workflow_spec_service.add_spec(spec) + + bpmn_file_name_with_extension = bpmn_file_name + if not bpmn_file_name_with_extension: + bpmn_file_name_with_extension = process_model_id + + if not bpmn_file_name_with_extension.endswith(".bpmn"): + bpmn_file_name_with_extension += ".bpmn" + + process_model_source_directory_to_use = process_model_source_directory + if not process_model_source_directory_to_use: + process_model_source_directory_to_use = process_model_id + + file_name_matcher = "*.*" + if bpmn_file_name: + file_name_matcher = bpmn_file_name_with_extension + + file_glob = "" + if from_tests: + file_glob = os.path.join( + current_app.instance_path, + "..", + "..", + "tests", + "data", + process_model_source_directory_to_use, + file_name_matcher, + ) + else: + file_glob = os.path.join( + current_app.root_path, + "static", + "bpmn", + process_model_source_directory_to_use, + file_name_matcher, + ) + + files = glob.glob(file_glob) + for file_path in files: + if os.path.isdir(file_path): + continue # Don't try to process sub directories + + filename = os.path.basename(file_path) + # since there are multiple bpmn files in a test data directory, ensure we set the correct one as the primary + is_primary = filename.lower() == bpmn_file_name_with_extension + file = None + try: + file = open(file_path, "rb") + data = file.read() + SpecFileService.add_file( + process_model_info=spec, file_name=filename, binary_data=data + ) + if is_primary: + SpecFileService.process_bpmn_file( + spec, filename, data, set_primary_file=True + ) + workflow_spec_service = ProcessModelService() + workflow_spec_service.save_process_model(spec) + finally: + if 
file: + file.close() + return spec diff --git a/tests/spiffworkflow_backend/helpers/test_data.py b/tests/spiffworkflow_backend/helpers/test_data.py new file mode 100644 index 00000000..aa0d3436 --- /dev/null +++ b/tests/spiffworkflow_backend/helpers/test_data.py @@ -0,0 +1,81 @@ +"""User.""" +from typing import Optional + +from tests.spiffworkflow_backend.helpers.example_data import ExampleDataLoader + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +def assure_process_group_exists(process_group_id: Optional[str] = None) -> ProcessGroup: + """Assure_process_group_exists.""" + process_group = None + process_model_service = ProcessModelService() + if process_group_id is not None: + try: + process_group = process_model_service.get_process_group(process_group_id) + except ProcessEntityNotFoundError: + process_group = None + + if process_group is None: + process_group_id_to_create = "test_process_group" + if process_group_id is not None: + process_group_id_to_create = process_group_id + process_group = ProcessGroup( + id=process_group_id_to_create, + display_name="Test Workflows", + admin=False, + display_order=0, + ) + process_model_service.add_process_group(process_group) + return process_group + + +def load_test_spec( + process_model_id: str, + master_spec: bool = False, + process_group_id: Optional[str] = None, + library: bool = False, + bpmn_file_name: Optional[str] = None, + process_model_source_directory: Optional[str] = None, +) -> ProcessModelInfo: + """Loads a process model into the bpmn dir based on a directory in tests/data.""" + process_group = None + process_model_service = ProcessModelService() + if process_group_id is None: + process_group_id = 
"test_process_group_id" + if not master_spec and not library: + process_group = assure_process_group_exists(process_group_id) + process_group_id = process_group.id + + try: + return process_model_service.get_process_model( + process_model_id, group_id=process_group_id + ) + except ProcessEntityNotFoundError: + spec = ExampleDataLoader().create_spec( + process_model_id=process_model_id, + master_spec=master_spec, + from_tests=True, + display_name=process_model_id, + process_group_id=process_group_id, + library=library, + bpmn_file_name=bpmn_file_name, + process_model_source_directory=process_model_source_directory, + ) + return spec + + +# def user_info_to_query_string(user_info, redirect_url): +# query_string_list = [] +# items = user_info.items() +# for key, value in items: +# query_string_list.append('%s=%s' % (key, urllib.parse.quote(value))) +# +# query_string_list.append('redirect_url=%s' % redirect_url) +# +# return '?%s' % '&'.join(query_string_list) diff --git a/tests/spiffworkflow_backend/integration/__init__.py b/tests/spiffworkflow_backend/integration/__init__.py new file mode 100644 index 00000000..f520b09d --- /dev/null +++ b/tests/spiffworkflow_backend/integration/__init__.py @@ -0,0 +1 @@ +"""__init__.""" diff --git a/tests/spiffworkflow_backend/integration/bank-api-authz-config.json b/tests/spiffworkflow_backend/integration/bank-api-authz-config.json new file mode 100644 index 00000000..5cce87c2 --- /dev/null +++ b/tests/spiffworkflow_backend/integration/bank-api-authz-config.json @@ -0,0 +1,102 @@ +{ + "allowRemoteResourceManagement": true, + "policyEnforcementMode": "ENFORCING", + "resources": [ + { + "name": "Default Resource", + "type": "urn:bank-api:resources:default", + "ownerManagedAccess": false, + "attributes": {}, + "_id": "0f0c6dcf-9b86-419d-8331-ce6dd1f779a1", + "uris": ["/*"] + }, + { + "name": "View Account Resource", + "ownerManagedAccess": false, + "displayName": "View Account Resource", + "attributes": {}, + "_id": 
"6934ad55-cd6a-46d9-8653-7b1966973917", + "uris": ["account/{id}"], + "scopes": [ + { + "name": "account:view" + } + ] + } + ], + "policies": [ + { + "id": "1cec062d-19dd-4896-9ced-07fe20d68b22", + "name": "Default Policy", + "description": "A policy that grants access only for users within this realm", + "type": "js", + "logic": "POSITIVE", + "decisionStrategy": "AFFIRMATIVE", + "config": { + "code": "// by default, grants any permission associated with this policy\n$evaluation.grant();\n" + } + }, + { + "id": "2059c4a3-59d4-4a56-bf31-f861141f515c", + "name": "Only Bank Teller and Account Owner Policy", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"accounts_owner\",\"required\":false},{\"id\":\"bank_teller\",\"required\":false}]" + } + }, + { + "id": "570a1e09-25ad-4da6-ab0a-0b77101176f2", + "name": "Only Account Owner Policy", + "type": "role", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "roles": "[{\"id\":\"accounts_owner\",\"required\":false}]" + } + }, + { + "id": "13494e3d-5e85-43fe-80e9-ab7b6f1191d5", + "name": "Default Permission", + "description": "A permission that applies to the default resource type", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "defaultResourceType": "urn:bank-api:resources:default", + "applyPolicies": "[\"Default Policy\"]" + } + }, + { + "id": "cf04026c-e44f-401f-92e5-5c330dff2831", + "name": "View Account Resource Permission", + "type": "resource", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "resources": "[\"View Account Resource\"]", + "applyPolicies": "[\"Only Bank Teller and Account Owner Policy\"]" + } + }, + { + "id": "6ce39e54-ffe7-4f4e-b689-d190e63e3b2d", + "name": "View Account Scope Permission", + "description": "View Account Scope Permission", + "type": "scope", + "logic": "POSITIVE", + "decisionStrategy": "UNANIMOUS", + "config": { + "scopes": 
"[\"account:view\"]", + "applyPolicies": "[\"Only Account Owner Policy\"]" + } + } + ], + "scopes": [ + { + "id": "326a57f4-c487-4466-8521-d3f0c25c399f", + "name": "account:view" + } + ], + "decisionStrategy": "UNANIMOUS" +} diff --git a/tests/spiffworkflow_backend/integration/bpmn.json b/tests/spiffworkflow_backend/integration/bpmn.json new file mode 100644 index 00000000..65ea9d21 --- /dev/null +++ b/tests/spiffworkflow_backend/integration/bpmn.json @@ -0,0 +1,430 @@ +{ + "spec": { + "name": "sample", + "description": "sample", + "file": "sample.bpmn", + "task_specs": { + "Start": { + "id": "sample_1", + "name": "Start", + "description": "", + "manual": false, + "internal": false, + "position": { "x": 0, "y": 0 }, + "lookahead": 2, + "inputs": [], + "outputs": ["StartEvent_1"], + "typename": "StartTask" + }, + "sample.EndJoin": { + "id": "sample_2", + "name": "sample.EndJoin", + "description": "", + "manual": false, + "internal": false, + "position": { "x": 0, "y": 0 }, + "lookahead": 2, + "inputs": ["Event_1qb1u6a"], + "outputs": ["End"], + "typename": "_EndJoin" + }, + "End": { + "id": "sample_3", + "name": "End", + "description": "", + "manual": false, + "internal": false, + "position": { "x": 0, "y": 0 }, + "lookahead": 2, + "inputs": ["sample.EndJoin"], + "outputs": [], + "typename": "Simple" + }, + "StartEvent_1": { + "id": "sample_4", + "name": "StartEvent_1", + "description": null, + "manual": false, + "internal": false, + "position": { "x": 152.0, "y": 102.0 }, + "lookahead": 2, + "inputs": ["Start"], + "outputs": ["Activity_0pxf6g1"], + "lane": null, + "documentation": null, + "loopTask": false, + "outgoing_sequence_flows": { + "Activity_0pxf6g1": { + "id": "Flow_10jwwqy", + "name": null, + "documentation": null, + "target_task_spec": "Activity_0pxf6g1", + "typename": "SequenceFlow" + } + }, + "outgoing_sequence_flows_by_id": { + "Flow_10jwwqy": { + "id": "Flow_10jwwqy", + "name": null, + "documentation": null, + "target_task_spec": "Activity_0pxf6g1", 
+ "typename": "SequenceFlow" + } + }, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "StartEvent", + "extensions": {} + }, + "Activity_0pxf6g1": { + "id": "sample_5", + "name": "Activity_0pxf6g1", + "description": "My Script", + "manual": false, + "internal": false, + "position": { "x": 240.0, "y": 80.0 }, + "lookahead": 2, + "inputs": ["StartEvent_1"], + "outputs": ["Activity_0a21ntf"], + "lane": null, + "documentation": null, + "loopTask": false, + "outgoing_sequence_flows": { + "Activity_0a21ntf": { + "id": "Flow_0htxke7", + "name": null, + "documentation": null, + "target_task_spec": "Activity_0a21ntf", + "typename": "SequenceFlow" + } + }, + "outgoing_sequence_flows_by_id": { + "Flow_0htxke7": { + "id": "Flow_0htxke7", + "name": null, + "documentation": null, + "target_task_spec": "Activity_0a21ntf", + "typename": "SequenceFlow" + } + }, + "script": "my_var = \"Hello World\"\nMike = \"Awesome\"\nperson = \"Kevin\"", + "typename": "ScriptTask", + "extensions": {} + }, + "Activity_0a21ntf": { + "id": "sample_6", + "name": "Activity_0a21ntf", + "description": "is wonderful?", + "manual": false, + "internal": false, + "position": { "x": 0, "y": 0 }, + "lookahead": 2, + "inputs": ["Activity_0pxf6g1"], + "outputs": ["Event_1qb1u6a"], + "lane": null, + "documentation": null, + "loopTask": false, + "outgoing_sequence_flows": { + "Event_1qb1u6a": { + "id": "Flow_1hd6o66", + "name": null, + "documentation": null, + "target_task_spec": "Event_1qb1u6a", + "typename": "SequenceFlow" + } + }, + "outgoing_sequence_flows_by_id": { + "Flow_1hd6o66": { + "id": "Flow_1hd6o66", + "name": null, + "documentation": null, + "target_task_spec": "Event_1qb1u6a", + "typename": "SequenceFlow" + } + }, + "decision_table": { + "id": "DecisionTable_02m334z", + "name": "", + "inputs": [ + { + "id": "Input_1", + "label": "", + "name": "", + "expression": "person", + "typeRef": "string" + } + ], + "outputs": [ + { + 
"id": "Output_1", + "label": "", + "name": "wonderfulness", + "typeRef": "string" + } + ], + "rules": [ + { + "id": "DecisionRule_1hnzjl8", + "description": "", + "input_entries": [ + { + "id": "UnaryTests_1pb0ou0", + "input_id": "Input_1", + "description": "", + "lhs": ["\"Dan\""] + } + ], + "output_entries": [ + { + "id": "LiteralExpression_0rtsxgu", + "output_id": "Output_1", + "description": "", + "text": "\"pretty wonderful\"" + } + ] + }, + { + "id": "DecisionRule_1cc73tk", + "description": "", + "input_entries": [ + { + "id": "UnaryTests_1fbj1wn", + "input_id": "Input_1", + "description": "", + "lhs": ["\"Kevin\""] + } + ], + "output_entries": [ + { + "id": "LiteralExpression_1d9nsnp", + "output_id": "Output_1", + "description": "", + "text": "\"Very wonderful\"" + } + ] + }, + { + "id": "DecisionRule_09y32t9", + "description": "", + "input_entries": [ + { + "id": "UnaryTests_05h3qcx", + "input_id": "Input_1", + "description": "", + "lhs": ["\"Mike\""] + } + ], + "output_entries": [ + { + "id": "LiteralExpression_15b83d9", + "output_id": "Output_1", + "description": "", + "text": "\"Powerful wonderful\"" + } + ] + } + ] + }, + "typename": "BusinessRuleTask", + "extensions": {} + }, + "Event_1qb1u6a": { + "id": "sample_7", + "name": "Event_1qb1u6a", + "description": null, + "manual": false, + "internal": false, + "position": { "x": 802.0, "y": 92.0 }, + "lookahead": 2, + "inputs": ["Activity_0a21ntf"], + "outputs": ["sample.EndJoin"], + "lane": null, + "documentation": null, + "loopTask": false, + "outgoing_sequence_flows": { + "sample.EndJoin": { + "id": "Event_1qb1u6a.ToEndJoin", + "name": null, + "documentation": null, + "target_task_spec": "sample.EndJoin", + "typename": "SequenceFlow" + } + }, + "outgoing_sequence_flows_by_id": { + "Event_1qb1u6a.ToEndJoin": { + "id": "Event_1qb1u6a.ToEndJoin", + "name": null, + "documentation": null, + "target_task_spec": "sample.EndJoin", + "typename": "SequenceFlow" + } + }, + "event_definition": { + "internal": 
false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "EndEvent", + "extensions": {} + }, + "Root": { + "id": "sample_8", + "name": "Root", + "description": "", + "manual": false, + "internal": false, + "position": { "x": 0, "y": 0 }, + "lookahead": 2, + "inputs": [], + "outputs": [], + "typename": "Simple" + } + }, + "typename": "BpmnProcessSpec" + }, + "data": { + "validate_only": false, + "process_instance_id": 63, + "current_user": { "username": "test_user1", "id": "1" }, + "my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin", + "wonderfulness": "Very wonderful" + }, + "last_task": "11314c29-ab0c-447d-82c0-48d824b017ec", + "success": true, + "tasks": { + "4e227637-6ded-431a-ad6a-dd57d7b87b38": { + "id": "4e227637-6ded-431a-ad6a-dd57d7b87b38", + "parent": null, + "children": ["69a8b231-43fd-4faa-8343-de37d7985824"], + "last_state_change": 1654725001.792306, + "state": 32, + "task_spec": "Root", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": {} + }, + "69a8b231-43fd-4faa-8343-de37d7985824": { + "id": "69a8b231-43fd-4faa-8343-de37d7985824", + "parent": "4e227637-6ded-431a-ad6a-dd57d7b87b38", + "children": ["348a4290-c626-4a3d-9bf0-c25f09c5e448"], + "last_state_change": 1654725001.7997818, + "state": 32, + "task_spec": "Start", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { "current_user": { "username": "test_user1", "id": "1" } } + }, + "348a4290-c626-4a3d-9bf0-c25f09c5e448": { + "id": "348a4290-c626-4a3d-9bf0-c25f09c5e448", + "parent": "69a8b231-43fd-4faa-8343-de37d7985824", + "children": ["dc89284f-e574-4e6b-9c74-e79db384beba"], + "last_state_change": 1654725001.8002229, + "state": 32, + "task_spec": "StartEvent_1", + "triggered": false, + "workflow_name": "sample", + "internal_data": { "event_fired": true }, + "data": { "current_user": { "username": "test_user1", "id": "1" } } + }, + "dc89284f-e574-4e6b-9c74-e79db384beba": { + "id": 
"dc89284f-e574-4e6b-9c74-e79db384beba", + "parent": "348a4290-c626-4a3d-9bf0-c25f09c5e448", + "children": ["f014d940-4023-4676-8a22-8ab82501cb6a"], + "last_state_change": 1654725001.800621, + "state": 32, + "task_spec": "Activity_0pxf6g1", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { + "current_user": { "username": "test_user1", "id": "1" }, + "my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin" + } + }, + "f014d940-4023-4676-8a22-8ab82501cb6a": { + "id": "f014d940-4023-4676-8a22-8ab82501cb6a", + "parent": "dc89284f-e574-4e6b-9c74-e79db384beba", + "children": ["5c7368cf-6b56-4d40-a0b2-e6ccd1115cb1"], + "last_state_change": 1654725001.801095, + "state": 32, + "task_spec": "Activity_0a21ntf", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { + "current_user": { "username": "test_user1", "id": "1" }, + "my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin", + "wonderfulness": "Very wonderful" + } + }, + "5c7368cf-6b56-4d40-a0b2-e6ccd1115cb1": { + "id": "5c7368cf-6b56-4d40-a0b2-e6ccd1115cb1", + "parent": "f014d940-4023-4676-8a22-8ab82501cb6a", + "children": ["6487a111-1427-4de4-aa0e-bdb3b1ccba01"], + "last_state_change": 1654725001.8016891, + "state": 32, + "task_spec": "Event_1qb1u6a", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { + "current_user": { "username": "test_user1", "id": "1" }, + "my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin", + "wonderfulness": "Very wonderful" + } + }, + "6487a111-1427-4de4-aa0e-bdb3b1ccba01": { + "id": "6487a111-1427-4de4-aa0e-bdb3b1ccba01", + "parent": "5c7368cf-6b56-4d40-a0b2-e6ccd1115cb1", + "children": ["11314c29-ab0c-447d-82c0-48d824b017ec"], + "last_state_change": 1654725001.8022258, + "state": 32, + "task_spec": "sample.EndJoin", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { + "current_user": { "username": "test_user1", "id": "1" }, + 
"my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin", + "wonderfulness": "Very wonderful" + } + }, + "11314c29-ab0c-447d-82c0-48d824b017ec": { + "id": "11314c29-ab0c-447d-82c0-48d824b017ec", + "parent": "6487a111-1427-4de4-aa0e-bdb3b1ccba01", + "children": [], + "last_state_change": 1654725001.802538, + "state": 32, + "task_spec": "End", + "triggered": false, + "workflow_name": "sample", + "internal_data": {}, + "data": { + "current_user": { "username": "test_user1", "id": "1" }, + "my_var": "Hello World", + "Mike": "Awesome", + "person": "Kevin", + "wonderfulness": "Very wonderful" + } + } + }, + "root": "4e227637-6ded-431a-ad6a-dd57d7b87b38", + "subprocess_specs": {}, + "subprocesses": {}, + "serializer_version": "1.0-CRC" +} diff --git a/tests/spiffworkflow_backend/integration/test_authentication.py b/tests/spiffworkflow_backend/integration/test_authentication.py new file mode 100644 index 00000000..934c1b24 --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_authentication.py @@ -0,0 +1,174 @@ +"""Test_authentication.""" +import ast +import base64 + +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.authentication_service import ( + PublicAuthenticationService, +) + + +class TestAuthentication(BaseTest): + """TestAuthentication.""" + + def test_get_login_state(self) -> None: + """Test_get_login_state.""" + redirect_url = "http://example.com/" + state = PublicAuthenticationService.generate_state(redirect_url) + state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8")) + + assert isinstance(state_dict, dict) + assert "redirect_url" in state_dict.keys() + assert state_dict["redirect_url"] == redirect_url + + # def test_get_login_redirect_url(self): + # redirect_url = "http://example.com/" + # state = PublicAuthenticationService.generate_state(redirect_url) + # with current_app.app_context(): + # login_redirect_url = 
PublicAuthenticationService().get_login_redirect_url(state.decode("UTF-8")) + # print("test_get_login_redirect_url") + # print("test_get_login_redirect_url") + + # def test_get_token_script(self, app: Flask) -> None: + # """Test_get_token_script.""" + # print("Test Get Token Script") + # + # ( + # keycloak_server_url, + # keycloak_client_id, + # keycloak_realm_name, + # keycloak_client_secret_key, + # ) = self.get_keycloak_constants(app) + # keycloak_user = "ciuser1" + # keycloak_pass = "ciuser1" # noqa: S105 + # + # print(f"Test Get Token Script: keycloak_server_url: {keycloak_server_url}") + # print(f"Test Get Token Script: keycloak_client_id: {keycloak_client_id}") + # print(f"Test Get Token Script: keycloak_realm_name: {keycloak_realm_name}") + # print( + # f"Test Get Token Script: keycloak_client_secret_key: {keycloak_client_secret_key}" + # ) + # + # frontend_client_id = "spiffworkflow-frontend" + # + # print(f"Test Get Token Script: frontend_client_id: {frontend_client_id}") + # + # # Get frontend token + # request_url = f"{keycloak_server_url}/realms/{keycloak_realm_name}/protocol/openid-connect/token" + # headers = {"Content-Type": "application/x-www-form-urlencoded"} + # post_data = { + # "grant_type": "password", + # "username": keycloak_user, + # "password": keycloak_pass, + # "client_id": frontend_client_id, + # } + # print(f"Test Get Token Script: request_url: {request_url}") + # print(f"Test Get Token Script: headers: {headers}") + # print(f"Test Get Token Script: post_data: {post_data}") + # + # frontend_response = requests.post( + # request_url, headers=headers, json=post_data, data=post_data + # ) + # frontend_token = json.loads(frontend_response.text) + # + # print(f"Test Get Token Script: frontend_response: {frontend_response}") + # print(f"Test Get Token Script: frontend_token: {frontend_token}") + # + # # assert isinstance(frontend_token, dict) + # # assert isinstance(frontend_token["access_token"], str) + # # assert 
isinstance(frontend_token["refresh_token"], str) + # # assert frontend_token["expires_in"] == 300 + # # assert frontend_token["refresh_expires_in"] == 1800 + # # assert frontend_token["token_type"] == "Bearer" + # + # # Get backend token + # backend_basic_auth_string = f"{keycloak_client_id}:{keycloak_client_secret_key}" + # backend_basic_auth_bytes = bytes(backend_basic_auth_string, encoding="ascii") + # backend_basic_auth = base64.b64encode(backend_basic_auth_bytes) + # + # request_url = f"{keycloak_server_url}/realms/{keycloak_realm_name}/protocol/openid-connect/token" + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", + # } + # data = { + # "grant_type": "urn:ietf:params:oauth:grant-type:token-exchange", + # "client_id": keycloak_client_id, + # "subject_token": frontend_token["access_token"], + # "audience": keycloak_client_id, + # } + # print(f"Test Get Token Script: request_url: {request_url}") + # print(f"Test Get Token Script: headers: {headers}") + # print(f"Test Get Token Script: data: {data}") + # + # backend_response = requests.post(request_url, headers=headers, data=data) + # json_data = json.loads(backend_response.text) + # backend_token = json_data["access_token"] + # print(f"Test Get Token Script: backend_response: {backend_response}") + # print(f"Test Get Token Script: backend_token: {backend_token}") + # + # if backend_token: + # # Getting resource set + # auth_bearer_string = f"Bearer {backend_token}" + # headers = { + # "Content-Type": "application/json", + # "Authorization": auth_bearer_string, + # } + # + # # uri_to_test_against = "%2Fprocess-models" + # uri_to_test_against = "/status" + # request_url = ( + # f"{keycloak_server_url}/realms/{keycloak_realm_name}/authz/protection/resource_set?" 
+ # + f"matchingUri=true&deep=true&max=-1&exactName=false&uri={uri_to_test_against}" + # ) + # # f"uri={uri_to_test_against}" + # print(f"Test Get Token Script: request_url: {request_url}") + # print(f"Test Get Token Script: headers: {headers}") + # + # resource_result = requests.get(request_url, headers=headers) + # print(f"Test Get Token Script: resource_result: {resource_result}") + # + # json_data = json.loads(resource_result.text) + # resource_id_name_pairs = [] + # for result in json_data: + # if "_id" in result and result["_id"]: + # pair_key = result["_id"] + # if "name" in result and result["name"]: + # pair_value = result["name"] + # # pair = {{result['_id']}: {}} + # else: + # pair_value = "no_name" + # # pair = {{result['_id']}: } + # pair = [pair_key, pair_value] + # resource_id_name_pairs.append(pair) + # print( + # f"Test Get Token Script: resource_id_name_pairs: {resource_id_name_pairs}" + # ) + # + # # Getting Permissions + # for resource_id_name_pair in resource_id_name_pairs: + # resource_id = resource_id_name_pair[0] + # resource_id_name_pair[1] + # + # headers = { + # "Content-Type": "application/x-www-form-urlencoded", + # "Authorization": f"Basic {backend_basic_auth.decode('utf-8')}", + # } + # + # post_data = { + # "audience": keycloak_client_id, + # "permission": resource_id, + # "subject_token": backend_token, + # "grant_type": "urn:ietf:params:oauth:grant-type:uma-ticket", + # } + # print(f"Test Get Token Script: headers: {headers}") + # print(f"Test Get Token Script: post_data: {post_data}") + # print(f"Test Get Token Script: request_url: {request_url}") + # + # permission_result = requests.post( + # request_url, headers=headers, data=post_data + # ) + # print(f"Test Get Token Script: permission_result: {permission_result}") + # + # print("test_get_token_script") diff --git a/tests/spiffworkflow_backend/integration/test_authorization.py b/tests/spiffworkflow_backend/integration/test_authorization.py new file mode 100644 index 
00000000..912e039a --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_authorization.py @@ -0,0 +1,159 @@ +"""Test_authorization.""" +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + + +class TestAuthorization(BaseTest): + """TestAuthorization.""" + + # def test_get_bearer_token(self, app: Flask) -> None: + # """Test_get_bearer_token.""" + # for user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # public_access_token = self.get_public_access_token(user_id, user_id) + # bearer_token = PublicAuthenticationService.get_bearer_token(public_access_token) + # assert isinstance(public_access_token, str) + # assert isinstance(bearer_token, dict) + # assert "access_token" in bearer_token + # assert isinstance(bearer_token["access_token"], str) + # assert "refresh_token" in bearer_token + # assert isinstance(bearer_token["refresh_token"], str) + # assert "token_type" in bearer_token + # assert bearer_token["token_type"] == "Bearer" + # assert "scope" in bearer_token + # assert isinstance(bearer_token["scope"], str) + # + # def test_get_user_info_from_public_access_token(self, app: Flask) -> None: + # """Test_get_user_info_from_public_access_token.""" + # for user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # public_access_token = self.get_public_access_token(user_id, user_id) + # user_info = PublicAuthenticationService.get_user_info_from_id_token( + # public_access_token + # ) + # assert "sub" in user_info + # assert isinstance(user_info["sub"], str) + # assert len(user_info["sub"]) == 36 + # assert "preferred_username" in user_info + # assert user_info["preferred_username"] == user_id + # assert "email" in user_info + # assert user_info["email"] == f"{user_id}@example.com" + # + # def test_introspect_token(self, app: Flask) -> None: + # """Test_introspect_token.""" + # ( + # keycloak_server_url, + # keycloak_client_id, + # keycloak_realm_name, + # keycloak_client_secret_key, + # ) = self.get_keycloak_constants(app) + # for 
user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # basic_token = self.get_public_access_token(user_id, user_id) + # introspection = PublicAuthenticationService.introspect_token(basic_token) + # assert isinstance(introspection, dict) + # assert introspection["typ"] == "Bearer" + # assert introspection["preferred_username"] == user_id + # assert introspection["client_id"] == "spiffworkflow-frontend" + # + # assert "resource_access" in introspection + # resource_access = introspection["resource_access"] + # assert isinstance(resource_access, dict) + # + # assert keycloak_client_id in resource_access + # client = resource_access[keycloak_client_id] + # assert "roles" in client + # roles = client["roles"] + # + # assert isinstance(roles, list) + # if user_id == "admin_1": + # assert len(roles) == 2 + # for role in roles: + # assert role in ("User", "Admin") + # elif user_id == "admin_2": + # assert len(roles) == 1 + # assert roles[0] == "User" + # elif user_id == "user_1" or user_id == "user_2": + # assert len(roles) == 2 + # for role in roles: + # assert role in ("User", "Anonymous") + # + # def test_get_permission_by_token(self, app: Flask) -> None: + # """Test_get_permission_by_token.""" + # output: dict = {} + # for user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # output[user_id] = {} + # basic_token = self.get_public_access_token(user_id, user_id) + # permissions = PublicAuthenticationService.get_permission_by_basic_token( + # basic_token + # ) + # if isinstance(permissions, list): + # for permission in permissions: + # resource_name = permission["rsname"] + # output[user_id][resource_name] = {} + # # assert resource_name in resource_names + # # if resource_name == 'Process Groups' or resource_name == 'Process Models': + # if "scopes" in permission: + # scopes = permission["scopes"] + # output[user_id][resource_name]["scopes"] = scopes + # + # # if user_id == 'admin_1': + # # # assert len(permissions) == 3 + # # for permission in permissions: + # 
# resource_name = permission['rsname'] + # # # assert resource_name in resource_names + # # if resource_name == 'Process Groups' or resource_name == 'Process Models': + # # # assert len(permission['scopes']) == 4 + # # for item in permission['scopes']: + # # # assert item in ('instantiate', 'read', 'update', 'delete') + # # ... + # # else: + # # # assert resource_name == 'Default Resource' + # # # assert 'scopes' not in permission + # # ... + # # + # # if user_id == 'admin_2': + # # # assert len(permissions) == 3 + # # for permission in permissions: + # # resource_name = permission['rsname'] + # # # assert resource_name in resource_names + # # if resource_name == 'Process Groups' or resource_name == 'Process Models': + # # # assert len(permission['scopes']) == 1 + # # # assert permission['scopes'][0] == 'read' + # # ... + # # else: + # # # assert resource_name == 'Default Resource' + # # # assert 'scopes' not in permission + # # ... + # # else: + # # print(f"No Permissions: {permissions}") + # print("test_get_permission_by_token") + # + # def test_get_auth_status_for_resource_and_scope_by_token(self, app: Flask) -> None: + # """Test_get_auth_status_for_resource_and_scope_by_token.""" + # resources = "Admin", "Process Groups", "Process Models" + # # scope = 'read' + # output: dict = {} + # for user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # output[user_id] = {} + # basic_token = self.get_public_access_token(user_id, user_id) + # for resource in resources: + # output[user_id][resource] = {} + # for scope in "instantiate", "read", "update", "delete": + # auth_status = PublicAuthenticationService.get_auth_status_for_resource_and_scope_by_token( + # basic_token, resource, scope + # ) + # output[user_id][resource][scope] = auth_status + # print("test_get_auth_status_for_resource_and_scope_by_token") + # + # def test_get_permissions_by_token_for_resource_and_scope(self, app: Flask) -> None: + # """Test_get_permissions_by_token_for_resource_and_scope.""" + # 
resource_names = "Default Resource", "Process Groups", "Process Models" + # output: dict = {} + # for user_id in ("user_1", "user_2", "admin_1", "admin_2"): + # output[user_id] = {} + # basic_token = self.get_public_access_token(user_id, user_id) + # for resource in resource_names: + # output[user_id][resource] = {} + # for scope in "instantiate", "read", "update", "delete": + # permissions = PublicAuthenticationService.get_permissions_by_token_for_resource_and_scope( + # basic_token, resource, scope + # ) + # output[user_id][resource][scope] = permissions + # print("test_get_permissions_by_token_for_resource_and_scope") diff --git a/tests/spiffworkflow_backend/integration/test_logging_service.py b/tests/spiffworkflow_backend/integration/test_logging_service.py new file mode 100644 index 00000000..bb7ec710 --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_logging_service.py @@ -0,0 +1,46 @@ +"""Test_logging_service.""" +from flask.app import Flask +from flask.testing import FlaskClient +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + + +class TestLoggingService(BaseTest): + """Test logging service.""" + + def test_logging_service_spiff_logger( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_run.""" + process_group_id = "test_logging_spiff_logger" + process_model_id = "simple_script" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert response.json is not None + process_instance_id = response.json["id"] + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + + log_response = client.get( + 
f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/logs", + headers=self.logged_in_headers(user), + ) + assert log_response.status_code == 200 + assert log_response.json + logs: list = log_response.json["results"] + assert len(logs) > 0 + for log in logs: + assert log["process_instance_id"] == process_instance_id + for key in [ + "timestamp", + "spiff_task_guid", + "bpmn_task_identifier", + "bpmn_process_identifier", + "message", + ]: + assert key in log.keys() diff --git a/tests/spiffworkflow_backend/integration/test_process_api.py b/tests/spiffworkflow_backend/integration/test_process_api.py new file mode 100644 index 00000000..3f71f1f1 --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_process_api.py @@ -0,0 +1,1664 @@ +"""Test Process Api Blueprint.""" +import io +import json +import time +from typing import Any + +import pytest +from flask.app import Flask +from flask.testing import FlaskClient +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.exceptions.process_entity_not_found_error import ( + ProcessEntityNotFoundError, +) +from spiffworkflow_backend.models.active_task import ActiveTaskModel +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.models.process_group import ProcessGroupSchema +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceStatus +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) +from spiffworkflow_backend.models.process_model import NotificationType +from spiffworkflow_backend.models.process_model import ProcessModelInfoSchema +from spiffworkflow_backend.models.task_event import TaskEventModel +from 
spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_model_service import ProcessModelService + +# from spiffworkflow_backend.services.git_service import GitService + + +class TestProcessApi(BaseTest): + """TestProcessAPi.""" + + def test_process_model_add( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_add_new_process_model.""" + process_model_identifier = "sample" + model_display_name = "Sample" + model_description = "The sample" + self.create_process_model_with_api( + client, + process_model_id=process_model_identifier, + process_model_display_name=model_display_name, + process_model_description=model_description, + ) + process_model = ProcessModelService().get_process_model( + process_model_identifier + ) + assert model_display_name == process_model.display_name + assert 0 == process_model.display_order + assert 1 == len(ProcessModelService().get_process_groups()) + + bpmn_file_name = "sample.bpmn" + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, "sample" + ) + self.create_spec_file( + client, + file_name=bpmn_file_name, + file_data=bpmn_file_data_bytes, + process_model=process_model, + ) + process_model = ProcessModelService().get_process_model( + process_model_identifier + ) + assert process_model.primary_file_name == bpmn_file_name + assert process_model.primary_process_id == "sample" + + def test_primary_process_id_updates_via_xml( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_primary_process_id_updates_via_xml.""" + process_model_identifier = "sample" + initial_primary_process_id = "sample" + terminal_primary_process_id = "new_process_id" + + process_model = 
load_test_spec(process_model_id=process_model_identifier) + assert process_model.primary_process_id == initial_primary_process_id + + bpmn_file_name = "sample.bpmn" + bpmn_file_data_bytes = self.get_test_data_file_contents( + bpmn_file_name, "sample" + ) + bpmn_file_data_string = bpmn_file_data_bytes.decode("utf-8") + old_string = f'bpmn:process id="{initial_primary_process_id}"' + new_string = f'bpmn:process id="{terminal_primary_process_id}"' + updated_bpmn_file_data_string = bpmn_file_data_string.replace( + old_string, new_string + ) + updated_bpmn_file_data_bytes = bytearray(updated_bpmn_file_data_string, "utf-8") + data = {"file": (io.BytesIO(updated_bpmn_file_data_bytes), bpmn_file_name)} + + user = self.find_or_create_user() + response = client.put( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/{bpmn_file_name}", + data=data, + follow_redirects=True, + content_type="multipart/form-data", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + process_model = ProcessModelService().get_process_model( + process_model_identifier + ) + assert process_model.primary_file_name == bpmn_file_name + assert process_model.primary_process_id == terminal_primary_process_id + + def test_process_model_delete( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_delete.""" + self.create_process_model_with_api(client) + + # assert we have a model + process_model = ProcessModelService().get_process_model("make_cookies") + assert process_model is not None + assert process_model.id == "make_cookies" + + # delete the model + user = self.find_or_create_user() + response = client.delete( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["ok"] is True + + # assert we no longer have a model + with 
pytest.raises(ProcessEntityNotFoundError): + ProcessModelService().get_process_model("make_cookies") + + def test_process_model_delete_with_instances( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_delete_with_instances.""" + test_process_group_id = "runs_without_input" + test_process_model_id = "sample" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + # create an instance from a model + response = self.create_process_instance( + client, test_process_group_id, test_process_model_id, headers + ) + + data = json.loads(response.get_data(as_text=True)) + # make sure the instance has the correct model + assert data["process_model_identifier"] == test_process_model_id + + # try to delete the model + response = client.delete( + f"/v1.0/process-models/{test_process_group_id}/{test_process_model_id}", + headers=self.logged_in_headers(user), + ) + + # make sure we get an error in the response + assert response.status_code == 400 + data = json.loads(response.get_data(as_text=True)) + assert data["error_code"] == "existing_instances" + assert ( + data["message"] + == "We cannot delete the model `sample`, there are existing instances that depend on it." 
+ ) + + def test_process_model_update( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_update.""" + self.create_process_model_with_api(client) + process_model = ProcessModelService().get_process_model("make_cookies") + assert process_model.id == "make_cookies" + assert process_model.display_name == "Cooooookies" + assert process_model.is_review is False + assert process_model.primary_file_name is None + assert process_model.primary_process_id is None + + process_model.display_name = "Updated Display Name" + process_model.primary_file_name = "superduper.bpmn" + process_model.primary_process_id = "superduper" + process_model.is_review = True # not in the include list, so get ignored + + user = self.find_or_create_user() + response = client.put( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(ProcessModelInfoSchema().dump(process_model)), + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["display_name"] == "Updated Display Name" + assert response.json["primary_file_name"] == "superduper.bpmn" + assert response.json["primary_process_id"] == "superduper" + assert response.json["is_review"] is False + + def test_process_model_list( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_list.""" + # create a group + group_id = "test_group" + user = self.find_or_create_user() + self.create_process_group(client, user, group_id) + + # add 5 models to the group + for i in range(5): + process_model_identifier = f"test_model_{i}" + model_display_name = f"Test Model {i}" + model_description = f"Test Model {i} Description" + self.create_process_model_with_api( + client, + group_id, + process_model_identifier, + model_display_name, + model_description, + ) + + # get all models + response 
= client.get( + f"/v1.0/process-models?process_group_identifier={group_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 5 + assert response.json["pagination"]["count"] == 5 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 1 + + # get first page, 1 per page + response = client.get( + f"/v1.0/process-models?page=1&per_page=1&process_group_identifier={group_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["id"] == "test_model_0" + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 5 + + # get second page, 1 per page + response = client.get( + f"/v1.0/process-models?page=2&per_page=1&process_group_identifier={group_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["id"] == "test_model_1" + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 5 + + # get first page, 3 per page + response = client.get( + f"/v1.0/process-models?page=1&per_page=3&process_group_identifier={group_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 3 + assert response.json["results"][0]["id"] == "test_model_0" + assert response.json["pagination"]["count"] == 3 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 2 + + # get second page, 3 per page + response = client.get( + f"/v1.0/process-models?page=2&per_page=3&process_group_identifier={group_id}", + headers=self.logged_in_headers(user), + ) + # there should only be 2 left + 
assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["results"][0]["id"] == "test_model_3" + assert response.json["pagination"]["count"] == 2 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 2 + + def test_process_group_add( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_add_process_group.""" + process_group = ProcessGroup( + id="test", + display_name="Another Test Category", + display_order=0, + admin=False, + ) + user = self.find_or_create_user() + response = client.post( + "/v1.0/process-groups", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(ProcessGroupSchema().dump(process_group)), + ) + assert response.status_code == 201 + + # Check what is returned + result = ProcessGroupSchema().loads(response.get_data(as_text=True)) + assert result is not None + assert result.display_name == "Another Test Category" + assert result.id == "test" + + # Check what is persisted + persisted = ProcessModelService().get_process_group("test") + assert persisted.display_name == "Another Test Category" + assert persisted.id == "test" + + def test_process_group_delete( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_group_delete.""" + process_group_id = "test" + process_group_display_name = "My Process Group" + + user = self.find_or_create_user() + self.create_process_group( + client, user, process_group_id, display_name=process_group_display_name + ) + persisted = ProcessModelService().get_process_group(process_group_id) + assert persisted is not None + assert persisted.id == process_group_id + + client.delete( + f"/v1.0/process-groups/{process_group_id}", + headers=self.logged_in_headers(user), + ) + + with pytest.raises(ProcessEntityNotFoundError): + ProcessModelService().get_process_group(process_group_id) + + def 
test_process_group_update( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test Process Group Update.""" + group_id = "test_process_group" + group_display_name = "Test Group" + + user = self.find_or_create_user() + self.create_process_group( + client, user, group_id, display_name=group_display_name + ) + process_group = ProcessModelService().get_process_group(group_id) + + assert process_group.display_name == group_display_name + + process_group.display_name = "Modified Display Name" + + response = client.put( + f"/v1.0/process-groups/{group_id}", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(ProcessGroupSchema().dump(process_group)), + ) + assert response.status_code == 200 + + process_group = ProcessModelService().get_process_group(group_id) + assert process_group.display_name == "Modified Display Name" + + def test_process_group_list( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_group_list.""" + # add 5 groups + user = self.find_or_create_user() + for i in range(5): + group_id = f"test_process_group_{i}" + group_display_name = f"Test Group {i}" + self.create_process_group( + client, user, group_id, display_name=group_display_name + ) + + # get all groups + response = client.get( + "/v1.0/process-groups", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 5 + assert response.json["pagination"]["count"] == 5 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 1 + + # get first page, one per page + response = client.get( + "/v1.0/process-groups?page=1&per_page=1", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["id"] == "test_process_group_0" + assert 
response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 5 + + # get second page, one per page + response = client.get( + "/v1.0/process-groups?page=2&per_page=1", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["results"][0]["id"] == "test_process_group_1" + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 5 + + # get first page, 3 per page + response = client.get( + "/v1.0/process-groups?page=1&per_page=3", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert len(response.json["results"]) == 3 + assert response.json["results"][0]["id"] == "test_process_group_0" + assert response.json["results"][1]["id"] == "test_process_group_1" + assert response.json["results"][2]["id"] == "test_process_group_2" + assert response.json["pagination"]["count"] == 3 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 2 + + # get second page, 3 per page + response = client.get( + "/v1.0/process-groups?page=2&per_page=3", + headers=self.logged_in_headers(user), + ) + # there should only be 2 left + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["results"][0]["id"] == "test_process_group_3" + assert response.json["results"][1]["id"] == "test_process_group_4" + assert response.json["pagination"]["count"] == 2 + assert response.json["pagination"]["total"] == 5 + assert response.json["pagination"]["pages"] == 2 + + def test_process_model_file_update_fails_if_no_file_given( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + self.create_spec_file(client) + + process_model = 
load_test_spec("random_fact") + data = {"key1": "THIS DATA"} + user = self.find_or_create_user() + response = client.put( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + data=data, + follow_redirects=True, + content_type="multipart/form-data", + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 400 + assert response.json is not None + assert response.json["error_code"] == "no_file_given" + + def test_process_model_file_update_fails_if_contents_is_empty( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + self.create_spec_file(client) + + process_model = load_test_spec("random_fact") + data = {"file": (io.BytesIO(b""), "random_fact.svg")} + user = self.find_or_create_user() + response = client.put( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + data=data, + follow_redirects=True, + content_type="multipart/form-data", + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 400 + assert response.json is not None + assert response.json["error_code"] == "file_contents_empty" + + def test_process_model_file_update( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + original_file = self.create_spec_file(client) + + process_model = load_test_spec("random_fact") + new_file_contents = b"THIS_IS_NEW_DATA" + data = {"file": (io.BytesIO(new_file_contents), "random_fact.svg")} + user = self.find_or_create_user() + response = client.put( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + data=data, + follow_redirects=True, + content_type="multipart/form-data", + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert response.json["ok"] + + 
response = client.get( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + updated_file = json.loads(response.get_data(as_text=True)) + assert original_file != updated_file + assert updated_file["file_contents"] == new_file_contents.decode() + + def test_process_model_file_delete_when_bad_process_model( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + self.create_spec_file(client) + + process_model = load_test_spec("random_fact") + user = self.find_or_create_user() + response = client.delete( + f"/v1.0/process-models/INCORRECT-NON-EXISTENT-GROUP/{process_model.id}/files/random_fact.svg", + follow_redirects=True, + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 400 + assert response.json is not None + assert response.json["error_code"] == "process_model_cannot_be_found" + + def test_process_model_file_delete_when_bad_file( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + self.create_spec_file(client) + + process_model = load_test_spec("random_fact") + user = self.find_or_create_user() + response = client.delete( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact_DOES_NOT_EXIST.svg", + follow_redirects=True, + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 400 + assert response.json is not None + assert response.json["error_code"] == "process_model_file_cannot_be_found" + + def test_process_model_file_delete( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_update.""" + self.create_spec_file(client) + + process_model = load_test_spec("random_fact") + user = self.find_or_create_user() + response = 
client.delete( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + follow_redirects=True, + headers=self.logged_in_headers(user), + ) + + assert response.status_code == 200 + assert response.json is not None + assert response.json["ok"] + + response = client.get( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/files/random_fact.svg", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 404 + + def test_get_file( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_file.""" + user = self.find_or_create_user() + test_process_group_id = "group_id1" + process_model_dir_name = "hello_world" + load_test_spec(process_model_dir_name, process_group_id=test_process_group_id) + response = client.get( + f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/files/hello_world.bpmn", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["name"] == "hello_world.bpmn" + assert response.json["process_group_id"] == "group_id1" + assert response.json["process_model_id"] == "hello_world" + + def test_get_workflow_from_workflow_spec( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_workflow_from_workflow_spec.""" + user = self.find_or_create_user() + process_model = load_test_spec("hello_world") + response = client.post( + f"/v1.0/process-models/{process_model.process_group_id}/{process_model.id}/process-instances", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 201 + assert response.json is not None + assert "hello_world" == response.json["process_model_identifier"] + # assert('Task_GetName' == response.json['next_task']['name']) + + def test_get_process_groups_when_none( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + 
) -> None: + """Test_get_process_groups_when_none.""" + user = self.find_or_create_user() + response = client.get( + "/v1.0/process-groups", headers=self.logged_in_headers(user) + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["results"] == [] + + def test_get_process_groups_when_there_are_some( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_process_groups_when_there_are_some.""" + user = self.find_or_create_user() + load_test_spec("hello_world") + response = client.get( + "/v1.0/process-groups", headers=self.logged_in_headers(user) + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["total"] == 1 + assert response.json["pagination"]["pages"] == 1 + + def test_get_process_group_when_found( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_process_group_when_found.""" + user = self.find_or_create_user() + test_process_group_id = "group_id1" + process_model_dir_name = "hello_world" + load_test_spec(process_model_dir_name, process_group_id=test_process_group_id) + response = client.get( + f"/v1.0/process-groups/{test_process_group_id}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["id"] == test_process_group_id + assert response.json["process_models"][0]["id"] == process_model_dir_name + + def test_get_process_model_when_found( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_process_model_when_found.""" + user = self.find_or_create_user() + test_process_group_id = "group_id1" + process_model_dir_name = "hello_world" + load_test_spec(process_model_dir_name, process_group_id=test_process_group_id) + response = 
client.get( + f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert response.json["id"] == process_model_dir_name + assert len(response.json["files"]) == 1 + assert response.json["files"][0]["name"] == "hello_world.bpmn" + + def test_get_process_model_when_not_found( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_process_model_when_not_found.""" + user = self.find_or_create_user() + process_model_dir_name = "THIS_NO_EXISTS" + group_id = self.create_process_group(client, user, "my_group") + response = client.get( + f"/v1.0/process-models/{group_id}/{process_model_dir_name}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 400 + assert response.json is not None + assert response.json["error_code"] == "process_model_cannot_be_found" + + def test_process_instance_create( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_create.""" + test_process_group_id = "runs_without_input" + test_process_model_id = "sample" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, test_process_group_id, test_process_model_id, headers + ) + assert response.json is not None + assert response.json["updated_at_in_seconds"] is not None + assert response.json["status"] == "not_started" + assert response.json["process_model_identifier"] == test_process_model_id + # TODO: mock out the responses for the git service so we can do something like this + # current_revision = GitService.get_current_revision() + # assert response.json["bpmn_version_control_identifier"] == current_revision + + def test_process_instance_run( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + 
"""Test_process_instance_run.""" + process_group_id = "runs_without_input" + process_model_id = "sample" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert response.json is not None + process_instance_id = response.json["id"] + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + + assert response.json is not None + assert type(response.json["updated_at_in_seconds"]) is int + assert response.json["updated_at_in_seconds"] > 0 + assert response.json["status"] == "complete" + assert response.json["process_model_identifier"] == process_model_id + assert response.json["data"]["current_user"]["username"] == user.username + assert response.json["data"]["Mike"] == "Awesome" + assert response.json["data"]["person"] == "Kevin" + + def test_process_instance_show( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_show.""" + process_group_id = "simple_script" + process_model_id = "simple_script" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + create_response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert create_response.json is not None + process_instance_id = create_response.json["id"] + client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + show_response = client.get( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}", + headers=self.logged_in_headers(user), + ) + assert show_response.json is not None + file_system_root = FileSystemService.root_path() + file_path = 
f"{file_system_root}/{process_group_id}/{process_model_id}/{process_model_id}.bpmn" + with open(file_path) as f_open: + xml_file_contents = f_open.read() + assert show_response.json["bpmn_xml_file_contents"] == xml_file_contents + + def test_message_start_when_starting_process_instance( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_message_start_when_starting_process_instance.""" + # ensure process model is loaded in db + load_test_spec( + "message_receiver", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_receiver", + ) + user = self.find_or_create_user() + message_model_identifier = "message_send" + payload = { + "topica": "the_topica_string", + "topicb": "the_topicb_string", + "andThis": "another_item_non_key", + } + response = client.post( + f"/v1.0/messages/{message_model_identifier}", + content_type="application/json", + headers=self.logged_in_headers(user), + data=json.dumps({"payload": payload}), + ) + assert response.status_code == 200 + json_data = response.json + assert json_data + assert json_data["status"] == "complete" + process_instance_id = json_data["id"] + process_instance = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ).first() + assert process_instance + + processor = ProcessInstanceProcessor(process_instance) + process_instance_data = processor.get_data() + assert process_instance_data + assert process_instance_data["the_payload"] == payload + + def test_message_start_when_providing_message_to_running_process_instance( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_message_start_when_providing_message_to_running_process_instance.""" + process_model = load_test_spec( + "message_sender", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_sender", + ) + user = self.find_or_create_user() + message_model_identifier = "message_response" + 
payload = { + "the_payload": { + "topica": "the_payload.topica_string", + "topicb": "the_payload.topicb_string", + "andThis": "another_item_non_key", + } + } + response = self.create_process_instance( + client, + process_model.process_group_id, + process_model.id, + self.logged_in_headers(user), + ) + assert response.json is not None + process_instance_id = response.json["id"] + + response = client.post( + f"/v1.0/process-models/{process_model.process_group_id}/" + f"{process_model.id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + + assert response.json is not None + + response = client.post( + f"/v1.0/messages/{message_model_identifier}", + content_type="application/json", + headers=self.logged_in_headers(user), + data=json.dumps( + {"payload": payload, "process_instance_id": process_instance_id} + ), + ) + assert response.status_code == 200 + json_data = response.json + assert json_data + assert json_data["status"] == "complete" + process_instance_id = json_data["id"] + process_instance = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ).first() + assert process_instance + + processor = ProcessInstanceProcessor(process_instance) + process_instance_data = processor.get_data() + assert process_instance_data + assert process_instance_data["the_payload"] == payload + + def test_process_instance_can_be_terminated( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_message_start_when_providing_message_to_running_process_instance.""" + # this task will wait on a catch event + process_model = load_test_spec( + "message_sender", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_sender", + ) + user = self.find_or_create_user() + response = self.create_process_instance( + client, + process_model.process_group_id, + process_model.id, + self.logged_in_headers(user), + ) + assert response.json is not None + 
process_instance_id = response.json["id"] + + response = client.post( + f"/v1.0/process-models/{process_model.process_group_id}/" + f"{process_model.id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + + response = client.post( + f"/v1.0/process-models/{process_model.process_group_id}/" + f"{process_model.id}/process-instances/{process_instance_id}/terminate", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + + process_instance = ProcessInstanceModel.query.filter_by( + id=process_instance_id + ).first() + assert process_instance + assert process_instance.status == "terminated" + + def test_process_instance_delete( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_delete.""" + process_group_id = "my_process_group" + process_model_id = "user_task" + + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert response.json is not None + process_instance_id = response.json["id"] + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + + assert response.json is not None + task_events = ( + db.session.query(TaskEventModel) + .filter(TaskEventModel.process_instance_id == process_instance_id) + .all() + ) + assert len(task_events) == 1 + task_event = task_events[0] + assert task_event.user_id == user.id + + delete_response = client.delete( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}", + headers=self.logged_in_headers(user), + ) + assert delete_response.status_code == 200 + + def 
test_process_instance_run_user_task_creates_task_event( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_run_user_task.""" + process_group_id = "my_process_group" + process_model_id = "user_task" + + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert response.json is not None + process_instance_id = response.json["id"] + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + + assert response.json is not None + task_events = ( + db.session.query(TaskEventModel) + .filter(TaskEventModel.process_instance_id == process_instance_id) + .all() + ) + assert len(task_events) == 1 + task_event = task_events[0] + assert task_event.user_id == user.id + # TODO: When user tasks work, we need to add some more assertions for action, task_state, etc. 
+ + def test_task_show( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_run_user_task.""" + process_group_id = "my_process_group" + process_model_id = "dynamic_enum_select_fields" + + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + assert response.json is not None + process_instance_id = response.json["id"] + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + + assert response.json is not None + active_tasks = ( + db.session.query(ActiveTaskModel) + .filter(ActiveTaskModel.process_instance_id == process_instance_id) + .all() + ) + assert len(active_tasks) == 1 + active_task = active_tasks[0] + response = client.get( + f"/v1.0/tasks/{process_instance_id}/{active_task.task_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + assert ( + response.json["form_schema"]["definitions"]["Color"]["anyOf"][1]["title"] + == "Green" + ) + + def test_process_instance_list_with_default_list( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_list_with_default_list.""" + test_process_group_id = "runs_without_input" + process_model_dir_name = "sample" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + + response = client.get( + "/v1.0/process-instances", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["pages"] == 1 + assert 
response.json["pagination"]["total"] == 1 + + process_instance_dict = response.json["results"][0] + assert type(process_instance_dict["id"]) is int + assert ( + process_instance_dict["process_model_identifier"] == process_model_dir_name + ) + assert ( + process_instance_dict["process_group_identifier"] == test_process_group_id + ) + assert type(process_instance_dict["start_in_seconds"]) is int + assert process_instance_dict["start_in_seconds"] > 0 + assert process_instance_dict["end_in_seconds"] is None + assert process_instance_dict["status"] == "not_started" + + def test_process_instance_list_with_paginated_items( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_list_with_paginated_items.""" + test_process_group_id = "runs_without_input" + process_model_dir_name = "sample" + user = self.find_or_create_user() + headers = self.logged_in_headers(user) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + self.create_process_instance( + client, test_process_group_id, process_model_dir_name, headers + ) + + response = client.get( + "/v1.0/process-instances?per_page=2&page=3", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert response.json["pagination"]["count"] == 1 + assert response.json["pagination"]["pages"] == 3 + assert response.json["pagination"]["total"] == 5 + + response = client.get( + "/v1.0/process-instances?per_page=2&page=1", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert 
response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["pagination"]["count"] == 2 + assert response.json["pagination"]["pages"] == 3 + assert response.json["pagination"]["total"] == 5 + + def test_process_instance_list_filter( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_list_filter.""" + test_process_group_id = "runs_without_input" + test_process_model_id = "sample" + user = self.find_or_create_user() + load_test_spec(test_process_model_id, process_group_id=test_process_group_id) + + statuses = [status.value for status in ProcessInstanceStatus] + # create 5 instances with different status, and different start_in_seconds/end_in_seconds + for i in range(5): + process_instance = ProcessInstanceModel( + status=ProcessInstanceStatus[statuses[i]].value, + process_initiator=user, + process_model_identifier=test_process_model_id, + process_group_identifier=test_process_group_id, + updated_at_in_seconds=round(time.time()), + start_in_seconds=(1000 * i) + 1000, + end_in_seconds=(1000 * i) + 2000, + bpmn_json=json.dumps({"i": i}), + ) + db.session.add(process_instance) + db.session.commit() + + # Without filtering we should get all 5 instances + response = client.get( + f"/v1.0/process-instances?process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert len(results) == 5 + + # filter for each of the status + # we should get 1 instance each time + for i in range(5): + response = client.get( + f"/v1.0/process-instances?process_status={ProcessInstanceStatus[statuses[i]].value}&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert 
len(results) == 1 + assert results[0]["status"] == ProcessInstanceStatus[statuses[i]].value + + response = client.get( + f"/v1.0/process-instances?process_status=not_started,complete&process_group_identifier={test_process_group_id}&process_model_identifier={test_process_model_id}", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert len(results) == 2 + assert results[0]["status"] in ["complete", "not_started"] + assert results[1]["status"] in ["complete", "not_started"] + + # filter by start/end seconds + # start > 1000 - this should eliminate the first + response = client.get( + "/v1.0/process-instances?start_from=1001", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert len(results) == 4 + for i in range(4): + assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3, 4) + + # start > 2000, end < 5000 - this should eliminate the first 2 and the last + response = client.get( + "/v1.0/process-instances?start_from=2001&end_till=5999", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert len(results) == 2 + assert json.loads(results[0]["bpmn_json"])["i"] in (2, 3) + assert json.loads(results[1]["bpmn_json"])["i"] in (2, 3) + + # start > 1000, start < 4000 - this should eliminate the first and the last 2 + response = client.get( + "/v1.0/process-instances?start_from=1001&start_till=3999", + headers=self.logged_in_headers(user), + ) + assert response.json is not None + results = response.json["results"] + assert len(results) == 2 + assert json.loads(results[0]["bpmn_json"])["i"] in (1, 2) + assert json.loads(results[1]["bpmn_json"])["i"] in (1, 2) + + # end > 2000, end < 6000 - this should eliminate the first and the last + response = client.get( + "/v1.0/process-instances?end_from=2001&end_till=5999", + headers=self.logged_in_headers(user), + ) + 
assert response.json is not None + results = response.json["results"] + assert len(results) == 3 + for i in range(3): + assert json.loads(results[i]["bpmn_json"])["i"] in (1, 2, 3) + + def test_process_instance_report_list( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_instance_report_list.""" + process_group_identifier = "runs_without_input" + process_model_identifier = "sample" + user = self.find_or_create_user() + self.logged_in_headers(user) + load_test_spec( + process_model_identifier, process_group_id=process_group_identifier + ) + report_identifier = "testreport" + report_metadata = {"order_by": ["month"]} + ProcessInstanceReportModel.create_with_attributes( + identifier=report_identifier, + process_group_identifier=process_group_identifier, + process_model_identifier=process_model_identifier, + report_metadata=report_metadata, + user=user, + ) + response = client.get( + f"/v1.0/process-models/{process_group_identifier}/{process_model_identifier}/process-instances/reports", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json) == 1 + assert response.json[0]["identifier"] == report_identifier + assert response.json[0]["report_metadata"]["order_by"] == ["month"] + + def test_process_instance_report_show_with_default_list( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], + ) -> None: + """Test_process_instance_report_show_with_default_list.""" + test_process_group_id = "runs_without_input" + process_model_dir_name = "sample" + user = self.find_or_create_user() + + report_metadata = { + "columns": [ + {"Header": "id", "accessor": "id"}, + { + "Header": "process_model_identifier", + "accessor": "process_model_identifier", + }, + {"Header": "process_group_id", "accessor": "process_group_identifier"}, + {"Header": 
"start_in_seconds", "accessor": "start_in_seconds"}, + {"Header": "status", "accessor": "status"}, + {"Header": "Name", "accessor": "name"}, + {"Header": "Status", "accessor": "status"}, + ], + "order_by": ["test_score"], + "filter_by": [ + {"field_name": "grade_level", "operator": "equals", "field_value": 2} + ], + } + + ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + process_group_identifier=test_process_group_id, + process_model_identifier=process_model_dir_name, + report_metadata=report_metadata, + user=self.find_or_create_user(), + ) + + response = client.get( + f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + assert response.json["pagination"]["count"] == 2 + assert response.json["pagination"]["pages"] == 1 + assert response.json["pagination"]["total"] == 2 + + process_instance_dict = response.json["results"][0] + assert type(process_instance_dict["id"]) is int + assert ( + process_instance_dict["process_model_identifier"] == process_model_dir_name + ) + assert ( + process_instance_dict["process_group_identifier"] == test_process_group_id + ) + assert type(process_instance_dict["start_in_seconds"]) is int + assert process_instance_dict["start_in_seconds"] > 0 + assert process_instance_dict["status"] == "complete" + + def test_process_instance_report_show_with_dynamic_filter_and_query_param( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], + ) -> None: + """Test_process_instance_report_show_with_default_list.""" + test_process_group_id = "runs_without_input" + process_model_dir_name = "sample" + user = self.find_or_create_user() + + report_metadata = { + "filter_by": [ + { + "field_name": "grade_level", + "operator": 
"equals", + "field_value": "{{grade_level}}", + } + ], + } + + ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + process_group_identifier=test_process_group_id, + process_model_identifier=process_model_dir_name, + report_metadata=report_metadata, + user=self.find_or_create_user(), + ) + + response = client.get( + f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + + def test_process_instance_report_show_with_bad_identifier( + self, + app: Flask, + client: FlaskClient, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], + ) -> None: + """Test_process_instance_report_show_with_default_list.""" + test_process_group_id = "runs_without_input" + process_model_dir_name = "sample" + user = self.find_or_create_user() + + response = client.get( + f"/v1.0/process-models/{test_process_group_id}/{process_model_dir_name}/process-instances/reports/sure?grade_level=1", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 404 + data = json.loads(response.get_data(as_text=True)) + assert data["error_code"] == "unknown_process_instance_report" + + def setup_testing_instance( + self, + client: FlaskClient, + process_group_id: str, + process_model_id: str, + user: UserModel, + ) -> Any: + """Setup_testing_instance.""" + headers = self.logged_in_headers(user) + response = self.create_process_instance( + client, process_group_id, process_model_id, headers + ) + process_instance = response.json + assert isinstance(process_instance, dict) + process_instance_id = process_instance["id"] + return process_instance_id + + def test_error_handler( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_error_handler.""" + 
process_group_id = "data" + process_model_id = "error" + user = self.find_or_create_user() + + process_instance_id = self.setup_testing_instance( + client, process_group_id, process_model_id, user + ) + + process = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + assert process.status == "not_started" + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 400 + + api_error = json.loads(response.get_data(as_text=True)) + assert api_error["error_code"] == "task_error" + assert ( + 'TypeError:can only concatenate str (not "int") to str' + in api_error["message"] + ) + + process = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + assert process.status == "faulted" + + def test_error_handler_suspend( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_error_handler_suspend.""" + process_group_id = "data" + process_model_id = "error" + user = self.find_or_create_user() + + process_instance_id = self.setup_testing_instance( + client, process_group_id, process_model_id, user + ) + process_model = ProcessModelService().get_process_model( + process_model_id, process_group_id + ) + ProcessModelService().update_spec( + process_model, + {"fault_or_suspend_on_exception": NotificationType.suspend.value}, + ) + + process = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + assert process.status == "not_started" + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 400 + + process = ( + 
db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + assert process.status == "suspended" + + def test_error_handler_with_email( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_error_handler.""" + process_group_id = "data" + process_model_id = "error" + user = self.find_or_create_user() + + process_instance_id = self.setup_testing_instance( + client, process_group_id, process_model_id, user + ) + + process_model = ProcessModelService().get_process_model( + process_model_id, process_group_id + ) + ProcessModelService().update_spec( + process_model, {"exception_notification_addresses": ["user@example.com"]} + ) + + mail = app.config["MAIL_APP"] + with mail.record_messages() as outbox: + + response = client.post( + f"/v1.0/process-models/{process_group_id}/{process_model_id}/process-instances/{process_instance_id}/run", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 400 + assert len(outbox) == 1 + message = outbox[0] + assert message.subject == "Unexpected error in app" + assert ( + message.body == 'TypeError:can only concatenate str (not "int") to str' + ) + assert message.recipients == process_model.exception_notification_addresses + + process = ( + db.session.query(ProcessInstanceModel) + .filter(ProcessInstanceModel.id == process_instance_id) + .first() + ) + assert process.status == "faulted" + + def test_process_model_file_create( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_process_model_file_create.""" + process_group_id = "hello_world" + process_model_id = "hello_world" + file_name = "hello_world.svg" + file_data = b"abc123" + + result = self.create_spec_file( + client, + process_group_id=process_group_id, + process_model_id=process_model_id, + file_name=file_name, + file_data=file_data, + ) + assert result["process_group_id"] == process_group_id + assert 
result["process_model_id"] == process_model_id + assert result["name"] == file_name + assert bytes(str(result["file_contents"]), "utf-8") == file_data + + def test_can_get_message_instances_by_process_instance_id_and_without( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_get_message_instances_by_process_instance_id.""" + load_test_spec( + "message_receiver", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_receiver", + ) + user = self.find_or_create_user() + message_model_identifier = "message_send" + payload = { + "topica": "the_topica_string", + "topicb": "the_topicb_string", + "andThis": "another_item_non_key", + } + response = client.post( + f"/v1.0/messages/{message_model_identifier}", + content_type="application/json", + headers=self.logged_in_headers(user), + data=json.dumps({"payload": payload}), + ) + assert response.status_code == 200 + assert response.json is not None + process_instance_id_one = response.json["id"] + + response = client.post( + f"/v1.0/messages/{message_model_identifier}", + content_type="application/json", + headers=self.logged_in_headers(user), + data=json.dumps({"payload": payload}), + ) + assert response.status_code == 200 + assert response.json is not None + process_instance_id_two = response.json["id"] + + response = client.get( + f"/v1.0/messages?process_instance_id={process_instance_id_one}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert ( + response.json["results"][0]["process_instance_id"] + == process_instance_id_one + ) + + response = client.get( + f"/v1.0/messages?process_instance_id={process_instance_id_two}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 1 + assert ( + 
response.json["results"][0]["process_instance_id"] + == process_instance_id_two + ) + + response = client.get( + "/v1.0/messages", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + assert response.json is not None + assert len(response.json["results"]) == 2 + + # def test_get_process_model(self): + # + # load_test_spec('random_fact') + # response = client.get('/v1.0/workflow-specification/random_fact', headers=self.logged_in_headers()) + # assert_success(response) + # json_data = json.loads(response.get_data(as_text=True)) + # api_spec = WorkflowSpecInfoSchema().load(json_data) + # + # fs_spec = process_model_service.get_spec('random_fact') + # assert(WorkflowSpecInfoSchema().dump(fs_spec) == json_data) + # + + # def test_waku_debug_info(self) -> None: + # """Test_waku_debug_info.""" + # debug_info_method = "get_waku_v2_debug_v1_info" + # + # headers = {"Content-Type": "application/json"} + # + # rpc_json = { + # "jsonrpc": "2.0", + # "method": debug_info_method, + # "params": [], + # "id": "id", + # } + # + # request_url = "http://localhost:8545" + # rpc_response = requests.post(request_url, headers=headers, json=rpc_json) + # + # rpc_json_text: dict = json.loads(rpc_response.text) + # assert isinstance(rpc_json_text, dict) + # # assert 'jsonrpc' in rpc_json_text + # # assert rpc_json_text['jsonrpc'] == '2.0' + # assert "result" in rpc_json_text + # result = rpc_json_text["result"] + # assert isinstance(result, dict) + # assert "listenAddresses" in result + # assert "enrUri" in result + # + # print("test_call_waku") + # + # def test_send_message(self) -> None: + # """Test_send_message.""" + # relay_message_method = "post_waku_v2_relay_v1_message" + # + # headers = {"Content-Type": "application/json"} + # + # # class WakuMessage: + # # payload: str + # # contentTopic: str # Optional + # # # version: int # Optional + # # timestamp: int # Optional + # payload = "This is my message" + # contentTopic = "myTestTopic" # noqa: N806 + # 
timestamp = time.time() + # + # waku_relay_message = { + # "payload": payload, + # "contentTopic": contentTopic, + # "timestamp": timestamp, + # } + # + # # ["", [{"contentTopic":"/waku/2/default-content/proto"}]] + # params = ["/waku/2/default-waku/proto", {"message": waku_relay_message}] + # rpc_json = { + # "jsonrpc": "2.0", + # "method": relay_message_method, + # "params": params, + # "id": 1, + # } + # + # request_url = "http://localhost:8545" + # rpc_response = requests.post(request_url, headers=headers, json=rpc_json) + # assert rpc_response.status_code == 200 + # + # rpc_json_data: dict = json.loads(rpc_response.text) + # assert "error" in rpc_json_data + # assert "result" in rpc_json_data + # assert rpc_json_data["error"] is None + # assert rpc_json_data["result"] is True + # + # print("test_send_message") + # + # def test_get_waku_messages(self) -> None: + # """Test_get_waku_messages.""" + # method = "get_waku_v2_store_v1_messages" + # headers = {"Content-Type": "application/json"} + # params = [{"contentTopic": "/waku/2/default-content/proto"}] + # + # rpc_json = {"jsonrpc": "2.0", "method": method, "params": params, "id": 1} + # request_url = "http://localhost:8545" + # rpc_response = requests.post(request_url, headers=headers, json=rpc_json) + # assert rpc_response.status_code == 200 + # + # rpc_json_data: dict = json.loads(rpc_response.text) + # assert "error" in rpc_json_data + # assert rpc_json_data["error"] is None + # assert "result" in rpc_json_data + # assert isinstance(rpc_json_data["result"], dict) + # assert "messages" in rpc_json_data["result"] + # assert "pagingInfo" in rpc_json_data["result"] + # + # print("get_waku_messages") diff --git a/tests/spiffworkflow_backend/integration/test_secret_service.py b/tests/spiffworkflow_backend/integration/test_secret_service.py new file mode 100644 index 00000000..94637fac --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_secret_service.py @@ -0,0 +1,494 @@ +"""Test_secret_service.""" 
+import json +from typing import Optional + +import pytest +from flask.app import Flask +from flask.testing import FlaskClient +from flask_bpmn.api.api_error import ApiError +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from werkzeug.test import TestResponse + +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.models.secret_model import SecretAllowedProcessPathModel +from spiffworkflow_backend.models.secret_model import SecretModel +from spiffworkflow_backend.models.secret_model import SecretModelSchema +from spiffworkflow_backend.models.user import UserModel +from spiffworkflow_backend.services.file_system_service import FileSystemService +from spiffworkflow_backend.services.process_model_service import ProcessModelService +from spiffworkflow_backend.services.secret_service import SecretService + + +class SecretServiceTestHelpers(BaseTest): + """SecretServiceTestHelpers.""" + + test_key = "test_key" + test_value = "test_value" + test_process_group_id = "test" + test_process_group_display_name = "My Test Process Group" + test_process_model_id = "make_cookies" + test_process_model_display_name = "Cooooookies" + test_process_model_description = "Om nom nom delicious cookies" + + def add_test_secret(self, user: UserModel) -> SecretModel: + """Add_test_secret.""" + return SecretService().add_secret(self.test_key, self.test_value, user.id) + + def add_test_process( + self, client: FlaskClient, user: UserModel + ) -> ProcessModelInfo: + """Add_test_process.""" + self.create_process_group( + client, + user, + self.test_process_group_id, + display_name=self.test_process_group_display_name, + ) + self.create_process_model_with_api( + client, + process_group_id=self.test_process_group_id, + process_model_id=self.test_process_model_id, + process_model_display_name=self.test_process_model_display_name, + process_model_description=self.test_process_model_description, + ) + process_model_info = 
ProcessModelService().get_process_model( + self.test_process_model_id, self.test_process_group_id + ) + return process_model_info + + def add_test_secret_allowed_process( + self, client: FlaskClient, user: UserModel + ) -> SecretAllowedProcessPathModel: + """Add_test_secret_allowed_process.""" + process_model_info = self.add_test_process(client, user) + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + + test_secret = self.add_test_secret(user) + allowed_process_model = SecretService().add_allowed_process( + secret_id=test_secret.id, + user_id=user.id, + allowed_relative_path=process_model_relative_path, + ) + return allowed_process_model + + +class TestSecretService(SecretServiceTestHelpers): + """TestSecretService.""" + + def test_add_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: + """Test_add_secret.""" + user = self.find_or_create_user() + test_secret = self.add_test_secret(user) + + assert test_secret is not None + assert test_secret.key == self.test_key + assert test_secret.value == self.test_value + assert test_secret.creator_user_id == user.id + + def test_add_secret_duplicate_key_fails( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_add_secret_duplicate_key_fails.""" + user = self.find_or_create_user() + self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + self.add_test_secret(user) + assert ae.value.error_code == "create_secret_error" + + def test_get_secret(self, app: Flask, with_db_and_bpmn_file_cleanup: None) -> None: + """Test_get_secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + + secret = SecretService().get_secret(self.test_key) + assert secret is not None + assert secret.value == self.test_value + + def test_get_secret_bad_key_fails( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_get_secret_bad_service.""" + user = self.find_or_create_user() + 
self.add_test_secret(user) + + with pytest.raises(ApiError): + SecretService().get_secret("bad_key") + + def test_update_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test update secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + secret = SecretService.get_secret(self.test_key) + assert secret + assert secret.value == self.test_value + SecretService.update_secret(self.test_key, "new_secret_value", user.id) + new_secret = SecretService.get_secret(self.test_key) + assert new_secret + assert new_secret.value == "new_secret_value" # noqa: S105 + + def test_update_secret_bad_user_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_update_secret_bad_user.""" + user = self.find_or_create_user() + self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + SecretService.update_secret( + self.test_key, "new_secret_value", user.id + 1 + ) # noqa: S105 + assert ( + ae.value.message + == f"User: {user.id+1} cannot update the secret with key : test_key" + ) + + def test_update_secret_bad_secret_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_update_secret_bad_secret_fails.""" + user = self.find_or_create_user() + secret = self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + SecretService.update_secret(secret.key + "x", "some_new_value", user.id) + assert "Resource does not exist" in ae.value.message + assert ae.value.error_code == "update_secret_error" + + def test_delete_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test delete secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + secrets = SecretModel.query.all() + assert len(secrets) == 1 + assert secrets[0].creator_user_id == user.id + SecretService.delete_secret(self.test_key, user.id) + secrets = SecretModel.query.all() + 
assert len(secrets) == 0 + + def test_delete_secret_bad_user_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_delete_secret_bad_user.""" + user = self.find_or_create_user() + self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + SecretService.delete_secret(self.test_key, user.id + 1) + assert ( + f"User: {user.id+1} cannot delete the secret with key" in ae.value.message + ) + + def test_delete_secret_bad_secret_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_delete_secret_bad_secret_fails.""" + user = self.find_or_create_user() + self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + SecretService.delete_secret(self.test_key + "x", user.id) + assert "Resource does not exist" in ae.value.message + + def test_secret_add_allowed_process( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_add_allowed_process.""" + user = self.find_or_create_user() + test_secret = self.add_test_secret(user) + process_model_info = self.add_test_process(client, user) + + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + allowed_process_model = SecretService().add_allowed_process( + secret_id=test_secret.id, + user_id=user.id, + allowed_relative_path=process_model_relative_path, + ) + + assert allowed_process_model is not None + assert isinstance(allowed_process_model, SecretAllowedProcessPathModel) + assert allowed_process_model.secret_id == test_secret.id + assert ( + allowed_process_model.allowed_relative_path == process_model_relative_path + ) + + assert len(test_secret.allowed_processes) == 1 + assert test_secret.allowed_processes[0] == allowed_process_model + + def test_secret_add_allowed_process_same_process_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Do not allow duplicate 
entries for secret_id/allowed_relative_path pairs. + + We actually take care of this in the db model with a unique constraint + on the 2 columns. + """ + user = self.find_or_create_user() + test_secret = self.add_test_secret(user) + process_model_info = self.add_test_process(client, user) + + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + SecretService().add_allowed_process( + secret_id=test_secret.id, + user_id=user.id, + allowed_relative_path=process_model_relative_path, + ) + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 1 + + with pytest.raises(ApiError) as ae: + SecretService().add_allowed_process( + secret_id=test_secret.id, + user_id=user.id, + allowed_relative_path=process_model_relative_path, + ) + assert "Resource already exists" in ae.value.message + + def test_secret_add_allowed_process_bad_user_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_add_allowed_process_bad_user.""" + user = self.find_or_create_user() + process_model_info = self.add_test_process(client, user) + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + test_secret = self.add_test_secret(user) + with pytest.raises(ApiError) as ae: + SecretService().add_allowed_process( + secret_id=test_secret.id, + user_id=user.id + 1, + allowed_relative_path=process_model_relative_path, + ) + assert ( + ae.value.message + == f"User: {user.id+1} cannot modify the secret with key : {self.test_key}" + ) + + def test_secret_add_allowed_process_bad_secret_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_add_allowed_process_bad_secret_fails.""" + user = self.find_or_create_user() + process_model_info = self.add_test_process(client, user) + process_model_relative_path = FileSystemService.process_model_relative_path( + 
process_model_info + ) + test_secret = self.add_test_secret(user) + + with pytest.raises(ApiError) as ae: + SecretService().add_allowed_process( + secret_id=test_secret.id + 1, + user_id=user.id, + allowed_relative_path=process_model_relative_path, + ) + assert "Resource does not exist" in ae.value.message + + def test_secret_delete_allowed_process( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_delete_allowed_process.""" + user = self.find_or_create_user() + allowed_process_model = self.add_test_secret_allowed_process(client, user) + + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 1 + + SecretService().delete_allowed_process(allowed_process_model.id, user.id) + + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 0 + + def test_secret_delete_allowed_process_bad_user_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_delete_allowed_process_bad_user_fails.""" + user = self.find_or_create_user() + allowed_process_model = self.add_test_secret_allowed_process(client, user) + with pytest.raises(ApiError) as ae: + SecretService().delete_allowed_process( + allowed_process_model.id, user.id + 1 + ) + message = ae.value.message + assert ( + f"User: {user.id+1} cannot delete the allowed_process with id : {allowed_process_model.id}" + in message + ) + + def test_secret_delete_allowed_process_bad_allowed_process_fails( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_secret_delete_allowed_process_bad_allowed_process_fails.""" + user = self.find_or_create_user() + allowed_process_model = self.add_test_secret_allowed_process(client, user) + with pytest.raises(ApiError) as ae: + SecretService().delete_allowed_process( + allowed_process_model.id + 1, user.id + ) + assert "Resource does not exist" in 
ae.value.message + + +class TestSecretServiceApi(SecretServiceTestHelpers): + """TestSecretServiceApi.""" + + def test_add_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_add_secret.""" + user = self.find_or_create_user() + secret_model = SecretModel( + key=self.test_key, + value=self.test_value, + creator_user_id=user.id, + ) + data = json.dumps(SecretModelSchema().dump(secret_model)) + response: TestResponse = client.post( + "/v1.0/secrets", + headers=self.logged_in_headers(user), + content_type="application/json", + data=data, + ) + assert response.json + secret: dict = response.json + for key in ["key", "value", "creator_user_id"]: + assert key in secret.keys() + assert secret["key"] == self.test_key + assert secret["value"] == self.test_value + assert secret["creator_user_id"] == user.id + + def test_get_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test get secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + secret_response = client.get( + f"/v1.0/secrets/{self.test_key}", + headers=self.logged_in_headers(user), + ) + assert secret_response + assert secret_response.status_code == 200 + assert secret_response.json + assert secret_response.json["value"] == self.test_value + + def test_update_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_update_secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + secret: Optional[SecretModel] = SecretService.get_secret(self.test_key) + assert secret + assert secret.value == self.test_value + secret_model = SecretModel( + key=self.test_key, value="new_secret_value", creator_user_id=user.id + ) + response = client.put( + f"/v1.0/secrets/{self.test_key}", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(SecretModelSchema().dump(secret_model)), + ) + assert 
response.status_code == 200 + + secret_model = SecretModel.query.filter( + SecretModel.key == self.test_key + ).first() + assert secret_model.value == "new_secret_value" + + def test_delete_secret( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test delete secret.""" + user = self.find_or_create_user() + self.add_test_secret(user) + secret = SecretService.get_secret(self.test_key) + assert secret + assert secret.value == self.test_value + secret_response = client.delete( + f"/v1.0/secrets/{self.test_key}", + headers=self.logged_in_headers(user), + ) + assert secret_response.status_code == 200 + with pytest.raises(ApiError): + secret = SecretService.get_secret(self.test_key) + + def test_delete_secret_bad_user( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_delete_secret_bad_user.""" + user_1 = self.find_or_create_user() + user_2 = self.find_or_create_user("test_user_2") + self.add_test_secret(user_1) + secret_response = client.delete( + f"/v1.0/secrets/{self.test_key}", + headers=self.logged_in_headers(user_2), + ) + assert secret_response.status_code == 401 + + def test_delete_secret_bad_key( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test delete secret.""" + user = self.find_or_create_user() + secret_response = client.delete( + "/v1.0/secrets/bad_secret_key", + headers=self.logged_in_headers(user), + ) + assert secret_response.status_code == 404 + + def test_add_secret_allowed_process( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test add secret allowed process.""" + user = self.find_or_create_user() + test_secret = self.add_test_secret(user) + process_model_info = self.add_test_process(client, user) + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + data = { + "secret_id": test_secret.id, + 
"allowed_relative_path": process_model_relative_path, + } + response: TestResponse = client.post( + "/v1.0/secrets/allowed_process_paths", + headers=self.logged_in_headers(user), + content_type="application/json", + data=json.dumps(data), + ) + assert response.status_code == 201 + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 1 + assert allowed_processes[0].allowed_relative_path == process_model_relative_path + assert allowed_processes[0].secret_id == test_secret.id + + def test_delete_secret_allowed_process( + self, app: Flask, client: FlaskClient, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test delete secret allowed process.""" + user = self.find_or_create_user() + test_secret = self.add_test_secret(user) + process_model_info = self.add_test_process(client, user) + process_model_relative_path = FileSystemService.process_model_relative_path( + process_model_info + ) + allowed_process = SecretService.add_allowed_process( + test_secret.id, user.id, process_model_relative_path + ) + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 1 + assert allowed_processes[0].secret_id == test_secret.id + assert allowed_processes[0].allowed_relative_path == process_model_relative_path + response = client.delete( + f"/v1.0/secrets/allowed_process_paths/{allowed_process.id}", + headers=self.logged_in_headers(user), + ) + assert response.status_code == 200 + allowed_processes = SecretAllowedProcessPathModel.query.all() + assert len(allowed_processes) == 0 diff --git a/tests/spiffworkflow_backend/integration/test_user_blueprint.py b/tests/spiffworkflow_backend/integration/test_user_blueprint.py new file mode 100644 index 00000000..ce142f1c --- /dev/null +++ b/tests/spiffworkflow_backend/integration/test_user_blueprint.py @@ -0,0 +1,190 @@ +"""Test_user_blueprint.""" +# TODO: fix these tests for new authing system +# """Test User Blueprint.""" +# import json +# from typing 
import Any +# +# from flask.testing import FlaskClient +# +# from spiffworkflow_backend.models.group import GroupModel +# from spiffworkflow_backend.models.user import UserModel +# +# +# def test_acceptance(client: FlaskClient) -> None: +# """Test_acceptance.""" +# # Create a user U +# user = create_user(client, "U") +# # Create a group G +# group_g = create_group(client, "G") +# # Assign user U to group G +# assign_user_to_group(client, user, group_g) +# # Delete group G +# delete_group(client, group_g.name) +# # Create group H +# group_h = create_group(client, "H") +# # Assign user U to group H +# assign_user_to_group(client, user, group_h) +# # Unassign user U from group H +# remove_user_from_group(client, user, group_h) +# # Delete group H +# delete_group(client, group_h.name) +# # Delete user U +# delete_user(client, user.username) +# +# +# def test_user_can_be_created_and_deleted(client: FlaskClient) -> None: +# """Test_user_can_be_created_and_deleted.""" +# username = "joe" +# response = client.get(f"/user/{username}") +# assert response.status_code == 201 +# user = UserModel.query.filter_by(username=username).first() +# assert user.username == username +# +# response = client.delete(f"/user/{username}") +# assert response.status_code == 204 +# user = UserModel.query.filter_by(username=username).first() +# assert user is None +# +# +# def test_delete_returns_an_error_if_user_is_not_found(client: FlaskClient) -> None: +# """Test_delete_returns_an_error_if_user_is_not_found.""" +# username = "joe" +# response = client.delete(f"/user/{username}") +# assert response.status_code == 400 +# +# +# def test_create_returns_an_error_if_user_exists(client: FlaskClient) -> None: +# """Test_create_returns_an_error_if_user_exists.""" +# username = "joe" +# response = client.get(f"/user/{username}") +# assert response.status_code == 201 +# user = UserModel.query.filter_by(username=username).first() +# assert user.username == username +# +# response = 
client.get(f"/user/{username}") +# assert response.status_code == 409 +# +# response = client.delete(f"/user/{username}") +# assert response.status_code == 204 +# user = UserModel.query.filter_by(username=username).first() +# assert user is None +# +# +# def test_group_can_be_created_and_deleted(client: FlaskClient) -> None: +# """Test_group_can_be_created_and_deleted.""" +# group_name = "administrators" +# response = client.get(f"/group/{group_name}") +# assert response.status_code == 201 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group.name == group_name +# +# response = client.delete(f"/group/{group_name}") +# assert response.status_code == 204 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group is None +# +# +# def test_delete_returns_an_error_if_group_is_not_found(client: FlaskClient) -> None: +# """Test_delete_returns_an_error_if_group_is_not_found.""" +# group_name = "administrators" +# response = client.delete(f"/group/{group_name}") +# assert response.status_code == 400 +# +# +# def test_create_returns_an_error_if_group_exists(client: FlaskClient) -> None: +# """Test_create_returns_an_error_if_group_exists.""" +# group_name = "administrators" +# response = client.get(f"/group/{group_name}") +# assert response.status_code == 201 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group.name == group_name +# +# response = client.get(f"/group/{group_name}") +# assert response.status_code == 409 +# +# response = client.delete(f"/group/{group_name}") +# assert response.status_code == 204 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group is None +# +# +# def test_user_can_be_assigned_to_a_group(client: FlaskClient) -> None: +# """Test_user_can_be_assigned_to_a_group.""" +# user = create_user(client, "joe") +# group = create_group(client, "administrators") +# assign_user_to_group(client, user, group) +# delete_user(client, user.username) +# 
delete_group(client, group.name) +# +# +# def test_user_can_be_removed_from_a_group(client: FlaskClient) -> None: +# """Test_user_can_be_removed_from_a_group.""" +# user = create_user(client, "joe") +# group = create_group(client, "administrators") +# assign_user_to_group(client, user, group) +# remove_user_from_group(client, user, group) +# delete_user(client, user.username) +# delete_group(client, group.name) +# +# +# def create_user(client: FlaskClient, username: str) -> Any: +# """Create_user.""" +# response = client.get(f"/user/{username}") +# assert response.status_code == 201 +# user = UserModel.query.filter_by(username=username).first() +# assert user.username == username +# return user +# +# +# def delete_user(client: FlaskClient, username: str) -> None: +# """Delete_user.""" +# response = client.delete(f"/user/{username}") +# assert response.status_code == 204 +# user = UserModel.query.filter_by(username=username).first() +# assert user is None +# +# +# def create_group(client: FlaskClient, group_name: str) -> Any: +# """Create_group.""" +# response = client.get(f"/group/{group_name}") +# assert response.status_code == 201 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group.name == group_name +# return group +# +# +# def delete_group(client: FlaskClient, group_name: str) -> None: +# """Delete_group.""" +# response = client.delete(f"/group/{group_name}") +# assert response.status_code == 204 +# group = GroupModel.query.filter_by(name=group_name).first() +# assert group is None +# +# +# def assign_user_to_group( +# client: FlaskClient, user: UserModel, group: GroupModel +# ) -> None: +# """Assign_user_to_group.""" +# response = client.post( +# "/assign_user_to_group", +# content_type="application/json", +# data=json.dumps({"user_id": user.id, "group_id": group.id}), +# ) +# assert response.status_code == 201 +# user = UserModel.query.filter_by(id=user.id).first() +# assert len(user.user_group_assignments) == 1 +# assert 
user.user_group_assignments[0].group_id == group.id +# +# +# def remove_user_from_group( +# client: FlaskClient, user: UserModel, group: GroupModel +# ) -> None: +# """Remove_user_from_group.""" +# response = client.post( +# "remove_user_from_group", +# content_type="application/json", +# data=json.dumps({"user_id": user.id, "group_id": group.id}), +# ) +# assert response.status_code == 204 +# user = UserModel.query.filter_by(id=user.id).first() +# assert len(user.user_group_assignments) == 0 diff --git a/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py b/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py new file mode 100644 index 00000000..93fb4206 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_acceptance_test_fixtures.py @@ -0,0 +1,16 @@ +"""Test_acceptance_test_fixtures.""" +from flask.app import Flask + +from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures + + +def test_start_dates_are_one_hour_apart(app: Flask) -> None: + """Test_start_dates_are_one_hour_apart.""" + process_instances = load_fixtures() + + assert len(process_instances) > 2 + assert process_instances[0].start_in_seconds is not None + assert process_instances[1].start_in_seconds is not None + assert (process_instances[0].start_in_seconds - 3600) == ( + process_instances[1].start_in_seconds + ) diff --git a/tests/spiffworkflow_backend/unit/test_dot_notation.py b/tests/spiffworkflow_backend/unit/test_dot_notation.py new file mode 100644 index 00000000..4446d4d9 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_dot_notation.py @@ -0,0 +1,56 @@ +"""Test_various_bpmn_constructs.""" +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import 
( + ProcessInstanceService, +) + + +class TestDotNotation(BaseTest): + """TestVariousBpmnConstructs.""" + + def test_dot_notation( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_form_data_conversion_to_dot_dict.""" + process_model = load_test_spec( + "test_dot_notation", + bpmn_file_name="diagram.bpmn", + process_model_source_directory="dot_notation", + ) + current_user = self.find_or_create_user() + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + + processor.do_engine_steps(save=True) + + user_task = processor.get_ready_user_tasks()[0] + form_data = { + "invoice.contibutorName": "Elizabeth", + "invoice.contributorId": 100, + "invoice.invoiceId": 10001, + "invoice.invoiceAmount": "1000.00", + "invoice.dueDate": "09/30/2022", + } + ProcessInstanceService.complete_form_task( + processor, user_task, form_data, current_user + ) + + expected = { + "contibutorName": "Elizabeth", + "contributorId": 100, + "invoiceId": 10001, + "invoiceAmount": "1000.00", + "dueDate": "09/30/2022", + } + + processor.do_engine_steps(save=True) + assert processor.get_data()["invoice"] == expected diff --git a/tests/spiffworkflow_backend/unit/test_environment_var_script.py b/tests/spiffworkflow_backend/unit/test_environment_var_script.py new file mode 100644 index 00000000..ac96e7e4 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_environment_var_script.py @@ -0,0 +1,23 @@ +"""Test_environment_var_script.""" +from flask import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestEnvironmentVarScript(BaseTest): + """TestEnvironmentVarScript.""" + + # it's not totally obvious we want to keep this test/file + def test_script_engine_can_use_custom_scripts( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + 
) -> None: + """Test_script_engine_takes_data_and_returns_expected_results.""" + with app.app_context(): + script_engine = ProcessInstanceProcessor._script_engine + result = script_engine._evaluate("get_env()", {}) + assert result == "testing" diff --git a/tests/spiffworkflow_backend/unit/test_file.py b/tests/spiffworkflow_backend/unit/test_file.py new file mode 100644 index 00000000..22f2fb15 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_file.py @@ -0,0 +1,27 @@ +"""Test_file.""" +from datetime import datetime + +from spiffworkflow_backend.models.file import File + + +def test_files_can_be_sorted() -> None: + """Test_files_can_be_sorted.""" + europe = create_test_file(type="bpmn", name="europe") + asia = create_test_file(type="bpmn", name="asia") + africa = create_test_file(type="dmn", name="africa") + oceania = create_test_file(type="dmn", name="oceania") + + mylist = [europe, oceania, asia, africa] + assert sorted(mylist) == [asia, europe, africa, oceania] + + +def create_test_file(type: str, name: str) -> File: + """Create_test_file.""" + return File( + type=type, + name=name, + content_type=type, + document={}, + last_modified=datetime.now(), + size=1, + ) diff --git a/tests/spiffworkflow_backend/unit/test_message_instance.py b/tests/spiffworkflow_backend/unit/test_message_instance.py new file mode 100644 index 00000000..842d5ff4 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_message_instance.py @@ -0,0 +1,162 @@ +"""Test_message_instance.""" +import pytest +from flask import Flask +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.message_model import MessageModel + + +class TestMessageInstance(BaseTest): + """TestMessageInstance.""" + + def test_can_create_message_instance( + self, app: Flask, 
with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_create_message_instance.""" + message_model_identifier = "message_model_one" + message_model = self.create_message_model(message_model_identifier) + process_model = load_test_spec("hello_world") + process_instance = self.create_process_instance_from_process_model( + process_model, "waiting" + ) + + queued_message = MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + ) + db.session.add(queued_message) + db.session.commit() + + assert queued_message.status == "ready" + assert queued_message.failure_cause is None + + queued_message_from_query = MessageInstanceModel.query.filter_by( # type: ignore + id=queued_message.id + ).first() + assert queued_message_from_query is not None + + def test_cannot_set_invalid_status( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_cannot_set_invalid_status.""" + message_model_identifier = "message_model_one" + message_model = self.create_message_model(message_model_identifier) + process_model = load_test_spec("hello_world") + process_instance = self.create_process_instance_from_process_model( + process_model, "waiting" + ) + + with pytest.raises(ValueError) as exception: + MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + status="BAD_STATUS", + ) + assert ( + str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" + ) + + queued_message = MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + ) + db.session.add(queued_message) + db.session.commit() + + with pytest.raises(ValueError) as exception: + queued_message.status = "BAD_STATUS" + assert ( + str(exception.value) == "MessageInstanceModel: invalid status: BAD_STATUS" + ) + + def test_cannot_set_invalid_message_type( + self, app: Flask, 
with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_cannot_set_invalid_message_type.""" + message_model_identifier = "message_model_one" + message_model = self.create_message_model(message_model_identifier) + process_model = load_test_spec("hello_world") + process_instance = self.create_process_instance_from_process_model( + process_model, "waiting" + ) + + with pytest.raises(ValueError) as exception: + MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="BAD_MESSAGE_TYPE", + message_model_id=message_model.id, + ) + assert ( + str(exception.value) + == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" + ) + + queued_message = MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + ) + db.session.add(queued_message) + db.session.commit() + + with pytest.raises(ValueError) as exception: + queued_message.message_type = "BAD_MESSAGE_TYPE" + assert ( + str(exception.value) + == "MessageInstanceModel: invalid message_type: BAD_MESSAGE_TYPE" + ) + + def test_force_failure_cause_if_status_is_failure( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_force_failure_cause_if_status_is_failure.""" + message_model_identifier = "message_model_one" + message_model = self.create_message_model(message_model_identifier) + process_model = load_test_spec("hello_world") + process_instance = self.create_process_instance_from_process_model( + process_model, "waiting" + ) + + queued_message = MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + status="failed", + ) + db.session.add(queued_message) + with pytest.raises(ValueError) as exception: + db.session.commit() + assert ( + str(exception.value) + == "MessageInstanceModel: failure_cause must be set if status is failed" + ) + assert queued_message.id is None + db.session.remove() # type: ignore + + queued_message = 
MessageInstanceModel( + process_instance_id=process_instance.id, + message_type="send", + message_model_id=message_model.id, + ) + db.session.add(queued_message) + db.session.commit() + + queued_message.status = "failed" + queued_message.failure_cause = "THIS TEST FAILURE" + db.session.add(queued_message) + db.session.commit() + assert queued_message.id is not None + assert queued_message.failure_cause == "THIS TEST FAILURE" + + def create_message_model(self, message_model_identifier: str) -> MessageModel: + """Create_message_model.""" + message_model = MessageModel(identifier=message_model_identifier) + db.session.add(message_model) + db.session.commit() + return message_model diff --git a/tests/spiffworkflow_backend/unit/test_message_service.py b/tests/spiffworkflow_backend/unit/test_message_service.py new file mode 100644 index 00000000..38079c96 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_message_service.py @@ -0,0 +1,246 @@ +"""Test_message_service.""" +from flask import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.message_correlation import MessageCorrelationModel +from spiffworkflow_backend.models.message_correlation_message_instance import ( + MessageCorrelationMessageInstanceModel, +) +from spiffworkflow_backend.models.message_instance import MessageInstanceModel +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.services.message_service import MessageService +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.process_instance_service import ( + ProcessInstanceService, +) + + +class TestMessageService(BaseTest): + """TestMessageService.""" + + def test_can_send_message_to_waiting_message( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + 
"""Test_can_send_message_to_waiting_message.""" + process_model_sender = load_test_spec( + "message_sender", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_sender", + ) + load_test_spec( + "message_receiver", + process_model_source_directory="message_send_one_conversation", + bpmn_file_name="message_receiver", + ) + user = self.find_or_create_user() + + process_instance_sender = ProcessInstanceService.create_process_instance( + process_model_sender.id, + user, + process_group_identifier=process_model_sender.process_group_id, + ) + + processor_sender = ProcessInstanceProcessor(process_instance_sender) + processor_sender.do_engine_steps() + processor_sender.save() + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 2 + # ensure both message instances are for the same process instance + # it will be send_message and receive_message_response + assert ( + message_instance_result[0].process_instance_id + == message_instance_result[1].process_instance_id + ) + + message_instance_sender = message_instance_result[0] + assert message_instance_sender.process_instance_id == process_instance_sender.id + message_correlations = MessageCorrelationModel.query.all() + assert len(message_correlations) == 2 + assert message_correlations[0].process_instance_id == process_instance_sender.id + message_correlations_message_instances = ( + MessageCorrelationMessageInstanceModel.query.all() + ) + assert len(message_correlations_message_instances) == 4 + assert ( + message_correlations_message_instances[0].message_instance_id + == message_instance_sender.id + ) + assert ( + message_correlations_message_instances[1].message_instance_id + == message_instance_sender.id + ) + assert ( + message_correlations_message_instances[2].message_instance_id + == message_instance_result[1].id + ) + assert ( + message_correlations_message_instances[3].message_instance_id + == message_instance_result[1].id + ) + 
+ # process first message + MessageService.process_message_instances() + assert message_instance_sender.status == "completed" + + process_instance_result = ProcessInstanceModel.query.all() + + assert len(process_instance_result) == 2 + process_instance_receiver = process_instance_result[1] + + # just make sure it's a different process instance + assert process_instance_receiver.id != process_instance_sender.id + assert process_instance_receiver.status == "complete" + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 3 + message_instance_receiver = message_instance_result[1] + assert message_instance_receiver.id != message_instance_sender.id + assert message_instance_receiver.status == "ready" + + # process second message + MessageService.process_message_instances() + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 3 + for message_instance in message_instance_result: + assert message_instance.status == "completed" + + process_instance_result = ProcessInstanceModel.query.all() + assert len(process_instance_result) == 2 + for process_instance in process_instance_result: + assert process_instance.status == "complete" + + def test_can_send_message_to_multiple_process_models( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_send_message_to_multiple_process_models.""" + process_model_sender = load_test_spec( + "message_sender", + process_model_source_directory="message_send_two_conversations", + bpmn_file_name="message_sender", + ) + load_test_spec( + "message_receiver_one", + process_model_source_directory="message_send_two_conversations", + bpmn_file_name="message_receiver_one", + ) + load_test_spec( + "message_receiver_two", + process_model_source_directory="message_send_two_conversations", + bpmn_file_name="message_receiver_two", + ) + + user = self.find_or_create_user() + + process_instance_sender = 
ProcessInstanceService.create_process_instance( + process_model_sender.id, + user, + process_group_identifier=process_model_sender.process_group_id, + ) + + processor_sender = ProcessInstanceProcessor(process_instance_sender) + processor_sender.do_engine_steps() + processor_sender.save() + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 3 + # ensure both message instances are for the same process instance + # it will be send_message and receive_message_response + assert ( + message_instance_result[0].process_instance_id + == message_instance_result[1].process_instance_id + ) + + message_instance_sender = message_instance_result[0] + assert message_instance_sender.process_instance_id == process_instance_sender.id + message_correlations = MessageCorrelationModel.query.all() + assert len(message_correlations) == 4 + assert message_correlations[0].process_instance_id == process_instance_sender.id + message_correlations_message_instances = ( + MessageCorrelationMessageInstanceModel.query.all() + ) + assert len(message_correlations_message_instances) == 6 + assert ( + message_correlations_message_instances[0].message_instance_id + == message_instance_sender.id + ) + assert ( + message_correlations_message_instances[1].message_instance_id + == message_instance_sender.id + ) + assert ( + message_correlations_message_instances[2].message_instance_id + == message_instance_result[1].id + ) + assert ( + message_correlations_message_instances[3].message_instance_id + == message_instance_result[1].id + ) + + # process first message + MessageService.process_message_instances() + assert message_instance_sender.status == "completed" + + process_instance_result = ProcessInstanceModel.query.all() + + assert len(process_instance_result) == 3 + process_instance_receiver_one = ProcessInstanceModel.query.filter_by( + process_model_identifier="message_receiver_one" + ).first() + assert process_instance_receiver_one is not None + 
process_instance_receiver_two = ProcessInstanceModel.query.filter_by( + process_model_identifier="message_receiver_two" + ).first() + assert process_instance_receiver_two is not None + + # just make sure it's a different process instance + assert ( + process_instance_receiver_one.process_model_identifier + == "message_receiver_one" + ) + assert process_instance_receiver_one.id != process_instance_sender.id + assert process_instance_receiver_one.status == "complete" + assert ( + process_instance_receiver_two.process_model_identifier + == "message_receiver_two" + ) + assert process_instance_receiver_two.id != process_instance_sender.id + assert process_instance_receiver_two.status == "complete" + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 5 + + message_instance_receiver_one = [ + x + for x in message_instance_result + if x.process_instance_id == process_instance_receiver_one.id + ][0] + message_instance_receiver_two = [ + x + for x in message_instance_result + if x.process_instance_id == process_instance_receiver_two.id + ][0] + assert message_instance_receiver_one is not None + assert message_instance_receiver_two is not None + assert message_instance_receiver_one.id != message_instance_sender.id + assert message_instance_receiver_one.status == "ready" + assert message_instance_receiver_two.id != message_instance_sender.id + assert message_instance_receiver_two.status == "ready" + + # process second message + MessageService.process_message_instances() + MessageService.process_message_instances() + + message_instance_result = MessageInstanceModel.query.all() + assert len(message_instance_result) == 6 + for message_instance in message_instance_result: + assert message_instance.status == "completed" + + process_instance_result = ProcessInstanceModel.query.all() + assert len(process_instance_result) == 3 + for process_instance in process_instance_result: + assert process_instance.status == "complete" diff --git 
a/tests/spiffworkflow_backend/unit/test_permissions.py b/tests/spiffworkflow_backend/unit/test_permissions.py new file mode 100644 index 00000000..b3a31989 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_permissions.py @@ -0,0 +1,131 @@ +"""Test Permissions.""" +from flask.app import Flask +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.group import GroupModel +from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +from spiffworkflow_backend.models.permission_target import PermissionTargetModel +from spiffworkflow_backend.models.principal import PrincipalModel +from spiffworkflow_backend.services.authorization_service import AuthorizationService +from spiffworkflow_backend.services.user_service import UserService + + +# we think we can get the list of roles for a user. +# spiff needs a way to determine what each role allows. 
+ +# user role allows list and read of all process groups/models +# super-admin role allows create, update, and delete of all process groups/models +# * super-admins users maybe conventionally get the user role as well +# finance-admin role allows create, update, and delete of all models under the finance group +class TestPermissions(BaseTest): + """TestPermissions.""" + + def test_user_can_be_given_permission_to_administer_process_group( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_group_id = "group-a" + load_test_spec( + "timers_intermediate_catch_event", + process_group_id=process_group_id, + ) + dan = self.find_or_create_user() + principal = dan.principal + + permission_target = PermissionTargetModel(uri=f"/{process_group_id}") + db.session.add(permission_target) + db.session.commit() + + permission_assignment = PermissionAssignmentModel( + permission_target_id=permission_target.id, + principal_id=principal.id, + permission="delete", + grant_type="permit", + ) + db.session.add(permission_assignment) + db.session.commit() + + def test_group_a_admin_needs_to_stay_away_from_group_b( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_group_a_admin_needs_to_stay_away_from_group_b.""" + process_group_ids = ["group-a", "group-b"] + process_group_a_id = process_group_ids[0] + process_group_b_id = process_group_ids[1] + for process_group_id in process_group_ids: + load_test_spec( + "timers_intermediate_catch_event", + process_group_id=process_group_id, + ) + group_a_admin = self.find_or_create_user() + + permission_target = PermissionTargetModel(uri=f"/{process_group_a_id}") + db.session.add(permission_target) + db.session.commit() + + permission_assignment = PermissionAssignmentModel( + permission_target_id=permission_target.id, + principal_id=group_a_admin.principal.id, + permission="update", + grant_type="permit", + ) + 
db.session.add(permission_assignment) + db.session.commit() + + has_permission_to_a = AuthorizationService.user_has_permission( + user=group_a_admin, + permission="update", + target_uri=f"/{process_group_a_id}", + ) + assert has_permission_to_a is True + has_permission_to_b = AuthorizationService.user_has_permission( + user=group_a_admin, + permission="update", + target_uri=f"/{process_group_b_id}", + ) + assert has_permission_to_b is False + + def test_user_can_be_granted_access_through_a_group( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_group_a_admin_needs_to_stay_away_from_group_b.""" + process_group_ids = ["group-a", "group-b"] + process_group_a_id = process_group_ids[0] + process_group_ids[1] + for process_group_id in process_group_ids: + load_test_spec( + "timers_intermediate_catch_event", + process_group_id=process_group_id, + ) + user = self.find_or_create_user() + group = GroupModel(identifier="groupA") + db.session.add(group) + db.session.commit() + + UserService.add_user_to_group(user, group) + + permission_target = PermissionTargetModel(uri=f"/{process_group_a_id}") + db.session.add(permission_target) + db.session.commit() + + principal = PrincipalModel(group_id=group.id) + db.session.add(principal) + db.session.commit() + + permission_assignment = PermissionAssignmentModel( + permission_target_id=permission_target.id, + principal_id=group.principal.id, + permission="update", + grant_type="permit", + ) + db.session.add(permission_assignment) + db.session.commit() + + has_permission_to_a = AuthorizationService.user_has_permission( + user=user, + permission="update", + target_uri=f"/{process_group_a_id}", + ) + assert has_permission_to_a is True diff --git a/tests/spiffworkflow_backend/unit/test_process_group.py b/tests/spiffworkflow_backend/unit/test_process_group.py new file mode 100644 index 00000000..6c3ad0ad --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_process_group.py @@ -0,0 +1,16 @@ +"""Process 
Model.""" +from flask.app import Flask + +from spiffworkflow_backend.models.process_group import ProcessGroup +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +def test_there_is_at_least_one_group_after_we_create_one( + app: Flask, with_db_and_bpmn_file_cleanup: None +) -> None: + """Test_there_is_at_least_one_group_after_we_create_one.""" + process_model_service = ProcessModelService() + process_group = ProcessGroup(id="hey", display_name="sure") + process_model_service.add_process_group(process_group) + process_groups = ProcessModelService().get_process_groups() + assert len(process_groups) > 0 diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_processor.py b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py new file mode 100644 index 00000000..009239f7 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_process_instance_processor.py @@ -0,0 +1,36 @@ +"""Test_process_instance_processor.""" +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestProcessInstanceProcessor(BaseTest): + """TestProcessInstanceProcessor.""" + + # it's not totally obvious we want to keep this test/file + def test_script_engine_takes_data_and_returns_expected_results( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_script_engine_takes_data_and_returns_expected_results.""" + script_engine = ProcessInstanceProcessor._script_engine + + result = script_engine._evaluate("a", {"a": 1}) + assert result == 1 + + def test_script_engine_can_use_custom_scripts( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_script_engine_takes_data_and_returns_expected_results.""" + script_engine = ProcessInstanceProcessor._script_engine + result = script_engine._evaluate("fact_service(type='norris')", 
{}) + assert ( + result + == "Chuck Norris doesn’t read books. He stares them down until he gets the information he wants." + ) diff --git a/tests/spiffworkflow_backend/unit/test_process_instance_report.py b/tests/spiffworkflow_backend/unit/test_process_instance_report.py new file mode 100644 index 00000000..acfac138 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_process_instance_report.py @@ -0,0 +1,144 @@ +"""Test Permissions.""" +from typing import Optional + +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest + +from spiffworkflow_backend.models.process_instance import ProcessInstanceModel +from spiffworkflow_backend.models.process_instance_report import ( + ProcessInstanceReportModel, +) + +# from tests.spiffworkflow_backend.helpers.test_data import find_or_create_process_group +# from spiffworkflow_backend.models.permission_assignment import PermissionAssignmentModel +# from spiffworkflow_backend.models.permission_target import PermissionTargetModel + + +def test_generate_report_with_filter_by( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = { + "filter_by": [ + {"field_name": "grade_level", "operator": "equals", "field_value": 2} + ] + } + results = do_report_with_metadata_and_instances(report_metadata, process_instances) + assert len(results) == 2 + names = get_names_from_results(results) + assert names == ["kay", "jay"] + + +def test_generate_report_with_filter_by_with_variable_substitution( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = { + 
"filter_by": [ + { + "field_name": "grade_level", + "operator": "equals", + "field_value": "{{grade_level}}", + } + ] + } + results = do_report_with_metadata_and_instances( + report_metadata, process_instances, {"grade_level": 1} + ) + assert len(results) == 1 + names = get_names_from_results(results) + assert names == ["ray"] + + +def test_generate_report_with_order_by_and_one_field( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = {"order_by": ["test_score"]} + results = do_report_with_metadata_and_instances(report_metadata, process_instances) + assert len(results) == 3 + names = get_names_from_results(results) + assert names == ["jay", "ray", "kay"] + + +def test_generate_report_with_order_by_and_two_fields( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = {"order_by": ["grade_level", "test_score"]} + results = do_report_with_metadata_and_instances(report_metadata, process_instances) + assert len(results) == 3 + names = get_names_from_results(results) + assert names == ["ray", "jay", "kay"] + + +def test_generate_report_with_order_by_desc( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = {"order_by": ["grade_level", "-test_score"]} + results = do_report_with_metadata_and_instances(report_metadata, process_instances) + assert len(results) == 3 + names = 
get_names_from_results(results) + assert names == ["ray", "kay", "jay"] + + +def test_generate_report_with_columns( + app: Flask, + with_db_and_bpmn_file_cleanup: None, + setup_process_instances_for_reports: list[ProcessInstanceModel], +) -> None: + """Test_user_can_be_given_permission_to_administer_process_group.""" + process_instances = setup_process_instances_for_reports + report_metadata = { + "columns": [ + {"Header": "Name", "accessor": "name"}, + {"Header": "Status", "accessor": "status"}, + ], + "order_by": ["test_score"], + "filter_by": [ + {"field_name": "grade_level", "operator": "equals", "field_value": 1} + ], + } + results = do_report_with_metadata_and_instances(report_metadata, process_instances) + assert len(results) == 1 + assert results == [{"name": "ray", "status": "complete"}] + + +def do_report_with_metadata_and_instances( + report_metadata: dict, + process_instances: list[ProcessInstanceModel], + substitution_variables: Optional[dict] = None, +) -> list[dict]: + """Do_report_with_metadata_and_instances.""" + process_instance_report = ProcessInstanceReportModel.create_with_attributes( + identifier="sure", + process_group_identifier=process_instances[0].process_group_identifier, + process_model_identifier=process_instances[0].process_model_identifier, + report_metadata=report_metadata, + user=BaseTest.find_or_create_user(), + ) + + return process_instance_report.generate_report( + process_instances, substitution_variables + )["results"] + + +def get_names_from_results(results: list[dict]) -> list[str]: + """Get_names_from_results.""" + return [result["name"] for result in results] diff --git a/tests/spiffworkflow_backend/unit/test_process_model.py b/tests/spiffworkflow_backend/unit/test_process_model.py new file mode 100644 index 00000000..fe3affd2 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_process_model.py @@ -0,0 +1,107 @@ +"""Process Model.""" +from flask.app import Flask +from flask_bpmn.models.db import db +from 
tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup +from spiffworkflow_backend.models.process_model import ProcessModelInfo +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestProcessModel(BaseTest): + """TestProcessModel.""" + + def test_initializes_files_as_empty_array(self) -> None: + """Test_initializes_files_as_empty_array.""" + process_model_one = self.create_test_process_model( + id="model_one", display_name="Model One" + ) + assert process_model_one.files == [] + assert process_model_one.libraries == [] + + def test_can_run_process_model_with_call_activities_when_in_same_process_model_directory( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_run_process_model_with_call_activities.""" + process_model = load_test_spec( + "call_activity_test", + process_model_source_directory="call_activity_same_directory", + ) + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" + + def test_can_run_process_model_with_call_activities_when_not_in_same_directory( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_run_process_model_with_call_activities.""" + process_model = load_test_spec( + "call_activity_nested", + process_model_source_directory="call_activity_nested", + bpmn_file_name="call_activity_nested", + ) + + bpmn_file_names = [ + "call_activity_level_2b", + "call_activity_level_2", + "call_activity_level_3", + ] + for bpmn_file_name in bpmn_file_names: + load_test_spec( + bpmn_file_name, + process_model_source_directory="call_activity_nested", + bpmn_file_name=bpmn_file_name, + ) + 
process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" + + def test_can_run_process_model_with_call_activities_when_process_identifier_is_not_in_database( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_run_process_model_with_call_activities.""" + process_model = load_test_spec( + "call_activity_nested", + process_model_source_directory="call_activity_nested", + bpmn_file_name="call_activity_nested", + ) + + bpmn_file_names = [ + "call_activity_level_2b", + "call_activity_level_2", + "call_activity_level_3", + ] + for bpmn_file_name in bpmn_file_names: + load_test_spec( + bpmn_file_name, + process_model_source_directory="call_activity_nested", + bpmn_file_name=bpmn_file_name, + ) + process_instance = self.create_process_instance_from_process_model( + process_model + ) + + # delete all of the id lookup items to force to processor to find the correct + # process model when running the process + db.session.query(BpmnProcessIdLookup).delete() + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) + assert process_instance.status == "complete" + + def create_test_process_model(self, id: str, display_name: str) -> ProcessModelInfo: + """Create_test_process_model.""" + return ProcessModelInfo( + id=id, + display_name=display_name, + description=display_name, + ) diff --git a/tests/spiffworkflow_backend/unit/test_process_model_service.py b/tests/spiffworkflow_backend/unit/test_process_model_service.py new file mode 100644 index 00000000..535dc03d --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_process_model_service.py @@ -0,0 +1,25 @@ +"""Test_process_model_service.""" +from flask import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data 
import load_test_spec + +from spiffworkflow_backend.services.process_model_service import ProcessModelService + + +class TestProcessModelService(BaseTest): + """TestProcessModelService.""" + + def test_can_update_specified_attributes( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_update_specified_attributes.""" + process_model = load_test_spec("hello_world") + assert process_model.display_name == "hello_world" + + primary_process_id = process_model.primary_process_id + assert primary_process_id == "Process_HelloWorld" + + ProcessModelService().update_spec(process_model, {"display_name": "new_name"}) + + assert process_model.display_name == "new_name" + assert process_model.primary_process_id == primary_process_id diff --git a/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py b/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py new file mode 100644 index 00000000..9b6f1bb3 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_restricted_script_engine.py @@ -0,0 +1,58 @@ +"""Test_various_bpmn_constructs.""" +import pytest +from flask.app import Flask +from flask_bpmn.api.api_error import ApiError +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestOpenFile(BaseTest): + """TestVariousBpmnConstructs.""" + + def test_dot_notation( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_form_data_conversion_to_dot_dict.""" + process_model = load_test_spec( + "dangerous", + bpmn_file_name="read_etc_passwd.bpmn", + process_model_source_directory="dangerous-scripts", + ) + self.find_or_create_user() + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + + with 
pytest.raises(ApiError) as exception: + processor.do_engine_steps(save=True) + assert "name 'open' is not defined" in str(exception.value) + + +class TestImportModule(BaseTest): + """TestVariousBpmnConstructs.""" + + def test_dot_notation( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_form_data_conversion_to_dot_dict.""" + process_model = load_test_spec( + "dangerous", + bpmn_file_name="read_env.bpmn", + process_model_source_directory="dangerous-scripts", + ) + self.find_or_create_user() + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + + with pytest.raises(ApiError) as exception: + processor.do_engine_steps(save=True) + assert "Import not allowed: os" in str(exception.value) diff --git a/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py b/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py new file mode 100644 index 00000000..69c54851 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_script_unit_test_runner.py @@ -0,0 +1,138 @@ +"""Test Permissions.""" +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) +from spiffworkflow_backend.services.script_unit_test_runner import PythonScriptContext +from spiffworkflow_backend.services.script_unit_test_runner import ScriptUnitTestRunner + + +class TestScriptUnitTestRunner(BaseTest): + """TestScriptUnitTestRunner.""" + + def test_takes_data_and_returns_expected_result( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_takes_data_and_returns_expected_result.""" + app.config["THREAD_LOCAL_DATA"].process_instance_id = None + + process_group_id = "test_logging_spiff_logger" + process_model_id = 
"simple_script" + load_test_spec(process_model_id, process_group_id=process_group_id) + bpmn_process_instance = ( + ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_id, process_group_id + ) + ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( + "Activity_RunScript", bpmn_process_instance + ) + assert task is not None + + input_context: PythonScriptContext = {"a": 1} + expected_output_context: PythonScriptContext = {"a": 2} + script = "a = 2" + + unit_test_result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + script, input_context, expected_output_context + ) + + assert unit_test_result.result + assert unit_test_result.context == {"a": 2} + + def test_fails_when_expected_output_does_not_match_actual_output( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_fails_when_expected_output_does_not_match_actual_output.""" + app.config["THREAD_LOCAL_DATA"].process_instance_id = None + + process_group_id = "test_logging_spiff_logger" + process_model_id = "simple_script" + load_test_spec(process_model_id, process_group_id=process_group_id) + bpmn_process_instance = ( + ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_id, process_group_id + ) + ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( + "Activity_RunScript", bpmn_process_instance + ) + assert task is not None + + input_context: PythonScriptContext = {"a": 1} + expected_output_context: PythonScriptContext = {"a": 2, "b": 3} + script = "a = 2" + + unit_test_result = ScriptUnitTestRunner.run_with_script_and_pre_post_contexts( + script, input_context, expected_output_context + ) + + assert unit_test_result.result is not True + assert unit_test_result.context == {"a": 2} + + def test_script_with_unit_tests_when_hey_is_passed_in( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_script_with_unit_tests_when_hey_is_passed_in.""" + 
app.config["THREAD_LOCAL_DATA"].process_instance_id = None + + process_group_id = "script_with_unit_tests" + process_model_id = "script_with_unit_tests" + load_test_spec(process_model_id, process_group_id=process_group_id) + bpmn_process_instance = ( + ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_id, process_group_id + ) + ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( + "script_with_unit_test_id", bpmn_process_instance + ) + assert task is not None + + expected_output_context: PythonScriptContext = {"hey": True} + + unit_test_result = ScriptUnitTestRunner.run_test( + task, "sets_hey_to_true_if_hey_is_false" + ) + + assert unit_test_result.result + assert unit_test_result.context == expected_output_context + + def test_script_with_unit_tests_when_hey_is_not_passed_in( + self, + app: Flask, + with_db_and_bpmn_file_cleanup: None, + ) -> None: + """Test_script_with_unit_tests_when_hey_is_not_passed_in.""" + app.config["THREAD_LOCAL_DATA"].process_instance_id = None + + process_group_id = "script_with_unit_tests" + process_model_id = "script_with_unit_tests" + load_test_spec(process_model_id, process_group_id=process_group_id) + bpmn_process_instance = ( + ProcessInstanceProcessor.get_bpmn_process_instance_from_process_model( + process_model_id, process_group_id + ) + ) + task = ProcessInstanceProcessor.get_task_by_bpmn_identifier( + "script_with_unit_test_id", bpmn_process_instance + ) + assert task is not None + + expected_output_context: PythonScriptContext = {"something_else": True} + + unit_test_result = ScriptUnitTestRunner.run_test( + task, "sets_something_else_if_no_hey" + ) + + assert unit_test_result.result + assert unit_test_result.context == expected_output_context diff --git a/tests/spiffworkflow_backend/unit/test_spec_file_service.py b/tests/spiffworkflow_backend/unit/test_spec_file_service.py new file mode 100644 index 00000000..fd882e71 --- /dev/null +++ 
b/tests/spiffworkflow_backend/unit/test_spec_file_service.py @@ -0,0 +1,97 @@ +"""Test_message_service.""" +import os + +import pytest +from flask import Flask +from flask_bpmn.api.api_error import ApiError +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.bpmn_process_id_lookup import BpmnProcessIdLookup + + +class TestSpecFileService(BaseTest): + """TestSpecFileService.""" + + call_activity_nested_relative_file_path = os.path.join( + "test_process_group_id", "call_activity_nested", "call_activity_nested.bpmn" + ) + + def test_can_store_process_ids_for_lookup( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_can_store_process_ids_for_lookup.""" + load_test_spec( + "call_activity_nested", + process_model_source_directory="call_activity_nested", + bpmn_file_name="call_activity_nested", + ) + bpmn_process_id_lookups = BpmnProcessIdLookup.query.all() + assert len(bpmn_process_id_lookups) == 1 + assert bpmn_process_id_lookups[0].bpmn_process_identifier == "Level1" + assert ( + bpmn_process_id_lookups[0].bpmn_file_relative_path + == self.call_activity_nested_relative_file_path + ) + + def test_fails_to_save_duplicate_process_id( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_fails_to_save_duplicate_process_id.""" + bpmn_process_identifier = "Level1" + load_test_spec( + "call_activity_nested", + process_model_source_directory="call_activity_nested", + bpmn_file_name="call_activity_nested", + ) + bpmn_process_id_lookups = BpmnProcessIdLookup.query.all() + assert len(bpmn_process_id_lookups) == 1 + assert ( + bpmn_process_id_lookups[0].bpmn_process_identifier + == bpmn_process_identifier + ) + assert ( + bpmn_process_id_lookups[0].bpmn_file_relative_path + == self.call_activity_nested_relative_file_path + ) + with pytest.raises(ApiError) as 
exception: + load_test_spec( + "call_activity_nested_duplicate", + process_model_source_directory="call_activity_duplicate", + bpmn_file_name="call_activity_nested_duplicate", + ) + assert f"Process id ({bpmn_process_identifier}) has already been used" in str( + exception.value + ) + + def test_updates_relative_file_path_when_appropriate( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_updates_relative_file_path_when_appropriate.""" + bpmn_process_identifier = "Level1" + bpmn_file_relative_path = os.path.join( + "test_process_group_id", "call_activity_nested", "new_bpmn_file.bpmn" + ) + process_id_lookup = BpmnProcessIdLookup( + bpmn_process_identifier=bpmn_process_identifier, + bpmn_file_relative_path=bpmn_file_relative_path, + ) + db.session.add(process_id_lookup) + db.session.commit() + + load_test_spec( + "call_activity_nested", + process_model_source_directory="call_activity_nested", + bpmn_file_name="call_activity_nested", + ) + bpmn_process_id_lookups = BpmnProcessIdLookup.query.all() + assert len(bpmn_process_id_lookups) == 1 + assert ( + bpmn_process_id_lookups[0].bpmn_process_identifier + == bpmn_process_identifier + ) + assert ( + bpmn_process_id_lookups[0].bpmn_file_relative_path + == self.call_activity_nested_relative_file_path + ) diff --git a/tests/spiffworkflow_backend/unit/test_spiff_logging.py b/tests/spiffworkflow_backend/unit/test_spiff_logging.py new file mode 100644 index 00000000..c4a5984f --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_spiff_logging.py @@ -0,0 +1,45 @@ +"""Process Model.""" +from decimal import Decimal + +from flask.app import Flask +from flask_bpmn.models.db import db +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.models.spiff_logging import SpiffLoggingModel + + +class TestSpiffLogging(BaseTest): + """TestSpiffLogging.""" + + def 
test_timestamps_are_stored_correctly( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_timestamps_are_stored_correctly.""" + process_model = load_test_spec( + "call_activity_test", + process_model_source_directory="call_activity_same_directory", + ) + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + bpmn_process_identifier = "test_process_identifier" + spiff_task_guid = "test_spiff_task_guid" + bpmn_task_identifier = "test_bpmn_task_identifier" + timestamp = 1663250624.664887 # actual timestamp from spiff logs + message = "test_message" + spiff_log = SpiffLoggingModel( + process_instance_id=process_instance.id, + bpmn_process_identifier=bpmn_process_identifier, + spiff_task_guid=spiff_task_guid, + bpmn_task_identifier=bpmn_task_identifier, + message=message, + timestamp=timestamp, + ) + assert spiff_log.timestamp == timestamp + + db.session.add(spiff_log) + db.session.commit() + + assert spiff_log.timestamp == Decimal(str(timestamp)) diff --git a/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py new file mode 100644 index 00000000..c97803d8 --- /dev/null +++ b/tests/spiffworkflow_backend/unit/test_various_bpmn_constructs.py @@ -0,0 +1,27 @@ +"""Test_various_bpmn_constructs.""" +from flask.app import Flask +from tests.spiffworkflow_backend.helpers.base_test import BaseTest +from tests.spiffworkflow_backend.helpers.test_data import load_test_spec + +from spiffworkflow_backend.services.process_instance_processor import ( + ProcessInstanceProcessor, +) + + +class TestVariousBpmnConstructs(BaseTest): + """TestVariousBpmnConstructs.""" + + def test_running_process_with_timer_intermediate_catch_event( + self, app: Flask, with_db_and_bpmn_file_cleanup: None + ) -> None: + """Test_running_process_with_timer_intermediate_catch_event.""" + process_model = load_test_spec( + "timers_intermediate_catch_event", + 
process_model_source_directory="timer_intermediate_catch_event", + ) + + process_instance = self.create_process_instance_from_process_model( + process_model + ) + processor = ProcessInstanceProcessor(process_instance) + processor.do_engine_steps(save=True) diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 00000000..7917a970 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,17 @@ +"""Test cases for the __main__ module.""" +import pytest +from click.testing import CliRunner + +from spiffworkflow_backend import __main__ + + +@pytest.fixture +def runner() -> CliRunner: + """Fixture for invoking command-line interfaces.""" + return CliRunner() + + +def test_main_succeeds(runner: CliRunner) -> None: + """It exits with a status code of zero.""" + result = runner.invoke(__main__.main) + assert result.exit_code == 0 diff --git a/wsgi.py b/wsgi.py new file mode 100644 index 00000000..060a0fd7 --- /dev/null +++ b/wsgi.py @@ -0,0 +1,13 @@ +"""This is my docstring.""" +import os + +from spiffworkflow_backend import create_app +from spiffworkflow_backend.services.acceptance_test_fixtures import load_fixtures + +app = create_app() + +# this is in here because when we put it in the create_app function, +# it also loaded when we were running migrations, which resulted in a chicken/egg thing. +if os.environ.get("SPIFFWORKFLOW_BACKEND_LOAD_FIXTURE_DATA") == "true": + with app.app_context(): + load_fixtures()