Merge remote-tracking branch 'origin/main' into feature/more-secret-secrets

This commit is contained in:
burnettk 2023-05-30 08:38:15 -04:00
commit d940339229
11 changed files with 5819 additions and 70 deletions

View File

@ -84,7 +84,7 @@ jobs:
uses: actions/checkout@v3.3.0 uses: actions/checkout@v3.3.0
- name: Set up Python ${{ matrix.python }} - name: Set up Python ${{ matrix.python }}
uses: actions/setup-python@v4.6.0 uses: actions/setup-python@v4.6.1
with: with:
python-version: ${{ matrix.python }} python-version: ${{ matrix.python }}
@ -195,7 +195,7 @@ jobs:
- name: Check out the repository - name: Check out the repository
uses: actions/checkout@v3.3.0 uses: actions/checkout@v3.3.0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4.6.0 uses: actions/setup-python@v4.6.1
with: with:
python-version: "3.11" python-version: "3.11"
- name: Install Poetry - name: Install Poetry
@ -236,7 +236,7 @@ jobs:
fetch-depth: 0 fetch-depth: 0
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4.6.0 uses: actions/setup-python@v4.6.1
with: with:
python-version: "3.11" python-version: "3.11"
@ -257,7 +257,7 @@ jobs:
nox --version nox --version
- name: Download coverage data - name: Download coverage data
uses: actions/download-artifact@v3.0.1 uses: actions/download-artifact@v3.0.2
with: with:
name: coverage-data name: coverage-data
# this action doesn't seem to respect working-directory so include working-directory value in path # this action doesn't seem to respect working-directory so include working-directory value in path
@ -273,7 +273,7 @@ jobs:
nox --force-color --session=coverage -- xml nox --force-color --session=coverage -- xml
- name: Upload coverage report - name: Upload coverage report
uses: codecov/codecov-action@v3.1.3 uses: codecov/codecov-action@v3.1.4
- name: SonarCloud Scan - name: SonarCloud Scan
uses: sonarsource/sonarcloud-github-action@v1.9 uses: sonarsource/sonarcloud-github-action@v1.9

View File

@ -30,29 +30,17 @@ function get_python_dirs() {
(git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo '' (git ls-tree -r HEAD --name-only | grep -E '\.py$' | awk -F '/' '{print $1}' | sort | uniq | grep -v '\.' | grep -Ev '^(bin|migrations)$') || echo ''
} }
function run_autoflake() { function run_autofixers() {
# checking command -v autoflake8 is not good enough, since the asdf shim may be installed, which will make command -v succeed, # checking command -v ruff is not good enough, since the asdf shim may be installed, which will make command -v succeed,
# but autoflake8 may not have been pip installed inside the correct version of python. # but ruff may not have been pip installed inside the correct version of python.
if ! autoflake8 --help >/dev/null ; then if ! ruff --help >/dev/null 2>&1; then
pip install autoflake8 pip install ruff
asdf reshim python
fi
if ! autoflake --help >/dev/null ; then
pip install autoflake
asdf reshim python
fi
if ! autopep8 --help >/dev/null ; then
pip install autopep8
asdf reshim python asdf reshim python
fi fi
python_dirs=$(get_python_dirs) python_dirs=$(get_python_dirs)
python_files=$(find $python_dirs -type f -name "*.py" ! -name '.null-ls*' ! -name '_null-ls*') python_files=$(find $python_dirs -type f -name "*.py" ! -name '.null-ls*' ! -name '_null-ls*')
autoflake8 --in-place --remove-unused-variables --remove-duplicate-keys --expand-star-imports --exit-zero-even-if-changed $python_files ruff --fix $python_files
autoflake --in-place --remove-all-unused-imports $python_files
autopep8 --in-place $python_files
} }
function run_pre_commmit() { function run_pre_commmit() {
@ -71,7 +59,7 @@ done
for python_project in "${python_projects[@]}" ; do for python_project in "${python_projects[@]}" ; do
if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then if [[ "$subcommand" != "pre" ]] || [[ -n "$(git status --porcelain "$python_project")" ]]; then
pushd "$python_project" pushd "$python_project"
run_autoflake || run_autoflake run_autofixers || run_autofixers
popd popd
fi fi
done done

View File

@ -186,8 +186,7 @@ Once you have successfully signed in, navigate to the process section. This sect
![Untitled](images/Untitled_16.png) ![Untitled](images/Untitled_16.png)
> **Step 2: Find and click on the process** ### Step 2: Find and click on the process
>
You can either search for a process model using the search bar or navigate through displayed processes to find the process model. You can either search for a process model using the search bar or navigate through displayed processes to find the process model.
@ -259,38 +258,4 @@ A prompt will appear, allowing you to provide a name for the identifier associat
If you want to filter by ID, go to the "Find by Id" section of the page. Enter the ID and click "Submit". The system will show you the process instance with the corresponding ID. If you want to filter by ID, go to the "Find by Id" section of the page. Enter the ID and click "Submit". The system will show you the process instance with the corresponding ID.
You can now view the process instances that you filtered for and take appropriate action based on their status. This can help you manage your workflows more efficiently and keep track of the progress of various process instances. You can now view the process instances that you filtered for and take appropriate action based on their status. This can help you manage your workflows more efficiently and keep track of the progress of various process instances.
---
## 🗳️ How to request additional permissions
As a user, you may be required to access certain process groups or start process models in order to perform desired actions. However, you may not have the necessary access or permissions to do so. In this case, you will need to request access or additional permissions from the admins - PPG team.
By following these steps, you can submit a request and seek the necessary permissions to perform the desired actions.
### Step 1: Navigate & Search
Once you are signed in, navigate to the "**Process**" section. Use the search bar or browse through the available process models until you find "**Request Access**”. Click on the process model to open it.
![Untitled](images/Untitled_29.png)
You can also access the "Request Access" process from the **Home** section by clicking on the "**Start New +**" button. This will open the "Processes I can start" section, where you can find the "Request Access" process.
![Untitled](images/Untitled_30.png)
### Step 2: Start the Process
Once the "**Request Access**" process model is open, initiate the process by clicking on the "Start" button.
![Untitled](images/Untitled_31.png)
### Step 3: Provide Request Details & Submit
A task will be presented to capture the necessary information and details for the special permissions request. Find the "**Description**" text field and enter the relevant information and details about your request.
Ensure that all required details have been included such as Process name, Process group name, and type of permissions you need. Click on the "**Submit**" button or similar action to submit your access or special permissions request.
![Untitled](images/Untitled_32.png)
By following these steps, you can request the special permissions needed to carry out your tasks effectively.

View File

@ -171,7 +171,7 @@ select = [
"E", # pycodestyle error "E", # pycodestyle error
# "ERA", # eradicate # "ERA", # eradicate
"F", # pyflakes "F", # pyflakes
# "N", # pep8-naming "N", # pep8-naming
# "PL", # pylint # "PL", # pylint
# "S", # flake8-bandit # "S", # flake8-bandit
"UP", # pyupgrade "UP", # pyupgrade

View File

@ -2,6 +2,7 @@
import os import os
import threading import threading
import uuid import uuid
from urllib.parse import urlparse
from flask.app import Flask from flask.app import Flask
from werkzeug.utils import ImportStringError from werkzeug.utils import ImportStringError
@ -78,6 +79,43 @@ def _set_up_tenant_specific_fields_as_list_of_strings(app: Flask) -> None:
) )
# see the message in the ConfigurationError below for why we are checking this.
# we really do not want this to raise when there is not a problem, so there are lots of return statements littered throughout.
def _check_for_incompatible_frontend_and_backend_urls(app: Flask) -> None:
if not app.config.get("SPIFFWORKFLOW_BACKEND_CHECK_FRONTEND_AND_BACKEND_URL_COMPATIBILITY"):
return
frontend_url = app.config.get("SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND")
backend_url = app.config.get("SPIFFWORKFLOW_BACKEND_URL")
if frontend_url is None or backend_url is None:
return
if frontend_url == "" or backend_url == "":
return
if not frontend_url.startswith("https://") or not backend_url.startswith("https://"):
return
frontend_url_parsed = urlparse(frontend_url)
frontend_domain = frontend_url_parsed.netloc
backend_url_parsed = urlparse(backend_url)
backend_domain = backend_url_parsed.netloc
if frontend_domain == backend_domain:
# probably backend and frontend are using different paths.
# routing by path will work just fine and won't cause any problems with setting cookies
return
if backend_domain.endswith(frontend_domain):
return
raise ConfigurationError(
"SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND and SPIFFWORKFLOW_BACKEND_URL are incompatible. We need backend to set"
" cookies for frontend, so they need to be on the same domain. A common setup is to have frontend on"
" example.com and backend on api.example.com. If you do not need this functionality, you can avoid this check"
" by setting environment variable SPIFFWORKFLOW_BACKEND_CHECK_FRONTEND_AND_BACKEND_URL_COMPATIBILITY=false"
)
def setup_config(app: Flask) -> None: def setup_config(app: Flask) -> None:
"""Setup_config.""" """Setup_config."""
# ensure the instance folder exists # ensure the instance folder exists
@ -144,3 +182,4 @@ def setup_config(app: Flask) -> None:
thread_local_data = threading.local() thread_local_data = threading.local()
app.config["THREAD_LOCAL_DATA"] = thread_local_data app.config["THREAD_LOCAL_DATA"] = thread_local_data
_set_up_tenant_specific_fields_as_list_of_strings(app) _set_up_tenant_specific_fields_as_list_of_strings(app)
_check_for_incompatible_frontend_and_backend_urls(app)

View File

@ -47,6 +47,9 @@ SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND = environ.get(
"SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001" "SPIFFWORKFLOW_BACKEND_URL_FOR_FRONTEND", default="http://localhost:7001"
) )
SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000") SPIFFWORKFLOW_BACKEND_URL = environ.get("SPIFFWORKFLOW_BACKEND_URL", default="http://localhost:7000")
SPIFFWORKFLOW_BACKEND_CHECK_FRONTEND_AND_BACKEND_URL_COMPATIBILITY = (
environ.get("SPIFFWORKFLOW_BACKEND_CHECK_FRONTEND_AND_BACKEND_URL_COMPATIBILITY", default="true") == "true"
)
# service task connector proxy # service task connector proxy
SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get( SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL = environ.get(
"SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004" "SPIFFWORKFLOW_BACKEND_CONNECTOR_PROXY_URL", default="http://localhost:7004"

View File

@ -46,12 +46,14 @@ class JsonFormatter(logging.Formatter):
self.default_msec_format = msec_format self.default_msec_format = msec_format
self.datefmt = None self.datefmt = None
def usesTime(self) -> bool: def usesTime(self) -> bool: # noqa: N802, this is overriding a method from python's stdlib
"""Overwritten to look for the attribute in the format dict values instead of the fmt string.""" """Overwritten to look for the attribute in the format dict values instead of the fmt string."""
return "asctime" in self.fmt_dict.values() return "asctime" in self.fmt_dict.values()
# we are overriding a method that returns a string and returning a dict, hence the Any # we are overriding a method that returns a string and returning a dict, hence the Any
def formatMessage(self, record: logging.LogRecord) -> Any: def formatMessage( # noqa: N802, this is overriding a method from python's stdlib
self, record: logging.LogRecord
) -> Any:
"""Overwritten to return a dictionary of the relevant LogRecord attributes instead of a string. """Overwritten to return a dictionary of the relevant LogRecord attributes instead of a string.
KeyError is raised if an unknown attribute is provided in the fmt_dict. KeyError is raised if an unknown attribute is provided in the fmt_dict.

View File

@ -26,11 +26,11 @@ class NoTestCasesFoundError(Exception):
pass pass
class MissingInputTaskData(Exception): class MissingInputTaskDataError(Exception):
pass pass
class UnsupporterRunnerDelegateGiven(Exception): class UnsupporterRunnerDelegateGivenError(Exception):
pass pass
@ -236,7 +236,7 @@ class ProcessModelTestRunner:
self.test_case_identifier = test_case_identifier self.test_case_identifier = test_case_identifier
if not issubclass(process_model_test_runner_delegate_class, ProcessModelTestRunnerDelegate): if not issubclass(process_model_test_runner_delegate_class, ProcessModelTestRunnerDelegate):
raise UnsupporterRunnerDelegateGiven( raise UnsupporterRunnerDelegateGivenError(
"Process model test runner delegate must inherit from ProcessModelTestRunnerDelegate. Given" "Process model test runner delegate must inherit from ProcessModelTestRunnerDelegate. Given"
f" class '{process_model_test_runner_delegate_class}' does not" f" class '{process_model_test_runner_delegate_class}' does not"
) )
@ -342,7 +342,7 @@ class ProcessModelTestRunner:
task_data_length = len(test_case_task_properties["data"]) task_data_length = len(test_case_task_properties["data"])
test_case_index = self.task_data_index[test_case_task_key] test_case_index = self.task_data_index[test_case_task_key]
if task_data_length <= test_case_index: if task_data_length <= test_case_index:
raise MissingInputTaskData( raise MissingInputTaskDataError(
f"Missing input task data for task: {test_case_task_key}. " f"Missing input task data for task: {test_case_task_key}. "
f"Only {task_data_length} given in the json but task was called {test_case_index + 1} times" f"Only {task_data_length} given in the json but task was called {test_case_index + 1} times"
) )

View File

@ -5,7 +5,7 @@ from flask import Flask
from flask import current_app from flask import current_app
from spiffworkflow_backend.services.process_model_test_runner_service import NoTestCasesFoundError from spiffworkflow_backend.services.process_model_test_runner_service import NoTestCasesFoundError
from spiffworkflow_backend.services.process_model_test_runner_service import ProcessModelTestRunner from spiffworkflow_backend.services.process_model_test_runner_service import ProcessModelTestRunner
from spiffworkflow_backend.services.process_model_test_runner_service import UnsupporterRunnerDelegateGiven from spiffworkflow_backend.services.process_model_test_runner_service import UnsupporterRunnerDelegateGivenError
from tests.spiffworkflow_backend.helpers.base_test import BaseTest from tests.spiffworkflow_backend.helpers.base_test import BaseTest
@ -34,7 +34,7 @@ class TestProcessModelTestRunner(BaseTest):
app: Flask, app: Flask,
with_db_and_bpmn_file_cleanup: None, with_db_and_bpmn_file_cleanup: None,
) -> None: ) -> None:
with pytest.raises(UnsupporterRunnerDelegateGiven): with pytest.raises(UnsupporterRunnerDelegateGivenError):
ProcessModelTestRunner( ProcessModelTestRunner(
os.path.join(self.root_path(), "DNE"), process_model_test_runner_delegate_class=NoTestCasesFoundError os.path.join(self.root_path(), "DNE"), process_model_test_runner_delegate_class=NoTestCasesFoundError
) )

File diff suppressed because it is too large Load Diff