Squashed 'flask-bpmn/' content from commit f3fc5394
git-subtree-dir: flask-bpmn
git-subtree-split: f3fc539423a3522d142146d2a039c0cd49badaf5
This commit is contained in: commit 79e2bb98b7
@@ -0,0 +1,12 @@
{
  "_template": "gh:cjolowicz/cookiecutter-hypermodern-python",
  "author": "Sartography",
  "development_status": "Development Status :: 1 - Planning",
  "email": "sartography@users.noreply.github.com",
  "friendly_name": "Flask Bpmn",
  "github_user": "sartography",
  "license": "MIT",
  "package_name": "flask_bpmn",
  "project_name": "flask-bpmn",
  "version": "0.0.1"
}
@@ -0,0 +1,12 @@
[flake8]
select = B,B9,C,D,DAR,E,F,N,RST,S,W
ignore = E203,E501,RST201,RST203,RST301,W503
max-line-length = 120
max-complexity = 30
docstring-convention = google
rst-roles = class,const,func,meth,mod,ref
rst-directives = deprecated

per-file-ignores =
    # prefer naming tests descriptively rather than forcing comments
    tests/*:S101,D103
@@ -0,0 +1 @@
* text=auto eol=lf
@@ -0,0 +1,18 @@
version: 2
updates:
  - package-ecosystem: github-actions
    directory: "/"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/.github/workflows"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/docs"
    schedule:
      interval: daily
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
@@ -0,0 +1,66 @@
---
# Label names are important, as they are used by Release Drafter to decide
# where to record them in the changelog and whether to skip them.
#
# The repository labels will be automatically configured using this file and
# the GitHub Action https://github.com/marketplace/actions/github-labeler.
- name: breaking
  description: Breaking Changes
  color: bfd4f2
- name: bug
  description: Something isn't working
  color: d73a4a
- name: build
  description: Build System and Dependencies
  color: bfdadc
- name: ci
  description: Continuous Integration
  color: 4a97d6
- name: dependencies
  description: Pull requests that update a dependency file
  color: 0366d6
- name: documentation
  description: Improvements or additions to documentation
  color: 0075ca
- name: duplicate
  description: This issue or pull request already exists
  color: cfd3d7
- name: enhancement
  description: New feature or request
  color: a2eeef
- name: github_actions
  description: Pull requests that update GitHub Actions code
  color: "000000"
- name: good first issue
  description: Good for newcomers
  color: 7057ff
- name: help wanted
  description: Extra attention is needed
  color: 008672
- name: invalid
  description: This doesn't seem right
  color: e4e669
- name: performance
  description: Performance
  color: "016175"
- name: python
  description: Pull requests that update Python code
  color: 2b67c6
- name: question
  description: Further information is requested
  color: d876e3
- name: refactoring
  description: Refactoring
  color: ef67c4
- name: removal
  description: Removals and Deprecations
  color: 9ae7ea
- name: style
  description: Style
  color: c120e5
- name: testing
  description: Testing
  color: b1fc6f
- name: wontfix
  description: This will not be worked on
  color: ffffff
@@ -0,0 +1,29 @@
categories:
  - title: ":boom: Breaking Changes"
    label: "breaking"
  - title: ":rocket: Features"
    label: "enhancement"
  - title: ":fire: Removals and Deprecations"
    label: "removal"
  - title: ":beetle: Fixes"
    label: "bug"
  - title: ":racehorse: Performance"
    label: "performance"
  - title: ":rotating_light: Testing"
    label: "testing"
  - title: ":construction_worker: Continuous Integration"
    label: "ci"
  - title: ":books: Documentation"
    label: "documentation"
  - title: ":hammer: Refactoring"
    label: "refactoring"
  - title: ":lipstick: Style"
    label: "style"
  - title: ":package: Dependencies"
    labels:
      - "dependencies"
      - "build"
template: |
  ## Changes

  $CHANGES
@@ -0,0 +1,72 @@
name: Dependabot auto-merge
on:
  workflow_run:
    workflows: ["Tests"]
    # completed does not mean success of Tests workflow. see below checking github.event.workflow_run.conclusion
    types:
      - completed

# workflow_call is used to indicate that a workflow can be called by another workflow. When a workflow is triggered with the workflow_call event, the event payload in the called workflow is the same event payload from the calling workflow. For more information see, "Reusing workflows."

# https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request
# maybe hook into this instead of workflow_run:
# on:
#   pull_request:
#   pull_request_target:
#     types: [labeled]

permissions:
  contents: write

jobs:
  # print the context for debugging in case a job gets skipped
  printJob:
    name: Print event
    runs-on: ubuntu-latest
    steps:
      - name: Dump GitHub context
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}
        run: |
          echo "$GITHUB_CONTEXT"

  dependabot:
    runs-on: ubuntu-latest
    if: ${{ github.actor == 'dependabot[bot]' && github.event.workflow_run.event == 'pull_request' && github.event.workflow_run.conclusion == 'success' }}
    steps:
      - name: Development Code
        uses: actions/checkout@v3

      ###### GET PR NUMBER
      # we saved the pr_number in tests.yml. fetch it so we can merge the correct PR.
      # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
      - name: "Download artifact"
        uses: actions/github-script@v6
        with:
          script: |
            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
              owner: context.repo.owner,
              repo: context.repo.repo,
              run_id: context.payload.workflow_run.id,
            });
            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
              return artifact.name == "pr_number"
            })[0];
            let download = await github.rest.actions.downloadArtifact({
              owner: context.repo.owner,
              repo: context.repo.repo,
              artifact_id: matchArtifact.id,
              archive_format: 'zip',
            });
            let fs = require('fs');
            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/pr_number.zip`, Buffer.from(download.data));
      - name: "Unzip artifact"
        run: unzip pr_number.zip
      ###########

      - name: print pr number
        run: cat pr_number
      - name: actually merge it
        run: gh pr merge --auto --merge "$(cat pr_number)"
        env:
          GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}}
@@ -0,0 +1,5 @@
pip==22.2.2
nox==2022.8.7
nox-poetry==1.0.1
poetry==1.1.14
virtualenv==20.16.3
@@ -0,0 +1,19 @@
name: Labeler

on:
  push:
    branches:
      - main
      - master

jobs:
  labeler:
    runs-on: ubuntu-latest
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2

      - name: Run Labeler
        uses: crazy-max/ghaction-github-labeler@v3.1.1
        with:
          skip-delete: true
@@ -0,0 +1,189 @@
name: Tests

on:
  - push
  - pull_request

jobs:
  tests:
    name: ${{ matrix.session }} ${{ matrix.python }} / ${{ matrix.os }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - { python: "3.10", os: "ubuntu-latest", session: "pre-commit" }
          - { python: "3.10", os: "ubuntu-latest", session: "safety" }
          - { python: "3.10", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.9", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.8", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.7", os: "ubuntu-latest", session: "mypy" }
          - { python: "3.10", os: "ubuntu-latest", session: "tests" }
          - { python: "3.9", os: "ubuntu-latest", session: "tests" }
          - { python: "3.8", os: "ubuntu-latest", session: "tests" }
          - { python: "3.7", os: "ubuntu-latest", session: "tests" }
          - { python: "3.10", os: "windows-latest", session: "tests" }
          - { python: "3.10", os: "macos-latest", session: "tests" }
          - { python: "3.10", os: "ubuntu-latest", session: "typeguard" }
          - { python: "3.10", os: "ubuntu-latest", session: "xdoctest" }
          - { python: "3.10", os: "ubuntu-latest", session: "docs-build" }

    env:
      NOXSESSION: ${{ matrix.session }}
      FORCE_COLOR: "1"
      PRE_COMMIT_COLOR: "always"

    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2

      - name: Set up Python ${{ matrix.python }}
        uses: actions/setup-python@v4.2.0
        with:
          python-version: ${{ matrix.python }}

      - name: Upgrade pip
        run: |
          pip install --constraint=.github/workflows/constraints.txt pip
          pip --version

      - name: Upgrade pip in virtual environments
        shell: python
        run: |
          import os
          import pip

          with open(os.environ["GITHUB_ENV"], mode="a") as io:
              print(f"VIRTUALENV_PIP={pip.__version__}", file=io)

      - name: Install Poetry
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
          poetry --version

      - name: Install Nox
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
          pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
          nox --version

      - name: Compute pre-commit cache key
        if: matrix.session == 'pre-commit'
        id: pre-commit-cache
        shell: python
        run: |
          import hashlib
          import sys

          python = "py{}.{}".format(*sys.version_info[:2])
          payload = sys.version.encode() + sys.executable.encode()
          digest = hashlib.sha256(payload).hexdigest()
          result = "${{ runner.os }}-{}-{}-pre-commit".format(python, digest[:8])

          print("::set-output name=result::{}".format(result))

      - name: Restore pre-commit cache
        uses: actions/cache@v3.0.6
        if: matrix.session == 'pre-commit'
        with:
          path: ~/.cache/pre-commit
          key: ${{ steps.pre-commit-cache.outputs.result }}-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ steps.pre-commit-cache.outputs.result }}-

      - name: Run Nox
        run: |
          nox --force-color --python=${{ matrix.python }}

      - name: Upload coverage data
        # pin to upload coverage from only one matrix entry, otherwise coverage gets confused later
        if: always() && matrix.session == 'tests' && matrix.python == '3.10' && matrix.os == 'ubuntu-latest'
        uses: "actions/upload-artifact@v3.0.0"
        with:
          name: coverage-data
          path: ".coverage.*"

      - name: Upload documentation
        if: matrix.session == 'docs-build'
        uses: actions/upload-artifact@v3.0.0
        with:
          name: docs
          path: docs/_build

  coverage:
    runs-on: ubuntu-latest
    needs: tests
    steps:
      - name: Check out the repository
        uses: actions/checkout@v3.0.2
        with:
          # Disabling shallow clone is recommended for improving relevancy of reporting in sonarcloud
          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4.2.0
        with:
          python-version: "3.10"

      - name: Upgrade pip
        run: |
          pip install --constraint=.github/workflows/constraints.txt pip
          pip --version

      - name: Install Poetry
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt poetry
          poetry --version

      - name: Install Nox
        run: |
          pipx install --pip-args=--constraint=.github/workflows/constraints.txt nox
          pipx inject --pip-args=--constraint=.github/workflows/constraints.txt nox nox-poetry
          nox --version

      - name: Download coverage data
        uses: actions/download-artifact@v3.0.0
        with:
          name: coverage-data

      - name: Combine coverage data and display human readable report
        run: |
          find . -name \*.pyc -delete
          nox --force-color --session=coverage

      - name: Create coverage report
        run: |
          nox --force-color --session=coverage -- xml

      - name: Upload coverage report
        uses: codecov/codecov-action@v3.1.0
        with:
          # server is flaky. see https://github.com/codecov/codecov-action/issues/598
          fail_ci_if_error: false

      - name: SonarCloud Scan
        # thought about just skipping dependabot
        # if: ${{ github.actor != 'dependabot[bot]' }}
        # but figured all pull requests seems better, since none of them will have access to sonarcloud.
        # however, with just skipping pull requests, the build associated with "Triggered via push" is also associated with the pull request and also fails hitting sonarcloud
        # if: ${{ github.event_name != 'pull_request' }}
        # so just skip everything but main
        if: github.ref_name == 'main'
        uses: sonarsource/sonarcloud-github-action@master
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}

      # part about saving PR number and then using it from auto-merge-dependabot-prs from:
      # https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#workflow_run
      - name: Save PR number
        if: ${{ github.event_name == 'pull_request' }}
        env:
          PR_NUMBER: ${{ github.event.number }}
        run: |
          mkdir -p ./pr
          echo "$PR_NUMBER" > ./pr/pr_number
      - uses: actions/upload-artifact@v3
        with:
          name: pr_number
          path: pr/
@@ -0,0 +1,12 @@
.mypy_cache/
/.coverage
/.coverage.*
/.nox/
/.python-version
/.pytype/
/dist/
/docs/_build/
/src/*.egg-info/
__pycache__/
*.sqlite3
/pyrightconfig.json
@@ -0,0 +1,58 @@
repos:
  - repo: local
    hooks:
      - id: black
        name: black
        entry: black
        language: system
        types: [python]
        require_serial: true
      - id: check-added-large-files
        name: Check for added large files
        entry: check-added-large-files
        language: system
      - id: check-toml
        name: Check Toml
        entry: check-toml
        language: system
        types: [toml]
      - id: check-yaml
        name: Check Yaml
        entry: check-yaml
        language: system
        types: [yaml]
      - id: end-of-file-fixer
        name: Fix End of Files
        entry: end-of-file-fixer
        language: system
        types: [text]
        stages: [commit, push, manual]
      - id: flake8
        name: flake8
        entry: flake8
        language: system
        types: [python]
        require_serial: true
      - id: pyupgrade
        name: pyupgrade
        description: Automatically upgrade syntax for newer versions.
        entry: pyupgrade
        language: system
        types: [python]
        args: [--py37-plus]
      - id: reorder-python-imports
        name: Reorder python imports
        entry: reorder-python-imports
        language: system
        types: [python]
        args: [--application-directories=src]
      - id: trailing-whitespace
        name: Trim Trailing Whitespace
        entry: trailing-whitespace-fixer
        language: system
        types: [text]
        stages: [commit, push, manual]
  - repo: https://github.com/pre-commit/mirrors-prettier
    rev: v2.4.1
    hooks:
      - id: prettier
@@ -0,0 +1,12 @@
version: 2
build:
  os: ubuntu-20.04
  tools:
    python: "3.10"
sphinx:
  configuration: docs/conf.py
formats: all
python:
  install:
    - requirements: docs/requirements.txt
    - path: .
@@ -0,0 +1 @@
python 3.10.4
@@ -0,0 +1,105 @@
Contributor Covenant Code of Conduct
====================================

Our Pledge
----------

We as members, contributors, and leaders pledge to make participation in our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.

We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community.


Our Standards
-------------

Examples of behavior that contributes to a positive environment for our community include:

- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes, and learning from the experience
- Focusing on what is best not just for us as individuals, but for the overall community

Examples of unacceptable behavior include:

- The use of sexualized language or imagery, and sexual attention or
  advances of any kind
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
  address, without their explicit permission
- Other conduct which could reasonably be considered inappropriate in a
  professional setting

Enforcement Responsibilities
----------------------------

Community leaders are responsible for clarifying and enforcing our standards of acceptable behavior and will take appropriate and fair corrective action in response to any behavior that they deem inappropriate, threatening, offensive, or harmful.

Community leaders have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, and will communicate reasons for moderation decisions when appropriate.


Scope
-----

This Code of Conduct applies within all community spaces, and also applies when an individual is officially representing the community in public spaces. Examples of representing our community include using an official e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event.


Enforcement
-----------

Instances of abusive, harassing, or otherwise unacceptable behavior may be reported to the community leaders responsible for enforcement at sartography@users.noreply.github.com. All complaints will be reviewed and investigated promptly and fairly.

All community leaders are obligated to respect the privacy and security of the reporter of any incident.


Enforcement Guidelines
----------------------

Community leaders will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct:


1. Correction
~~~~~~~~~~~~~

**Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community.

**Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested.


2. Warning
~~~~~~~~~~

**Community Impact**: A violation through a single incident or series of actions.

**Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban.


3. Temporary Ban
~~~~~~~~~~~~~~~~

**Community Impact**: A serious violation of community standards, including sustained inappropriate behavior.

**Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban.


4. Permanent Ban
~~~~~~~~~~~~~~~~

**Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals.

**Consequence**: A permanent ban from any sort of public interaction within the community.


Attribution
-----------

This Code of Conduct is adapted from the `Contributor Covenant <homepage_>`__, version 2.0,
available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.

Community Impact Guidelines were inspired by `Mozilla’s code of conduct enforcement ladder <https://github.com/mozilla/diversity>`__.

.. _homepage: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see the FAQ at
https://www.contributor-covenant.org/faq. Translations are available at https://www.contributor-covenant.org/translations.
@@ -0,0 +1,123 @@
Contributor Guide
=================

Thank you for your interest in improving this project.
This project is open-source under the `MIT license`_ and
welcomes contributions in the form of bug reports, feature requests, and pull requests.

Here is a list of important resources for contributors:

- `Source Code`_
- `Documentation`_
- `Issue Tracker`_
- `Code of Conduct`_

.. _MIT license: https://opensource.org/licenses/MIT
.. _Source Code: https://github.com/sartography/flask-bpmn
.. _Documentation: https://flask-bpmn.readthedocs.io/
.. _Issue Tracker: https://github.com/sartography/flask-bpmn/issues

How to report a bug
-------------------

Report bugs on the `Issue Tracker`_.

When filing an issue, make sure to answer these questions:

- Which operating system and Python version are you using?
- Which version of this project are you using?
- What did you do?
- What did you expect to see?
- What did you see instead?

The best way to get your bug fixed is to provide a test case,
and/or steps to reproduce the issue.


How to request a feature
------------------------

Request features on the `Issue Tracker`_.


How to set up your development environment
------------------------------------------

You need Python 3.7+ and the following tools:

- Poetry_
- Nox_
- nox-poetry_

Install the package with development requirements:

.. code:: console

   $ poetry install

You can now run an interactive Python session,
or the command-line interface:

.. code:: console

   $ poetry run python
   $ poetry run flask-bpmn

.. _Poetry: https://python-poetry.org/
.. _Nox: https://nox.thea.codes/
.. _nox-poetry: https://nox-poetry.readthedocs.io/


How to test the project
-----------------------

Run the full test suite:

.. code:: console

   $ nox

List the available Nox sessions:

.. code:: console

   $ nox --list-sessions

You can also run a specific Nox session.
For example, invoke the unit test suite like this:

.. code:: console

   $ nox --session=tests

Unit tests are located in the ``tests`` directory,
and are written using the pytest_ testing framework.

.. _pytest: https://pytest.readthedocs.io/


How to submit changes
---------------------

Open a `pull request`_ to submit changes to this project.

Your pull request needs to meet the following guidelines for acceptance:

- The Nox test suite must pass without errors and warnings.
- Include unit tests. This project maintains 100% code coverage.
- If your changes add functionality, update the documentation accordingly.

Feel free to submit early, though—we can always iterate on this.

To run linting and code formatting checks before committing your change, you can install pre-commit as a Git hook by running the following command:

.. code:: console

   $ nox --session=pre-commit -- install

It is recommended to open an issue before starting work on anything.
This will allow a chance to talk it over with the owners and validate your approach.

.. _pull request: https://github.com/sartography/flask-bpmn/pulls
.. github-only
.. _Code of Conduct: CODE_OF_CONDUCT.rst
@@ -0,0 +1,22 @@
MIT License
===========

Copyright © 2022 Sartography

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

**The software is provided "as is", without warranty of any kind, express or
implied, including but not limited to the warranties of merchantability,
fitness for a particular purpose and noninfringement. In no event shall the
authors or copyright holders be liable for any claim, damages or other
liability, whether in an action of contract, tort or otherwise, arising from,
out of or in connection with the software or the use or other dealings in the
software.**
@@ -0,0 +1,102 @@
Flask Bpmn
==========

|PyPI| |Status| |Python Version| |License|

|Read the Docs| |Tests| |Codecov|

|pre-commit| |Black|

.. |PyPI| image:: https://img.shields.io/pypi/v/flask-bpmn.svg
   :target: https://pypi.org/project/flask-bpmn/
   :alt: PyPI
.. |Status| image:: https://img.shields.io/pypi/status/flask-bpmn.svg
   :target: https://pypi.org/project/flask-bpmn/
   :alt: Status
.. |Python Version| image:: https://img.shields.io/pypi/pyversions/flask-bpmn
   :target: https://pypi.org/project/flask-bpmn
   :alt: Python Version
.. |License| image:: https://img.shields.io/pypi/l/flask-bpmn
   :target: https://opensource.org/licenses/MIT
   :alt: License
.. |Read the Docs| image:: https://img.shields.io/readthedocs/flask-bpmn/latest.svg?label=Read%20the%20Docs
   :target: https://flask-bpmn.readthedocs.io/
   :alt: Read the documentation at https://flask-bpmn.readthedocs.io/
.. |Tests| image:: https://github.com/sartography/flask-bpmn/workflows/Tests/badge.svg
   :target: https://github.com/sartography/flask-bpmn/actions?workflow=Tests
   :alt: Tests
.. |Codecov| image:: https://codecov.io/gh/sartography/flask-bpmn/branch/main/graph/badge.svg
   :target: https://codecov.io/gh/sartography/flask-bpmn
   :alt: Codecov
.. |pre-commit| image:: https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white
   :target: https://github.com/pre-commit/pre-commit
   :alt: pre-commit
.. |Black| image:: https://img.shields.io/badge/code%20style-black-000000.svg
   :target: https://github.com/psf/black
   :alt: Black


Features
--------

* Provides BPMN engine functionality for inclusion in a Flask application.


Requirements
------------

* Python 3.7+


Installation
------------

You can install *Flask Bpmn* via pip_ from PyPI_:

.. code:: console

   $ pip install flask-bpmn


Usage
-----

Please see the `Command-line Reference <Usage_>`_ for details.


Contributing
------------

Contributions are very welcome.
To learn more, see the `Contributor Guide`_.


License
-------

Distributed under the terms of the `MIT license`_,
*Flask Bpmn* is free and open source software.


Issues
------

If you encounter any problems,
please `file an issue`_ along with a detailed description.


Credits
-------

This project was generated from `@cjolowicz`_'s `Hypermodern Python Cookiecutter`_ template.

.. _@cjolowicz: https://github.com/cjolowicz
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _MIT license: https://opensource.org/licenses/MIT
.. _PyPI: https://pypi.org/
.. _Hypermodern Python Cookiecutter: https://github.com/cjolowicz/cookiecutter-hypermodern-python
.. _file an issue: https://github.com/sartography/flask-bpmn/issues
.. _pip: https://pip.pypa.io/
.. github-only
.. _Contributor Guide: CONTRIBUTING.rst
.. _Usage: https://flask-bpmn.readthedocs.io/en/latest/usage.html
@@ -0,0 +1,9 @@
comment: false
coverage:
  status:
    project:
      default:
        target: "100"
    patch:
      default:
        target: "100"
@@ -0,0 +1 @@
.. include:: ../CODE_OF_CONDUCT.rst
@@ -0,0 +1,17 @@
"""Sphinx configuration."""
from datetime import datetime


project = "Flask Bpmn"
author = "Sartography"
copyright = f"{datetime.now().year}, {author}"
extensions = [
    "sphinx.ext.napoleon",
    "autoapi.extension",
    "sphinx_click",
]

# https://github.com/readthedocs/sphinx-autoapi
autoapi_type = "python"
autoapi_dirs = ["../src"]
html_theme = "furo"
@@ -0,0 +1,4 @@
.. include:: ../CONTRIBUTING.rst
   :end-before: github-only

.. _Code of Conduct: codeofconduct.html
@@ -0,0 +1,16 @@
.. include:: ../README.rst
   :end-before: github-only

.. _Contributor Guide: contributing.html
.. _Usage: usage.html

.. toctree::
   :hidden:
   :maxdepth: 1

   usage
   reference
   contributing
   Code of Conduct <codeofconduct>
   License <license>
   Changelog <https://github.com/sartography/flask-bpmn/releases>
@@ -0,0 +1 @@
.. include:: ../LICENSE.rst
@@ -0,0 +1,9 @@
Reference
=========


flask_bpmn
----------

.. automodule:: flask_bpmn
   :members:
@@ -0,0 +1,3 @@
furo==2022.6.21
sphinx==5.1.1
sphinx-click==4.3.0
@@ -0,0 +1,6 @@
Usage
=====

.. click:: flask_bpmn.__main__:main
   :prog: flask-bpmn
   :nested: full
@@ -0,0 +1,205 @@
"""Nox sessions."""
import os
import shutil
import sys
from pathlib import Path
from textwrap import dedent

import nox

try:
    from nox_poetry import Session
    from nox_poetry import session
except ImportError:
    message = f"""\
    Nox failed to import the 'nox-poetry' package.

    Please install it using the following command:

    {sys.executable} -m pip install nox-poetry"""
    raise SystemExit(dedent(message)) from None


package = "flask_bpmn"
python_versions = ["3.10", "3.9", "3.8", "3.7"]
nox.needs_version = ">= 2021.6.6"
nox.options.sessions = (
    "pre-commit",
    "safety",
    "mypy",
    "tests",
    "typeguard",
    "xdoctest",
    "docs-build",
)


def activate_virtualenv_in_precommit_hooks(session: Session) -> None:
    """Activate virtualenv in hooks installed by pre-commit.

    This function patches git hooks installed by pre-commit to activate the
    session's virtual environment. This allows pre-commit to locate hooks in
    that environment when invoked from git.

    Args:
        session: The Session object.
    """
    assert session.bin is not None  # noqa: S101

    virtualenv = session.env.get("VIRTUAL_ENV")
    if virtualenv is None:
        return

    hookdir = Path(".git") / "hooks"
    if not hookdir.is_dir():
        return

    for hook in hookdir.iterdir():
        if hook.name.endswith(".sample") or not hook.is_file():
            continue

        text = hook.read_text()
        bindir = repr(session.bin)[1:-1]  # strip quotes
        if not (
            Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text
        ):
            continue

        lines = text.splitlines()
        if not (lines[0].startswith("#!") and "python" in lines[0].lower()):
            continue

        header = dedent(
            f"""\
            import os
            os.environ["VIRTUAL_ENV"] = {virtualenv!r}
            os.environ["PATH"] = os.pathsep.join((
                {session.bin!r},
                os.environ.get("PATH", ""),
            ))
            """
        )

        lines.insert(1, header)
        hook.write_text("\n".join(lines))


@session(name="pre-commit", python="3.10")
def precommit(session: Session) -> None:
    """Lint using pre-commit."""
    args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"]
    session.install(
        "black",
        "darglint",
        "flake8",
        "flake8-bandit",
        "flake8-bugbear",
        "flake8-docstrings",
        "flake8-rst-docstrings",
        "pep8-naming",
        "pre-commit",
        "pre-commit-hooks",
        "pyupgrade",
        "reorder-python-imports",
    )
    session.run("pre-commit", *args)
    if args and args[0] == "install":
        activate_virtualenv_in_precommit_hooks(session)


@session(python="3.10")
def safety(session: Session) -> None:
    """Scan dependencies for insecure packages."""
    requirements = session.poetry.export_requirements()
    session.install("safety")
    session.run("safety", "check", "--full-report", f"--file={requirements}")


@session(python=python_versions)
def mypy(session: Session) -> None:
    """Type-check using mypy."""
    args = session.posargs or ["src", "tests", "docs/conf.py"]
    session.install(".")
    session.install("mypy", "pytest")
    session.run("mypy", *args)
    if not session.posargs:
        session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py")


@session(python=python_versions)
def tests(session: Session) -> None:
    """Run the test suite."""
    session.install(".")
    session.install("coverage[toml]", "pytest", "pygments")
    try:
        session.run("coverage", "run", "--parallel", "-m", "pytest", *session.posargs)
    finally:
        if session.interactive:
            session.notify("coverage", posargs=[])


@session
def coverage(session: Session) -> None:
    """Produce the coverage report."""
    args = session.posargs or ["report"]

    session.install("coverage[toml]")

    if not session.posargs and any(Path().glob(".coverage.*")):
        session.run("coverage", "combine")

    session.run("coverage", *args)


@session(python=python_versions)
def typeguard(session: Session) -> None:
    """Runtime type checking using Typeguard."""
    session.install(".")
    session.install("pytest", "typeguard", "pygments")
    session.run("pytest", f"--typeguard-packages={package}", *session.posargs)


@session(python=python_versions)
def xdoctest(session: Session) -> None:
    """Run examples with xdoctest."""
    if session.posargs:
        args = [package, *session.posargs]
    else:
        args = [f"--modname={package}", "--command=all"]
        if "FORCE_COLOR" in os.environ:
            args.append("--colored=1")

    session.install(".")
    session.install("xdoctest[colors]")
    session.run("python", "-m", "xdoctest", *args)


@session(name="docs-build", python="3.10")
def docs_build(session: Session) -> None:
    """Build the documentation."""
    args = session.posargs or ["docs", "docs/_build"]
    if not session.posargs and "FORCE_COLOR" in os.environ:
        args.insert(0, "--color")

    session.install(".")
    session.install("sphinx", "sphinx-click", "furo")

    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)

    session.run("sphinx-build", *args)


@session(python="3.10")
def docs(session: Session) -> None:
    """Build and serve the documentation with live reloading on file changes."""
    args = session.posargs or ["--open-browser", "docs", "docs/_build"]
    session.install(".")
    session.install("sphinx", "sphinx-autobuild", "sphinx-click", "furo")

    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)

    session.run("sphinx-autobuild", *args)
File diff suppressed because it is too large
@@ -0,0 +1,99 @@
[tool.poetry]
name = "flask-bpmn"
version = "0.0.0"
description = "Flask Bpmn"
authors = ["Jason Lantz <sartography@users.noreply.github.com>"]
license = "MIT"
readme = "README.rst"
homepage = "https://github.com/sartography/flask-bpmn"
repository = "https://github.com/sartography/flask-bpmn"
documentation = "https://flask-bpmn.readthedocs.io"
classifiers = [
    "Development Status :: 1 - Planning",
]

[tool.poetry.urls]
Changelog = "https://github.com/sartography/flask-bpmn/releases"

[tool.poetry.dependencies]
python = "^3.7"
click = "^8.0.1"
flask = "*"
flask-admin = "*"
flask-bcrypt = "*"
flask-cors = "*"
flask-mail = "*"
flask-marshmallow = "*"
flask-migrate = "*"
flask-restful = "*"
werkzeug = "*"
# spiffworkflow = {git = "https://github.com/sartography/SpiffWorkflow", rev = "feature/spiff_properties"}
spiffworkflow = "*"
sentry-sdk = "*"
sphinx-autoapi = "^1.9.0"


[tool.poetry.dev-dependencies]
pytest = "^6.2.5"
coverage = {extras = ["toml"], version = "^6.4"}
safety = "^2.1.1"
mypy = "^0.971"
typeguard = "^2.13.2"
xdoctest = {extras = ["colors"], version = "^1.0.1"}
sphinx = "^4.3.0"
sphinx-autobuild = ">=2021.3.14"
pre-commit = "^2.20.0"
flake8 = "^4.0.1"
black = ">=21.10b0"
flake8-bandit = "^2.1.2"

# 1.7.3 broke us. https://github.com/PyCQA/bandit/issues/841
bandit = "1.7.2"

flake8-bugbear = "^22.7.1"
flake8-docstrings = "^1.6.0"
flake8-rst-docstrings = "^0.2.7"
pep8-naming = "^0.13.1"
darglint = "^1.8.1"
reorder-python-imports = "^3.8.2"
pre-commit-hooks = "^4.3.0"
sphinx-click = "^4.3.0"
Pygments = "^2.13.0"
pyupgrade = "^2.37.3"
furo = ">=2021.11.12"
MonkeyType = "^22.2.0"

[tool.poetry.scripts]
flask-bpmn = "flask_bpmn.__main__:main"

[tool.coverage.paths]
source = ["src", "*/site-packages"]
tests = ["tests", "*/tests"]

[tool.coverage.run]
branch = true
source = ["flask_bpmn", "tests"]

[tool.coverage.report]
show_missing = true
fail_under = 50

[tool.mypy]
strict = true
disallow_any_generics = false
warn_unreachable = true
pretty = true
show_column_numbers = true
show_error_codes = true
show_error_context = true

# We get 'error: Module has no attribute "set_context"' for sentry-sdk without this option
implicit_reexport = true

# allow for subdirs to NOT require __init__.py
namespace_packages = true
explicit_package_bases = false

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
@@ -0,0 +1,7 @@
sonar.organization=sartography
sonar.projectKey=sartography_flask-bpmn
sonar.host.url=https://sonarcloud.io
sonar.python.version=3.7,3.8,3.9,3.10
sonar.python.coverage.reportPaths=coverage.xml
sonar.test.inclusions=tests
sonar.coverage.exclusions=noxfile.py,conftest.py,conf.py
@@ -0,0 +1 @@
"""Flask Bpmn."""
@@ -0,0 +1,13 @@
"""Command-line interface."""
import click


@click.command()
@click.version_option()
def main() -> None:
    """Flask Bpmn."""
    print("This does nothing")


if __name__ == "__main__":
    main(prog_name="flask-bpmn")  # pragma: no cover
@@ -0,0 +1,231 @@
"""API Error functionality."""
from __future__ import annotations

import json
from typing import Any

import sentry_sdk
from flask import Blueprint
from flask import current_app
from flask import g
from marshmallow import Schema
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskExecException  # type: ignore
from SpiffWorkflow.exceptions import WorkflowException  # type: ignore
from SpiffWorkflow.specs.base import TaskSpec  # type: ignore
from SpiffWorkflow.task import Task  # type: ignore
from werkzeug.exceptions import InternalServerError

api_error_blueprint = Blueprint("api_error_blueprint", __name__)


class ApiError(Exception):
    """ApiError Class to help handle exceptions."""

    def __init__(
        self,
        error_code: str,
        message: str,
        status_code: int = 400,
        file_name: str = "",
        task_id: str = "",
        task_name: str = "",
        tag: str = "",
        task_data: dict | None | str = None,
        error_type: str = "",
        error_line: str = "",
        line_number: int = 0,
        offset: int = 0,
        task_trace: dict | None = None,
    ) -> None:
        """The Init Method."""
        if task_data is None:
            task_data = {}
        if task_trace is None:
            task_trace = {}
        self.status_code = status_code
        self.error_code = error_code  # a short consistent string describing the error.
        self.message = message  # A detailed message that provides more information.

        # OPTIONAL: The id of the task in the BPMN Diagram.
        self.task_id = task_id or ""

        # OPTIONAL: The name of the task in the BPMN Diagram.

        # OPTIONAL: The file that caused the error.
        self.task_name = task_name or ""
        self.file_name = file_name or ""

        # OPTIONAL: The XML Tag that caused the issue.
        self.tag = tag or ""

        # OPTIONAL: A snapshot of data connected to the task when error occurred.
        self.task_data = task_data or ""
        self.line_number = line_number
        self.offset = offset
        self.error_type = error_type
        self.error_line = error_line
        self.task_trace = task_trace

        try:
            user = g.user.uid
        except Exception:
            user = "Unknown"
        self.task_user = user
        # This is for sentry logging into Slack
        sentry_sdk.set_context("User", {"user": user})
        Exception.__init__(self, self.message)

    def __str__(self) -> str:
        """Instructions to print instance as a string."""
        msg = "ApiError: % s. " % self.message
        if self.task_name:
            msg += f"Error in task '{self.task_name}' ({self.task_id}). "
        if self.line_number:
            msg += "Error is on line %i. " % self.line_number
        if self.file_name:
            msg += "In file %s. " % self.file_name
        return msg

    @classmethod
    def from_task(
        cls,
        error_code: str,
        message: str,
        task: Task,
        status_code: int = 400,
        line_number: int = 0,
        offset: int = 0,
        error_type: str = "",
        error_line: str = "",
        task_trace: dict | None = None,
    ) -> ApiError:
        """Constructs an API Error with details pulled from the current task."""
        instance = cls(error_code, message, status_code=status_code)
        instance.task_id = task.task_spec.name or ""
        instance.task_name = task.task_spec.description or ""
        instance.file_name = task.workflow.spec.file or ""
        instance.line_number = line_number
        instance.offset = offset
        instance.error_type = error_type
        instance.error_line = error_line
        if task_trace:
            instance.task_trace = task_trace
        else:
            instance.task_trace = WorkflowTaskExecException.get_task_trace(task)

        # spiffworkflow is doing something weird where task ends up referenced in the data in some cases.
        if "task" in task.data:
            task.data.pop("task")

        # Assure that there is nothing in the json data that can't be serialized.
        instance.task_data = ApiError.remove_unserializeable_from_dict(task.data)

        current_app.logger.error(message, exc_info=True)
        return instance

    @staticmethod
    def remove_unserializeable_from_dict(my_dict: dict) -> dict:
        """Removes unserializeable from dict."""
        keys_to_delete = []
        for key, value in my_dict.items():
            if not ApiError.is_jsonable(value):
                keys_to_delete.append(key)
        for key in keys_to_delete:
            del my_dict[key]
        return my_dict

    @staticmethod
    def is_jsonable(x: Any) -> bool:
        """Attempts a json.dump on given input and returns false if it cannot."""
        try:
            json.dumps(x)
            return True
        except (TypeError, OverflowError, ValueError):
            return False

    @classmethod
    def from_task_spec(
        cls,
        code: str,
        message: str,
        task_spec: TaskSpec,
        status_code: int = 400,
    ) -> ApiError:
        """Constructs an API Error with details pulled from the current task."""
        instance = cls(code, message, status_code=status_code)
        instance.task_id = task_spec.name or ""
        instance.task_name = task_spec.description or ""
        if task_spec._wf_spec:
            instance.file_name = task_spec._wf_spec.file
        current_app.logger.error(message, exc_info=True)
        return instance

    @classmethod
    def from_workflow_exception(
        cls,
        error_code: str,
        message: str,
        exp: WorkflowException,
    ) -> ApiError:
        """Deals with workflow exceptions.

        We catch a lot of workflow exception errors,
        so consolidating the error_code, and doing the best things
        we can with the data we have.
        """
        if isinstance(exp, WorkflowTaskExecException):
            return ApiError.from_task(
                error_code,
                message,
                exp.task,
                line_number=exp.line_number,
                offset=exp.offset,
                error_type=exp.exception.__class__.__name__,
                error_line=exp.error_line,
                task_trace=exp.task_trace,
            )

        else:
            return ApiError.from_task_spec(error_code, message, exp.sender)


class ApiErrorSchema(Schema):
    """ApiErrorSchema Class."""

    class Meta:
        """Sets the fields to search the error schema for."""

        fields = (
            "error_code",
            "message",
            "workflow_name",
            "file_name",
            "task_name",
            "task_id",
            "task_data",
            "task_user",
            "hint",
            "line_number",
            "offset",
            "error_type",
            "error_line",
            "task_trace",
        )


@api_error_blueprint.app_errorhandler(ApiError)
def handle_invalid_usage(error: ApiError) -> tuple[str, int]:
    """Handles invalid usage error."""
    response = ApiErrorSchema().dump(error)
    return response, error.status_code


@api_error_blueprint.app_errorhandler(InternalServerError)
def handle_internal_server_error(error: InternalServerError) -> tuple[str, int]:
    """Handles internal server error."""
    original = getattr(error, "original_exception", None)
    api_error = ApiError(
        error_code="Internal Server Error (500)", message=str(original)
    )
    response = ApiErrorSchema().dump(api_error)
    return response, 500
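The error handlers above only take effect once the blueprint is registered on an application. As an illustration only (this usage sketch is not part of the commit; the app and route names are hypothetical), a host Flask app might wire it up roughly like this:

    from flask import Flask

    from flask_bpmn.api.api_error import ApiError
    from flask_bpmn.api.api_error import api_error_blueprint

    app = Flask(__name__)
    # Registering the blueprint activates the app-wide ApiError and
    # InternalServerError handlers defined in api_error.py.
    app.register_blueprint(api_error_blueprint)


    @app.route("/process-models/<model_id>")
    def show_process_model(model_id: str):  # hypothetical route, for illustration only
        if model_id != "hello_world":
            # Caught by handle_invalid_usage(), which serializes the error
            # with ApiErrorSchema and returns it with the given status code.
            raise ApiError(
                error_code="model_not_found",
                message=f"Unknown model: {model_id}",
                status_code=404,
            )
        return {"id": model_id}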
@@ -0,0 +1 @@
"""__init__."""
@@ -0,0 +1,85 @@
"""Db."""
from __future__ import annotations

import enum
import time
from typing import Any

from flask_migrate import Migrate  # type: ignore
from flask_sqlalchemy import SQLAlchemy  # type: ignore
from sqlalchemy import event  # type: ignore
from sqlalchemy.engine.base import Connection  # type: ignore
from sqlalchemy.orm.mapper import Mapper  # type: ignore

db = SQLAlchemy()
migrate = Migrate()


class SpiffworkflowBaseDBModel(db.Model):  # type: ignore
    """SpiffworkflowBaseDBModel."""

    __abstract__ = True

    @classmethod
    def _all_subclasses(cls) -> list[type[SpiffworkflowBaseDBModel]]:
        """Get all subclasses of cls, descending.

        So, if A is a subclass of B is a subclass of cls, this
        will include A and B.
        (Does not include cls)
        """
        children = cls.__subclasses__()
        result = []
        while children:
            next = children.pop()
            subclasses = next.__subclasses__()
            result.append(next)
            # check subclasses of subclasses of SpiffworkflowBaseDBModel. i guess we only go down to grandchildren, which seems cool.
            for subclass in subclasses:
                children.append(subclass)
        return result

    def validate_enum_field(
        self, key: str, value: Any, enum_variable: enum.EnumMeta
    ) -> Any:
        """Validate_enum_field."""
        try:
            m_type = getattr(enum_variable, value, None)
        except Exception as e:
            raise ValueError(
                f"{self.__class__.__name__}: invalid {key}: {value}"
            ) from e

        if m_type is None:
            raise ValueError(f"{self.__class__.__name__}: invalid {key}: {value}")

        return m_type.value


def update_created_modified_on_create_listener(
    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
) -> None:
    """Event listener that runs before a record is created, and sets the created/modified fields accordingly."""
    if "created_at_in_seconds" in mapper.columns.keys():
        target.created_at_in_seconds = round(time.time())
    if "updated_at_in_seconds" in mapper.columns.keys():
        target.updated_at_in_seconds = round(time.time())


def update_modified_on_update_listener(
    mapper: Mapper, _connection: Connection, target: SpiffworkflowBaseDBModel
) -> None:
    """Event listener that runs before a record is updated, and sets the modified field accordingly."""
    if "updated_at_in_seconds" in mapper.columns.keys():
        if db.session.is_modified(target, include_collections=False):
            target.updated_at_in_seconds = round(time.time())


def add_listeners() -> None:
    """Adds the listeners to all subclasses.

    This should be called after importing all subclasses
    """
    for cls in SpiffworkflowBaseDBModel._all_subclasses():
        event.listen(cls, "before_insert", update_created_modified_on_create_listener)
        event.listen(cls, "before_update", update_modified_on_update_listener)
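For context, a minimal sketch under assumptions (not part of this commit; the model and database URI below are hypothetical): the timestamp listeners only populate subclasses that actually define created_at_in_seconds / updated_at_in_seconds columns, and add_listeners() has to be called after those subclasses have been imported.

    from flask import Flask

    from flask_bpmn.models.db import SpiffworkflowBaseDBModel
    from flask_bpmn.models.db import add_listeners
    from flask_bpmn.models.db import db
    from flask_bpmn.models.db import migrate


    class TaskEventModel(SpiffworkflowBaseDBModel):  # hypothetical example model
        __tablename__ = "task_event"
        id = db.Column(db.Integer, primary_key=True)
        # Because these two columns exist, the before_insert / before_update
        # listeners will fill them in automatically.
        created_at_in_seconds = db.Column(db.Integer)
        updated_at_in_seconds = db.Column(db.Integer)


    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite3"
    db.init_app(app)
    migrate.init_app(app, db)
    add_listeners()  # attach listeners to every SpiffworkflowBaseDBModel subclass defined so far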
@@ -0,0 +1,11 @@
"""Group."""
from flask_bpmn.models.db import db
from flask_bpmn.models.db import SpiffworkflowBaseDBModel


class FlaskBpmnGroupModel(SpiffworkflowBaseDBModel):
    """FlaskBpmnGroupModel."""

    __tablename__ = "group"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(255))
@@ -0,0 +1 @@
"""Test suite for the flask_bpmn package."""
@@ -0,0 +1,29 @@
"""Test cases for the __main__ module."""
import io

from flask_bpmn.api.api_error import ApiError


def test_is_jsonable_can_succeed() -> None:
    """Test_is_jsonable_can_succeed."""
    result = ApiError.is_jsonable("This is a string and should pass json.dumps")
    assert result is True


def test_is_jsonable_can_fail() -> None:
    """Test_is_jsonable_can_fail."""
    result = ApiError.is_jsonable(io.StringIO("BAD JSON OBJECT"))
    assert result is False


def test_remove_unserializeable_from_dict_succeeds() -> None:
    """Test_remove_unserializeable_from_dict_succeeds."""
    initial_dict_object = {
        "valid_key": "valid_value",
        "invalid_key_value": io.StringIO("BAD JSON OBJECT"),
    }
    final_dict_object = {
        "valid_key": "valid_value",
    }
    result = ApiError.remove_unserializeable_from_dict(initial_dict_object)
    assert result == final_dict_object
@@ -0,0 +1,20 @@
"""Test cases for the group module."""
from flask_bpmn.models.db import SpiffworkflowBaseDBModel
from flask_bpmn.models.group import FlaskBpmnGroupModel


class AppGroupModel(FlaskBpmnGroupModel):
    """AppGroupModel."""


def test_table_names_are_singular_per_what_appear_to_be_flask_conventions() -> None:
    """Test_is_jsonable_can_succeed."""
    assert FlaskBpmnGroupModel.__tablename__ == "group"


def test__all_subclasses_of_spiffworkflow_base_db_model_returns_all_subclasses_that_are_defined() -> None:
    """Test_is_jsonable_can_succeed."""
    assert SpiffworkflowBaseDBModel._all_subclasses() == [
        FlaskBpmnGroupModel,
        AppGroupModel,
    ]
@@ -0,0 +1,17 @@
"""Test cases for the __main__ module."""
import pytest
from click.testing import CliRunner

from flask_bpmn import __main__


@pytest.fixture
def runner() -> CliRunner:
    """Fixture for invoking command-line interfaces."""
    return CliRunner()


def test_main_succeeds(runner: CliRunner) -> None:
    """It exits with a status code of zero."""
    result = runner.invoke(__main__.main)
    assert result.exit_code == 0