Merge commit 'f4b87a661e49c788db34a3531219bcebf3d4fa27' as 'connector-proxy-status-im'

This commit is contained in:
Jon Herron 2022-10-12 10:23:14 -04:00
commit 959d5d4fe3
54 changed files with 6014 additions and 0 deletions

View File

@ -0,0 +1,2 @@
[darglint]
strictness = long

View File

@ -0,0 +1,12 @@
[flake8]
select = B,B9,C,D,DAR,E,F,N,RST,S,W
ignore = E203,E501,RST201,RST203,RST301,W503,S410,S320
max-line-length = 120
max-complexity = 30
docstring-convention = google
rst-roles = class,const,func,meth,mod,ref
rst-directives = deprecated
per-file-ignores =
# prefer naming tests descriptively rather than forcing comments
tests/*:S101,D103

133
connector-proxy-status-im/.gitignore vendored Normal file
View File

@ -0,0 +1,133 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
config.py
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# IDEs
.idea

View File

@ -0,0 +1,63 @@
repos:
- repo: local
hooks:
- id: black
name: black
entry: black
language: system
types: [python]
require_serial: true
exclude: ^migrations/
- id: check-added-large-files
name: Check for added large files
entry: check-added-large-files
language: system
- id: check-toml
name: Check Toml
entry: check-toml
language: system
types: [toml]
- id: check-yaml
name: Check Yaml
entry: check-yaml
language: system
types: [yaml]
- id: end-of-file-fixer
name: Fix End of Files
entry: end-of-file-fixer
language: system
types: [text]
stages: [commit, push, manual]
- id: flake8
name: flake8
entry: flake8
language: system
types: [python]
require_serial: true
exclude: ^migrations/
- id: pyupgrade
name: pyupgrade
description: Automatically upgrade syntax for newer versions.
entry: pyupgrade
language: system
types: [python]
args: [--py37-plus]
- id: reorder-python-imports
name: Reorder python imports
entry: reorder-python-imports
language: system
types: [python]
args: [--application-directories=src]
exclude: ^migrations/
- id: trailing-whitespace
name: Trim Trailing Whitespace
entry: trailing-whitespace-fixer
language: system
types: [text]
stages: [commit, push, manual]
exclude: ^migrations/
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v2.4.1
hooks:
- id: prettier
exclude_types: [html]

View File

@ -0,0 +1 @@
python 3.10.4

View File

@ -0,0 +1,23 @@
FROM ghcr.io/sartography/python:3.10

# Poetry drives dependency installation and the runtime environment.
RUN pip install poetry
# Unprivileged account for gunicorn to run under.
RUN useradd _gunicorn --no-create-home --user-group

WORKDIR /app

# Install dependencies before copying sources so this layer is cached
# across source-only changes.
ADD pyproject.toml poetry.lock /app/
ADD connectors /app/connectors
RUN poetry install

# Strip compilers/build headers to slim the final image.
RUN set -xe \
  && apt-get remove -y gcc python3-dev libssl-dev \
  && apt-get autoremove -y \
  && apt-get clean -y \
  && rm -rf /var/lib/apt/lists/*

COPY . /app/

# run poetry install again AFTER copying the app into the image
# otherwise it does not know what the main app module is
RUN poetry install

CMD ./bin/boot_server_in_docker

View File

@ -0,0 +1,7 @@
# Run the service
poetry run flask --debug run --port=7004
# You can check to see if it is running by loading
http://localhost:7004/v1/commands

View File

@ -0,0 +1,307 @@
import importlib
import inspect
import json
import os
import pkgutil
import types
import typing
from flask import Flask
from flask import redirect
from flask import request
from flask import Response
from flask import session
from flask import url_for
from flask_oauthlib.contrib.client import OAuth
app = Flask(__name__)
# Optional deployment-specific settings; a missing config.py is tolerated.
app.config.from_pyfile("config.py", silent=True)

# Outside production, permit OAuth flows over plain HTTP so development
# against http://localhost works.
if app.config["ENV"] != "production":
    os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1"
@app.before_first_request
def load_plugins():
    """Placeholder hook for loading connector plugins once at startup.

    NOTE(review): plugins are currently re-discovered on every request via
    PluginService.available_plugins(); nothing is cached here yet.
    """
    print("load the plugins once here?")
@app.route("/liveness")
def status():
    """Liveness probe: always reports that the service is up."""
    body = json.dumps({"ok": True})
    return Response(body, status=200, mimetype="application/json")
def list_targets(targets):
    """Build a JSON response describing every target of every plugin.

    :param targets: mapping of plugin name -> {target name -> target class}
    """
    descriptions = [
        PluginService.describe_target(plugin_name, target_name, target)
        for plugin_name, plugin_targets in targets.items()
        for target_name, target in plugin_targets.items()
    ]
    return Response(json.dumps(descriptions), status=200, mimetype="application/json")
@app.route("/v1/auths")
def list_auths():
    """List every auth target discovered across installed connector plugins."""
    return list_targets(PluginService.available_auths_by_plugin())


@app.route("/v1/commands")
def list_commands():
    """List every command target discovered across installed connector plugins."""
    return list_targets(PluginService.available_commands_by_plugin())
def auth_handler(plugin_display_name, auth_name, params):
    """Build an OAuth remote-app handler for the named plugin auth.

    Returns None (implicitly) when the plugin/auth combination is unknown.
    """
    auth = PluginService.auth_named(plugin_display_name, auth_name)
    if auth is not None:
        # Instantiate the auth with only the parameters it declares.
        handler_params = auth.filtered_params(params)
        app_description = auth(**handler_params).app_description()
        # TODO right now this assumes Oauth.
        # would need to expand if other auth providers are used
        handler = OAuth(app).remote_app(**app_description)

        # Tokens are handed back to the caller via redirect in auth_callback;
        # nothing is persisted server-side, so both hooks are no-ops.
        @handler.tokengetter
        def tokengetter():
            pass

        @handler.tokensaver
        def tokensaver(token):
            pass

        return handler
@app.route("/v1/auth/<plugin_display_name>/<auth_name>")
def do_auth(plugin_display_name, auth_name):
    """Begin an OAuth flow for the given plugin auth.

    Expects redirect_url, client_id and client_secret as query parameters.
    """
    params = request.args.to_dict()
    our_redirect_url = params["redirect_url"]
    # Remembered so auth_callback can send the caller back where they started.
    session["redirect_url"] = our_redirect_url

    handler = auth_handler(plugin_display_name, auth_name, params)
    if handler is None:
        return Response("Auth not found", status=404)

    # TODO factor into handler
    # TODO namespace the keys
    session["client_id"] = params["client_id"]
    session["client_secret"] = params["client_secret"]

    # Absolute URL the provider will redirect to after authorization.
    oauth_redirect_url = url_for(
        "auth_callback",
        plugin_display_name=plugin_display_name,
        auth_name=auth_name,
        _external=True,
    )
    return handler.authorize(callback_uri=oauth_redirect_url)
@app.route("/v1/auth/<plugin_display_name>/<auth_name>/callback")
def auth_callback(plugin_display_name, auth_name):
    """Finish the OAuth flow and bounce the provider's response to the caller."""
    # The client credentials stashed in the session by do_auth are reused here.
    handler = auth_handler(plugin_display_name, auth_name, session)
    if handler is None:
        return Response("Auth not found", status=404)
    response = json.dumps(handler.authorized_response())
    redirect_url = session["redirect_url"]
    # TODO compare redirect_url to whitelist
    return redirect(f"{redirect_url}?response={response}")
@app.route("/v1/do/<plugin_display_name>/<command_name>")
def do_command(plugin_display_name, command_name):
    """Execute a plugin command, passing query parameters as constructor args."""
    command = PluginService.command_named(plugin_display_name, command_name)
    if command is None:
        return json_error_response(
            f"Command not found: {plugin_display_name}:{command_name}", status=404
        )
    params = request.args.to_dict()
    # Task data from the workflow engine rides along as a JSON-encoded param.
    raw_task_data = params.pop('spiff__task_data', '{}')
    task_data = json.loads(raw_task_data)
    try:
        result = command(**params).execute(app.config, task_data)
    except Exception as e:
        # NOTE(review): execution failures report 404; 500 may be more
        # accurate, but callers may already depend on this code.
        return json_error_response(
            f"Error encountered when executing {plugin_display_name}:{command_name} {str(e)}",
            status=404,
        )
    # The command decides its own body and mimetype.
    return Response(result["response"], mimetype=result["mimetype"], status=200)
def json_error_response(message, status):
    """Build a JSON error response with the given message and HTTP status.

    :param message: human-readable error description
    :param status: HTTP status code, echoed in the body for convenience
    """
    resp = {"error": message, "status": status}
    # Declare the JSON mimetype explicitly, consistent with every other JSON
    # response produced by this app (it previously defaulted to text/html).
    return Response(json.dumps(resp), status=status, mimetype="application/json")
class PluginService:
    """Discovers installed connector plugins and their auth/command targets.

    A plugin is any importable top-level package whose name starts with
    PLUGIN_PREFIX.  Within a plugin, classes defined in its "auths" and
    "commands" sub-packages become callable targets.
    """

    # Naming convention that marks an installed package as a connector plugin.
    PLUGIN_PREFIX = "connector_"

    @staticmethod
    def plugin_display_name(plugin_name):
        """Strip the connector prefix: "connector_aws" -> "aws"."""
        return plugin_name.removeprefix(PluginService.PLUGIN_PREFIX)

    @staticmethod
    def plugin_name_from_display_name(plugin_display_name):
        """Inverse of plugin_display_name: "aws" -> "connector_aws"."""
        return PluginService.PLUGIN_PREFIX + plugin_display_name

    @staticmethod
    def available_plugins():
        """Import and return {package name: module} for every installed plugin."""
        return {
            name: importlib.import_module(name)
            for finder, name, ispkg in pkgutil.iter_modules()
            if name.startswith(PluginService.PLUGIN_PREFIX)
        }

    @staticmethod
    def available_auths_by_plugin():
        """Return {plugin name: {auth class name: auth class}}."""
        return {
            plugin_name: {
                auth_name: auth
                for auth_name, auth in PluginService.auths_for_plugin(
                    plugin_name, plugin
                )
            }
            for plugin_name, plugin in PluginService.available_plugins().items()
        }

    @staticmethod
    def available_commands_by_plugin():
        """Return {plugin name: {command class name: command class}}."""
        return {
            plugin_name: {
                command_name: command
                for command_name, command in PluginService.commands_for_plugin(
                    plugin_name, plugin
                )
            }
            for plugin_name, plugin in PluginService.available_plugins().items()
        }

    @staticmethod
    def target_id(plugin_name, target_name):
        """Public identifier for a target, e.g. "aws/UploadFileData"."""
        plugin_display_name = PluginService.plugin_display_name(plugin_name)
        return f"{plugin_display_name}/{target_name}"

    @staticmethod
    def auth_named(plugin_display_name, auth_name):
        """Look up an auth class by display name; None when not found."""
        plugin_name = PluginService.plugin_name_from_display_name(plugin_display_name)
        available_auths_by_plugin = PluginService.available_auths_by_plugin()
        try:
            return available_auths_by_plugin[plugin_name][auth_name]
        except Exception:
            return None

    @staticmethod
    def command_named(plugin_display_name, command_name):
        """Look up a command class by display name; None when not found."""
        plugin_name = PluginService.plugin_name_from_display_name(plugin_display_name)
        available_commands_by_plugin = PluginService.available_commands_by_plugin()
        try:
            return available_commands_by_plugin[plugin_name][command_name]
        except Exception:
            return None

    @staticmethod
    def modules_for_plugin_in_package(plugin, package_name):
        """Yield (name, module) for each module under the plugin's sub-package.

        When a sub-package matches package_name, recursion descends into it;
        otherwise each plain module is executed into a fresh module object.
        """
        for finder, name, ispkg in pkgutil.iter_modules(plugin.__path__):
            if ispkg and name == package_name:
                # NOTE(review): find_module/load_module are deprecated in
                # favor of find_spec/exec_module.
                sub_pkg = finder.find_module(name).load_module(name)
                yield from PluginService.modules_for_plugin_in_package(sub_pkg, None)
            else:
                spec = finder.find_spec(name)
                if spec is not None and spec.loader is not None:
                    # Executed into a throwaway module object; not registered
                    # in sys.modules.
                    module = types.ModuleType(spec.name)
                    spec.loader.exec_module(module)
                    yield name, module

    @staticmethod
    def targets_for_plugin(plugin_name, plugin, target_package_name):
        """Yield (class name, class) for classes defined in the sub-package."""
        for module_name, module in PluginService.modules_for_plugin_in_package(
            plugin, target_package_name
        ):
            for member_name, member in inspect.getmembers(module, inspect.isclass):
                # Only classes defined in the module itself, not ones imported
                # into it.
                if member.__module__ == module_name:
                    yield member_name, member

    @staticmethod
    def auths_for_plugin(plugin_name, plugin):
        """Yield auth classes found in the plugin's "auths" sub-package."""
        yield from PluginService.targets_for_plugin(plugin_name, plugin, "auths")

    @staticmethod
    def commands_for_plugin(plugin_name, plugin):
        """Yield command classes found in the plugin's "commands" sub-package."""
        # TODO check if class has an execute method before yielding
        yield from PluginService.targets_for_plugin(plugin_name, plugin, "commands")

    @staticmethod
    def param_annotation_desc(param):
        """Parses a callable parameter's type annotation, if any, to form a ParameterDescription."""
        param_id = param.name
        param_type_desc = "any"
        none_type = type(None)
        # Only these annotation types are surfaced to API clients.
        supported_types = {str, int, bool, none_type}
        unsupported_type_marker = object
        annotation = param.annotation
        if annotation in supported_types:
            annotation_types = {annotation}
        else:
            # an annotation can have more than one type in the case of a union
            # get_args normalizes Union[str, dict] to (str, dict)
            # get_args normalizes Optional[str] to (str, none)
            # all unsupported types are marked so (str, dict) -> (str, unsupported)
            # the absence of a type annotation results in an empty set
            annotation_types = set(
                map(
                    lambda t: t if t in supported_types else unsupported_type_marker,
                    typing.get_args(annotation),
                )
            )
        # a parameter is required if it has no default value and none is not in its type set
        param_req = param.default is param.empty and none_type not in annotation_types
        # the none type from a union is used for requiredness, but needs to be discarded
        # to single out the optional type
        annotation_types.discard(none_type)
        # if we have a single supported type use that, else any is the default
        if len(annotation_types) == 1:
            annotation_type = annotation_types.pop()
            if annotation_type in supported_types:
                param_type_desc = annotation_type.__name__
        return {"id": param_id, "type": param_type_desc, "required": param_req}

    @staticmethod
    def callable_params_desc(kallable):
        """Describe a callable's parameters, skipping self and kwargs."""
        sig = inspect.signature(kallable)
        params_to_skip = ["self", "kwargs"]
        sig_params = filter(
            lambda param: param.name not in params_to_skip, sig.parameters.values()
        )
        params = [PluginService.param_annotation_desc(param) for param in sig_params]
        return params

    @staticmethod
    def describe_target(plugin_name, target_name, target):
        """Describe a target as {"id": ..., "parameters": [...]} for the API."""
        parameters = PluginService.callable_params_desc(target.__init__)
        target_id = PluginService.target_id(plugin_name, target_name)
        return {"id": target_id, "parameters": parameters}
if __name__ == "__main__":
    # Development entry point; production runs under gunicorn (see bin/).
    app.run(host="localhost", port=5000)

View File

@ -0,0 +1,18 @@
#!/usr/bin/env bash

# Container entry point: run the Flask app under gunicorn.

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

# Bind port is configurable; the default matches the local dev port.
port="${CONNECTOR_PROXY_STATUS_IM_PORT:-}"
if [[ -z "$port" ]]; then
  port=7004
fi

workers=3

# THIS MUST BE THE LAST COMMAND!
exec poetry run gunicorn --bind "0.0.0.0:$port" --workers="$workers" --timeout 90 --capture-output --access-logfile '-' --log-level debug app:app

View File

@ -0,0 +1,21 @@
#!/usr/bin/env bash

# Build and (re)start the service via docker compose.

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE:-}" ]]; then
  export CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE=run
fi

# Run detached unless RUN_WITH_DAEMON=false is set.
additional_args=""
if [[ "${RUN_WITH_DAEMON:-}" != "false" ]]; then
  additional_args="${additional_args} -d"
fi

docker compose --profile "$CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE" build
docker compose --profile "$CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE" stop
# additional_args is deliberately unquoted so it word-splits into flags.
docker compose --profile "$CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE" up --wait $additional_args

View File

@ -0,0 +1,24 @@
#!/usr/bin/env bash

# Deploy: pull the latest code, rebuild and restart the containers, then
# wait for the liveness endpoint to respond.

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${FLASK_ENV:-}" ]]; then
  export FLASK_ENV=staging
fi
if [[ -z "${FLASK_SESSION_SECRET_KEY:-}" ]]; then
  export FLASK_SESSION_SECRET_KEY=staging_super_secret_key_dont_tell_anyone
fi
if [[ -z "${CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE:-}" ]]; then
  # Fixed: this previously exported a misspelled variable
  # (CONNECTOR_PROXY_STATUS_IMSPIFFWORKFLOW_BACKEND_DOCKER_COMPOSE_PROFILE),
  # so the default never reached the variable the compose scripts read.
  export CONNECTOR_PROXY_STATUS_IM_DOCKER_COMPOSE_PROFILE=run
fi

git pull
./bin/build_and_run_with_docker_compose
./bin/wait_for_server_to_be_up

View File

@ -0,0 +1,15 @@
#!/usr/bin/env bash

# Run the Flask development server locally on port 7004.

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

if [[ -z "${FLASK_ENV:-}" ]]; then
  export FLASK_ENV=development
fi

# Dev-only secret; production injects its own value.
export FLASK_SESSION_SECRET_KEY=super_secret_key

poetry run flask run -p 7004

View File

@ -0,0 +1,24 @@
#!/usr/bin/env bash

# Poll the liveness endpoint once per second until it returns 200,
# giving up after max_attempts (first argument, default 100) tries.

function error_handler() {
  >&2 echo "Exited with BAD EXIT CODE '${2}' in ${0} script at line: ${1}."
  exit "$2"
}
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail

max_attempts="${1:-}"
if [[ -z "$max_attempts" ]]; then
  max_attempts=100
fi

echo "waiting for app to come up..."
attempts=0
# -w '%{http_code}' prints only the status code; the body is discarded.
while [[ "$(curl -s -o /dev/null -w '%{http_code}' "http://localhost:7004/liveness")" != "200" ]]; do
  if [[ "$attempts" -gt "$max_attempts" ]]; then
    >&2 echo "ERROR: Server not up after $max_attempts attempts. There is probably a problem"
    exit 1
  fi
  attempts=$(( attempts + 1 ))
  sleep 1
done

View File

@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,28 @@
"""SimpleAuth."""
import boto3 # type: ignore
from botocore.config import Config # type: ignore
class SimpleAuth:
    """Builds a boto3 service resource from an access key / secret key pair."""

    def __init__(self, resource_type: str, access_key: str, secret_key: str):
        """
        :param resource_type: boto3 resource name, e.g. "dynamodb" or "s3"
        :param access_key: AWS Access Key
        :param secret_key: AWS Secret Key
        """
        client_config = Config(
            region_name="us-east-1", retries={"max_attempts": 10, "mode": "standard"}
        )
        # Build the service resource once; callers fetch it via get_resource.
        self.resource = boto3.resource(
            resource_type,
            aws_access_key_id=access_key,
            aws_secret_access_key=secret_key,
            config=client_config,
        )

    def get_resource(self):
        """Return the underlying boto3 service resource."""
        return self.resource

View File

@ -0,0 +1,38 @@
"""AddDynamoItem."""
import json
from connector_aws.auths.simpleAuth import SimpleAuth # type: ignore
class AddDynamoItem:
    """Insert one item into a DynamoDB table."""

    def __init__(
        self, access_key: str, secret_key: str, table_name: str, item_data: str
    ):
        """
        :param access_key: AWS Access Key
        :param secret_key: AWS Secret Key
        :param table_name: name of the DynamoDB table to add information to
        :param item_data: JSON-encoded item to insert
        :return: Json Data structure containing a http status code (hopefully '200' for success..)
            and a response string.
        """
        self.dynamodb = SimpleAuth("dynamodb", access_key, secret_key).get_resource()
        # The Table object is lazy-loaded: no request is made and no
        # attributes are populated until the table is actually used.
        self.table = self.dynamodb.Table(table_name)
        self.item_data = json.loads(item_data)

    def execute(self, config, task_data):
        """Put the item and return the serialized result."""
        outcome = self.table.put_item(Item=self.item_data)
        # Drop the noisy transport metadata before serializing.
        outcome.pop("ResponseMetadata", None)
        return {"response": json.dumps(outcome), "mimetype": "application/json"}

View File

@ -0,0 +1,28 @@
"""QueryDynamoTable."""
import json
from connector_aws.auths.simpleAuth import SimpleAuth # type: ignore
class QueryDynamoTable:
    """Fetch the record stored under a given partition key."""

    def __init__(self, access_key: str, secret_key: str, table_name: str, key: str):
        """
        :param access_key: AWS Access Key
        :param secret_key: AWS Secret Key
        :param table_name: name of the DynamoDB table to read from
        :param key: the partition key for what to return
        :return: Json Data structure containing the requested data.
        """
        self.dynamodb = SimpleAuth("dynamodb", access_key, secret_key).get_resource()
        self.table = self.dynamodb.Table(table_name)
        self.key = key

    def execute(self, config, task_data):
        """Look up the item and return it serialized as JSON."""
        outcome = self.table.get_item(Key={"primaryKeyName": self.key})
        # Strip transport metadata before handing the payload back.
        outcome.pop("ResponseMetadata", None)
        return {"response": json.dumps(outcome), "mimetype": "application/json"}

View File

@ -0,0 +1,26 @@
"""ScanDynamoTable."""
import json
from connector_aws.auths.simpleAuth import SimpleAuth # type: ignore
class ScanDynamoTable:
    """Return every record in a table via a full scan (potentially very expensive)."""

    def __init__(self, access_key: str, secret_key: str, table_name: str):
        """
        :param access_key: AWS Access Key
        :param secret_key: AWS Secret Key
        :param table_name: name of the DynamoDB table to scan
        :return: Json Data structure containing the requested data.
        """
        self.dynamodb = SimpleAuth("dynamodb", access_key, secret_key).get_resource()
        self.table = self.dynamodb.Table(table_name)

    def execute(self, config, task_data):
        """Scan the table and return the results serialized as JSON."""
        outcome = self.table.scan()
        # Strip transport metadata before handing the payload back.
        outcome.pop("ResponseMetadata", None)
        return {"response": json.dumps(outcome), "mimetype": "application/json"}

View File

@ -0,0 +1,49 @@
"""UploadFile."""
from botocore.exceptions import ClientError # type: ignore
from connector_aws.auths.simpleAuth import SimpleAuth # type: ignore
class UploadFileData:
    """Upload raw file contents to an S3 bucket."""

    def __init__(
        self,
        access_key: str,
        secret_key: str,
        file_data: bytes,
        bucket: str,
        object_name: str,
    ):
        """
        :param access_key: AWS Access Key
        :param secret_key: AWS Secret Key
        :param file_data: Contents of file to be uploaded
        :param bucket: Bucket to upload to
        :param object_name: S3 object name.
        :return: Json Data structure containing a http status code (hopefully '200' for success..)
            and a response string.
        """
        self.client = SimpleAuth("s3", access_key, secret_key).get_resource()
        self.file_data = file_data
        self.bucket = bucket
        self.object_name = object_name

    def execute(self, config, task_data):
        """Perform the upload and report success or failure as JSON."""
        # Upload the file
        try:
            result = self.client.Object(self.bucket, self.object_name).put(
                Body=self.file_data
            )
            status = str(result["ResponseMetadata"]["HTTPStatusCode"])
            # TODO these can be improved
            if status == "200":
                response = '{ "result": "success" }'
            else:
                response = '{ "result": "error" }'
        except ClientError as e:
            # Fixed: the original f-string had unescaped braces, so the whole
            # literal parsed as a format expression with an invalid format
            # spec and raised ValueError at runtime instead of producing the
            # intended error body.  Braces are now escaped with {{ }}.
            response = f'{{ "error": "AWS Exception {e}" }}'
            status = "500"
        return {"response": response, "status": status, "mimetype": "application/json"}

View File

@ -0,0 +1,120 @@
[[package]]
name = "boto3"
version = "1.24.87"
description = "The AWS SDK for Python"
category = "main"
optional = false
python-versions = ">= 3.7"
[package.dependencies]
botocore = ">=1.27.87,<1.28.0"
jmespath = ">=0.7.1,<2.0.0"
s3transfer = ">=0.6.0,<0.7.0"
[package.extras]
crt = ["botocore[crt] (>=1.21.0,<2.0a0)"]
[[package]]
name = "botocore"
version = "1.27.87"
description = "Low-level, data-driven core of boto 3."
category = "main"
optional = false
python-versions = ">= 3.7"
[package.dependencies]
jmespath = ">=0.7.1,<2.0.0"
python-dateutil = ">=2.1,<3.0.0"
urllib3 = ">=1.25.4,<1.27"
[package.extras]
crt = ["awscrt (==0.14.0)"]
[[package]]
name = "jmespath"
version = "1.0.1"
description = "JSON Matching Expressions"
category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
[package.dependencies]
six = ">=1.5"
[[package]]
name = "s3transfer"
version = "0.6.0"
description = "An Amazon S3 Transfer Manager"
category = "main"
optional = false
python-versions = ">= 3.7"
[package.dependencies]
botocore = ">=1.12.36,<2.0a.0"
[package.extras]
crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"]
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "5847165cfd4b8263f268ba49f5fe4981a66e7267abfeaa46d4a278dfe1dcba66"
[metadata.files]
boto3 = [
{file = "boto3-1.24.87-py3-none-any.whl", hash = "sha256:bcc579e801774cb2c7dda87ff985feda1ae7e10591d11ef37526363139138bd4"},
{file = "boto3-1.24.87.tar.gz", hash = "sha256:3dd7ed74d1d29dd8094a078be86ed61c45de6638fe18856f7a1ff9282b4d929e"},
]
botocore = [
{file = "botocore-1.27.87-py3-none-any.whl", hash = "sha256:c4cbd22056ace4c7aa99e62e8ae629865ab80cc8bbf7c6d68ccf0f768f0034b6"},
{file = "botocore-1.27.87.tar.gz", hash = "sha256:216de9751116d0d1cc3901e26d95a5c9a30ecb6973ae6147af1cf504858d845a"},
]
jmespath = [
{file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"},
{file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
s3transfer = [
{file = "s3transfer-0.6.0-py3-none-any.whl", hash = "sha256:06176b74f3a15f61f1b4f25a1fc29a4429040b7647133a463da8fa5bd28d5ecd"},
{file = "s3transfer-0.6.0.tar.gz", hash = "sha256:2ed07d3866f523cc561bf4a00fc5535827981b117dd7876f036b0c1aca42c947"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
urllib3 = [
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]

View File

@ -0,0 +1,15 @@
[tool.poetry]
name = "connector-aws"
version = "0.1.0"
description = ""
authors = ["Dan Funk <dan@sartography.com>"]
[tool.poetry.dependencies]
python = "^3.10"
boto3 = "^1.24"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

View File

@ -0,0 +1,50 @@
"""GetPayRate."""
import json
import requests
#
# Sample response
#
# {
# "amount": "65000.00",
# "currency": "USD",
# "id": "4",
# "payRate": "65000.00 USD"
# }
class GetPayRate:
    """Fetch an employee's pay rate from the BambooHR API."""

    def __init__(self, employee_id: str):
        """
        :param employee_id: BambooHR employee id to look up
        """
        self.employee_id = employee_id

    def execute(self, config, task_data):
        """Query BambooHR and return the pay rate split into amount/currency.

        Reads BAMBOOHR_API_KEY and BAMBOOHR_SUBDOMAIN from config.
        """
        api_key = config["BAMBOOHR_API_KEY"]
        subdomain = config["BAMBOOHR_SUBDOMAIN"]
        url = f"https://api.bamboohr.com/api/gateway.php/{subdomain}/v1/employees/{self.employee_id}"
        headers = {"Accept": "application/json"}
        params = {"fields": "payRate", "onlyCurrent": "true"}
        # BambooHR uses the API key as the basic-auth username; password is
        # ignored ("x" by convention).
        auth = (api_key, "x")
        # Fixed: the original read raw_response.status_code in the return even
        # when requests.get itself raised, causing a NameError instead of the
        # error body.  Track the status separately with a failure default.
        status = 404
        try:
            raw_response = requests.get(url, params, headers=headers, auth=auth)
            status = raw_response.status_code
            parsed_response = json.loads(raw_response.text)
            # "payRate" arrives as e.g. "65000.00 USD"; split it for callers.
            pay_rate = parsed_response["payRate"]
            pay_rate_parts = pay_rate.split(" ")
            parsed_response["amount"] = pay_rate_parts[0]
            parsed_response["currency"] = pay_rate_parts[1]
            response = json.dumps(parsed_response)
        except Exception:
            response = '{ "error": "Invalid Employee ID" }'
        return {
            "response": response,
            "status": status,
            "mimetype": "application/json",
        }

View File

@ -0,0 +1,84 @@
[[package]]
name = "certifi"
version = "2022.6.15.1"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "requests"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "85c733ed2e30b2fa419bcfed233b1bbd86d4dced07ae54f97e4f90d17b1993ad"
[metadata.files]
certifi = [
{file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"},
{file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"},
]
charset-normalizer = [
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
requests = [
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
urllib3 = [
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]

View File

@ -0,0 +1,15 @@
[tool.poetry]
name = "connector-bamboohr"
version = "0.1.0"
description = ""
authors = ["Jon Herron <jon.herron@yahoo.com>"]
[tool.poetry.dependencies]
python = "^3.10"
requests = "^2.28.1"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,82 @@
"""Create."""
from io import BytesIO
from connector_aws.commands.uploadFile import UploadFileData
from jinja2 import BaseLoader
from jinja2 import Environment
from markdown2 import markdown # type: ignore
from xhtml2pdf import pisa # type: ignore
class CreatePDF:
    """Render a markdown template into a PDF document."""

    def __init__(self, template: str):
        """Store the markdown template to render later."""
        self.template = template

    def execute(self, config, task_data):
        """Render the template with ``task_data`` and convert it to a PDF.

        The markdown template is first converted to HTML, then treated as a
        Jinja2 template rendered with ``task_data``, and finally converted to
        PDF bytes. Returns a connector-style dict: PDF bytes with status
        "200" on success, or an "ERR" body with status "500" when the
        HTML-to-PDF conversion reports an error.
        """
        output = BytesIO()
        jinja_env = Environment(loader=BaseLoader, autoescape=True)
        html_source = markdown(self.template)
        rendered_html = jinja_env.from_string(html_source).render(**task_data)
        conversion = pisa.CreatePDF(rendered_html, dest=output)
        if conversion.err:
            return {
                "response": "ERR",
                "status": "500",
                "mimetype": "text",
            }
        return {
            "response": output.getvalue(),
            "status": "200",
            "mimetype": "application/pdf",
        }
class CreatePDFAndUploadToS3:
    """Render a markdown template to PDF and upload the result to S3."""

    def __init__(self, template: str, aws_object_name: str):
        """Store the markdown template and the destination S3 object key."""
        self.template = template
        self.aws_object_name = aws_object_name

    def execute(self, config, task_data):
        """Create the PDF and upload it via ``UploadFileData``.

        Reads AWS credentials and the target bucket from ``config``. On PDF
        failure returns a 500 JSON error; on upload failure returns the
        upload result unchanged; otherwise returns a JSON body naming the
        created object.
        """
        import json  # local import: this module does not otherwise use json

        aws_access_key_id = config["AWS_ACCESS_KEY_ID"]
        aws_secret_access_key = config["AWS_SECRET_ACCESS_KEY"]
        aws_bucket = config["AWS_INVOICE_S3_BUCKET"]

        pdf_result = CreatePDF(self.template).execute(config, task_data)
        if pdf_result["status"] != "200":
            return {
                "response": json.dumps({"error": "failed to create pdf"}),
                "status": "500",
                "mimetype": "application/json",
            }

        aws_result = UploadFileData(
            aws_access_key_id,
            aws_secret_access_key,
            pdf_result["response"],
            aws_bucket,
            self.aws_object_name,
        ).execute(config, task_data)
        if aws_result["status"] != "200":
            return aws_result

        # json.dumps escapes the object name; the previous hand-built string
        # produced invalid JSON when the name contained quotes or backslashes.
        return {
            "response": json.dumps({"created": self.aws_object_name}),
            "status": "200",
            "mimetype": "application/json",
        }

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,19 @@
[tool.poetry]
name = "connector-pdf"
version = "0.1.0"
description = ""
authors = ["Jon Herron <jon.herron@yahoo.com>"]
readme = "README.md"
packages = [{include = "connector_pdf"}]
[tool.poetry.dependencies]
python = "^3.10"
xhtml2pdf = "^0.2.8"
connector-aws = {develop=true, path="../connector-aws"}
Jinja2 = "^3.1.2"
markdown2 = "^2.4.5"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,41 @@
"""SendMessage."""
import json
from dataclasses import dataclass
import requests
from flask import current_app
@dataclass
class SendMessage:
    """Post a message to the Waku proxy's /sendMessage endpoint."""

    # Body text of the message to send.
    message: str
    # Message type understood by the Waku proxy.
    message_type: str
    # Address of the message recipient.
    recipient: str

    def execute(self, config, task_data):
        """POST the message and return a connector-style response dict.

        Network and parsing failures are captured and reported as a JSON
        error body rather than raised, leaving ``status`` as None.
        """
        url = f'{current_app.config["WAKU_PROXY_BASE_URL"]}/sendMessage'
        headers = {"Accept": "application/json", "Content-type": "application/json"}
        request_body = {
            "message": self.message,
            "recipient": self.recipient,
            "message_type": self.message_type,
        }
        status_code = None
        try:
            raw_response = requests.post(
                url, json.dumps(request_body), headers=headers, timeout=30
            )
            status_code = raw_response.status_code
            # Round-trip through json to guarantee the body we return is
            # valid JSON (and to surface a parse error for a garbage reply).
            response = json.dumps(json.loads(raw_response.text))
        except Exception as ex:
            response = json.dumps({"error": str(ex)})
        return {
            "response": response,
            # Previously hard-coded to True even on failure; report the
            # actual outcome of the HTTP call.
            "node_returned_200": status_code == 200,
            "status": status_code,
            "mimetype": "application/json",
        }

View File

@ -0,0 +1,84 @@
[[package]]
name = "certifi"
version = "2022.6.15.1"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "main"
optional = false
python-versions = ">=3.6.0"
[package.extras]
unicode_backport = ["unicodedata2"]
[[package]]
name = "idna"
version = "3.3"
description = "Internationalized Domain Names in Applications (IDNA)"
category = "main"
optional = false
python-versions = ">=3.5"
[[package]]
name = "requests"
version = "2.28.1"
description = "Python HTTP for Humans."
category = "main"
optional = false
python-versions = ">=3.7, <4"
[package.dependencies]
certifi = ">=2017.4.17"
charset-normalizer = ">=2,<3"
idna = ">=2.5,<4"
urllib3 = ">=1.21.1,<1.27"
[package.extras]
socks = ["PySocks (>=1.5.6,!=1.5.7)"]
use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"]
[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "85c733ed2e30b2fa419bcfed233b1bbd86d4dced07ae54f97e4f90d17b1993ad"
[metadata.files]
certifi = [
{file = "certifi-2022.6.15.1-py3-none-any.whl", hash = "sha256:43dadad18a7f168740e66944e4fa82c6611848ff9056ad910f8f7a3e46ab89e0"},
{file = "certifi-2022.6.15.1.tar.gz", hash = "sha256:cffdcd380919da6137f76633531a5817e3a9f268575c128249fb637e4f9e73fb"},
]
charset-normalizer = [
{file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"},
{file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"},
]
idna = [
{file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"},
{file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"},
]
requests = [
{file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"},
{file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"},
]
urllib3 = [
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]

View File

@ -0,0 +1,15 @@
[tool.poetry]
name = "connector-waku"
version = "0.1.0"
description = ""
authors = ["Jon Herron <jon.herron@yahoo.com>"]
[tool.poetry.dependencies]
python = "^3.10"
requests = "^2.28.1"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1,129 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1 @@
"""__init__."""

View File

@ -0,0 +1,34 @@
"""Oauth."""
class OAuth:
    """Describe the Xero OAuth2 application used by the connector proxy."""

    def __init__(self, client_id: str, client_secret: str):
        """Record the Xero application credentials."""
        self.client_id = client_id
        self.client_secret = client_secret

    def app_description(self):
        """Return the static Xero OAuth2 endpoint and scope configuration."""
        token_url = "https://identity.xero.com/connect/token"
        scope = (
            "offline_access openid profile email accounting.transactions "
            "accounting.reports.read accounting.journals.read accounting.settings "
            "accounting.contacts accounting.attachments assets projects"
        )
        return {
            "name": "xero",
            "version": "2",
            "client_id": self.client_id,
            "client_secret": self.client_secret,
            "endpoint_url": "https://api.xero.com/",
            "authorization_url": "https://login.xero.com/identity/connect/authorize",
            "access_token_url": token_url,
            "refresh_token_url": token_url,
            "scope": scope,
        }

    @staticmethod
    def filtered_params(params):
        """Return only the credential entries of ``params``."""
        return {key: params[key] for key in ("client_id", "client_secret")}

View File

@ -0,0 +1,242 @@
"""CreateInvoice."""
import json
from datetime import datetime
from datetime import timedelta
from xero_python.accounting import AccountingApi # type: ignore
from xero_python.accounting import Contact
from xero_python.accounting import Invoice
from xero_python.accounting import Invoices
from xero_python.accounting import LineItem
from xero_python.api_client import ApiClient # type: ignore
from xero_python.api_client.configuration import Configuration # type: ignore
from xero_python.api_client.oauth2 import OAuth2Token # type: ignore
from xero_python.api_client.serializer import serialize # type: ignore
from xero_python.identity import IdentityApi # type: ignore
#
# Sample response
#
# {
# "Invoices": [
# {
# "AmountDue": 21.85,
# "AmountPaid": 0.0,
# "BrandingThemeID": "324587a9-7eed-46c0-ad64-fa941a1b5b3e",
# "Contact": {
# "Addresses": [
# {
# "AddressLine1": "79 Madison Ave, Fl 2",
# "AddressLine2": "",
# "AddressLine3": "",
# "AddressLine4": "",
# "AddressType": "STREET",
# "AttentionTo": "",
# "City": "New York",
# "Country": "USA",
# "PostalCode": "10016",
# "Region": "NY"
# },
# {
# "AddressLine1": "Nairn Towers, 901",
# "AddressLine2": "120-130 Flinders Street",
# "AddressType": "POBOX",
# "AttentionTo": "",
# "City": "Oaktown",
# "Country": "",
# "PostalCode": "89012",
# "Region": "NY"
# }
# ],
# "BankAccountDetails": "",
# "ContactGroups": [
# {
# "ContactGroupID": "1b979d15-4ad9-42d7-8111-85b990477df0",
# "Contacts": [],
# "Name": "Training",
# "Status": "ACTIVE"
# }
# ],
# "ContactID": "375ac066-85a0-4044-a8be-3159856d5c85",
# "ContactPersons": [],
# "ContactStatus": "ACTIVE",
# "EmailAddress": "info@rexmedia.co",
# "FirstName": "",
# "HasAttachments": false,
# "HasValidationErrors": false,
# "IsCustomer": true,
# "IsSupplier": false,
# "LastName": "",
# "Name": "Rex Media Group",
# "Phones": [
# {
# "PhoneAreaCode": "",
# "PhoneCountryCode": "",
# "PhoneNumber": "",
# "PhoneType": "DDI"
# },
# {
# "PhoneAreaCode": "",
# "PhoneCountryCode": "",
# "PhoneNumber": "",
# "PhoneType": "FAX"
# },
# {
# "PhoneAreaCode": "",
# "PhoneCountryCode": "",
# "PhoneNumber": "",
# "PhoneType": "MOBILE"
# },
# {
# "PhoneAreaCode": "201",
# "PhoneCountryCode": "",
# "PhoneNumber": "5556789",
# "PhoneType": "DEFAULT"
# }
# ],
# "PurchasesTrackingCategories": [],
# "SalesTrackingCategories": [],
# "UpdatedDateUTC": "/Date(1663005822390+0000)/"
# },
# "CurrencyCode": "USD",
# "CurrencyRate": 1.0,
# "Date": "/Date(1602288000000)/",
# "DueDate": "/Date(1603843200000)/",
# "HasAttachments": false,
# "HasErrors": false,
# "InvoiceID": "119f7d2e-0598-4dbb-823b-6f6d89823369",
# "InvoiceNumber": "INV-0074",
# "IsDiscounted": false,
# "LineAmountTypes": "Exclusive",
# "LineItems": [
# {
# "AccountCode": "400",
# "Description": "Foobar",
# "LineAmount": 20.0,
# "LineItemID": "b3c5b459-2b91-4b00-8c94-b691f54ab464",
# "Quantity": 1.0,
# "TaxAmount": 1.85,
# "TaxType": "OUTPUT",
# "Tracking": [],
# "UnitAmount": 20.0
# }
# ],
# "Overpayments": [],
# "Prepayments": [],
# "Reference": "Website Design",
# "SentToContact": false,
# "Status": "AUTHORISED",
# "SubTotal": 20.0,
# "Total": 21.85,
# "TotalTax": 1.85,
# "Type": "ACCREC",
# "UpdatedDateUTC": "/Date(1663261898297+0000)/"
# }
# ]
# }
class CreateInvoice:
    """Create an ACCREC invoice in Xero via the Accounting API."""

    def __init__(
        self,
        access_token,
        description: str,
        contact_name: str,
        contact_email: str,
        amount: str,
        # reference: str,
        # created_date: str,
        # due_date: str,
        # account_code: str,
    ):
        """Store the OAuth2 token (a JSON string) and the invoice fields."""
        self.access_token = access_token
        self.description = description
        self.contact_name = contact_name
        self.contact_email = contact_email
        self.amount = amount

    def execute(self, config, task_data):
        """Creates an invoice in xero.

        Returns a connector-style dict: the serialized created invoices with
        status 200, or a JSON error body with status 500.
        """
        client_id = config["XERO_CLIENT_ID"]
        client_secret = config["XERO_CLIENT_SECRET"]
        access_token = json.loads(self.access_token)
        api_client = ApiClient(
            Configuration(
                debug=True,
                oauth2_token=OAuth2Token(
                    client_id=client_id, client_secret=client_secret
                ),
            ),
            pool_threads=1,
        )

        @api_client.oauth2_token_getter
        def obtain_xero_oauth2_token():
            """Provide the current token to the xero client."""
            return access_token

        @api_client.oauth2_token_saver
        def store_xero_oauth2_token(token):
            """Persist a refreshed token for subsequent API calls.

            ``nonlocal`` is required here: without it the assignment created
            a new local variable and refreshed tokens were silently dropped.
            """
            nonlocal access_token
            access_token = token

        api_instance = AccountingApi(api_client)
        summarize_errors = "True"
        unitdp = 2  # decimal places for unit amounts
        date_value = datetime.now()
        due_date_value = date_value + timedelta(days=7)

        contact = Contact(name=self.contact_name, email_address=self.contact_email)
        line_item = LineItem(
            description=self.description,
            quantity=1.0,
            unit_amount=self.amount,
            account_code="400",
            tracking=[],
        )
        invoice = Invoice(
            type="ACCREC",
            contact=contact,
            date=date_value,
            due_date=due_date_value,
            line_items=[line_item],
            reference="Created by SpiffWorkflow",
            status="AUTHORISED",
        )
        invoices = Invoices(invoices=[invoice])

        try:
            xero_tenant_id = self._get_xero_tenant_id(api_client, access_token)
            created_invoices = api_instance.create_invoices(
                xero_tenant_id, invoices, summarize_errors, unitdp
            )
            response = json.dumps(serialize(created_invoices))
            status = 200
        except Exception as e:
            # TODO better error logging/reporting in debug
            # Not every exception carries ``.reason`` (the old code raised
            # AttributeError here); fall back to the exception text, and let
            # json.dumps escape it instead of building JSON by hand.
            response = json.dumps({"error": str(getattr(e, "reason", e))})
            status = 500
        return {"response": response, "status": status, "mimetype": "application/json"}

    def _get_xero_tenant_id(self, api_client, token):
        """Return the first ORGANISATION tenant id, or None if unavailable."""
        if not token:
            return None
        identity_api = IdentityApi(api_client)
        for connection in identity_api.get_connections():
            if connection.tenant_type == "ORGANISATION":
                return connection.tenant_id
        return None

View File

@ -0,0 +1,78 @@
[[package]]
name = "certifi"
version = "2022.6.15"
description = "Python package for providing Mozilla's CA Bundle."
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "python-dateutil"
version = "2.8.2"
description = "Extensions to the standard Python datetime module"
category = "main"
optional = false
python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
[package.dependencies]
six = ">=1.5"
[[package]]
name = "six"
version = "1.16.0"
description = "Python 2 and 3 compatibility utilities"
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*"
[[package]]
name = "urllib3"
version = "1.26.12"
description = "HTTP library with thread-safe connection pooling, file post, and more."
category = "main"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4"
[package.extras]
brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"]
secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"]
socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
[[package]]
name = "xero-python"
version = "1.18.0"
description = "Official Python sdk for Xero API generated by OpenAPI spec for oAuth2"
category = "main"
optional = false
python-versions = ">=3.5"
[package.dependencies]
certifi = "*"
python-dateutil = ">=2.7"
urllib3 = "*"
[metadata]
lock-version = "1.1"
python-versions = "^3.10"
content-hash = "5edaacf9e6d6917f1e4cdc601bb373dd708bb2d9b1b63790c891e208b4682a0a"
[metadata.files]
certifi = [
{file = "certifi-2022.6.15-py3-none-any.whl", hash = "sha256:fe86415d55e84719d75f8b69414f6438ac3547d2078ab91b67e779ef69378412"},
{file = "certifi-2022.6.15.tar.gz", hash = "sha256:84c85a9078b11105f04f3036a9482ae10e4621616db313fe045dd24743a0820d"},
]
python-dateutil = [
{file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
{file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
]
six = [
{file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"},
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
urllib3 = [
{file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"},
{file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"},
]
xero-python = [
{file = "xero_python-1.18.0.tar.gz", hash = "sha256:99960f6026b4cd3cc182579f34fba6a1b3b8560c4e72f32a3d950ea0b6050f9d"},
]

View File

@ -0,0 +1,15 @@
[tool.poetry]
name = "connector-xero"
version = "0.1.0"
description = ""
authors = ["Jon Herron <jon.herron@yahoo.com>"]
[tool.poetry.dependencies]
python = "^3.10"
xero-python = "^1.18.0"
[tool.poetry.dev-dependencies]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -0,0 +1,20 @@
version: "3.8"
services:
connector-proxy-status-im: &connector-proxy-status-im
container_name: connector-proxy-status-im
profiles:
- run
build:
context: .
environment:
- FLASK_ENV=${FLASK_ENV:-development}
- FLASK_DEBUG=0
- FLASK_SESSION_SECRET_KEY=${FLASK_SESSION_SECRET_KEY:-super_secret_key}
ports:
- "7004:7004"
network_mode: host
healthcheck:
test: curl localhost:7004/liveness --fail
interval: 10s
timeout: 5s
retries: 20

View File

@ -0,0 +1,220 @@
"""Nox sessions."""
import os
import shutil
import sys
from pathlib import Path
from textwrap import dedent
import nox
try:
from nox_poetry import Session
from nox_poetry import session
except ImportError:
message = f"""\
Nox failed to import the 'nox-poetry' package.
Please install it using the following command:
{sys.executable} -m pip install nox-poetry"""
raise SystemExit(dedent(message)) from None
# Installed package name; used by the xdoctest session below.
package = "connector_proxy_status_im"
# Interpreters the per-version sessions (mypy, tests, ...) run under.
python_versions = ["3.10", "3.9"]
nox.needs_version = ">= 2021.6.6"
# Sessions run by a bare `nox` invocation; coverage and the live docs
# server are opt-in.
nox.options.sessions = (
    "pre-commit",
    "safety",
    "mypy",
    "tests",
    "typeguard",
    "xdoctest",
    "docs-build",
)
def setup_database(session: Session) -> None:
    """Run database migrations against the database."""
    # Point Flask at a throwaway instance dir and test configuration before
    # invoking the migration command.
    env = session.env
    env["FLASK_INSTANCE_PATH"] = os.path.join(os.getcwd(), "instance", "testing")
    env["FLASK_SESSION_SECRET_KEY"] = "super_secret_key"
    env["FLASK_APP"] = "src/connector_proxy_status_im"
    env["FLASK_ENV"] = "testing"
    session.run("flask", "db", "upgrade")
def activate_virtualenv_in_precommit_hooks(session: Session) -> None:
    """Activate virtualenv in hooks installed by pre-commit.

    This function patches git hooks installed by pre-commit to activate the
    session's virtual environment. This allows pre-commit to locate hooks in
    that environment when invoked from git.

    Args:
        session: The Session object.
    """
    assert session.bin is not None  # noqa: S101
    virtualenv = session.env.get("VIRTUAL_ENV")
    if virtualenv is None:
        return
    hookdir = Path(".git") / "hooks"
    if not hookdir.is_dir():
        return
    for hook in hookdir.iterdir():
        # Skip git's shipped *.sample hooks and anything that is not a file.
        if hook.name.endswith(".sample") or not hook.is_file():
            continue
        text = hook.read_text()
        bindir = repr(session.bin)[1:-1]  # strip quotes
        # Only patch hooks that reference this session's bin dir; the
        # Path("A") == Path("a") test detects case-insensitive filesystems,
        # where the comparison must be done lowercased.
        if not (
            Path("A") == Path("a") and bindir.lower() in text.lower() or bindir in text
        ):
            continue
        lines = text.splitlines()
        # Only python hook scripts can take the env-var header below.
        if not (lines[0].startswith("#!") and "python" in lines[0].lower()):
            continue
        header = dedent(
            f"""\
            import os
            os.environ["VIRTUAL_ENV"] = {virtualenv!r}
            os.environ["PATH"] = os.pathsep.join((
                {session.bin!r},
                os.environ.get("PATH", ""),
            ))
            """
        )
        # Insert directly after the shebang so the env vars are set before
        # the hook body runs.
        lines.insert(1, header)
        hook.write_text("\n".join(lines))
@session(name="pre-commit", python="3.10")
def precommit(session: Session) -> None:
    """Lint using pre-commit."""
    args = session.posargs or ["run", "--all-files", "--show-diff-on-failure"]
    linters = (
        "black",
        "darglint",
        "flake8",
        "flake8-bandit",
        "flake8-bugbear",
        "flake8-docstrings",
        "flake8-rst-docstrings",
        "pep8-naming",
        "pre-commit",
        "pre-commit-hooks",
        "pyupgrade",
        "reorder-python-imports",
    )
    session.install(*linters)
    session.run("pre-commit", *args)
    # After `nox -s pre-commit -- install`, patch the installed hooks so
    # they activate this session's virtualenv.
    if args and args[0] == "install":
        activate_virtualenv_in_precommit_hooks(session)
@session(python="3.10")
def safety(session: Session) -> None:
    """Scan dependencies for insecure packages."""
    # Export the locked dependency set so safety scans exactly what poetry
    # would install.
    requirements_file = session.poetry.export_requirements()
    session.install("safety")
    check_args = ("safety", "check", "--full-report", f"--file={requirements_file}")
    session.run(*check_args)
@session(python=python_versions)
def mypy(session: Session) -> None:
    """Type-check using mypy."""
    targets = session.posargs or ["src", "tests", "docs/conf.py"]
    session.install(".")
    session.install("mypy", "pytest", "sqlalchemy-stubs")
    session.run("mypy", *targets)
    if not session.posargs:
        # noxfile.py lives outside src/, so check it separately against the
        # interpreter running nox itself.
        session.run("mypy", f"--python-executable={sys.executable}", "noxfile.py")
@session(python=python_versions)
def tests(session: Session) -> None:
    """Run the test suite."""
    session.install(".")
    session.install("coverage[toml]", "pytest", "pygments")
    pytest_cmd = ["coverage", "run", "--parallel", "-m", "pytest", *session.posargs]
    try:
        setup_database(session)
        session.run(*pytest_cmd)
    finally:
        # Queue the coverage report even when tests fail, but only for
        # interactive runs.
        if session.interactive:
            session.notify("coverage", posargs=[])
@session
def coverage(session: Session) -> None:
    """Produce the coverage report."""
    report_args = session.posargs or ["report"]
    session.install("coverage[toml]")
    # Merge the per-process data files produced by `coverage run --parallel`
    # before reporting, unless the caller asked for something specific.
    has_parallel_data = any(Path().glob(".coverage.*"))
    if not session.posargs and has_parallel_data:
        session.run("coverage", "combine")
    session.run("coverage", *report_args)
@session(python=python_versions)
def typeguard(session: Session) -> None:
    """Runtime type checking using Typeguard."""
    extra_args = session.posargs
    session.install(".")
    session.install("pytest", "typeguard", "pygments")
    setup_database(session)
    # Flag read by the application/test setup to enable typeguard
    # instrumentation during the pytest run.
    session.env["RUN_TYPEGUARD"] = "true"
    session.run("pytest", *extra_args)
@session(python=python_versions)
def xdoctest(session: Session) -> None:
    """Run examples with xdoctest."""
    if session.posargs:
        run_args = [package, *session.posargs]
    else:
        # Default: run every doctest-style example in the package.
        run_args = [f"--modname={package}", "--command=all"]
        if "FORCE_COLOR" in os.environ:
            run_args.append("--colored=1")
    session.install(".")
    session.install("xdoctest[colors]")
    session.run("python", "-m", "xdoctest", *run_args)
@session(name="docs-build", python="3.10")
def docs_build(session: Session) -> None:
    """Build the documentation."""
    sphinx_args = session.posargs or ["docs", "docs/_build"]
    if not session.posargs and "FORCE_COLOR" in os.environ:
        sphinx_args.insert(0, "--color")
    session.install(".")
    session.install("sphinx", "sphinx-click", "furo")
    # Start from a clean output tree so stale pages are not kept around.
    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)
    session.run("sphinx-build", *sphinx_args)
@session(python="3.10")
def docs(session: Session) -> None:
    """Build and serve the documentation with live reloading on file changes."""
    autobuild_args = session.posargs or ["--open-browser", "docs", "docs/_build"]
    session.install(".")
    session.install("sphinx", "sphinx-autobuild", "sphinx-click", "furo")
    # Start from a clean output tree so stale pages are not served.
    build_dir = Path("docs", "_build")
    if build_dir.exists():
        shutil.rmtree(build_dir)
    session.run("sphinx-autobuild", *autobuild_args)

2421
connector-proxy-status-im/poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,42 @@
[tool.poetry]
name = "connector-proxy-status-im"
version = "0.1.0"
description = ""
authors = ["Jon Herron <jon.herron@yahoo.com>"]
[tool.poetry.dependencies]
python = "^3.10"
Flask = "^2.2.2"
connector-xero = {develop=true, path="connectors/connector-xero"}
connector-bamboohr = {develop=true, path="connectors/connector-bamboohr"}
connector-waku = {develop=true, path="connectors/connector-waku"}
connector-aws = {develop=true, path="connectors/connector-aws"}
connector-pdf = {develop=true, path="connectors/connector-pdf"}
gunicorn = "^20.1.0"
Flask-OAuthlib = "^0.9.6"
Flask-Session = "^0.4.0"
types-requests = "^2.28.11.2"
[tool.poetry.dev-dependencies]
[tool.poetry.group.dev.dependencies]
pytest = "^7.1.3"
coverage = "^6.5.0"
safety = "^2.3.1"
mypy = "^0.982"
typeguard = "^2.13.3"
xdoctest = "^1.1.0"
Sphinx = "^5.2.3"
sphinx-autobuild = "^2021.3.14"
pre-commit = "^2.20.0"
flake8 = "^5.0.4"
black = "^22.10.0"
flake8-bandit = "^4.1.1"
pyupgrade = "^3.0.0"
pre-commit-hooks = "^4.3.0"
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"