From 98740750c875738797041ba36b36aa5c1c03efbb Mon Sep 17 00:00:00 2001 From: Niels Perfors Date: Fri, 5 Apr 2024 10:26:00 +0200 Subject: [PATCH] v2 init --- .devcontainer/Dockerfile | 14 + .devcontainer/devcontainer.json | 47 + .devcontainer/requirements-dev.txt | 23 + .gitattributes | 2 + .github/labeler.yml | 18 - .github/scripts/release.py | 48 - .github/workflows/lint.yml | 23 - .github/workflows/pr-labeler.yml | 13 - .github/workflows/publish.yml | 78 +- .github/workflows/release.yml | 12 - .gitignore | 4 +- .pre-commit-config.yaml | 54 +- .pypirc | 10 + .vscode/extensions.json | 7 + .vscode/settings.json | 43 +- .vscode/tasks.json | 41 + LICENSE | 2 +- README.md | 29 + docs/ENDPOINTS.md | 972 ------------- docs/METHODS.md | 329 ----- docs/README.md | 93 -- pyproject.toml | 197 ++- requirements.txt | 4 + sapcommissions/__init__.py | 69 - sapcommissions/endpoints.py | 2005 ------------------------- sapcommissions/exceptions.py | 18 - sapcommissions/resources.py | 2178 ---------------------------- setup.py | 4 - src/sapcommissions/__init__.py | 7 + src/sapcommissions/__main__.py | 139 ++ src/sapcommissions/client.py | 390 +++++ src/sapcommissions/const.py | 167 +++ src/sapcommissions/deploy.py | 190 +++ src/sapcommissions/exceptions.py | 84 ++ src/sapcommissions/helpers.py | 106 ++ src/sapcommissions/model.py | 763 ++++++++++ tests/__init__.py | 1 + tests/conftest.py | 48 + tests/test_base.py | 73 - tests/test_client.py | 59 + tests/test_dev.py | 101 ++ tests/test_endpoints.py | 328 ----- tests/test_model.py | 57 + tests/test_pipeline.py | 173 +++ tests/test_resources.py | 182 --- tox.ini | 8 - 46 files changed, 2723 insertions(+), 6490 deletions(-) create mode 100644 .devcontainer/Dockerfile create mode 100644 .devcontainer/devcontainer.json create mode 100644 .devcontainer/requirements-dev.txt create mode 100644 .gitattributes delete mode 100644 .github/labeler.yml delete mode 100644 .github/scripts/release.py delete mode 100644 .github/workflows/lint.yml 
delete mode 100644 .github/workflows/pr-labeler.yml delete mode 100644 .github/workflows/release.yml create mode 100644 .pypirc create mode 100644 .vscode/extensions.json create mode 100644 .vscode/tasks.json create mode 100644 README.md delete mode 100644 docs/ENDPOINTS.md delete mode 100644 docs/METHODS.md delete mode 100644 docs/README.md create mode 100644 requirements.txt delete mode 100644 sapcommissions/__init__.py delete mode 100644 sapcommissions/endpoints.py delete mode 100644 sapcommissions/exceptions.py delete mode 100644 sapcommissions/resources.py delete mode 100644 setup.py create mode 100644 src/sapcommissions/__init__.py create mode 100644 src/sapcommissions/__main__.py create mode 100644 src/sapcommissions/client.py create mode 100644 src/sapcommissions/const.py create mode 100644 src/sapcommissions/deploy.py create mode 100644 src/sapcommissions/exceptions.py create mode 100644 src/sapcommissions/helpers.py create mode 100644 src/sapcommissions/model.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py delete mode 100644 tests/test_base.py create mode 100644 tests/test_client.py create mode 100644 tests/test_dev.py delete mode 100644 tests/test_endpoints.py create mode 100644 tests/test_model.py create mode 100644 tests/test_pipeline.py delete mode 100644 tests/test_resources.py delete mode 100644 tox.ini diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 0000000..0f8d288 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,14 @@ +FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.11 + +# Uninstall pre-installed formatting and linting tools +# They would conflict with our pinned versions +RUN pipx uninstall pydocstyle \ + && pipx uninstall pycodestyle \ + && pipx uninstall mypy \ + && pipx uninstall pylint + +# Install Python dependencies from requirements +WORKDIR /usr/src/app +COPY . . 
+RUN python -m pip install --upgrade pip setuptools \ + && python -m pip install -r requirements.txt -r .devcontainer/requirements-dev.txt diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 0000000..8511dea --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,47 @@ +{ + "name": "Python Package Devcontainer", + "build": { + "context": "..", + "dockerfile": "./Dockerfile" + }, + "customizations": { + "vscode": { + "settings": { + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.testing.pytestEnabled": false, + "python.testing.unittestEnabled": false, + "python.testing.pytestArgs": [ + "--no-cov" + ], + "editor.formatOnPaste": false, + "editor.formatOnSave": true, + "editor.formatOnType": true, + "files.trimTrailingWhitespace": true, + "terminal.integrated.profiles.linux": { + "zsh": { + "path": "/usr/bin/zsh" + } + }, + "terminal.integrated.defaultProfile.linux": "zsh", + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff" + } + }, + "extensions": [ + "ms-python.python", + "ms-python.vscode-pylance", + "visualstudioexptteam.vscodeintellicode", + "charliermarsh.ruff", + "ms-python.pylint", + "redhat.vscode-yaml", + "esbenp.prettier-vscode", + "GitHub.vscode-pull-request-github", + "eamodio.gitlens", + "tamasfe.even-better-toml", + "github.vscode-github-actions" + ] + } + }, + // "postCreateCommand": "pre-commit install --install-hooks", + "postStartCommand": "python -m pip install -e ." +} \ No newline at end of file diff --git a/.devcontainer/requirements-dev.txt b/.devcontainer/requirements-dev.txt new file mode 100644 index 0000000..fd8342a --- /dev/null +++ b/.devcontainer/requirements-dev.txt @@ -0,0 +1,23 @@ +# Use this file to list development requirements for your project. 
+ +# build +setuptools>=61.0 +setuptools_scm>=6.2 + +# pre-commit +pre-commit>=3.6.0 +ruff>=0.2.0 +codespell>=2.2.6 +yamllint>=1.33.0 + +# dev +pylint>=3.0.3 +mypy>=1.8.0 + +# test +coverage>=7.4.1 +pytest>=7.4.4 +# pytest-socket>=0.6.0 +pytest-cov>=4.1.0 +pytest-asyncio>=0.23.4 +# aioresponses>=0.7.6 diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..e911947 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +* text eol=lf +*.py whitespace=error \ No newline at end of file diff --git a/.github/labeler.yml b/.github/labeler.yml deleted file mode 100644 index f01cb31..0000000 --- a/.github/labeler.yml +++ /dev/null @@ -1,18 +0,0 @@ -ci/cd: - - "*" - - ".github/**" - - ".vscode/**" - -documentation: - - "README.md" - - "./*.md" - - "docs/**" - -endpoints: - - "**/*endpoints**" - -resources: - - "**/*resources**" - -tests: - - "tests/**" diff --git a/.github/scripts/release.py b/.github/scripts/release.py deleted file mode 100644 index b421abe..0000000 --- a/.github/scripts/release.py +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env python3 -import json -import subprocess - - -def get_last_version() -> str: - """Return the version number of the last release.""" - json_string = ( - subprocess.run( - ["gh", "release", "view", "--json", "tagName"], - check=True, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - ) - .stdout.decode("utf8") - .strip() - ) - - return json.loads(json_string)["tagName"] - - -def bump_patch_number(version_number: str) -> str: - """Return a copy of `version_number` with the patch number incremented.""" - major, minor, patch = version_number.split(".") - return f"{major}.{minor}.{int(patch) + 1}" - - -def create_new_patch_release(): - """Create a new patch release on GitHub.""" - try: - last_version_number = get_last_version() - except subprocess.CalledProcessError as err: - if err.stderr.decode("utf8").startswith("HTTP 404:"): - # The project doesn't have any releases yet. 
- new_version_number = "0.0.1" - else: - raise - else: - new_version_number = bump_patch_number(last_version_number) - - subprocess.run( - ["gh", "release", "create", "--generate-notes", new_version_number], - check=True, - ) - - -if __name__ == "__main__": - create_new_patch_release() diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1ef0ce2..0000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Code Quality and Lint -on: [pull_request] - -jobs: - lint: - name: Python Linters - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - - name: Run Linters - uses: ricardochaves/python-lint@v1.4.0 - with: - python-root-list: "sapcommissions tests" - use-mypy: false - use-pycodestyle: true - use-pylint: false - extra-pylint-options: "--rcfile=pyproject.toml" - use-flake8: true - extra-flake8-options: "--config=tox.ini" - use-black: true - use-isort: true diff --git a/.github/workflows/pr-labeler.yml b/.github/workflows/pr-labeler.yml deleted file mode 100644 index f857cac..0000000 --- a/.github/workflows/pr-labeler.yml +++ /dev/null @@ -1,13 +0,0 @@ -name: labeler - -on: [pull_request] - -jobs: - labeler: - name: Label the PR changes - permissions: - contents: read - pull-requests: write - runs-on: ubuntu-latest - steps: - - uses: actions/labeler@v4 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index e69464a..4380338 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,15 +1,25 @@ -name: Publish to PyPI.org +--- +# This workflow will upload a Python Package using Twine +# when a release is created. 
For more information see: +# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/ + +name: Publish Python 🐍 distribution 📦 to PyPI and TestPyPI + on: release: types: [published] + jobs: - pypi: + build: + name: Build distribution 📦 runs-on: ubuntu-latest + if: startsWith(github.ref, 'refs/tags/') # only publish on tag pushes steps: - - name: Checkout - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + - name: Set up Python + uses: actions/setup-python@v5 with: - fetch-depth: 0 + python-version: "3.11" - name: Install pypa/build run: >- python3 -m @@ -17,19 +27,55 @@ jobs: build --user - name: Build a binary wheel and a source tarball - run: >- - python3 -m - build - --sdist - --wheel - --outdir dist/ - - name: Publish distribution 📦 to Test PyPI + run: python3 -m build + - name: Store the distribution packages + uses: actions/upload-artifact@v4 + with: + name: python-package-distributions + path: dist/ + + publish-to-testpypi: + name: Publish Python 🐍 distribution 📦 to TestPyPI + needs: + - build + runs-on: ubuntu-latest + + environment: + name: testpypi + url: https://test.pypi.org/p/python-sapcommissions + + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: + name: python-package-distributions + path: dist/ + - name: Publish distribution 📦 to TestPyPI uses: pypa/gh-action-pypi-publish@release/v1 with: - password: ${{ secrets.TEST_PYPI_API_TOKEN }} repository-url: https://test.pypi.org/legacy/ + + publish-to-pypi: + name: >- + Publish Python 🐍 distribution 📦 to PyPI + needs: + - publish-to-testpypi + - build + runs-on: ubuntu-latest + environment: + name: pypi + url: https://pypi.org/p/python-sapcommissions + permissions: + id-token: write # IMPORTANT: mandatory for trusted publishing + + steps: + - name: Download all the dists + uses: actions/download-artifact@v4 + with: 
+ name: python-package-distributions + path: dist/ - name: Publish distribution 📦 to PyPI - if: startsWith(github.ref, 'refs/tags') uses: pypa/gh-action-pypi-publish@release/v1 - with: - password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml deleted file mode 100644 index 6223229..0000000 --- a/.github/workflows/release.yml +++ /dev/null @@ -1,12 +0,0 @@ -name: Create a new patch release -on: workflow_dispatch -jobs: - github: - runs-on: ubuntu-latest - steps: - - name: Checkout - uses: actions/checkout@v3 - - name: Create new patch release - run: .github/scripts/release.py - env: - GITHUB_TOKEN: ${{ secrets.PERSONAL_ACCESS_TOKEN }} diff --git a/.gitignore b/.gitignore index 15747ec..505eb38 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,6 @@ __pycache__/ .venv/ dist/ build/ -main.py +.env +*.log +tests/deploy/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5546fe1..ed35fa7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,8 +1,26 @@ # See https://pre-commit.com for more information # See https://pre-commit.com/hooks.html for more hooks default_language_version: - python: python3.10 + python: python3.11 repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.1.15 + hooks: + - id: ruff + args: + - --fix + - id: ruff-format + files: ^((src|tests)/.+)?[^/]+\.py$ + - repo: https://github.com/codespell-project/codespell + rev: v2.2.2 + hooks: + - id: codespell + args: + - --ignore-words-list=perfors + - --skip="./.*,*.csv,*.json,*.txt,*.xml" + - --quiet-level=2 + exclude_types: [csv, json, txt, xml] + exclude: ^tests/fixtures/.*$ - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.4.0 hooks: @@ -17,30 +35,16 @@ repos: args: - --pytest-test-first - id: no-commit-to-branch - - id: trailing-whitespace - - - repo: https://github.com/PyCQA/flake8 - rev: 6.0.0 - hooks: - - id: flake8 args: - - --config=tox.ini 
- - - repo: https://github.com/psf/black - rev: 23.3.0 - hooks: - - id: black - - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + - --branch=main + - id: trailing-whitespace + - id: check-json + exclude: (.vscode|.devcontainer) + - repo: https://github.com/adrienverge/yamllint.git + rev: v1.32.0 hooks: - - id: isort - - - repo: local + - id: yamllint + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.3 hooks: - - id: pylint - name: pylint - entry: .venv/Scripts/pylint.exe - language: system - types: [python] - require_serial: true + - id: prettier diff --git a/.pypirc b/.pypirc new file mode 100644 index 0000000..929e4f4 --- /dev/null +++ b/.pypirc @@ -0,0 +1,10 @@ +[distutils] +index-servers = + pypi + testpypi + +[pypi] +repository = https://upload.pypi.org/legacy/ + +[testpypi] +repository = https://test.pypi.org/legacy/ \ No newline at end of file diff --git a/.vscode/extensions.json b/.vscode/extensions.json new file mode 100644 index 0000000..b472479 --- /dev/null +++ b/.vscode/extensions.json @@ -0,0 +1,7 @@ +{ + "recommendations": [ + "sanaajani.taskrunnercode", + "GitHub.copilot", + "GitHub.copilot-chat" + ] +} \ No newline at end of file diff --git a/.vscode/settings.json b/.vscode/settings.json index 7212cc2..e40989d 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,20 +1,25 @@ { - "files.exclude": { - "**/__pycache__": true, - "**/*.egg-info": true - }, - "python.formatting.provider": "none", - "python.linting.enabled": true, - "python.linting.flake8Enabled": true, - "python.linting.pylintEnabled": true, - "python.testing.pytestEnabled": false, - "python.testing.unittestEnabled": true, - "python.testing.unittestArgs": [ - "-v", - "-s", - "./tests", - "-p", - "test_*.py" - ], - "editor.formatOnSave": true -} + "files.exclude": { + "**/__pycache__": true, + "**/*.egg-info": true + }, + "python.defaultInterpreterPath": "/usr/local/bin/python", + "python.testing.pytestEnabled": 
true, + "python.testing.unittestEnabled": false, + "python.testing.pytestArgs": [ + "tests" + ], + "editor.formatOnPaste": false, + "editor.formatOnSave": true, + "editor.formatOnType": true, + "files.trimTrailingWhitespace": true, + "terminal.integrated.profiles.linux": { + "zsh": { + "path": "/usr/bin/zsh" + } + }, + "terminal.integrated.defaultProfile.linux": "zsh", + "[python]": { + "editor.defaultFormatter": "charliermarsh.ruff" + } +} \ No newline at end of file diff --git a/.vscode/tasks.json b/.vscode/tasks.json new file mode 100644 index 0000000..316b711 --- /dev/null +++ b/.vscode/tasks.json @@ -0,0 +1,41 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "install", + "type": "shell", + "command": "python -m pip install -e .", + "group": { + "kind": "build", + "isDefault": true + }, + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "pytest (with coverage)", + "type": "shell", + "command": "python -m pytest tests/ --cov=src/ --cov-report term-missing -vv", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + }, + { + "label": "pytest", + "type": "shell", + "command": "python -m pytest tests/ -vv", + "group": "test", + "presentation": { + "reveal": "always", + "panel": "shared" + }, + "problemMatcher": [] + } + ] +} diff --git a/LICENSE b/LICENSE index 5f801d5..9c3ab72 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Niels Perfors +Copyright (c) 2024 Niels Perfors. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/README.md b/README.md new file mode 100644 index 0000000..d6b908b --- /dev/null +++ b/README.md @@ -0,0 +1,29 @@ +# Python SAP Commissions® + +An Asynchronous Python client to communicate with SAP Commissions®. 
+ +If you like this project, please consider to [BuyMeACoffee](https://www.buymeacoffee.com/niro1987) or +[contact me](mailto:niels.perfors1987@gmail.com) directly. + +[!["Buy Me A Coffee"](https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png)](https://www.buymeacoffee.com/niro1987) + +## Installation + +To install the project, run the following command: + +```text +pip install python-sapcommissions +``` + +### REST API + +This project mimics the usage of the SAP Commissions REST API. Visit +`https://{TENANT}.callidusondemand.com/APIDocument` to read the full specification, replacing `TENANT` with your +tenant-id. + +## Legal Disclamer + +This software is designed for use with SAP Commissions®. It is not affiliated with SAP® and the developers +take no legal responsibility for the functionality or security of your Commissions environment. + +SAP Commissions is a registered trademark of SAP SE or its affiliates in Germany and in other countries. diff --git a/docs/ENDPOINTS.md b/docs/ENDPOINTS.md deleted file mode 100644 index c844a5b..0000000 --- a/docs/ENDPOINTS.md +++ /dev/null @@ -1,972 +0,0 @@ -# Endpoints - -- [Endpoints](#endpoints) - - [List of Endpoints](#list-of-endpoints) - - [Run Pipelines](#run-pipelines) - - [Generate Reports](#generate-reports) - - [Classify](#classify) - - [Allocate](#allocate) - - [Reward](#reward) - - [Pay](#pay) - - [Summarize](#summarize) - - [Compensate](#compensate) - - [Compensate and Pay](#compensate-and-pay) - - [Post](#post) - - [Undo Post](#undo-post) - - [Finalize](#finalize) - - [Undo Finalize](#undo-finalize) - - [Reset From Classify](#reset-from-classify) - - [Reset From Allocate](#reset-from-allocate) - - [Reset From Reward](#reset-from-reward) - - [Reset From Pay](#reset-from-pay) - - [Cleanup Deffered Results](#cleanup-deffered-results) - - [Approve Calculated Data](#approve-calculated-data) - - [Purge Approved Data](#purge-approved-data) - - [Update Analytics](#update-analytics) - - 
[Validate](#validate) - - [Transfer](#transfer) - - [Transfer If All Valid](#transfer-if-all-valid) - - [Validate and Transfer](#validate-and-transfer) - - [Validate and Transfer If All Valid](#validate-and-transfer-if-all-valid) - - [Reset From Validate](#reset-from-validate) - - [Purge](#purge) - - [XML Import](#xml-import) - -## List of Endpoints - -| Endpoint | Methods | -| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | -| AppliedDeposits | [`Get`][get], [`List`][list] | -| AuditLogs | [`Get`][get], [`List`][list] | -| Balances | [`Get`][get], [`List`][list] | -| BusinessUnits | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update] | -| Calendars | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update] | -| Categories | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| CategoryClassifiers | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update] | -| CategoryTrees | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Commissions | [`Get`][get], [`List`][list] | -| Credits | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update] | -| CreditTypes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Customers | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], 
[`Delete`][delete], [`Delete Versions`][delete-versions] | -| Deposits | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| EarningCodes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| EarningGroupCodes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| EarningGroups | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| EventTypes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| FixedValues | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| FixValueTypes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| FixedValueVariables | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Formulas | [`Get`][get], [`List`][list] | -| GenericClassifiers | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| GenericClassifierTypes | [`Get`][get], [`List`][list] | -| GlobalFieldNames | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Groups | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Incentives | [`Get`][get], [`List`][list] | -| LookUpTables | [`Get`][get], [`List`][list] | -| LookUpTableVariables | [`Create`][create], [`Create Versions`][create-versions], 
[`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Measurements | [`Get`][get], [`List`][list] | -| MessageLogs | [`Get`][get], [`List`][list] | -| Messages | [`Get`][get], [`List`][list] | -| Participants | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| PaymentMappings | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Payments | [`Get`][get], [`List`][list] | -| PatmentSummarys | [`Get`][get], [`List`][list] | -| Periods | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Pipelines | [`Get`][get], [`List`][list], [Run Pipelines](#run-pipelines) (see below) | -| Plans | [`Get`][get], [`List`][list] | -| PositionGroups | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| PositionRelations | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| PositionRelationTypes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Positions | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| PrimaryMeasurements | [`Get`][get], [`List`][list] | -| ProcessingUnits | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update] | -| Products | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], 
[`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Quotas | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| RateTables | [`Get`][get], [`List`][list] | -| RateTableVariables | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Reasons | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| SalesOrders | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| SalesTransactions | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| SecondaryMeasurements | [`Get`][get], [`List`][list] | -| StatusCodes | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Territories | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| TerritoryVariables | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| Titles | [`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | -| UnitTypes | [`Get`][get], [`List`][list] | -| Users | [`Create`][create], [`Get`][get], [`List`][list], [`Update`][update], [`Delete`][delete] | -| Variables | 
[`Create`][create], [`Create Versions`][create-versions], [`Get`][get], [`Get Versions`][get-versions], [`List`][list], [`Update`][update], [`Update Versions`][update-versions], [`Delete`][delete], [`Delete Versions`][delete-versions] | - -## Run Pipelines - -Apart from the [`Get`][get] and [`List`][list] methods, you can also run various pipelines. - -### Generate Reports - -Run Reports Generation pipeline. - -```py -# Generate Payments Report for Admin Group. -from sapcommissions import Connection, ReportFormat -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).generate_reports( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, - formats=[ReportFormat.NATIVE], - reports=["Payments Report"], - groups=["CALD Compensation Reports Admin Group"], -) -``` - -| Argument | Type | Required | Description | -| ----------------- | -------------------- | -------- | ----------------------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| formats | `list[ReportFormat]` | True | List of report formats | -| reports | `list[str]` | True | List of report names | -| groups | `list[str]` | False | List of BO group names. Use either groups or positionSeqs parameter. | -| positionSeqs | `list[str]` | False | List of position system identifiers. Use either groups or positionSeqs. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Classify - -Run Classify pipeline. 
- -```py -# Classify new and modified transactions. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).classify( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, - incremental=True, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| incremental | `bool` | False | Only process new and modified transactions. Default is False. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Allocate - -Run Allocate pipeline. - -```py -# Allocate all credits and calculate primary measurements. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).allocate( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ----------- | -------- | ---------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| incremental | `bool` | False | Only process new and modified credits. Default is False. 
| -| positionSeqs | `list[str]` | False | Run for specific positions. Provide a list of positionSeq. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Reward - -Run Reward pipeline. - -```py -# Calculate secondary measurements, incentives and deposit values. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).reward( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ----------- | -------- | ---------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| positionSeqs | `list[str]` | False | Run for specific positions. Provide a list of positionSeq. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Pay - -Run Pay pipeline. - -```py -# Calculate payments and balances. 
-from sapcommissions import Connection
-from sapcommissions.endpoints import Periods, Pipelines
-
-env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword")
-period = Periods(env).get_id("January 2023")
-
-pipeline = Pipelines(env).pay(
-    calendarSeq=period.calendar.calendarSeq,
-    periodSeq=period.periodSeq,
-)
-```
-
-| Argument | Type | Required | Description |
-| ----------------- | ------ | -------- | ------------------------------------------------------- |
-| calendarSeq | `str` | True | Calendar system identifier |
-| periodSeq | `str` | True | Period system identifier |
-| runStats | `bool` | False | Run statistics, default is True. |
-| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. |
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-### Summarize
-
-Run Summarize pipeline, combination of [classify](#classify) and [allocate](#allocate).
-
-```py
-# Run calculations up to primary measurements.
-from sapcommissions import Connection
-from sapcommissions.endpoints import Periods, Pipelines
-
-env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword")
-period = Periods(env).get_id("January 2023")
-
-pipeline = Pipelines(env).summarize(
-    calendarSeq=period.calendar.calendarSeq,
-    periodSeq=period.periodSeq,
-)
-```
-
-| Argument | Type | Required | Description |
-| ----------------- | ----------- | -------- | ------------------------------------------------------------- |
-| calendarSeq | `str` | True | Calendar system identifier |
-| periodSeq | `str` | True | Period system identifier |
-| incremental | `bool` | False | Only process new and modified transactions. Default is False. |
-| positionSeqs | `list[str]` | False | Run for specific positions. Provide a list of positionSeq. |
-| runStats | `bool` | False | Run statistics, default is True. 
|
-| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. |
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-### Compensate
-
-Run Compensate pipeline, combination of [classify](#classify), [allocate](#allocate) and [reward](#reward).
-
-```py
-# Run calculations up to deposit values, processing only new and modified transactions
-# and credits and remove stale results.
-from sapcommissions import Connection
-from sapcommissions.endpoints import Periods, Pipelines
-
-env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword")
-period = Periods(env).get_id("January 2023")
-
-pipeline = Pipelines(env).compensate(
-    calendarSeq=period.calendar.calendarSeq,
-    periodSeq=period.periodSeq,
-    incremental=True,
-    removeStaleResults=True,
-)
-```
-
-| Argument | Type | Required | Description |
-| ------------------ | ----------- | -------- | ------------------------------------------------------------- |
-| calendarSeq | `str` | True | Calendar system identifier |
-| periodSeq | `str` | True | Period system identifier |
-| incremental | `bool` | False | Only process new and modified transactions. Default is False. |
-| positionSeqs | `list[str]` | False | Run for specific positions. Provide a list of positionSeq. |
-| removeStaleResults | `bool` | False | Enable remove stale results. Default is False. |
-| runStats | `bool` | False | Run statistics, default is True. |
-| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. |
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-### Compensate and Pay
-
-Run Compensate and Pay pipeline, full calculation pipeline.
-
-```py
-# Run Compensate and Pay for specified positions. 
-from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines, Positions - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") -positions = Positions(env).list(filter="positionGroup/name eq 'A-Team'") -positions_seq = [position.ruleElementOwnerSeq for position in positions] - -pipeline = Pipelines(env).comp_and_pay( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, - positionSeqs=positions_seq, -) -``` - -| Argument | Type | Required | Description | -| ------------------ | ----------- | -------- | ------------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| incremental | `bool` | False | Only process new and modified transactions. Default is False. | -| positionSeqs | `list[str]` | False | Run for specific positions. Provide a list of positionSeq. | -| removeStaleResults | `bool` | False | Enable remove stale results. Default is False. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Post - -Run Post pipeline. - -```py -# Post payments and calculate balances. 
-from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).post( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Undo Post - -Run Undo Post pipeline. - -```py -# Undo Last Post Run. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).undo_post( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. 
| - -### Finalize - -Run Finalize pipeline. - -```py -# Finalize payments for a period. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).finalize( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Undo Finalize - -Run Undo Finalize pipeline. - -```py -# Undo Last Finalize Run. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).undo_finalize( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. 
| - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Reset From Classify - -Run Reset From Classify pipeline. - -```py -# Reset all data from classification and forward. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).reset_from_classify( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Reset From Allocate - -Run Reset From Allocate pipeline. - -```py -# Reset all data from credit and forward. 
-from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).reset_from_allocate( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Reset From Reward - -Run Reset From Reward pipeline. - -```py -# Reset all data from deposit and forward. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).reset_from_reward( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. 
|
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-### Reset From Pay
-
-Run Reset From Pay pipeline.
-
-```py
-# Reset all data from payment and forward.
-from sapcommissions import Connection
-from sapcommissions.endpoints import Periods, Pipelines
-
-env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword")
-period = Periods(env).get_id("January 2023")
-
-pipeline = Pipelines(env).reset_from_pay(
-    calendarSeq=period.calendar.calendarSeq,
-    periodSeq=period.periodSeq,
-)
-```
-
-| Argument | Type | Required | Description |
-| ----------------- | ------ | -------- | ------------------------------------------------------- |
-| calendarSeq | `str` | True | Calendar system identifier |
-| periodSeq | `str` | True | Period system identifier |
-| runStats | `bool` | False | Run statistics, default is True. |
-| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. |
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-### Cleanup Deferred Results
-
-Run Cleanup Deferred Results pipeline.
-
-```py
-# Clean up deferred results for all periods in 2023. 
-from sapcommissions import Connection -from sapcommissions.endpoints import Calendars, Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") -periods = Periods(env).list( - filter=( - f"calendar eq {calendar.calendarSeq}" - f" and periodType eq {calendar.minorPeriodType.periodTypeSeq}" - f" and startDate ge '1/1/2023' and endDate le '1/1/2024'" - ) -) - -for period in periods: - Pipelines(env).cleanup_deferred_results( - calendarSeq=calendar.calendarSeq, - periodSeq=period.periodSeq, - ) -``` - -| Argument | Type | Required | Description | -| ----------------- | ----- | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Approve Calculated Data - -Run Approve Calculated Data pipeline. - -```py -# Approve calculated data. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).approve_calculated_data( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ----- | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. 
| - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Purge Approved Data - -Run Purge Approved Data pipeline. - -```py -# Purge approved data. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).purge_approved_data( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ----- | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Update Analytics - -Run Update Analytics pipeline. - -```py -# Update Analytics. -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -pipeline = Pipelines(env).update_analytics( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| runStats | `bool` | False | Run statistics, default is True. 
| -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Validate - -Validate data from stage. - -```py -# Revalidate an entire batch. -from sapcommissions import Connection, Revalidate -from sapcommissions.endpoints import Calendars, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") - -pipeline = Pipelines(env).validate( - calendarSeq=calendar.calendarSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", - revalidate=Revalidate.ALL, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | --------------- | -------- | ------------------------------------------------------------------------------------ | -| calendarSeq | `str` | True | Calendar system identifier | -| batchName | `str` | True | Batch name. | -| runMode | `ImportRunMode` | False | Import all or only new and modified data. Default: ALL. | -| revalidate | `Revalidate` | False | Revalidate all or only errors if provided. Do not revalidate if None. Default: None. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Transfer - -Transfer data from stage, leave invalid data. - -```py -# Transfer new and modified data. 
-from sapcommissions import Connection, ImportRunMode -from sapcommissions.endpoints import Calendars, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") - -pipeline = Pipelines(env).transfer( - calendarSeq=calendar.calendarSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", - runMode=ImportRunMode.NEW, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | --------------- | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| batchName | `str` | True | Batch name. | -| runMode | `ImportRunMode` | False | Import all or only new and modified data. Default: ALL. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Transfer If All Valid - -Transfer data from stage only if all data is valid. - -```py -# Transfer data, but only if the entire file is valid. -from sapcommissions import Connection -from sapcommissions.endpoints import Calendars, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") - -pipeline = Pipelines(env).transfer_if_all_valid( - calendarSeq=calendar.calendarSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", -) -``` - -| Argument | Type | Required | Description | -| ----------------- | --------------- | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| batchName | `str` | True | Batch name. | -| runMode | `ImportRunMode` | False | Import all or only new and modified data. 
Default: ALL. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Validate and Transfer - -Validate and Transfer data from stage, leave invalid data. - -```py -# Validate and transfer data from stage, leave invalid data. -from sapcommissions import Connection -from sapcommissions.endpoints import Calendars, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") - -pipeline = Pipelines(env).validate_and_transfer( - calendarSeq=calendar.calendarSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", -) -``` - -| Argument | Type | Required | Description | -| ----------------- | --------------- | -------- | ------------------------------------------------------------------------------------ | -| calendarSeq | `str` | True | Calendar system identifier | -| batchName | `str` | True | Batch name. | -| runMode | `ImportRunMode` | False | Import all or only new and modified data. Default: ALL. | -| revalidate | `Revalidate` | False | Revalidate all or only errors if provided. Do not revalidate if None. Default: None. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Validate and Transfer If All Valid - -Validate and Transfer data from stage, if all data is valid. - -```py -# Validate and transfer data from stage, if all data is valid. 
-from sapcommissions import Connection -from sapcommissions.endpoints import Calendars, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -calendar = Calendars(env).get_id("Monthly Calendar") - -pipeline = Pipelines(env).validate_and_transfer_if_all_valid( - calendarSeq=calendar.calendarSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", -) -``` - -| Argument | Type | Required | Description | -| ----------------- | --------------- | -------- | ------------------------------------------------------------------------------------ | -| calendarSeq | `str` | True | Calendar system identifier | -| batchName | `str` | True | Batch name. | -| runMode | `ImportRunMode` | False | Import all or only new and modified data. Default: ALL. | -| revalidate | `Revalidate` | False | Revalidate all or only errors if provided. Do not revalidate if None. Default: None. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Reset From Validate - -Run Reset From Validate. - -```py -from sapcommissions import Connection -from sapcommissions.endpoints import Periods, Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") -period = Periods(env).get_id("January 2023") - -# Remove a batch from a period. -pipeline = Pipelines(env).reset_from_validate( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", -) - -# Remove all batches from a period. 
-pipeline = Pipelines(env).reset_from_validate( - calendarSeq=period.calendar.calendarSeq, - periodSeq=period.periodSeq, -) -``` - -| Argument | Type | Required | Description | -| ----------------- | ------ | -------- | ------------------------------------------------------- | -| calendarSeq | `str` | True | Calendar system identifier | -| periodSeq | `str` | True | Period system identifier | -| batchName | `str` | False | Batch name. Remove all batches if None. | -| runStats | `bool` | False | Run statistics, default is True. | -| processingUnitSeq | `str` | False | Processing Unit system identifier, required if enabled. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### Purge - -Run Purge import data. - -```py -# Remove a batch from stage. -from sapcommissions import Connection -from sapcommissions.endpoints import Pipelines - -env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword") - -pipeline = Pipelines(env).purge( - batchName="CALD_ENV_OGPO_20230101_123456_positions_file.txt", -) -``` - -| Argument | Type | Required | Description | -| --------- | ----- | -------- | -------------------- | -| batchName | `str` | True | Batch name to purge. | - -| Returns | Description | -| ---------- | ------------------------------------------------------- | -| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. | - -### XML Import - -Run XML Import. - -```py -# Import plan data, allow updates on existing objects. 
-import os
-
-from sapcommissions import Connection
-from sapcommissions.endpoints import Pipelines
-
-env = Connection("CALD", "ENV", "MyUserName", "MySuperSecretPassword")
-
-filename = "path/to/plan.xml"
-with open(filename, "r") as file:
-    pipeline = Pipelines(env).xml_import(
-        xmlFileName=os.path.basename(filename),
-        xmlFileContent=file.read(),
-        updateExistingObjects=True,
-    )
-```
-
-| Argument | Type | Required | Description |
-| --------------------- | ------ | -------- | ------------------------------------------- |
-| xmlFileName | `str` | True | Filename of imported file. |
-| xmlFileContent | `str` | True | File content of imported file. |
-| updateExistingObjects | `bool` | False | Update existing objects. Default is False. |
-
-| Returns | Description |
-| ---------- | ------------------------------------------------------- |
-| `Pipeline` | `Pipeline` instance containing only the pipelineRunSeq. |
-
-
-[create]: METHODS.md#create
-[create-versions]: METHODS.md#create-versions
-[get]: METHODS.md#get
-[get-versions]: METHODS.md#get-versions
-[list]: METHODS.md#list
-[update]: METHODS.md#update
-[update-versions]: METHODS.md#update-versions
-[delete]: METHODS.md#delete
-[delete-versions]: METHODS.md#delete-versions
diff --git a/docs/METHODS.md b/docs/METHODS.md
deleted file mode 100644
index 65aa13c..0000000
--- a/docs/METHODS.md
+++ /dev/null
@@ -1,329 +0,0 @@
-# Methods
-
-Every endpoint exposes a number of methods. The most common are `list()` and `get()`. Some endpoints also expose
-`create()`, `update()`, and `delete()` methods. Versioned endpoints like `Participants` also expose `get_versions()`,
-`create_versions()`, `update_versions()` and `delete_versions()` methods.
-
-## List
-
-The `list()` method is used to retrieve multiple resources from the endpoint. By default, it returns the current
-effective version of the resource (if the endpoint is versioned). 
To retrieve a different effective version, you must
-provide both `startDate` and `endDate` parameters.
-
-Due to the potentially huge amount of requests to the endpoint, the `list()` method actually returns a `generator`
-object, not a `list`. Depending on the `limit` parameter, the `list()` method requests up to 100 resources at a time.
-
-In most cases, you will want to apply some kind of filter. For a complete list of available filter options please visit
-the [REST API Documentation](README.md#rest-api).
-
-You can limit the number of results returned by providing a `limit`, this is useful if you want to explore the data if
-the endpoint holds a lot of instances. Provide the `raw = True` parameter to return the json response from the API
-without converting it to a Python object.
-
-Provide `filter_kwargs` keyword arguments to apply a quick filter. For example `Positions(prod).list(name='John Doe')`
-is equivalent to `Positions(prod).list(filter="name eq 'John Doe'")`. Providing `filter` and `filter_kwargs` will
-combine the arguments using the `and` operator, as will multiple `filter_kwargs` arguments.
-
-```py
-# Get all positions.
-positions = Positions(prod).list()
-
-# Get all positions with title 'Sales Manager'. 
-sales_managers = Positions(prod).list(filter="title/name eq 'Sales Manager'") -``` - -| Argument | Type | Required | Description | -| ------------- | ------ | -------------------------- | ------------------------------------------------ | -| filter | `str` | False | A filter string to apply to the list | -| startDate | `date` | False if endDate is None | Filter list of resources effective for startDate | -| endDate | `date` | False if startDate is None | Filter list of resources effective for endDate | -| limit | `int` | False | Limit the number of returned resources | -| raw | `bool` | False | Return the raw json response from the API | -| filter_kwargs | `dict` | False | Keyword arguments to apply to the filter | - -| Returns | Description | -| --------------------- | ---------------------------------------------- | -| `generator` | A generator of resources, single valid version | - -## Get - -The `get()` method is used to retrieve an existing resource, single (latest) version. The method takes a single -parameter, `seq` (system unique identifier) of the object to retrieve. The `seq` value for a resource is stored in the -first attribute, for simplicity, it can also be read from the `_seq` property. - -| Argument | Type | Required | Description | -| -------- | ------------ | -------- | --------------------------------------------- | -| seq | `int \| str` | True | The system unique identifier for the resource | - -| Returns | Description | -| ---------- | ---------------------------------------- | -| `Resource` | Requested resource, single valid version | - -```py -# Let's say you retrieve a list of positions from the API. The manager attribute refers -# to a position, but does not contain any meaningfull information about the manager yet. -# You can use the `get()` method to enrich the manager data. 
-positions = Positions(prod).list()
-
-for position in positions:
-    if position.manager:
-        position.manager = Positions(prod).get(position.manager.ruleElementOwnerSeq)
-```
-
-## Get ID
-
-`get_id()` is a helper method to simplify the retrieval of a resource by its ID (user unique identifier). The method
-takes a single parameter, `id`. If the resource does not provide an id, or the specified `id` could not be found, it
-returns `None`.
-
-| Argument | Type | Required | Description |
-| -------- | ----- | -------- | ------------------------------------------- |
-| id | `str` | True | The user unique identifier for the resource |
-
-| Returns | Description |
-| ---------- | -------------------------------------------------- |
-| `Resource` | Requested resource, single valid version |
-| `None` | Resource does not have an id or could not be found |
-
-```py
-position = Positions(prod).get_id('John Doe')
-
-# What would have been required without this method.
-position_id_attr = Position._id_attr # returns 'name'
-positions = Positions(prod).list(filter=f"{position_id_attr} eq 'John Doe'")
-position = positions[0] if positions else None
-```
-
-## Get Versions
-
-The `get_versions()` method is similar to the [Get](#get) method, it returns a list of all versions of the resource.
-
-| Argument | Type | Required | Description |
-| -------- | ------------ | -------- | --------------------------------------------- |
-| seq | `int \| str` | True | The system unique identifier for the resource |
-
-| Returns | Description |
-| ---------------- | -------------------------------------- |
-| `list[Resource]` | A list of all versions for a resource |
-
-```py
-# Get all versions for a position.
-positions = Positions(prod).list()
-position = positions[0]
-
-position_versions = Positions(prod).get_versions(position.ruleElementOwnerSeq)
-```
-
-## Create
-
-With the `create()` method, you can create a new instance of the resource. 
Unlike the REST API, the `create()` method -accepts only a single resource as a parameter. If successful, the created resource will be returned. - -| Argument | Type | Required | Description | -| -------- | ---------- | -------- | ------------------------------- | -| instance | `Resource` | True | The resource instance to create | - -| Returns | Description | -| ---------- | ----------------------- | -| `Resource` | Created resource object | - -Make sure to provide all required attributes for the resource. Check the documentation for the resource to see which -attributes are required. - -```py -# Create a new position, with title 'Account Manager'. -new_position = Position( - name="John Doe", - effectiveStartDate=date(2020, 1, 1), - effectiveEndDate=date(2200, 1, 1), - title=Title(name="Account Manager"), -) -created_position = Positions(prod).create(new_position) -``` - -## Create Versions - -The `create_versions()` method is used to create new versions of an existing resource. It is imperative that you provide -all versions of the resource, as this method will overwrite all pre-existing versions with the ones provide. This method -can also be used to end-date an existing resource. All pre-existing versions of the resource will be overwritten. - -| Argument | Type | Required | Description | -| --------- | ---------------- | -------- | --------------------------------------------- | -| seq | `int \| str` | True | The system unique identifier for the resource | -| instances | `list[Resource]` | True | The list of resource instances to create | - -| Returns | Description | -| ---------------- | --------------------------------- | -| `list[Resource]` | List of created resource versions | - -```py -# Let's create a new version of the position that you just created. 
-first_version = Position( - name="John Doe", - effectiveStartDate=date(2020, 1, 1), - effectiveEndDate=date(2020, 12, 31), - title=Title(name="Account Manager"), -) -second_version = Position( - name="John Doe", - effectiveStartDate=date(2021, 1, 1), - effectiveEndDate=date(2200, 1, 1), - title=Title(name="Sales Manager"), -) -versions = [first_version, second_version] - -created_versions = Positions(prod).create_versions( - created_position.ruleElementOwnerSeq, - versions, -) -``` - -## Update - -With the `update()` method, you can update an existing resource. If the endpoint is versioned, this methid only updates -a single valid version, matching the effective date range provided. - -| Argument | Type | Required | Description | -| -------- | ---------- | -------- | ----------------------------- | -| update | `Resource` | True | The updated resource instance | - -| Returns | Description | -| ---------- | --------------------------------------------- | -| `Resource` | Updated resource object, single valid version | - -```py -# Say that you want to assign all positions with title 'Account Manager' or -# 'Sales Manager' to a position group 'Sales'. Let's assume that the position group -# already exists. - -# Get a list of all positions with title 'Account Manager' or 'Sales Manager'. -positions = ( - Positions(prod) - .list(filter="title/name eq 'Account Manager' or title/name eq 'Sales Manager'") -) - -# Now update the position group and update the position. -for position in positions: - position.positionGroup = PositionGroup(name="Sales") - Positions(prod).update(position) -``` - -## Update Versions - -The `update_versions()` method is used to update the versions of an existing resource. It is important to understand the -differance between `update()` and `update_versions()`. [Update](#update) allows a single valid version of the resource -to be updated, it must pre-exist in the environment. 
With `update_versions()`, you can update multiple versions at once, -and even apply an update without any prior knowledge of pre-existing versions. The provided versions will be applied to -the current existing versions in the environment. - -| Argument | Type | Required | Description | -| -------- | ---------------- | -------- | --------------------------------------------- | -| seq | `int \| str` | True | The system unique identifier for the resource | -| versions | `list[Resource]` | True | The list of resource version update to apply | - -| Returns | Description | -| ---------------- | ---------------------------------------------------------- | -| `list[Resource]` | List of all resource versions after the update was applied | - -**Example:** - -```py -# Let's revisit our previous example where a position is promoted to a different title. -# Our position already has two versions, the first with a title of 'Account Manager', -# the second with a title of 'Sales Manager' and position group 'Sales'. You can update -# the position without any pre-existing knowledge of these versions. - -# You'll need to seq number for the position that you are going to update. 
-positions = Positions(prod).list(filter="name eq 'John Doe'") -position = positions[0] - -# Now you can update the position -updated_position = Position( - name="John Doe", - effectiveStartDate=date(2022, 1, 1), - effectiveEndDate=date(2200, 1, 1), - title=Title(name="Director"), - positionGroup=PositionGroup(name="Management"), -) - -Positions(prod).update_versions(position_seq, [updated_position]) - -# [ -# Position( -# name="John Doe", -# effectiveStartDate=date(2020, 1, 1), -# effectiveEndDate=date(2020, 12, 31) -# title=Title(name="Account Manager"), -# ), -# Position( -# name="John Doe", -# effectiveStartDate=date(2021, 1, 1), -# effectiveEndDate=date(2021, 12, 31) -# title=Title(name="Sales Manager"), -# positionGroup=PositionGroup(name="Sales"), -# ), -# Position( -# name="John Doe", -# effectiveStartDate=date(2022, 1, 1), -# effectiveEndDate=date(2200, 1, 1) -# title=Title(name="Director"), -# positionGroup=PositionGroup(name="Management"), -# ), -# ] -``` - -## Delete - -With the `delete()` method, you can fully delete a resource from the environment, all effective versions of the resource -will be deleted. - -| Argument | Type | Required | Description | -| -------- | ------------ | -------- | --------------------------------------------- | -| seq | `int \| str` | True | The system unique identifier for the resource | - -| Returns | Description | -| ------- | ---------------------------------------------------------- | -| `str` | Confirmation message `The record is successfully deleted.` | - -```py -# Delete a position with the name 'John Doe'. -positions = Positions(prod).list(filter="name eq 'John Doe'") -position = positions[0] - -message = Positions(prod).delete(position.ruleElementOwnerSeq) -assert message == "The record is successfully deleted." -``` - -## Delete Versions - -The `delete_versions()` method deletes the given versions of the resource. The resulting gap will either be filled from -the previous or next available version of the resource. 
The effective dates provided must match an existing version of -the resource. - -| Argument | Type | Required | Description | -| ------------------ | ------------ | -------- | ---------------------------------------------------- | -| seq | `int \| str` | True | The system unique identifier for the resource | -| effectiveStartDate | `date` | True | The start date of the version to delete | -| effectiveEndDate | `date` | True | The end date of the version to delete | -| fillFromRight | `bool` | False | Default `True`, fill from the next available version | - -| Returns | Description | -| ------- | ---------------------------------------------------------------------------- | -| `str` | Confirmation message `All versions in given range are deleted successfully.` | - -```py -# Remove the latest version of a position with the name 'John Doe' and fill the gap from -# the previous version. -positions = Positions(prod).list(filter="name eq 'John Doe'") -position = positions[0] - -message = ( - Positions(prod) - .delete_versions( - seq=position.ruleElementOwnerSeq, - effectiveStartDate=position.effectiveStartDate, - effectiveEndDate=position.effectiveEndDate, - fillFromRight=False - ) -) -``` diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index 14c740b..0000000 --- a/docs/README.md +++ /dev/null @@ -1,93 +0,0 @@ -# SAP Commissions - -A Python wrapper for the SAP Commissions API. - -- [Installation](#installation) - - [REST API](#rest-api) - - [Terminology](#terminology) -- [Usage](#usage) -- [Endpoints](#endpoints) -- [Methods](#methods) -- [Legal Disclaimer](#legal-disclaimer) - -If you like this project, please consider to [BuyMeACoffee](https://www.buymeacoffee.com/niro1987) or -[contact me](mailto:niels.perfors1987@gmail.com) directly. 
- -[!["Buy Me A Coffee"](https://www.buymeacoffee.com/assets/img/custom_images/orange_img.png)](https://www.buymeacoffee.com/niro1987) - -## Installation - -To install the project, run the following command: - -```text -pip install python-sapcommissions -``` - -### REST API - -This project mimics the usage of the SAP Commissions REST API. Visit -`https://{TENANT}.callidusondemand.com/APIDocument` to read the full specification, replacing `TENANT` with your -tenant-id. - -### Terminology - -Things to keep in mind while reading the documentation. - -| Keyword | Description | -| -------- | --------------------------------------------------------------------------------- | -| Endpoint | A type of object that you can interact with, like `Participants` and `Positions`. | -| Method | An action to perform on an endpoint, like `list()` and `get_id()`. | -| Resource | An instance of an endpoint, like `Participant` and `Position`. | - -## Usage - -To get started, import `Connection` and an endpoint of your choosing. In this example, we'll use `Participants`. - -```py -from sapcommissions import Connection -from sapcommissions.endpoints import Participants -``` - -Initialize a Connection by providing the tenant, username, and password. Optionally, you can disable ssl verification, -if you are having problems connecting to the API from your network. - -```python -prod = Connection("CALD-PRD", "MyUserName", "MySuperSecretPassword", verify_ssl=True) -``` - -In this example we will use the `Participants` endpoint to get a list of all participants from the system. The `list()` -method returns a `generator` object, to retrieve all `Participants`, you can convert the generator to a `list`, -processes the `Participants` one-by-one in a `for-loop` or use a list comprehension to extract neccecery properties. - -```py -participants = Participants(prod).list() - -# Convert to list -all_users = list(participants) - -# For loop -for participant in participants: - ... 
# Do something - -# List Comprehension -participant_ids = [participant.payeeId for participant in participants] -``` - -## Endpoints - -Endpoints are the objects you can interact with, like `Participants`, `Positions`, `Credits` or `Pipelines`. For a full -list of endpoints and their associated methods, please read the [Endpoints documentation](ENDPOINTS.md). - -## Methods - -Methods are the actions you can perform on an endpoint, like `list()`, `get()` and `create()`. You'll find links to the -associated methods in the [Endpoints documentation](ENDPOINTS.md). For a full list of available methods, please read the -[Methods documentation](METHODS.md). - -## Legal Disclaimer - -This software is designed for use with SAP® Commissions. - -SAP Commissions is the trademark or registered trademark of SAP SE or its affiliates in Germany and in other countries. - -The developers take no legal responsibility for the functionality or security of your SAP Commissions environment. diff --git a/pyproject.toml b/pyproject.toml index d743fe3..007a2b4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,83 +2,170 @@ requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" - [project] name = "python-sapcommissions" -description = "A Python wrapper for the SAP Commissions API" -authors = [ - {name = "Niels Perfors", email = "niels.perfors1987@gmail.com"}, -] -license = {text = "MIT"} -requires-python = ">=3.10" +authors = [{ name = "Niels Perfors", email = "niels.perfors1987@gmail.com" }] +description = "Client for SAP Commissions" +readme = "README.md" +requires-python = ">=3.11" +keywords = ["SAP Commissions", "Incentive Management"] +license = { text = "MIT" } classifiers = [ - "Development Status :: 2 - Pre-Alpha", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", + "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", + "Programming Language :: Python :: 3.11", + "Programming 
Language :: Python :: 3.12", ] dependencies = [ - "requests ~= 2.31.0", + "aiohttp>=3.0.0", + "click>=8.1.7", + "python-dotenv>=1.0.1", + "pydantic>=2.6.0", ] -readme = "docs/README.md" dynamic = ["version"] +[project.scripts] +sap = "sapcommissions.__main__:cli" [project.urls] -"Homepage" = "https://github.com/niro1987/python-sapcommissions" +Homepage = "https://github.com/niro1987/python-sapcommissions" +Repository = "https://github.com/niro1987/python-sapcommissions.git" +Issues = "https://github.com/niro1987/python-sapcommissions/issues" +[tool.setuptools] +zip-safe = false +include-package-data = true [tool.setuptools_scm] - [tool.setuptools.packages.find] -exclude = [ - "tests", -] - - -[project.optional-dependencies] -dev = [ - "pycodestyle", - "pylint", - "flake8", - "black", - "isort", - "pre-commit", -] +where = ["src"] +exclude = ["tests"] -[tool.black] -line-length = 88 - -[tool.isort] -profile = "black" +[tool.setuptools.package-data] +"sapcommissions" = ["py.typed"] [tool.pylint.MAIN] py-version = "3.11" -ignore-patterns="tests/*" +ignore = ["tests"] +load-plugins = ["pylint.extensions.code_style", "pylint.extensions.typing"] -load-plugins = [ - "pylint.extensions.code_style", - "pylint.extensions.typing" -] +[tool.pylint."MESSAGES CONTROL"] +disable = [] +enable = ["use-symbolic-message-instead"] -[tool.pylint.BASIC] -attr-naming-style = "camelCase" -argument-naming-style = "camelCase" -good-names = [ - "id" +[tool.pylint.REPORTS] +score = false + +[tool.pylint.FORMAT] +expected-line-ending-format = "LF" + +[tool.pylint.TYPING] +runtime-typing = false + +[tool.pytest.ini_options] +testpaths = ["tests"] +norecursedirs = [".git"] +asyncio_mode = "auto" +log_cli = true +log_cli_level = "DEBUG" +log_cli_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(name)25s:%(lineno)-4s %(message)s" +log_cli_date_format = "%Y-%m-%d %H:%M:%S" +log_file = "tests/test.log" +log_file_level = "DEBUG" +log_file_format = 
"%(asctime)s.%(msecs)03d %(levelname)-8s %(name)25s:%(lineno)-4s %(message)s" +log_file_date_format = "%Y-%m-%d %H:%M:%S" + +[tool.ruff.lint] +select = [ + "B002", # Python does not support the unary prefix increment + "B007", # Loop control variable {name} not used within loop body + "B014", # Exception handler with duplicate exception + "B023", # Function definition does not bind loop variable {name} + "B026", # Star-arg unpacking after a keyword argument is strongly discouraged + "C", # complexity + "COM818", # Trailing comma on bare tuple prohibited + "D", # docstrings + "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow() + "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts) + "E", # pycodestyle + "F", # pyflakes/autoflake + "G", # flake8-logging-format + "I", # isort + "ICN001", # import concentions; {name} should be imported as {asname} + "N804", # First argument of a class method should be named cls + "N805", # First argument of a method should be named self + "N815", # Variable {name} in class scope should not be mixedCase + "S307", # No builtin eval() allowed + "PGH004", # Use specific rule codes when using noqa + "PLC0414", # Useless import alias. Import alias does not rename original package. 
+ "PLC", # pylint + "PLE", # pylint + "PLR", # pylint + "PLW", # pylint + "Q000", # Double quotes found but single quotes preferred + "RUF006", # Store a reference to the return value of asyncio.create_task + "S102", # Use of exec detected + "S103", # bad-file-permissions + "S108", # hardcoded-temp-file + "S306", # suspicious-mktemp-usage + "S307", # suspicious-eval-usage + "S313", # suspicious-xmlc-element-tree-usage + "S314", # suspicious-xml-element-tree-usage + "S315", # suspicious-xml-expat-reader-usage + "S316", # suspicious-xml-expat-builder-usage + "S317", # suspicious-xml-sax-usage + "S318", # suspicious-xml-mini-dom-usage + "S319", # suspicious-xml-pull-dom-usage + "S320", # suspicious-xmle-tree-usage + "S601", # paramiko-call + "S602", # subprocess-popen-with-shell-equals-true + "S604", # call-with-shell-equals-true + "S608", # hardcoded-sql-expression + "S609", # unix-command-wildcard-injection + "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass + "SIM117", # Merge with-statements that use the same scope + "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys() + "SIM201", # Use {left} != {right} instead of not {left} == {right} + "SIM208", # Use {expr} instead of not (not {expr}) + "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a} + "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'. 
+ "SIM401", # Use get from dict with default instead of an if block + "T100", # Trace found: {name} used + "T20", # flake8-print + "TID251", # Banned imports + "TRY004", # Prefer TypeError exception for invalid type + "B904", # Use raise from to specify exception cause + "TRY302", # Remove exception handler; error is immediately re-raised + "UP", # pyupgrade + "W", # pycodestyle ] - -[tool.pylint."MESSAGES CONTROL"] -disable = [ - "too-many-ancestors", - "too-many-arguments", - "too-many-instance-attributes", - "too-many-lines", - "missing-class-docstring" +ignore = [ + "D202", # No blank lines allowed after function docstring + "D203", # 1 blank line required before class docstring + "E501", # line too long + "E731", # do not assign a lambda expression, use a def + "D213", # Multi-line docstring summary should start at the second line + + # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules + "W191", + "E111", + "E114", + "E117", + "D206", + "D300", + "Q000", + "Q001", + "Q002", + "Q003", + "COM812", + "COM819", + "ISC001", + "ISC002", ] -enable = ["use-symbolic-message-instead"] - -[tool.pylint.CODE_STYLE] -max-line-length-suggestions = 88 +[tool.ruff.lint.isort] +force-sort-within-sections = true +known-first-party = ["sapcommissions"] +combine-as-imports = true +split-on-trailing-comma = true diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..5884917 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,4 @@ +aiohttp>=3.0.0 +click>=8.1.7 +python-dotenv>=1.0.1 +pydantic>=2.6.0 diff --git a/sapcommissions/__init__.py b/sapcommissions/__init__.py deleted file mode 100644 index 43c75d0..0000000 --- a/sapcommissions/__init__.py +++ /dev/null @@ -1,69 +0,0 @@ -""" -A Python wrapper for the SAP Commissions API. -""" -from dataclasses import dataclass, field -from enum import Enum - - -@dataclass(frozen=True) -class Connection: - """ - Connection variables used to connect with SAP Commissions. 
- """ - - tenant: str = field(repr=True) - username: str = field(repr=True) - password: str = field(repr=False) - verifySsl: bool = field(default=True, repr=False) - - @property - def url(self) -> str: - """Return the Commissions URL.""" - return f"https://{self.tenant}.callidusondemand.com" - - @property - def apiUrl(self) -> str: - """Returns the base url for the Commissions REST API.""" - return self.url + "/api" - - @property - def apiDocument(self) -> str: - """Returns the url for the Commissions API documentation.""" - return self.url + "/APIDocument" - - -class ReportFormat(Enum): - """ - Enum for the report format. - """ - - PDF = "pdf" - EXCEL = "excel" - NATIVE = "native" - - -class Revalidate(Enum): - """ - Enum for revalidate mode. - """ - - ALL = "all" - ONLY_ERRORS = "onlyError" - - -class ImportRunMode(Enum): - """ - Enum for import runMode. - """ - - ALL = "all" - NEW = "new" - - -class PipelineRunMode(Enum): - """ - Enum for pipeline runMode. - """ - - FULL = "full" - INCREMENTAL = "incremental" diff --git a/sapcommissions/endpoints.py b/sapcommissions/endpoints.py deleted file mode 100644 index e1ffd78..0000000 --- a/sapcommissions/endpoints.py +++ /dev/null @@ -1,2005 +0,0 @@ -"""Endpoints are used to interact with SAP Commissions objects.""" -import logging -from datetime import date -from typing import Any - -from requests.auth import HTTPBasicAuth -from requests.exceptions import HTTPError -from requests.models import Response -from requests.sessions import Session -from urllib3 import disable_warnings - -from sapcommissions import ( - Connection, - ImportRunMode, - PipelineRunMode, - ReportFormat, - Revalidate, - resources, -) -from sapcommissions.exceptions import AuthenticationError, ClientError, ServerError - -LOGGER = logging.getLogger(__name__) - - -def _stage_tables(batchName: str) -> tuple[str, list[str]]: - """ - Deduce the tables to be staged from the batchName. 
- """ - try: - odi_type: str = batchName.split("_")[1] - odi_type = odi_type.upper() - assert len(odi_type) >= 4 - assert odi_type[:2] in {"TX", "OG", "CL", "PL"} - except (IndexError, AssertionError) as error: - LOGGER.error("Batch does not conform to any ODI template: %s", batchName) - raise TypeError( - "Batch does not conform to any ODI template TX*, OG*, CL*, PL*" - ) from error - - stage_tables: tuple[str, list[str]] - if odi_type[:2] == "TX": - stage_tables = ( - "TransactionalData", - [ - "TransactionAndCredit", - "Deposit", - ], - ) - if odi_type[:2] == "OG": - stage_tables = ( - "OrganizationData", - [ - "Participant", - "Position", - "Title", - "PositionRelation", - ], - ) - if odi_type[:2] == "CL": - stage_tables = ( - "ClassificationData", - [ - "Category", - "Category_Classifiers", - "Customer", - "Product", - "PostalCode", - "GenericClassifier", - ], - ) - if odi_type[:2] == "PL": - stage_tables = ( - "PlanRelatedData", - [ - "FixedValue", - "VariableAssignment", - "Quota", - "RelationalMDLT", - ], - ) - return stage_tables - - -class _Client(Session): - """Interacts with SAP Commissions REST API. 
Extends requests.Session.""" - - def __init__( - self, - baseUrl: str, - username: str, - password: str, - verifySsl: bool = True, - ) -> None: - """Initialize an endpoint to interact with SAP Commissions.""" - super().__init__() - self.baseUrl: str = baseUrl - self.auth = HTTPBasicAuth(username, password) - if verifySsl is False: - disable_warnings() - self.verify = verifySsl - - def request( # pylint: disable=arguments-differ - self, - method: str, - uri: str, - parameters: dict[str, str] | None = None, - body: list[dict[str, str]] | None = None, - ) -> dict[str, Any | list[dict[str, Any]]] | Response: - """Perform an HTTP request to the SAP Commissions REST API.""" - LOGGER.debug("%s %s %s", method.upper(), uri, parameters) - url: str = self.baseUrl + uri - with super().request( - method=method, - url=url, - params=parameters, - json=body, - ) as response: - try: - response.raise_for_status() - if "application/json" not in response.headers.get("content-type", ""): - raise ValueError("Response content-type is not application/json.") - return response.json() - except HTTPError as error: - LOGGER.error( - "%s %s %s %s", - method.upper(), - response.status_code, - uri, - response.text, - ) - if 401 <= response.status_code <= 403: - raise AuthenticationError(response.text) from error - if 400 <= response.status_code < 500: - raise ClientError(response.text) from error - if 500 <= response.status_code < 600: - raise ServerError(response.text) from error - return None - - def get( # pylint: disable=arguments-renamed,arguments-differ - self, - uri: str, - parameters: dict[str, str] | None = None, - ) -> dict[str, Any | list[dict[str, Any]]]: - """Perform a GET request to the SAP Commissions REST API.""" - return self.request("GET", uri, parameters=parameters) - - def delete( # pylint: disable=arguments-renamed,arguments-differ - self, uri: str, parameters: dict[str, str] | None = None - ) -> dict[str, Any | list[dict[str, Any]]]: - """Perform a DELETE request to the SAP 
Commissions REST API.""" - return self.request("DELETE", uri, parameters=parameters) - - def post( # pylint: disable=arguments-renamed,arguments-differ - self, uri: str, body: list[dict[str, str]] - ) -> dict[str, Any | list[dict[str, Any]]]: - """Perform a POST request to the SAP Commissions REST API.""" - return self.request("POST", uri, body=body) - - def put( # pylint: disable=arguments-renamed,arguments-differ - self, uri: str, body: list[dict[str, str]] - ) -> dict[str, Any | list[dict[str, Any]]]: - """Perform a PUT request to the SAP Commissions REST API.""" - return self.request("PUT", uri, body=body) - - -class _Endpoint: - """Provides a base template for an endpoint method.""" - - resource: resources._Resource - - def __init__(self, connection: Connection) -> None: - """Initialize a base template for an endpoint method.""" - self._client = _Client( - baseUrl=connection.apiUrl, - username=connection.username, - password=connection.password, - verifySsl=connection.verifySsl, - ) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.name})" - - @property - def name(self) -> str: - """Returns the name of the resource.""" - return self.resource._name # pylint: disable=protected-access - - @property - def url(self) -> str: - """Returns the API URL of the endpoint.""" - return f"/v2/{self.name}" - - -class _Create(_Endpoint): - def create(self, instance: resources._Resource) -> resources._Resource: - """ - Create a new resource for the endpoint. - - Parameters - ---------- - instances : resources._Resource - Resource to create. 
- """ - LOGGER.info("Create %s", self.name) - - assert isinstance(instance, self.resource) - json_data = instance.to_dict() - - response = self._client.post(self.url, [json_data]) - data = response[self.name] - created = self.resource.from_dict(data[0]) - - return created - - -class _CreateVersions(_Endpoint): - def create_versions( - self, seq: int | str, versions: list[resources._Resource] - ) -> list[resources._Resource]: - """ - Create versions of an existing resource. - - Parameters - ---------- - seq : int | str - Resource system identifier. - versions : list[resources._Resource] - List of resource versions to create. - """ - LOGGER.info("Create versions for %s with seq %s", self.name, seq) - - assert isinstance(seq, (int, str)) - assert isinstance(versions, list) - for version in versions: - assert isinstance(version, self.resource) - json_data = [version.to_dict() for version in versions] - - response = self._client.post(self.url + f"({seq})/versions", json_data) - if response is not None: - data = response[self.name] - created_versions = [self.resource.from_dict(item) for item in data] - else: - created_versions = versions - - return created_versions - - -class _Delete(_Endpoint): - def delete(self, seq: int | str) -> str: - """ - Delete an existing resource. - - Parameters - ---------- - seq : int | str - Resource system identifier to delete. - """ - LOGGER.info("Delete %s with seq %s", self.name, seq) - - assert isinstance(seq, (int, str)) - - response = self._client.delete(f"{self.url}({seq})") - data = response[self.name] - message = data[str(seq)] - - return message - - -class _DeleteVersions(_Endpoint): - def delete_versions( - self, - seq: int | str, - effectiveStartDate: date, - effectiveEndDate: date, - fillFromRight: bool = False, - ) -> str: - """ - Deletes the given version for an existing resource. - - Parameters - ---------- - seq : int | str - Resource system identifier. - effectiveStartDate : date - Resource effectiveStartDate. 
- effectiveEndDate : date - Resource effectiveEndDate. - fillFromRight : bool - If true, then the gap will be filled by the right (next) version, - otherwise by the left (prev) version. Default is false (prev). - """ - LOGGER.info("Delete versions for %s with seq %s", self.name, seq) - - query = {} - assert isinstance(seq, (int, str)) - assert isinstance(effectiveStartDate, date) - query["effectiveStartDate"] = effectiveStartDate.strftime("%Y-%m-%d") - assert isinstance(effectiveEndDate, date) - query["effectiveEndDate"] = effectiveEndDate.strftime("%Y-%m-%d") - assert isinstance(fillFromRight, bool) - query["fillFromRight"] = fillFromRight - - response = self._client.delete(self.url + f"({seq})/versions", query) - data = response[self.name] - message = data[0] - - return message - - -class _Get(_Endpoint): - def get(self, seq: int | str) -> resources._Resource: - """ - Reads all of the attributes of an existing resource. - - Parameters - ---------- - seq : int | str - Resource system identifier. - """ - LOGGER.info("Get %s with seq %s", self.name, seq) - - assert isinstance(seq, (int, str)) - - query = {} - if expand := self.resource._expands: # pylint: disable=protected-access - query["expand"] = ",".join(expand) - - response = self._client.get(self.url + f"({seq})", query if query else None) - item = self.resource.from_dict(response) - - return item - - -class _GetVersions(_Endpoint): - def get_versions( - self, - seq: int | str, - startDate: date = None, - endDate: date = None, - ) -> list[resources._Resource]: - """ - Returns all of the versions of a resource. - - Parameters - ---------- - seq : int | str - Resource system identifier. - startDate : date - Filter List for resources effective for startDate. - endDate : date - Filter List for resources effective for endDate. 
- """ - LOGGER.info("Get versions of %s with seq %s", self.name, seq) - - assert isinstance(seq, (int, str)) - - query = {} - if expand := self.resource._expands: # pylint: disable=protected-access - query["expand"] = ",".join(expand) - - if startDate: - assert isinstance(startDate, date) - query["startDate"] = startDate.strftime("%Y-%m-%d") - if endDate: - assert isinstance(endDate, date) - query["endDate"] = endDate.strftime("%Y-%m-%d") - - response = self._client.get(self.url + f"({seq})/versions", query) - data = response[self.name] - resource_versions = [self.resource.from_dict(item) for item in data] - - return resource_versions - - -class _List(_Endpoint): - def list( - self, - filter: str = None, # pylint: disable=redefined-builtin - startDate: date = None, - endDate: date = None, - limit: int = None, - raw: bool = False, - **filter_kwargs: dict, - ) -> list[resources._Resource]: - """ - Returns a list of resources (single valid version). - - Parameters - ---------- - filter : str - Add filter conditions. - startDate : date - Filter List for resource effective for startDate. - endDate : date - Filter List for resource effective for endDate. - limit : int - Limit the number of resources returned. - raw : bool - If true, then the response is returned as is, otherwise it is converted - to resource objects. Default is False. - filter_kwargs : dict - Additional filter conditions, applied with the AND operator. - - Examples - -------- - p.list() - Returns all resources for today's effective date. - p.list(filter="name eq '*Smith*'") - Returns all resources with a name containing 'Smith'. - `p.list(name="*Smith*")` - Also returns all resources with a name containing 'Smith'. - - The keyword arguments are converted to filters. The keyword must be a - part of the resource's attributes. 
- """ - LOGGER.info("List %s", self.name) - - query = {"top": limit if limit and limit < 100 else 100} - if expand := self.resource._expands: # pylint: disable=protected-access - query["expand"] = ",".join(expand) - if filter: - assert isinstance(filter, str) - query["$filter"] = filter - if startDate: - assert isinstance(startDate, date) - # Unlike the other methods, this one requires a date in [YYYY/MM/DD] - query["startDate"] = startDate.strftime("%Y/%m/%d") - if endDate: - assert isinstance(endDate, date) - # Unlike the other methods, this one requires a date in [YYYY/MM/DD] - query["endDate"] = endDate.strftime("%Y/%m/%d") - - if filter_kwargs: - filters = " and ".join([f"{k} eq '{v}'" for k, v in filter_kwargs.items()]) - if filter: - LOGGER.warning( - "filter and filter_kwargs are both set," - " this could lead to unexpected results." - ) - query["$filter"] = f"({filter}) and (filters)" - else: - query["$filter"] = filters - - yield_count: int = 0 - response = self._client.get(self.url, query) - data = response[self.name] - for item in data: - yield item if raw else self.resource.from_dict(item) - yield_count += 1 - if limit is not None and yield_count >= limit: - return - - while url := response.get("next"): - response = self._client.get(url) - data = response[self.name] - for item in data: - yield item if raw else self.resource.from_dict(item) - yield_count += 1 - if limit and yield_count >= limit: - return - - def get_id( - self, - id: str, # pylint: disable=redefined-builtin - raw: bool = False, - ) -> resources._Resource: - """ - Reads all of the attributes of an existing resource. - - Parameters - ---------- - id : str - User unique identifier. - raw : bool - If true, then the response is returned as is, otherwise it is converted - to resource objects. Default is False. 
- """ - LOGGER.info("Get %s with id %s", self.name, id) - - assert isinstance(id, str) - # pylint: disable-next=protected-access - if (id_attr := self.resource._idAttr) is None: - LOGGER.warning("%s has no id attribute.", self.name) - return None - - query = {"top": 10} - if expand := self.resource._expands: # pylint: disable=protected-access - query["expand"] = ",".join(expand) - query["$filter"] = f"{id_attr} eq '{id}'" - - response = self._client.get(self.url, query) - data = response[self.name] - items = data if raw else [self.resource.from_dict(item) for item in data] - if len(data) > 1: - LOGGER.warning("Returned %s items for id %s.", len(data), id) - item = items[0] if items else None - - return item - - def count( - self, - filter: str = None, # pylint: disable=redefined-builtin - startDate: date = None, - endDate: date = None, - **filter_kwargs: dict, - ) -> int: - """ - Returns the number of resources. - - Parameters - ---------- - filter : str - Add filter conditions. - startDate : date - Filter List for resource effective for startDate. - endDate : date - Filter List for resource effective for endDate. - filter_kwargs : dict - Additional filter conditions, applied with the AND operator. - - Examples - -------- - p.count() - Returns the count of resources for today's effective date. - p.count(filter="name eq '*Smith*'") - Returns the count of resources with a name containing 'Smith'. - `p.count(name="*Smith*")` - Also returns the count of resources with a name containing 'Smith'. - - The keyword arguments are converted to filters. The keyword must be a - part of the resource's attributes. 
- """ - LOGGER.info("List %s", self.name) - - query = {"top": 1, "inlineCount": True} - if filter: - assert isinstance(filter, str) - query["$filter"] = filter - if startDate: - assert isinstance(startDate, date) - # Unlike the other methods, this one requires a date in [YYYY/MM/DD] - query["startDate"] = startDate.strftime("%Y/%m/%d") - if endDate: - assert isinstance(endDate, date) - # Unlike the other methods, this one requires a date in [YYYY/MM/DD] - query["endDate"] = endDate.strftime("%Y/%m/%d") - - if filter_kwargs: - filters = " and ".join([f"{k} eq '{v}'" for k, v in filter_kwargs.items()]) - if filter: - LOGGER.warning( - "filter and filter_kwargs are both set," - " this could lead to unexpected results." - ) - query["$filter"] = f"({filter}) and (filters)" - else: - query["$filter"] = filters - - response = self._client.get(self.url, query) - return response["total"] - - -class _Update(_Endpoint): - def update(self, update: resources._Resource) -> resources._Resource: - """ - Update an exiting resource. - - Parameters - ---------- - resource : resources._Resource - Resource to update. - """ - LOGGER.info("Update %s", self.name) - - assert isinstance(update, self.resource) - json_data = update.to_dict() - - response = self._client.put(self.url, [json_data]) - data = response[self.name] - updated = self.resource.from_dict(data[0]) - - return updated - - -class _UpdateVersions(_Endpoint): - def update_versions( - self, seq: int | str, versions: list[resources._Resource] - ) -> list: - """ - Update versions of an existing resource. - - Parameters - ---------- - seq : int | str - Resource system identifier. - versions : list[resources._Resource] - List of resource versions with attributes for the endpoint. 
- """ - LOGGER.info("Update versions for %s with seq %s", self.name, seq) - - assert isinstance(seq, (int, str)) - assert isinstance(versions, list) - for version in versions: - assert isinstance(version, self.resource) - json_data = [item.to_dict() for item in versions] - - response = self._client.put(self.url + f"({seq})/versions", json_data) - data = response[self.name] - updated_versions = [self.resource.from_dict(item) for item in data] - - return updated_versions - - -class AppliedDeposits(_Get, _List): - resource = resources.AppliedDeposit - - -class AuditLogs(_Get, _List): - resource = resources.AuditLog - - -class Balances(_Get, _List): - resource = resources.Balance - - -class BusinessUnits(_Create, _Get, _List, _Update): - resource = resources.BusinessUnit - - -class Calendars(_Create, _Delete, _Get, _List, _Update): - resource = resources.Calendar - - -class Categories( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Category - - -class CategoryClassifiers(_Create, _Get, _List, _Update): - resource = resources.CategoryClassifier - - -class CategoryTrees( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.CategoryTree - - -class Commissions(_Get, _List): - resource = resources.Commission - - -class Credits(_Create, _Get, _List, _Update): - resource = resources.Credit - - -class CreditTypes(_Create, _Delete, _Get, _List, _Update): - resource = resources.CreditType - - -class Customers( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Customer - - -class Deposits(_Create, _Get, _List, _Update): - resource = resources.Deposit - - -class EarningCodes(_Create, _Delete, _Get, _List, _Update): - resource = resources.EarningCode - - -class EarningGroupCodes(_Create, 
_Delete, _Get, _List, _Update): - resource = resources.EarningGroupCode - - -class EarningGroups(_Create, _Delete, _Get, _List, _Update): - resource = resources.EarningGroup - - -class EventTypes(_Create, _Delete, _Get, _List, _Update): - resource = resources.EventType - - -class FixedValues( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.FixedValue - - -class FixedValueTypes(_Create, _Delete, _Get, _List, _Update): - resource = resources.FixedValueType - - -class FixedValueVariables( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.FixedValueVariable - - -class Formulas(_Get, _List): - resource = resources.Formula - - -class GenericClassifiers( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.GenericClassifier - - -class GenericClassifierTypes(_Get, _List): - resource = resources.GenericClassifierType - - -class GlobalFieldNames(_Create, _Delete, _Get, _List, _Update): - resource = resources.GlobalFieldName - - -class Groups(_Create, _Delete, _Get, _List, _Update): - resource = resources.Group - - -class Incentives(_Get, _List): - resource = resources.Incentive - - -class LookUpTables(_Get, _List): - resource = resources.LookUpTable - - -class LookUpTableVariables( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.LookUpTableVariable - - -class Measurements(_Get, _List): - resource = resources.Measurement - - -class MessageLogs(_Get, _List): - resource = resources.MessageLog - - -class Messages(_Get, _List): - resource = resources.Message - - -class Models(_Get, _List): - resource = resources.Model - - -class Participants( - _Create, - _CreateVersions, - _Delete, - 
_DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Participant - - -class PaymentMappings(_Create, _Delete, _Get, _List, _Update): - resource = resources.PaymentMapping - - -class Payments(_Get, _List): - resource = resources.Payment - - -class PaymentSummarys(_Get, _List): - resource = resources.PaymentSummary - - -class Periods(_Create, _Delete, _Get, _List, _Update): - resource = resources.Period - - -class Pipelines(_Get, _List): # pylint disable=too-many-public-methods - resource = resources.PipelineRun - - def _run_pipeline( - self, - stageTypeSeq: str, - calendarSeq: str, - periodSeq: str, - runMode: PipelineRunMode = PipelineRunMode.FULL, - positionSeqs: list[str] | None = None, - removeStaleResults: bool | None = None, - runStats: bool | None = None, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """Run a PipelineRun command.""" - command = { - "command": "PipelineRun", - "stageTypeSeq": stageTypeSeq, - "calendarSeq": calendarSeq, - "periodSeq": periodSeq, - "runMode": runMode.value, - } - if ( - positionSeqs is not None - and isinstance(positionSeqs, list) - and len(positionSeqs) > 0 - ): - command["runMode"] = "positions" - command["positionSeqs"] = positionSeqs - if removeStaleResults is not None: - command["removeStaleResults"] = removeStaleResults - if runStats is not None: - command["runStats"] = runStats - if processingUnitSeq is not None: - command["processingUnitSeq"] = processingUnitSeq - - response = self._client.post(self.url, [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - def generate_reports( - self, - calendarSeq: str, - periodSeq: str, - formats: list[ReportFormat], - reports: list[str], - groups: list[str] | None = None, - positionSeqs: list[str] | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reports 
Generation pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - formats : list[ReportFormat] - List of report formats. - reports : list[str] - List of report names. - groups : list[str] : Optional - List of BO groups names. Use either groups or positionSeqs. - positionSeqs : list[str] : Optional - List of position system identifiers. Use either groups or positionSeqs. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - if (groups is None and positionSeqs is None) or ( - groups is not None and positionSeqs is not None - ): - LOGGER.error("Use either groups or positionSeqs") - raise ValueError("Use either groups or positionSeqs") - command = { - "command": "PipelineRun", - "stageTypeSeq": "21673573206720698", - "calendarSeq": calendarSeq, - "periodSeq": periodSeq, - "generateODSReports": True, - "reportTypeName": "Crystal", - "reportFormatsList": [format.value for format in formats], - "odsReportList": reports, - "runMode": "full", - "runStats": runStats, - } - if groups is not None and isinstance(groups, list) and len(groups) > 0: - command["boGroupsList"] = groups - if ( - positionSeqs is not None - and isinstance(positionSeqs, list) - and len(positionSeqs) > 0 - ): - command["runMode"] = "positions" - command["positionSeqs"] = positionSeqs - if processingUnitSeq is not None: - command["processingUnitSeq"] = processingUnitSeq - - response = self._client.post(self.url, [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - def classify( - self, - calendarSeq: str, - periodSeq: str, - incremental: bool = False, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Classify pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. 
- periodSeq : str - Period system identifier. - incremental : bool : optional - Only process new and modified transactions. Default is False. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720515", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runMode=( - PipelineRunMode.INCREMENTAL if incremental else PipelineRunMode.FULL - ), - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def allocate( - self, - calendarSeq: str, - periodSeq: str, - incremental: bool = False, - positionSeqs: list[str] | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Allocate pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - incremental : bool : optional - Only process new and modified transactions. Default is False. - positionSeqs : list[str] : optional - Run for specific positions. Provide a list of positionSeq. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720516", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runMode=( - PipelineRunMode.INCREMENTAL if incremental else PipelineRunMode.FULL - ), - positionSeqs=positionSeqs, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reward( - self, - calendarSeq: str, - periodSeq: str, - positionSeqs: list[str] | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reward pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - positionSeqs : list[str] : optional - Run for specific positions. Provide a list of positionSeq. 
- runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720518", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - positionSeqs=positionSeqs, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def pay( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Pay pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720519", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def summarize( - self, - calendarSeq: str, - periodSeq: str, - incremental: bool = False, - positionSeqs: list[str] | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Summarize pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - incremental : bool : optional - Only process new and modified transactions. Default is False. - positionSeqs : list[str] : optional - Run for specific positions. Provide a list of positionSeq. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. 
- """ - return self._run_pipeline( - stageTypeSeq="21673573206720531", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runMode=( - PipelineRunMode.INCREMENTAL if incremental else PipelineRunMode.FULL - ), - positionSeqs=positionSeqs, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def compensate( - self, - calendarSeq: str, - periodSeq: str, - incremental: bool = False, - positionSeqs: list[str] | None = None, - removeStaleResults: bool = False, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Compensate pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - incremental : bool : optional - Only process new and modified transactions. Default is False. - positionSeqs : list[str] : optional - Run for specific positions. Provide a list of positionSeq. - removeStaleResults: bool : Optional - Enable remove stale results. Default is False. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720530", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runMode=( - PipelineRunMode.INCREMENTAL if incremental else PipelineRunMode.FULL - ), - positionSeqs=positionSeqs, - runStats=runStats, - removeStaleResults=removeStaleResults, - processingUnitSeq=processingUnitSeq, - ) - - def comp_and_pay( - self, - calendarSeq: str, - periodSeq: str, - incremental: bool = False, - positionSeqs: list[str] | None = None, - removeStaleResults: bool = False, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Compensate And Pay pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - incremental : bool : optional - Only process new and modified transactions. 
Default is False. - positionSeqs : list[str] : optional - Run for specific positions. Provide a list of positionSeq. - removeStaleResults: bool : Optional - Enable remove stale results. Default is False. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720532", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runMode=( - PipelineRunMode.INCREMENTAL if incremental else PipelineRunMode.FULL - ), - positionSeqs=positionSeqs, - runStats=runStats, - removeStaleResults=removeStaleResults, - processingUnitSeq=processingUnitSeq, - ) - - def post( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Post pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720520", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def undo_post( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Undo Post pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. 
- """ - return self._run_pipeline( - stageTypeSeq="21673573206720718", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def finalize( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Finalize pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720521", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def undo_finalize( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Undo Finalize pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720721", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reset_from_classify( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reset From Classify pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. 
- """ - return self._run_pipeline( - stageTypeSeq="21673573206720514", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reset_from_allocate( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reset From Allocate pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720523", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reset_from_reward( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reset From Reward pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720522", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reset_from_pay( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reset From Payment pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. 
- """ - return self._run_pipeline( - stageTypeSeq="21673573206720526", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def cleanup_deferred_results( - self, calendarSeq: str, periodSeq: str, processingUnitSeq: str | None = None - ) -> resources.PipelineRun: - """ - Run Cleanup Deferred Results pipeline. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720540", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - processingUnitSeq=processingUnitSeq, - ) - - def approve_calculated_data( - self, calendarSeq: str, periodSeq: str, processingUnitSeq: str | None = None - ) -> resources.PipelineRun: - """ - Run Approve calculated data. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720712", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - processingUnitSeq=processingUnitSeq, - ) - - def purge_approved_data( - self, calendarSeq: str, periodSeq: str, processingUnitSeq: str | None = None - ) -> resources.PipelineRun: - """ - Run Purge approved data. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720715", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - processingUnitSeq=processingUnitSeq, - ) - - def update_analytics( - self, - calendarSeq: str, - periodSeq: str, - runStats: bool = True, - processingUnitSeq: str | None = None, - ): - """ - Run Update Analitics pipeline. 
- - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_pipeline( - stageTypeSeq="21673573206720701", - calendarSeq=calendarSeq, - periodSeq=periodSeq, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def _run_import( - self, - stageTypeSeq: str, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - revalidate: Revalidate | None = None, - runStats: bool | None = None, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """Run a Import command.""" - stage_tables = _stage_tables(batchName) - command = { - "command": "Import", - "stageTypeSeq": stageTypeSeq, - "calendarSeq": calendarSeq, - "batchName": batchName, - "runMode": runMode.value, - "module": stage_tables[0], - "stageTables": stage_tables[1], - } - if revalidate is not None: - command["revalidate"] = revalidate.value - if runStats is not None: - command["runStats"] = runStats - if processingUnitSeq is not None: - command["processingUnitSeq"] = processingUnitSeq - - response = self._client.post(self.url, [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - def validate( - self, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - revalidate: Revalidate | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Validate data from stage. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - batchName : str - Batch name. - runMode : ImportRunMode : optional - Import all or only new and modified data. Default: ALL. - revalidate : Revalidate : optional - Revalidate all or only errors if provided. Do not revalidate if None. - Default: None. 
- runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_import( - stageTypeSeq="21673573206720533", - calendarSeq=calendarSeq, - batchName=batchName, - runMode=runMode, - revalidate=revalidate, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def transfer( - self, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Transfer data from stage, leave invalid data. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - batchName : str - Batch name. - runMode : ImportRunMode : optional - Import all or only new and modified data. Default: ALL. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_import( - stageTypeSeq="21673573206720534", - calendarSeq=calendarSeq, - batchName=batchName, - runMode=runMode, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def transfer_if_all_valid( - self, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Transfer data from stage only if all data is valid. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - batchName : str - Batch name. - runMode : ImportRunMode : optional - Import all or only new and modified data. Default: ALL. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. 
- """ - return self._run_import( - stageTypeSeq="21673573206720535", - calendarSeq=calendarSeq, - batchName=batchName, - runMode=runMode, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def validate_and_transfer( - self, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - revalidate: Revalidate | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Validate and Transfer data from stage, leave invalid data. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - batchName : str - Batch name. - runMode : ImportRunMode : optional - Import all or only new and modified data. Default: ALL. - revalidate : Revalidate : optional - Revalidate all or only errors if provided. Do not revalidate if None. - Default: None. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_import( - stageTypeSeq="21673573206720536", - calendarSeq=calendarSeq, - batchName=batchName, - runMode=runMode, - revalidate=revalidate, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def validate_and_transfer_if_all_valid( - self, - calendarSeq: str, - batchName: str, - runMode: ImportRunMode = ImportRunMode.ALL, - revalidate: Revalidate | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Validate and Transfer data from stage only if all data is valid. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - batchName : str - Batch name. - runMode : ImportRunMode : optional - Import all or only new and modified data. Default: ALL. - revalidate : Revalidate : optional - Revalidate all or only errors if provided. Do not revalidate if None. - Default: None. - runStats : bool : optional - Run statistics. 
- processingUnitSeq : str : optional - Processing Unit system identifier. - """ - return self._run_import( - stageTypeSeq="21673573206720537", - calendarSeq=calendarSeq, - batchName=batchName, - runMode=runMode, - revalidate=revalidate, - runStats=runStats, - processingUnitSeq=processingUnitSeq, - ) - - def reset_from_validate( - self, - calendarSeq: str, - periodSeq: str, - batchName: str | None = None, - runStats: bool = True, - processingUnitSeq: str | None = None, - ) -> resources.PipelineRun: - """ - Run Reset From Validate. - - Parameters - ---------- - calendarSeq : str - Calendar system identifier. - periodSeq : str - Period system identifier. - batchName : str : Optional - Batch name. Remove all batches if None. - runStats : bool : optional - Run statistics. - processingUnitSeq : str : optional - Processing Unit system identifier. - """ - command = { - "calendarSeq": calendarSeq, - "periodSeq": periodSeq, - "runStats": runStats, - } - if batchName is not None: - command["batchName"] = batchName - if processingUnitSeq is not None: - command["processingUnitSeq"] = processingUnitSeq - - response = self._client.post(self.url + "/resetfromvalidate", [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - def purge(self, batchName: str): - """ - Run Purge import data. - - Parameters - ---------- - batchName : str - Batch name to purge. - """ - stage_tables = _stage_tables(batchName) - command = { - "command": "PipelineRun", - "stageTypeSeq": 21673573206720573, - "batchName": batchName, - "module": stage_tables[0], - "stageTables": stage_tables[1], - } - - response = self._client.post(self.url, [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - def xml_import( - self, xmlFileName: str, xmlFileContent: str, updateExistingObjects: bool = False - ): - """ - Run XML Import. 
- - Parameters - ---------- - xmlFileName : str - Filename of imported file. - xmlFileContent : str - File content of imported file. - updateExistingObjects : bool : optional - Update existing opbjects. Default is False. - """ - command = { - "command": "XMLImport", - "stageTypeSeq": "21673573206720693", - "xmlFileName": xmlFileName, - "xmlFileContent": xmlFileContent, - "updateExistingObjects": updateExistingObjects, - } - - response = self._client.post(self.url, [command]) - data = response[self.name] - pipeline_seq = data["0"][0] - return resources.PipelineRun(pipelineRunSeq=pipeline_seq) - - -class Plans(_Get, _List): - resource = resources.Plan - - -class PositionGroups(_Create, _Delete, _Get, _List, _Update): - resource = resources.PositionGroup - - -class PositionRelations( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.PositionRelation - - -class PositionRelationTypes(_Create, _Delete, _Get, _List, _Update): - resource = resources.PositionRelationType - - -class Positions( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Position - - -class PrimaryMeasurements(_Get, _List): - resource = resources.PrimaryMeasurement - - -class ProcessingUnits(_Create, _Get, _List, _Update): - resource = resources.ProcessingUnit - - -class Products( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Product - - -class Quotas(_Create, _Delete, _Get, _List, _Update): - resource = resources.Quota - - -class RateTables(_Get, _List): - resource = resources.RateTable - - -class RateTableVariables( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.RateTableVariable - - -class 
Reasons(_Create, _Delete, _Get, _List, _Update): - resource = resources.Reason - - -class SalesOrders(_Create, _Delete, _Get, _List, _Update): - resource = resources.SalesOrder - - -class SalesTransactions(_Create, _Delete, _Get, _List, _Update): - resource = resources.SalesTransaction - - -class SecondaryMeasurements(_Get, _List): - resource = resources.SecondaryMeasurement - - -class StatusCodes(_Create, _Delete, _Get, _List, _Update): - resource = resources.StatusCode - - -class Territories( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Territory - - -class TerritoryVariables( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.TerritoryVariable - - -class Titles( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Title - - -class UnitTypes(_Get, _List): - resource = resources.UnitType - - -class Users(_Create, _Delete, _Get, _List, _Update): - resource = resources.User - - -class Variables( - _Create, - _CreateVersions, - _Delete, - _DeleteVersions, - _Get, - _GetVersions, - _List, - _Update, - _UpdateVersions, -): - resource = resources.Variable diff --git a/sapcommissions/exceptions.py b/sapcommissions/exceptions.py deleted file mode 100644 index 2729788..0000000 --- a/sapcommissions/exceptions.py +++ /dev/null @@ -1,18 +0,0 @@ -""" -Exceptions for the sapcommissions package. -""" - - -class ClientError(Exception): - """Exception that incdicates that there was an error with the client.""" - - -class ServerError(Exception): - """Exception that incdicates that there was an error with the server.""" - - -class AuthenticationError(ClientError): - """ - User is not authorized to perform the request. Likely due to an incorrect - username, password or missing privileges. 
- """ diff --git a/sapcommissions/resources.py b/sapcommissions/resources.py deleted file mode 100644 index edff26e..0000000 --- a/sapcommissions/resources.py +++ /dev/null @@ -1,2178 +0,0 @@ -""" -Create instances of SAP Commissions objects, like `Participant` and `Credit`. -""" -from __future__ import annotations - -import logging -from dataclasses import dataclass, field, fields -from datetime import date, datetime, timezone -from types import UnionType -from typing import ClassVar, get_args, get_origin, get_type_hints - -LOGGER = logging.getLogger(__name__) - - -def _deserialize(value, astype): # pylint: disable=too-many-return-statements - if isinstance(value, list): - if astype is list: - LOGGER.error("Unknown type for list elements: %s", astype) - raise TypeError("Unknown type for list elements") - if get_origin(astype) is list: - if len(subtypes := get_args(astype)) > 1: - LOGGER.error("Impropper type for list elements: %s", subtypes) - raise TypeError("Impropper type for list elements") - return [_deserialize(v, subtypes[0]) for v in value] - LOGGER.error("Impropper type, value is list: %s", astype) - raise TypeError("Impropper type, value is list") - if isinstance(astype, UnionType): - LOGGER.error("UnionType is not supported: %s", astype) - raise NotImplementedError("Unsupported type") - if value is None: - return None - if astype is datetime: - return datetime.fromisoformat(value).astimezone(timezone.utc) - if astype is date: - return datetime.fromisoformat(value).astimezone(timezone.utc).date() - if isinstance(value, astype): - return value - if issubclass(astype, _Resource) and isinstance(value, dict): - return astype.from_dict(value) - if issubclass(astype, _Resource) and isinstance(value, (str, int)): - return astype(**{astype._seqAttr: value}) # pylint: disable=protected-access - return astype(value) - - -def _serialize(value, fromtype): - if isinstance(value, list): - if fromtype is list: - LOGGER.error("Unknown type for list elements: %s", 
fromtype) - raise TypeError("Unknown type for list elements") - if get_origin(fromtype) is list: - if len(subtypes := get_args(fromtype)) > 1: - LOGGER.error("Impropper type for list elements: %s", subtypes) - raise TypeError("Impropper type for list elements") - return [_serialize(v, subtypes[0]) for v in value] - LOGGER.error("Impropper type, value is list: %s", fromtype) - raise TypeError("Impropper type, value is list") - if isinstance(fromtype, UnionType): - LOGGER.error("UnionType is not supported: %s", fromtype) - raise NotImplementedError("Unsupported type") - if value is None: - return None - if isinstance(value, (datetime, date)): - return value.isoformat() - if isinstance(value, _Resource): - return value.to_dict(ignoreSeq=False) - if isinstance(value, int): - return value - return str(value) - - -def _meta( - seq: bool = False, - id: bool = False, # pylint: disable=redefined-builtin - expand: bool = False, - json_ignore: bool = False, # pylint: disable=invalid-name -): - """Helper function to create metadata for a field.""" - return { - "seq": seq, - "id": id, - "expand": expand, - "json_ignore": json_ignore, - } - - -class _Resource: - _endpoint_name: ClassVar[str] - - @classmethod - @property - def _name(cls) -> str: - """Returns the name resource.""" - return cls._endpoint_name - - @classmethod - @property - def _seqAttr(cls) -> str | None: - """Returns the name of the sequence attribute or None.""" - for fld in fields(cls): - if fld.metadata.get("seq") is True: - return fld.name - return None - - @property - def _seq(self) -> int | None: - """Returns the sequence or None.""" - seq_attr = self._seqAttr - return self[seq_attr] if seq_attr else None - - @classmethod - @property - def _idAttr(cls) -> str | None: - """Returns the name of the identifier attribute or None.""" - for fld in fields(cls): - if fld.metadata.get("id") is True: - return fld.name - return None - - @property - def _id(self) -> str | None: - """Returns the identifier or None.""" - 
id_attr = self._idAttr - return self[id_attr] if id_attr else None - - @classmethod - @property - def _expands(cls) -> tuple: - """Returns the name of the expandable attributes.""" - return tuple( - fld.name - for fld in fields(cls) - if fld.type not in ("str", "int", "date", "datetime", "bool", "Value") - and fld.metadata.get("expand") is not False - or fld.metadata.get("expand") is True - ) - - @classmethod - def from_dict(cls, json: dict) -> _Resource: - """Convert dictionary to _Resource instance.""" - reference_keys = ("objectType", "key", "displayName") - if all(key in json for key in reference_keys): - if (object_type := json["objectType"]) != cls.__name__: - LOGGER.error("Reference mismatch %s -> %s", object_type, cls.__name__) - raise TypeError("Reference mismatch") - seq_value = json["key"] - id_value = json["displayName"] - json.clear() - if (seq_attr := cls._seqAttr) is not None: - json[seq_attr] = seq_value - if (id_attr := cls._idAttr) is not None: - json[id_attr] = id_value - - types = get_type_hints(cls) - valid_json = {} - - for field_name, value in json.items(): - if field_name in types: - valid_json[field_name] = _deserialize(value, types[field_name]) - else: - LOGGER.warning( - "%s is not a valid field for %s", field_name, cls.__name__ - ) - - return cls(**valid_json) - - def to_dict(self, ignoreSeq: bool = True) -> dict: - """Convert _Resource instance to dictionary.""" - types = get_type_hints(self.__class__) - data = {} - for fld in fields(self): - if (value := self[fld.name]) is None: - continue - if fld.metadata.get("json_ignore") is True: - continue - if ignoreSeq and fld.metadata.get("seq") is True: - continue - data[fld.name] = _serialize(value, types[fld.name]) - return data - - def __getitem__(self, attribute: str): - return getattr(self, attribute) - - -@dataclass(frozen=True) -class Error: - message: str = field(default=None) - timeStamp: datetime = datetime.now() - - -@dataclass -class Address(_Resource): - _endpoint_name: 
ClassVar[str] = "address" - address1: str = field(default=None, repr=True) - address2: str = field(default=None, repr=False) - address3: str = field(default=None, repr=False) - postalCode: str = field(default=None, repr=False) - city: str = field(default=None, repr=False) - state: str = field(default=None, repr=False) - country: str = field(default=None, repr=False) - areaCode: str = field(default=None, repr=False) - geography: str = field(default=None, repr=False) - phone: str = field(default=None, repr=False) - fax: str = field(default=None, repr=False) - industry: str = field(default=None, repr=False) - contact: str = field(default=None, repr=False) - custId: str = field(default=None, repr=False) - company: str = field(default=None, repr=False) - - -@dataclass -class AppliedDeposit(_Resource): - _endpoint_name: ClassVar[str] = "appliedDeposits" - appliedDepositSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - earningCodeId: str = field(default=None, repr=False) - trialPipelineRun: PipelineRun = field(default=None, repr=False) - trialPipelineRunDate: datetime = field(default=None, repr=False) - postPipelineRun: PipelineRun = field(default=None, repr=False) - postPipelineRunDate: datetime = field(default=None, repr=False) - entryNumber: int = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class AuditLog(_Resource): - _endpoint_name: ClassVar[str] = "auditLogs" - auditLogSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - businessUnit: BusinessUnit = field(default=None, repr=False) - objectSeq: str = 
field(default=None, repr=False) - eventDescription: str = field(default=None, repr=True) - objectName: str = field(default=None, repr=False) - eventType: str = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - userId: str = field(default=None, repr=False) - eventDate: date = field(default=None, repr=True) - objectType: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Balance(_Resource): - _endpoint_name: ClassVar[str] = "balances" - balanceSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - earningCodeId: str = field(default=None, repr=False) - trialPipelineRun: PipelineRun = field(default=None, repr=False) - trialPipelineRunDate: datetime = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - applyPipelineRun: PipelineRun = field(default=None, repr=False) - applyPipelineRunDate: datetime = field(default=None, repr=False) - postPipelineRun: PipelineRun = field(default=None, repr=False) - postPipelineRunDate: datetime = field(default=None, repr=False) - balanceStatusId: str = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class BusinessUnit(_Resource): - _endpoint_name: ClassVar[str] = "businessUnits" - businessUnitSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - mask: int = 
field(default=None, repr=False) - smask: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Calendar(_Resource): - _endpoint_name: ClassVar[str] = "calendars" - calendarSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - minorPeriodType: PeriodType = field(default=None, repr=False) - majorPeriodType: PeriodType = field(default=None, repr=False) - periods: list[Period] = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Category(_Resource): - _endpoint_name: ClassVar[str] = "categories" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - owner: CategoryTree = field(default=None, repr=False) - parent: Category = field(default=None, repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - returnType: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=date(2200, 1, 1), repr=True) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createDate: datetime = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, 
metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - 
genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class CategoryClassifier(_Resource): - _endpoint_name: ClassVar[str] = "categoryClassifiers" - categoryClassifiersSeq: int = field( - default=None, metadata=_meta(seq=True), repr=False - ) - categoryTree: CategoryTree = field(default=None, repr=False) - category: Category = field(default=None, repr=False) - classifier: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=date(2200, 1, 1), repr=True) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class CategoryTree(_Resource): - _endpoint_name: ClassVar[str] = "categoryTrees" - categoryTreeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, repr=False) - classifierSelectorId: str = field(default=None, repr=False) - classifierClass: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = 
field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - ruleExpression: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Commission(_Resource): - _endpoint_name: ClassVar[str] = "commissions" - commissionSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - incentive: Incentive = field(default=None, repr=False) - credit: Credit = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - rateValue: Value = field(default=None, repr=False) - entryNumber: str = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - originTypeId: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Credit(_Resource): - _endpoint_name: ClassVar[str] = "credits" - creditSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - position: Position = field(default=None, repr=False) - salesOrder: SalesOrder = field(default=None, repr=False) - salesTransaction: SalesTransaction = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - creditType: CreditType = field(default=None, repr=False) - value: Value = 
field(default=None, repr=False) - preadjustedValue: Value = field(default=None, repr=False) - originTypeId: str = field(default=None, repr=False) - reason: Reason = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - isRollable: bool = field(default=None, repr=False) - rollDate: date = field(default=None, repr=False) - isHeld: bool = field(default=None, repr=False) - releaseDate: date = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - compensationDate: date = field(default=None, repr=False) - comments: str = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = 
field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class CreditType(_Resource): - _endpoint_name: ClassVar[str] = "creditTypes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - creditTypeId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Customer(_Resource): - _endpoint_name: ClassVar[str] = "customers" - classifierSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - classifierId: str = field(default=None, metadata=_meta(id=True), repr=True) - name: str = field(default=None, repr=False) - description: str = 
field(default=None, repr=False) - selectorId: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - address1: str = field(default=None, repr=False) - address2: str = field(default=None, repr=False) - address3: str = field(default=None, repr=False) - city: str = field(default=None, repr=False) - state: str = field(default=None, repr=False) - country: str = field(default=None, repr=False) - phone: str = field(default=None, repr=False) - areaCode: str = field(default=None, repr=False) - postalCode: str = field(default=None, repr=False) - geography: str = field(default=None, repr=False) - fax: str = field(default=None, repr=False) - email: str = field(default=None, repr=False) - industry: str = field(default=None, repr=False) - contact: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - 
genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Deposit(_Resource): - _endpoint_name: ClassVar[str] = "deposits" - depositSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - earningCodeId: str = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - position: Position = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - preadjustedValue: Value = field(default=None, repr=False) - originTypeId: str = field(default=None, repr=False) - reason: Reason = field(default=None, repr=False) - businessUnits: 
list[BusinessUnit] = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - depositDate: date = field(default=None, repr=False) - isHeld: bool = field(default=None, repr=False) - releaseDate: date = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - comments: str = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - 
genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class EarningCode(_Resource): - _endpoint_name: ClassVar[str] = "earningCodes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - earningCodeId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class EarningGroup(_Resource): - _endpoint_name: ClassVar[str] = "earningGroups" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - earningGroupId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - 
notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class EarningGroupCode(_Resource): - _endpoint_name: ClassVar[str] = "earningGroupCodes" - earningGroupCodeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - earningGroupCode: str = field(default=None, metadata=_meta(id=True), repr=True) - earningCodeId: str = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class EventType(_Resource): - _endpoint_name: ClassVar[str] = "eventTypes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - eventTypeId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class FixedValue(_Resource): - _endpoint_name: ClassVar[str] = "fixedValues" - ruleElementOwnerSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - 
businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - calendar: Calendar = field(default=None, repr=False) - periodType: PeriodType = field(default=None, repr=False) - fixedValueType: FixedValueType = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - unitTypeSeq: UnitType = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - ruleElementSeq: str = field(default=None, repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class FixedValueType(_Resource): - _endpoint_name: ClassVar[str] = "fixedValueTypes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - fixedValueTypeId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class 
FixedValueVariable(_Resource): - _endpoint_name: ClassVar[str] = "fixedValueVariables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - requiredPeriodType: PeriodType = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - defaultElement: FixedValue = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - referenceClassType: str = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Formula(_Resource): - _endpoint_name: ClassVar[str] = "formulas" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, 
repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class GenericClassifier(_Resource): - _endpoint_name: ClassVar[str] = "genericClassifiers" - classifierSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - classifierId: str = field(default=None, metadata=_meta(id=True), repr=True) - name: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - selectorId: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - 
genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class GenericClassifierType(_Resource): - _endpoint_name: ClassVar[str] = "genericClassifierTypes" - genericClassifierTypeSeq: int = field( - default=None, metadata=_meta(seq=True), repr=False - ) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - etag: str = 
field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class GlobalFieldName(_Resource): - _endpoint_name: ClassVar[str] = "globalFieldNames" - globalFieldNameSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - globalFieldNameDataTypeLength: int = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Group(_Resource): - _endpoint_name: ClassVar[str] = "groups" - groupSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - policy: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Incentive(_Resource): - _endpoint_name: ClassVar[str] = "incentives" - incentiveSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, repr=False) - ruleElementOwnerSeq: str = field(default=None, repr=False) - isActive: bool = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - quota: Quota = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - releaseDate: datetime = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) 
- attainment: str = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - position: Position = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, 
repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class LookUpTable(_Resource): - _endpoint_name: ClassVar[str] = "relationalMDLTs" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - businessUnit: list[BusinessUnit] = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - returnType: str = field(default=None, repr=False) - returnUnitType: UnitType = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - treatNullAsZero: bool = field(default=None, repr=False) - expressionTypeCounts: str = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - dimensions: list[LookUpTableDimension] = field(default=None, repr=False) - indices: list[LookUpTableIndice] = field(default=None, repr=False) - etag: str = field(default=None, 
metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class LookUpTableDimension(_Resource): - _endpoint_name: ClassVar[str] = "lookupTableDimension" - dimensionSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - removeDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - displayOrder: int = field(default=None, repr=False) - dimensionType: int = field(default=None, repr=False) - dimensionSlot: int = field(default=None, repr=False) - dimensionUnitType: UnitType = field(default=None, repr=False) - isRanged: bool = field(default=None, repr=False) - includeStartInRange: bool = field(default=None, repr=False) - includeEndInRange: bool = field(default=None, repr=False) - flags: str = field(default=None, repr=False) - MDLT: LookUpTable = field(default=None, repr=False) # pylint: disable=invalid-name - categoryTree: CategoryTree = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class LookUpTableIndice(_Resource): - _endpoint_name: ClassVar[str] = "lookuptableIndice" - ordinal: int = field(default=None, repr=False) - displayOrder: int = field(default=None, repr=False) - minString: str = field(default=None, repr=False) - maxString: str = field(default=None, repr=False) - minValue: str = field(default=None, repr=False) - maxValue: str = field(default=None, repr=False) - minDate: date = field(default=None, repr=False) - maxDate: date = field(default=None, repr=False) - validStart: date = field(default=None, repr=False) - validEnd: date = field(default=None, repr=False) 
- classifier: str = field(default=None, repr=False) # Implement Classifier - category: str = field(default=None, repr=False) # Implement Category - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - createDate: datetime = field(default=None, repr=False) - removeDate: datetime = field(default=None, repr=False) - MDLT: LookUpTable = field(default=None, repr=False) # pylint: disable=invalid-name - dimensionSeq: LookUpTableDimension = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class LookUpTableVariable(_Resource): - _endpoint_name: ClassVar[str] = "lookUpTableVariables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - requiredPeriodType: PeriodType = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - defaultElement: str = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - referenceClassType: str = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = 
field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Measurement(_Resource): - _endpoint_name: ClassVar[str] = "measurements" - measurementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - numberOfCredits: Value = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = 
field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PrimaryMeasurement(_Resource): - _endpoint_name: ClassVar[str] = "primaryMeasurements" - measurementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - numberOfCredits: Value = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - isPrivate: bool = 
field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, 
repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class SecondaryMeasurement(_Resource): - _endpoint_name: ClassVar[str] = "secondaryMeasurements" - measurementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - numberOfCredits: Value = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - isPrivate: bool = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - 
genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Message(_Resource): - _endpoint_name: ClassVar[str] = "messages" - messageSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - messageKey: str = field(default=None, metadata=_meta(id=True), repr=True) - messageTimeStamp: datetime = field(default=None, repr=False) - argumentCount: int = field(default=None, repr=False) - subCategory: str = field(default=None, repr=False) - messageLog: MessageLog = field(default=None, repr=False) - module: str = field(default=None, repr=False) - rule: Rule = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - messageType: str = field(default=None, repr=False) - runPeriod: Period = field(default=None, repr=False) - objectSeq: str = field(default=None, repr=False) - salesTransaction: SalesTransaction = field(default=None, repr=False) - position: Position = field(default=None, repr=False) - category: str = field(default=None, 
repr=False) - credit: Credit = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class MessageLog(_Resource): - _endpoint_name: ClassVar[str] = "messageLogs" - messageLogSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - sourceSeq: str = field(default=None, repr=False) - componentName: str = field(default=None, repr=False) - logDate: datetime = field(default=None, repr=False) - logName: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Model(_Resource): - _endpoint_name: ClassVar[str] = "models" - modelSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - modelName: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - status: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - budgetType: str = field(default=None, repr=False) - budgetValue: Value = field(default=None, repr=False) - useSourcePeriodAsInput: str = field(default=None, repr=False) - sourceAdjustment: Value = field(default=None, repr=False) - sourceStartPeriod: Period = field(default=None, repr=False) - sourceEndPeriod: Period = field(default=None, repr=False) - modelStartPeriod: Period = field(default=None, repr=False) - modelEndPeriod: Period = field(default=None, repr=False) - modificationDate: datetime = field(default=None, repr=False) - budgetPercentValue: Value = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - useNewTransactionAsInput: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Participant(_Resource): - _endpoint_name: ClassVar[str] = "participants" - payeeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - payeeId: str = 
field(default=None, metadata=_meta(id=True), repr=True) - firstName: str = field(default=None, repr=False) - lastName: str = field(default=None, repr=False) - middleName: str = field(default=None, repr=False) - prefix: str = field(default=None, repr=False) - suffix: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - hireDate: date = field(default=None, repr=False) - terminationDate: date = field(default=None, repr=False) - salary: Value = field(default=None, repr=False) - userId: str = field(default=None, repr=False) - participantEmail: str = field(default=None, repr=False) - preferredLanguage: str = field(default=None, repr=False) - eventCalendar: str = field(default=None, repr=False) - taxId: str = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = 
field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Payment(_Resource): - _endpoint_name: ClassVar[str] = "payments" - paymentSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - position: Position = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - earningCodeId: str = field(default=None, repr=False) - trialPipelineRun: PipelineRun = field(default=None, repr=False) - trialPipelineRunDate: datetime = field(default=None, repr=False) - postPipelineRun: PipelineRun = field(default=None, repr=False) - postPipelineRunDate: datetime = 
field(default=None, repr=False) - reason: str = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - 
genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PaymentMapping(_Resource): - _endpoint_name: ClassVar[str] = "paymentMappings" - paymentMappingSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - sourceTableName: str = field(default=None, repr=False) - sourceAttribute: str = field(default=None, repr=False) - paymentAttribute: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PaymentSummary(_Resource): - _endpoint_name: ClassVar[str] = "paymentSummarys" - paymentSummarySeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - position: Position = field(default=None, repr=False) - participant: Participant = field(default=None, repr=False) - period: Period = field(default=None, repr=False) - earningGroupId: str = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - pipelineRunDate: datetime = field(default=None, repr=False) - appliedDeposit: Value = field(default=None, repr=False) - priorBalance: Value = field(default=None, repr=False) - balance: Value = field(default=None, repr=False) - payment: Value = field(default=None, repr=False) - Deposit: Value = field(default=None, repr=False) # pylint: disable=invalid-name - outstandingBalance: Value = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), 
repr=False) - - -@dataclass -class Period(_Resource): - _endpoint_name: ClassVar[str] = "periods" - periodSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - shortName: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - calendar: Calendar = field(default=None, repr=False) - periodType: PeriodType = field(default=None, repr=True) - parent: Period = field(default=None, repr=False) - startDate: date = field(default=None, repr=False) - endDate: date = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PeriodType(_Resource): - _endpoint_name: ClassVar[str] = "periodTypes" - periodTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - level: int = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PipelineRun(_Resource): - _endpoint_name: ClassVar[str] = "pipelines" - pipelineRunSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - batchName: str = field(default=None, repr=False) - command: str = field(default=None, repr=False) - dateSubmitted: datetime = field(default=None, repr=False) - 
description: str = field(default=None, repr=True) - endDateScheduled: datetime = field(default=None, repr=False) - groupName: str = field(default=None, repr=False) - isolationLevel: str = field(default=None, repr=False) - message: str = field(default=None, repr=False) - modelRun: str = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - numErrors: int = field(default=None, repr=False) - numWarnings: int = field(default=None, repr=False) - period: str = field(default=None, repr=False) - priority: str = field(default=None, repr=False) - processingUnit: str = field(default=None, repr=False) - productVersion: str = field(default=None, repr=False) - removeDate: datetime = field(default=None, repr=False) - reportTypeName: str = field(default=None, repr=False) - runMode: str = field(default=None, repr=False) - runParameters: str = field(default=None, repr=False) - runProgress: str = field(default=None, repr=False) - scheduleDay: str = field(default=None, repr=False) - scheduleFrequency: str = field(default=None, repr=False) - schemaVersion: str = field(default=None, repr=False) - stageTables: list[str] = field(default=None, repr=False) - stageType: str = field(default=None, repr=False) - startDateScheduled: datetime = field(default=None, repr=False) - startTime: datetime = field(default=None, repr=True) - state: str = field(default=None, repr=True) - status: str = field(default=None, repr=False) - stopTime: datetime = field(default=None, repr=False) - storedProcVersion: str = field(default=None, repr=False) - targetDatabase: str = field(default=None, repr=False) - traceLevel: str = field(default=None, repr=False) - userId: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Plan(_Resource): - _endpoint_name: ClassVar[str] = "plans" - ruleElementOwnerSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) 
- name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - calendar: Calendar = field(default=None, repr=False) - variableAssignments: VariableAssignment = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Position(_Resource): - _endpoint_name: ClassVar[str] = "positions" - ruleElementOwnerSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=date(2200, 1, 1), repr=True) - creditStartDate: date = field(default=None, repr=False) - creditEndDate: date = field(default=None, repr=False) - processingStartDate: date = field(default=None, repr=False) - processingEndDate: date = field(default=None, repr=False) - targetCompensation: Value = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default_factory=list, repr=False) - manager: Position = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), 
repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - title: Title = field(default=None, repr=False) - positionGroup: PositionGroup = field(default=None, repr=False) - payee: Participant = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - plan: Plan = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, 
repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - variableAssignments: list[VariableAssignment] = field( - default_factory=list, repr=False - ) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PositionGroup(_Resource): - _endpoint_name: ClassVar[str] = "positionGroups" - positionGroupSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PositionRelation(_Resource): - _endpoint_name: ClassVar[str] = "positionRelations" - positionRelationSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - parentPosition: Position = field(default=None, repr=False) - positionRelationType: PositionRelationType = field(default=None, repr=False) - 
childPosition: Position = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class PositionRelationType(_Resource): - _endpoint_name: ClassVar[str] = "positionRelationTypes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - relations: PositionRelation = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class ProcessingUnit(_Resource): - _endpoint_name: ClassVar[str] = "processingUnits" - processingUnitSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Product(_Resource): - _endpoint_name: ClassVar[str] = "products" - classifierSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - classifierId: str = field(default=None, metadata=_meta(id=True), repr=True) - name: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - selectorId: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - cost: Value = field(default=None, repr=False) - price: Value = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - 
createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, 
repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Quota(_Resource): - _endpoint_name: ClassVar[str] = "quotas" - quotaSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - businessUnit: list[BusinessUnit] = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - unitType: UnitType = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class RateTable(_Resource): - _endpoint_name: ClassVar[str] = "rateTables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - 
default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - businessUnit: list[BusinessUnit] = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class RateTableVariable(_Resource): - _endpoint_name: ClassVar[str] = "rateTableVariables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - requiredPeriodType: PeriodType = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - unitType: UnitType = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - defaultElement: str = field(default=None, repr=False) - referenceClassType: str = field(default=None, repr=False) - returnType: str = field(default=None, 
repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Reason(_Resource): - _endpoint_name: ClassVar[str] = "reasons" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - reasonId: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Rule(_Resource): - _endpoint_name: ClassVar[str] = "rules" - ruleSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - businessUnit: list[BusinessUnit] = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - type: RuleType = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - etag: str = 
field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class RuleType(_Resource): - _endpoint_name: ClassVar[str] = "ruleType" - name: str = field(default=None, metadata=_meta(id=True), repr=True) - id: int = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class SalesOrder(_Resource): - _endpoint_name: ClassVar[str] = "salesOrders" - salesOrderSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - orderId: str = field(default=None, metadata=_meta(id=True), repr=True) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - 
genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class SalesTransaction(_Resource): - _endpoint_name: ClassVar[str] = "salesTransactions" - salesTransactionSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - salesOrder: SalesOrder = field(default=None, repr=False) - lineNumber: Value = field(default=None, repr=False) - subLineNumber: Value = field(default=None, repr=False) - value: Value = field(default=None, repr=False) - preadjustedValue: Value = field(default=None, repr=False) - isRunnable: bool = field(default=None, repr=False) - compensationDate: date = field(default=None, repr=False) - eventType: EventType = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, 
metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - modificationDate: datetime = field(default=None, repr=False) - reason: Reason = field(default=None, repr=False) - channel: str = field(default=None, repr=False) - poNumber: str = field(default=None, repr=False) - dataSource: str = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - shipToAddress: Address = field(default=None, repr=False) - otherToAddress: Address = field(default=None, repr=False) - billToAddress: Address = field(default=None, repr=False) - transactionAssignments: list[TransactionAssignment] = field( - default=None, repr=False - ) - discountType: str = field(default=None, repr=False) - productName: str = field(default=None, repr=False) - productDescription: str = field(default=None, repr=False) - paymentTerms: str = field(default=None, repr=False) - processingUnit: ProcessingUnit = field( - default=None, metadata=_meta(expand=False), repr=False - ) - unitValue: Value = field(default=None, repr=False) - accountingDate: date = field(default=None, repr=False) - discountPercent: Value = field(default=None, repr=False) - comments: str = field(default=None, repr=False) - productId: str = field(default=None, repr=False) - numberOfUnits: Value = field(default=None, repr=False) - nativeCurrencyAmount: Value = field(default=None, repr=False) - nativeCurrency: str = field(default=None, repr=False) - pipelineRun: PipelineRun = field(default=None, repr=False) - alternateOrderNumber: str = field(default=None, repr=False) - originTypeId: str = field(default=None, repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: 
str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericAttribute17: str = field(default=None, repr=False) - genericAttribute18: str = field(default=None, repr=False) - genericAttribute19: str = field(default=None, repr=False) - genericAttribute20: str = field(default=None, repr=False) - genericAttribute21: str = field(default=None, repr=False) - genericAttribute22: str = field(default=None, repr=False) - genericAttribute23: str = field(default=None, repr=False) - genericAttribute24: str = field(default=None, repr=False) - genericAttribute25: str = field(default=None, repr=False) - genericAttribute26: str = field(default=None, repr=False) - genericAttribute27: str = field(default=None, repr=False) - genericAttribute28: str = field(default=None, repr=False) - genericAttribute29: str = field(default=None, repr=False) - genericAttribute30: str = field(default=None, repr=False) - genericAttribute31: str = field(default=None, repr=False) - genericAttribute32: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, 
repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - isPurged: bool = field(default=None, repr=False) - - -@dataclass -class StatusCode(_Resource): - _endpoint_name: ClassVar[str] = "statusCodes" - dataTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - type: str = field(default=None, repr=False) - isActive: bool = field(default=None, repr=False) - status: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Territory(_Resource): - _endpoint_name: ClassVar[str] = "territories" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = 
field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - definition: str = field(default=None, repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - returnType: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - owningElement: str = field(default=None, repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class TerritoryVariable(_Resource): - _endpoint_name: ClassVar[str] = "territoryVariables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - defaultElement: str = field(default=None, 
repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - referenceClassType: str = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - requiredPeriodType: PeriodType = field(default=None, repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Title(_Resource): - _endpoint_name: ClassVar[str] = "titles" - ruleElementOwnerSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - plan: str = field(default=None, repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - genericAttribute1: str = field(default=None, repr=False) - genericAttribute2: str = field(default=None, repr=False) - genericAttribute3: str = field(default=None, repr=False) - genericAttribute4: str = field(default=None, repr=False) - genericAttribute5: str = field(default=None, repr=False) - genericAttribute6: str = field(default=None, repr=False) - genericAttribute7: str = field(default=None, repr=False) - genericAttribute8: str = field(default=None, repr=False) - genericAttribute9: str = field(default=None, repr=False) - genericAttribute10: str = 
field(default=None, repr=False) - genericAttribute11: str = field(default=None, repr=False) - genericAttribute12: str = field(default=None, repr=False) - genericAttribute13: str = field(default=None, repr=False) - genericAttribute14: str = field(default=None, repr=False) - genericAttribute15: str = field(default=None, repr=False) - genericAttribute16: str = field(default=None, repr=False) - genericNumber1: Value = field(default=None, repr=False) - genericNumber2: Value = field(default=None, repr=False) - genericNumber3: Value = field(default=None, repr=False) - genericNumber4: Value = field(default=None, repr=False) - genericNumber5: Value = field(default=None, repr=False) - genericNumber6: Value = field(default=None, repr=False) - genericDate1: date = field(default=None, repr=False) - genericDate2: date = field(default=None, repr=False) - genericDate3: date = field(default=None, repr=False) - genericDate4: date = field(default=None, repr=False) - genericDate5: date = field(default=None, repr=False) - genericDate6: date = field(default=None, repr=False) - genericBoolean1: bool = field(default=None, repr=False) - genericBoolean2: bool = field(default=None, repr=False) - genericBoolean3: bool = field(default=None, repr=False) - genericBoolean4: bool = field(default=None, repr=False) - genericBoolean5: bool = field(default=None, repr=False) - genericBoolean6: bool = field(default=None, repr=False) - variableAssignments: list[VariableAssignment] = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class TransactionAssignment(_Resource): - _endpoint_name: ClassVar[str] = "transactionAssignment" - titleName: str = field(default=None, repr=False) - payeeId: str = field(default=None, repr=False) - positionName: str = field(default=None, repr=False) - salesOrder: SalesOrder = field(default=None, repr=False) - salesTransactionSeq: int = field(default=None, repr=False) - setNumber: int = 
field(default=None, repr=False) - compensationDate: date = field(default=None, repr=False) - processingUnit: ProcessingUnit = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class UnitType(_Resource): - _endpoint_name: ClassVar[str] = "unitTypes" - unitTypeSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - symbol: str = field(default=None, repr=False) - currencyLocale: str = field(default=None, repr=False) - formatting: str = field(default=None, repr=False) - positionOfSymbol: int = field(default=None, repr=False) - reportingScale: str = field(default=None, repr=False) - scale: int = field(default=None, repr=False) - valueClass: dict = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class User(_Resource): - _endpoint_name: ClassVar[str] = "users" - userSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - id: str = field(default=None, metadata=_meta(id=True), repr=True) - userName: str = field(default=None, repr=False) - description: str = field(default=None, repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - readOnlyBusinessUnitList: list[BusinessUnit] = field(default=None, repr=False) - fullAccessBusinessUnitList: list[BusinessUnit] = field(default=None, 
repr=False) - groups: list[Group] = field(default=None, repr=False) - email: str = field(default=None, repr=False) - preferredLanguage: str = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Value(_Resource): - _endpoint_name: ClassVar[str] = "value" - value: float = field(default=None, repr=False) - unitType: UnitType = field(default=None, repr=False) - etag: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class Variable(_Resource): - _endpoint_name: ClassVar[str] = "variables" - ruleElementSeq: int = field(default=None, metadata=_meta(seq=True), repr=False) - name: str = field(default=None, metadata=_meta(id=True), repr=True) - description: str = field(default=None, repr=False) - calendar: Calendar = field(default=None, repr=False) - requiredPeriodType: PeriodType = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - businessUnits: list[BusinessUnit] = field(default=None, repr=False) - plan: str = field(default=None, repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modifiedBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - createdBy: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) - notAllowUpdate: bool = field(default=None, repr=False) - defaultElement: str = field(default=None, repr=False) - referenceClassType: str = field(default=None, repr=False) - returnType: str = field(default=None, repr=False) - owningElement: str = field(default=None, repr=False) - ruleUsage: str = field(default=None, metadata=_meta(json_ignore=True), repr=False) - inputSignature: str = field(default=None, repr=False) - etag: str = field(default=None, 
metadata=_meta(json_ignore=True), repr=False) - - -@dataclass -class VariableAssignment(_Resource): - _endpoint_name: ClassVar[str] = "variableAssignment" - owner: str = field(default=None, repr=False) - variable: str = field(default=None, repr=False) - assignment: str = field(default=None, repr=False) - effectiveStartDate: date = field(default=None, repr=True) - effectiveEndDate: date = field(default=None, repr=False) - createDate: datetime = field( - default=None, metadata=_meta(json_ignore=True), repr=False - ) - modelSeq: Model = field(default=None, metadata=_meta(json_ignore=True), repr=False) diff --git a/setup.py b/setup.py deleted file mode 100644 index 7f1a176..0000000 --- a/setup.py +++ /dev/null @@ -1,4 +0,0 @@ -from setuptools import setup - -if __name__ == "__main__": - setup() diff --git a/src/sapcommissions/__init__.py b/src/sapcommissions/__init__.py new file mode 100644 index 0000000..0006b25 --- /dev/null +++ b/src/sapcommissions/__init__.py @@ -0,0 +1,7 @@ +"""Python SAP Commissions Client.""" + +from .client import CommissionsClient + +__all__ = [ + "CommissionsClient", +] diff --git a/src/sapcommissions/__main__.py b/src/sapcommissions/__main__.py new file mode 100644 index 0000000..7b4809c --- /dev/null +++ b/src/sapcommissions/__main__.py @@ -0,0 +1,139 @@ +"""CLI entry point for Python SAP Commissions Client.""" +import asyncio +import logging +import os +import sys +from logging.config import dictConfig +from pathlib import Path + +import click +from aiohttp import BasicAuth, ClientSession + +from sapcommissions import CommissionsClient +from sapcommissions.deploy import deploy_from_path + +LOGGER = logging.getLogger(__package__) + + +def setup_logging(logfile: Path | None = None, verbose: bool = False) -> None: + """Setup logging, add filehandler if logfile is provided.""" + config = { + "version": 1, + "formatters": { + "standard": { + "format": "%(asctime)s | %(name)-25s | %(levelname)-8s | %(message)s", + "datefmt": "%Y-%m-%d %H:%M:%S", 
+ }, + }, + } + handlers = { + "console": {"class": "logging.StreamHandler", "formatter": "standard"}, + } + if logfile: + handlers["file"] = { + "class": "logging.FileHandler", + "formatter": "standard", + "filename": str(str(logfile)), + } + loggers = { + __package__: { + "handlers": list(handlers.keys()), + "level": "DEBUG" if verbose else "INFO", + }, + } + config["handlers"] = handlers + config["loggers"] = loggers + dictConfig(config) + + +async def async_deploy( + path: Path, + tenant: str, + auth: BasicAuth, + verify_ssl: bool = True, +) -> int: + async with ClientSession(auth=auth) as session: + client = CommissionsClient(tenant, session, verify_ssl) + await deploy_from_path(client, path) + return 0 + + +@click.group() +@click.option( + "-l", + "--logfile", + type=click.Path( + exists=False, + file_okay=True, + dir_okay=False, + writable=True, + resolve_path=False, + path_type=Path + ), + help="Enable logging to a file." +) +@click.option("-v", is_flag=True, help="Increase logging verbosity.") +def cli(logfile: Path | None = None, v: bool = False) -> None: + """ + SAP Commissions command-line entry. + """ + setup_logging(logfile, v) + LOGGER.info("sapcommissions command-line interface") + + +@cli.command() +@click.argument( + "path", + type=click.Path( + exists=True, + file_okay=False, + dir_okay=True, + readable=True, + resolve_path=False, + path_type=Path + ) +) +@click.option("-t", "--tenant", type=str, help="Tenant, for example `CALD-DEV`.") +@click.option("-u", "--username", type=str, help="Username for tenant.") +@click.option("-p", "--password", type=str, help="Password for tenant.") +@click.option("--no-ssl", is_flag=True, help="Disable SSL validation.") +def deploy( + path: Path, + tenant: str | None = None, + username: str | None = None, + password: str | None = None, + no_ssl: bool = False, +): + """ + Deploy rule elements from a directory to the tenant. + + \b + PATH is the directory containing the rule elements to deploy. 
+ + \b + Example usage: `sap deploy . -t CALD-DEV -u spamm -p eggs` + + \b + Tenant, Username and password can also be set using environment variables: + SAP_TENANT Tenant, for example `CALD-DEV`. + SAP_USERNAME Username for the tenant. + SAP_PASSWORD Password for the tenant. + """ + sap_tenant: str = tenant or os.environ.get("SAP_TENANT") + sap_username: str = username or os.environ.get("SAP_USERNAME") + sap_password: str = password or os.environ.get("SAP_PASSWORD") + if not (sap_tenant and sap_username and sap_password): + LOGGER.error("Tenant, Username or password not set") + return 1 + LOGGER.info(f"deploy '{path}' on '{sap_tenant}' by '{sap_username}'") + auth = BasicAuth(sap_username, sap_password) + + if no_ssl: + LOGGER.info("SSL validation disabled") + verify_ssl = not no_ssl + + asyncio.run(async_deploy(path, sap_tenant, auth, verify_ssl)) + return 0 + +if __name__ == "__main__": + sys.exit(cli()) diff --git a/src/sapcommissions/client.py b/src/sapcommissions/client.py new file mode 100644 index 0000000..1989081 --- /dev/null +++ b/src/sapcommissions/client.py @@ -0,0 +1,390 @@ +"""Python SAP Commissions Client.""" +import asyncio +import logging +from collections.abc import AsyncGenerator +from dataclasses import dataclass +from typing import Any, Final, TypeVar + +from aiohttp import ClientError, ClientSession +from pydantic import ValidationError + +from sapcommissions import const, exceptions, model +from sapcommissions.helpers import BooleanOperator, LogicalOperator + +LOGGER: Final[logging.Logger] = logging.getLogger(__name__) +T = TypeVar("T", bound="model._Resource") + + +@dataclass +class CommissionsClient: + """Client interface for interacting with SAP Commissions.""" + tenant: str + session: ClientSession + verify_ssl: bool = True + request_timeout: int = const.REQUEST_TIMEOUT + + @property + def host(self) -> str: + """The fully qualified hostname.""" + return f"https://{self.tenant}.callidusondemand.com" + + async def _request( + self, + 
method: str, + uri: str, + params: dict | None = None, + json: list | None = None, + ) -> dict[str, Any]: + """Send a request.""" + LOGGER.debug(f"Request: {method=}, {uri=}, {params=}") + + try: + async with asyncio.timeout(self.request_timeout): + response = await self.session.request( + method=method, + url=f"{self.host}/{uri}", + params=params, + json=json, + ssl=self.verify_ssl, + ) + except TimeoutError as err: + msg = "Timeout while connecting" + LOGGER.error(msg) + raise exceptions.SAPConnectionError(msg) from err + except ClientError as err: + msg = "Could not connect" + LOGGER.error(msg) + raise exceptions.SAPConnectionError(msg) from err + + if method in ("POST", "PUT") and response.status == const.STATUS_NOT_MODIFIED: + msg = "Resource not modified" + raise exceptions.SAPNotModified(msg) + + if ( + not response.status in const.REQUIRED_STATUS[method] + and response.status != const.STATUS_BAD_REQUEST + ): + text = await response.text() + msg = f"Unexpected status. {response.status}: {text}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + if ( + content_type := response.headers.get("Content-Type") + ) != "application/json": + text = await response.text() + msg = f"Unexpected Content-Type. 
{content_type}: {text}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json = await response.json() + if response.status in (const.STATUS_BAD_REQUEST, const.STATUS_SERVER_ERROR): + raise exceptions.SAPBadRequest(json) + return json + + async def create(self, resource: T) -> T: + """Create a new resource.""" + cls = type(resource) + LOGGER.debug(f"Create {cls.__name__}({resource})") + + endpoint: str = resource.get_endpoint() + attr_resource: str = endpoint.split("/")[-1] + json: dict[str, Any] = resource.model_dump(exclude_none=True) + + try: + response: dict[str, Any] = await self._request( + method="POST", + uri=endpoint, + json=[json], + ) + except exceptions.SAPBadRequest as err: + if not attr_resource in err.data: + msg = f"Unexpected payload. {err.data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + error_data: list[dict[str, Any]] = err.data[attr_resource] + for errors in error_data: + if (error_message := errors.get(const.ATTR_ERROR)): + if const.ERROR_ALREADY_EXISTS in error_message: + raise exceptions.SAPAlreadyExists(error_message) from err + if any(const.ERROR_MISSING_FIELD in value for value in errors.values()): + LOGGER.error(errors) + raise exceptions.SAPMissingField(errors) from err + msg = f"Unexpected error. {error_data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + if not attr_resource in response: + msg = f"Unexpected payload. 
{response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json: list[dict[str, Any]] = response[attr_resource] + data: dict[str, Any] = json[0] + try: + return cls(**data) + except ValidationError as exc: + for error in exc.errors(): + LOGGER.error(f"{error} on {data}") + raise + + async def update(self, resource: T) -> T: + """Update an existing resource.""" + cls = type(resource) + LOGGER.debug(f"Update {cls.__name__}({resource})") + + endpoint: str = resource.get_endpoint() + attr_resource: str = endpoint.split("/")[-1] + json: dict[str, Any] = resource.model_dump(exclude_none=True) + + try: + response: dict[str, Any] = await self._request( + method="PUT", + uri=endpoint, + json=[json], + ) + except exceptions.SAPNotModified: + return resource + except exceptions.SAPBadRequest as err: + if not attr_resource in err.data: + msg = f"Unexpected payload. {err.data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + error_data: list[dict[str, Any]] = err.data[attr_resource] + for errors in error_data: + if (error_message := errors.get(const.ATTR_ERROR)): + LOGGER.error(error_message) + raise exceptions.SAPResponseError(error_message) from err + msg = f"Unexpected error. {error_data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + if not attr_resource in response: + msg = f"Unexpected payload. 
{response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json: list[dict[str, Any]] = response[attr_resource] + data: dict[str, Any] = json[0] + try: + return cls(**data) + except ValidationError as exc: + for error in exc.errors(): + LOGGER.error(f"{error} on {data}") + raise + + async def delete(self, resource: T) -> bool: + """Delete a resource.""" + cls = type(resource) + LOGGER.debug(f"Delete {cls.__name__}({resource})") + + endpoint: str = resource.get_endpoint() + attr_resource: str = endpoint.split("/")[-1] + seq: str = resource.seq + uri: str = f"{endpoint}({seq})" + + try: + response: dict[str, Any] = await self._request( + method="DELETE", + uri=uri, + ) + except exceptions.SAPBadRequest as err: + if not attr_resource in err.data: + msg = f"Unexpected payload. {err.data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + error_data: dict[str, str] = err.data[attr_resource] + if not seq in error_data: + msg = f"Unexpected payload. {error_data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + error_message: str = error_data[seq] + LOGGER.error(error_message) + raise exceptions.SAPResponseError(error_message) from err + + if not attr_resource in response: + msg = f"Unexpected payload. {response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json: dict[str, Any] = response[attr_resource] + if not seq in json: + msg = f"Unexpected payload. 
{json}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + return True + + async def read( + self, + resource_cls: type[T], + *, + filter: BooleanOperator | LogicalOperator | str | None = None, + order_by: list[str] | None = None, + ) -> T: + """Read the first matching resource.""" + LOGGER.debug(f"Read {resource_cls.__name__} {filter=}") + list_resources = self.list( + resource_cls, + filter=filter, + order_by=order_by, + page_size=1, + ) + return await anext(list_resources) + + async def read_seq(self, resource_cls: type[T], seq: str) -> T: + """Read the specified resource.""" + LOGGER.debug(f"Read {resource_cls.__name__}({seq})") + + endpoint: str = resource_cls.get_endpoint() + uri: str = f"{endpoint}({seq})" + + response: dict[str, Any] = await self._request("GET", uri=uri) + try: + return resource_cls(**response) + except ValidationError as exc: + for error in exc.errors(): + LOGGER.error(f"{error} on {response}") + raise + + async def reload(self, resource: T) -> T: + """Reload a fully initiated resource.""" + LOGGER.debug(f"Reload {type(resource).__name__}({resource.seq})") + return await self.read_seq(type(resource), resource.seq) + + async def list( + self, + resource_cls: type[T], + *, + filter: BooleanOperator | LogicalOperator | str | None = None, + order_by: list[str] | None = None, + page_size: int = 10, + raw: bool = False, + ) -> AsyncGenerator[T | dict[str, Any], None]: + """Lists resources of a specified type with optional filtering and sorting.""" + LOGGER.debug(f"List {resource_cls.__name__} {filter=} {order_by=} {page_size=}") + if page_size < 1 or page_size > 100: + raise ValueError(f"page_size ({page_size}) must be between 1 and 100") + + endpoint: str = resource_cls.get_endpoint() + attr_resource: str = endpoint.split("/")[-1] + params: dict[str, str] | None = {const.ATTR_TOP: page_size} + if filter: + params[const.ATTR_FILTER] = str(filter) + if order_by: + params[const.ATTR_ORDERBY] = ",".join(order_by) + + uri: str = 
endpoint + attempt: int = 0 + while uri: + try: + response = await self._request("GET", uri=uri, params=params) + except exceptions.SAPConnectionError: + attempt += 1 + if attempt > 3: + raise + await asyncio.sleep(2.0) + continue + else: + attempt = 0 + + if (next := response.get(const.ATTR_NEXT)): + params = None + uri = "?".join([endpoint, next.split("?", 1)[-1]]) + else: + uri = None + + if not attr_resource in response: + msg = f"Unexpected payload. {response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json: list[dict[str, Any]] = response[attr_resource] + for item in json: + try: + yield item if raw else resource_cls(**item) + except ValidationError as exc: + for error in exc.errors(): + LOGGER.error(f"{error} on {item}") + raise + + async def run_pipeline(self, job: model._Pipeline) -> model.Pipeline: + """Run a pipeline and retrieves the created Pipeline.""" + LOGGER.debug(f"Run pipeline {type(job).__name__}") + endpoint: str = job.get_endpoint() + json: dict[str, Any] = job.model_dump(exclude_none=True) + LOGGER.debug(f"model_dump: {json}") + + try: + response: dict[str, Any] = await self._request( + method="POST", + uri=endpoint, + json=[json], + ) + except exceptions.SAPBadRequest as err: + if not "pipelines" in err.data: + msg = f"Unexpected payload. {err.data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + error_data: dict[str, str] = err.data["pipelines"] + if not "0" in error_data: + msg = f"Unexpected payload. {error_data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + msg = error_data["0"] + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + if not "pipelines" in response: + msg = f"Unexpected payload. {response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + json: dict[str, list[str]] = response["pipelines"] + if not "0" in json: + msg = f"Unexpected payload. 
{json}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + seq: str = json["0"][0] + return await self.read_seq(model.Pipeline, seq) + + async def cancel_pipeline(self, job: model.Pipeline) -> bool: + """Cancel a running pipeline.""" + cls = type(job) + LOGGER.debug(f"Cancel {job.command}({job.pipelineRunSeq})") + + endpoint: str = job.get_endpoint() + uri: str = f"{endpoint}({job.pipelineRunSeq})" + + try: + response: dict[str, Any] = await self._request( + method="DELETE", + uri=uri, + ) + except exceptions.SAPBadRequest as err: + if not job.pipelineRunSeq in err.data: + msg = f"Unexpected payload. {err.data}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + error_message: str = err.data[job.pipelineRunSeq] + if const.ERROR_DELETE_PIPELINE in error_message: + LOGGER.debug(error_message) + return True + msg = f"Unexpected payload. {error_message}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) from err + + if not job.pipelineRunSeq in response: + msg = f"Unexpected payload. 
{response}" + LOGGER.error(msg) + raise exceptions.SAPResponseError(msg) + + return True diff --git a/src/sapcommissions/const.py b/src/sapcommissions/const.py new file mode 100644 index 0000000..a01881c --- /dev/null +++ b/src/sapcommissions/const.py @@ -0,0 +1,167 @@ +"""Constants for Python SAP Commissions Client.""" +from enum import StrEnum +from typing import Final + + +class HTTPMETHOD(StrEnum): + """StrEnum for HTTP request methods.""" + GET = "GET" + POST = "POST" + PUT = "PUT" + DELETE = "DELETE" + + +class PipelineRunStages(StrEnum): + """StrEnum for PipelineRun stages.""" + Classify = "21673573206720515" + Allocate = "21673573206720516" + Reward = "21673573206720518" + Pay = "21673573206720519" + Summarize = "21673573206720531" + Compensate = "21673573206720530" + CompensateAndPay = "21673573206720532" + ResetFromClassify = "21673573206720514" + ResetFromAllocate = "21673573206720523" + ResetFromReward = "21673573206720522" + ResetFromPay = "21673573206720526" + Post = "21673573206720520" + Finalize = "21673573206720521" + Purge = "21673573206720573" + ReportsGeneration = "21673573206720698" + UndoPost = "21673573206720718" + UndoFinalize = "21673573206720721" + CleanupDefferedResults = "21673573206720540" + UpdateAnalytics = "21673573206720701" + +class ImportStages(StrEnum): + """StrEnum for Import stages.""" + Validate = "21673573206720533" + Transfer = "21673573206720534" + ValidateAndTransfer = "21673573206720536" + ValidateAndTransferIfAllValid = "21673573206720537" + TransferIfAllValid = "21673573206720535" + ResetFromValidate = "21673573206720525" + + +class XMLImportStages(StrEnum): + """StrEnum for XMLImport stage.""" + XMLImport = "21673573206720693" + + +class MaintenanceStages(StrEnum): + """StrEnum for Maintenance stage.""" + Maintenance = "21673573206720692" + + +class PipelineRunMode(StrEnum): + """StrEnum for PipelineRun RunMode.""" + FULL = "full" + POSITIONS = "positions" + INCREMENTAL = "incremental" + + +class 
ImportRunMode(StrEnum): + """StrEnum for Import RunMode.""" + ALL = "all" + NEW = "new" + + +class RevalidateMode(StrEnum): + """StrEnum for Import Revalidate.""" + ALL = "all" + ERRORS = "onlyError" + + +class StageTables(StrEnum): + TransactionalData = "TransactionalData" + OrganizationData = "OrganizationData" + ClassificationData = "ClassificationData" + PlanRelatedData = "PlanRelatedData" + + +class ReportType(StrEnum): + Crystal = "Crystal" + WebI = "Webi" + + +class ReportFormat(StrEnum): + Native = "native" + Excel = "excel" + PDF = "pdf" + + +class PipelineState(StrEnum): + """StrEnum for Pipeline state.""" + SCHEDULED = "Scheduled" + RUNNING = "Running" + DONE = "Done" + PENDING = "Pending" + + +class PipelineStatus(StrEnum): + """StrEnum for Pipeline status.""" + RUNNING = "Running" + SUCCESSFUL = "Successful" + CANCELED = "Cancel" + DONE = "Done" + FAILED = "Failed" + _Cacel = "Cacel" + + +REQUEST_TIMEOUT: Final[int] = 30 +STATUS_NOT_MODIFIED: Final[int] = 304 +STATUS_BAD_REQUEST: Final[int] = 400 +STATUS_SERVER_ERROR: Final[int] = 500 +REQUIRED_STATUS: Final[dict[str, tuple[int]]] = { + "GET": (200,), + "POST": (200, 201, STATUS_NOT_MODIFIED), + "PUT": (200, STATUS_NOT_MODIFIED), + "DELETE": (200,), +} + +ATTR_ERROR: Final[str] = "_ERROR_" +ATTR_EXPAND: Final[str] = "expand" +ATTR_FILTER: Final[str] = "$filter" +ATTR_ORDERBY: Final[str] = "orderBy" +ATTR_INLINECOUNT: Final[str] = "inlineCount" +ATTR_NEXT: Final[str] = "next" +ATTR_SKIP: Final[str] = "skip" +ATTR_TOP: Final[str] = "top" +ATTR_TOTAL: Final[str] = "total" + +ERROR_ALREADY_EXISTS: Final[str] = "TCMP_35004" +ERROR_DELETE_PIPELINE: Final[str] = "TCMP_60255" +ERROR_MISSING_FIELD: Final[str] = "TCMP_1002" +ERROR_NOT_FOUND: Final[str] = "TCMP_09007" +ERROR_REFERRED_BY: Final[str] = "TCMP_35001" +ERROR_REMOVE_FAILED: Final[str] = "TCMP_35243" + +MSG_PERIOD_TYPE: Final[str] = "Period Type mismatch" +MSG_SUCCESS_DELETE: Final[str] = "successfully deleted" + +STAGETABLES: Final[dict[str, 
list[str]]] = { + "TransactionalData": [ + "TransactionAndCredit", + "Deposit", + ], + "OrganizationData": [ + "Participant", + "Position", + "Title", + "PositionRelation", + ], + "ClassificationData": [ + "Category", + "Category_Classifiers", + "Customer", + "Product", + "PostalCode", + "GenericClassifier", + ], + "PlanRelatedData": [ + "FixedValue", + "VariableAssignment", + "Quota", + "RelationalMDLT", + ], +} diff --git a/src/sapcommissions/deploy.py b/src/sapcommissions/deploy.py new file mode 100644 index 0000000..ba24296 --- /dev/null +++ b/src/sapcommissions/deploy.py @@ -0,0 +1,190 @@ +"""Deploy module for Python SAP Commissions Client.""" +import asyncio +import csv +import logging +import re +from pathlib import Path +from typing import Any, Callable, Final + +from aiohttp import BasicAuth, ClientSession +from dotenv import dotenv_values + +from sapcommissions import CommissionsClient, model +from sapcommissions.const import PipelineState, PipelineStatus +from sapcommissions.exceptions import SAPAlreadyExists, SAPConnectionError + +LOGGER: Final[logging.Logger] = logging.getLogger(__name__) + +RE_CREDIT_TYPE: Final[re.Pattern] = re.compile(rf"^([a-z0-9_.-]+)?(Credit Type)\.txt$", re.IGNORECASE) +RE_EARNING_CODE: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?(Earning Code)\.txt$", re.IGNORECASE) +RE_EARNING_GROUP: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?(Earning Group)\.txt$", re.IGNORECASE) +RE_EVENT_TYPE: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?(Event Type)\.txt$", re.IGNORECASE) +RE_FIXED_VALUE_TYPE: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?(Fixed Value Type)\.txt$", re.IGNORECASE) +RE_REASON_CODE: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?(Reason Code)\.txt$", re.IGNORECASE) +RE_XML: Final[re.Pattern] = re.compile(r"^([a-z0-9_.-]+)?([a-z0-9_.-]+)\.xml$", re.IGNORECASE) + +def _file_cls(file: Path) -> model._Resource: + if re.match(RE_CREDIT_TYPE, file.name): + return model.CreditType + if re.match(RE_EARNING_CODE, 
file.name): + return model.EarningCode + if re.match(RE_EARNING_GROUP, file.name): + return model.EarningGroup + if re.match(RE_EVENT_TYPE, file.name): + return model.EventType + if re.match(RE_FIXED_VALUE_TYPE, file.name): + return model.FixedValueType + if re.match(RE_REASON_CODE, file.name): + return model.ReasonCode + if re.match(RE_XML, file.name): + return model.XMLImport + raise ValueError("Unidentified filetype", {"filename": file.name}) + +async def _retry( + coroutine_function: Callable, + *args, + exceptions: type[BaseException] | tuple[type[BaseException], ...] | None = None, + retries: int = 3, + delay: float = 3.0, + **kwargs +) -> Any: + """ + Retry a coroutine function a specified number of times, with an optional + specific exception(s) to catch. + + Returns: + The result of the coroutine function if successful. + + Raises: + The last exception caught if retries are exhausted. + """ + if exceptions is None: + exceptions = (Exception,) + elif not isinstance(exceptions, tuple): + exceptions = (exceptions,) + + for attempt in range(retries): + try: + return await coroutine_function(*args, **kwargs) + except Exception as err: + if not isinstance(err, exceptions): + raise + LOGGER.debug(f"Failed attempt {attempt + 1}: {err}") + if attempt + 1 >= retries: + raise + await asyncio.sleep(delay) + + +async def deploy_from_path( + client: CommissionsClient, + path: Path, +) -> dict[Path, list[model._Resource]]: + """Deploy.""" + LOGGER.debug(f"Deploy {path}") + # This is to make sure we recognize each file before we attempt to deploy.
+ files_with_cls: list[tuple[Path, model._Resource]] = [ + (file, _file_cls(file)) + for file in sorted(path.iterdir(), key=lambda x: x.name) + if file.is_file() + ] + results: dict[Path, list[model._Resource]] = {} + for (file, resource_cls) in files_with_cls: + if resource_cls is model.XMLImport: + results[file] = await deploy_xml(client, file) + else: + results[file] = await deploy_resources_from_file(client, file, resource_cls) + return results + +async def deploy_resources_from_file( + client: CommissionsClient, + file: Path, + resource_cls: model._Resource, +) -> list[model._Resource]: + """Deploy file.""" + LOGGER.info(f"Deploy file: {file}") + with open(file, mode="r", encoding="utf-8", newline="") as f_in: + reader = csv.DictReader(f_in) + resources: list[model._Resource] = [ + resource_cls(**row) for row in reader + ] + tasks = [deploy_resource(client, resource) for resource in resources] + return await asyncio.gather(*tasks) + +async def deploy_resource( + client: CommissionsClient, + resource: model._Resource +) -> model._Resource: + """Deploy resource.""" + resource_cls: model._Resource = resource.__class__ + LOGGER.debug(f"Deploy {resource_cls.__name__}: {resource}") + + result: model._Resource | None = None + try: + result = await _retry( + client.create, + resource, + exceptions=SAPConnectionError, + ) + LOGGER.info(f"{resource_cls.__name__} created: {result}") + except SAPAlreadyExists: # Resource exists, update instead + result = await _retry( + client.update, + resource, + exceptions=SAPConnectionError, + ) + LOGGER.info(f"{resource_cls.__name__} updated: {result}") + return result + +async def deploy_xml( + client: CommissionsClient, + file: Path, +) -> list[model.Pipeline]: + """Deploy XML Plan data.""" + LOGGER.debug(f"Deploy Plan data: {file}") + + job: model.XMLImport = model.XMLImport( + xmlFileName=file.name, + xmlFileContent=file.read_text("UTF-8"), + updateExistingObjects=True, + ) + result: model.Pipeline = await _retry( + 
client.run_pipeline, + job, + exceptions=SAPConnectionError, + ) + while result.state != PipelineState.DONE: + await asyncio.sleep(2) + result = await _retry( + client.reload, + result, + exceptions=SAPConnectionError, + ) + + if result.status != PipelineStatus.SUCCESSFUL: + LOGGER.error(f"XML Import failed (errors: {result.numErrors})!") + else: + LOGGER.info(f"Plan data imported: {file}") + return [result] + + +async def main(): + config = dotenv_values("tests/.env") + auth = BasicAuth(login=config["SAP_USERNAME"], password=config["SAP_PASSWORD"]) + path = Path("tests/deploy") + + async with ClientSession(auth=auth) as session: + client = CommissionsClient( + tenant="VFNL-MDEV", + session=session, + verify_ssl=False, + ) + result = await deploy_from_path(client, path) + print(result) + +if __name__ == "__main__": + logging.basicConfig( + level=logging.DEBUG, + format="%(name)-20s | %(levelname)-8s | %(message)s", + ) + base_logger = logging.getLogger("sapcommissions") + base_logger.setLevel(logging.ERROR) + + asyncio.run(main()) diff --git a/src/sapcommissions/exceptions.py b/src/sapcommissions/exceptions.py new file mode 100644 index 0000000..5644af2 --- /dev/null +++ b/src/sapcommissions/exceptions.py @@ -0,0 +1,84 @@ +"""Exceptions for Python SAP Commissions Client.""" +from typing import Any + + +class SAPException(Exception): + """Base exception for Python SAP Commissions Client.""" + + +class SAPConnectionError(SAPException): + """Exception to indicate connection error.""" + + +class SAPResponseError(SAPException): + """Exception to indicate an unexpected response.""" + + +class SAPNotModified(SAPException): + """Exception to indicate 304 - Not Modified response.""" + + +class SAPBadRequest(SAPException): + """Exception to indicate an error with the request.""" + + def __init__(self, data: dict[str, Any]) -> None: + """Initialize a Bad Request exception.""" + self.data = data + super().__init__("Bad Request", {"data": data}) + + +class 
SAPAlreadyExists(SAPException): + """Exception to indicate resource with same key already exists.""" + + +class SAPMissingField(SAPException): + """Exception to indicate one or more required fields are missing.""" + + def __init__(self, fields: dict[str, Any]) -> None: + """Initialize a Missing Required Field exception.""" + self.fields = fields + super().__init__("Missing Required Field(s)", {"fields": fields}) + + +class SAPUnsupportedFileError(SAPException): + """Exception to indicate file is not supported.""" + + +class RequestError(SAPException): + """Exception to indicate an error while handling the server request.""" + + +class ResponseError(SAPException): + """Exception to indicate an error while handling the server response.""" + + +class CRUDError(SAPException): + """Base Exception for CRUD operations.""" + + +class AlreadyExistsError(CRUDError): + """Exception to indicate resource with same key already exists.""" + + +class MissingFieldError(CRUDError): + """Exception to indicate a value is required.""" + + +class PeriodTypeError(CRUDError): + """Exception to indicate a Period Type mismatch.""" + + +class NotModifiedError(CRUDError): + """Exception to indicate resource was not modified.""" + + +class NotFoundError(CRUDError): + """Exception to indicate resource does not exist.""" + + +class ReferredByError(CRUDError): + """Exception to indicate resource is referred by another resource.""" + + +class DeleteFailedError(CRUDError): + """Exception to indicate resource was not deleted.""" diff --git a/src/sapcommissions/helpers.py b/src/sapcommissions/helpers.py new file mode 100644 index 0000000..fbd8ef0 --- /dev/null +++ b/src/sapcommissions/helpers.py @@ -0,0 +1,106 @@ +"""Helpers for Python SAP Commissions Client.""" +from dataclasses import dataclass, field +from datetime import date +from typing import Union + + +@dataclass +class LogicalOperator: + """ + Base class for Logical Operators. 
+ + You cannot create a direct instance of LogicalOperator, + use one of the subclasses instead. + - Equals + - NotEquals + - GreaterThen + - GreaterThenOrEqual + - LesserThen + - LesserThenOrEqual + """ + _operator: str = field(init=False, repr=False) + first: str + second: str | int | date + + def __str__(self) -> str: + if isinstance(self.second, int): + second = f"{self.second}" + elif isinstance(self.second, date): + second = self.second.strftime("%Y-%m-%d") + else: # str + second = f"'{self.second}'" + + return f"{self.first} {self._operator} {second}" + +class Equals(LogicalOperator): + """ + Equal to. + + Supports wildcard operator '*', for example: `Equals('name', 'John *')`. + Supports `null` operator, for example: `Equals('name', 'null')`. + """ + _operator: str = "eq" + +class NotEquals(LogicalOperator): + """ + Not equal to. + + Supports wildcard operator '*', for example: `Equals('name', 'John*')`. + Supports `null` operator, for example: `NotEquals('name', 'null')`. + """ + _operator: str = "ne" + +class GreaterThen(LogicalOperator): + """Greater then.""" + _operator: str = "gt" + +class GreaterThenOrEqual(LogicalOperator): + """Greater then or equals.""" + _operator: str = "ge" + +class LesserThen(LogicalOperator): + """Lesser then.""" + _operator: str = "lt" + +class LesserThenOrEqual(LogicalOperator): + """Lesser then or equals.""" + _operator: str = "le" + + +@dataclass(init=False) +class BooleanOperator: + """ + Base class for Boolean Operators. + + You cannot create a direct instance of LogicalOperator, + use one of the subclasses instead. 
+ - And + - Or + """ + _operator: str = field(init=False, repr=False) + + def __init__( + self, + *conditions: Union[LogicalOperator, "BooleanOperator"] + ): + if not all( + isinstance(m, (LogicalOperator, BooleanOperator)) + and not type(m) in (LogicalOperator, BooleanOperator) + for m in conditions + ): + raise ValueError("conditions must be instance of Boolean- or LogicalOperator") + self.conditions = conditions + + def __str__(self) -> str: + if not self.conditions: + return "" + text: str = f" {self._operator} ".join(str(m) for m in self.conditions) + return f"({text})" if len(self.conditions) > 1 else text + +class And(BooleanOperator): + """All conditions must be true.""" + _operator: str = "and" + +class Or(BooleanOperator): + """Any condition must be true""" + _operator: str = "or" diff --git a/src/sapcommissions/model.py b/src/sapcommissions/model.py new file mode 100644 index 0000000..decf575 --- /dev/null +++ b/src/sapcommissions/model.py @@ -0,0 +1,763 @@ +"""Data models for Python SAP Commissions Client.""" +from datetime import datetime +from typing import Literal + +import pydantic + +from sapcommissions import const + + +class ValueUnitType(pydantic.BaseModel): + """Model for UnitType.""" + name: str + unitTypeSeq: str + + +class Value(pydantic.BaseModel): + """Model for Value.""" + value: int | float + unitType: ValueUnitType + + +class RuleUsage(pydantic.BaseModel): + """Model for RuleUsage.""" + id: str + name: str + + +class _Base(pydantic.BaseModel): + """Base model.""" + _endpoint: str + _attr_seq: str + + @classmethod + def get_endpoint(cls) -> str: + """Return the class endpoint.""" + return cls.__private_attributes__["_endpoint"].default + + @classmethod + def get_attr_seq(cls) -> str: + """Return the seq attribute name.""" + return cls.__private_attributes__["_attr_seq"].default + + @property + def seq(self) -> str | None: + """Return the `seq` attribute value for the resource.""" + return getattr(self, self._attr_seq) + + +class 
_Resource(_Base): + """BaseModel for any Resource.""" + createDate: datetime | None = pydantic.Field(None, exclude=True, repr=False) + createdBy: str | None = pydantic.Field(None, exclude=True, repr=False) + modifiedBy: str | None = pydantic.Field(None, exclude=True, repr=False) + + +class _DataType(_Resource): + """Base class for Data Type resources.""" + _attr_seq: str = "dataTypeSeq" + dataTypeSeq: str | None = None + description: str | None = pydantic.Field(None, validation_alias=pydantic.AliasChoices("description", "Description")) + notAllowUpdate: bool | None = pydantic.Field(None, repr=False) + + +class _RuleElement(_Resource): + """Base class for Rule Element resources.""" + _attr_seq: str = "ruleElementSeq" + ruleElementSeq: str | None = None + + +class _RuleElementOwner(_Resource): + """Base class for Rule Element Owner resources.""" + _attr_seq: str = "ruleElementOwnerSeq" + ruleElementOwnerSeq: str | None = None + + +class _Pipeline(_Base): + """Base class for Pipeline resources.""" + _endpoint: str = "api/v2/pipelines" + _attr_seq: str = "pipelineRunSeq" + pipelineRunSeq: str | None = None + + +class _PipelineJob(_Pipeline): + """Base class for a Pipeline Job""" + command: Literal["PipelineRun", "Import", "XMLImport", "ModelRun", "MaintenanceRun"] + pipelineRunSeq: None = None + runStats: bool = False + + +class _PipelineRunJob(_PipelineJob): + """Base class for a PipelineRun job.""" + command: Literal["PipelineRun"] = "PipelineRun" + periodSeq: str + calendarSeq: str + stageTypeSeq: const.PipelineRunStages + runMode: const.PipelineRunMode = const.PipelineRunMode.FULL + positionGroups: list[str] | None = None + positionSeqs: list[str] | None = None + processingUnitSeq: str | None = None + + @pydantic.model_validator(mode="after") + def check_runmode(self) -> "_PipelineRunJob": + """If runMode is 'positions', positionGroups or positionSeqs must be list.""" + if self.runMode in (const.PipelineRunMode.FULL, const.PipelineRunMode.INCREMENTAL): + if not 
(self.positionGroups is None and self.positionSeqs is None): + raise ValueError("When runMode is 'full' or 'incremental', positionGroups and positionSeqs must be None") + + if self.runMode == const.PipelineRunMode.POSITIONS: + if (self.positionGroups is None) == (self.positionSeqs is None): + raise ValueError("When runMode is 'positions', provide either positionGroups or positionSeqs") + if isinstance(self.positionGroups, list) and not len(self.positionGroups): + raise ValueError("positionGroups cannot be an empty list") + if isinstance(self.positionSeqs, list) and not len(self.positionSeqs): + raise ValueError("positionSeqs cannot be an empty list") + return self + + +class EventType(_DataType): + """Class representation of an Event Type.""" + _endpoint: str = "api/v2/eventTypes" + eventTypeId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("eventTypeId", "ID")) + + +class CreditType(_DataType): + """Credit Type.""" + _endpoint: str = "api/v2/creditTypes" + creditTypeId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("creditTypeId", "ID", "Credit Type ID")) + + +class EarningCode(_DataType): + """Earning Code.""" + _endpoint: str = "api/v2/earningCodes" + earningCodeId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("earningCodeId", "ID")) + + +class EarningGroup(_DataType): + """Earning Group.""" + _endpoint: str = "api/v2/earningGroups" + earningGroupId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("earningGroupId", "ID")) + + +class FixedValueType(_DataType): + """Fixed Value Type.""" + _endpoint: str = "api/v2/fixedValueTypes" + fixedValueTypeId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("fixedValueTypeId", "ID")) + + +class ReasonCode(_DataType): + """Reason Code.""" + _endpoint: str = "api/v2/reasons" + reasonId: str = pydantic.Field(validation_alias=pydantic.AliasChoices("reasonId", "ID")) + + +class BusinessUnit(_Resource): + """Business
Unit.""" + _endpoint: str = "api/v2/businessUnits" + _attr_seq: str = "businessUnitSeq" + businessUnitSeq: str | None = None + name: str + description: str | None = None + processingUnit: str | None = None + + +class ProcessingUnit(_Resource): + """Processing Unit.""" + _endpoint: str = "api/v2/processingUnits" + _attr_seq: str = "processingUnitSeq" + processingUnitSeq: str | None = None + name: str + description: str | None = None + + +class PeriodType(_Resource): + """Period Type.""" + _endpoint: str = "api/v2/periodTypes" + _attr_seq: str = "periodTypeSeq" + periodTypeSeq: str | None = None + name: str + description: str | None = None + level: int | None = None + + +class Calendar(_Resource): + """Calendar.""" + _endpoint: str = "api/v2/calendars" + _attr_seq: str = "calendarSeq" + calendarSeq: str | None = None + name: str + description: str | None = None + minorPeriodType: str | None = None + majorPeriodType: str | None = None + periods: list[str] | None = None + + +class Period(_Resource): + """Period.""" + _endpoint: str = "api/v2/periods" + _attr_seq: str = "periodSeq" + periodSeq: str | None = None + name: str + shortName: str + startDate: datetime + endDate: datetime + periodType: str + calendar: str + description: str | None = None + parent: str | None = None + + +class Assignment(pydantic.BaseModel): + key: str | None = None + ownedKey: str | None = None + + +class Title(_RuleElementOwner): + """Title.""" + _endpoint: str = "api/v2/titles" + name: str + description: str | None = None + effectiveStartDate: datetime + effectiveEndDate: datetime + businessUnits: list[str] | None = None + plan: str | None = None + variableAssignments: Assignment | list[Assignment | str] | None = None + modelSeq: str | None = None + ga1: str | None = pydantic.Field(None, alias="genericAttribute1") + ga2: str | None = pydantic.Field(None, alias="genericAttribute2") + ga3: str | None = pydantic.Field(None, alias="genericAttribute3") + ga4: str | None = pydantic.Field(None, 
alias="genericAttribute4") + ga5: str | None = pydantic.Field(None, alias="genericAttribute5") + ga6: str | None = pydantic.Field(None, alias="genericAttribute6") + ga7: str | None = pydantic.Field(None, alias="genericAttribute7") + ga8: str | None = pydantic.Field(None, alias="genericAttribute8") + ga9: str | None = pydantic.Field(None, alias="genericAttribute9") + ga10: str | None = pydantic.Field(None, alias="genericAttribute10") + ga11: str | None = pydantic.Field(None, alias="genericAttribute11") + ga12: str | None = pydantic.Field(None, alias="genericAttribute12") + ga13: str | None = pydantic.Field(None, alias="genericAttribute13") + ga14: str | None = pydantic.Field(None, alias="genericAttribute14") + ga15: str | None = pydantic.Field(None, alias="genericAttribute15") + ga16: str | None = pydantic.Field(None, alias="genericAttribute16") + gn1: Value | None = pydantic.Field(None, alias="genericNumber1") + gn2: Value | None = pydantic.Field(None, alias="genericNumber2") + gn3: Value | None = pydantic.Field(None, alias="genericNumber3") + gn4: Value | None = pydantic.Field(None, alias="genericNumber4") + gn5: Value | None = pydantic.Field(None, alias="genericNumber5") + gn6: Value | None = pydantic.Field(None, alias="genericNumber6") + gd1: datetime | None = pydantic.Field(None, alias="genericDate1") + gd2: datetime | None = pydantic.Field(None, alias="genericDate2") + gd3: datetime | None = pydantic.Field(None, alias="genericDate3") + gd4: datetime | None = pydantic.Field(None, alias="genericDate4") + gd5: datetime | None = pydantic.Field(None, alias="genericDate5") + gd6: datetime | None = pydantic.Field(None, alias="genericDate6") + gb1: bool | None = pydantic.Field(None, alias="genericBoolean1") + gb2: bool | None = pydantic.Field(None, alias="genericBoolean2") + gb3: bool | None = pydantic.Field(None, alias="genericBoolean3") + gb4: bool | None = pydantic.Field(None, alias="genericBoolean4") + gb5: bool | None = pydantic.Field(None, 
alias="genericBoolean5") + gb6: bool | None = pydantic.Field(None, alias="genericBoolean6") + + +class Position(_RuleElementOwner): + """Position.""" + _endpoint: str = "api/v2/positions" + name: str + description: str | None = None + effectiveStartDate: datetime + effectiveEndDate: datetime + creditStartDate: datetime | None = None + creditEndDate: datetime | None = None + processingStartDate: datetime | None = None + processingEndDate: datetime | None = None + targetCompensation: dict | None = None + processingUnit: str | None = None + businessUnits: list[str] | None = None + manager: str | None = None + title: str | None = None + plan: str | None = None + positionGroup: str | None = None + payee: str | None = None + variableAssignments: Assignment | list[Assignment] | None = None + modelSeq: str | None = None + ga1: str | None = pydantic.Field(None, alias="genericAttribute1") + ga2: str | None = pydantic.Field(None, alias="genericAttribute2") + ga3: str | None = pydantic.Field(None, alias="genericAttribute3") + ga4: str | None = pydantic.Field(None, alias="genericAttribute4") + ga5: str | None = pydantic.Field(None, alias="genericAttribute5") + ga6: str | None = pydantic.Field(None, alias="genericAttribute6") + ga7: str | None = pydantic.Field(None, alias="genericAttribute7") + ga8: str | None = pydantic.Field(None, alias="genericAttribute8") + ga9: str | None = pydantic.Field(None, alias="genericAttribute9") + ga10: str | None = pydantic.Field(None, alias="genericAttribute10") + ga11: str | None = pydantic.Field(None, alias="genericAttribute11") + ga12: str | None = pydantic.Field(None, alias="genericAttribute12") + ga13: str | None = pydantic.Field(None, alias="genericAttribute13") + ga14: str | None = pydantic.Field(None, alias="genericAttribute14") + ga15: str | None = pydantic.Field(None, alias="genericAttribute15") + ga16: str | None = pydantic.Field(None, alias="genericAttribute16") + gn1: Value | None = pydantic.Field(None, alias="genericNumber1") + gn2: 
Value | None = pydantic.Field(None, alias="genericNumber2") + gn3: Value | None = pydantic.Field(None, alias="genericNumber3") + gn4: Value | None = pydantic.Field(None, alias="genericNumber4") + gn5: Value | None = pydantic.Field(None, alias="genericNumber5") + gn6: Value | None = pydantic.Field(None, alias="genericNumber6") + gd1: datetime | None = pydantic.Field(None, alias="genericDate1") + gd2: datetime | None = pydantic.Field(None, alias="genericDate2") + gd3: datetime | None = pydantic.Field(None, alias="genericDate3") + gd4: datetime | None = pydantic.Field(None, alias="genericDate4") + gd5: datetime | None = pydantic.Field(None, alias="genericDate5") + gd6: datetime | None = pydantic.Field(None, alias="genericDate6") + gb1: bool | None = pydantic.Field(None, alias="genericBoolean1") + gb2: bool | None = pydantic.Field(None, alias="genericBoolean2") + gb3: bool | None = pydantic.Field(None, alias="genericBoolean3") + gb4: bool | None = pydantic.Field(None, alias="genericBoolean4") + gb5: bool | None = pydantic.Field(None, alias="genericBoolean5") + gb6: bool | None = pydantic.Field(None, alias="genericBoolean6") + + +class PositionGroup(_Resource): + """Position.""" + _endpoint: str = "api/v2/positionGroups" + _attr_seq: str = "positionGroupSeq" + positionGroupSeq: str | None = None + name: str + businessUnits: list[str] | None = None + + +class AppliedDeposit(_Resource): + """AppliedDeposit.""" + _endpoint: str = "api/v2/appliedDeposits" + _attr_seq: str = "appliedDepositSeq" + appliedDepositSeq: str | None = None + position: str + payee: str + period: str + earningGroupId: str + earningCodeId: str + trialPipelineRun: str + trialPipelineRunDate: datetime + postPipelineRun: str + postPipelineRunDate: datetime + entryNumber: str + value: Value + processingUnit: str | None = None + + +class Balance(_Resource): + """Balance.""" + _endpoint: str = "api/v2/balances" + _attr_seq: str = "balanceSeq" + balanceSeq: str | None = None + position: str + payee: str + 
period: str + earningGroupId: str + earningCodeId: str + trialPipelineRun: str + trialPipelineRunDate: datetime + applyPipelineRun: str + applyPipelineRunDate: datetime + postPipelineRun: str + postPipelineRunDate: datetime + balanceStatusId: str + value: Value + processingUnit: str | None = None + + +class Category(_RuleElement): + """Category.""" + _endpoint: str = "api/v2/categories" + name: str + description: str | None = None + owner: str + parent: str | None = None + returnType: str | None = None + effectiveStartDate: datetime + effectiveEndDate: datetime + businessUnits: list[str] | None = None + ruleUsage: RuleUsage | None = None + owningElement: str | None = None + calendar: str | None = None + inputSignature: str | None = None + modelSeq: str | None = None + ga1: str | None = pydantic.Field(None, alias="genericAttribute1") + ga2: str | None = pydantic.Field(None, alias="genericAttribute2") + ga3: str | None = pydantic.Field(None, alias="genericAttribute3") + ga4: str | None = pydantic.Field(None, alias="genericAttribute4") + ga5: str | None = pydantic.Field(None, alias="genericAttribute5") + ga6: str | None = pydantic.Field(None, alias="genericAttribute6") + ga7: str | None = pydantic.Field(None, alias="genericAttribute7") + ga8: str | None = pydantic.Field(None, alias="genericAttribute8") + ga9: str | None = pydantic.Field(None, alias="genericAttribute9") + ga10: str | None = pydantic.Field(None, alias="genericAttribute10") + ga11: str | None = pydantic.Field(None, alias="genericAttribute11") + ga12: str | None = pydantic.Field(None, alias="genericAttribute12") + ga13: str | None = pydantic.Field(None, alias="genericAttribute13") + ga14: str | None = pydantic.Field(None, alias="genericAttribute14") + ga15: str | None = pydantic.Field(None, alias="genericAttribute15") + ga16: str | None = pydantic.Field(None, alias="genericAttribute16") + gn1: Value | None = pydantic.Field(None, alias="genericNumber1") + gn2: Value | None = pydantic.Field(None, 
alias="genericNumber2") + gn3: Value | None = pydantic.Field(None, alias="genericNumber3") + gn4: Value | None = pydantic.Field(None, alias="genericNumber4") + gn5: Value | None = pydantic.Field(None, alias="genericNumber5") + gn6: Value | None = pydantic.Field(None, alias="genericNumber6") + gd1: datetime | None = pydantic.Field(None, alias="genericDate1") + gd2: datetime | None = pydantic.Field(None, alias="genericDate2") + gd3: datetime | None = pydantic.Field(None, alias="genericDate3") + gd4: datetime | None = pydantic.Field(None, alias="genericDate4") + gd5: datetime | None = pydantic.Field(None, alias="genericDate5") + gd6: datetime | None = pydantic.Field(None, alias="genericDate6") + gb1: bool | None = pydantic.Field(None, alias="genericBoolean1") + gb2: bool | None = pydantic.Field(None, alias="genericBoolean2") + gb3: bool | None = pydantic.Field(None, alias="genericBoolean3") + gb4: bool | None = pydantic.Field(None, alias="genericBoolean4") + gb5: bool | None = pydantic.Field(None, alias="genericBoolean5") + gb6: bool | None = pydantic.Field(None, alias="genericBoolean6") + + +class categoryClassifier(_Resource): + """categoryClassifier.""" + _endpoint: str = "api/v2/categoryClassifiers" + _attr_seq: str = "categoryClassifiersSeq" + categoryClassifiersSeq: str | None = None + categoryTree: str + category: str + classifier: str + effectiveStartDate: datetime + effectiveEndDate: datetime + + +class CategoryTree(_Resource): + """CategoryTree.""" + _endpoint: str = "api/v2/categoryTrees" + _attr_seq: str = "categoryTreeSeq" + categoryTreeSeq: str | None = None + name: str + description: str | None = None + classifierSelectorId: str | None = None + classifierClass: str + effectiveStartDate: datetime + effectiveEndDate: datetime + businessUnits: list[str] | None = None + + +class Commission(_Resource): + """Commission.""" + # TODO: No results. 
+ _endpoint: str = "api/v2/commissions" + _attr_seq: str = "commissionSeq" + commissionSeq: str | None = None + position: str + payee: str + period: str + incentive: str + credit: str + pipelineRun: str + pipelineRunDate: datetime + value: Value + rateValue: Value + entryNumber: Value + businessUnits: list[str] | None = None + processingUnit: str = pydantic.Field(repr=False) + isPrivate: bool | None = None + originTypeId: str + + +class Credit(_Resource): + """Credit.""" + _endpoint: str = "api/v2/credits" + _attr_seq: str = "creditSeq" + creditSeq: str | None = None + name: str + position: str + payee: str + salesOrder: str + salesTransaction: str | None = None + period: str + creditType: str + value: Value + preadjustedValue: Value + originTypeId: str + reason: str | None = None + rule: str | None = None + isRollable: bool | None = None + rollDate: datetime | None = None + isHeld: bool | None = None + releaseDate: datetime | None = None + pipelineRun: str | None = None + pipelineRunDate: datetime | None = None + compensationDate: datetime | None = None + comments: str | None = None + isPrivate: bool | None = None + modelSeq: str | None = None + businessUnits: list[str] | None = None + ga1: str | None = pydantic.Field(None, alias="genericAttribute1") + ga2: str | None = pydantic.Field(None, alias="genericAttribute2") + ga3: str | None = pydantic.Field(None, alias="genericAttribute3") + ga4: str | None = pydantic.Field(None, alias="genericAttribute4") + ga5: str | None = pydantic.Field(None, alias="genericAttribute5") + ga6: str | None = pydantic.Field(None, alias="genericAttribute6") + ga7: str | None = pydantic.Field(None, alias="genericAttribute7") + ga8: str | None = pydantic.Field(None, alias="genericAttribute8") + ga9: str | None = pydantic.Field(None, alias="genericAttribute9") + ga10: str | None = pydantic.Field(None, alias="genericAttribute10") + ga11: str | None = pydantic.Field(None, alias="genericAttribute11") + ga12: str | None = pydantic.Field(None, 
alias="genericAttribute12") + ga13: str | None = pydantic.Field(None, alias="genericAttribute13") + ga14: str | None = pydantic.Field(None, alias="genericAttribute14") + ga15: str | None = pydantic.Field(None, alias="genericAttribute15") + ga16: str | None = pydantic.Field(None, alias="genericAttribute16") + gn1: Value | None = pydantic.Field(None, alias="genericNumber1") + gn2: Value | None = pydantic.Field(None, alias="genericNumber2") + gn3: Value | None = pydantic.Field(None, alias="genericNumber3") + gn4: Value | None = pydantic.Field(None, alias="genericNumber4") + gn5: Value | None = pydantic.Field(None, alias="genericNumber5") + gn6: Value | None = pydantic.Field(None, alias="genericNumber6") + gd1: datetime | None = pydantic.Field(None, alias="genericDate1") + gd2: datetime | None = pydantic.Field(None, alias="genericDate2") + gd3: datetime | None = pydantic.Field(None, alias="genericDate3") + gd4: datetime | None = pydantic.Field(None, alias="genericDate4") + gd5: datetime | None = pydantic.Field(None, alias="genericDate5") + gd6: datetime | None = pydantic.Field(None, alias="genericDate6") + gb1: bool | None = pydantic.Field(None, alias="genericBoolean1") + gb2: bool | None = pydantic.Field(None, alias="genericBoolean2") + gb3: bool | None = pydantic.Field(None, alias="genericBoolean3") + gb4: bool | None = pydantic.Field(None, alias="genericBoolean4") + gb5: bool | None = pydantic.Field(None, alias="genericBoolean5") + gb6: bool | None = pydantic.Field(None, alias="genericBoolean6") + processingUnit: str = pydantic.Field(repr=False) + + +class ResetFromValidate(_Pipeline): + """Run a ResetFromValidate pipeline.""" + _endpoint: str = "api/v2/pipelines/resetfromvalidate" + pipelineRunSeq: None = None + calendarSeq: str + periodSeq: str + batchName: str | None = None + runStats: bool = False + + +class Purge(_Pipeline): + """Run a Purge pipeline.""" + _endpoint: str = "api/v2/pipelines" + pipelineRunSeq: None = None + command: Literal["PipelineRun"] = 
"PipelineRun" + stageTypeSeq: Literal[const.PipelineRunStages.Purge] = const.PipelineRunStages.Purge + batchName: str + module: const.StageTables + + @pydantic.computed_field + def stageTables(self) -> list[str]: + """Compute stageTables field based on module.""" + return const.STAGETABLES[self.module] + + +class Pipeline(_Pipeline): + """Pipeline.""" + pipelineRunSeq: str + command: Literal["PipelineRun", "Import", "XMLImport", "ModelRun", "MaintenanceRun", "CleanupDeferredPipelineResults"] | None + stageType: const.PipelineRunStages | const.ImportStages | const.XMLImportStages | const.MaintenanceStages | None + dateSubmitted: datetime + state: const.PipelineState + userId: str + processingUnit: str = pydantic.Field(repr=False) + period: str | None = None + description: str | None = None + status: const.PipelineStatus | None = None + runProgress: float | None = None + startTime: datetime | None = pydantic.Field(None, repr=False) + stopTime: datetime | None = pydantic.Field(None, repr=False) + startDateScheduled: datetime | None = pydantic.Field(None, repr=False) + batchName: str | None = None + priority: int | None = pydantic.Field(repr=False) + message: str | None = pydantic.Field(None, repr=False) + numErrors: int | None = pydantic.Field(repr=False) + numWarnings: int | None = pydantic.Field(repr=False) + runMode: const.ImportRunMode | const.PipelineRunMode | None = pydantic.Field(None, repr=False) + + @pydantic.field_validator("runProgress", mode="before") + @classmethod + def percent_as_float(cls, value: str) -> float | None: + """Convert percentage string to float.""" + return int(value.removesuffix("%")) / 100 if value else None + + +class Classify(_PipelineRunJob): + """Run a Classify pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Classify] = const.PipelineRunStages.Classify + runMode: Literal[const.PipelineRunMode.FULL, const.PipelineRunMode.INCREMENTAL] = const.PipelineRunMode.FULL + positionGroups: None = None + positionSeqs: None = None + 
+ +class Allocate(_PipelineRunJob): + """Run an Allocate pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Allocate] = const.PipelineRunStages.Allocate + + +class Reward(_PipelineRunJob): + """Run a Reward pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Reward] = const.PipelineRunStages.Reward + runMode: Literal[const.PipelineRunMode.FULL, const.PipelineRunMode.POSITIONS] = const.PipelineRunMode.FULL + + +class Pay(_PipelineRunJob): + """Run a Pay pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Pay] = const.PipelineRunStages.Pay + runMode: Literal[const.PipelineRunMode.FULL, const.PipelineRunMode.POSITIONS] = const.PipelineRunMode.FULL + positionSeqs: None = None + + +class Summarize(_PipelineRunJob): + """Run a Summarize pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Summarize] = const.PipelineRunStages.Summarize + + +class Compensate(_PipelineRunJob): + """Run a Compensate pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Compensate] = const.PipelineRunStages.Compensate + removeStaleResults: bool = False + + +class CompensateAndPay(_PipelineRunJob): + """Run a CompensateAndPay pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.CompensateAndPay] = const.PipelineRunStages.CompensateAndPay + removeStaleResults: bool = False + + +class ResetFromClassify(_PipelineRunJob): + """Run a ResetFromClassify pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.ResetFromClassify] = const.PipelineRunStages.ResetFromClassify + + +class ResetFromAllocate(_PipelineRunJob): + """Run a ResetFromAllocate pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.ResetFromAllocate] = const.PipelineRunStages.ResetFromAllocate + + +class ResetFromReward(_PipelineRunJob): + """Run a ResetFromReward pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.ResetFromReward] = const.PipelineRunStages.ResetFromReward + + +class ResetFromPay(_PipelineRunJob): + """Run a ResetFromPay pipeline.""" + 
stageTypeSeq: Literal[const.PipelineRunStages.ResetFromPay] = const.PipelineRunStages.ResetFromPay + + +class Post(_PipelineRunJob): + """Run a Post pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Post] = const.PipelineRunStages.Post + + +class Finalize(_PipelineRunJob): + """Run a Finalize pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.Finalize] = const.PipelineRunStages.Finalize + + +class ReportsGeneration(_PipelineRunJob): + """Run a ReportsGeneration pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.ReportsGeneration] = const.PipelineRunStages.ReportsGeneration + generateODSReports: Literal[True] = True + reportTypeName: const.ReportType = const.ReportType.Crystal + reportFormatsList: list[const.ReportFormat] + odsReportList: list[str] + boGroupsList: list[str] + runMode: Literal[const.PipelineRunMode.FULL, const.PipelineRunMode.POSITIONS] = const.PipelineRunMode.FULL + + +class UndoPost(_PipelineRunJob): + """Run a UndoPost pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.UndoPost] = const.PipelineRunStages.UndoPost + + +class UndoFinalize(_PipelineRunJob): + """Run a UndoFinalize pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.UndoFinalize] = const.PipelineRunStages.UndoFinalize + + +class CleanupDefferedResults(_PipelineRunJob): + """Run a CleanupDefferedResults pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.CleanupDefferedResults] = const.PipelineRunStages.CleanupDefferedResults + + +class UpdateAnalytics(_PipelineRunJob): + """Run a UpdateAnalytics pipeline.""" + stageTypeSeq: Literal[const.PipelineRunStages.UpdateAnalytics] = const.PipelineRunStages.UpdateAnalytics + +class _XMLImportJob(_PipelineJob): + """Base class for an XMLImport Pipeline job.""" + command: Literal["XMLImport"] = "XMLImport" + stageTypeSeq: const.XMLImportStages + + +class XMLImport(_XMLImportJob): + """Run an XML Import pipeline.""" + stageTypeSeq: Literal[const.XMLImportStages.XMLImport] = 
const.XMLImportStages.XMLImport + xmlFileName: str + xmlFileContent: str + updateExistingObjects: bool = False + + +class _ImportJob(_PipelineJob): + """Base class for an Import job.""" + command: Literal["Import"] = "Import" + stageTypeSeq: const.ImportStages + calendarSeq: str + batchName: str + module: const.StageTables + runMode: const.ImportRunMode = const.ImportRunMode.ALL + + @pydantic.computed_field + def stageTables(self) -> list[str]: + """Compute stageTables field based on module.""" + return const.STAGETABLES[self.module] + + @pydantic.model_validator(mode="after") + def validate_conditional_fields(self) -> "_ImportJob": + """ + Validate conditional required fields. + + - runMode can only be 'new' when importing TransactionalData + """ + if ( + self.module != const.StageTables.TransactionalData + and self.runMode == const.ImportRunMode.NEW + ): + raise ValueError("runMode can only be 'new' when importing TransactionalData") + + return self + + +class Validate(_ImportJob): + """Run a Validate pipeline.""" + stageTypeSeq: Literal[const.ImportStages.Validate] = const.ImportStages.Validate + revalidate: const.RevalidateMode = const.RevalidateMode.ALL + + +class Transfer(_ImportJob): + """Run a Transfer pipeline.""" + stageTypeSeq: Literal[const.ImportStages.Transfer] = const.ImportStages.Transfer + + +class ValidateAndTransfer(_ImportJob): + """Run a ValidateAndTransfer pipeline.""" + stageTypeSeq: Literal[const.ImportStages.ValidateAndTransfer] = const.ImportStages.ValidateAndTransfer + revalidate: const.RevalidateMode = const.RevalidateMode.ALL + + +class ValidateAndTransferIfAllValid(_ImportJob): + """Run a ValidateAndTransferIfAllValid pipeline.""" + stageTypeSeq: Literal[const.ImportStages.ValidateAndTransferIfAllValid] = const.ImportStages.ValidateAndTransferIfAllValid + revalidate: const.RevalidateMode = const.RevalidateMode.ALL + + +class TransferIfAllValid(_ImportJob): + """Run a TransferIfAllValid pipeline.""" + stageTypeSeq: 
Literal[const.ImportStages.TransferIfAllValid] = const.ImportStages.TransferIfAllValid diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..67aaa94 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ +"""Tests for SAP Commissions.""" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..a826c4a --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,48 @@ +"""Config for Pytest.""" +from collections.abc import AsyncGenerator + +import pytest +from aiohttp import BasicAuth, ClientSession +from dotenv import dotenv_values +from sapcommissions import CommissionsClient + + +@pytest.fixture(name="client") +async def fixture_client() -> AsyncGenerator[CommissionsClient, None]: + tenant: str = "VFNL-MDEV" + username, password = dotenv_values("tests/.env").values() + auth: BasicAuth = BasicAuth(username, password) + async with ClientSession(auth=auth) as session: + yield CommissionsClient(tenant, session, verify_ssl=False) + +@pytest.fixture(name="client_dev") +async def fixture_client_dev() -> AsyncGenerator[CommissionsClient, None]: + tenant: str = "VFNL-MDEV" + username, password = dotenv_values("tests/.env").values() + auth: BasicAuth = BasicAuth(username, password) + async with ClientSession(auth=auth) as session: + yield CommissionsClient(tenant, session, verify_ssl=False) + +@pytest.fixture(name="client_tst") +async def fixture_client_tst() -> AsyncGenerator[CommissionsClient, None]: + tenant: str = "VFNL-MTST" + username, password = dotenv_values("tests/.env").values() + auth: BasicAuth = BasicAuth(username, password) + async with ClientSession(auth=auth) as session: + yield CommissionsClient(tenant, session, verify_ssl=False) + +@pytest.fixture(name="client_uat") +async def fixture_client_uat() -> AsyncGenerator[CommissionsClient, None]: + tenant: str = "VFNL-MUAT" + username, password = dotenv_values("tests/.env").values() + auth: BasicAuth = BasicAuth(username, password) + async with 
ClientSession(auth=auth) as session: + yield CommissionsClient(tenant, session, verify_ssl=False) + +@pytest.fixture(name="client_prd") +async def fixture_client_prd() -> AsyncGenerator[CommissionsClient, None]: + tenant: str = "VFNL-MPRD" + username, password = dotenv_values("tests/.env").values() + auth: BasicAuth = BasicAuth(username, password) + async with ClientSession(auth=auth) as session: + yield CommissionsClient(tenant, session, verify_ssl=False) diff --git a/tests/test_base.py b/tests/test_base.py deleted file mode 100644 index fe6898a..0000000 --- a/tests/test_base.py +++ /dev/null @@ -1,73 +0,0 @@ -"""Tests for the sapcommissions module.""" -import unittest -from dataclasses import asdict - -from sapcommissions import Connection - - -class TestConnection(unittest.TestCase): - """Tests for the Connection class.""" - - def test_url(self): - """Test the url attribute.""" - # Create a connection instance to test - conn = Connection( - tenant="spam", environment="eggs", username="user", password="pass" - ) - - # Check that the url property returns the expected value - self.assertEqual(conn.url, "https://spam-eggs.callidusondemand.com") - - def test_api_url(self): - """Test the apiUrl attribute.""" - # Create a connection instance to test - conn = Connection( - tenant="spam", environment="eggs", username="user", password="pass" - ) - - # Check that the api_url property returns the expected value - self.assertEqual(conn.apiUrl, "https://spam-eggs.callidusondemand.com/api") - - def test_api_document(self): - """Test the apiDocument attribute.""" - # Create a connection instance to test - conn = Connection( - tenant="spam", environment="eggs", username="user", password="pass" - ) - - # Check that the api_document property returns the expected value - self.assertEqual( - conn.apiDocument, "https://spam-eggs.callidusondemand.com/APIDocument" - ) - - def test_connection_dataclass(self): - """Test the Connection dataclass.""" - # Check that the Connection dataclass 
is defined correctly - conn_dict = { - "tenant": "spam", - "environment": "eggs", - "username": "user", - "password": "pass", - "verifySsl": True, - } - - # Check that the fields of the Connection instance match the values in conn_dict - conn = Connection(**conn_dict) - self.assertEqual(asdict(conn), conn_dict) - - # Check that the verify_ssl field defaults to True if not provided - del conn_dict["verifySsl"] - conn = Connection(**conn_dict) - conn_dict["verifySsl"] = True # Verify that verify_ssl defaults to True - self.assertEqual(asdict(conn), conn_dict) - - def test_hidden_password(self): - """Test the password visibility.""" - # Create a connection instance to test - conn = Connection( - tenant="spam", environment="eggs", username="foo", password="barbaz" - ) - - # Check that the password property is hidden from the string representation. - self.assertNotIn("barbaz", str(conn)) - self.assertNotIn("barbaz", repr(conn)) diff --git a/tests/test_client.py b/tests/test_client.py new file mode 100644 index 0000000..5ce68f7 --- /dev/null +++ b/tests/test_client.py @@ -0,0 +1,59 @@ +"""Tests for SAP Commissions Client.""" +from collections.abc import AsyncGenerator +from typing import TypeVar + +import pytest +from sapcommissions import client, const, exceptions, model + +T = TypeVar("T", bound="model._Resource") + + +@pytest.fixture(name="cleanup_resources") +async def fixture_cleanup_resource( + client: client.CommissionsClient, +) -> AsyncGenerator[list[T]]: + """Delete created resources from the tenant.""" + created_resources: list[T] = [] + + yield created_resources + + for resource in created_resources: + try: + await client.delete(resource) + except exceptions.SAPResponseError: + const.LOGGER.warning( + f"Failed to delete resource from tenant: {resource}", + ) + +async def test_create_credit_type( + client: client.CommissionsClient, + cleanup_resources: list[T], +) -> None: + """Test create resource.""" + resource = model.CreditType(creditTypeId="TEST") + + 
created = await client.create(resource) + assert created.seq is not None + assert created.creditTypeId == "TEST" + + cleanup_resources.append(created) + +async def test_update_credit_type( + client: client.CommissionsClient, + cleanup_resources: list[T], +) -> None: + """Test update resource.""" + resource = model.CreditType(creditTypeId="TEST") + + created = await client.create(resource) + assert created.seq is not None + assert created.creditTypeId == "TEST" + assert created.description is None + + created.description = "UPDATED" + updated = await client.update(created) + assert created.seq == updated.seq + assert updated.creditTypeId == "TEST" + assert updated.description == "UPDATED" + + cleanup_resources.append(updated) diff --git a/tests/test_dev.py b/tests/test_dev.py new file mode 100644 index 0000000..cda4fbc --- /dev/null +++ b/tests/test_dev.py @@ -0,0 +1,101 @@ +"""Some tests for development.""" +import asyncio +import logging +from collections.abc import AsyncGenerator +from datetime import date +from typing import Any + +import pytest +from pydantic import ValidationError +from sapcommissions import CommissionsClient, model +from sapcommissions.helpers import And, Equals, GreaterThenOrEqual + +LOGGER = logging.getLogger(__name__) + +async def aenumerate(aiterable): + index = 0 + async for item in aiterable: + yield (index, item) + index += 1 + +async def test_endpoint_response(client_dev: CommissionsClient) -> None: + """Perform list operation on an endpoint and analyse result.""" + limit: int = 1000 + cls = model.Credit + items_list: AsyncGenerator[dict[str, Any], None] = client_dev.list( + cls, + raw=True, + page_size=100, + ) + + i: int = 0 + items: list[dict[str, Any]] = [] + async for item in items_list: + i += 1 + items.append(item) + if i > limit: + break + + dict_of_list: dict[str, list[Any]] = {} + for item in items: + # LOGGER.info(item) + for key, value in item.items(): + dict_of_list.setdefault(key, []) + dict_of_list[key].append(value) + 
+ empty_fields: set[str] = set() + none_empty_fields: set[str] = set() + unique_values: dict[str, set] = {} + unmapped_fields: set[str] = set() + + for key, values in dict_of_list.items(): + if key not in cls.model_fields and key != "etag": + if not key in [info.alias for info in cls.model_fields.values() if info.alias]: + unmapped_fields.add(key) + if all(value is None for value in values): + empty_fields.add(key) + if all(value is not None for value in values) and key != "etag": + none_empty_fields.add(key) + try: + if len(set(values)) < len(values): + unique_values.setdefault(key, set()) + unique_values[key].update(values) + except TypeError: + # LOGGER.debug(f"{key=}, {values=}") + try: + for lst in values: + unique_values.setdefault(key, set()) + unique_values[key].update(lst) + except TypeError: + LOGGER.info(f"unique_values for {key}: {values}") + + LOGGER.info(cls.__name__) + LOGGER.info(f"fields={set(dict_of_list.keys())}") + LOGGER.info(f"{empty_fields=}") + LOGGER.info(f"{none_empty_fields=}") + LOGGER.info(f"{unmapped_fields=}") + for key, values in unique_values.items(): + if key in unmapped_fields and key != "etag": + LOGGER.info(f"unique_values for {key}: {values}") + + # Test for validation errors + validation_errors: dict[str, set[str]] = {} + for item in items: + try: + # LOGGER.info(cls(**item)) + cls(**item) + except ValidationError as exc: + errors = exc.errors() + for error in errors: + LOGGER.debug(error) + loc = error['loc'][0] + msg = error["msg"] + input = error["input"] + validation_errors.setdefault(loc, set()) + validation_errors[loc].add(f"{input}: {msg}") + for key, values in validation_errors.items(): + LOGGER.error(f"validation_errors for {key}: {values}") + + +if __name__ == "__main__": + pytest.main([__file__]) diff --git a/tests/test_endpoints.py b/tests/test_endpoints.py deleted file mode 100644 index aef8d4d..0000000 --- a/tests/test_endpoints.py +++ /dev/null @@ -1,328 +0,0 @@ -import unittest -from unittest.mock import 
MagicMock, Mock, patch - -from requests import HTTPError - -from sapcommissions import Connection -from sapcommissions.endpoints import _Client, _Endpoint -from sapcommissions.exceptions import AuthenticationError, ClientError, ServerError - - -class TestClient(unittest.TestCase): - def setUp(self): - self.client = _Client("https://example.com", "username", "password") - - @patch("sapcommissions.endpoints.Session.request") - def test_request_success(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "application/json" - mock_response.json.return_value = {"foo": "bar"} - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - result = self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - # Check that client returned expected result - self.assertEqual(result, {"foo": "bar"}) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_not_json(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "spam eggs" - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(ValueError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - 
@patch("sapcommissions.endpoints.Session.request") - def test_request_error_400(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 400 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(ClientError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_401(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 401 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(AuthenticationError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_403(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 403 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(AuthenticationError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": 
"value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_404(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 404 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(ClientError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_412(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 412 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(ClientError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_request_error_500(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 500 - mock_response.raise_for_status.side_effect = HTTPError - - # Configure mock request to return mocked response - 
mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - with self.assertRaises(ServerError): - self.client.request( - method="GET", - uri="/test", - parameters={"param": "value"}, - ) - - # Check that mock request was called with correct arguments - mocked_request.assert_called_once_with( - method="GET", - url="https://example.com/test", - params={"param": "value"}, - json=None, - ) - - @patch("sapcommissions.endpoints.Session.request") - def test_get_success(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "application/json" - mock_response.json.return_value = {"foo": "bar"} - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - result = self.client.get("/test") - - # Check that client returned expected result - self.assertEqual(result, {"foo": "bar"}) - - @patch("sapcommissions.endpoints.Session.request") - def test_delete_success(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "application/json" - mock_response.json.return_value = {"foo": "bar"} - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - result = self.client.delete("/test") - - # Check that client returned expected result - self.assertEqual(result, {"foo": "bar"}) - - @patch("sapcommissions.endpoints.Session.request") - def test_post_success(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "application/json" - mock_response.json.return_value = {"foo": "bar"} - - # Configure mock request to return mocked response - 
mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - result = self.client.post("/test", {"spam": "eggs"}) - - # Check that client returned expected result - self.assertEqual(result, {"foo": "bar"}) - - @patch("sapcommissions.endpoints.Session.request") - def test_put_success(self, mocked_request): - # Configure mock response - mock_response = MagicMock() - mock_response.status_code = 200 - mock_response.headers.get.return_value = "application/json" - mock_response.json.return_value = {"foo": "bar"} - - # Configure mock request to return mocked response - mocked_request.return_value.__enter__.return_value = mock_response - - # Perform request - result = self.client.put("/test", {"spam": "eggs"}) - - # Check that client returned expected result - self.assertEqual(result, {"foo": "bar"}) - - -class TestEndpoint(unittest.TestCase): - def setUp(self): - # Create a mock connection for testing - self.connection = Connection("spam", "eggs", "user", "pass") - - # Create a mock resource to be used by the endpoint - self.resource = Mock() - self.resource._name = "test_resource" - - # Create the endpoint to test - self.endpoint = _Endpoint(connection=self.connection) - self.endpoint.resource = self.resource - - def test_name(self): - # Check that the name property returns the correct value - self.assertEqual(self.endpoint.name, "test_resource") - - def test_url(self): - # Check that the url property returns the correct value - self.assertEqual(self.endpoint.url, "/v2/test_resource") - - def test_init(self): - # Check that the endpoint was initialized correctly - self.assertEqual( - self.endpoint._client.baseUrl, - "https://spam-eggs.callidusondemand.com/api", - ) - self.assertEqual(self.endpoint._client.auth.username, "user") - self.assertEqual(self.endpoint._client.auth.password, "pass") - self.assertTrue(self.endpoint._client.verify) - - -if __name__ == "__main__": - unittest.main() diff --git a/tests/test_model.py b/tests/test_model.py 
new file mode 100644 index 0000000..2d66c11 --- /dev/null +++ b/tests/test_model.py @@ -0,0 +1,57 @@ +"""Test for SAP Commissions Models.""" +import logging +from collections.abc import Generator +from inspect import isclass +from typing import TypeVar, Union + +import pytest +from pydantic import BaseModel +from pydantic.fields import FieldInfo, ModelPrivateAttr +from sapcommissions import model + +LOGGER = logging.getLogger(__name__) +T = TypeVar("T", bound="model._Resource") + + +def list_resource_cls() -> Generator[type[model._Resource], None, None]: + for name in dir(model): + obj = getattr(model, name) + if ( + isclass(obj) + and issubclass(obj, model._Resource) + and not obj.__name__.startswith("_") + ): + yield obj + + +@pytest.mark.parametrize( + "resource_cls", + list_resource_cls(), +) +def test_resource_basics( + resource_cls: type[T], +) -> None: + """Test list resources.""" + assert issubclass(resource_cls, BaseModel), "resource is not a pydantic model" + assert issubclass(resource_cls, model._Base), "resource is not a subclass of '_Base'" + + # _endpoint + assert hasattr(resource_cls, "_endpoint"), "resource does not have attribute '_endpoint'" + endpoint: ModelPrivateAttr = resource_cls.__private_attributes__["_endpoint"] + assert isinstance(endpoint.default, str), "resource does not have a default value for '_endpoint'" + assert endpoint.default.startswith("api/v2/"), "_endpoint should start with 'api/v2/'" + + # _attr_seq + assert hasattr(resource_cls, "_attr_seq"), "resource does not have attribute '_attr_seq'" + attr_seq: ModelPrivateAttr = resource_cls.__private_attributes__["_attr_seq"] + assert isinstance(attr_seq.default, str), "resource does not have a default value for '_attr_seq'" + assert len(attr_seq.default), "_attr_seq should not be an emptry string" + assert attr_seq.default.endswith("Seq"), "_attr_seq should end with 'Seq'" + if issubclass(resource_cls, model._RuleElementOwner): + assert attr_seq.default == "ruleElementOwnerSeq" + + 
# seq field + assert attr_seq.default in resource_cls.model_fields + seq_field: FieldInfo = resource_cls.model_fields[attr_seq.default] + assert seq_field.annotation == Union[str, None] + assert seq_field.default is None diff --git a/tests/test_pipeline.py b/tests/test_pipeline.py new file mode 100644 index 0000000..d9bb3a8 --- /dev/null +++ b/tests/test_pipeline.py @@ -0,0 +1,173 @@ +"""Tests for running pipelines.""" +import logging +from pathlib import Path + +import pytest +from sapcommissions import CommissionsClient, const, helpers, model + +LOGGER = logging.getLogger(__name__) + + +@pytest.mark.parametrize("pipeline_job", [ + model.Classify, + model.Allocate, + model.Reward, + model.Pay, + model.Summarize, + model.Compensate, + model.CompensateAndPay, + model.ResetFromClassify, + model.ResetFromAllocate, + model.ResetFromReward, + model.ResetFromPay, + model.Post, + model.Finalize, + model.UndoPost, + model.UndoFinalize, + model.CleanupDefferedResults, + model.UpdateAnalytics, +]) +async def test_pipelinerun( + client: CommissionsClient, + pipeline_job: model._PipelineRunJob, +) -> None: + """Test running a pipeline on a calendar period.""" + period: model.Period = await client.read( + model.Period, + filter=helpers.Equals("name", "202401 W1"), + ) + job: model._PipelineRunJob = pipeline_job( + calendarSeq=period.calendar, + periodSeq=period.periodSeq, + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.command == job.command + assert result.stageType == job.stageTypeSeq + assert result.period == period.periodSeq + + +async def test_pipelinerun_report( + client: CommissionsClient, +) -> None: + """Test running a pipeline on a calendar period.""" + period: model.Period = await client.read( + model.Period, + filter=helpers.Equals("name", "202401 W1"), + ) + job: model.ReportsGeneration = model.ReportsGeneration( + calendarSeq=period.calendar, + periodSeq=period.periodSeq, + 
reportTypeName=const.ReportType.Crystal, + reportFormatsList=[const.ReportFormat.Excel], + odsReportList=["Outbound Files"], + boGroupsList=["VFNL Compensation Reports Admin Group"] + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.command == job.command + assert result.stageType == job.stageTypeSeq + assert result.period == period.periodSeq + + +async def test_xmlimport( + client: CommissionsClient, +) -> None: + """Test running an XML import.""" + file: Path = Path("tests/deploy/07_CR_TEST.xml") + assert file.is_file() + + job = model.XMLImport( + xmlFileName=file.name, + xmlFileContent=file.read_text("UTF-8"), + updateExistingObjects=True, + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.command == job.command + assert result.stageType == job.stageTypeSeq + + +@pytest.mark.parametrize("pipeline_job", [ + model.Validate, + model.Transfer, + model.ValidateAndTransfer, + model.ValidateAndTransferIfAllValid, + model.TransferIfAllValid, +]) +async def test_import( + client: CommissionsClient, + pipeline_job: model._ImportJob, +) -> None: + """Test running an import job.""" + batch_name: str = "test.txt" + calendar: model.Calendar = await client.read( + model.Calendar, + filter=helpers.Equals("name", "Main Weekly Calendar"), + ) + job: model._ImportJob = pipeline_job( + calendarSeq=calendar.calendarSeq, + batchName=batch_name, + module=const.StageTables.TransactionalData, + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.stageType == job.stageTypeSeq + assert result.command == job.command + assert result.batchName == job.batchName + + +async def test_purge( + client: CommissionsClient, +) -> None: + """Test running a Purge pipeline.""" + batch_name: str = "test.txt" + job = model.Purge( + batchName=batch_name, + module=const.StageTables.TransactionalData, + ) + result: model.Pipeline = await client.run_pipeline(job) + 
LOGGER.info(result) + assert result.stageType == job.stageTypeSeq + assert result.command == job.command + assert result.batchName == job.batchName + + +async def test_resetfromvalidate( + client: CommissionsClient, +) -> None: + """Test running a ResetFromValidate pipeline.""" + batch_name: str = "test.txt" + period: model.Period = await client.read( + model.Period, + filter=helpers.Equals("name", "202001 W1"), + ) + job: model.ResetFromValidate = model.ResetFromValidate( + calendarSeq=period.calendar, + periodSeq=period.periodSeq, + batchName=batch_name, + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.stageType == const.ImportStages.ResetFromValidate + assert result.command == "Import" + assert result.batchName == job.batchName + + +async def test_resetfromvalidate_no_batch( + client: CommissionsClient, +) -> None: + """Test running a ResetFromValidate pipeline without batchName.""" + period: model.Period = await client.read( + model.Period, + filter=helpers.Equals("name", "202001 W1"), + ) + job: model.ResetFromValidate = model.ResetFromValidate( + calendarSeq=period.calendar, + periodSeq=period.periodSeq, + ) + result: model.Pipeline = await client.run_pipeline(job) + LOGGER.info(result) + assert result.stageType == const.ImportStages.ResetFromValidate + assert result.command == "Import" + assert result.batchName is None diff --git a/tests/test_resources.py b/tests/test_resources.py deleted file mode 100644 index 71005ea..0000000 --- a/tests/test_resources.py +++ /dev/null @@ -1,182 +0,0 @@ -"""Tests for the sapcommissions.resources module.""" -from __future__ import annotations - -import unittest -from dataclasses import dataclass, field -from datetime import date, datetime -from typing import ClassVar - -from sapcommissions.resources import _deserialize, _Resource, _serialize - -# pylint: skip-file - - -@dataclass -class DummyResource(_Resource): - _endpoint_name: ClassVar[str] = "test" - id: str = 
field(metadata={"seq": True}) - name: str = field(default=None, metadata={"id": True}) - items: list[str] = field(default_factory=list) - parent: DummyResource = field(default=None) - - -class TestResourceMethods(unittest.TestCase): - def setUp(self): - self.resource_dict = { - "id": "123", - "name": "Test Resource", - "items": ["item1", "item2"], - } - self.resource: DummyResource = DummyResource( - id="123", - name="Test Resource", - items=["item1", "item2"], - ) - - def test_name_property(self): - self.assertEqual(DummyResource._name, "test") - - def test_seq_attr_property(self): - self.assertEqual(DummyResource._seqAttr, "id") - - def test_seq_property(self): - self.assertEqual(self.resource._seq, "123") - - def test_id_attr_property(self): - self.assertEqual(DummyResource._idAttr, "name") - - def test_id_property(self): - self.assertEqual(self.resource._id, "Test Resource") - - def test_expands_property(self): - self.assertEqual(DummyResource._expands, ("items", "parent")) - - def test_to_dict_with_seq(self): - self.assertEqual(self.resource.to_dict(False), self.resource_dict) - - def test_to_dict_without_seq(self): - resource_dict = self.resource_dict.copy() - del resource_dict["id"] - self.assertEqual(self.resource.to_dict(), resource_dict) - - def test_from_dict_with_valid_dict(self): - self.assertEqual(self.resource, DummyResource.from_dict(self.resource_dict)) - self.assertEqual(self.resource.id, "123") - self.assertEqual(self.resource.name, "Test Resource") - self.assertEqual(self.resource.items, ["item1", "item2"]) - self.assertEqual(self.resource.parent, None) - - def test_from_dict_with_invalid_dict(self): - resource_dict = { - "spam": "eggs", - "ham": "cheese", - "foo": "bar", - } - with self.assertRaises(TypeError): - DummyResource.from_dict(resource_dict) - - def test_from_dict_with_invalid_field(self): - resource_dict = self.resource_dict.copy() - resource_dict["invalidField"] = "spam" - resource: DummyResource = 
DummyResource.from_dict(resource_dict) - - self.assertEqual(resource, self.resource) - - def test_from_dict_with_valid_reference(self): - resource_dict = { - "objectType": "DummyResource", - "key": "spam", - "displayName": "eggs", - } - resource: DummyResource = DummyResource.from_dict(resource_dict) - self.assertIsInstance(resource, DummyResource) - self.assertEqual(resource.id, "spam") - self.assertEqual(resource.name, "eggs") - - def test_from_dict_with_invalid_reference(self): - resource_dict = { - "objectType": "Sausages", - "key": "spam", - "displayName": "eggs", - } - - with self.assertRaises(TypeError): - DummyResource.from_dict(resource_dict) - - -class TestEncodeDecode(unittest.TestCase): - def test_decode_datetime(self): - dt_str = "2000-01-02T03:04:05.000-06:00" - decoded_dt = _deserialize(dt_str, datetime) - self.assertEqual(decoded_dt, datetime.fromisoformat(dt_str)) - - def test_encode_datetime(self): - dt = datetime(2000, 1, 2, 3, 4, 5) - encoded_dt = _serialize(dt, datetime) - self.assertEqual(encoded_dt, "2000-01-02T03:04:05") - - def test_decode_date(self): - date_str = "2000-01-02T03:04:05.000-06:00" - decoded_date = _deserialize(date_str, date) - self.assertEqual(decoded_date, datetime.fromisoformat(date_str).date()) - - def test_encode_date(self): - dt = date(2000, 1, 2) - encoded_dt = _serialize(dt, date) - self.assertEqual(encoded_dt, "2000-01-02") - - def test_decode_none(self): - decoded_none = _deserialize(None, str) - self.assertIs(decoded_none, None) - - def test_encode_none(self): - encoded_none = _serialize(None, str) - self.assertIs(encoded_none, None) - - def test_decode_string(self): - test_str = "SPAM" - decoded_str = _deserialize(test_str, str) - self.assertEqual(decoded_str, "SPAM") - - def test_encode_string(self): - test_str = "SPAM" - encoded_str = _serialize(test_str, str) - self.assertEqual(encoded_str, "SPAM") - - def test_decode_int(self): - test_int = 1 - decoded_int = _deserialize(test_int, int) - 
self.assertEqual(decoded_int, 1) - - def test_decode_str_int(self): - test_int_str = "1" - decoded_int_str = _deserialize(test_int_str, int) - self.assertEqual(decoded_int_str, 1) - - def test_encode_int(self): - test_int = 1 - encoded_int = _serialize(test_int, int) - self.assertEqual(encoded_int, 1) - - def test_decode_list_str(self): - test_list = ["SPAM", "EGGS"] - decoded_list = _deserialize(test_list, list[str]) - self.assertListEqual(decoded_list, test_list) - - def test_decode_list_error(self): - test_list = ["SPAM", "EGGS"] - with self.assertRaises(TypeError): - _deserialize(test_list, list[str, int]) - with self.assertRaises(TypeError): - _deserialize(test_list, list) - with self.assertRaises(TypeError): - _deserialize(test_list, str) - - def test_decode_unions(self): - test_value = 1 - with self.assertRaises(NotImplementedError): - _deserialize(test_value, int | dict) - - -if __name__ == "__main__": - unittest.main() diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 7299c3c..0000000 --- a/tox.ini +++ /dev/null @@ -1,8 +0,0 @@ -[flake8] -max-line-length = 88 -ignore = - E203, - W503 - -[pycodestyle] -max-line-length = 88