diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index fc5d100..1d59b6f 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -8,11 +8,11 @@ RUN pip install pandas==2.2.3
RUN pip install requests==2.32.3
RUN pip install tabulate==0.9.0
RUN pip install 'git+https://github.com/numbworks/nwshared.git@v1.8.0#egg=nwshared&subdirectory=src'
-RUN pip install 'git+https://github.com/numbworks/nwpackageversions.git@v1.8.0#egg=nwpackageversions&subdirectory=src'
# JUPYTER NOTEBOOK
RUN pip install ipykernel==6.29.5
RUN pip install jupyter==1.1.0
+RUN pip install 'git+https://github.com/numbworks/nwpackageversions.git@v1.8.0#egg=nwpackageversions&subdirectory=src'
# UNIT TESTING
RUN pip install coverage==7.6.4
diff --git a/CHANGELOG b/CHANGELOG
index 9efe61b..ab85160 100644
--- a/CHANGELOG
+++ b/CHANGELOG
@@ -1,3 +1,10 @@
+v4.0.0 - BREAKING CHANGES
+- Application:
+ - Feature: re-structured the whole architecture to simplify usage and future expansions.
+ - Maintenance: makefile now supports "*tests.py" in "type*" targets and "tryinstall-concise" in "all-concise".
+- Documentation:
+ - Feature: updated to v4.0.0.
+
v3.9.0
- Library:
- Feature: makefile added.
diff --git a/README.md b/README.md
index 5e54062..fb13dcf 100644
--- a/README.md
+++ b/README.md
@@ -8,7 +8,7 @@ Contact: numbworks@gmail.com
From the documentation:
-> nwtimetracking is a Jupyter Notebook designed to analyze the Excel file I use to annotate the durations of all my sessions of extra work and continuos learning, so that I can run analyses on them. This software is born to overcome the lack of support for durations (timedeltas) in Excel.
+> `nwtimetracking` is an application designed to run automated data analysis tasks on `Time Tracking.xlsx`. This file is the one I use to annotate the durations of all my sessions of extra work and continuous learning. This application was born to overcome the lack of support for durations (timedeltas) in Excel.
## Getting started
diff --git a/codecoverage.svg b/codecoverage.svg
index f3e0bcc..7f8ab10 100644
--- a/codecoverage.svg
+++ b/codecoverage.svg
@@ -1 +1 @@
-coverage: 93.0% coverage coverage 93.0% 93.0%
\ No newline at end of file
+coverage: 94.0% coverage coverage 94.0% 94.0%
\ No newline at end of file
diff --git a/codecoverage.txt b/codecoverage.txt
index e5abe75..c1868ea 100644
--- a/codecoverage.txt
+++ b/codecoverage.txt
@@ -1 +1 @@
-https://img.shields.io/badge/coverage-93.0%25-green
\ No newline at end of file
+https://img.shields.io/badge/coverage-94.0%25-green
\ No newline at end of file
diff --git a/docs/docs-nwtimetracking.md b/docs/docs-nwtimetracking.md
index ed7aaba..becfed7 100644
--- a/docs/docs-nwtimetracking.md
+++ b/docs/docs-nwtimetracking.md
@@ -15,12 +15,13 @@ Contact: numbworks@gmail.com
| 2024-10-01 | numbworks | Updated to v3.7.0. |
| 2024-10-28 | numbworks | Updated to v3.8.0. |
| 2024-12-01 | numbworks | Updated to v3.9.0. |
+| 2024-12-05 | numbworks | Updated to v4.0.0. |
## Introduction
-`nwtimetracking` is a `Jupyter Notebook` designed to analyze the Excel file I use to annotate the durations of all my sessions of extra work and continuos learning, so that I can run analyses on them.
+`nwtimetracking` is an application designed to run automated data analysis tasks on `Time Tracking.xlsx`.
-This software is born to overcome the lack of support for durations (timedeltas) in Excel.
+This file is the one I use to annotate the durations of all my sessions of extra work and continuous learning. This application was born to overcome the lack of support for durations (timedeltas) in Excel.
This project may not be useful for many (not generic enough), but I decided to upload it to `Github` anyway, in order to showcase my way of working when I face similar data analysis tasks and I decide to tackle them with `Python` and `Jupyter Notebook`.
@@ -138,21 +139,23 @@ The avalaible target names are:
|---|---|
| type-verbose | Runs a type verification task and logs everything. |
| coverage-verbose | Runs a unit test coverage calculation task and logs the % per class. |
+| tryinstall-verbose | Simulates a "pip install" and logs everything. |
| all-concise | Runs a batch of verification tasks and logs one summary line for each of them. |
The expected outcome for `all-concise` is:
```
MODULE_NAME: nwtimetracking
-MODULE_VERSION: 3.9.0
+MODULE_VERSION: 4.0.0
COVERAGE_THRESHOLD: 70%
-[WARNING] type-concise: not passed! '1' error(s) found!
-[WARNING] changelog-concise: 'CHANGELOG' not updated to current version!
+[OK] type-concise: passed!
+[OK] changelog-concise: 'CHANGELOG' updated to current version!
[OK] setup-concise: 'setup.py' updated to current version!
[OK] coverage-concise: unit test coverage >= 70%.
+[OK] tryinstall-concise: installation process works.
```
-Considering the old-fashioned syntax adopted by `make`, here a summary of its less intuitive aspects:
+Considering the old-fashioned syntax adopted by both `make` and `bash`, here is a summary of their less intuitive aspects:
| Aspect | Description |
|---|---|
@@ -161,6 +164,7 @@ Considering the old-fashioned syntax adopted by `make`, here a summary of its le
| `@` | By default, `make` logs all the commands included in the target. The `@` disables this behaviour. |
| `$$` | Necessary to escape `$`. |
| `$@` | Variable that stores the target name. |
+| `if [[ ... ]]` | Double square brackets to enable pattern matching. |
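+
+For instance, here is a minimal sketch of the pattern matching enabled by the double square brackets (the file name below is purely illustrative):
+
+```
+file_name="nwtimetrackingtests.py"
+
+# Double square brackets: the unquoted * acts as a glob, so the "tests.py" suffix is matched.
+if [[ "$file_name" == *"tests.py" ]]; then echo "pattern matched"; fi
+
+# Single square brackets: plain string comparison, no glob expansion, so nothing is printed.
+if [ "$file_name" = "*tests.py" ]; then echo "never reached"; fi
+```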
## Known Issues - nwshared
diff --git a/scripts/makefile b/scripts/makefile
index 66d12cb..390a6d2 100644
--- a/scripts/makefile
+++ b/scripts/makefile
@@ -1,9 +1,9 @@
# SETTINGS
-.PHONY: clear makefile-info type-concise changelog-concise setup-concise coverage-concise all-concise
+.PHONY: clear makefile-info type-concise changelog-concise setup-concise coverage-concise tryinstall-concise all-concise
SHELL := /bin/bash
ROOT_DIR := $(shell cd .. && pwd)
MODULE_NAME = "nwtimetracking"
-MODULE_VERSION = "3.9.0"
+MODULE_VERSION = "4.0.0"
COVERAGE_THRESHOLD = 70
# TARGETS
@@ -16,7 +16,8 @@ makefile-info:
type-verbose:
@clear; \
- mypy $(ROOT_DIR)/src/$(MODULE_NAME).py --check-untyped-defs --disable-error-code=import-untyped;
+ mypy $(ROOT_DIR)/src/$(MODULE_NAME).py --check-untyped-defs --disable-error-code=import-untyped; \
+ mypy $(ROOT_DIR)/tests/$(MODULE_NAME)tests.py --check-untyped-defs --disable-error-code=import-untyped --disable-error-code=import-not-found;
coverage-verbose:
@clear; \
cd $(ROOT_DIR)/tests/; \
@@ -25,9 +26,27 @@ coverage-verbose:
coverage html --omit=$(MODULE_NAME)tests.py && sed -n '/<table/,/<\/table>/p' htmlcov/class_index.html | pandoc --from html --to plain; \
sleep 3; \
rm -rf htmlcov;
+tryinstall-verbose:
+ @clear; \
+ cd /home; \
+ rm -rf build; \
+ rm -rf dist; \
+ rm -rf $(MODULE_NAME).egg-info; \
+ rm -rf venv; \
+ python /workspaces/$(MODULE_NAME)/src/setup.py bdist_wheel; \
+ python3 -m venv venv; \
+ source venv/bin/activate; \
+ pip install dist/$(MODULE_NAME)*.whl; \
+ pip show $(MODULE_NAME) | grep Version; \
+ deactivate; \
+ rm -rf build; \
+ rm -rf dist; \
+ rm -rf $(MODULE_NAME).egg-info; \
+ rm -rf venv;
type-concise:
@value=$$(mypy $(ROOT_DIR)/src/$(MODULE_NAME).py --disable-error-code=import-untyped | grep -c "error:"); \
+ value+=$$(mypy $(ROOT_DIR)/tests/$(MODULE_NAME)tests.py --disable-error-code=import-untyped --disable-error-code=import-not-found | grep -c "error:"); \
if [ $$value -eq 0 ]; then echo "[OK] $@: passed!"; else echo "[WARNING] $@: not passed! '$$value' error(s) found!"; fi;
changelog-concise:
@value=$$(cat $(ROOT_DIR)/CHANGELOG | grep -c -e "v$(MODULE_VERSION)$$" -e "v$(MODULE_VERSION) - BREAKING CHANGES$$"); \
@@ -40,6 +59,10 @@ coverage-concise:
coverage run -m unittest $(MODULE_NAME)tests.py > /dev/null 2>&1; \
value=$$(coverage report --omit=$(MODULE_NAME)tests.py | grep -oP 'TOTAL\s+\d+\s+\d+\s+\K\d+(?=%)'); \
if [ $$value -ge $(COVERAGE_THRESHOLD) ]; then echo "[OK] $@: unit test coverage >= $(COVERAGE_THRESHOLD)%."; else echo "[WARNING] $@: unit test coverage < $(COVERAGE_THRESHOLD)%."; fi;
+tryinstall-concise:
+ @value=$$(make tryinstall-verbose 2>&1); \
+ last_chars=$$(echo "$$value" | tail -c 100); \
+ if [[ "$$last_chars" == *"Version: $(MODULE_VERSION)"* ]]; then echo "[OK] $@: installation process works."; else echo "[WARNING] $@: installation process fails!"; fi;
# AGGREGATE TARGETS
-all-concise: clear makefile-info type-concise changelog-concise setup-concise coverage-concise
\ No newline at end of file
+all-concise: clear makefile-info type-concise changelog-concise setup-concise coverage-concise tryinstall-concise
\ No newline at end of file
diff --git a/src/nwtimetracking.ipynb b/src/nwtimetracking.ipynb
index 0f66222..3f6ba50 100644
--- a/src/nwtimetracking.ipynb
+++ b/src/nwtimetracking.ipynb
@@ -9,7 +9,7 @@
"|---|---|\n",
"|Title|nwtimetracking|\n",
"|Author|numbworks|\n",
- "|Version|3.9.0|\n",
+ "|Version|4.0.0|\n",
"||Please check [docs/docs-nwtimetracking.md](../docs/docs-nwtimetracking.md) before proceeding.|"
]
},
@@ -23,11 +23,10 @@
},
{
"cell_type": "code",
- "execution_count": 1,
+ "execution_count": 479,
"metadata": {},
"outputs": [],
"source": [
- "from pandas import DataFrame\n",
"from typing import Optional"
]
},
@@ -41,14 +40,12 @@
},
{
"cell_type": "code",
- "execution_count": 2,
+ "execution_count": 480,
"metadata": {},
"outputs": [],
"source": [
- "from nwtimetracking import SettingBag, DefaultPathProvider, YearProvider, SoftwareProjectNameProvider, TimeTrackingManager\n",
- "from nwtimetracking import ComponentBag, MarkdownProcessor\n",
"from nwpackageversions import LanguageChecker, RequirementChecker, RequirementSummary\n",
- "from nwshared import Displayer"
+ "from nwtimetracking import SettingBag, ComponentBag, TimeTrackingProcessor"
]
},
{
@@ -60,7 +57,7 @@
},
{
"cell_type": "code",
- "execution_count": 3,
+ "execution_count": 481,
"metadata": {},
"outputs": [
{
@@ -111,20 +108,28 @@
},
{
"cell_type": "code",
- "execution_count": 4,
+ "execution_count": 482,
"metadata": {},
"outputs": [],
"source": [
"setting_bag : SettingBag = SettingBag(\n",
- " years = YearProvider().get_all_years(),\n",
- " yearly_targets = YearProvider().get_all_yearly_targets(),\n",
- " excel_path = DefaultPathProvider().get_default_time_tracking_path(),\n",
- " excel_books_nrows = 1301,\n",
- " software_project_names = SoftwareProjectNameProvider().get_all_software_project_names(),\n",
- " software_project_names_by_spv = SoftwareProjectNameProvider().get_all_software_project_names_by_spv(), \n",
- " tt_by_year_hashtag_years = [2024],\n",
- " show_tts_by_month_md = False,\n",
- " save_tts_by_month_md = True\n",
+ " options_tt = [\"display\"],\n",
+ " options_tts_by_month = [\"display\", \"save\"],\n",
+ " options_tts_by_year = [\"display\"],\n",
+ " options_tts_by_year_month = [\"display\"],\n",
+ " options_tts_by_year_month_spnv = [\"display\"],\n",
+ " options_tts_by_year_spnv = [\"display\"],\n",
+ " options_tts_by_spn = [\"display\", \"log\"],\n",
+ " options_tts_by_spn_spv = [],\n",
+ " options_tts_by_hashtag = [\"display\"],\n",
+ " options_tts_by_hashtag_year = [\"display\"],\n",
+ " options_tts_by_efs = [\"display\"],\n",
+ " options_tts_by_tr = [\"display\"],\n",
+ " options_definitions = [\"display\"],\n",
+ " excel_nrows = 1301,\n",
+ " tts_by_year_month_spnv_display_only_spn = \"nwtimetracking\",\n",
+ " tts_by_year_spnv_display_only_spn = \"nwtimetracking\",\n",
+ " tts_by_spn_spv_display_only_spn = \"nwtimetracking\"\n",
")\n"
]
},
@@ -138,21 +143,432 @@
},
{
"cell_type": "code",
- "execution_count": 5,
+ "execution_count": 483,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ " \n",
+ " \n",
+ " Date \n",
+ " StartTime \n",
+ " EndTime \n",
+ " Effort \n",
+ " Hashtag \n",
+ " Descriptor \n",
+ " IsSoftwareProject \n",
+ " IsReleaseDay \n",
+ " Year \n",
+ " Month \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " 2024-12-01 \n",
+ " 15:00 \n",
+ " 15:45 \n",
+ " 0h 45m \n",
+ " #python \n",
+ " nwpackageversions v1.8.0 \n",
+ " True \n",
+ " True \n",
+ " 2024 \n",
+ " 12 \n",
+ " \n",
+ " \n",
+ " 2024-12-01 \n",
+ " 15:45 \n",
+ " 16:15 \n",
+ " 0h 30m \n",
+ " #python \n",
+ " nwreadinglist v4.1.0 \n",
+ " True \n",
+ " True \n",
+ " 2024 \n",
+ " 12 \n",
+ " \n",
+ " \n",
+ " 2024-12-01 \n",
+ " 19:15 \n",
+ " 19:30 \n",
+ " 0h 15m \n",
+ " #python \n",
+ " nwreadinglist v4.1.0 \n",
+ " True \n",
+ " True \n",
+ " 2024 \n",
+ " 12 \n",
+ " \n",
+ " \n",
+ " 2024-12-01 \n",
+ " 19:30 \n",
+ " 20:15 \n",
+ " 0h 45m \n",
+ " #python \n",
+ " nwtimetracking v3.9.0 \n",
+ " True \n",
+ " True \n",
+ " 2024 \n",
+ " 12 \n",
+ " \n",
+ " \n",
+ " 2024-12-01 \n",
+ " 21:00 \n",
+ " 23:00 \n",
+ " 2h 00m \n",
+ " #python \n",
+ " nwtraderaanalytics v4.4.0 \n",
+ " True \n",
+ " True \n",
+ " 2024 \n",
+ " 12 \n",
+ " \n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
"source": [
- "displayer : Displayer = Displayer()\n",
- "tt_manager : TimeTrackingManager = TimeTrackingManager()\n",
- "sessions_df : DataFrame = tt_manager.get_sessions_dataset(setting_bag = setting_bag)\n",
- "\n",
- "if setting_bag.show_sessions_df:\n",
- " displayer.display(df = sessions_df.tail(n = setting_bag.n_generic))\n"
+ "tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = ComponentBag(), setting_bag = setting_bag)\n",
+ "tt_processor.initialize()\n",
+ "tt_processor.process_tt()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 484,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ " \n",
+ " \n",
+ " Month \n",
+ " 2015 \n",
+ " ↕ \n",
+ " 2016 \n",
+ " ↕ \n",
+ " 2017 \n",
+ " ↕ \n",
+ " 2018 \n",
+ " ↕ \n",
+ " 2019 \n",
+ " ↕ \n",
+ " 2020 \n",
+ " ↕ \n",
+ " 2021 \n",
+ " ↕ \n",
+ " 2022 \n",
+ " ↕ \n",
+ " 2023 \n",
+ " ↕ \n",
+ " 2024 \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " 1 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 18h 00m \n",
+ " ↑ \n",
+ " 88h 30m \n",
+ " ↓ \n",
+ " 80h 15m \n",
+ " ↓ \n",
+ " 60h 00m \n",
+ " ↓ \n",
+ " 29h 15m \n",
+ " ↑ \n",
+ " 53h 00m \n",
+ " ↓ \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 06h 00m \n",
+ " ↑ \n",
+ " 45h 45m \n",
+ " \n",
+ " \n",
+ " 2 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 45h 30m \n",
+ " ↑ \n",
+ " 65h 30m \n",
+ " ↑ \n",
+ " 90h 45m \n",
+ " ↓ \n",
+ " 73h 00m \n",
+ " ↓ \n",
+ " 38h 00m \n",
+ " ↓ \n",
+ " 31h 30m \n",
+ " ↓ \n",
+ " 03h 00m \n",
+ " ↑ \n",
+ " 24h 00m \n",
+ " ↑ \n",
+ " 77h 45m \n",
+ " \n",
+ " \n",
+ " 3 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 20h 45m \n",
+ " ↑ \n",
+ " 71h 45m \n",
+ " ↑ \n",
+ " 89h 00m \n",
+ " ↓ \n",
+ " 75h 30m \n",
+ " ↓ \n",
+ " 35h 00m \n",
+ " ↑ \n",
+ " 40h 30m \n",
+ " ↓ \n",
+ " 06h 15m \n",
+ " ↑ \n",
+ " 50h 15m \n",
+ " ↑ \n",
+ " 77h 45m \n",
+ " \n",
+ " \n",
+ " 4 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 37h 30m \n",
+ " ↑ \n",
+ " 68h 00m \n",
+ " ↑ \n",
+ " 88h 30m \n",
+ " ↓ \n",
+ " 59h 45m \n",
+ " ↓ \n",
+ " 40h 45m \n",
+ " ↓ \n",
+ " 19h 00m \n",
+ " ↑ \n",
+ " 27h 30m \n",
+ " ↓ \n",
+ " 19h 00m \n",
+ " ↑ \n",
+ " 29h 30m \n",
+ " \n",
+ " \n",
+ " 5 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 53h 00m \n",
+ " ↑ \n",
+ " 83h 00m \n",
+ " ↑ \n",
+ " 91h 15m \n",
+ " ↓ \n",
+ " 54h 45m \n",
+ " ↓ \n",
+ " 14h 30m \n",
+ " ↑ \n",
+ " 112h 45m \n",
+ " ↓ \n",
+ " 49h 45m \n",
+ " ↓ \n",
+ " 31h 00m \n",
+ " ↑ \n",
+ " 43h 00m \n",
+ " \n",
+ " \n",
+ " 6 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 57h 45m \n",
+ " ↓ \n",
+ " 37h 45m \n",
+ " ↑ \n",
+ " 62h 00m \n",
+ " ↓ \n",
+ " 29h 15m \n",
+ " ↓ \n",
+ " 12h 00m \n",
+ " ↑ \n",
+ " 54h 00m \n",
+ " ↑ \n",
+ " 73h 30m \n",
+ " ↓ \n",
+ " 24h 45m \n",
+ " ↑ \n",
+ " 48h 00m \n",
+ " \n",
+ " \n",
+ " 7 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 46h 45m \n",
+ " ↑ \n",
+ " 65h 30m \n",
+ " ↑ \n",
+ " 69h 30m \n",
+ " ↓ \n",
+ " 24h 15m \n",
+ " ↑ \n",
+ " 34h 00m \n",
+ " ↓ \n",
+ " 23h 30m \n",
+ " ↑ \n",
+ " 51h 00m \n",
+ " ↓ \n",
+ " 16h 30m \n",
+ " ↑ \n",
+ " 67h 00m \n",
+ " \n",
+ " \n",
+ " 8 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 25h 45m \n",
+ " ↑ \n",
+ " 45h 45m \n",
+ " ↑ \n",
+ " 72h 00m \n",
+ " ↓ \n",
+ " 06h 00m \n",
+ " ↑ \n",
+ " 32h 00m \n",
+ " ↑ \n",
+ " 110h 00m \n",
+ " ↓ \n",
+ " 36h 30m \n",
+ " ↑ \n",
+ " 41h 30m \n",
+ " ↓ \n",
+ " 32h 45m \n",
+ " \n",
+ " \n",
+ " 9 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 89h 30m \n",
+ " ↓ \n",
+ " 43h 45m \n",
+ " ↑ \n",
+ " 64h 00m \n",
+ " ↓ \n",
+ " 39h 00m \n",
+ " ↑ \n",
+ " 44h 00m \n",
+ " ↓ \n",
+ " 43h 30m \n",
+ " ↑ \n",
+ " 69h 00m \n",
+ " ↓ \n",
+ " 50h 15m \n",
+ " ↓ \n",
+ " 48h 00m \n",
+ " \n",
+ " \n",
+ " 10 \n",
+ " 08h 00m \n",
+ " ↑ \n",
+ " 82h 15m \n",
+ " ↓ \n",
+ " 64h 30m \n",
+ " ↓ \n",
+ " 46h 45m \n",
+ " ↓ \n",
+ " 45h 30m \n",
+ " ↑ \n",
+ " 48h 00m \n",
+ " ↓ \n",
+ " 35h 30m \n",
+ " ↑ \n",
+ " 38h 30m \n",
+ " ↓ \n",
+ " 20h 00m \n",
+ " ↑ \n",
+ " 101h 30m \n",
+ " \n",
+ " \n",
+ " 11 \n",
+ " 10h 00m \n",
+ " ↑ \n",
+ " 74h 30m \n",
+ " ↓ \n",
+ " 50h 00m \n",
+ " ↓ \n",
+ " 30h 00m \n",
+ " ↑ \n",
+ " 38h 45m \n",
+ " ↓ \n",
+ " 35h 30m \n",
+ " ↓ \n",
+ " 13h 15m \n",
+ " ↑ \n",
+ " 58h 15m \n",
+ " ↓ \n",
+ " 14h 30m \n",
+ " ↑ \n",
+ " 88h 00m \n",
+ " \n",
+ " \n",
+ " 12 \n",
+ " 00h 00m \n",
+ " ↑ \n",
+ " 64h 00m \n",
+ " ↑ \n",
+ " 78h 45m \n",
+ " ↓ \n",
+ " 45h 45m \n",
+ " ↓ \n",
+ " 09h 30m \n",
+ " ↑ \n",
+ " 107h 30m \n",
+ " ↓ \n",
+ " 01h 00m \n",
+ " ↑ \n",
+ " 54h 15m \n",
+ " ↓ \n",
+ " 22h 30m \n",
+ " ↓ \n",
+ " 07h 45m \n",
+ " \n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ },
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "This content (id: 'tts_by_month') has been successfully saved as '/home/nwtimetracking/TIMETRACKINGBYMONTH.md'.\n"
+ ]
+ }
+ ],
+ "source": [
+ "tt_processor.process_tts_by_month()"
]
},
{
"cell_type": "code",
- "execution_count": 6,
+ "execution_count": 485,
"metadata": {},
"outputs": [
{
@@ -160,92 +576,92 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " Year \n",
- " Effort \n",
- " YearlyTarget \n",
- " TargetDiff \n",
- " IsTargetMet \n",
+ " Year \n",
+ " Effort \n",
+ " YearlyTarget \n",
+ " TargetDiff \n",
+ " IsTargetMet \n",
" \n",
" \n",
" \n",
" \n",
- " 2015 \n",
- " 18h 00m \n",
- " 00h 00m \n",
- " +18h 00m \n",
- " True \n",
+ " 2015 \n",
+ " 18h 00m \n",
+ " 00h 00m \n",
+ " +18h 00m \n",
+ " True \n",
" \n",
" \n",
- " 2016 \n",
- " 615h 15m \n",
- " 500h 00m \n",
- " +115h 15m \n",
- " True \n",
+ " 2016 \n",
+ " 615h 15m \n",
+ " 500h 00m \n",
+ " +115h 15m \n",
+ " True \n",
" \n",
" \n",
- " 2017 \n",
- " 762h 45m \n",
- " 500h 00m \n",
- " +262h 45m \n",
- " True \n",
+ " 2017 \n",
+ " 762h 45m \n",
+ " 500h 00m \n",
+ " +262h 45m \n",
+ " True \n",
" \n",
" \n",
- " 2018 \n",
- " 829h 45m \n",
- " 500h 00m \n",
- " +329h 45m \n",
- " True \n",
+ " 2018 \n",
+ " 829h 45m \n",
+ " 500h 00m \n",
+ " +329h 45m \n",
+ " True \n",
" \n",
" \n",
- " 2019 \n",
- " 515h 15m \n",
- " 500h 00m \n",
- " +15h 15m \n",
- " True \n",
+ " 2019 \n",
+ " 515h 15m \n",
+ " 500h 00m \n",
+ " +15h 15m \n",
+ " True \n",
" \n",
" \n",
- " 2020 \n",
- " 470h 30m \n",
- " 500h 00m \n",
- " -30h 30m \n",
- " False \n",
+ " 2020 \n",
+ " 470h 30m \n",
+ " 500h 00m \n",
+ " -30h 30m \n",
+ " False \n",
" \n",
" \n",
- " 2021 \n",
- " 537h 30m \n",
- " 500h 00m \n",
- " +37h 30m \n",
- " True \n",
+ " 2021 \n",
+ " 537h 30m \n",
+ " 500h 00m \n",
+ " +37h 30m \n",
+ " True \n",
" \n",
" \n",
- " 2022 \n",
- " 467h 30m \n",
- " 400h 00m \n",
- " +67h 30m \n",
- " True \n",
+ " 2022 \n",
+ " 467h 30m \n",
+ " 400h 00m \n",
+ " +67h 30m \n",
+ " True \n",
" \n",
" \n",
- " 2023 \n",
- " 320h 15m \n",
- " 250h 00m \n",
- " +70h 15m \n",
- " True \n",
+ " 2023 \n",
+ " 320h 15m \n",
+ " 250h 00m \n",
+ " +70h 15m \n",
+ " True \n",
" \n",
" \n",
- " 2024 \n",
- " 666h 45m \n",
- " 500h 00m \n",
- " +166h 45m \n",
- " True \n",
+ " 2024 \n",
+ " 666h 45m \n",
+ " 500h 00m \n",
+ " +166h 45m \n",
+ " True \n",
" \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -253,19 +669,12 @@
}
],
"source": [
- "if setting_bag.show_tt_by_year_df:\n",
- " \n",
- " tt_by_year_df : DataFrame = tt_manager.get_tt_by_year(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " yearly_targets = setting_bag.yearly_targets)\n",
- " \n",
- " displayer.display(df = tt_by_year_df)\n"
+ "tt_processor.process_tts_by_year()"
]
},
{
"cell_type": "code",
- "execution_count": 7,
+ "execution_count": 486,
"metadata": {},
"outputs": [
{
@@ -273,57 +682,106 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " Year \n",
- " Month \n",
- " Effort \n",
- " YearlyTotal \n",
- " ToTarget \n",
+ " Year \n",
+ " Month \n",
+ " Effort \n",
+ " YearlyTotal \n",
+ " ToTarget \n",
" \n",
" \n",
" \n",
" \n",
- " 2024 \n",
- " 8 \n",
- " 32h 45m \n",
- " 421h 30m \n",
- " -79h 30m \n",
+ " 2024 \n",
+ " 1 \n",
+ " 45h 45m \n",
+ " 45h 45m \n",
+ " -455h 45m \n",
" \n",
" \n",
- " 2024 \n",
- " 9 \n",
- " 48h 00m \n",
- " 469h 30m \n",
- " -31h 30m \n",
+ " 2024 \n",
+ " 2 \n",
+ " 77h 45m \n",
+ " 123h 30m \n",
+ " -377h 30m \n",
" \n",
" \n",
- " 2024 \n",
- " 10 \n",
- " 101h 30m \n",
- " 571h 00m \n",
- " +71h 00m \n",
+ " 2024 \n",
+ " 3 \n",
+ " 77h 45m \n",
+ " 201h 15m \n",
+ " -299h 15m \n",
" \n",
" \n",
- " 2024 \n",
- " 11 \n",
- " 88h 00m \n",
- " 659h 00m \n",
- " +159h 00m \n",
+ " 2024 \n",
+ " 4 \n",
+ " 29h 30m \n",
+ " 230h 45m \n",
+ " -270h 45m \n",
" \n",
" \n",
- " 2024 \n",
- " 12 \n",
- " 07h 45m \n",
- " 666h 45m \n",
- " +166h 45m \n",
+ " 2024 \n",
+ " 5 \n",
+ " 43h 00m \n",
+ " 273h 45m \n",
+ " -227h 45m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 6 \n",
+ " 48h 00m \n",
+ " 321h 45m \n",
+ " -179h 45m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 7 \n",
+ " 67h 00m \n",
+ " 388h 45m \n",
+ " -112h 45m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 8 \n",
+ " 32h 45m \n",
+ " 421h 30m \n",
+ " -79h 30m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 9 \n",
+ " 48h 00m \n",
+ " 469h 30m \n",
+ " -31h 30m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 10 \n",
+ " 101h 30m \n",
+ " 571h 00m \n",
+ " +71h 00m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 11 \n",
+ " 88h 00m \n",
+ " 659h 00m \n",
+ " +159h 00m \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 12 \n",
+ " 07h 45m \n",
+ " 666h 45m \n",
+ " +166h 45m \n",
" \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -331,53 +789,417 @@
}
],
"source": [
- "if setting_bag.show_tt_by_year_month_df:\n",
- "\n",
- " tt_by_year_month_df : DataFrame = tt_manager.get_tt_by_year_month(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " yearly_targets = setting_bag.yearly_targets)\n",
- " \n",
- " displayer.display(df = tt_by_year_month_df.tail(n = setting_bag.n_generic))\n"
+ "tt_processor.process_tts_by_year_month()"
]
},
{
"cell_type": "code",
- "execution_count": 8,
+ "execution_count": 487,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ " \n",
+ " \n",
+ " Year \n",
+ " Month \n",
+ " ProjectName \n",
+ " ProjectVersion \n",
+ " Effort \n",
+ " DME \n",
+ " %_DME \n",
+ " TME \n",
+ " %_TME \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " 9 \n",
+ " nwtimetracking \n",
+ " 1.0.0 \n",
+ " 11h 30m \n",
+ " 32h 45m \n",
+ " 35.11 \n",
+ " 50h 15m \n",
+ " 22.89 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " 10 \n",
+ " nwtimetracking \n",
+ " 1.0.0 \n",
+ " 20h 00m \n",
+ " 20h 00m \n",
+ " 100.00 \n",
+ " 20h 00m \n",
+ " 100.00 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " 11 \n",
+ " nwtimetracking \n",
+ " 1.1.0 \n",
+ " 10h 30m \n",
+ " 14h 00m \n",
+ " 75.00 \n",
+ " 14h 30m \n",
+ " 72.41 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " 11 \n",
+ " nwtimetracking \n",
+ " 1.2.0 \n",
+ " 03h 30m \n",
+ " 14h 00m \n",
+ " 25.00 \n",
+ " 14h 30m \n",
+ " 24.14 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " 12 \n",
+ " nwtimetracking \n",
+ " 1.3.0 \n",
+ " 02h 30m \n",
+ " 17h 00m \n",
+ " 14.71 \n",
+ " 22h 30m \n",
+ " 11.11 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 1 \n",
+ " nwtimetracking \n",
+ " 1.3.0 \n",
+ " 07h 15m \n",
+ " 20h 30m \n",
+ " 35.37 \n",
+ " 45h 45m \n",
+ " 15.85 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 1 \n",
+ " nwtimetracking \n",
+ " 2.0.0 \n",
+ " 02h 30m \n",
+ " 20h 30m \n",
+ " 12.20 \n",
+ " 45h 45m \n",
+ " 5.46 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 2 \n",
+ " nwtimetracking \n",
+ " 2.2.0 \n",
+ " 07h 45m \n",
+ " 36h 45m \n",
+ " 21.09 \n",
+ " 77h 45m \n",
+ " 9.97 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 3 \n",
+ " nwtimetracking \n",
+ " 3.0.0 \n",
+ " 03h 30m \n",
+ " 77h 15m \n",
+ " 4.53 \n",
+ " 77h 45m \n",
+ " 4.50 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 5 \n",
+ " nwtimetracking \n",
+ " 3.2.0 \n",
+ " 02h 00m \n",
+ " 35h 30m \n",
+ " 5.63 \n",
+ " 43h 00m \n",
+ " 4.65 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 5 \n",
+ " nwtimetracking \n",
+ " 3.3.0 \n",
+ " 01h 00m \n",
+ " 35h 30m \n",
+ " 2.82 \n",
+ " 43h 00m \n",
+ " 2.33 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 8 \n",
+ " nwtimetracking \n",
+ " 3.4.0 \n",
+ " 01h 00m \n",
+ " 10h 30m \n",
+ " 9.52 \n",
+ " 32h 45m \n",
+ " 3.05 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 9 \n",
+ " nwtimetracking \n",
+ " 3.5.0 \n",
+ " 04h 00m \n",
+ " 45h 45m \n",
+ " 8.74 \n",
+ " 48h 00m \n",
+ " 8.33 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 10 \n",
+ " nwtimetracking \n",
+ " 3.7.0 \n",
+ " 06h 00m \n",
+ " 85h 00m \n",
+ " 7.06 \n",
+ " 101h 30m \n",
+ " 5.91 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 10 \n",
+ " nwtimetracking \n",
+ " 3.8.0 \n",
+ " 01h 00m \n",
+ " 85h 00m \n",
+ " 1.18 \n",
+ " 101h 30m \n",
+ " 0.99 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " 12 \n",
+ " nwtimetracking \n",
+ " 3.9.0 \n",
+ " 00h 45m \n",
+ " 07h 45m \n",
+ " 9.68 \n",
+ " 07h 45m \n",
+ " 9.68 \n",
+ " \n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
"source": [
- "if setting_bag.show_tt_by_year_month_spnv_df:\n",
- "\n",
- " tt_by_year_month_spnv_df : DataFrame = tt_manager.get_tt_by_year_month_spnv(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " software_project_names = setting_bag.software_project_names)\n",
- " \n",
- " displayer.display(df = tt_by_year_month_spnv_df)\n",
- " tt_manager.try_print_definitions(df = tt_by_year_month_spnv_df, definitions = setting_bag.definitions)\n"
+ "tt_processor.process_tts_by_year_month_spnv()"
]
},
{
"cell_type": "code",
- "execution_count": 9,
+ "execution_count": 488,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ " \n",
+ " \n",
+ " Year \n",
+ " ProjectName \n",
+ " ProjectVersion \n",
+ " Effort \n",
+ " DYE \n",
+ " %_DYE \n",
+ " TYE \n",
+ " %_TYE \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " nwtimetracking \n",
+ " 1.0.0 \n",
+ " 31h 30m \n",
+ " 213h 00m \n",
+ " 14.79 \n",
+ " 320h 15m \n",
+ " 9.84 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " nwtimetracking \n",
+ " 1.1.0 \n",
+ " 10h 30m \n",
+ " 213h 00m \n",
+ " 4.93 \n",
+ " 320h 15m \n",
+ " 3.28 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " nwtimetracking \n",
+ " 1.2.0 \n",
+ " 03h 30m \n",
+ " 213h 00m \n",
+ " 1.64 \n",
+ " 320h 15m \n",
+ " 1.09 \n",
+ " \n",
+ " \n",
+ " 2023 \n",
+ " nwtimetracking \n",
+ " 1.3.0 \n",
+ " 02h 30m \n",
+ " 213h 00m \n",
+ " 1.17 \n",
+ " 320h 15m \n",
+ " 0.78 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 1.3.0 \n",
+ " 07h 15m \n",
+ " 429h 30m \n",
+ " 1.69 \n",
+ " 666h 45m \n",
+ " 1.09 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 2.0.0 \n",
+ " 02h 30m \n",
+ " 429h 30m \n",
+ " 0.58 \n",
+ " 666h 45m \n",
+ " 0.37 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 2.2.0 \n",
+ " 07h 45m \n",
+ " 429h 30m \n",
+ " 1.80 \n",
+ " 666h 45m \n",
+ " 1.16 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.0.0 \n",
+ " 03h 30m \n",
+ " 429h 30m \n",
+ " 0.81 \n",
+ " 666h 45m \n",
+ " 0.52 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.2.0 \n",
+ " 02h 00m \n",
+ " 429h 30m \n",
+ " 0.47 \n",
+ " 666h 45m \n",
+ " 0.30 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.3.0 \n",
+ " 01h 00m \n",
+ " 429h 30m \n",
+ " 0.23 \n",
+ " 666h 45m \n",
+ " 0.15 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.4.0 \n",
+ " 01h 00m \n",
+ " 429h 30m \n",
+ " 0.23 \n",
+ " 666h 45m \n",
+ " 0.15 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.5.0 \n",
+ " 04h 00m \n",
+ " 429h 30m \n",
+ " 0.93 \n",
+ " 666h 45m \n",
+ " 0.60 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.7.0 \n",
+ " 06h 00m \n",
+ " 429h 30m \n",
+ " 1.40 \n",
+ " 666h 45m \n",
+ " 0.90 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.8.0 \n",
+ " 01h 00m \n",
+ " 429h 30m \n",
+ " 0.23 \n",
+ " 666h 45m \n",
+ " 0.15 \n",
+ " \n",
+ " \n",
+ " 2024 \n",
+ " nwtimetracking \n",
+ " 3.9.0 \n",
+ " 00h 45m \n",
+ " 429h 30m \n",
+ " 0.17 \n",
+ " 666h 45m \n",
+ " 0.11 \n",
+ " \n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
"source": [
- "if setting_bag.show_tt_by_year_spnv_df:\n",
- "\n",
- " tt_by_year_spnv_df : DataFrame = tt_manager.get_tt_by_year_spnv(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " software_project_names = setting_bag.software_project_names)\n",
- " \n",
- " displayer.display(df = tt_by_year_spnv_df)\n",
- " tt_manager.try_print_definitions(df = tt_by_year_spnv_df, definitions = setting_bag.definitions)\n"
+ "tt_processor.process_tts_by_year_spnv()"
]
},
{
"cell_type": "code",
- "execution_count": 10,
+ "execution_count": 489,
"metadata": {},
"outputs": [
{
@@ -385,123 +1207,123 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " Hashtag \n",
- " ProjectName \n",
- " Effort \n",
- " DE \n",
- " %_DE \n",
- " TE \n",
- " %_TE \n",
+ " Hashtag \n",
+ " ProjectName \n",
+ " Effort \n",
+ " DE \n",
+ " %_DE \n",
+ " TE \n",
+ " %_TE \n",
" \n",
" \n",
" \n",
" \n",
- " #python \n",
- " nwtraderaanalytics \n",
- " 219h 45m \n",
- " 1874h 30m \n",
- " 11.72 \n",
- " 2654h 45m \n",
- " 8.28 \n",
- " \n",
- " \n",
- " #python \n",
- " nwreadinglist \n",
- " 143h 30m \n",
- " 1874h 30m \n",
- " 7.66 \n",
- " 2654h 45m \n",
- " 5.41 \n",
- " \n",
- " \n",
- " #python \n",
- " nwtimetracking \n",
- " 84h 45m \n",
- " 1874h 30m \n",
- " 4.52 \n",
- " 2654h 45m \n",
- " 3.19 \n",
- " \n",
- " \n",
- " #python \n",
- " nwpackageversions \n",
- " 52h 45m \n",
- " 1874h 30m \n",
- " 2.81 \n",
- " 2654h 45m \n",
- " 1.99 \n",
- " \n",
- " \n",
- " #python \n",
- " nwshared \n",
- " 42h 15m \n",
- " 1874h 30m \n",
- " 2.25 \n",
- " 2654h 45m \n",
- " 1.59 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.UnivariateForecasting \n",
- " 208h 00m \n",
- " 1874h 30m \n",
- " 11.10 \n",
- " 2654h 45m \n",
- " 7.84 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.NGramTextClassification \n",
- " 207h 30m \n",
- " 1874h 30m \n",
- " 11.07 \n",
- " 2654h 45m \n",
- " 7.82 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.MarkdownTables \n",
- " 20h 45m \n",
- " 1874h 30m \n",
- " 1.11 \n",
- " 2654h 45m \n",
- " 0.78 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.Shared.Files \n",
- " 05h 30m \n",
- " 1874h 30m \n",
- " 0.29 \n",
- " 2654h 45m \n",
- " 0.21 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.Shared.Serialization \n",
- " 04h 15m \n",
- " 1874h 30m \n",
- " 0.23 \n",
- " 2654h 45m \n",
- " 0.16 \n",
- " \n",
- " \n",
- " #csharp \n",
- " NW.Shared.Validation \n",
- " 02h 45m \n",
- " 1874h 30m \n",
- " 0.15 \n",
- " 2654h 45m \n",
- " 0.10 \n",
+ " #python \n",
+ " nwtraderaanalytics \n",
+ " 219h 45m \n",
+ " 1874h 30m \n",
+ " 11.72 \n",
+ " 2654h 45m \n",
+ " 8.28 \n",
+ " \n",
+ " \n",
+ " #python \n",
+ " nwreadinglist \n",
+ " 143h 30m \n",
+ " 1874h 30m \n",
+ " 7.66 \n",
+ " 2654h 45m \n",
+ " 5.41 \n",
+ " \n",
+ " \n",
+ " #python \n",
+ " nwtimetracking \n",
+ " 84h 45m \n",
+ " 1874h 30m \n",
+ " 4.52 \n",
+ " 2654h 45m \n",
+ " 3.19 \n",
+ " \n",
+ " \n",
+ " #python \n",
+ " nwpackageversions \n",
+ " 52h 45m \n",
+ " 1874h 30m \n",
+ " 2.81 \n",
+ " 2654h 45m \n",
+ " 1.99 \n",
+ " \n",
+ " \n",
+ " #python \n",
+ " nwshared \n",
+ " 42h 15m \n",
+ " 1874h 30m \n",
+ " 2.25 \n",
+ " 2654h 45m \n",
+ " 1.59 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.UnivariateForecasting \n",
+ " 208h 00m \n",
+ " 1874h 30m \n",
+ " 11.10 \n",
+ " 2654h 45m \n",
+ " 7.84 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.NGramTextClassification \n",
+ " 207h 30m \n",
+ " 1874h 30m \n",
+ " 11.07 \n",
+ " 2654h 45m \n",
+ " 7.82 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.MarkdownTables \n",
+ " 20h 45m \n",
+ " 1874h 30m \n",
+ " 1.11 \n",
+ " 2654h 45m \n",
+ " 0.78 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.Shared.Files \n",
+ " 05h 30m \n",
+ " 1874h 30m \n",
+ " 0.29 \n",
+ " 2654h 45m \n",
+ " 0.21 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.Shared.Serialization \n",
+ " 04h 15m \n",
+ " 1874h 30m \n",
+ " 0.23 \n",
+ " 2654h 45m \n",
+ " 0.16 \n",
+ " \n",
+ " \n",
+ " #csharp \n",
+ " NW.Shared.Validation \n",
+ " 02h 45m \n",
+ " 1874h 30m \n",
+ " 0.15 \n",
+ " 2654h 45m \n",
+ " 0.10 \n",
" \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -517,22 +1339,21 @@
}
],
"source": [
- "if setting_bag.show_tt_by_spn_df:\n",
- "\n",
- " tt_by_spn_df : DataFrame = tt_manager.get_tt_by_spn(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " software_project_names = setting_bag.software_project_names,\n",
- " remove_untagged = setting_bag.remove_untagged_from_de\n",
- " )\n",
- " \n",
- " displayer.display(df = tt_by_spn_df, formatters = { \"%_DE\" : \"{:.2f}\", \"%_TE\" : \"{:.2f}\" }) \n",
- " tt_manager.try_print_definitions(df = tt_by_spn_df, definitions = setting_bag.definitions)\n"
+ "tt_processor.process_tts_by_spn()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 490,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "tt_processor.process_tts_by_spn_spv()"
]
},
{
"cell_type": "code",
- "execution_count": 11,
+ "execution_count": 491,
"metadata": {},
"outputs": [
{
@@ -540,415 +1361,244 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " ProjectName \n",
- " ProjectVersion \n",
- " Effort \n",
+ " Hashtag \n",
+ " Effort \n",
+ " Effort% \n",
" \n",
" \n",
" \n",
" \n",
- " NW.MarkdownTables \n",
- " 1.0.0 \n",
- " 15h 15m \n",
- " \n",
- " \n",
- " NW.MarkdownTables \n",
- " 1.0.1 \n",
- " 02h 30m \n",
- " \n",
- " \n",
- " NW.MarkdownTables \n",
- " 3.0.0 \n",
- " 03h 00m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 1.0.0 \n",
- " 73h 15m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 1.1.0 \n",
- " 07h 30m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 2.0.0 \n",
- " 15h 30m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 3.0.0 \n",
- " 28h 00m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 3.5.0 \n",
- " 39h 45m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 3.6.0 \n",
- " 17h 30m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 3.7.0 \n",
- " 08h 00m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 4.0.0 \n",
- " 14h 30m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 4.1.0 \n",
- " 02h 15m \n",
- " \n",
- " \n",
- " NW.NGramTextClassification \n",
- " 4.2.0 \n",
- " 01h 15m \n",
- " \n",
- " \n",
- " NW.Shared.Files \n",
- " 1.0.0 \n",
- " 05h 30m \n",
+ " #untagged \n",
+ " 2548h 45m \n",
+ " 48.98 \n",
" \n",
" \n",
- " NW.Shared.Serialization \n",
- " 1.0.0 \n",
- " 04h 15m \n",
+ " #csharp \n",
+ " 1116h 45m \n",
+ " 21.46 \n",
" \n",
" \n",
- " NW.Shared.Validation \n",
- " 1.0.0 \n",
- " 02h 45m \n",
+ " #python \n",
+ " 611h 45m \n",
+ " 11.76 \n",
" \n",
" \n",
- " NW.UnivariateForecasting \n",
- " 1.0.0 \n",
- " 116h 15m \n",
+ " #studying \n",
+ " 419h 30m \n",
+ " 8.06 \n",
" \n",
" \n",
- " NW.UnivariateForecasting \n",
- " 1.0.1 \n",
- " 07h 30m \n",
+ " #maintenance \n",
+ " 333h 45m \n",
+ " 6.41 \n",
" \n",
" \n",
- " NW.UnivariateForecasting \n",
- " 1.1.0 \n",
- " 04h 00m \n",
+ " #powershell \n",
+ " 154h 00m \n",
+ " 2.96 \n",
" \n",
" \n",
- " NW.UnivariateForecasting \n",
- " 1.1.1 \n",
- " 07h 30m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 2.0.0 \n",
- " 06h 30m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 2.0.1 \n",
- " 02h 00m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 2.5.0 \n",
- " 16h 30m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 3.0.0 \n",
- " 46h 00m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 4.1.0 \n",
- " 01h 00m \n",
- " \n",
- " \n",
- " NW.UnivariateForecasting \n",
- " 4.2.0 \n",
- " 00h 45m \n",
- " \n",
- " \n",
- " nwpackageversions \n",
- " 1.0.0 \n",
- " 34h 15m \n",
- " \n",
- " \n",
- " nwpackageversions \n",
- " 1.1.0 \n",
- " 02h 30m \n",
- " \n",
- " \n",
- " nwpackageversions \n",
- " 1.2.0 \n",
- " 03h 00m \n",
- " \n",
- " \n",
- " nwpackageversions \n",
- " 1.6.0 \n",
- " 12h 15m \n",
- " \n",
- " \n",
- " nwpackageversions \n",
- " 1.8.0 \n",
- " 00h 45m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 1.0.0 \n",
- " 45h 15m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 1.5.0 \n",
- " 16h 15m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 1.6.0 \n",
- " 05h 00m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 2.0.0 \n",
- " 01h 30m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 2.1.0 \n",
- " 02h 00m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 2.2.0 \n",
- " 06h 45m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.0.0 \n",
- " 10h 00m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.1.0 \n",
- " 05h 15m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.2.0 \n",
- " 02h 00m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.3.0 \n",
- " 00h 30m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.4.0 \n",
- " 03h 15m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.5.0 \n",
- " 06h 30m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.6.0 \n",
- " 01h 00m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.7.0 \n",
- " 07h 45m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 3.8.0 \n",
- " 01h 30m \n",
- " \n",
- " \n",
- " nwreadinglist \n",
- " 4.0.0 \n",
- " 28h 15m \n",
+ " #overtime \n",
+ " 19h 00m \n",
+ " 0.37 \n",
" \n",
+ " \n",
+ "
\n"
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "tt_processor.process_tts_by_hashtag()"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 492,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ " \n",
" \n",
- " nwreadinglist \n",
- " 4.1.0 \n",
- " 00h 45m \n",
+ " Year \n",
+ " Hashtag \n",
+ " Effort \n",
" \n",
+ " \n",
+ " \n",
" \n",
- " nwshared \n",
- " 1.1.0 \n",
- " 08h 00m \n",
+ " 2019 \n",
+ " #csharp \n",
+ " 67h 00m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.2.0 \n",
- " 01h 30m \n",
+ " 2020 \n",
+ " #csharp \n",
+ " 206h 30m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.3.0 \n",
- " 07h 00m \n",
+ " 2021 \n",
+ " #csharp \n",
+ " 441h 45m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.4.0 \n",
- " 06h 30m \n",
+ " 2022 \n",
+ " #csharp \n",
+ " 298h 45m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.5.0 \n",
- " 01h 00m \n",
+ " 2023 \n",
+ " #csharp \n",
+ " 67h 30m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.6.0 \n",
- " 00h 45m \n",
+ " 2024 \n",
+ " #csharp \n",
+ " 35h 15m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.7.0 \n",
- " 03h 00m \n",
+ " 2019 \n",
+ " #maintenance \n",
+ " 53h 45m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.7.1 \n",
- " 01h 00m \n",
+ " 2020 \n",
+ " #maintenance \n",
+ " 53h 30m \n",
" \n",
" \n",
- " nwshared \n",
- " 1.8.0 \n",
- " 13h 30m \n",
+ " 2021 \n",
+ " #maintenance \n",
+ " 29h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 1.0.0 \n",
- " 31h 30m \n",
+ " 2022 \n",
+ " #maintenance \n",
+ " 36h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 1.1.0 \n",
- " 10h 30m \n",
+ " 2023 \n",
+ " #maintenance \n",
+ " 52h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 1.2.0 \n",
- " 03h 30m \n",
+ " 2024 \n",
+ " #maintenance \n",
+ " 108h 00m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 1.3.0 \n",
- " 09h 45m \n",
+ " 2020 \n",
+ " #overtime \n",
+ " 19h 00m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 2.0.0 \n",
- " 02h 30m \n",
+ " 2020 \n",
+ " #powershell \n",
+ " 116h 45m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 2.2.0 \n",
- " 07h 45m \n",
+ " 2021 \n",
+ " #powershell \n",
+ " 11h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.0.0 \n",
- " 03h 30m \n",
+ " 2022 \n",
+ " #powershell \n",
+ " 21h 15m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.2.0 \n",
- " 02h 00m \n",
+ " 2023 \n",
+ " #powershell \n",
+ " 04h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.3.0 \n",
- " 01h 00m \n",
+ " 2022 \n",
+ " #python \n",
+ " 72h 00m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.4.0 \n",
- " 01h 00m \n",
+ " 2023 \n",
+ " #python \n",
+ " 141h 00m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.5.0 \n",
- " 04h 00m \n",
+ " 2024 \n",
+ " #python \n",
+ " 398h 45m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.7.0 \n",
- " 06h 00m \n",
+ " 2019 \n",
+ " #studying \n",
+ " 71h 30m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.8.0 \n",
- " 01h 00m \n",
+ " 2020 \n",
+ " #studying \n",
+ " 74h 45m \n",
" \n",
" \n",
- " nwtimetracking \n",
- " 3.9.0 \n",
- " 00h 45m \n",
+ " 2021 \n",
+ " #studying \n",
+ " 54h 45m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 1.0.0 \n",
- " 62h 00m \n",
+ " 2022 \n",
+ " #studying \n",
+ " 39h 00m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 2.0.0 \n",
- " 12h 00m \n",
+ " 2023 \n",
+ " #studying \n",
+ " 54h 45m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 3.0.0 \n",
- " 52h 15m \n",
+ " 2024 \n",
+ " #studying \n",
+ " 124h 45m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 4.0.0 \n",
- " 64h 15m \n",
+ " 2015 \n",
+ " #untagged \n",
+ " 18h 00m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 4.1.0 \n",
- " 01h 45m \n",
+ " 2016 \n",
+ " #untagged \n",
+ " 615h 15m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 4.2.0 \n",
- " 25h 00m \n",
+ " 2017 \n",
+ " #untagged \n",
+ " 762h 45m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 4.3.0 \n",
- " 00h 30m \n",
+ " 2018 \n",
+ " #untagged \n",
+ " 829h 45m \n",
" \n",
" \n",
- " nwtraderaanalytics \n",
- " 4.4.0 \n",
- " 02h 00m \n",
+ " 2019 \n",
+ " #untagged \n",
+ " 323h 00m \n",
" \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -956,19 +1606,12 @@
}
],
"source": [
- "if setting_bag.show_tt_by_spn_df:\n",
- "\n",
- " tt_by_spn_spv_df : DataFrame = tt_manager.get_tt_by_spn_spv(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.years, \n",
- " software_project_names = setting_bag.software_project_names_by_spv)\n",
- " \n",
- " displayer.display(df = tt_by_spn_spv_df)\n"
+ "tt_processor.process_tts_by_hashtag_year()"
]
},
{
"cell_type": "code",
- "execution_count": 12,
+ "execution_count": 493,
"metadata": {},
"outputs": [
{
@@ -976,40 +1619,23 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " Year \n",
- " Hashtag \n",
- " Effort \n",
+ " StartTime \n",
+ " EndTime \n",
+ " Effort \n",
+ " ES_IsCorrect \n",
+ " ES_Expected \n",
+ " ES_Message \n",
" \n",
" \n",
" \n",
- " \n",
- " 2024 \n",
- " #csharp \n",
- " 35h 15m \n",
- " \n",
- " \n",
- " 2024 \n",
- " #maintenance \n",
- " 108h 00m \n",
- " \n",
- " \n",
- " 2024 \n",
- " #python \n",
- " 398h 45m \n",
- " \n",
- " \n",
- " 2024 \n",
- " #studying \n",
- " 124h 45m \n",
- " \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -1017,18 +1643,12 @@
}
],
"source": [
- "if setting_bag.show_tt_by_year_hashtag:\n",
- "\n",
- " tt_by_year_hashtag_df : DataFrame = tt_manager.get_tt_by_year_hashtag(\n",
- " sessions_df = sessions_df, \n",
- " years = setting_bag.tt_by_year_hashtag_years)\n",
- " \n",
- " displayer.display(df = tt_by_year_hashtag_df)\n"
+ "tt_processor.process_tts_by_efs()"
]
},
{
"cell_type": "code",
- "execution_count": 13,
+ "execution_count": 494,
"metadata": {},
"outputs": [
{
@@ -1036,417 +1656,59 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " Hashtag \n",
- " Effort \n",
- " Effort% \n",
+ " TimeRangeId \n",
+ " Occurrences \n",
" \n",
" \n",
" \n",
" \n",
- " #untagged \n",
- " 2548h 45m \n",
- " 48.98 \n",
+ " 08:00-08:45 \n",
+ " 43 \n",
" \n",
" \n",
- " #csharp \n",
- " 1116h 45m \n",
- " 21.46 \n",
+ " 08:00-08:30 \n",
+ " 25 \n",
" \n",
" \n",
- " #python \n",
- " 611h 45m \n",
- " 11.76 \n",
+ " 18:00-20:00 \n",
+ " 21 \n",
" \n",
" \n",
- " #studying \n",
- " 419h 30m \n",
- " 8.06 \n",
+ " 17:30-18:00 \n",
+ " 18 \n",
" \n",
" \n",
- " #maintenance \n",
- " 333h 45m \n",
- " 6.41 \n",
+ " 19:00-20:00 \n",
+ " 17 \n",
" \n",
" \n",
- " #powershell \n",
- " 154h 00m \n",
- " 2.96 \n",
+ " 17:15-17:45 \n",
+ " 15 \n",
" \n",
" \n",
- " #overtime \n",
- " 19h 00m \n",
- " 0.37 \n",
+ " 17:00-17:30 \n",
+ " 15 \n",
" \n",
- " \n",
- "
\n"
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- }
- ],
- "source": [
- "if setting_bag.show_tt_by_hashtag:\n",
- " tt_by_hashtag_df : DataFrame = tt_manager.get_tt_by_hashtag(sessions_df = sessions_df)\n",
- " displayer.display(df = tt_by_hashtag_df, formatters = { \"Effort%\" : \"{:.2f}\" })\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 14,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/html": [
- "\n",
- "\n",
- " \n",
" \n",
- " Month \n",
- " 2015 \n",
- " ↕ \n",
- " 2016 \n",
- " ↕ \n",
- " 2017 \n",
- " ↕ \n",
- " 2018 \n",
- " ↕ \n",
- " 2019 \n",
- " ↕ \n",
- " 2020 \n",
- " ↕ \n",
- " 2021 \n",
- " ↕ \n",
- " 2022 \n",
- " ↕ \n",
- " 2023 \n",
- " ↕ \n",
- " 2024 \n",
+ " 17:00-20:00 \n",
+ " 11 \n",
" \n",
- " \n",
- " \n",
" \n",
- " 1 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 18h 00m \n",
- " ↑ \n",
- " 88h 30m \n",
- " ↓ \n",
- " 80h 15m \n",
- " ↓ \n",
- " 60h 00m \n",
- " ↓ \n",
- " 29h 15m \n",
- " ↑ \n",
- " 53h 00m \n",
- " ↓ \n",
- " 00h 00m \n",
- " ↑ \n",
- " 06h 00m \n",
- " ↑ \n",
- " 45h 45m \n",
- " \n",
- " \n",
- " 2 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 45h 30m \n",
- " ↑ \n",
- " 65h 30m \n",
- " ↑ \n",
- " 90h 45m \n",
- " ↓ \n",
- " 73h 00m \n",
- " ↓ \n",
- " 38h 00m \n",
- " ↓ \n",
- " 31h 30m \n",
- " ↓ \n",
- " 03h 00m \n",
- " ↑ \n",
- " 24h 00m \n",
- " ↑ \n",
- " 77h 45m \n",
- " \n",
- " \n",
- " 3 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 20h 45m \n",
- " ↑ \n",
- " 71h 45m \n",
- " ↑ \n",
- " 89h 00m \n",
- " ↓ \n",
- " 75h 30m \n",
- " ↓ \n",
- " 35h 00m \n",
- " ↑ \n",
- " 40h 30m \n",
- " ↓ \n",
- " 06h 15m \n",
- " ↑ \n",
- " 50h 15m \n",
- " ↑ \n",
- " 77h 45m \n",
- " \n",
- " \n",
- " 4 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 37h 30m \n",
- " ↑ \n",
- " 68h 00m \n",
- " ↑ \n",
- " 88h 30m \n",
- " ↓ \n",
- " 59h 45m \n",
- " ↓ \n",
- " 40h 45m \n",
- " ↓ \n",
- " 19h 00m \n",
- " ↑ \n",
- " 27h 30m \n",
- " ↓ \n",
- " 19h 00m \n",
- " ↑ \n",
- " 29h 30m \n",
- " \n",
- " \n",
- " 5 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 53h 00m \n",
- " ↑ \n",
- " 83h 00m \n",
- " ↑ \n",
- " 91h 15m \n",
- " ↓ \n",
- " 54h 45m \n",
- " ↓ \n",
- " 14h 30m \n",
- " ↑ \n",
- " 112h 45m \n",
- " ↓ \n",
- " 49h 45m \n",
- " ↓ \n",
- " 31h 00m \n",
- " ↑ \n",
- " 43h 00m \n",
- " \n",
- " \n",
- " 6 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 57h 45m \n",
- " ↓ \n",
- " 37h 45m \n",
- " ↑ \n",
- " 62h 00m \n",
- " ↓ \n",
- " 29h 15m \n",
- " ↓ \n",
- " 12h 00m \n",
- " ↑ \n",
- " 54h 00m \n",
- " ↑ \n",
- " 73h 30m \n",
- " ↓ \n",
- " 24h 45m \n",
- " ↑ \n",
- " 48h 00m \n",
- " \n",
- " \n",
- " 7 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 46h 45m \n",
- " ↑ \n",
- " 65h 30m \n",
- " ↑ \n",
- " 69h 30m \n",
- " ↓ \n",
- " 24h 15m \n",
- " ↑ \n",
- " 34h 00m \n",
- " ↓ \n",
- " 23h 30m \n",
- " ↑ \n",
- " 51h 00m \n",
- " ↓ \n",
- " 16h 30m \n",
- " ↑ \n",
- " 67h 00m \n",
- " \n",
- " \n",
- " 8 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 25h 45m \n",
- " ↑ \n",
- " 45h 45m \n",
- " ↑ \n",
- " 72h 00m \n",
- " ↓ \n",
- " 06h 00m \n",
- " ↑ \n",
- " 32h 00m \n",
- " ↑ \n",
- " 110h 00m \n",
- " ↓ \n",
- " 36h 30m \n",
- " ↑ \n",
- " 41h 30m \n",
- " ↓ \n",
- " 32h 45m \n",
- " \n",
- " \n",
- " 9 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 89h 30m \n",
- " ↓ \n",
- " 43h 45m \n",
- " ↑ \n",
- " 64h 00m \n",
- " ↓ \n",
- " 39h 00m \n",
- " ↑ \n",
- " 44h 00m \n",
- " ↓ \n",
- " 43h 30m \n",
- " ↑ \n",
- " 69h 00m \n",
- " ↓ \n",
- " 50h 15m \n",
- " ↓ \n",
- " 48h 00m \n",
- " \n",
- " \n",
- " 10 \n",
- " 08h 00m \n",
- " ↑ \n",
- " 82h 15m \n",
- " ↓ \n",
- " 64h 30m \n",
- " ↓ \n",
- " 46h 45m \n",
- " ↓ \n",
- " 45h 30m \n",
- " ↑ \n",
- " 48h 00m \n",
- " ↓ \n",
- " 35h 30m \n",
- " ↑ \n",
- " 38h 30m \n",
- " ↓ \n",
- " 20h 00m \n",
- " ↑ \n",
- " 101h 30m \n",
- " \n",
- " \n",
- " 11 \n",
- " 10h 00m \n",
- " ↑ \n",
- " 74h 30m \n",
- " ↓ \n",
- " 50h 00m \n",
- " ↓ \n",
- " 30h 00m \n",
- " ↑ \n",
- " 38h 45m \n",
- " ↓ \n",
- " 35h 30m \n",
- " ↓ \n",
- " 13h 15m \n",
- " ↑ \n",
- " 58h 15m \n",
- " ↓ \n",
- " 14h 30m \n",
- " ↑ \n",
- " 88h 00m \n",
- " \n",
- " \n",
- " 12 \n",
- " 00h 00m \n",
- " ↑ \n",
- " 64h 00m \n",
- " ↑ \n",
- " 78h 45m \n",
- " ↓ \n",
- " 45h 45m \n",
- " ↓ \n",
- " 09h 30m \n",
- " ↑ \n",
- " 107h 30m \n",
- " ↓ \n",
- " 01h 00m \n",
- " ↑ \n",
- " 54h 15m \n",
- " ↓ \n",
- " 22h 30m \n",
- " ↓ \n",
- " 07h 45m \n",
+ " 17:00-17:45 \n",
+ " 11 \n",
" \n",
- " \n",
- "
\n"
- ],
- "text/plain": [
- ""
- ]
- },
- "metadata": {},
- "output_type": "display_data"
- }
- ],
- "source": [
- "if setting_bag.show_tts_by_month_df:\n",
- " \n",
- " tts_by_month_df : DataFrame = tt_manager.get_tts_by_month(sessions_df = sessions_df, years = setting_bag.years)\n",
- " tts_by_month_upd_df : DataFrame = tt_manager.update_future_months_to_empty(tts_by_month_df = tts_by_month_df, now = setting_bag.now)\n",
- "\n",
- " if setting_bag.tts_by_month_update_future_values_to_empty:\n",
- " displayer.display(df = tts_by_month_upd_df)\n",
- " else:\n",
- " displayer.display(df = tts_by_month_df)\n"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 15,
- "metadata": {},
- "outputs": [
- {
- "data": {
- "text/html": [
- "\n",
- "\n",
- " \n",
" \n",
- " StartTime \n",
- " EndTime \n",
- " Effort \n",
- " ES_IsCorrect \n",
- " ES_Expected \n",
- " ES_Message \n",
+ " 18:30-20:00 \n",
+ " 10 \n",
" \n",
- " \n",
- " \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -1454,20 +1716,12 @@
}
],
"source": [
- "if (setting_bag.show_effort_status_df):\n",
- " \n",
- " es_df : DataFrame = tt_manager.add_effort_status(sessions_df = sessions_df)\n",
- " es_df = tt_manager.filter_by_is_correct(es_df = es_df, is_correct = setting_bag.effort_status_is_correct)\n",
- " \n",
- " if es_df.empty:\n",
- " displayer.display(df = es_df)\n",
- " else:\n",
- " displayer.display(df = es_df.head(n = setting_bag.effort_status_n))\n"
+ "tt_processor.process_tts_by_tr()"
]
},
{
"cell_type": "code",
- "execution_count": 16,
+ "execution_count": 495,
"metadata": {},
"outputs": [
{
@@ -1475,39 +1729,43 @@
"text/html": [
"\n",
- "\n",
+ "\n",
" \n",
" \n",
- " TimeRangeId \n",
- " Occurrences \n",
+ " Term \n",
+ " Definition \n",
" \n",
" \n",
" \n",
" \n",
- " 08:00-08:45 \n",
- " 43 \n",
+ " DME \n",
+ " Development Monthly Effort \n",
+ " \n",
+ " \n",
+ " TME \n",
+ " Total Monthly Effort \n",
" \n",
" \n",
- " 08:00-08:30 \n",
- " 25 \n",
+ " DYE \n",
+ " Development Yearly Effort \n",
" \n",
" \n",
- " 18:00-20:00 \n",
- " 21 \n",
+ " TYE \n",
+ " Total Yearly Effort \n",
" \n",
" \n",
- " 17:30-18:00 \n",
- " 18 \n",
+ " DE \n",
+ " Development Effort \n",
" \n",
" \n",
- " 19:00-20:00 \n",
- " 17 \n",
+ " TE \n",
+ " Total Effort \n",
" \n",
" \n",
"
\n"
],
"text/plain": [
- ""
+ ""
]
},
"metadata": {},
@@ -1515,42 +1773,7 @@
}
],
"source": [
- "if setting_bag.show_time_ranges_df:\n",
- "\n",
- " time_ranges_df : DataFrame = tt_manager.create_time_ranges_df(\n",
- " sessions_df = sessions_df, \n",
- " unknown_id = setting_bag.time_ranges_unknown_id)\n",
- " \n",
- " if setting_bag.show_time_ranges_df:\n",
- " time_ranges_df = tt_manager.remove_unknown_id(\n",
- " time_ranges_df = time_ranges_df, \n",
- " unknown_id = setting_bag.time_ranges_unknown_id)\n",
- " \n",
- " if setting_bag.time_ranges_filter_by_top_n:\n",
- " time_ranges_df = tt_manager.filter_by_top_n_occurrences(\n",
- " time_ranges_df = time_ranges_df, \n",
- " n = setting_bag.time_ranges_top_n)\n",
- "\n",
- " displayer.display(df = time_ranges_df)\n"
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {},
- "source": [
- "#### Main : Markdown"
- ]
- },
- {
- "cell_type": "code",
- "execution_count": 17,
- "metadata": {},
- "outputs": [],
- "source": [
- "component_bag : ComponentBag = ComponentBag()\n",
- "markdown_processor : MarkdownProcessor = MarkdownProcessor(component_bag = component_bag, setting_bag = setting_bag)\n",
- "\n",
- "markdown_processor.process_tts_by_month_md(tts_by_month_upd_df = tts_by_month_upd_df)"
+ "tt_processor.process_definitions()"
]
}
],
diff --git a/src/nwtimetracking.py b/src/nwtimetracking.py
index 624f550..82f8d0b 100644
--- a/src/nwtimetracking.py
+++ b/src/nwtimetracking.py
@@ -9,18 +9,70 @@
import os
import pandas as pd
import re
-import openpyxl
-from dataclasses import dataclass
-from datetime import datetime
-from datetime import timedelta
-from pandas import DataFrame
-from pandas import Series
-from typing import Any, Callable, Optional, cast
+from dataclasses import dataclass, field
+from datetime import datetime, timedelta
+from enum import StrEnum
+from numpy import uint
+from pandas import DataFrame, Series, NamedAgg
+from typing import Any, Callable, Literal, Optional, Tuple, cast
# LOCAL MODULES
-from nwshared import Formatter, FilePathManager, FileManager, LambdaProvider, MarkdownHelper
+from nwshared import Formatter, FilePathManager, FileManager, LambdaProvider, MarkdownHelper, Displayer
# CONSTANTS
+class TTCN(StrEnum):
+
+ '''Collects all the column names used by TTDataFrameFactory.'''
+
+ DATE = "Date"
+ STARTTIME = "StartTime"
+ ENDTIME = "EndTime"
+ EFFORT = "Effort"
+ HASHTAG = "Hashtag"
+ DESCRIPTOR = "Descriptor"
+ ISSOFTWAREPROJECT = "IsSoftwareProject"
+ ISRELEASEDAY = "IsReleaseDay"
+ YEAR = "Year"
+ MONTH = "Month"
+ PROJECTNAME = "ProjectName"
+ PROJECTVERSION = "ProjectVersion"
+ DME = "DME"
+ TME = "TME"
+ DYE = "DYE"
+ TYE = "TYE"
+ TREND = "↕"
+ EFFORTPRC = "Effort%"
+ YEARLYTARGET = "YearlyTarget"
+ TARGETDIFF = "TargetDiff"
+ ISTARGETMET = "IsTargetMet"
+ YEARLYTOTAL = "YearlyTotal"
+ TOTARGET = "ToTarget"
+ PERCDME = "%_DME"
+ PERCTME = "%_TME"
+ PERCDYE = "%_DYE"
+ PERCTYE = "%_TYE"
+ DE = "DE"
+ TE = "TE"
+ PERCDE = "%_DE"
+ PERCTE = "%_TE"
+ EFFORTSTATUS = "EffortStatus"
+ ESISCORRECT = "ES_IsCorrect"
+ ESEXPECTED = "ES_Expected"
+ ESMESSAGE = "ES_Message"
+ TIMERANGEID = "TimeRangeId"
+ OCCURRENCES = "Occurrences"
+class TTID(StrEnum):
+
+ '''Collects all the ids that identify the dataframes created by TTDataFrameFactory.'''
+
+ TTSBYMONTH = "tts_by_month"
+class DEFINITIONSCN(StrEnum):
+
+ '''Collects all the column names used by definitions.'''
+
+ TERM = "Term"
+ DEFINITION = "Definition"
+
# STATIC CLASSES
class _MessageCollection():
@@ -36,8 +88,7 @@ def effort_status_mismatching_effort(idx : int, start_time_str : str, end_time_s
message : str = "The provided row contains a mismatching effort "
message += f"(idx: '{idx}', start_time: '{start_time_str}', end_time: '{end_time_str}', actual_effort: '{actual_str}', expected_effort: '{expected_str}')."
- return message
-
+ return message
@staticmethod
def effort_status_not_possible_to_create(idx : int, start_time_str : str, end_time_str : str, effort_str : str):
@@ -49,11 +100,26 @@ def effort_status_not_possible_to_create(idx : int, start_time_str : str, end_ti
message : str = "It has not been possible to create an EffortStatus for the provided parameters "
message += f"(idx: '{idx}', start_time_str: '{start_time_str}', end_time_str: '{end_time_str}', effort_str: '{effort_str}')."
- return message
-
+ return message
@staticmethod
def effort_status_not_among_expected_time_values(time : str) -> str:
- return f"The provided time ('{time}') is not among the expected time values."
+ return f"The provided time ('{time}') is not among the expected time values."
+ @staticmethod
+ def starttime_endtime_are_empty() -> str:
+ return "''start_time' and/or 'end_time' are empty, 'effort' can't be verified. We assume that it's correct."
+ @staticmethod
+ def effort_is_correct() -> str:
+ return "The effort is correct."
+
+ @staticmethod
+ def no_mdinfo_found(id : TTID) -> str:
+ return f"No MDInfo object found for id='{id}'."
+ @staticmethod
+ def please_run_initialize_first() -> str:
+ return "Please run the 'initialize' method first."
+ @staticmethod
+ def this_content_successfully_saved_as(id : TTID, file_path : str) -> str:
+ return f"This content (id: '{id}') has been successfully saved as '{file_path}'."
# DTOs
@dataclass(frozen=True)
@@ -83,161 +149,38 @@ class EffortStatus():
is_correct : bool
message : str
+@dataclass(frozen = True)
+class MDInfo():
+
+ '''Represents a collection of information related to a Markdown file.'''
+
+ id : TTID
+ file_name : str
+ paragraph_title : str
+@dataclass(frozen = True)
+class TTSummary():
+
+ '''Collects all the dataframes and markdowns.'''
+
+ # Dataframes
+ tt_df : DataFrame
+ tts_by_month_tpl : Tuple[DataFrame, DataFrame]
+ tts_by_year_df : DataFrame
+ tts_by_year_month_tpl : Tuple[DataFrame, DataFrame]
+ tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame]
+ tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame]
+ tts_by_spn_df : DataFrame
+ tts_by_spn_spv_df : DataFrame
+ tts_by_hashtag_df : DataFrame
+ tts_by_hashtag_year_df : DataFrame
+ tts_by_efs_tpl : Tuple[DataFrame, DataFrame]
+ tts_by_tr_df : DataFrame
+ definitions_df : DataFrame
+
+ # Markdowns
+ tts_by_month_md : str
# CLASSES
-class SettingBag():
-
- '''Represents a collection of settings.'''
-
- years : list[int]
- yearly_targets : list[YearlyTarget]
- excel_path : str
- excel_books_nrows : int
- software_project_names : list[str]
- software_project_names_by_spv : list[str]
- tt_by_year_hashtag_years : list[int]
-
- show_sessions_df : bool
- show_tt_by_year_df : bool
- show_tt_by_year_month_df : bool
- show_tt_by_year_month_spnv_df : bool
- show_tt_by_year_spnv_df : bool
- show_tt_by_spn_df : bool
- show_tt_by_spn_spv_df : bool
- show_tt_by_year_hashtag : bool
- show_tt_by_hashtag : bool
- show_tts_by_month_df : bool
- show_effort_status_df : bool
- show_time_ranges_df : bool
- excel_books_skiprows : int
- excel_books_tabname : str
- n_generic : int
- n_by_month : int
- now : datetime
- remove_untagged_from_de : bool
- definitions : dict[str, str]
- effort_status_n : int
- effort_status_is_correct : bool
- tts_by_month_update_future_values_to_empty : bool
- time_ranges_unknown_id : str
- time_ranges_top_n : int
- time_ranges_remove_unknown_id : bool
- time_ranges_filter_by_top_n : bool
- working_folder_path : str
- last_update : datetime
- tts_by_month_file_name : str
- show_tts_by_month_md : bool
- save_tts_by_month_md : bool
-
- def __init__(
- self,
- years : list[int],
- yearly_targets : list[YearlyTarget],
- excel_path : str,
- excel_books_nrows : int,
- software_project_names : list[str],
- software_project_names_by_spv : list[str],
- tt_by_year_hashtag_years : list[int],
-
- show_sessions_df : bool = False,
- show_tt_by_year_df : bool = True,
- show_tt_by_year_month_df : bool = True,
- show_tt_by_year_month_spnv_df : bool = False,
- show_tt_by_year_spnv_df : bool = False,
- show_tt_by_spn_df : bool = True,
- show_tt_by_spn_spv_df : bool = True,
- show_tt_by_year_hashtag : bool = True,
- show_tt_by_hashtag : bool = True,
- show_tts_by_month_df : bool = True,
- show_effort_status_df : bool = True,
- show_time_ranges_df : bool = True,
- excel_books_skiprows : int = 0,
- excel_books_tabname : str = "Sessions",
- n_generic : int = 5,
- n_by_month : int = 12,
- now : datetime = datetime.now(),
- remove_untagged_from_de : bool = True,
- definitions : dict[str, str] = {
- "DME": "Development Monthly Effort",
- "TME": "Total Monthly Effort",
- "DYE": "Development Yearly Effort",
- "TYE": "Total Yearly Effort",
- "DE": "Development Effort",
- "TE": "Total Effort"
- },
- effort_status_n : int = 25,
- effort_status_is_correct : bool = False,
- tts_by_month_update_future_values_to_empty : bool = True,
- time_ranges_unknown_id : str = "Unknown",
- time_ranges_top_n : int = 5,
- time_ranges_remove_unknown_id : bool = True,
- time_ranges_filter_by_top_n : bool = True,
- working_folder_path : str = "/home/nwtimetracking/",
- last_update : datetime = datetime.now(),
- tts_by_month_file_name : str = "TIMETRACKINGBYMONTH.md",
- show_tts_by_month_md : bool = False,
- save_tts_by_month_md : bool = False
- ) -> None:
-
- self.years = years
- self.yearly_targets = yearly_targets
- self.excel_path = excel_path
- self.excel_books_nrows = excel_books_nrows
- self.software_project_names = software_project_names
- self.software_project_names_by_spv = software_project_names_by_spv
- self.tt_by_year_hashtag_years = tt_by_year_hashtag_years
-
- self.show_sessions_df = show_sessions_df
- self.show_tt_by_year_df = show_tt_by_year_df
- self.show_tt_by_year_month_df = show_tt_by_year_month_df
- self.show_tt_by_year_month_spnv_df = show_tt_by_year_month_spnv_df
- self.show_tt_by_year_spnv_df = show_tt_by_year_spnv_df
- self.show_tt_by_spn_df = show_tt_by_spn_df
- self.show_tt_by_spn_spv_df = show_tt_by_spn_spv_df
- self.show_tt_by_year_hashtag = show_tt_by_year_hashtag
- self.show_tt_by_hashtag = show_tt_by_hashtag
- self.show_tts_by_month_df = show_tts_by_month_df
- self.show_effort_status_df = show_effort_status_df
- self.show_time_ranges_df = show_time_ranges_df
- self.excel_books_skiprows = excel_books_skiprows
- self.excel_books_tabname = excel_books_tabname
- self.n_generic = n_generic
- self.n_by_month = n_by_month
- self.now = now
- self.remove_untagged_from_de = remove_untagged_from_de
- self.definitions = definitions
- self.effort_status_n = effort_status_n
- self.effort_status_is_correct = effort_status_is_correct
- self.tts_by_month_update_future_values_to_empty = tts_by_month_update_future_values_to_empty
- self.time_ranges_unknown_id = time_ranges_unknown_id
- self.time_ranges_top_n = time_ranges_top_n
- self.time_ranges_remove_unknown_id = time_ranges_remove_unknown_id
- self.time_ranges_filter_by_top_n = time_ranges_filter_by_top_n
- self.working_folder_path = working_folder_path
- self.last_update = last_update
- self.tts_by_month_file_name = tts_by_month_file_name
- self.show_tts_by_month_md = show_tts_by_month_md
- self.save_tts_by_month_md = save_tts_by_month_md
-class ComponentBag():
-
- '''Represents a collection of components.'''
-
- file_path_manager : FilePathManager
- file_manager : FileManager
- logging_function : Callable[[str], None]
- markdown_helper : MarkdownHelper
-
- def __init__(
- self,
- file_path_manager : FilePathManager = FilePathManager(),
- file_manager : FileManager = FileManager(file_path_manager = FilePathManager()),
- logging_function : Callable[[str], None] = LambdaProvider().get_default_logging_function(),
- markdown_helper : MarkdownHelper = MarkdownHelper(formatter = Formatter())) -> None:
-
- self.file_path_manager = file_path_manager
- self.file_manager = file_manager
- self.logging_function = logging_function
- self.markdown_helper = markdown_helper
class DefaultPathProvider():
'''Responsible for providing the default path to the dataset.'''
@@ -281,6 +224,16 @@ def get_all_yearly_targets(self) -> list[YearlyTarget]:
]
return yearly_targets
+ def get_most_recent_x_years(self, x : uint) -> list[int]:
+
+ '''Returns the most recent x years among all the available ones.'''
+
+ years : list[int] = self.get_all_years()
+
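+ # Keep only the last x entries; when x exceeds the number of available years, all years are returned unchanged.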
+ if x <= len(years):
+ years = years[(len(years) - int(x)):]
+
+ return years
class SoftwareProjectNameProvider():
'''Collects all the logic related to the retrieval of software project names.'''
@@ -323,79 +276,99 @@ def get_all_software_project_names_by_spv(self) -> list[str]:
]
return software_project_names_by_spv
-class TimeTrackingManager():
+class MDInfoProvider():
- '''Collects all the logic related to the management of "Time Tracking.xlsx".'''
+ '''Collects all the logic related to the retrieval of MDInfo objects.'''
- def __enforce_dataframe_definition_for_sessions_df(self, sessions_df : DataFrame) -> DataFrame:
+ def get_all(self) -> list[MDInfo]:
- '''Enforces definition for the provided dataframe.'''
-
- column_names : list[str] = []
- column_names.append("Date") # [0], date
- column_names.append("StartTime") # [1], str
- column_names.append("EndTime") # [2], str
- column_names.append("Effort") # [3], str
- column_names.append("Hashtag") # [4], str
- column_names.append("Descriptor") # [5], str
- column_names.append("IsSoftwareProject") # [6], bool
- column_names.append("IsReleaseDay") # [7], bool
- column_names.append("Year") # [8], int
- column_names.append("Month") # [9], int
-
- sessions_df = sessions_df[column_names]
-
- sessions_df[column_names[0]] = pd.to_datetime(sessions_df[column_names[0]], format="%Y-%m-%d")
- sessions_df[column_names[0]] = sessions_df[column_names[0]].apply(lambda x: x.date())
-
- sessions_df = sessions_df.astype({column_names[1]: str})
- sessions_df = sessions_df.astype({column_names[2]: str})
- sessions_df = sessions_df.astype({column_names[3]: str})
- sessions_df = sessions_df.astype({column_names[4]: str})
- sessions_df = sessions_df.astype({column_names[5]: str})
- sessions_df = sessions_df.astype({column_names[6]: bool})
- sessions_df = sessions_df.astype({column_names[7]: bool})
- sessions_df = sessions_df.astype({column_names[8]: int})
- sessions_df = sessions_df.astype({column_names[9]: int})
-
- sessions_df[column_names[1]] = sessions_df[column_names[1]].replace('nan', '')
- sessions_df[column_names[2]] = sessions_df[column_names[2]].replace('nan', '')
- sessions_df[column_names[5]] = sessions_df[column_names[5]].replace('nan', '')
-
- return sessions_df
- def __enforce_dataframe_definition_for_raw_ttm_df(self, df : DataFrame) -> DataFrame:
+ '''Returns a list of MDInfo objects.'''
- '''Ensures that the columns of the provided dataframe have the expected data types.'''
+ md_infos : list[MDInfo] = [
+ MDInfo(id = TTID.TTSBYMONTH, file_name = "TIMETRACKINGBYMONTH.md", paragraph_title = "Time Tracking By Month")
+ ]
+
+ return md_infos
+@dataclass(frozen=True)
+class SettingBag():
- cn_month : str = "Month"
+ '''Represents a collection of settings.'''
- df = df.astype({cn_month: int})
- # can't enforce the year column as "timedelta"
+ # Without Defaults
+ options_tt : list[Literal["display"]]
+ options_tts_by_month : list[Literal["display", "save"]]
+ options_tts_by_year : list[Literal["display"]]
+ options_tts_by_year_month : list[Literal["display"]]
+ options_tts_by_year_month_spnv : list[Literal["display"]]
+ options_tts_by_year_spnv : list[Literal["display"]]
+ options_tts_by_spn : list[Literal["display", "log"]]
+ options_tts_by_spn_spv : list[Literal["display", "log"]]
+ options_tts_by_hashtag : list[Literal["display"]]
+ options_tts_by_hashtag_year : list[Literal["display"]]
+ options_tts_by_efs : list[Literal["display"]]
+ options_tts_by_tr : list[Literal["display"]]
+ options_definitions : list[Literal["display"]]
+ excel_nrows : int
+ tts_by_year_month_spnv_display_only_spn : Optional[str]
+ tts_by_year_spnv_display_only_spn : Optional[str]
+ tts_by_spn_spv_display_only_spn : Optional[str]
+
+ # With Defaults
+ working_folder_path : str = field(default = "/home/nwtimetracking/")
+ excel_path : str = field(default = DefaultPathProvider().get_default_time_tracking_path())
+ excel_skiprows : int = field(default = 0)
+ excel_tabname : str = field(default = "Sessions")
+ years : list[int] = field(default_factory = lambda : YearProvider().get_all_years())
+ yearly_targets : list[YearlyTarget] = field(default_factory = lambda : YearProvider().get_all_yearly_targets())
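+ # Note: the datetime.now() defaults below are evaluated once, when the dataclass is defined, not per instance.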
+ now : datetime = field(default = datetime.now())
+ software_project_names : list[str] = field(default_factory = lambda : SoftwareProjectNameProvider().get_all_software_project_names())
+ software_project_names_by_spv : list[str] = field(default_factory = lambda : SoftwareProjectNameProvider().get_all_software_project_names_by_spv())
+ tt_head_n : Optional[uint] = field(default = uint(5))
+ tt_display_head_n_with_tail : bool = field(default = True)
+ tt_hide_index : bool = field(default = True)
+ tts_by_year_month_display_only_years : Optional[list[int]] = field(default_factory = lambda : YearProvider().get_most_recent_x_years(x = uint(1)))
+ tts_by_year_month_spnv_formatters : dict = field(default_factory = lambda : { "%_DME" : "{:.2f}", "%_TME" : "{:.2f}" })
+ tts_by_year_spnv_formatters : dict = field(default_factory = lambda : { "%_DYE" : "{:.2f}", "%_TYE" : "{:.2f}" })
+ tts_by_spn_formatters : dict = field(default_factory = lambda : { "%_DE" : "{:.2f}", "%_TE" : "{:.2f}" })
+ tts_by_spn_remove_untagged : bool = field(default = True)
+ tts_by_hashtag_formatters : dict = field(default_factory = lambda : { "Effort%" : "{:.2f}" })
+ tts_by_efs_is_correct : bool = field(default = False)
+ tts_by_efs_n : uint = field(default = uint(25))
+ tts_by_tr_unknown_id : str = field(default = "Unknown")
+ tts_by_tr_remove_unknown_occurrences : bool = field(default = True)
+ tts_by_tr_filter_by_top_n : Optional[uint] = field(default = uint(5))
+ tts_by_tr_head_n : Optional[uint] = field(default = uint(10))
+ tts_by_tr_display_head_n_with_tail : bool = field(default = False)
+ md_infos : list[MDInfo] = field(default_factory = lambda : MDInfoProvider().get_all())
+ md_last_update : datetime = field(default = datetime.now())
+class TTDataFrameHelper():
+
+ '''Collects helper functions for TTDataFrameFactory.'''
+
+ def calculate_percentage(self, part : float, whole : float, rounding_digits : int = 2) -> float:
- return df
- def __convert_string_to_timedelta(self, td_str : str) -> timedelta:
+ '''Calculates a percentage.'''
- '''"5h 30m" => 5:30:00'''
+ prct : Optional[float] = None
- td : timedelta = pd.Timedelta(value = td_str).to_pytimedelta()
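+ # Guard clauses: a zero part or a zero whole yields 0 and avoids a division by zero.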
+ if part == 0:
+ prct = 0
+ elif whole == 0:
+ prct = 0
+ else:
+ prct = (100 * part) / whole
- return td
- def __get_yearly_target(self, yearly_targets : list[YearlyTarget], year : int) -> Optional[YearlyTarget]:
+ prct = round(number = prct, ndigits = rounding_digits)
- '''Retrieves the YearlyTarget object for the provided "year" or None.'''
+ return prct
+ def convert_string_to_timedelta(self, td_str : str) -> timedelta:
- for yearly_target in yearly_targets:
- if yearly_target.year == year:
- return yearly_target
-
- return None
- def __is_yearly_target_met(self, effort : timedelta, yearly_target : timedelta) -> bool:
+ '''"5h 30m" => 5:30:00'''
- if effort >= yearly_target:
- return True
+ td : timedelta = pd.Timedelta(value = td_str).to_pytimedelta()
- return False
- def __format_timedelta(self, td : timedelta, add_plus_sign : bool) -> str:
+ return td
+ def format_timedelta(self, td : timedelta, add_plus_sign : bool) -> str:
'''
4 days 19:15:00 => "115h 15m" (or +115h 15m)
@@ -414,7 +387,50 @@ def __format_timedelta(self, td : timedelta, add_plus_sign : bool) -> str:
formatted = f"+{formatted}"
return formatted
- def __extract_software_project_name(self, descriptor : str) -> str:
+ def get_trend_by_timedelta(self, td_1 : timedelta, td_2 : timedelta) -> str:
+
+ '''
+ 0h 30m, 1h 00m => "↑"
+ 1h 00m, 0h 30m => "↓"
+ 0, 0 => "="
+ '''
+ trend : Optional[str] = None
+
+ if td_1 < td_2:
+ trend = "↑"
+ elif td_1 > td_2:
+ trend = "↓"
+ else:
+ trend = "="
+
+ return trend
+ def try_consolidate_trend_column_name(self, column_name : str) -> str:
+
+ '''
+ "2016" => "2016"
+ "↕1" => "↕"
+ '''
+
+ if column_name.startswith(TTCN.TREND):
+ return TTCN.TREND
+
+ return column_name
+ def get_yearly_target(self, yearly_targets : list[YearlyTarget], year : int) -> Optional[YearlyTarget]:
+
+ '''Retrieves the YearlyTarget object for the provided "year" or None.'''
+
+ for yearly_target in yearly_targets:
+ if yearly_target.year == year:
+ return yearly_target
+
+ return None
+ def is_yearly_target_met(self, effort : timedelta, yearly_target : timedelta) -> bool:
+
+ if effort >= yearly_target:
+ return True
+
+ return False
+ def extract_software_project_name(self, descriptor : str) -> str:
'''
"NW.AutoProffLibrary v1.0.0" => "NW.AutoProffLibrary"
@@ -430,7 +446,7 @@ def __extract_software_project_name(self, descriptor : str) -> str:
return matches[0]
return "ERROR"
- def __extract_software_project_version(self, descriptor : str) -> str:
+ def extract_software_project_version(self, descriptor : str) -> str:
'''
"NW.AutoProffLibrary v1.0.0" => "1.0.0"
@@ -446,154 +462,321 @@ def __extract_software_project_version(self, descriptor : str) -> str:
return matches[0]
return "ERROR"
- def __calculate_percentage(self, part : float, whole : float, rounding_digits : int = 2) -> float:
+ def create_time_object(self, time : str) -> datetime:
- '''Calculates a percentage.'''
+ '''Creates a datetime object suitable for timedelta calculations out of the provided time.'''
- prct : Optional[float] = None
+ day_1_times : list[str] = [
+ "07:00", "07:15", "07:30", "07:45",
+ "08:00", "08:15", "08:30", "08:45",
+ "09:00", "09:15", "09:30", "09:45",
+ "10:00", "10:15", "10:30", "10:45",
+ "11:00", "11:15", "11:30", "11:45",
+ "12:00", "12:15", "12:30", "12:45",
+ "13:00", "13:15", "13:30", "13:45",
+ "14:00", "14:15", "14:30", "14:45",
+ "15:00", "15:15", "15:30", "15:45",
+ "16:00", "16:15", "16:30", "16:45",
+ "17:00", "17:15", "17:30", "17:45",
+ "18:00", "18:15", "18:30", "18:45",
+ "19:00", "19:15", "19:30", "19:45",
+ "20:00", "20:15", "20:30", "20:45",
+ "21:00", "21:15", "21:30", "21:45",
+ "22:00", "22:15", "22:30", "22:45",
+ "23:00", "23:15", "23:30", "23:45"
+ ]
+ day_2_times : list[str] = [
+ "00:00", "00:15", "00:30", "00:45",
+ "01:00", "01:15", "01:30", "01:45",
+ "02:00", "02:15", "02:30", "02:45",
+ "03:00", "03:15", "03:30", "03:45",
+ "04:00", "04:15", "04:30", "04:45",
+ "05:00", "05:15", "05:30", "05:45",
+ "06:00", "06:15", "06:30", "06:45"
+ ]
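+ # Times from 07:00 to 23:45 are mapped to day 1 and times from 00:00 to 06:45 to day 2, so that (end - start) stays positive for sessions that cross midnight.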
- if part == 0:
- prct = 0
- elif whole == 0:
- prct = 0
- else:
- prct = (100 * part) / whole
+ strp_format : str = "%Y-%m-%d %H:%M"
- prct = round(number = prct, ndigits = rounding_digits)
+ dt_str : Optional[str] = None
+ if time in day_1_times:
+ dt_str = f"1900-01-01 {time}"
+ elif time in day_2_times:
+ dt_str = f"1900-01-02 {time}"
+ else:
+ raise ValueError(_MessageCollection.effort_status_not_among_expected_time_values(time = time))
+
+ dt : datetime = datetime.strptime(dt_str, strp_format)
- return prct
- def __get_raw_tt_by_year_month_spnv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
-
+ return dt
+ def create_time_range_id(self, start_time : str, end_time : str, unknown_id : str) -> str:
+
'''
- Year Month ProjectName ProjectVersion Effort
- 0 2023 4 nwtraderaanalytics 2.0.0 0 days 09:15:00
- 1 2023 5 NW.AutoProffLibrary 1.0.0 0 days 09:30:00
- ...
+ Creates a unique time range identifier out of the provided parameters.
+ If either parameter is empty, it returns unknown_id.
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ time_range_id : str = f"{start_time}-{end_time}"
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ if len(start_time) == 0 or len(end_time) == 0:
+ time_range_id = unknown_id
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- cn_project_version : str = "ProjectVersion"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
- tt_df[cn_project_version] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_version(descriptor = x))
-
- cn_month : str = "Month"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_month, cn_project_name, cn_project_version])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year, cn_month, cn_project_name, cn_project_version]).reset_index(drop = True)
-
- condition_three : Series = (tt_df[cn_project_name].isin(values = software_project_names))
- tt_df = tt_df.loc[condition_three]
+ return time_range_id
+ def create_effort_status(self, idx : int, start_time_str : str, end_time_str : str, effort_str : str) -> EffortStatus:
- return tt_df
- def __get_raw_dme(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
-
'''
- Year Month DME
- 0 2023 4 0 days 09:15:00
- 1 2023 6 0 days 06:45:00
- ...
+ start_time_str, end_time_str:
+ - Expects time values in the "%H:%M" format - for ex. 20:00.
- DME = DevelopmentMonthlyEffort
+ is_correct:
+ start_time_str = "20:00", end_time_str = "00:00", effort_str = "4h 00m" => True
+ start_time_str = "20:00", end_time_str = "00:00", effort_str = "5h 00m" => False
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
-
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ try:
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- cn_project_version : str = "ProjectVersion"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
- tt_df[cn_project_version] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_version(descriptor = x))
+ if len(start_time_str) == 0 or len(end_time_str) == 0:
+ return self.create_effort_status_for_none_values(idx = idx, effort_str = effort_str)
- cn_month : str = "Month"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_month])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year, cn_month]).reset_index(drop = True)
-
- cn_dme : str = "DME"
- tt_df.rename(columns = {cn_effort : cn_dme}, inplace = True)
+ start_time_dt : datetime = self.create_time_object(time = start_time_str)
+ end_time_dt : datetime = self.create_time_object(time = end_time_str)
- return tt_df
- def __get_raw_tme(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
-
- '''
- Year Month TME
- 0 2023 4 0 days 09:15:00
- 1 2023 6 0 days 06:45:00
- ...
+ actual_str : str = effort_str
+ actual_td : timedelta = self.convert_string_to_timedelta(td_str = effort_str)
- TME = TotalMonthlyEffort
- '''
+ expected_td : timedelta = (end_time_dt - start_time_dt)
+ expected_str : str = self.format_timedelta(td = expected_td, add_plus_sign = False)
+
+ is_correct : bool = True
+ if actual_td != expected_td:
+ is_correct = False
+
+ message : str = _MessageCollection.effort_is_correct()
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ if actual_td != expected_td:
+ message = _MessageCollection.effort_status_mismatching_effort(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ actual_str = actual_str,
+ expected_str = expected_str
+ )
+
+ effort_status : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = start_time_str,
+ start_time_dt = start_time_dt,
+ end_time_str = end_time_str,
+ end_time_dt = end_time_dt,
+ actual_str = actual_str,
+ actual_td = actual_td,
+ expected_td = expected_td,
+ expected_str = expected_str,
+ is_correct = is_correct,
+ message = message
+ )
- cn_year : str = "Year"
- condition : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition]
+ return effort_status
+
+ except:
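+ # Any parsing failure is re-raised as a ValueError that carries the offending row's details.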
- cn_month : str = "Month"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_month])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year, cn_month]).reset_index(drop = True)
-
- cn_tme : str = "TME"
- tt_df.rename(columns = {cn_effort : cn_tme}, inplace = True)
+ error : str = _MessageCollection.effort_status_not_possible_to_create(
+ idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str)
- return tt_df
- def __get_raw_tt_by_year_spnv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
-
- '''
- Year ProjectName ProjectVersion Effort
- 0 2023 nwtraderaanalytics 2.0.0 0 days 09:15:00
- 1 2023 NW.AutoProffLibrary 1.0.0 0 days 09:30:00
- ...
- '''
+ raise ValueError(error)
+ def create_effort_status_for_none_values(self, idx : int, effort_str : str) -> EffortStatus:
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ '''Creates effort status for None values.'''
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ actual_str : str = effort_str
+ actual_td : timedelta = self.convert_string_to_timedelta(td_str = effort_str)
+ is_correct : bool = True
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- cn_project_version : str = "ProjectVersion"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
- tt_df[cn_project_version] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_version(descriptor = x))
+ effort_status : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = None,
+ start_time_dt = None,
+ end_time_str = None,
+ end_time_dt = None,
+ actual_str = actual_str,
+ actual_td = actual_td,
+ expected_td = None,
+ expected_str = None,
+ is_correct = is_correct,
+ message = _MessageCollection.starttime_endtime_are_empty()
+ )
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_project_name, cn_project_version])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year, cn_project_name, cn_project_version]).reset_index(drop = True)
-
- condition_three : Series = (tt_df[cn_project_name].isin(values = software_project_names))
- tt_df = tt_df.loc[condition_three]
- tt_df = tt_df.sort_values(by = [cn_year, cn_project_name, cn_project_version]).reset_index(drop = True)
+ return effort_status
+ def create_effort_status_and_cast_to_any(self, idx : int, start_time_str : str, end_time_str : str, effort_str : str) -> Any:
- return tt_df
- def __get_raw_dye(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
-
'''
- Year DYE
+ Wrapper method created to overcome the following error raised by df.apply():
+
+ Argument of type "(x: Unknown) -> EffortStatus" cannot be assigned to parameter "f" of type "(...) -> Series[Any]" in function "apply"
+ Type "(x: Unknown) -> EffortStatus" is not assignable to type "(...) -> Series[Any]"
+ Function return type "EffortStatus" is incompatible with type "Series[Any]"
+ "EffortStatus" is not assignable to "Series[Any]"
+ '''
+
+ return cast(Any, self.create_effort_status(idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str))
+class TTDataFrameFactory():
+
+ '''Collects all the logic related to dataframe creation out of "Time Tracking.xlsx".'''
+
+ __df_helper : TTDataFrameHelper
+
+ def __init__(self, df_helper : TTDataFrameHelper) -> None:
+
+ self.__df_helper = df_helper
+
+ def __enforce_dataframe_definition_for_tt_df(self, tt_df : DataFrame) -> DataFrame:
+
+ '''Enforces definition for the provided dataframe.'''
+
+ column_names : list[str] = []
+ column_names.append(TTCN.DATE) # [0], date
+ column_names.append(TTCN.STARTTIME) # [1], str
+ column_names.append(TTCN.ENDTIME) # [2], str
+ column_names.append(TTCN.EFFORT) # [3], str
+ column_names.append(TTCN.HASHTAG) # [4], str
+ column_names.append(TTCN.DESCRIPTOR) # [5], str
+ column_names.append(TTCN.ISSOFTWAREPROJECT) # [6], bool
+ column_names.append(TTCN.ISRELEASEDAY) # [7], bool
+ column_names.append(TTCN.YEAR) # [8], int
+ column_names.append(TTCN.MONTH) # [9], int
+
+ tt_df = tt_df[column_names]
+
+ tt_df[column_names[0]] = pd.to_datetime(tt_df[column_names[0]], format="%Y-%m-%d")
+ tt_df[column_names[0]] = tt_df[column_names[0]].apply(lambda x: x.date())
+
+ tt_df = tt_df.astype({column_names[1]: str})
+ tt_df = tt_df.astype({column_names[2]: str})
+ tt_df = tt_df.astype({column_names[3]: str})
+ tt_df = tt_df.astype({column_names[4]: str})
+ tt_df = tt_df.astype({column_names[5]: str})
+ tt_df = tt_df.astype({column_names[6]: bool})
+ tt_df = tt_df.astype({column_names[7]: bool})
+ tt_df = tt_df.astype({column_names[8]: int})
+ tt_df = tt_df.astype({column_names[9]: int})
+
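+ # Empty Excel cells become the literal string 'nan' after the astype(str) casts above; normalize them back to empty strings.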
+ tt_df[column_names[1]] = tt_df[column_names[1]].replace('nan', '')
+ tt_df[column_names[2]] = tt_df[column_names[2]].replace('nan', '')
+ tt_df[column_names[5]] = tt_df[column_names[5]].replace('nan', '')
+
+ return tt_df
+ def __enforce_dataframe_definition_for_raw_ttm_df(self, df : DataFrame) -> DataFrame:
+
+ '''Ensures that the columns of the provided dataframe have the expected data types.'''
+
+ df = df.astype({TTCN.MONTH: int})
+ # can't enforce the year column as "timedelta"
+
+ return df
+ def __create_raw_tts_by_year_month_spnv(self, tt_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+
+ '''
+ Year Month ProjectName ProjectVersion Effort
+ 0 2023 4 nwtraderaanalytics 2.0.0 0 days 09:15:00
+ 1 2023 5 NW.AutoProffLibrary 1.0.0 0 days 09:30:00
+ ...
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
+
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.PROJECTVERSION] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_version(descriptor = x))
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.MONTH, TTCN.PROJECTNAME, TTCN.PROJECTVERSION])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.MONTH, TTCN.PROJECTNAME, TTCN.PROJECTVERSION]).reset_index(drop = True)
+
+ condition_three : Series = (tts_df[TTCN.PROJECTNAME].isin(values = software_project_names))
+ tts_df = tts_df.loc[condition_three]
+
+ return tts_df
+ def __create_raw_tts_by_dme(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
+
+ '''
+ Year Month DME
+ 0 2023 4 0 days 09:15:00
+ 1 2023 6 0 days 06:45:00
+ ...
+
+ DME = DevelopmentMonthlyEffort
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
+
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.PROJECTVERSION] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_version(descriptor = x))
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.MONTH])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.MONTH]).reset_index(drop = True)
+ tts_df.rename(columns = {TTCN.EFFORT : TTCN.DME}, inplace = True)
+
+ return tts_df
+ def __create_raw_tts_by_tme(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
+
+ '''
+ Year Month TME
+ 0 2023 4 0 days 09:15:00
+ 1 2023 6 0 days 06:45:00
+ ...
+
+ TME = TotalMonthlyEffort
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ condition : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition]
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.MONTH])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.MONTH]).reset_index(drop = True)
+ tts_df.rename(columns = {TTCN.EFFORT : TTCN.TME}, inplace = True)
+
+ return tts_df
+ def __create_raw_tts_by_year_spnv(self, tt_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+
+ '''
+ Year ProjectName ProjectVersion Effort
+ 0 2023 nwtraderaanalytics 2.0.0 0 days 09:15:00
+ 1 2023 NW.AutoProffLibrary 1.0.0 0 days 09:30:00
+ ...
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
+
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.PROJECTVERSION] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_version(descriptor = x))
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.PROJECTNAME, TTCN.PROJECTVERSION])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.PROJECTNAME, TTCN.PROJECTVERSION]).reset_index(drop = True)
+
+ condition_three : Series = (tts_df[TTCN.PROJECTNAME].isin(values = software_project_names))
+ tts_df = tts_df.loc[condition_three]
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.PROJECTNAME, TTCN.PROJECTVERSION]).reset_index(drop = True)
+
+ return tts_df
+ def __create_raw_tts_by_dye(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
+
+ '''
+ Year DYE
0 2023 0 days 09:15:00
1 2023 0 days 06:45:00
...
@@ -601,30 +784,22 @@ def __get_raw_dye(self, sessions_df : DataFrame, years : list[int]) -> DataFrame
DYE = DevelopmentYearlyEffort
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- cn_project_version : str = "ProjectVersion"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
- tt_df[cn_project_version] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_version(descriptor = x))
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.PROJECTVERSION] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_version(descriptor = x))
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year]).reset_index(drop = True)
-
- cn_dye : str = "DYE"
- tt_df.rename(columns = {cn_effort : cn_dye}, inplace = True)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR]).reset_index(drop = True)
+ tts_df.rename(columns = {TTCN.EFFORT : TTCN.DYE}, inplace = True)
- return tt_df
- def __get_raw_tye(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
+ return tts_df
+ def __create_raw_tts_by_tye(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
'''
Year TYE
@@ -635,22 +810,18 @@ def __get_raw_tye(self, sessions_df : DataFrame, years : list[int]) -> DataFrame
TYE = TotalYearlyEffort
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- condition : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition]
+ condition : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition]
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year]).reset_index(drop = True)
-
- cn_tye : str = "TYE"
- tt_df.rename(columns = {cn_effort : cn_tye}, inplace = True)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR]).reset_index(drop = True)
+ tts_df.rename(columns = {TTCN.EFFORT : TTCN.TYE}, inplace = True)
- return tt_df
- def __get_raw_tt_by_spn(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+ return tts_df
+ def __create_raw_tts_by_spn(self, tt_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
'''
Hashtag ProjectName Effort
@@ -661,69 +832,56 @@ def __get_raw_tt_by_spn(self, sessions_df : DataFrame, years : list[int], softwa
...
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
-
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
- cn_effort : str = "Effort"
- cn_hashtag : str = "Hashtag"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_project_name, cn_hashtag])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_project_name]).reset_index(drop = True)
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.PROJECTNAME, TTCN.HASHTAG])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.PROJECTNAME]).reset_index(drop = True)
- condition_three : Series = (tt_df[cn_project_name].isin(values = software_project_names))
- tt_df = tt_df.loc[condition_three]
- tt_df = tt_df.sort_values(by = [cn_hashtag, cn_effort], ascending = [False, False]).reset_index(drop = True)
+ condition_three : Series = (tts_df[TTCN.PROJECTNAME].isin(values = software_project_names))
+ tts_df = tts_df.loc[condition_three]
+ tts_df = tts_df.sort_values(by = [TTCN.HASHTAG, TTCN.EFFORT], ascending = [False, False]).reset_index(drop = True)
- tt_df = tt_df[[cn_hashtag, cn_project_name, cn_effort]]
+ tts_df = tts_df[[TTCN.HASHTAG, TTCN.PROJECTNAME, TTCN.EFFORT]]
- return tt_df
- def __get_raw_de(self, sessions_df : DataFrame, years : list[int]) -> timedelta:
+ return tts_df
+ def __create_raw_de(self, tt_df : DataFrame, years : list[int]) -> timedelta:
'''3 days 21:15:00'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- summarized : timedelta = tt_df[cn_effort].sum()
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ summarized : timedelta = tts_df[TTCN.EFFORT].sum()
return summarized
- def __get_raw_te(self, sessions_df : DataFrame, years : list[int], remove_untagged : bool) -> timedelta:
+ def __create_raw_te(self, tt_df : DataFrame, years : list[int], remove_untagged : bool) -> timedelta:
'''186 days 11:15:00'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition_one]
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition_one]
if remove_untagged:
- cn_hashtag : str = "Hashtag"
- condition_two : Series = (sessions_df[cn_hashtag] != "#untagged")
- tt_df = tt_df.loc[condition_two]
+ condition_two : Series = (tt_df[TTCN.HASHTAG] != "#untagged")
+ tts_df = tts_df.loc[condition_two]
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- summarized : timedelta = tt_df[cn_effort].sum()
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ summarized : timedelta = tts_df[TTCN.EFFORT].sum()
return summarized
- def __get_raw_tt_by_spn_spv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+ def __create_raw_tts_by_spn_spv(self, tt_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
'''
ProjectName ProjectVersion Effort
@@ -733,31 +891,25 @@ def __get_raw_tt_by_spn_spv(self, sessions_df : DataFrame, years : list[int], so
...
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- cn_is_software_project : str = "IsSoftwareProject"
- condition_one : Series = (sessions_df[cn_year].isin(values = years))
- condition_two : Series = (sessions_df[cn_is_software_project] == True)
- tt_df = tt_df.loc[condition_one & condition_two]
+ condition_one : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ condition_two : Series = (tt_df[TTCN.ISSOFTWAREPROJECT] == True)
+ tts_df = tts_df.loc[condition_one & condition_two]
- cn_descriptor : str = "Descriptor"
- cn_project_name : str = "ProjectName"
- cn_project_version : str = "ProjectVersion"
- tt_df[cn_project_name] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_name(descriptor = x))
- tt_df[cn_project_version] = tt_df[cn_descriptor].apply(lambda x : self.__extract_software_project_version(descriptor = x))
+ tts_df[TTCN.PROJECTNAME] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_name(descriptor = x))
+ tts_df[TTCN.PROJECTVERSION] = tts_df[TTCN.DESCRIPTOR].apply(lambda x : self.__df_helper.extract_software_project_version(descriptor = x))
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_project_name, cn_project_version])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_project_name, cn_project_version]).reset_index(drop = True)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.PROJECTNAME, TTCN.PROJECTVERSION])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.PROJECTNAME, TTCN.PROJECTVERSION]).reset_index(drop = True)
- condition_three : Series = (tt_df[cn_project_name].isin(values = software_project_names))
- tt_df = tt_df.loc[condition_three]
- tt_df = tt_df.sort_values(by = [cn_project_name, cn_project_version]).reset_index(drop = True)
+ condition_three : Series = (tts_df[TTCN.PROJECTNAME].isin(values = software_project_names))
+ tts_df = tts_df.loc[condition_three]
+ tts_df = tts_df.sort_values(by = [TTCN.PROJECTNAME, TTCN.PROJECTVERSION]).reset_index(drop = True)
- return tt_df
- def __get_default_raw_ttm(self, year : int) -> DataFrame:
+ return tts_df
+ def __create_default_raw_ttm(self, year : int) -> DataFrame:
'''
default_df:
@@ -767,12 +919,11 @@ def __get_default_raw_ttm(self, year : int) -> DataFrame:
...
'''
- cn_month : str = "Month"
- td : timedelta = self.__convert_string_to_timedelta(td_str = "0h 00m")
+ td : timedelta = self.__df_helper.convert_string_to_timedelta(td_str = "0h 00m")
default_df : DataFrame = pd.DataFrame(
{
- f"{cn_month}": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
+ f"{TTCN.MONTH}": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12],
f"{str(year)}": [td, td, td, td, td, td, td, td, td, td, td, td]
},
index=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11],
@@ -781,6 +932,90 @@ def __get_default_raw_ttm(self, year : int) -> DataFrame:
default_df = self.__enforce_dataframe_definition_for_raw_ttm_df(df = default_df)
return default_df
+ def __create_raw_ttm(self, tt_df : DataFrame, year : int) -> DataFrame:
+
+ '''
+ ttm_df:
+
+ Year Month Effort
+ 0 2015 10 8h 00m
+ 1 2015 11 10h 00m
+ 2 2015 12 0h 00m
+
+ ttm_df:
+
+ Year Month 2015
+ 0 2015 10 0 days 08:00:00
+ 1 2015 11 0 days 10:00:00
+ 2 2015 12 0 days 00:00:00
+
+ ttm_df:
+
+ Month 2015
+ 0 1 0 days 00:00:00
+ ...
+ 9 10 0 days 08:00:00
+ 10 11 0 days 10:00:00
+ 11 12 0 days 00:00:00
+ '''
+
+ ttm_df : DataFrame = tt_df.copy(deep=True)
+ ttm_df = ttm_df[[TTCN.YEAR, TTCN.MONTH, TTCN.EFFORT]]
+
+ condition : Series = (tt_df[TTCN.YEAR] == year)
+ ttm_df = ttm_df.loc[condition]
+
+ ttm_df[TTCN.EFFORT] = ttm_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
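+ # Copy the effort into a column named after the year, so that yearly columns can sit side by side after merging.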
+ ttm_df[str(year)] = ttm_df[TTCN.EFFORT]
+ cn_effort = str(year)
+
+ ttm_df = ttm_df.groupby([TTCN.MONTH])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
+ ttm_df = ttm_df.sort_values(by = TTCN.MONTH).reset_index(drop = True)
+
+ ttm_df = self.__try_complete_raw_ttm(ttm_df = ttm_df, year = year)
+ ttm_df = self.__enforce_dataframe_definition_for_raw_ttm_df(df = ttm_df)
+
+ return ttm_df
+ def __create_raw_tts_by_year_hashtag(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
+
+ '''
+ Year Hashtag Effort
+ 0 2023 #csharp 0 days 15:15:00
+ 1 2023 #maintenance 0 days 02:30:00
+ 2 2023 #powershell 3 days 02:15:00
+ ...
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ condition : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition]
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.HASHTAG])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.HASHTAG, TTCN.YEAR]).reset_index(drop = True)
+
+ return tts_df
+ def __create_raw_tts_by_hashtag(self, tt_df : DataFrame) -> DataFrame:
+
+ '''
+ Hashtag Effort Effort%
+ 0 #csharp 0 days 15:15:00 56.49
+ 1 #maintenance 0 days 02:30:00 23.97
+ 2 #powershell 3 days 02:15:00 6.43
+ ...
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.HASHTAG])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+
+ summarized : float = tts_df[TTCN.EFFORT].sum()
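+ # Effort% expresses each hashtag's share of the grand total effort.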
+ tts_df[TTCN.EFFORTPRC] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = summarized), axis = 1)
+
+ return tts_df
+
def __try_complete_raw_ttm(self, ttm_df : DataFrame, year : int) -> DataFrame:
'''
@@ -823,86 +1058,19 @@ def __try_complete_raw_ttm(self, ttm_df : DataFrame, year : int) -> DataFrame:
11 12 0h 00m
'''
- cn_month : str = "Month"
-
- if ttm_df[cn_month].count() != 12:
+ if ttm_df[TTCN.MONTH].count() != 12:
- default_df : DataFrame = self.__get_default_raw_ttm(year = year)
- missing_df : DataFrame = default_df.loc[~default_df[cn_month].astype(str).isin(ttm_df[cn_month].astype(str))]
+ default_df : DataFrame = self.__create_default_raw_ttm(year = year)
+ missing_df : DataFrame = default_df.loc[~default_df[TTCN.MONTH].astype(str).isin(ttm_df[TTCN.MONTH].astype(str))]
completed_df : DataFrame = pd.concat([ttm_df, missing_df], ignore_index = True)
- completed_df = completed_df.sort_values(by = cn_month, ascending = [True])
+ completed_df = completed_df.sort_values(by = TTCN.MONTH, ascending = [True])
completed_df = completed_df.reset_index(drop = True)
return completed_df
return ttm_df
- def __get_raw_ttm(self, sessions_df : DataFrame, year : int) -> DataFrame:
-
- '''
- ttm_df:
-
- Year Month Effort
- 0 2015 10 8h 00m
- 1 2015 11 10h 00m
- 2 2015 12 0h 00m
-
- ttm_df:
-
- Year Month 2015
- 0 2015 10 0 days 08:00:00
- 1 2015 11 0 days 10:00:00
- 2 2015 12 0 days 00:00:00
-
- ttm_df:
-
- Month 2015
- 0 1 0 days 00:00:00
- ...
- 9 10 0 days 08:00:00
- 10 11 0 days 10:00:00
- 11 12 0 days 00:00:00
- '''
-
- cn_year : str = "Year"
- cn_month : str = "Month"
- cn_effort : str = "Effort"
-
- ttm_df : DataFrame = sessions_df.copy(deep=True)
- ttm_df = ttm_df[[cn_year, cn_month, cn_effort]]
-
- condition : Series = (sessions_df[cn_year] == year)
- ttm_df = ttm_df.loc[condition]
-
- ttm_df[cn_effort] = ttm_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- ttm_df[str(year)] = ttm_df[cn_effort]
- cn_effort = str(year)
-
- ttm_df = ttm_df.groupby([cn_month])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- ttm_df = ttm_df.sort_values(by = cn_month).reset_index(drop = True)
-
- ttm_df = self.__try_complete_raw_ttm(ttm_df = ttm_df, year = year)
- ttm_df = self.__enforce_dataframe_definition_for_raw_ttm_df(df = ttm_df)
-
- return ttm_df
- def __get_trend_by_timedelta(self, td_1 : timedelta, td_2 : timedelta) -> str:
-
- '''
- 0h 30m, 1h 00m => "↑"
- 1h 00m, 0h 30m => "↓"
- 0, 0 => "="
- '''
- trend : Optional[str] = None
-
- if td_1 < td_2:
- trend = "↑"
- elif td_1 > td_2:
- trend = "↓"
- else:
- trend = "="
-
- return trend
- def __expand_raw_ttm_by_year(self, sessions_df : DataFrame, years : list, tts_by_month_df : DataFrame, i : int, add_trend : bool) -> DataFrame:
+ def __expand_raw_ttm_by_year(self, tt_df : DataFrame, years : list, tts_by_month_df : DataFrame, i : int, add_trend : bool) -> DataFrame:
'''
actual_df:
@@ -951,15 +1119,14 @@ def __expand_raw_ttm_by_year(self, sessions_df : DataFrame, years : list, tts_by
'''
actual_df : DataFrame = tts_by_month_df.copy(deep = True)
- ttm_df : DataFrame = self.__get_raw_ttm(sessions_df = sessions_df, year = years[i])
+ ttm_df : DataFrame = self.__create_raw_ttm(tt_df = tt_df, year = years[i])
- cn_month : str = "Month"
expansion_df = pd.merge(
left = actual_df,
right = ttm_df,
how = "inner",
- left_on = cn_month,
- right_on = cn_month)
+ left_on = TTCN.MONTH,
+ right_on = TTCN.MONTH)
if add_trend == True:
@@ -967,12 +1134,12 @@ def __expand_raw_ttm_by_year(self, sessions_df : DataFrame, years : list, tts_by
cn_trend_1 : str = str(years[i-1]) # for ex. "2016"
cn_trend_2 : str = str(years[i]) # for ex. "2017"
- expansion_df[cn_trend] = expansion_df.apply(lambda x : self.__get_trend_by_timedelta(td_1 = x[cn_trend_1], td_2 = x[cn_trend_2]), axis = 1)
+ expansion_df[cn_trend] = expansion_df.apply(lambda x : self.__df_helper.get_trend_by_timedelta(td_1 = x[cn_trend_1], td_2 = x[cn_trend_2]), axis = 1)
- new_column_names : list = [cn_month, cn_trend_1, cn_trend, cn_trend_2] # for ex. ["Month", "2016", "↕", "2017"]
+ new_column_names : list = [TTCN.MONTH, cn_trend_1, cn_trend, cn_trend_2] # for ex. ["Month", "2016", "↕", "2017"]
expansion_df = expansion_df.reindex(columns = new_column_names)
- shared_columns : list = [cn_month, str(years[i-1])] # ["Month", "2016"]
+ shared_columns : list = [TTCN.MONTH, str(years[i-1])] # ["Month", "2016"]
actual_df = pd.merge(
left = actual_df,
right = expansion_df,
@@ -984,236 +1151,142 @@ def __expand_raw_ttm_by_year(self, sessions_df : DataFrame, years : list, tts_by
actual_df = expansion_df
return actual_df
- def __try_consolidate_trend_column_name(self, column_name : str) -> str:
+ def __update_future_months_to_empty(self, tts_by_month_df : DataFrame, now : datetime) -> DataFrame:
- '''
- "2016" => "2016"
- "↕1" => "↕"
+ '''
+ If now is 2023-08-09:
+
+ Month 2022 ↕ 2023
+ ...
+ 8 0h 00m = 0h 00m
+ 9 1h 00m ↓ 0h 00m
+ 10 0h 00m = 0h 00m
+ 11 0h 00m = 0h 00m
+ 12 0h 00m = 0h 00m
+
+ Month 2022 ↕ 2023
+ ...
+ 8 0h 00m = 0h 00m
+ 9 1h 00m
+ 10 0h 00m
+ 11 0h 00m
+ 12 0h 00m
'''
- cn_trend : str = "↕"
+ tts_by_month_upd_df : DataFrame = tts_by_month_df.copy(deep = True)
- if column_name.startswith(cn_trend):
- return cn_trend
-
- return column_name
- def __create_effort_status_for_none_values(self, idx : int, effort_str : str) -> EffortStatus:
+ now_year : int = now.year
+ now_month : int = now.month
+ cn_year : str = str(now_year)
+ new_value : str = ""
- actual_str : str = effort_str
- actual_td : timedelta = self.__convert_string_to_timedelta(td_str = effort_str)
- is_correct : bool = True
- message : str = "''start_time' and/or 'end_time' are empty, 'effort' can't be verified. We assume that it's correct."
+ condition : Series = (tts_by_month_upd_df[TTCN.MONTH] > now_month)
+ tts_by_month_upd_df[cn_year] = np.where(condition, new_value, tts_by_month_upd_df[cn_year])
+
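+ # The trend column ("↕") sits immediately to the left of the current year's column, so it is blanked via its positional index.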
+ idx_year : int = cast(int, tts_by_month_upd_df.columns.get_loc(cn_year))
+ idx_trend : int = (idx_year - 1)
+ tts_by_month_upd_df.iloc[:, idx_trend] = np.where(condition, new_value, tts_by_month_upd_df.iloc[:, idx_trend])
- effort_status : EffortStatus = EffortStatus(
- idx = idx,
- start_time_str = None,
- start_time_dt = None,
- end_time_str = None,
- end_time_dt = None,
- actual_str = actual_str,
- actual_td = actual_td,
- expected_td = None,
- expected_str = None,
- is_correct = is_correct,
- message = message
- )
+ return tts_by_month_upd_df
+ def __remove_unknown_occurrences(self, tts_by_tr_df : DataFrame, unknown_id : str) -> DataFrame:
- return effort_status
- def __create_time_object(self, time : str) -> datetime:
+ '''Removes the provided unknown_id from the "TimeRangeId" column of the provided DataFrame.'''
- '''It creates a datetime object suitable for timedelta calculation out of the provided time.'''
+ condition : Series = (tts_by_tr_df[TTCN.TIMERANGEID] != unknown_id)
+ tts_by_tr_df = tts_by_tr_df.loc[condition]
+ tts_by_tr_df.reset_index(drop = True, inplace = True)
- day_1_times : list[str] = [
- "07:00", "07:15", "07:30", "07:45",
- "08:00", "08:15", "08:30", "08:45",
- "09:00", "09:15", "09:30", "09:45",
- "10:00", "10:15", "10:30", "10:45",
- "11:00", "11:15", "11:30", "11:45",
- "12:00", "12:15", "12:30", "12:45",
- "13:00", "13:15", "13:30", "13:45",
- "14:00", "14:15", "14:30", "14:45",
- "15:00", "15:15", "15:30", "15:45",
- "16:00", "16:15", "16:30", "16:45",
- "17:00", "17:15", "17:30", "17:45",
- "18:00", "18:15", "18:30", "18:45",
- "19:00", "19:15", "19:30", "19:45",
- "20:00", "20:15", "20:30", "20:45",
- "21:00", "21:15", "21:30", "21:45",
- "22:00", "22:15", "22:30", "22:45",
- "23:00", "23:15", "23:30", "23:45"
- ]
- day_2_times : list[str] = [
- "00:00", "00:15", "00:30", "00:45",
- "01:00", "01:15", "01:30", "01:45",
- "02:00", "02:15", "02:30", "02:45",
- "03:00", "03:15", "03:30", "03:45",
- "04:00", "04:15", "04:30", "04:45",
- "05:00", "05:15", "05:30", "05:45",
- "06:00", "06:15", "06:30", "06:45"
- ]
-
- strp_format : str = "%Y-%m-%d %H:%M"
-
- dt_str : Optional[str] = None
- if time in day_1_times:
- dt_str = f"1900-01-01 {time}"
- elif time in day_2_times:
- dt_str = f"1900-01-02 {time}"
- else:
- raise ValueError(_MessageCollection.effort_status_not_among_expected_time_values(time = time))
-
- dt : datetime = datetime.strptime(dt_str, strp_format)
-
- return dt
- def __create_effort_status(self, idx : int, start_time_str : str, end_time_str : str, effort_str : str) -> EffortStatus:
+ return tts_by_tr_df
+ def __filter_by_year(self, df : DataFrame, years : list[int]) -> DataFrame:
'''
- start_time_str, end_time_str:
- - Expects time values in the "%H:%M" format - for ex. 20:00.
+ Returns a DataFrame whose "TTCN.YEAR" column contains only the values listed in "years".
- is_correct:
- start_time_str = "20:00", end_time_str = "00:00", effort_str = "4h 00m" => True
- start_time_str = "20:00", end_time_str = "00:00", effort_str = "5h 00m" => False
+ Returns df if years is an empty list.
'''
- try:
+ filtered_df : DataFrame = df.copy(deep = True)
- if len(start_time_str) == 0 or len(end_time_str) == 0:
- return self.__create_effort_status_for_none_values(idx = idx, effort_str = effort_str)
-
- start_time_dt : datetime = self.__create_time_object(time = start_time_str)
- end_time_dt : datetime = self.__create_time_object(time = end_time_str)
+ if len(years) > 0:
+ condition : Series = filtered_df[TTCN.YEAR].isin(years)
+ filtered_df = df.loc[condition]
- actual_str : str = effort_str
- actual_td : timedelta = self.__convert_string_to_timedelta(td_str = effort_str)
+ return filtered_df
+ def __filter_by_software_project_name(self, df : DataFrame, software_project_name : Optional[str]) -> DataFrame:
- expected_td : timedelta = (end_time_dt - start_time_dt)
- expected_str : str = self.__format_timedelta(td = expected_td, add_plus_sign = False)
-
- is_correct : bool = True
- if actual_td != expected_td:
- is_correct = False
-
- message : str = "The effort is correct."
- if actual_td != expected_td:
- message = _MessageCollection.effort_status_mismatching_effort(
- idx = idx,
- start_time_str = start_time_str,
- end_time_str = end_time_str,
- actual_str = actual_str,
- expected_str = expected_str
- )
+ '''
+ Returns a DataFrame whose "TTCN.PROJECTNAME" column contains only values equal to software_project_name.
- effort_status : EffortStatus = EffortStatus(
- idx = idx,
- start_time_str = start_time_str,
- start_time_dt = start_time_dt,
- end_time_str = end_time_str,
- end_time_dt = end_time_dt,
- actual_str = actual_str,
- actual_td = actual_td,
- expected_td = expected_td,
- expected_str = expected_str,
- is_correct = is_correct,
- message = message
- )
-
- return effort_status
-
- except:
-
- error : str = _MessageCollection.effort_status_not_possible_to_create(
- idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str)
+ Returns df if software_project_name is None.
+ '''
- raise ValueError(error)
- def __create_effort_status_and_cast_to_any(self, idx : int, start_time_str : str, end_time_str : str, effort_str : str) -> Any:
+ filtered_df : DataFrame = df.copy(deep = True)
- '''
- Wrapper method created to overcome the following error raised by df.apply():
+ if software_project_name is not None:
+ condition : Series = (filtered_df[TTCN.PROJECTNAME] == software_project_name)
+ filtered_df = df.loc[condition]
- Argument of type "(x: Unknown) -> EffortStatus" cannot be assigned to parameter "f" of type "(...) -> Series[Any]" in function "apply"
- Type "(x: Unknown) -> EffortStatus" is not assignable to type "(...) -> Series[Any]"
- Function return type "EffortStatus" is incompatible with type "Series[Any]"
- "EffortStatus" is not assignable to "Series[Any]"
- '''
+ return filtered_df
+ def __filter_by_is_correct(self, tts_by_efs_df : DataFrame, is_correct : bool) -> DataFrame:
- return cast(Any, self.__create_effort_status(idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str))
- def __create_time_range_id(self, start_time : str, end_time : str, unknown_id : str) -> str:
-
- '''
- Creates a unique time range identifier out of the provided parameters.
- If parameters are empty, it returns unknown_id.
- '''
+ '''Returns a DataFrame that contains only rows that match the provided is_correct.'''
- time_range_id : str = f"{start_time}-{end_time}"
+ filtered_df : DataFrame = tts_by_efs_df.copy(deep = True)
- if len(start_time) == 0 or len(end_time) == 0:
- time_range_id = unknown_id
+ condition : Series = (filtered_df[TTCN.ESISCORRECT] == is_correct)
+ filtered_df = tts_by_efs_df.loc[condition]
- return time_range_id
- def __get_raw_tt_by_year_hashtag(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
+ return filtered_df
+ def create_tt_df(self, excel_path : str, excel_skiprows : int, excel_nrows : int, excel_tabname : str) -> DataFrame:
+
'''
- Year Hashtag Effort
- 0 2023 #csharp 0 days 15:15:00
- 1 2023 #maintenance 0 days 02:30:00
- 2 2023 #powershell 3 days 02:15:00
- ...
+ Retrieves the content of the "Sessions" tab and returns it as a DataFrame.
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
-
- cn_year : str = "Year"
- condition : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition]
-
- cn_hashtag: str = "Hashtag"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_hashtag])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_hashtag, cn_year]).reset_index(drop = True)
+ tt_df : DataFrame = pd.read_excel(
+ io = excel_path,
+ skiprows = excel_skiprows,
+ nrows = excel_nrows,
+ sheet_name = excel_tabname,
+ engine = 'openpyxl'
+ )
+ tt_df = self.__enforce_dataframe_definition_for_tt_df(tt_df = tt_df)
return tt_df
- def __get_raw_tt_by_hashtag(self, sessions_df : DataFrame) -> DataFrame:
+ def create_tts_by_month_tpl(self, tt_df : DataFrame, years : list, now : datetime) -> Tuple[DataFrame, DataFrame]:
'''
- Hashtag Effort Effort%
- 0 #csharp 0 days 15:15:00 56.49
- 1 #maintenance 0 days 02:30:00 23.97
- 2 #powershell 3 days 02:15:00 6.43
- ...
- '''
-
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ Month 2016 ↕ 2017 ↕ 2018 ...
+ 0 1 0h 00m ↑ 13h 00m ↓ 0h 00m
+ 1 2 0h 00m ↑ 1h 00m ↓ 0h 00m
+ ...
- cn_hashtag: str = "Hashtag"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_hashtag])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
+ Returns (tts_by_month_df, tts_by_month_upd_df).
+ '''
- cn_effort_prc : str = "Effort%"
- summarized : float = tt_df[cn_effort].sum()
- tt_df[cn_effort_prc] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = summarized), axis = 1)
+ tts_df : DataFrame = pd.DataFrame()
- return tt_df
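+ # The first year seeds the per-month scaffold; each following year is merged in together with a "↕" trend column.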
+ for i in range(len(years)):
- def get_sessions_dataset(self, setting_bag : SettingBag) -> DataFrame:
-
- '''
- Retrieves the content of the "Sessions" tab and returns it as a Dataframe.
- '''
+ if i == 0:
+ tts_df = self.__create_raw_ttm(tt_df = tt_df, year = years[i])
+ else:
+ tts_df = self.__expand_raw_ttm_by_year(
+ tt_df = tt_df,
+ years = years,
+ tts_by_month_df = tts_df,
+ i = i,
+ add_trend = True)
+
+ for year in years:
+ tts_df[str(year)] = tts_df[str(year)].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- sessions_df : DataFrame = pd.read_excel(
- io = setting_bag.excel_path,
- skiprows = setting_bag.excel_books_skiprows,
- nrows = setting_bag.excel_books_nrows,
- sheet_name = setting_bag.excel_books_tabname,
- engine = 'openpyxl'
- )
- sessions_df = self.__enforce_dataframe_definition_for_sessions_df(sessions_df = sessions_df)
+ tts_df.rename(columns = (lambda x : self.__df_helper.try_consolidate_trend_column_name(column_name = x)), inplace = True)
+ tts_upd_df : DataFrame = self.__update_future_months_to_empty(tts_by_month_df = tts_df, now = now)
- return sessions_df
- def get_tt_by_year(self, sessions_df : DataFrame, years : list[int], yearly_targets : list[YearlyTarget]) -> DataFrame:
+ return (tts_df, tts_upd_df)
+ def create_tts_by_year_df(self, tt_df : DataFrame, years : list[int], yearly_targets : list[YearlyTarget]) -> DataFrame:
'''
[0]
@@ -1239,33 +1312,27 @@ def get_tt_by_year(self, sessions_df : DataFrame, years : list[int], yearly_targ
...
'''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_year : str = "Year"
- condition : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition]
+ condition : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition]
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby([cn_year])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = cn_year).reset_index(drop = True)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby([TTCN.YEAR])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = TTCN.YEAR).reset_index(drop = True)
- cn_yearly_target : str = "YearlyTarget"
- cn_target_diff : str = "TargetDiff"
- cn_is_target_met : str = "IsTargetMet"
+ tts_df[TTCN.YEARLYTARGET] = tts_df[TTCN.YEAR].apply(
+ lambda x : cast(YearlyTarget, self.__df_helper.get_yearly_target(yearly_targets = yearly_targets, year = x)).hours)
+ tts_df[TTCN.TARGETDIFF] = tts_df[TTCN.EFFORT] - tts_df[TTCN.YEARLYTARGET]
+ tts_df[TTCN.ISTARGETMET] = tts_df.apply(
+ lambda x : self.__df_helper.is_yearly_target_met(effort = x[TTCN.EFFORT], yearly_target = x[TTCN.YEARLYTARGET]), axis = 1)
- tt_df[cn_yearly_target] = tt_df[cn_year].apply(
- lambda x : cast(YearlyTarget, self.__get_yearly_target(yearly_targets = yearly_targets, year = x)).hours)
- tt_df[cn_target_diff] = tt_df[cn_effort] - tt_df[cn_yearly_target]
- tt_df[cn_is_target_met] = tt_df.apply(
- lambda x : self.__is_yearly_target_met(effort = x[cn_effort], yearly_target = x[cn_yearly_target]), axis = 1)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.YEARLYTARGET] = tts_df[TTCN.YEARLYTARGET].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.TARGETDIFF] = tts_df[TTCN.TARGETDIFF].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = True))
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_yearly_target] = tt_df[cn_yearly_target].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_target_diff] = tt_df[cn_target_diff].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = True))
-
- return tt_df
- def get_tt_by_year_month(self, sessions_df : DataFrame, years : list[int], yearly_targets : list[YearlyTarget]) -> DataFrame:
+ return tts_df
+ def create_tts_by_year_month_tpl(self, tt_df : DataFrame, years : list[int], yearly_targets : list[YearlyTarget], display_only_years : list[int]) -> Tuple[DataFrame, DataFrame]:
'''
[0]
@@ -1303,38 +1370,35 @@ def get_tt_by_year_month(self, sessions_df : DataFrame, years : list[int], yearl
88 2023 2 23h 00m 29h 00m -371h 00m
89 2023 3 50h 15m 79h 15m -321h 15m
...
- '''
- tt_df : DataFrame = sessions_df.copy(deep = True)
+ Returns (tts_by_year_month_df, tts_by_year_month_flt_df).
+ '''
- cn_year : str = "Year"
- condition : Series = (sessions_df[cn_year].isin(values = years))
- tt_df = tt_df.loc[condition]
+ tts_df : DataFrame = tt_df.copy(deep = True)
- cn_month : str = "Month"
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__convert_string_to_timedelta(td_str = x))
- tt_df = tt_df.groupby(by = [cn_year, cn_month])[cn_effort].sum().sort_values(ascending = [False]).reset_index(name = cn_effort)
- tt_df = tt_df.sort_values(by = [cn_year, cn_month]).reset_index(drop = True)
+ condition : Series = (tt_df[TTCN.YEAR].isin(values = years))
+ tts_df = tts_df.loc[condition]
- cn_yearly_total : str = "YearlyTotal"
- tt_df[cn_yearly_total] = tt_df[cn_effort].groupby(by = tt_df[cn_year]).cumsum()
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.convert_string_to_timedelta(td_str = x))
+ tts_df = tts_df.groupby(by = [TTCN.YEAR, TTCN.MONTH])[TTCN.EFFORT].sum().sort_values(ascending = [False]).reset_index(name = TTCN.EFFORT)
+ tts_df = tts_df.sort_values(by = [TTCN.YEAR, TTCN.MONTH]).reset_index(drop = True)
- cn_yearly_target : str = "YearlyTarget"
- tt_df[cn_yearly_target] = tt_df[cn_year].apply(
- lambda x : cast(YearlyTarget, self.__get_yearly_target(yearly_targets = yearly_targets, year = x)).hours)
+ tts_df[TTCN.YEARLYTOTAL] = tts_df[TTCN.EFFORT].groupby(by = tts_df[TTCN.YEAR]).cumsum()
- cn_to_target : str = "ToTarget"
- tt_df[cn_to_target] = tt_df[cn_yearly_total] - tt_df[cn_yearly_target]
+ tts_df[TTCN.YEARLYTARGET] = tts_df[TTCN.YEAR].apply(
+ lambda x : cast(YearlyTarget, self.__df_helper.get_yearly_target(yearly_targets = yearly_targets, year = x)).hours)
- tt_df.drop(columns = [cn_yearly_target], axis = 1, inplace = True)
+ tts_df[TTCN.TOTARGET] = tts_df[TTCN.YEARLYTOTAL] - tts_df[TTCN.YEARLYTARGET]
+ tts_df.drop(columns = [TTCN.YEARLYTARGET], axis = 1, inplace = True)
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_yearly_total] = tt_df[cn_yearly_total].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_to_target] = tt_df[cn_to_target].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = True))
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.YEARLYTOTAL] = tts_df[TTCN.YEARLYTOTAL].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.TOTARGET] = tts_df[TTCN.TOTARGET].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = True))
- return tt_df
- def get_tt_by_year_month_spnv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+ tts_flt_df : DataFrame = self.__filter_by_year(df = tts_df, years = display_only_years)
+
+ return (tts_df, tts_flt_df)
+ def create_tts_by_year_month_spnv_tpl(self, tt_df : DataFrame, years : list[int], software_project_names : list[str], software_project_name : Optional[str]) -> Tuple[DataFrame, DataFrame]:
'''
[0] ...
@@ -1344,46 +1408,41 @@ def get_tt_by_year_month_spnv(self, sessions_df : DataFrame, years : list[int],
0 2023 4 nwtraderaanalytics 2.0.0 09h 15m 09h 15m 100.00 19h 00m 48.68
1 2023 6 nwreadinglistmanager 1.0.0 06h 45m 06h 45m 100.00 24h 45m 27.27
...
- '''
- spnv_df : DataFrame = self.__get_raw_tt_by_year_month_spnv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
- dme_df : DataFrame = self.__get_raw_dme(sessions_df = sessions_df, years = years)
- tme_df : DataFrame = self.__get_raw_tme(sessions_df = sessions_df, years = years)
+ Returns (tts_by_year_month_spnv_df, tts_by_year_month_spnv_flt_df).
+ '''
- cn_year : str = "Year"
- cn_month : str = "Month"
+ spnv_df : DataFrame = self.__create_raw_tts_by_year_month_spnv(tt_df = tt_df, years = years, software_project_names = software_project_names)
+ dme_df : DataFrame = self.__create_raw_tts_by_dme(tt_df = tt_df, years = years)
+ tme_df : DataFrame = self.__create_raw_tts_by_tme(tt_df = tt_df, years = years)
- tt_df : DataFrame = pd.merge(
+ tts_df : DataFrame = pd.merge(
left = spnv_df,
right = dme_df,
how = "inner",
- left_on = [cn_year, cn_month],
- right_on = [cn_year, cn_month]
+ left_on = [TTCN.YEAR, TTCN.MONTH],
+ right_on = [TTCN.YEAR, TTCN.MONTH]
)
- cn_effort : str = "Effort"
- cn_dme : str = "DME"
- cn_percentage_dme : str = "%_DME"
- tt_df[cn_percentage_dme] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_dme]), axis = 1)
+ tts_df[TTCN.PERCDME] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.DME]), axis = 1)
- tt_df = pd.merge(
- left = tt_df,
+ tts_df = pd.merge(
+ left = tts_df,
right = tme_df,
how = "inner",
- left_on = [cn_year, cn_month],
- right_on = [cn_year, cn_month]
+ left_on = [TTCN.YEAR, TTCN.MONTH],
+ right_on = [TTCN.YEAR, TTCN.MONTH]
)
- cn_tme : str = "TME"
- cn_percentage_tme : str = "%_TME"
- tt_df[cn_percentage_tme] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_tme]), axis = 1)
+ tts_df[TTCN.PERCTME] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.TME]), axis = 1)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.DME] = tts_df[TTCN.DME].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.TME] = tts_df[TTCN.TME].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_dme] = tt_df[cn_dme].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_tme] = tt_df[cn_tme].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
+ tts_flt_df : DataFrame = self.__filter_by_software_project_name(df = tts_df, software_project_name = software_project_name)
- return tt_df
- def get_tt_by_year_spnv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+ return (tts_df, tts_flt_df)
+ def create_tts_by_year_spnv_tpl(self, tt_df : DataFrame, years : list[int], software_project_names : list[str], software_project_name : Optional[str]) -> Tuple[DataFrame, DataFrame]:
'''
[0] ...
@@ -1393,45 +1452,41 @@ def get_tt_by_year_spnv(self, sessions_df : DataFrame, years : list[int], softwa
0 2023 nwtraderaanalytics 2.0.0 09h 15m 09h 15m 100.00 19h 00m 48.68
1 2023 nwreadinglistmanager 1.0.0 06h 45m 06h 45m 100.00 24h 45m 27.27
...
- '''
- spnv_df : DataFrame = self.__get_raw_tt_by_year_spnv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
- dye_df : DataFrame = self.__get_raw_dye(sessions_df = sessions_df, years = years)
- tye_df : DataFrame = self.__get_raw_tye(sessions_df = sessions_df, years = years)
+ Returns (tts_by_year_spnv_df, tts_by_year_spnv_flt_df).
+ '''
- cn_year : str = "Year"
+ spnv_df : DataFrame = self.__create_raw_tts_by_year_spnv(tt_df = tt_df, years = years, software_project_names = software_project_names)
+ dye_df : DataFrame = self.__create_raw_tts_by_dye(tt_df = tt_df, years = years)
+ tye_df : DataFrame = self.__create_raw_tts_by_tye(tt_df = tt_df, years = years)
- tt_df : DataFrame = pd.merge(
+ tts_df : DataFrame = pd.merge(
left = spnv_df,
right = dye_df,
how = "inner",
- left_on = [cn_year],
- right_on = [cn_year]
+ left_on = [TTCN.YEAR],
+ right_on = [TTCN.YEAR]
)
- cn_effort : str = "Effort"
- cn_dye : str = "DYE"
- cn_percentage_dye : str = "%_DYE"
- tt_df[cn_percentage_dye] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_dye]), axis = 1)
+ tts_df[TTCN.PERCDYE] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.DYE]), axis = 1)
- tt_df = pd.merge(
- left = tt_df,
+ tts_df = pd.merge(
+ left = tts_df,
right = tye_df,
how = "inner",
- left_on = [cn_year],
- right_on = [cn_year]
+ left_on = [TTCN.YEAR],
+ right_on = [TTCN.YEAR]
)
- cn_tye : str = "TYE"
- cn_percentage_tye : str = "%_TYE"
- tt_df[cn_percentage_tye] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_tye]), axis = 1)
+ tts_df[TTCN.PERCTYE] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.TYE]), axis = 1)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.DYE] = tts_df[TTCN.DYE].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.TYE] = tts_df[TTCN.TYE].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_dye] = tt_df[cn_dye].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_tye] = tt_df[cn_tye].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
+ tts_flt_df : DataFrame = self.__filter_by_software_project_name(df = tts_df, software_project_name = software_project_name)
- return tt_df
- def get_tt_by_spn(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str], remove_untagged : bool) -> DataFrame:
+ return (tts_df, tts_flt_df)
+ def create_tts_by_spn_df(self, tt_df : DataFrame, years : list[int], software_project_names : list[str], remove_untagged : bool) -> DataFrame:
'''
Hashtag ProjectName Effort DE %_DE TE %_TE
@@ -1447,29 +1502,22 @@ def get_tt_by_spn(self, sessions_df : DataFrame, years : list[int], software_pro
...
'''
- tt_df : DataFrame = self.__get_raw_tt_by_spn(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
- de : timedelta = self.__get_raw_de(sessions_df = sessions_df, years = years)
- te : timedelta = self.__get_raw_te(sessions_df = sessions_df, years = years, remove_untagged = remove_untagged)
+ tts_df : DataFrame = self.__create_raw_tts_by_spn(tt_df = tt_df, years = years, software_project_names = software_project_names)
+ de : timedelta = self.__create_raw_de(tt_df = tt_df, years = years)
+ te : timedelta = self.__create_raw_te(tt_df = tt_df, years = years, remove_untagged = remove_untagged)
- cn_de : str = "DE"
- tt_df[cn_de] = de
+ tts_df[TTCN.DE] = de
+ tts_df[TTCN.PERCDE] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.DE]), axis = 1)
- cn_effort : str = "Effort"
- cn_percentage_de : str = "%_DE"
- tt_df[cn_percentage_de] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_de]), axis = 1)
+ tts_df[TTCN.TE] = te
+ tts_df[TTCN.PERCTE] = tts_df.apply(lambda x : self.__df_helper.calculate_percentage(part = x[TTCN.EFFORT], whole = x[TTCN.TE]), axis = 1)
- cn_te : str = "TE"
- tt_df[cn_te] = te
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.DE] = tts_df[TTCN.DE].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+ tts_df[TTCN.TE] = tts_df[TTCN.TE].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- cn_percentage_te : str = "%_TE"
- tt_df[cn_percentage_te] = tt_df.apply(lambda x : self.__calculate_percentage(part = x[cn_effort], whole = x[cn_te]), axis = 1)
-
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_de] = tt_df[cn_de].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
- tt_df[cn_te] = tt_df[cn_te].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
-
- return tt_df
- def get_tt_by_spn_spv(self, sessions_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
+ return tts_df
+ def create_tts_by_spn_spv_df(self, tt_df : DataFrame, years : list[int], software_project_names : list[str]) -> DataFrame:
'''
ProjectName ProjectVersion Effort
@@ -1479,42 +1527,11 @@ def get_tt_by_spn_spv(self, sessions_df : DataFrame, years : list[int], software
...
'''
- tt_df : DataFrame = self.__get_raw_tt_by_spn_spv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
-
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
-
- return tt_df
- def get_tts_by_month(self, sessions_df : DataFrame, years : list) -> DataFrame:
-
- '''
- Month 2016 ↕ 2017 ↕ 2018 ...
- 0 1 0h 00m ↑ 13h 00m ↓ 0h 00m
- 1 2 0h 00m ↑ 1h 00m ↓ 0h 00m
- ...
- '''
-
- tts_by_month_df : DataFrame = pd.DataFrame()
-
- for i in range(len(years)):
-
- if i == 0:
- tts_by_month_df = self.__get_raw_ttm(sessions_df = sessions_df, year = years[i])
- else:
- tts_by_month_df = self.__expand_raw_ttm_by_year(
- sessions_df = sessions_df,
- years = years,
- tts_by_month_df = tts_by_month_df,
- i = i,
- add_trend = True)
-
- for year in years:
- tts_by_month_df[str(year)] = tts_by_month_df[str(year)].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
-
- tts_by_month_df.rename(columns = (lambda x : self.__try_consolidate_trend_column_name(column_name = x)), inplace = True)
+ tts_df : DataFrame = self.__create_raw_tts_by_spn_spv(tt_df = tt_df, years = years, software_project_names = software_project_names)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- return tts_by_month_df
- def get_tt_by_year_hashtag(self, sessions_df : DataFrame, years : list[int]) -> DataFrame:
+ return tts_df
+ def create_tts_by_hashtag_year_df(self, tt_df : DataFrame, years : list[int]) -> DataFrame:
'''
Year Hashtag Effort
@@ -1524,13 +1541,11 @@ def get_tt_by_year_hashtag(self, sessions_df : DataFrame, years : list[int]) ->
...
'''
- tt_df : DataFrame = self.__get_raw_tt_by_year_hashtag(sessions_df = sessions_df, years = years)
+ tts_df : DataFrame = self.__create_raw_tts_by_year_hashtag(tt_df = tt_df, years = years)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
-
- return tt_df
- def get_tt_by_hashtag(self, sessions_df : DataFrame) -> DataFrame:
+ return tts_df
+ def create_tts_by_hashtag_df(self, tt_df : DataFrame) -> DataFrame:
'''
Hashtag Effort Effort%
@@ -1540,207 +1555,647 @@ def get_tt_by_hashtag(self, sessions_df : DataFrame) -> DataFrame:
...
'''
- tt_df : DataFrame = self.__get_raw_tt_by_hashtag(sessions_df = sessions_df)
+ tts_df : DataFrame = self.__create_raw_tts_by_hashtag(tt_df = tt_df)
+ tts_df[TTCN.EFFORT] = tts_df[TTCN.EFFORT].apply(lambda x : self.__df_helper.format_timedelta(td = x, add_plus_sign = False))
+
+ return tts_df
+ def create_tts_by_efs_tpl(self, tt_df : DataFrame, is_correct : bool) -> Tuple[DataFrame, DataFrame]:
+
+ '''
+ StartTime EndTime Effort ES_IsCorrect ES_Expected ES_Message
+ 21:00 23:00 1h 00m False 2h 00m ...
+ ...
+
+ Returns (tts_by_efs_df, tts_by_efs_flt_df).
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+
+ tts_df[TTCN.EFFORTSTATUS] = tts_df.apply(
+ lambda x : self.__df_helper.create_effort_status_and_cast_to_any(
+ idx = x.name,
+ start_time_str = x[TTCN.STARTTIME],
+ end_time_str = x[TTCN.ENDTIME],
+ effort_str = x[TTCN.EFFORT]),
+ axis = 1)
+
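+ # Unpack each EffortStatus object into flat, display-friendly columns.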
+ tts_df[TTCN.ESISCORRECT] = tts_df[TTCN.EFFORTSTATUS].apply(lambda x : x.is_correct)
+ tts_df[TTCN.ESEXPECTED] = tts_df[TTCN.EFFORTSTATUS].apply(lambda x : x.expected_str)
+ tts_df[TTCN.ESMESSAGE] = tts_df[TTCN.EFFORTSTATUS].apply(lambda x : x.message)
+ tts_df = tts_df[[TTCN.STARTTIME, TTCN.ENDTIME, TTCN.EFFORT, TTCN.ESISCORRECT, TTCN.ESEXPECTED, TTCN.ESMESSAGE]]
+
+ tts_flt_df : DataFrame = self.__filter_by_is_correct(tts_by_efs_df = tts_df, is_correct = is_correct)
+
+ return (tts_df, tts_flt_df)
+ def create_tts_by_tr_df(self, tt_df : DataFrame, unknown_id : str, remove_unknown_occurrences : bool) -> DataFrame:
+
+ '''
+ TimeRangeId Occurrences
+ 0 Unknown 44
+ 1 18:00-20:00 19
+ 2 08:00-08:30 16
+ ...
+ '''
+
+ tts_df : DataFrame = tt_df.copy(deep = True)
+ tts_df = tts_df[[TTCN.STARTTIME, TTCN.ENDTIME]]
+
+ tts_df[TTCN.TIMERANGEID] = tts_df.apply(
+ lambda x : self.__df_helper.create_time_range_id(
+ start_time = x[TTCN.STARTTIME],
+ end_time = x[TTCN.ENDTIME],
+ unknown_id = unknown_id), axis = 1)
+
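+ # Count the occurrences of each time range id; the temporary "count" column is renamed to TTCN.OCCURRENCES below.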
+ count : NamedAgg = pd.NamedAgg(column = TTCN.TIMERANGEID, aggfunc = "count")
+ tts_df = tts_df[[TTCN.TIMERANGEID]].groupby(by = [TTCN.TIMERANGEID], as_index=False).agg(count = count)
+ tts_df.rename(columns={"count" : TTCN.OCCURRENCES}, inplace = True)
+
+ ascending : bool = False
+ tts_df = tts_df.sort_values(by = [TTCN.OCCURRENCES], ascending = ascending).reset_index(drop = True)
+
+ if remove_unknown_occurrences:
+ tts_df = self.__remove_unknown_occurrences(tts_by_tr_df = tts_df, unknown_id = unknown_id)
+
+ return tts_df
+ def create_definitions_df(self) -> DataFrame:
+
+ '''Creates a dataframe containing all the definitions in use in this application.'''
+
+ columns : list[str] = [DEFINITIONSCN.TERM, DEFINITIONSCN.DEFINITION]
+
+ definitions : dict[str, str] = {
+ "DME": "Development Monthly Effort",
+ "TME": "Total Monthly Effort",
+ "DYE": "Development Yearly Effort",
+ "TYE": "Total Yearly Effort",
+ "DE": "Development Effort",
+ "TE": "Total Effort"
+ }
+
+ definitions_df : DataFrame = DataFrame(
+ data = definitions.items(),
+ columns = columns
+ )
+
+ return definitions_df
+class TTMarkdownFactory():
+
+ '''Collects all the logic related to Markdown creation out of Time Tracking dataframes.'''
+
+ __markdown_helper : MarkdownHelper
+
+ def __init__(self, markdown_helper : MarkdownHelper) -> None:
+
+ self.__markdown_helper = markdown_helper
+
+ def create_tts_by_month_md(self, paragraph_title : str, last_update : datetime, tts_by_month_upd_df : DataFrame) -> str:
+
+ '''Creates the expected Markdown content for the provided arguments.'''
+
+ markdown_header : str = self.__markdown_helper.get_markdown_header(last_update = last_update, paragraph_title = paragraph_title)
+ tts_by_month_upd_md : str = tts_by_month_upd_df.to_markdown(index = False)
+
+ md_content : str = markdown_header
+ md_content += "\n"
+ md_content += tts_by_month_upd_md
+ md_content += "\n"
+
+ return md_content
+class TTAdapter():
+
+ '''Adapts SettingBag properties for use in TT*Factory methods.'''
+
+ __df_factory : TTDataFrameFactory
+ __md_factory : TTMarkdownFactory
+
+ def __init__(self, df_factory : TTDataFrameFactory, md_factory : TTMarkdownFactory) -> None:
+
+ self.__df_factory = df_factory
+ self.__md_factory = md_factory
+
+ def extract_file_name_and_paragraph_title(self, id : TTID, setting_bag : SettingBag) -> Tuple[str, str]:
+
+ '''Returns (file_name, paragraph_title) for the provided id or raises an Exception.'''
+
+ for md_info in setting_bag.md_infos:
+ if md_info.id == id:
+ return (md_info.file_name, md_info.paragraph_title)
+
+ raise Exception(_MessageCollection.no_mdinfo_found(id = id))
+
+ def create_tt_df(self, setting_bag : SettingBag) -> DataFrame:
- cn_effort : str = "Effort"
- tt_df[cn_effort] = tt_df[cn_effort].apply(lambda x : self.__format_timedelta(td = x, add_plus_sign = False))
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tt_df : DataFrame = self.__df_factory.create_tt_df(
+ excel_path = setting_bag.excel_path,
+ excel_skiprows = setting_bag.excel_skiprows,
+ excel_nrows = setting_bag.excel_nrows,
+ excel_tabname = setting_bag.excel_tabname
+ )
return tt_df
+ def create_tts_by_month_tpl(self, tt_df : DataFrame, setting_bag : SettingBag) -> Tuple[DataFrame, DataFrame]:
+
+ '''Creates the expected dataframes out of the provided arguments.'''
+
+ tts_by_month_tpl : Tuple[DataFrame, DataFrame] = self.__df_factory.create_tts_by_month_tpl(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ now = setting_bag.now
+ )
- def try_print_definitions(self, df : DataFrame, definitions : dict[str, str]) -> None:
+ return tts_by_month_tpl
+ def create_tts_by_year_df(self, tt_df : DataFrame, setting_bag : SettingBag) -> DataFrame:
+
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tts_by_year_df : DataFrame = self.__df_factory.create_tts_by_year_df(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ yearly_targets = setting_bag.yearly_targets,
+ )
+
+ return tts_by_year_df
+ def create_tts_by_year_month_tpl(self, tt_df : DataFrame, setting_bag : SettingBag) -> Tuple[DataFrame, DataFrame]:
+
+ '''Creates the expected dataframes out of the provided arguments.'''
+
+ display_only_years : list[int] = []
- '''
- "DE" => print("DE: Development Effort")
- "Year" => do nothing
- '''
+ if setting_bag.tts_by_year_month_display_only_years is not None:
+ display_only_years = cast(list[int], setting_bag.tts_by_year_month_display_only_years)
+
+ tts_by_year_month_tpl : Tuple[DataFrame, DataFrame] = self.__df_factory.create_tts_by_year_month_tpl(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ yearly_targets = setting_bag.yearly_targets,
+ display_only_years = display_only_years
+ )
+
+ return tts_by_year_month_tpl
+ def create_tts_by_year_month_spnv_tpl(self, tt_df : DataFrame, setting_bag : SettingBag) -> Tuple[DataFrame, DataFrame]:
+
+ '''Creates the expected dataframes out of the provided arguments.'''
+
+ tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame] = self.__df_factory.create_tts_by_year_month_spnv_tpl(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ software_project_names = setting_bag.software_project_names,
+ software_project_name = setting_bag.tts_by_year_month_spnv_display_only_spn
+ )
+
+ return tts_by_year_month_spnv_tpl
+ def create_tts_by_year_spnv_tpl(self, tt_df : DataFrame, setting_bag : SettingBag) -> Tuple[DataFrame, DataFrame]:
+
+ '''Creates the expected dataframes out of the provided arguments.'''
+
+ tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame] = self.__df_factory.create_tts_by_year_spnv_tpl(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ software_project_names = setting_bag.software_project_names,
+ software_project_name = setting_bag.tts_by_year_spnv_display_only_spn
+ )
+
+ return tts_by_year_spnv_tpl
+ def create_tts_by_spn_df(self, tt_df : DataFrame, setting_bag : SettingBag) -> DataFrame:
+
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tts_by_spn_df : DataFrame = self.__df_factory.create_tts_by_spn_df(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ software_project_names = setting_bag.software_project_names,
+ remove_untagged = setting_bag.tts_by_spn_remove_untagged
+ )
+
+ return tts_by_spn_df
+ def create_tts_by_spn_spv_df(self, tt_df : DataFrame, setting_bag : SettingBag) -> DataFrame:
+
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tts_by_spn_spv_df : DataFrame = self.__df_factory.create_tts_by_spn_spv_df(
+ tt_df = tt_df,
+ years = setting_bag.years,
+ software_project_names = setting_bag.software_project_names
+ )
+
+ return tts_by_spn_spv_df
+ def create_tts_by_hashtag_year_df(self, tt_df : DataFrame, setting_bag : SettingBag) -> DataFrame:
+
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tts_by_year_hashtag_df : DataFrame = self.__df_factory.create_tts_by_hashtag_year_df(
+ tt_df = tt_df,
+ years = setting_bag.years
+ )
+
+ return tts_by_year_hashtag_df
+ def create_tts_by_efs_tpl(self, tt_df : DataFrame, setting_bag : SettingBag) -> Tuple[DataFrame, DataFrame]:
+
+ '''Creates the expected dataframes out of the provided arguments.'''
+
+ tts_by_efs_tpl : Tuple[DataFrame, DataFrame] = self.__df_factory.create_tts_by_efs_tpl(
+ tt_df = tt_df,
+ is_correct = setting_bag.tts_by_efs_is_correct
+ )
+
+ return tts_by_efs_tpl
+ def create_tts_by_tr_df(self, tt_df : DataFrame, setting_bag : SettingBag) -> DataFrame:
+
+ '''Creates the expected dataframe out of the provided arguments.'''
+
+ tts_by_tr_df : DataFrame = self.__df_factory.create_tts_by_tr_df(
+ tt_df = tt_df,
+ unknown_id = setting_bag.tts_by_tr_unknown_id,
+ remove_unknown_occurrences = setting_bag.tts_by_tr_remove_unknown_occurrences
+ )
+
+ return tts_by_tr_df
+ def create_tts_by_month_md(self, tts_by_month_tpl : Tuple[DataFrame, DataFrame], setting_bag : SettingBag) -> str:
+
+ '''Creates the expected Markdown content out of the provided arguments.'''
+
+ tts_by_month_md : str = self.__md_factory.create_tts_by_month_md(
+ paragraph_title = self.extract_file_name_and_paragraph_title(id = TTID.TTSBYMONTH, setting_bag = setting_bag)[1],
+ last_update = setting_bag.md_last_update,
+ tts_by_month_upd_df = tts_by_month_tpl[1]
+ )
+
+ return tts_by_month_md
+ def create_summary(self, setting_bag : SettingBag) -> TTSummary:
+
+ '''Creates a TTSummary object out of setting_bag.'''
+
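+ # All derived dataframes and the Markdown content are computed once here and bundled into a single TTSummary.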
+ tt_df : DataFrame = self.create_tt_df(setting_bag = setting_bag)
+ tts_by_month_tpl : Tuple[DataFrame, DataFrame] = self.create_tts_by_month_tpl(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_year_df : DataFrame = self.create_tts_by_year_df(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_year_month_tpl : Tuple[DataFrame, DataFrame] = self.create_tts_by_year_month_tpl(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame] = self.create_tts_by_year_month_spnv_tpl(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame] = self.create_tts_by_year_spnv_tpl(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_spn_df : DataFrame = self.create_tts_by_spn_df(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_spn_spv_df : DataFrame = self.create_tts_by_spn_spv_df(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_year_hashtag_df : DataFrame = self.create_tts_by_hashtag_year_df(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_hashtag_df : DataFrame = self.__df_factory.create_tts_by_hashtag_df(tt_df = tt_df)
+ tts_by_efs_tpl : Tuple[DataFrame, DataFrame] = self.create_tts_by_efs_tpl(tt_df = tt_df, setting_bag = setting_bag)
+ tts_by_tr_df : DataFrame = self.create_tts_by_tr_df(tt_df = tt_df, setting_bag = setting_bag)
+ definitions_df : DataFrame = self.__df_factory.create_definitions_df()
+ tts_by_month_md : str = self.create_tts_by_month_md(tts_by_month_tpl = tts_by_month_tpl, setting_bag = setting_bag)
+
+ tt_summary : TTSummary = TTSummary(
+ tt_df = tt_df,
+ tts_by_month_tpl = tts_by_month_tpl,
+ tts_by_year_df = tts_by_year_df,
+ tts_by_year_month_tpl = tts_by_year_month_tpl,
+ tts_by_year_month_spnv_tpl = tts_by_year_month_spnv_tpl,
+ tts_by_year_spnv_tpl = tts_by_year_spnv_tpl,
+ tts_by_spn_df = tts_by_spn_df,
+ tts_by_spn_spv_df = tts_by_spn_spv_df,
+ tts_by_hashtag_year_df = tts_by_year_hashtag_df,
+ tts_by_hashtag_df = tts_by_hashtag_df,
+ tts_by_efs_tpl = tts_by_efs_tpl,
+ tts_by_tr_df = tts_by_tr_df,
+ definitions_df = definitions_df,
+ tts_by_month_md = tts_by_month_md
+ )
+
+ return tt_summary
+@dataclass(frozen=True)
+class ComponentBag():
+
+ '''Represents a collection of components.'''
+
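+ # Note: field(default = ...) instantiates these components once, at class-definition time, so all ComponentBag instances share the same default objects.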
+ file_path_manager : FilePathManager = field(default = FilePathManager())
+ file_manager : FileManager = field(default = FileManager(file_path_manager = FilePathManager()))
+
+ tt_adapter : TTAdapter = field(default = TTAdapter(
+ df_factory = TTDataFrameFactory(df_helper = TTDataFrameHelper()),
+ md_factory = TTMarkdownFactory(markdown_helper = MarkdownHelper(formatter = Formatter())
+ )))
+
+ logging_function : Callable[[str], None] = field(default = LambdaProvider().get_default_logging_function())
+ displayer : Displayer = field(default = Displayer())
+class TimeTrackingProcessor():
+
+ '''Collects all the logic related to the processing of "Time Tracking.xlsx".'''
+
+ __component_bag : ComponentBag
+ __setting_bag : SettingBag
+ __tt_summary : TTSummary
+
+ def __init__(self, component_bag : ComponentBag, setting_bag : SettingBag) -> None:
+
+ self.__component_bag = component_bag
+ self.__setting_bag = setting_bag
+
+ def __validate_summary(self) -> None:
+
+ '''Raises an exception if __tt_summary is None.'''
+
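+ # __tt_summary has no default value: hasattr() on the name-mangled attribute detects whether initialize() has been called.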
+ if not hasattr(self, '_TimeTrackingProcessor__tt_summary'):
+ raise Exception(_MessageCollection.please_run_initialize_first())
+ def __save_and_log(self, id : TTID, content : str) -> None:
+
+ '''Saves the provided Markdown content to the file derived from id and __setting_bag, then logs the outcome.'''
+
+ file_path : str = self.__component_bag.file_path_manager.create_file_path(
+ folder_path = self.__setting_bag.working_folder_path,
+ file_name = self.__component_bag.tt_adapter.extract_file_name_and_paragraph_title(id = id, setting_bag = self.__setting_bag)[0]
+ )
+
+ self.__component_bag.file_manager.save_content(content = content, file_path = file_path)
+
+ message : str = _MessageCollection.this_content_successfully_saved_as(id = id, file_path = file_path)
+ self.__component_bag.logging_function(message)
+ def __try_log_definitions(self, df : DataFrame, definitions : DataFrame) -> None:
+
+ """Logs the definitions for matching column names in the DataFrame."""
+
+ definitions_dict : dict = definitions.set_index(DEFINITIONSCN.TERM)[DEFINITIONSCN.DEFINITION].to_dict()
for column_name in df.columns:
- if definitions.get(column_name) != None:
- print(f"{column_name}: {definitions[column_name]}")
- def update_future_months_to_empty(self, tts_by_month_df : DataFrame, now : datetime) -> DataFrame:
+ if column_name in definitions_dict:
+ print(f"{column_name}: {definitions_dict[column_name]}")
- '''
- If now is 2023-08-09:
+ def __orchestrate_head_n(self, df : DataFrame, head_n : Optional[uint], display_head_n_with_tail : bool) -> DataFrame:
- Month 2022 ↕ 2023
- ...
- 8 0h 00m = 0h 00m
- 9 1h 00m ↓ 0h 00m
- 10 0h 00m = 0h 00m
- 11 0h 00m = 0h 00m
- 12 0h 00m = 0h 00m
+ '''Truncates df to head_n rows for display(), taking the tail instead of the head when display_head_n_with_tail is True; returns df unchanged when head_n is None.'''
+
+ if head_n is None:
+ return df
+ elif display_head_n_with_tail:
+ return df.tail(n = int(head_n))
+ else:
+ return df.head(n = int(head_n))
+ def __optimize_tt_for_display(self, tt_df : DataFrame) -> DataFrame:
+
+ return self.__orchestrate_head_n(
+ df = tt_df,
+ head_n = self.__setting_bag.tt_head_n,
+ display_head_n_with_tail = self.__setting_bag.tt_display_head_n_with_tail
+ )
+ def __optimize_tts_by_year_month_for_display(self, tts_by_year_month_tpl : Tuple[DataFrame, DataFrame]) -> DataFrame:
- Month 2022 ↕ 2023
- ...
- 8 0h 00m = 0h 00m
- 9 1h 00m
- 10 0h 00m
- 11 0h 00m
- 12 0h 00m
'''
+ tts_by_year_month_tpl is made of (tts_by_year_month_df, tts_by_year_month_flt_df).
- tts_by_month_upd_df : DataFrame = tts_by_month_df.copy(deep = True)
+ This method decides which of the two DataFrames to display, according to __setting_bag.tts_by_year_month_display_only_years.
+ '''
- now_year : int = now.year
- now_month : int = now.month
- cn_year : str = str(now_year)
- cn_month : str = "Month"
- new_value : str = ""
+ if self.__setting_bag.tts_by_year_month_display_only_years is None:
+ return tts_by_year_month_tpl[0]
- condition : Series = (tts_by_month_upd_df[cn_month] > now_month)
- tts_by_month_upd_df[cn_year] = np.where(condition, new_value, tts_by_month_upd_df[cn_year])
+ return tts_by_year_month_tpl[1]
+ def __optimize_tts_by_year_month_spnv_for_display(self, tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame]) -> DataFrame:
+
+ '''
+ tts_by_year_month_spnv_tpl is made of (tts_by_year_month_spnv_df, tts_by_year_month_spnv_flt_df).
+
+ This method decides which of the two DataFrames to display, according to __setting_bag.tts_by_year_month_spnv_display_only_spn.
+ '''
+
+ if self.__setting_bag.tts_by_year_month_spnv_display_only_spn is None:
+ return tts_by_year_month_spnv_tpl[0]
+
+ return tts_by_year_month_spnv_tpl[1]
+ def __optimize_tts_by_year_spnv_for_display(self, tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame]) -> DataFrame:
+
+ '''
+ tts_by_year_spnv_tpl is made of (tts_by_year_spnv_df, tts_by_year_spnv_flt_df).
+
+ This method decides which of the two DataFrames to display, according to __setting_bag.tts_by_year_spnv_display_only_spn.
+ '''
+
+ if self.__setting_bag.tts_by_year_spnv_display_only_spn is None:
+ return tts_by_year_spnv_tpl[0]
+
+ return tts_by_year_spnv_tpl[1]
+ def __optimize_tts_by_tr_for_display(self, tts_by_tr_df : DataFrame) -> DataFrame:
+
+ return self.__orchestrate_head_n(
+ df = tts_by_tr_df,
+ head_n = self.__setting_bag.tts_by_tr_head_n,
+ display_head_n_with_tail = self.__setting_bag.tts_by_tr_display_head_n_with_tail
+ )
+
+ def initialize(self) -> None:
+
+ '''Creates a TTSummary object and assigns it to __tt_summary.'''
+
+ self.__tt_summary = self.__component_bag.tt_adapter.create_summary(setting_bag = self.__setting_bag)
+ def process_tt(self) -> None:
+
+ '''
+ Performs all the actions listed in __setting_bag.options_tt.
- idx_year : int = cast(int, tts_by_month_upd_df.columns.get_loc(cn_year))
- idx_trend : int = (idx_year - 1)
- tts_by_month_upd_df.iloc[:, idx_trend] = np.where(condition, new_value, tts_by_month_upd_df.iloc[:, idx_trend])
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- return tts_by_month_upd_df
- def add_effort_status(self, sessions_df : DataFrame) -> DataFrame:
+ self.__validate_summary()
+
+ options : list = self.__setting_bag.options_tt
+ df : DataFrame = self.__optimize_tt_for_display(tt_df = self.__tt_summary.tt_df)
+ hide_index : bool = self.__setting_bag.tt_hide_index
+
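+ # The "display" option renders the dataframe via the Displayer; other process_* methods also support "save" and "log" options.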
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df, hide_index = hide_index)
+ def process_tts_by_month(self) -> None:
'''
- StartTime EndTime Effort ES_IsCorrect ES_Expected ES_Message
- 21:00 23:00 1h 00m False 2h 00m ...
- ...
+ Performs all the actions listed in __setting_bag.options_tts_by_month.
+
+ It raises an exception if the 'initialize' method has not been run yet.
'''
- es_df : DataFrame = sessions_df.copy(deep = True)
-
- cn_start_time : str = "StartTime"
- cn_end_time : str = "EndTime"
- cn_effort : str = "Effort"
- cn_effort_status : str = "EffortStatus"
+ self.__validate_summary()
- es_df[cn_effort_status] = es_df.apply(
- lambda x : self.__create_effort_status_and_cast_to_any(
- idx = x.name,
- start_time_str = x[cn_start_time],
- end_time_str = x[cn_end_time],
- effort_str = x[cn_effort]),
- axis = 1)
-
- cn_es_is_correct : str = "ES_IsCorrect"
- cn_es_expected : str = "ES_Expected"
- cn_es_message : str = "ES_Message"
+ options : list = self.__setting_bag.options_tts_by_month
+ df : DataFrame = self.__tt_summary.tts_by_month_tpl[1]
+ content : str = self.__tt_summary.tts_by_month_md
+ id : TTID = TTID.TTSBYMONTH
- es_df[cn_es_is_correct] = es_df[cn_effort_status].apply(lambda x : x.is_correct)
- es_df[cn_es_expected] = es_df[cn_effort_status].apply(lambda x : x.expected_str)
- es_df[cn_es_message] = es_df[cn_effort_status].apply(lambda x : x.message)
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
- es_df = es_df[[cn_start_time, cn_end_time, cn_effort, cn_es_is_correct, cn_es_expected, cn_es_message]]
+ if "save" in options:
+ self.__save_and_log(id = id, content = content)
+ def process_tts_by_year(self) -> None:
- return es_df
- def filter_by_is_correct(self, es_df : DataFrame, is_correct : bool) -> DataFrame:
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_year.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- '''Returns a DataFrame that contains only rows that match the provided is_correct.'''
+ self.__validate_summary()
- filtered_df : DataFrame = es_df.copy(deep = True)
+ options : list = self.__setting_bag.options_tts_by_year
+ df : DataFrame = self.__tt_summary.tts_by_year_df
- cn_es_is_correct : str = "ES_IsCorrect"
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
+ def process_tts_by_year_month(self) -> None:
- condition : Series = (filtered_df[cn_es_is_correct] == is_correct)
- filtered_df = es_df.loc[condition]
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_year_month.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- return filtered_df
- def create_time_ranges_df(self, sessions_df : DataFrame, unknown_id : str) -> DataFrame:
+ self.__validate_summary()
- '''
- TimeRangeId Occurrences
- 0 Unknown 44
- 1 18:00-20:00 19
- 2 08:00-08:30 16
- ...
- '''
+ options : list = self.__setting_bag.options_tts_by_year_month
+ df : DataFrame = self.__optimize_tts_by_year_month_for_display(tts_by_year_month_tpl = self.__tt_summary.tts_by_year_month_tpl)
+
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
+ def process_tts_by_year_month_spnv(self) -> None:
- time_ranges_df : DataFrame = sessions_df.copy(deep = True)
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_year_month_spnv.
- cn_start_time : str = "StartTime"
- cn_end_time : str = "EndTime"
- cn_time_range_id : str = "TimeRangeId"
-
- time_ranges_df = time_ranges_df[[cn_start_time, cn_end_time]]
- time_ranges_df[cn_time_range_id] = time_ranges_df.apply(
- lambda x : self.__create_time_range_id(
- start_time = x[cn_start_time],
- end_time = x[cn_end_time],
- unknown_id = unknown_id), axis = 1)
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
+
+ self.__validate_summary()
- cn_occurrences : str = "Occurrences"
+ options : list = self.__setting_bag.options_tts_by_year_month_spnv
+ df : DataFrame = self.__optimize_tts_by_year_month_spnv_for_display(tts_by_year_month_spnv_tpl = self.__tt_summary.tts_by_year_month_spnv_tpl)
+ formatters : dict = self.__setting_bag.tts_by_year_month_spnv_formatters
- time_ranges_df = time_ranges_df[[cn_time_range_id]].groupby(by = [cn_time_range_id], as_index=False).agg(
- count = pd.NamedAgg(column = cn_time_range_id, aggfunc = "count"))
- time_ranges_df.rename(columns={"count" : cn_occurrences}, inplace = True)
- time_ranges_df = time_ranges_df.sort_values(by = [cn_occurrences], ascending = False).reset_index(drop = True)
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df, formatters = formatters)
+ def process_tts_by_year_spnv(self) -> None:
+
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_year_spnv.
- return time_ranges_df
- def remove_unknown_id(self, time_ranges_df : DataFrame, unknown_id : str) -> DataFrame:
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- '''Removes the provided uknown_id from the "TimeRangeId" column of the provided DataFrame.'''
+ self.__validate_summary()
- cn_time_range_id : str = "TimeRangeId"
+ options : list = self.__setting_bag.options_tts_by_year_spnv
+ df : DataFrame = self.__optimize_tts_by_year_spnv_for_display(tts_by_year_spnv_tpl = self.__tt_summary.tts_by_year_spnv_tpl)
+ formatters : dict = self.__setting_bag.tts_by_year_spnv_formatters
- condition : Series = (time_ranges_df[cn_time_range_id] != unknown_id)
- time_ranges_df = time_ranges_df.loc[condition]
- time_ranges_df.reset_index(drop = True, inplace = True)
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df, formatters = formatters)
+ def process_tts_by_spn(self) -> None:
- return time_ranges_df
- def filter_by_top_n_occurrences(self, time_ranges_df : DataFrame, n : int, ascending : bool = False) -> DataFrame:
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_spn.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- '''Returns only the top n rows by "Occurrences" of the provided DataFrame.'''
+ self.__validate_summary()
- cn_occurrences : str = "Occurrences"
+ options : list = self.__setting_bag.options_tts_by_spn
+ df : DataFrame = self.__tt_summary.tts_by_spn_df
+ formatters : dict = self.__setting_bag.tts_by_spn_formatters
+ definitions_df : DataFrame = self.__tt_summary.definitions_df
- time_ranges_df.sort_values(by = cn_occurrences, ascending = [ascending], inplace = True)
- time_ranges_df = time_ranges_df.iloc[0:n]
- time_ranges_df.reset_index(drop = True, inplace = True)
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df, formatters = formatters)
- return time_ranges_df
-class MarkdownProcessor():
+ if "log" in options:
+ self.__try_log_definitions(df = df, definitions = definitions_df)
+ def process_tts_by_spn_spv(self) -> None:
- '''Collects all the logic related to the processing of Markdown content.'''
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_spn_spv.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- __component_bag : ComponentBag
- __setting_bag : SettingBag
+ self.__validate_summary()
- def __init__(self, component_bag : ComponentBag, setting_bag : SettingBag) -> None:
+ options : list = self.__setting_bag.options_tts_by_spn_spv
+ df : DataFrame = self.__tt_summary.tts_by_spn_spv_df
+ definitions_df : DataFrame = self.__tt_summary.definitions_df
- self.__component_bag = component_bag
- self.__setting_bag = setting_bag
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
- def __get_tts_by_month_md(self, last_update : datetime, tts_by_month_upd_df : DataFrame) -> str:
+ if "log" in options:
+ self.__try_log_definitions(df = df, definitions = definitions_df)
+ def process_tts_by_hashtag(self) -> None:
- '''Creates the Markdown content for a "Time Tracking By Month" file out of the provided dataframe.'''
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_hashtag.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- md_paragraph_title : str = "Time Tracking By Month"
+ self.__validate_summary()
- markdown_header : str = self.__component_bag.markdown_helper.get_markdown_header(last_update = last_update, paragraph_title = md_paragraph_title)
- tts_by_month_upd_md : str = tts_by_month_upd_df.to_markdown(index = False)
+ options : list = self.__setting_bag.options_tts_by_hashtag
+ df : DataFrame = self.__tt_summary.tts_by_hashtag_df
+ formatters : dict = self.__setting_bag.tts_by_hashtag_formatters
- md_content : str = markdown_header
- md_content += "\n"
- md_content += tts_by_month_upd_md
- md_content += "\n"
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df, formatters = formatters)
+ def process_tts_by_hashtag_year(self) -> None:
- return md_content
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_hashtag_year.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
+
+ self.__validate_summary()
+
+ options : list = self.__setting_bag.options_tts_by_hashtag_year
+ df : DataFrame = self.__tt_summary.tts_by_hashtag_year_df
- def process_tts_by_month_md(self, tts_by_month_upd_df : DataFrame) -> None:
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
+ def process_tts_by_efs(self) -> None:
- '''Performs all the tasks related to the "Time Tracking By Month" file.'''
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_efs.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
- content : str = self.__get_tts_by_month_md(
- last_update = self.__setting_bag.last_update,
- tts_by_month_upd_df = tts_by_month_upd_df)
+ self.__validate_summary()
- if self.__setting_bag.show_tts_by_month_md:
- file_name_content : str = self.__component_bag.markdown_helper.format_file_name_as_content(file_name = self.__setting_bag.tts_by_month_file_name)
- self.__component_bag.logging_function(file_name_content)
- self.__component_bag.logging_function(content)
+ options : list = self.__setting_bag.options_tts_by_efs
+ df : DataFrame = self.__tt_summary.tts_by_efs_tpl[1]
- if self.__setting_bag.save_tts_by_month_md:
- file_path : str = self.__component_bag.file_path_manager.create_file_path(
- folder_path = self.__setting_bag.working_folder_path,
- file_name = self.__setting_bag.tts_by_month_file_name)
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
+ def process_tts_by_tr(self) -> None:
+
+ '''
+ Performs all the actions listed in __setting_bag.options_tts_by_tr.
- self.__component_bag.file_manager.save_content(content = content, file_path = file_path)
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
+
+ self.__validate_summary()
+
+ options : list = self.__setting_bag.options_tts_by_tr
+ df : DataFrame = self.__optimize_tts_by_tr_for_display(tts_by_tr_df = self.__tt_summary.tts_by_tr_df)
+
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
+ def process_definitions(self) -> None:
+
+ '''
+ Performs all the actions listed in __setting_bag.options_definitions.
+
+ It raises an exception if the 'initialize' method has not been run yet.
+ '''
+
+ self.__validate_summary()
+
+ options : list = self.__setting_bag.options_definitions
+ df : DataFrame = self.__tt_summary.definitions_df
+
+ if "display" in options:
+ self.__component_bag.displayer.display(df = df)
# MAIN
if __name__ == "__main__":
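For reference, a minimal usage sketch of the restructured processor API shown above. This is a sketch, not the canonical entry point: it assumes TimeTrackingProcessor accepts the same component_bag/setting_bag pair the old MarkdownProcessor did, and it reuses the SettingBag arguments from ObjectMother.get_setting_bag() further down in this patch; every other parameter is left at its default.

from nwtimetracking import ComponentBag, SettingBag, TimeTrackingProcessor

# Assumed wiring: the constructor keyword names mirror the old MarkdownProcessor.
setting_bag = SettingBag(
    options_tt = ["display"],
    options_tts_by_month = ["display", "save"],
    options_tts_by_year = ["display"],
    options_tts_by_year_month = ["display"],
    options_tts_by_year_month_spnv = ["display"],
    options_tts_by_year_spnv = ["display"],
    options_tts_by_spn = ["display", "log"],
    options_tts_by_spn_spv = [],
    options_tts_by_hashtag = ["display"],
    options_tts_by_hashtag_year = ["display"],
    options_tts_by_efs = ["display"],
    options_tts_by_tr = ["display"],
    options_definitions = ["display"],
    excel_nrows = 1301,
    tts_by_year_month_spnv_display_only_spn = "nwtimetracking",
    tts_by_year_spnv_display_only_spn = "nwtimetracking",
    tts_by_spn_spv_display_only_spn = "nwtimetracking"
)
processor = TimeTrackingProcessor(component_bag = ComponentBag(), setting_bag = setting_bag)

processor.initialize()               # the process_* methods raise if this has not run yet
processor.process_tts_by_hashtag()   # "display" in options -> shows the hashtag summary
processor.process_definitions()      # "display" in options -> shows the definitions table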
diff --git a/src/setup.py b/src/setup.py
index faf4f81..589d89d 100644
--- a/src/setup.py
+++ b/src/setup.py
@@ -1,9 +1,30 @@
'''Contains packaging information about nwtimetracking.py.'''
# GLOBAL MODULES
+from setuptools import setup
+
# INFORMATION
MODULE_ALIAS : str = "nwtt"
MODULE_NAME : str = "nwtimetracking"
-MODULE_VERSION : str = "3.9.0"
+MODULE_VERSION : str = "4.0.0"
-# SETUP
\ No newline at end of file
+# SETUP
+setup(
+ name = MODULE_NAME,
+ version = MODULE_VERSION,
+ description = "An application designed to run automated data analysis tasks on 'Time Tracking.xlsx'.",
+ author = "numbworks",
+ url = f"https://github.com/numbworks/{MODULE_NAME}",
+ py_modules = [ MODULE_NAME ],
+ install_requires = [
+ "numpy>=2.1.2",
+ "pyarrow>=17.0.0",
+ "pyarrow>=17.0.0",
+ "pandas>=2.2.3",
+ "requests>=2.32.3",
+ "tabulate>=0.9.0",
+ "nwshared @ git+https://github.com/numbworks/nwshared.git@v1.8.0#egg=nwshared&subdirectory=src"
+ ],
+ python_requires = ">=3.12",
+ license = "MIT"
+)
\ No newline at end of file
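Once the package has been built and installed from this setup.py, its metadata can be inspected from Python; a minimal sketch using only the standard library (the distribution name comes from MODULE_NAME above, and it assumes the package is already installed, e.g. via pip, from src/setup.py):

from importlib.metadata import version, requires

# Reads the installed distribution's metadata.
print(version("nwtimetracking"))    # expected: "4.0.0"
print(requires("nwtimetracking"))   # the install_requires list declared above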
diff --git a/tests/nwtimetrackingtests.py b/tests/nwtimetrackingtests.py
index 1cea68b..5384b5a 100644
--- a/tests/nwtimetrackingtests.py
+++ b/tests/nwtimetrackingtests.py
@@ -5,20 +5,21 @@
from datetime import datetime
from datetime import date
from datetime import timedelta
-from numpy import int64
+from numpy import int64, uint
from pandas import DataFrame
from pandas.testing import assert_frame_equal
from parameterized import parameterized
from types import FunctionType
-from typing import Tuple
-from unittest.mock import Mock, call, patch
+from typing import Literal, Optional, Tuple, cast
+from unittest.mock import Mock, patch
# LOCAL MODULES
import sys, os
sys.path.append(os.path.dirname(__file__).replace('tests', 'src'))
-from nwtimetracking import ComponentBag, MarkdownProcessor, SoftwareProjectNameProvider, YearlyTarget, SettingBag, EffortStatus, _MessageCollection
-from nwtimetracking import DefaultPathProvider, YearProvider, TimeTrackingManager
-from nwshared import MarkdownHelper, Formatter, FilePathManager, FileManager
+from nwshared import MarkdownHelper, Formatter, FilePathManager, FileManager, Displayer
+from nwtimetracking import TTCN, TTID, DEFINITIONSCN, _MessageCollection, TimeTrackingProcessor, YearlyTarget, EffortStatus, MDInfo, TTSummary
+from nwtimetracking import DefaultPathProvider, YearProvider, SoftwareProjectNameProvider, MDInfoProvider, SettingBag
+from nwtimetracking import TTDataFrameHelper, TTDataFrameFactory, TTMarkdownFactory, TTAdapter, ComponentBag
# SUPPORT METHODS
class SupportMethodProvider():
@@ -67,7 +68,6 @@ def are_yearly_targets_equal(yt1 : YearlyTarget, yt2 : YearlyTarget) -> bool:
'''
return (yt1.hours == yt2.hours and yt1.year == yt2.year)
-
@staticmethod
def are_lists_of_yearly_targets_equal(list1 : list[YearlyTarget], list2 : list[YearlyTarget]) -> bool:
@@ -94,58 +94,31 @@ class ObjectMother():
'''Collects all the DTOs required by the unit tests.'''
@staticmethod
- def create_setting_bag() -> SettingBag:
-
- return SettingBag(
- years = [2015],
- yearly_targets = [
- YearlyTarget(year = 2015, hours = timedelta(hours = 0))
- ],
- excel_path = DefaultPathProvider().get_default_time_tracking_path(),
- excel_books_skiprows = 0,
- excel_books_nrows = 920,
- excel_books_tabname = "Sessions",
- n_generic = 5,
- n_by_month = 12,
- now = datetime.now(),
- software_project_names = [
- "NW.MarkdownTables"
- ],
- software_project_names_by_spv = [
- "nwreadinglistmanager"
- ],
- remove_untagged_from_de = True,
- definitions = {
- "DME": "Development Monthly Effort",
- "TME": "Total Monthly Effort",
- "DYE": "Development Yearly Effort",
- "TYE": "Total Yearly Effort",
- "DE": "Development Effort",
- "TE": "Total Effort"
- },
- tt_by_year_hashtag_years = [2023],
- tts_by_month_update_future_values_to_empty = True,
- effort_status_n = 25,
- effort_status_is_correct = False,
- time_ranges_unknown_id = "Unknown",
- time_ranges_top_n = 5,
- time_ranges_remove_unknown_id = True,
- time_ranges_filter_by_top_n = True,
- show_sessions_df = False,
- show_tt_by_year_df = True,
- show_tt_by_year_month_df = True,
- show_tt_by_year_month_spnv_df = False,
- show_tt_by_year_spnv_df = False,
- show_tt_by_spn_df = True,
- show_tt_by_spn_spv_df = True,
- show_tt_by_year_hashtag = True,
- show_tt_by_hashtag = True,
- show_tts_by_month_df = True,
- show_effort_status_df = True,
- show_time_ranges_df = True
+ def get_setting_bag() -> SettingBag:
+
+ setting_bag : SettingBag = SettingBag(
+ options_tt = ["display"],
+ options_tts_by_month = ["display", "save"],
+ options_tts_by_year = ["display"],
+ options_tts_by_year_month = ["display"],
+ options_tts_by_year_month_spnv = ["display"],
+ options_tts_by_year_spnv = ["display"],
+ options_tts_by_spn = ["display", "log"],
+ options_tts_by_spn_spv = [],
+ options_tts_by_hashtag = ["display"],
+ options_tts_by_hashtag_year = ["display"],
+ options_tts_by_efs = ["display"],
+ options_tts_by_tr = ["display"],
+ options_definitions = ["display"],
+ excel_nrows = 1301,
+ tts_by_year_month_spnv_display_only_spn = "nwtimetracking",
+ tts_by_year_spnv_display_only_spn = "nwtimetracking",
+ tts_by_spn_spv_display_only_spn = "nwtimetracking"
)
+
+ return setting_bag
@staticmethod
- def create_excel_data() -> DataFrame:
+ def get_excel_data() -> DataFrame:
excel_data_dict : dict = {
"Date": "2015-10-31",
@@ -163,7 +136,7 @@ def create_excel_data() -> DataFrame:
return excel_data_df
@staticmethod
- def create_sessions_df_column_names() -> list[str]:
+ def get_tt_df_column_names() -> list[str]:
column_names : list[str] = []
column_names.append("Date") # [0], date
@@ -179,7 +152,7 @@ def create_sessions_df_column_names() -> list[str]:
return column_names
@staticmethod
- def create_sessions_df_dtype_names() -> list[str]:
+ def get_tt_df_dtype_names() -> list[str]:
'''Note: the first one should be "date", but it's rendered by Pandas as "object".'''
@@ -198,7 +171,7 @@ def create_sessions_df_dtype_names() -> list[str]:
return expected_dtype_names
@staticmethod
- def create_yearly_targets() -> list[YearlyTarget]:
+ def get_yearly_targets() -> list[YearlyTarget]:
yearly_targets = [
YearlyTarget(year = 2015, hours = timedelta(hours = 0)),
@@ -214,8 +187,9 @@ def create_yearly_targets() -> list[YearlyTarget]:
]
return yearly_targets
+
@staticmethod
- def create_sessions_df() -> DataFrame:
+ def get_tt_df() -> DataFrame:
'''
Date StartTime EndTime Effort Hashtag Descriptor IsSoftwareProject IsReleaseDay Year Month
@@ -237,9 +211,8 @@ def create_sessions_df() -> DataFrame:
'Year': np.array([2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024, 2024], dtype=int64),
'Month': np.array([2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2], dtype=int64),
}, index=pd.RangeIndex(start=980, stop=1001, step=1))
-
@staticmethod
- def create_tt_by_year_df() -> DataFrame:
+ def get_tts_by_year_df() -> DataFrame:
'''
Year Effort YearlyTarget TargetDiff IsTargetMet
@@ -254,22 +227,27 @@ def create_tt_by_year_df() -> DataFrame:
'IsTargetMet': np.array([False], dtype=bool),
}, index=pd.RangeIndex(start=0, stop=1, step=1))
@staticmethod
- def create_tt_by_year_month_df() -> DataFrame:
+ def get_tts_by_year_month_tpl() -> Tuple[DataFrame, DataFrame]:
'''
Year Month Effort YearlyTotal ToTarget
0 2024 2 36h 00m 36h 00m -214h 00m
+
+ Year Month Effort YearlyTotal ToTarget
+ 0 2024 2 36h 00m 36h 00m -214h 00m
'''
- return pd.DataFrame({
+ df : DataFrame = pd.DataFrame({
'Year': np.array([2024], dtype=int64),
'Month': np.array([2], dtype=int64),
'Effort': np.array(['36h 00m'], dtype=object),
'YearlyTotal': np.array(['36h 00m'], dtype=object),
'ToTarget': np.array(['-214h 00m'], dtype=object),
}, index=pd.RangeIndex(start=0, stop=1, step=1))
+
+ return (df, df)
@staticmethod
- def create_tt_by_year_month_spnv_df() -> DataFrame:
+ def get_tts_by_year_month_spnv_tpl() -> Tuple[DataFrame, DataFrame]:
'''
Year Month ProjectName ProjectVersion Effort DME %_DME TME %_TME
@@ -277,9 +255,12 @@ def create_tt_by_year_month_spnv_df() -> DataFrame:
1 2024 2 NW.Shared.Serialization 1.0.0 04h 15m 08h 45m 48.57 36h 00m 11.81
2 2024 2 NW.UnivariateForecasting 4.2.0 00h 45m 08h 45m 8.57 36h 00m 2.08
3 2024 2 nwreadinglistmanager 2.1.0 02h 00m 08h 45m 22.86 36h 00m 5.56
+
+ Year Month ProjectName ProjectVersion Effort DME %_DME TME %_TME
+ 0 2024 2 NW.NGramTextClassification 4.2.0 01h 15m 08h 45m 14.29 36h 00m 3.47
'''
- return pd.DataFrame({
+ df1 : DataFrame = pd.DataFrame({
'Year': np.array([2024, 2024, 2024, 2024], dtype=int64),
'Month': np.array([2, 2, 2, 2], dtype=int64),
'ProjectName': np.array(['NW.NGramTextClassification', 'NW.Shared.Serialization', 'NW.UnivariateForecasting', 'nwreadinglistmanager'], dtype=object),
@@ -290,8 +271,22 @@ def create_tt_by_year_month_spnv_df() -> DataFrame:
'TME': np.array(['36h 00m', '36h 00m', '36h 00m', '36h 00m'], dtype=object),
'%_TME': np.array([3.47, 11.81, 2.08, 5.56], dtype= np.float64),
}, index=pd.RangeIndex(start=0, stop=4, step=1))
+
+ df2 : DataFrame = pd.DataFrame({
+ 'Year': np.array([2024], dtype=int64),
+ 'Month': np.array([2], dtype=int64),
+ 'ProjectName': np.array(['NW.NGramTextClassification'], dtype=object),
+ 'ProjectVersion': np.array(['4.2.0'], dtype=object),
+ 'Effort': np.array(['01h 15m'], dtype=object),
+ 'DME': np.array(['08h 45m'], dtype=object),
+ '%_DME': np.array([14.29], dtype= np.float64),
+ 'TME': np.array(['36h 00m'], dtype=object),
+ '%_TME': np.array([3.47], dtype= np.float64),
+ }, index=pd.RangeIndex(start=0, stop=1, step=1))
+
+ return (df1, df2)
@staticmethod
- def create_tt_by_year_spnv_df() -> DataFrame:
+ def get_tts_by_year_spnv_tpl() -> Tuple[DataFrame, DataFrame]:
'''
Year ProjectName ProjectVersion Effort DYE %_DYE TYE %_TYE
@@ -299,9 +294,12 @@ def create_tt_by_year_spnv_df() -> DataFrame:
1 2024 NW.Shared.Serialization 1.0.0 04h 15m 08h 45m 48.57 36h 00m 11.81
2 2024 NW.UnivariateForecasting 4.2.0 00h 45m 08h 45m 8.57 36h 00m 2.08
3 2024 nwreadinglistmanager 2.1.0 02h 00m 08h 45m 22.86 36h 00m 5.56
+
+ Year ProjectName ProjectVersion Effort DYE %_DYE TYE %_TYE
+ 0 2024 NW.NGramTextClassification 4.2.0 01h 15m 08h 45m 14.29 36h 00m 3.47
'''
- return pd.DataFrame({
+ df1 : DataFrame = pd.DataFrame({
'Year': np.array([2024, 2024, 2024, 2024], dtype=int64),
'ProjectName': np.array(['NW.NGramTextClassification', 'NW.Shared.Serialization', 'NW.UnivariateForecasting', 'nwreadinglistmanager'], dtype=object),
'ProjectVersion': np.array(['4.2.0', '1.0.0', '4.2.0', '2.1.0'], dtype=object),
@@ -311,8 +309,83 @@ def create_tt_by_year_spnv_df() -> DataFrame:
'TYE': np.array(['36h 00m', '36h 00m', '36h 00m', '36h 00m'], dtype=object),
'%_TYE': np.array([3.47, 11.81, 2.08, 5.56], dtype= np.float64),
}, index=pd.RangeIndex(start=0, stop=4, step=1))
+
+ df2 : DataFrame = pd.DataFrame({
+ 'Year': np.array([2024], dtype=int64),
+ 'ProjectName': np.array(['NW.NGramTextClassification'], dtype=object),
+ 'ProjectVersion': np.array(['4.2.0'], dtype=object),
+ 'Effort': np.array(['01h 15m'], dtype=object),
+ 'DYE': np.array(['08h 45m'], dtype=object),
+ '%_DYE': np.array([14.29], dtype= np.float64),
+ 'TYE': np.array(['36h 00m'], dtype=object),
+ '%_TYE': np.array([3.47], dtype= np.float64),
+ }, index=pd.RangeIndex(start=0, stop=1, step=1))
+
+ return (df1, df2)
+ @staticmethod
+ def get_tts_by_spn_spv_df() -> DataFrame:
+
+ '''
+ ProjectName ProjectVersion Effort
+ 0 NW.NGramTextClassification 4.2.0 01h 15m
+ 1 NW.Shared.Serialization 1.0.0 04h 15m
+ 2 NW.UnivariateForecasting 4.2.0 00h 45m
+ 3 nwreadinglistmanager 2.1.0 02h 00m
+ '''
+
+ return pd.DataFrame({
+ 'ProjectName': np.array(['NW.NGramTextClassification', 'NW.Shared.Serialization', 'NW.UnivariateForecasting', 'nwreadinglistmanager'], dtype=object),
+ 'ProjectVersion': np.array(['4.2.0', '1.0.0', '4.2.0', '2.1.0'], dtype=object),
+ 'Effort': np.array(['01h 15m', '04h 15m', '00h 45m', '02h 00m'], dtype=object),
+ }, index=pd.RangeIndex(start=0, stop=4, step=1))
+ @staticmethod
+ def get_tts_by_month_tpl() -> Tuple[DataFrame, DataFrame]:
+
+ '''
+ Month 2024
+ 0 1 00h 00m
+ 1 2 36h 00m
+ ...
+ 10 11 00h 00m
+ 11 12 00h 00m
+
+ Month 2024
+ ...
+ 10 11 00h 00m
+
+ now = 2024-11-30
+ '''
+
+ df1 : DataFrame = pd.DataFrame({
+ 'Month': np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], dtype=int64),
+ '2024': np.array(['00h 00m', '36h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m'], dtype=object)
+ }, index=pd.RangeIndex(start=0, stop=12, step=1))
+
+ df2 : DataFrame = pd.DataFrame({
+ 'Month': np.array(['1', '2', '3', '4', '5', '6', '7', '8', '9', '10', '11', ''], dtype=object),
+ '2024': np.array(['00h 00m', '36h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', ''], dtype=object)
+ }, index=pd.RangeIndex(start=0, stop=12, step=1))
+
+ return (df1, df2)
+ @staticmethod
+ def get_tts_by_tr_df() -> DataFrame:
+
+ '''
+ TimeRangeId Occurrences
+ 0 08:00-08:30 1
+ 1 08:15-12:45 1
+ 2 08:45-12:15 1
+ 3 10:15-13:00 1
+ 4 11:00-12:30 1
+ ...
+ '''
+
+ return pd.DataFrame({
+ 'TimeRangeId': np.array(['08:00-08:30', '15:30-16:30', '22:00-23:00', '21:00-22:00', '20:15-21:15', '20:00-20:15', '17:15-18:00', '17:15-17:45', '17:00-18:00', '15:30-18:00', '14:30-16:45', '08:15-12:45', '14:00-19:45', '13:30-15:00', '13:30-14:00', '11:15-13:00', '11:00-13:00', '11:00-12:30', '10:15-13:00', '08:45-12:15', '23:00-23:30'], dtype=object),
+ 'Occurrences': np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], dtype= np.int64),
+ }, index=pd.RangeIndex(start=0, stop=21, step=1))
@staticmethod
- def create_tt_by_spn_df() -> DataFrame:
+ def get_tts_by_spn_df() -> DataFrame:
'''
Hashtag ProjectName Effort DE %_DE TE %_TE
@@ -332,23 +405,7 @@ def create_tt_by_spn_df() -> DataFrame:
'%_TE': np.array([5.56, 11.81, 3.47, 2.08], dtype= np.float64),
}, index=pd.RangeIndex(start=0, stop=4, step=1))
@staticmethod
- def create_tt_by_spn_spv_df() -> DataFrame:
-
- '''
- ProjectName ProjectVersion Effort
- 0 NW.NGramTextClassification 4.2.0 01h 15m
- 1 NW.Shared.Serialization 1.0.0 04h 15m
- 2 NW.UnivariateForecasting 4.2.0 00h 45m
- 3 nwreadinglistmanager 2.1.0 02h 00m
- '''
-
- return pd.DataFrame({
- 'ProjectName': np.array(['NW.NGramTextClassification', 'NW.Shared.Serialization', 'NW.UnivariateForecasting', 'nwreadinglistmanager'], dtype=object),
- 'ProjectVersion': np.array(['4.2.0', '1.0.0', '4.2.0', '2.1.0'], dtype=object),
- 'Effort': np.array(['01h 15m', '04h 15m', '00h 45m', '02h 00m'], dtype=object),
- }, index=pd.RangeIndex(start=0, stop=4, step=1))
- @staticmethod
- def create_tt_by_year_hashtag_df() -> DataFrame:
+ def get_tts_by_hashtag_year_df() -> DataFrame:
'''
Year Hashtag Effort
@@ -364,7 +421,7 @@ def create_tt_by_year_hashtag_df() -> DataFrame:
'Effort': np.array(['06h 15m', '04h 30m', '02h 00m', '23h 15m'], dtype=object),
}, index=pd.RangeIndex(start=0, stop=4, step=1))
@staticmethod
- def create_tt_by_hashtag_df() -> DataFrame:
+ def get_tts_by_hashtag_df() -> DataFrame:
'''
Hashtag Effort Effort%
@@ -380,332 +437,648 @@ def create_tt_by_hashtag_df() -> DataFrame:
'Effort%': np.array([64.58, 17.36, 12.5, 5.56], dtype= np.float64),
}, index=pd.RangeIndex(start=0, stop=4, step=1))
@staticmethod
- def create_tts_by_month_df() -> DataFrame:
-
- '''
- Month 2024
- 0 1 00h 00m
- 1 2 36h 00m
- ...
- '''
-
- return pd.DataFrame({
- 'Month': np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12], dtype=int64),
- '2024': np.array(['00h 00m', '36h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m', '00h 00m'], dtype=object)
- }, index=pd.RangeIndex(start=0, stop=12, step=1))
- @staticmethod
- def create_tts_by_month_upd_df() -> DataFrame:
-
- '''
- Month 2024
- 0 1 00h 00m
- 1 2 36h 00m
- ...
- '''
-
- return pd.DataFrame({
- 'Month': np.array(['1', '2', '', '', '', '', '', '', '', '', '', ''], dtype=object),
- '2024': np.array(['00h 00m', '36h 00m', '', '', '', '', '', '', '', '', '', ''], dtype=object)
- }, index=pd.RangeIndex(start=0, stop=12, step=1))
- @staticmethod
- def create_time_ranges_df() -> DataFrame:
-
- '''
- TimeRangeId Occurrences
- 0 08:00-08:30 1
- 1 08:15-12:45 1
- 2 08:45-12:15 1
- 3 10:15-13:00 1
- 4 11:00-12:30 1
- ...
- '''
+ def get_definitions_df() -> DataFrame:
+
+ columns : list[str] = [DEFINITIONSCN.TERM, DEFINITIONSCN.DEFINITION]
+
+ definitions : dict[str, str] = {
+ "DME": "Development Monthly Effort",
+ "TME": "Total Monthly Effort",
+ "DYE": "Development Yearly Effort",
+ "TYE": "Total Yearly Effort",
+ "DE": "Development Effort",
+ "TE": "Total Effort"
+ }
+
+ definitions_df : DataFrame = DataFrame(
+ data = definitions.items(),
+ columns = columns
+ )
- return pd.DataFrame({
- 'TimeRangeId': np.array(['08:00-08:30', '15:30-16:30', '22:00-23:00', '21:00-22:00', '20:15-21:15', '20:00-20:15', '17:15-18:00', '17:15-17:45', '17:00-18:00', '15:30-18:00', '14:30-16:45', '08:15-12:45', '14:00-19:45', '13:30-15:00', '13:30-14:00', '11:15-13:00', '11:00-13:00', '11:00-12:30', '10:15-13:00', '08:45-12:15', '23:00-23:30'], dtype=object),
- 'Occurrences': np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1], dtype= np.int64),
- }, index=pd.RangeIndex(start=0, stop=21, step=1))
+ return definitions_df
@staticmethod
- def create_dtos_for_ttsbymonthmd() -> Tuple[DataFrame, str]:
-
- data : list = [
- [1, "00h 00m", "↑", "18h 00m", "↑", "88h 30m", "↓", "80h 15m", "↓", "60h 00m", "↓", "29h 15m", "↑", "53h 00m", "↓", "00h 00m", "↑", "06h 00m", "↑", "45h 45m"]
- ]
- columns : list[str] = ["Month", "2015", "↕", "2016", "↕", "2017", "↕", "2018", "↕", "2019", "↕", "2020", "↕", "2021", "↕", "2022", "↕", "2023", "↕", "2024"]
- df : DataFrame = pd.DataFrame(data, columns = columns)
+ def get_tts_by_month_md() -> str:
- lines : list[str] = [
+ lines: list[str] = [
"## Revision History",
"",
"|Date|Author|Description|",
"|---|---|---|",
"|2020-12-22|numbworks|Created.|",
- "|2024-10-01|numbworks|Last update.|",
+ "|2024-11-30|numbworks|Last update.|",
"",
"## Time Tracking By Month",
"",
- "| Month | 2015 | ↕ | 2016 | ↕ | 2017 | ↕ | 2018 | ↕ | 2019 | ↕ | 2020 | ↕ | 2021 | ↕ | 2022 | ↕ | 2023 | ↕ | 2024 |",
- "|--------:|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|:----|:--------|",
- "| 1 | 00h 00m | ↑ | 18h 00m | ↑ | 88h 30m | ↓ | 80h 15m | ↓ | 60h 00m | ↓ | 29h 15m | ↑ | 53h 00m | ↓ | 00h 00m | ↑ | 06h 00m | ↑ | 45h 45m |"
+ "| Month | 2024 |",
+ "|--------:|:--------|",
+ "| 1 | 00h 00m |",
+ "| 2 | 36h 00m |",
+ "| 3 | 00h 00m |",
+ "| 4 | 00h 00m |",
+ "| 5 | 00h 00m |",
+ "| 6 | 00h 00m |",
+ "| 7 | 00h 00m |",
+ "| 8 | 00h 00m |",
+ "| 9 | 00h 00m |",
+ "| 10 | 00h 00m |",
+ "| 11 | 00h 00m |",
+ "| 12 | 00h 00m |",
]
- expected : str = "\n".join(lines) + "\n"
-
- return (df, expected)
- @staticmethod
- def create_service_objects_for_ttsbymonthmd() -> Tuple[ComponentBag, SettingBag, MarkdownProcessor]:
-
- component_bag : Mock = Mock()
- component_bag.logging_function = Mock()
- component_bag.file_manager.save_content = Mock()
- component_bag.markdown_helper = MarkdownHelper(formatter = Formatter())
- component_bag.file_path_manager = FilePathManager()
-
- setting_bag : Mock = Mock()
- setting_bag.last_update = datetime(2024, 10, 1)
- setting_bag.tts_by_month_file_name = "TIMETRACKINGBYMONTH.md"
- setting_bag.working_folder_path = "/home/nwtimetracking/"
- setting_bag.show_tts_by_month_md = True
- setting_bag.save_tts_by_month_md = True
- markdown_processor : MarkdownProcessor = MarkdownProcessor(
- component_bag = component_bag,
- setting_bag = setting_bag
- )
+ expected: str = "\n".join(lines) + "\n"
- return (component_bag, setting_bag, markdown_processor)
+ return expected
# TEST CLASSES
-class ComponentBagTestCase(unittest.TestCase):
-
- def test_init_shouldinitializeobjectwithexpectedproperties_whendefault(self) -> None:
+class MessageCollectionTestCase(unittest.TestCase):
+ def test_effortstatusmismatchingeffort_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
+ idx : int = 4
+ start_time_str : str = "20:00"
+ end_time_str : str = "00:00"
+ actual_str : str = "3h 00m"
+ expected_str : str = "4h 00m"
+
+ expected_message : str = (
+ "The provided row contains a mismatching effort "
+ "(idx: '4', start_time: '20:00', end_time: '00:00', actual_effort: '3h 00m', expected_effort: '4h 00m')."
+ )
+
# Act
- component_bag : ComponentBag = ComponentBag()
+ actual_message : str = _MessageCollection.effort_status_mismatching_effort(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ actual_str = actual_str,
+ expected_str = expected_str
+ )
# Assert
- self.assertIsInstance(component_bag.file_path_manager, FilePathManager)
- self.assertIsInstance(component_bag.file_manager, FileManager)
- self.assertIsInstance(component_bag.logging_function, FunctionType)
- self.assertIsInstance(component_bag.markdown_helper, MarkdownHelper)
-class DefaultPathProviderTestCase(unittest.TestCase):
-
- def test_getdefaulttimetrackingpath_shouldreturnexpectedpath_wheninvoked(self):
+ self.assertEqual(expected_message, actual_message)
+ def test_effortstatusnotpossibletocreate_shouldreturnexpectedmessage_wheninvoked(self):
- '''"C:/project_dir/src/" => "C:/project_dir/data/Time Tracking.xlsx"'''
-
# Arrange
- expected : str = "C:/project_dir/data/Time Tracking.xlsx"
+ idx : int = 770
+ start_time_str : str = "22:00"
+ end_time_str : str = "00:00"
+ effort_str : str = "2h 00m"
+
+ expected_message : str = (
+ "It has not been possible to create an EffortStatus for the provided parameters "
+ "(idx: '770', start_time_str: '22:00', end_time_str: '00:00', effort_str: '2h 00m')."
+ )
# Act
- with patch.object(os, 'getcwd', return_value="C:/project_dir/src/") as mocked_context:
- actual : str = DefaultPathProvider().get_default_time_tracking_path()
+ actual_message : str = _MessageCollection.effort_status_not_possible_to_create(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ effort_str = effort_str
+ )
# Assert
- self.assertEqual(expected, actual)
-class YearProviderTestCase(unittest.TestCase):
+ self.assertEqual(expected_message, actual_message)
+ def test_effortstatusnotamongexpectedtimevalues_shouldreturnexpectedmessage_wheninvoked(self):
+
+ # Arrange
+ time : str = "25:00"
+ expected_message : str = "The provided time ('25:00') is not among the expected time values."
- def test_getallyears_shouldreturnexpectedlist_wheninvoked(self):
+ # Act
+ actual_message : str = _MessageCollection.effort_status_not_among_expected_time_values(time = time)
+ # Assert
+ self.assertEqual(expected_message, actual_message)
+ def test_starttimeendtimeareempty_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
- expected : list[int] = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]
+ expected : str = "''start_time' and/or 'end_time' are empty, 'effort' can't be verified. We assume that it's correct."
# Act
- actual : list[int] = YearProvider().get_all_years()
+ actual : str = _MessageCollection.starttime_endtime_are_empty()
# Assert
self.assertEqual(expected, actual)
- def test_getallyearlytargets_shouldreturnexpectedlist_wheninvoked(self):
-
+ def test_effortiscorrect_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
- expected : list[YearlyTarget] = [
- YearlyTarget(year = 2015, hours = timedelta(hours = 0)),
- YearlyTarget(year = 2016, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2017, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2018, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2019, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2020, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2021, hours = timedelta(hours = 500)),
- YearlyTarget(year = 2022, hours = timedelta(hours = 400)),
- YearlyTarget(year = 2023, hours = timedelta(hours = 250)),
- YearlyTarget(year = 2024, hours = timedelta(hours = 500))
- ]
+ expected : str = "The effort is correct."
# Act
- actual : list[YearlyTarget] = YearProvider().get_all_yearly_targets()
+ actual : str = _MessageCollection.effort_is_correct()
# Assert
- self.assertTrue(SupportMethodProvider.are_lists_of_yearly_targets_equal(list1 = expected, list2 = actual))
-class SoftwareProjectNameProviderTestCase(unittest.TestCase):
-
- def test_getallsoftwareprojectnames_shouldreturnexpectedlist_wheninvoked(self):
-
+ self.assertEqual(expected, actual)
+ def test_nomdinfofound_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
- expected : list[str] = [
- "NW.MarkdownTables",
- "NW.NGramTextClassification",
- "NW.UnivariateForecasting",
- "NW.Shared.Files",
- "NW.Shared.Serialization",
- "NW.Shared.Validation",
- "nwreadinglist",
- "nwtimetracking",
- "nwtraderaanalytics",
- "nwshared",
- "nwpackageversions"
- ]
+ id : TTID = TTID.TTSBYMONTH
+ expected : str = "No MDInfo object found for id='tts_by_month'."
# Act
- actual : list[str] = SoftwareProjectNameProvider().get_all_software_project_names()
+ actual : str = _MessageCollection.no_mdinfo_found(id = id)
# Assert
self.assertEqual(expected, actual)
- def test_getallsoftwareprojectnamesbyspv_shouldreturnexpectedlist_wheninvoked(self):
-
+ def test_pleaseruninitializefirst_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
- expected : list[str] = [
- "NW.MarkdownTables",
- "NW.NGramTextClassification",
- "NW.UnivariateForecasting",
- "NW.Shared.Files",
- "NW.Shared.Serialization",
- "NW.Shared.Validation",
- "nwreadinglist",
- "nwtimetracking",
- "nwtraderaanalytics",
- "nwshared",
- "nwpackageversions"
- ]
+ expected : str = "Please run the 'initialize' method first."
# Act
- actual : list[str] = SoftwareProjectNameProvider().get_all_software_project_names_by_spv()
+ actual : str = _MessageCollection.please_run_initialize_first()
# Assert
self.assertEqual(expected, actual)
-class TimeTrackingManagerTestCase(unittest.TestCase):
-
- def test_convertstringtotimedelta_shouldreturnexpectedtimedelta_whenproperstring(self):
-
+ def test_thiscontentsuccessfullysavedas_shouldreturnexpectedmessage_wheninvoked(self):
+
# Arrange
- td_str : str = "5h 30m"
- expected_td : timedelta = pd.Timedelta(hours = 5, minutes = 30).to_pytimedelta()
+ id : TTID = TTID.TTSBYMONTH
+ file_path : str = "/path/to/file.csv"
+ expected : str = (
+ "This content (id: 'tts_by_month') has been successfully saved as '/path/to/file.csv'."
+ )
# Act
- actual_td : str = TimeTrackingManager()._TimeTrackingManager__convert_string_to_timedelta(td_str = td_str) # type: ignore
+ actual : str = _MessageCollection.this_content_successfully_saved_as(id = id, file_path = file_path)
# Assert
- self.assertEqual(expected_td, actual_td)
- def test_getyearlytarget_shouldreturnexpectedhours_whenyearinlist(self):
+ self.assertEqual(expected, actual)
+class YearlyTargetTestCase(unittest.TestCase):
+ def test_init_shouldinitializeobjectwithexpectedproperties_wheninvoked(self) -> None:
+
# Arrange
- yearly_targets : list[YearlyTarget] = ObjectMother.create_yearly_targets()
year : int = 2024
- expected_hours : timedelta = timedelta(hours = 250)
+ hours : timedelta = timedelta(hours = 1200)
# Act
- actual_hours : timedelta = TimeTrackingManager()._TimeTrackingManager__get_yearly_target(yearly_targets = yearly_targets, year = year).hours # type: ignore
+ actual : YearlyTarget = YearlyTarget(year = year, hours = hours)
# Assert
- self.assertEqual(expected_hours, actual_hours)
- def test_getyearlytarget_shouldreturnnone_whenyearnotinlist(self):
+ self.assertEqual(actual.year, year)
+ self.assertEqual(actual.hours, hours)
+ self.assertIsInstance(actual.year, int)
+ self.assertIsInstance(actual.hours, timedelta)
+class EffortStatusTestCase(unittest.TestCase):
+
+ def test_init_shouldinitializeobjectwithexpectedproperties_wheninvoked(self) -> None:
# Arrange
- yearly_targets : list[YearlyTarget] = ObjectMother.create_yearly_targets()
- year : int = 2010
+ idx : int = 1
+ start_time_str : Optional[str] = "07:00"
+ start_time_dt : Optional[datetime] = datetime.strptime("07:00", "%H:%M")
+ end_time_str : Optional[str] = "08:00"
+ end_time_dt : Optional[datetime] = datetime.strptime("08:00", "%H:%M")
+ actual_str : str = "01h 00m"
+ actual_td : timedelta = timedelta(hours = 1)
+ expected_td : Optional[timedelta] = timedelta(hours = 1)
+ expected_str : Optional[str] = "01h 00m"
+ is_correct : bool = True
+ message : str = "Effort matches expected."
# Act
- yearly_target : YearlyTarget = TimeTrackingManager()._TimeTrackingManager__get_yearly_target(yearly_targets = yearly_targets, year = year) # type: ignore
+ actual : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = start_time_str,
+ start_time_dt = start_time_dt,
+ end_time_str = end_time_str,
+ end_time_dt = end_time_dt,
+ actual_str = actual_str,
+ actual_td = actual_td,
+ expected_td = expected_td,
+ expected_str = expected_str,
+ is_correct = is_correct,
+ message = message
+ )
# Assert
- self.assertIsNone(yearly_target)
- def test_isyearlytargetmet_shouldreturntrue_whenyearlytargetismet(self):
+ self.assertEqual(actual.idx, idx)
+ self.assertEqual(actual.start_time_str, start_time_str)
+ self.assertEqual(actual.start_time_dt, start_time_dt)
+ self.assertEqual(actual.end_time_str, end_time_str)
+ self.assertEqual(actual.end_time_dt, end_time_dt)
+ self.assertEqual(actual.actual_str, actual_str)
+ self.assertEqual(actual.actual_td, actual_td)
+ self.assertEqual(actual.expected_td, expected_td)
+ self.assertEqual(actual.expected_str, expected_str)
+ self.assertEqual(actual.is_correct, is_correct)
+ self.assertEqual(actual.message, message)
+ self.assertIsInstance(actual.idx, int)
+ self.assertIsInstance(actual.start_time_str, (str, type(None)))
+ self.assertIsInstance(actual.start_time_dt, (datetime, type(None)))
+ self.assertIsInstance(actual.end_time_str, (str, type(None)))
+ self.assertIsInstance(actual.end_time_dt, (datetime, type(None)))
+ self.assertIsInstance(actual.actual_str, str)
+ self.assertIsInstance(actual.actual_td, timedelta)
+ self.assertIsInstance(actual.expected_td, (timedelta, type(None)))
+ self.assertIsInstance(actual.expected_str, (str, type(None)))
+ self.assertIsInstance(actual.is_correct, bool)
+ self.assertIsInstance(actual.message, str)
+ def test_init_shouldinitializeobjectwithexpectedproperties_whenalloptionalsarenone(self) -> None:
# Arrange
- effort : timedelta = pd.Timedelta(hours = 255, minutes = 30)
- yearly_target : timedelta = pd.Timedelta(hours = 250)
+ idx : int = 1
+ start_time_str : Optional[str] = None
+ start_time_dt : Optional[datetime] = None
+ end_time_str : Optional[str] = None
+ end_time_dt : Optional[datetime] = None
+ actual_str : str = "01h 00m"
+ actual_td : timedelta = timedelta(hours = 1)
+ expected_td : Optional[timedelta] = None
+ expected_str : Optional[str] = None
+ is_correct : bool = True
+ message : str = "Effort recorded without expectation."
# Act
- actual : bool = TimeTrackingManager()._TimeTrackingManager__is_yearly_target_met(effort = effort, yearly_target = yearly_target) # type: ignore
+ actual : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = start_time_str,
+ start_time_dt = start_time_dt,
+ end_time_str = end_time_str,
+ end_time_dt = end_time_dt,
+ actual_str = actual_str,
+ actual_td = actual_td,
+ expected_td = expected_td,
+ expected_str = expected_str,
+ is_correct = is_correct,
+ message = message
+ )
+
+ # Assert
+ self.assertEqual(actual.idx, idx)
+ self.assertIsNone(actual.start_time_str)
+ self.assertIsNone(actual.start_time_dt)
+ self.assertIsNone(actual.end_time_str)
+ self.assertIsNone(actual.end_time_dt)
+ self.assertEqual(actual.actual_str, actual_str)
+ self.assertEqual(actual.actual_td, actual_td)
+ self.assertIsNone(actual.expected_td)
+ self.assertIsNone(actual.expected_str)
+ self.assertEqual(actual.is_correct, is_correct)
+ self.assertEqual(actual.message, message)
+class MDInfoTestCase(unittest.TestCase):
+
+ def test_init_shouldinitializeobjectwithexpectedproperties_wheninvoked(self) -> None:
+
+ # Arrange
+ id : TTID = TTID.TTSBYMONTH
+ file_name : str = "TIMETRACKINGBYMONTH.md"
+ paragraph_title : str = "Time Tracking By Month"
+
+ # Act
+ actual : MDInfo = MDInfo(id = id, file_name = file_name, paragraph_title = paragraph_title)
+
+ # Assert
+ self.assertEqual(actual.id, id)
+ self.assertEqual(actual.file_name, file_name)
+ self.assertEqual(actual.paragraph_title, paragraph_title)
+ self.assertIsInstance(actual.id, TTID)
+ self.assertIsInstance(actual.file_name, str)
+ self.assertIsInstance(actual.paragraph_title, str)
+class TTSummaryTestCase(unittest.TestCase):
+
+ def test_init_shouldinitializeobjectwithexpectedproperties_wheninvoked(self) -> None:
+ # Arrange
+ empty_df : DataFrame = DataFrame()
+ empty_tuple : Tuple[DataFrame, DataFrame] = (empty_df, empty_df)
+ markdown : str = ""
+
+ # Act
+ actual = TTSummary(
+ tt_df = empty_df,
+ tts_by_month_tpl = empty_tuple,
+ tts_by_year_df = empty_df,
+ tts_by_year_month_tpl = empty_tuple,
+ tts_by_year_month_spnv_tpl = empty_tuple,
+ tts_by_year_spnv_tpl = empty_tuple,
+ tts_by_spn_df = empty_df,
+ tts_by_spn_spv_df = empty_df,
+ tts_by_hashtag_df = empty_df,
+ tts_by_hashtag_year_df = empty_df,
+ tts_by_efs_tpl = empty_tuple,
+ tts_by_tr_df = empty_df,
+ definitions_df = empty_df,
+ tts_by_month_md = markdown,
+ )
+
# Assert
- self.assertTrue(actual)
- def test_isyearlytargetmet_shouldreturnfalse_whenyearlytargetisnotmet(self):
+ self.assertEqual(actual.tt_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_month_tpl, empty_tuple)
+ self.assertEqual(actual.tts_by_year_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_year_month_tpl, empty_tuple)
+ self.assertEqual(actual.tts_by_year_month_spnv_tpl, empty_tuple)
+ self.assertEqual(actual.tts_by_year_spnv_tpl, empty_tuple)
+ self.assertEqual(actual.tts_by_spn_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_spn_spv_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_hashtag_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_hashtag_year_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_efs_tpl, empty_tuple)
+ self.assertEqual(actual.tts_by_tr_df.shape, empty_df.shape)
+ self.assertEqual(actual.definitions_df.shape, empty_df.shape)
+ self.assertEqual(actual.tts_by_month_md, markdown)
+ self.assertIsInstance(actual.tts_by_month_md, str)
+class DefaultPathProviderTestCase(unittest.TestCase):
+
+ def test_getdefaulttimetrackingpath_shouldreturnexpectedpath_wheninvoked(self):
+
+ '''"C:/project_dir/src/" => "C:/project_dir/data/Time Tracking.xlsx"'''
# Arrange
- effort : timedelta = pd.Timedelta(hours = 249)
- yearly_target : timedelta = pd.Timedelta(hours = 250)
+ expected : str = "C:/project_dir/data/Time Tracking.xlsx"
# Act
- actual : bool = TimeTrackingManager()._TimeTrackingManager__is_yearly_target_met(effort = effort, yearly_target = yearly_target) # type: ignore
+ with patch.object(os, 'getcwd', return_value="C:/project_dir/src/") as mocked_context:
+ actual : str = DefaultPathProvider().get_default_time_tracking_path()
# Assert
- self.assertFalse(actual)
- def test_formattimedelta_shouldreturnexpectedstring_whenpropertimedeltaandplussignfalse(self):
+ self.assertEqual(expected, actual)
+class YearProviderTestCase(unittest.TestCase):
+
+ def test_getallyears_shouldreturnexpectedlist_wheninvoked(self):
# Arrange
- td : timedelta = pd.Timedelta(hours = 255, minutes = 30)
- expected : str = "255h 30m"
+ expected : list[int] = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__format_timedelta(td = td, add_plus_sign = False) # type: ignore
-
+ actual : list[int] = YearProvider().get_all_years()
+
# Assert
self.assertEqual(expected, actual)
- def test_formattimedelta_shouldreturnexpectedstring_whenpropertimedeltaandplussigntrue(self):
+ def test_getallyearlytargets_shouldreturnexpectedlist_wheninvoked(self):
# Arrange
- td : timedelta = pd.Timedelta(hours = 255, minutes = 30)
- expected : str = "+255h 30m"
+ expected : list[YearlyTarget] = [
+ YearlyTarget(year = 2015, hours = timedelta(hours = 0)),
+ YearlyTarget(year = 2016, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2017, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2018, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2019, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2020, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2021, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2022, hours = timedelta(hours = 400)),
+ YearlyTarget(year = 2023, hours = timedelta(hours = 250)),
+ YearlyTarget(year = 2024, hours = timedelta(hours = 500))
+ ]
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__format_timedelta(td = td, add_plus_sign = True) # type: ignore
+ actual : list[YearlyTarget] = YearProvider().get_all_yearly_targets()
+
+ # Assert
+ self.assertTrue(SupportMethodProvider.are_lists_of_yearly_targets_equal(list1 = expected, list2 = actual))
+ def test_getmostrecentxyears_shouldreturnlastxyears_whenxlessthantotalyears(self):
+
+ # Arrange
+ x : uint = uint(5)
+ expected : list[int] = [2020, 2021, 2022, 2023, 2024]
+ # Act
+ actual : list[int] = YearProvider().get_most_recent_x_years(x)
+
# Assert
self.assertEqual(expected, actual)
- def test_extractsoftwareprojectname_shouldreturnexpectedstring_whenproperstring(self):
+ def test_getmostrecentxyears_shouldreturnallyears_whenxgreaterthantotalyears(self):
# Arrange
- descriptor : str = "NW.AutoProffLibrary v1.0.0"
- expected : str = "NW.AutoProffLibrary"
+ x : uint = uint(15)
+ expected : list[int] = [2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023, 2024]
+
+ # Act
+ actual : list[int] = YearProvider().get_most_recent_x_years(x)
+ # Assert
+ self.assertEqual(expected, actual)
+ def test_getmostrecentxyears_shouldreturnemptylist_whenxiszero(self):
+
+ # Arrange
+ x : uint = uint(0)
+ expected : list[int] = []
+
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__extract_software_project_name(descriptor = descriptor) # type: ignore
+ actual : list[int] = YearProvider().get_most_recent_x_years(x)
# Assert
self.assertEqual(expected, actual)
- def test_extractsoftwareprojectname_shouldreturnerrorstring_whenunproperstring(self):
+class SoftwareProjectNameProviderTestCase(unittest.TestCase):
+
+ def test_getallsoftwareprojectnames_shouldreturnexpectedlist_wheninvoked(self):
# Arrange
- descriptor : str = "Some gibberish"
- expected : str = "ERROR"
+ expected : list[str] = [
+ "NW.MarkdownTables",
+ "NW.NGramTextClassification",
+ "NW.UnivariateForecasting",
+ "NW.Shared.Files",
+ "NW.Shared.Serialization",
+ "NW.Shared.Validation",
+ "nwreadinglist",
+ "nwtimetracking",
+ "nwtraderaanalytics",
+ "nwshared",
+ "nwpackageversions"
+ ]
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__extract_software_project_name(descriptor = descriptor) # type: ignore
+ actual : list[str] = SoftwareProjectNameProvider().get_all_software_project_names()
# Assert
- self.assertEqual(expected, actual)
- def test_extractsoftwareprojectversion_shouldreturnexpectedstring_whenproperstring(self):
+ self.assertEqual(expected, actual)
+ def test_getallsoftwareprojectnamesbyspv_shouldreturnexpectedlist_wheninvoked(self):
# Arrange
- descriptor : str = "NW.AutoProffLibrary v1.0.0"
- expected : str = "1.0.0"
+ expected : list[str] = [
+ "NW.MarkdownTables",
+ "NW.NGramTextClassification",
+ "NW.UnivariateForecasting",
+ "NW.Shared.Files",
+ "NW.Shared.Serialization",
+ "NW.Shared.Validation",
+ "nwreadinglist",
+ "nwtimetracking",
+ "nwtraderaanalytics",
+ "nwshared",
+ "nwpackageversions"
+ ]
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__extract_software_project_version(descriptor = descriptor) # type: ignore
+ actual : list[str] = SoftwareProjectNameProvider().get_all_software_project_names_by_spv()
# Assert
self.assertEqual(expected, actual)
- def test_extractsoftwareprojectversion_shouldreturnerrorstring_whenunproperstring(self):
-
+class MDInfoProviderTestCase(unittest.TestCase):
+
+ def test_getall_shouldreturnexpectedlist_wheninvoked(self):
+
# Arrange
- descriptor : str = "Some gibberish"
- expected : str = "ERROR"
+ expected : list[MDInfo] = [
+ MDInfo(
+ id = TTID.TTSBYMONTH,
+ file_name="TIMETRACKINGBYMONTH.md",
+ paragraph_title="Time Tracking By Month",
+ )
+ ]
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__extract_software_project_version(descriptor = descriptor) # type: ignore
+ actual : list[MDInfo] = MDInfoProvider().get_all()
# Assert
- self.assertEqual(expected, actual)
+ self.assertEqual(expected, actual)
+ self.assertEqual(expected[0].id, actual[0].id)
+ self.assertEqual(expected[0].file_name, actual[0].file_name)
+ self.assertEqual(expected[0].paragraph_title, actual[0].paragraph_title)
+class SettingBagTestCase(unittest.TestCase):
+
+ def test_init_shouldinitializeobjectwithexpectedproperties_wheninvoked(self) -> None:
+
+ # Arrange
+ options_tt : list[Literal["display"]] = ["display"]
+ options_tts_by_month : list[Literal["display", "save"]] = ["display", "save"]
+ options_tts_by_year : list[Literal["display"]] = ["display"]
+ options_tts_by_year_month : list[Literal["display"]] = ["display"]
+ options_tts_by_year_month_spnv : list[Literal["display"]] = ["display"]
+ options_tts_by_year_spnv : list[Literal["display"]] = ["display"]
+ options_tts_by_spn : list[Literal["display", "log"]] = ["display", "log"]
+ options_tts_by_spn_spv : list[Literal["display", "log"]] = ["display", "log"]
+ options_tts_by_hashtag : list[Literal["display"]] = ["display"]
+ options_tts_by_hashtag_year : list[Literal["display"]] = ["display"]
+ options_tts_by_efs : list[Literal["display"]] = ["display"]
+ options_tts_by_tr : list[Literal["display"]] = ["display"]
+ options_definitions : list[Literal["display"]] = ["display"]
+ excel_nrows : int = 100
+ tts_by_year_month_spnv_display_only_spn : Optional[str] = "SPN1"
+ tts_by_year_spnv_display_only_spn : Optional[str] = "SPN2"
+ tts_by_spn_spv_display_only_spn : Optional[str] = "SPN3"
+ working_folder_path : str = "/home/nwtimetracking/"
+ excel_path : str = "/workspaces/nwtimetracking/"
+ excel_skiprows : int = 0
+ excel_tabname : str = "Sessions"
+ years : list[int] = [2020, 2021, 2022]
+ yearly_targets : list = []
+ now : datetime = datetime.now()
+ software_project_names : list[str] = ["ProjectA", "ProjectB"]
+ software_project_names_by_spv : list[str] = ["ProjectC"]
+ tt_head_n : uint = uint(5)
+ tt_display_head_n_with_tail : bool = True
+ tt_hide_index : bool = True
+ tts_by_year_month_display_only_years : Optional[list[int]] = [2022]
+ tts_by_year_month_spnv_formatters : dict[str, str] = {"%_DME" : "{:.2f}", "%_TME" : "{:.2f}"}
+ tts_by_year_spnv_formatters : dict[str, str] = {"%_DYE" : "{:.2f}", "%_TYE" : "{:.2f}"}
+ tts_by_spn_formatters : dict[str, str] = {"%_DE" : "{:.2f}", "%_TE" : "{:.2f}"}
+ tts_by_spn_remove_untagged : bool = True
+ tts_by_hashtag_formatters : dict[str, str] = {"Effort%" : "{:.2f}"}
+ tts_by_efs_is_correct : bool = False
+ tts_by_efs_n : uint = uint(25)
+ tts_by_tr_unknown_id : str = "Unknown"
+ tts_by_tr_remove_unknown_occurrences : bool = True
+ tts_by_tr_filter_by_top_n : uint = uint(5)
+ tts_by_tr_head_n : uint = uint(10)
+ tts_by_tr_display_head_n_with_tail : bool = False
+ md_infos : list = []
+ md_last_update : datetime = datetime.now()
+
+ # Act
+ actual : SettingBag = SettingBag(
+ options_tt = options_tt,
+ options_tts_by_month = options_tts_by_month,
+ options_tts_by_year = options_tts_by_year,
+ options_tts_by_year_month = options_tts_by_year_month,
+ options_tts_by_year_month_spnv = options_tts_by_year_month_spnv,
+ options_tts_by_year_spnv = options_tts_by_year_spnv,
+ options_tts_by_spn = options_tts_by_spn,
+ options_tts_by_spn_spv = options_tts_by_spn_spv,
+ options_tts_by_hashtag = options_tts_by_hashtag,
+ options_tts_by_hashtag_year = options_tts_by_hashtag_year,
+ options_tts_by_efs = options_tts_by_efs,
+ options_tts_by_tr = options_tts_by_tr,
+ options_definitions = options_definitions,
+ excel_nrows = excel_nrows,
+ tts_by_year_month_spnv_display_only_spn = tts_by_year_month_spnv_display_only_spn,
+ tts_by_year_spnv_display_only_spn = tts_by_year_spnv_display_only_spn,
+ tts_by_spn_spv_display_only_spn = tts_by_spn_spv_display_only_spn,
+ working_folder_path = working_folder_path,
+ excel_path = excel_path,
+ excel_skiprows = excel_skiprows,
+ excel_tabname = excel_tabname,
+ years = years,
+ yearly_targets = yearly_targets,
+ now = now,
+ software_project_names = software_project_names,
+ software_project_names_by_spv = software_project_names_by_spv,
+ tt_head_n = tt_head_n,
+ tt_display_head_n_with_tail = tt_display_head_n_with_tail,
+ tt_hide_index = tt_hide_index,
+ tts_by_year_month_display_only_years = tts_by_year_month_display_only_years,
+ tts_by_year_month_spnv_formatters = tts_by_year_month_spnv_formatters,
+ tts_by_year_spnv_formatters = tts_by_year_spnv_formatters,
+ tts_by_spn_formatters = tts_by_spn_formatters,
+ tts_by_spn_remove_untagged = tts_by_spn_remove_untagged,
+ tts_by_hashtag_formatters = tts_by_hashtag_formatters,
+ tts_by_efs_is_correct = tts_by_efs_is_correct,
+ tts_by_efs_n = tts_by_efs_n,
+ tts_by_tr_unknown_id = tts_by_tr_unknown_id,
+ tts_by_tr_remove_unknown_occurrences = tts_by_tr_remove_unknown_occurrences,
+ tts_by_tr_filter_by_top_n = tts_by_tr_filter_by_top_n,
+ tts_by_tr_head_n = tts_by_tr_head_n,
+ tts_by_tr_display_head_n_with_tail = tts_by_tr_display_head_n_with_tail,
+ md_infos = md_infos,
+ md_last_update = md_last_update
+ )
+
+ # Assert
+ self.assertEqual(actual.options_tt, options_tt)
+ self.assertEqual(actual.options_tts_by_month, options_tts_by_month)
+ self.assertEqual(actual.options_tts_by_year, options_tts_by_year)
+ self.assertEqual(actual.options_tts_by_year_month, options_tts_by_year_month)
+ self.assertEqual(actual.options_tts_by_year_month_spnv, options_tts_by_year_month_spnv)
+ self.assertEqual(actual.options_tts_by_year_spnv, options_tts_by_year_spnv)
+ self.assertEqual(actual.options_tts_by_spn, options_tts_by_spn)
+ self.assertEqual(actual.options_tts_by_spn_spv, options_tts_by_spn_spv)
+ self.assertEqual(actual.options_tts_by_hashtag, options_tts_by_hashtag)
+ self.assertEqual(actual.options_tts_by_hashtag_year, options_tts_by_hashtag_year)
+ self.assertEqual(actual.options_tts_by_efs, options_tts_by_efs)
+ self.assertEqual(actual.options_tts_by_tr, options_tts_by_tr)
+ self.assertEqual(actual.options_definitions, options_definitions)
+ self.assertEqual(actual.excel_nrows, excel_nrows)
+ self.assertEqual(actual.tts_by_year_month_spnv_display_only_spn, tts_by_year_month_spnv_display_only_spn)
+ self.assertEqual(actual.tts_by_year_spnv_display_only_spn, tts_by_year_spnv_display_only_spn)
+ self.assertEqual(actual.tts_by_spn_spv_display_only_spn, tts_by_spn_spv_display_only_spn)
+ self.assertEqual(actual.working_folder_path, working_folder_path)
+ self.assertEqual(actual.excel_path, excel_path)
+ self.assertEqual(actual.excel_skiprows, excel_skiprows)
+ self.assertEqual(actual.excel_tabname, excel_tabname)
+ self.assertEqual(actual.years, years)
+ self.assertEqual(actual.yearly_targets, yearly_targets)
+ self.assertEqual(actual.now, now)
+ self.assertEqual(actual.software_project_names, software_project_names)
+ self.assertEqual(actual.software_project_names_by_spv, software_project_names_by_spv)
+ self.assertEqual(actual.tt_head_n, tt_head_n)
+ self.assertEqual(actual.tt_display_head_n_with_tail, tt_display_head_n_with_tail)
+ self.assertEqual(actual.tt_hide_index, tt_hide_index)
+ self.assertEqual(actual.tts_by_year_month_display_only_years, tts_by_year_month_display_only_years)
+ self.assertEqual(actual.tts_by_year_month_spnv_formatters, tts_by_year_month_spnv_formatters)
+ self.assertEqual(actual.tts_by_year_spnv_formatters, tts_by_year_spnv_formatters)
+ self.assertEqual(actual.tts_by_spn_formatters, tts_by_spn_formatters)
+ self.assertEqual(actual.tts_by_spn_remove_untagged, tts_by_spn_remove_untagged)
+ self.assertEqual(actual.tts_by_hashtag_formatters, tts_by_hashtag_formatters)
+ self.assertEqual(actual.tts_by_efs_is_correct, tts_by_efs_is_correct)
+ self.assertEqual(actual.tts_by_efs_n, tts_by_efs_n)
+ self.assertEqual(actual.tts_by_tr_unknown_id, tts_by_tr_unknown_id)
+ self.assertEqual(actual.tts_by_tr_remove_unknown_occurrences, tts_by_tr_remove_unknown_occurrences)
+ self.assertEqual(actual.tts_by_tr_filter_by_top_n, tts_by_tr_filter_by_top_n)
+ self.assertEqual(actual.tts_by_tr_head_n, tts_by_tr_head_n)
+ self.assertEqual(actual.tts_by_tr_display_head_n_with_tail, tts_by_tr_display_head_n_with_tail)
+ self.assertEqual(actual.md_infos, md_infos)
+ self.assertEqual(actual.md_last_update, md_last_update)
+class TTDataFrameHelperTestCase(unittest.TestCase):
+
+ def setUp(self):
+
+ self.df_helper = TTDataFrameHelper()
+ self.sm_provider = SupportMethodProvider()
def test_calculatepercentage_shouldreturnexpectedfloat_when0and16(self):
# Arrange
@@ -715,7 +1088,7 @@ def test_calculatepercentage_shouldreturnexpectedfloat_when0and16(self):
expected : float = 0.00
# Act
- actual : float = TimeTrackingManager()._TimeTrackingManager__calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits) # type: ignore
+ actual : float = self.df_helper.calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits)
# Assert
self.assertEqual(expected, actual)
@@ -728,7 +1101,7 @@ def test_calculatepercentage_shouldreturnexpectedfloat_when4and0(self):
expected : float = 0.00
# Act
- actual : float = TimeTrackingManager()._TimeTrackingManager__calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits) # type: ignore
+ actual : float = self.df_helper.calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits)
# Assert
self.assertEqual(expected, actual)
@@ -741,7 +1114,7 @@ def test_calculatepercentage_shouldreturnexpectedfloat_when4and16(self):
expected : float = 25.00
# Act
- actual : float = TimeTrackingManager()._TimeTrackingManager__calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits) # type: ignore
+ actual : float = self.df_helper.calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits)
# Assert
self.assertEqual(expected, actual)
@@ -754,7 +1127,7 @@ def test_calculatepercentage_shouldreturnexpectedfloat_when16and16(self):
expected : float = 100.00
# Act
- actual : float = TimeTrackingManager()._TimeTrackingManager__calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits) # type: ignore
+ actual : float = self.df_helper.calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits)
# Assert
self.assertEqual(expected, actual)
@@ -767,130 +1140,175 @@ def test_calculatepercentage_shouldreturnexpectedfloat_when3and9and4(self):
expected : float = 33.3333
# Act
- actual : float = TimeTrackingManager()._TimeTrackingManager__calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits) # type: ignore
+ actual : float = self.df_helper.calculate_percentage(part = part, whole = whole, rounding_digits = rounding_digits)
# Assert
self.assertEqual(expected, actual)
- def test_createeffortstatus_shouldreturnexpectobject_wheneffortiscorrect(self):
+ def test_convertstringtotimedelta_shouldreturnexpectedtimedelta_whenproperstring(self):
# Arrange
- idx : int = 1
- start_time_str : str = "07:00"
- end_time_str : str = "08:00"
- effort_str : str = "01h 00m"
-
- strp_format : str = "%Y-%m-%d %H:%M"
-
- start_time_dt : datetime = datetime.strptime(f"1900-01-01 {start_time_str}", strp_format)
- end_time_dt : datetime = datetime.strptime(f"1900-01-01 {end_time_str}", strp_format)
- actual_str = effort_str
- actual_td : timedelta = pd.Timedelta(value = actual_str).to_pytimedelta()
- expected_str : str = actual_str
- expected_td : timedelta = actual_td
- is_correct : bool = True
- message : str = "The effort is correct."
- expected : EffortStatus = EffortStatus(
- idx = idx,
- start_time_str = start_time_str,
- start_time_dt = start_time_dt,
- end_time_str = end_time_str,
- end_time_dt = end_time_dt,
- actual_str = effort_str,
- actual_td = actual_td,
- expected_td = expected_td,
- expected_str = expected_str,
- is_correct = is_correct,
- message = message
- )
+ td_str : str = "5h 30m"
+ expected_td : timedelta = pd.Timedelta(hours = 5, minutes = 30).to_pytimedelta()
# Act
- actual : EffortStatus = TimeTrackingManager()._TimeTrackingManager__create_effort_status(idx = idx, start_time_str = start_time_str,end_time_str = end_time_str,effort_str = effort_str) # type: ignore
+ actual_td : timedelta = self.df_helper.convert_string_to_timedelta(td_str = td_str)
# Assert
- comparison : bool = SupportMethodProvider().are_effort_statuses_equal(ef1 = expected, ef2 = actual)
- self.assertTrue(comparison)
- def test_createeffortstatus_shouldreturnexpectobject_wheneffortisnotcorrect(self):
+ self.assertEqual(expected_td, actual_td)
+ def test_formattimedelta_shouldreturnexpectedstring_whenpropertimedeltaandplussignfalse(self):
# Arrange
- idx : int = 1
- start_time_str : str = "07:00"
- end_time_str : str = "08:00"
- effort_str : str = "02h 00m"
-
- strp_format : str = "%Y-%m-%d %H:%M"
+ td : timedelta = pd.Timedelta(hours = 255, minutes = 30)
+ expected : str = "255h 30m"
- start_time_dt : datetime = datetime.strptime(f"1900-01-01 {start_time_str}", strp_format)
- end_time_dt : datetime = datetime.strptime(f"1900-01-01 {end_time_str}", strp_format)
- actual_str = effort_str
- actual_td : timedelta = pd.Timedelta(value = actual_str).to_pytimedelta()
- expected_str : str = "01h 00m"
- expected_td : timedelta = pd.Timedelta(value = expected_str).to_pytimedelta()
- is_correct : bool = False
- message : str = _MessageCollection.effort_status_mismatching_effort(
- idx = idx,
- start_time_str = start_time_str,
- end_time_str = end_time_str,
- actual_str = actual_str,
- expected_str = expected_str
- )
+ # Act
+ actual : str = self.df_helper.format_timedelta(td = td, add_plus_sign = False)
+
+ # Assert
+ self.assertEqual(expected, actual)
+ def test_formattimedelta_shouldreturnexpectedstring_whenpropertimedeltaandplussigntrue(self):
- expected : EffortStatus = EffortStatus(
- idx = idx,
- start_time_str = start_time_str,
- start_time_dt = start_time_dt,
- end_time_str = end_time_str,
- end_time_dt = end_time_dt,
- actual_str = effort_str,
- actual_td = actual_td,
- expected_td = expected_td,
- expected_str = expected_str,
- is_correct = is_correct,
- message = message
- )
+ # Arrange
+ td : timedelta = pd.Timedelta(hours = 255, minutes = 30)
+ expected : str = "+255h 30m"
# Act
- actual : EffortStatus = TimeTrackingManager()._TimeTrackingManager__create_effort_status(idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str) # type: ignore
+ actual : str = self.df_helper.format_timedelta(td = td, add_plus_sign = True)
+
+ # Assert
+ self.assertEqual(expected, actual)
+
+ @parameterized.expand([
+ [timedelta(minutes=30), timedelta(hours=1), "↑"],
+ [timedelta(hours=1), timedelta(minutes=30), "↓"],
+ [timedelta(minutes=30), timedelta(minutes=30), "="],
+ ])
+ def test_gettrendbytimedelta_shouldreturnexpectedtrend_wheninvoked(
+ self,
+ td_1 : timedelta,
+ td_2 : timedelta,
+ expected : str
+ ):
+
+ # Arrange
+ # Act
+ actual : str = self.df_helper.get_trend_by_timedelta(td_1 = td_1, td_2 = td_2)
# Assert
- comparison : bool = SupportMethodProvider().are_effort_statuses_equal(ef1 = expected, ef2 = actual)
- self.assertTrue(comparison)
+ self.assertEqual(expected, actual)
@parameterized.expand([
- [1, "5h 30m", timedelta(hours = 5, minutes = 30)],
- [2, "2h 00m", timedelta(hours = 2, minutes = 00)]
+ ["↕1", TTCN.TREND],
+ ["2016", "2016"],
])
- def test_createeffortstatusfornonevalues_shouldreturnexpectedobject_wheninvoked(
+ def test_tryconsolidatetrendcolumnname_shouldreturnexpectedcolumnname_wheninvoked(
self,
- idx : int,
- effort_str : str,
- actual_td : timedelta):
+ column_name: str,
+ expected: str
+ ):
# Arrange
- expected : EffortStatus = EffortStatus(
- idx = idx,
- start_time_str = None,
- start_time_dt = None,
- end_time_str = None,
- end_time_dt = None,
- actual_str = effort_str,
- actual_td = actual_td,
- expected_td = None,
- expected_str = None,
- is_correct = True,
- message = "''start_time' and/or 'end_time' are empty, 'effort' can't be verified. We assume that it's correct."
- )
-
# Act
- actual : EffortStatus = TimeTrackingManager()._TimeTrackingManager__create_effort_status_for_none_values(idx = idx, effort_str = effort_str) # type: ignore
+ actual : str = self.df_helper.try_consolidate_trend_column_name(column_name)
# Assert
- comparison : bool = SupportMethodProvider().are_effort_statuses_equal(ef1 = expected, ef2 = actual)
- self.assertTrue(comparison)
- @parameterized.expand([
- "07:00", "07:15", "07:30", "07:45",
- "08:00", "08:15", "08:30", "08:45",
- "09:00", "09:15", "09:30", "09:45",
- "10:00", "10:15", "10:30", "10:45",
+ self.assertEqual(expected, actual)
+
+ def test_getyearlytarget_shouldreturnexpectedhours_whenyearinlist(self):
+
+ # Arrange
+ yearly_targets : list[YearlyTarget] = ObjectMother.get_yearly_targets()
+ year : int = 2024
+ expected_hours : timedelta = timedelta(hours = 250)
+
+ # Act
+ actual_hours : timedelta = cast(YearlyTarget, self.df_helper.get_yearly_target(yearly_targets = yearly_targets, year = year)).hours
+
+ # Assert
+ self.assertEqual(expected_hours, actual_hours)
+ def test_getyearlytarget_shouldreturnnone_whenyearnotinlist(self):
+
+ # Arrange
+ yearly_targets : list[YearlyTarget] = ObjectMother.get_yearly_targets()
+ year : int = 2010
+
+ # Act
+ yearly_target : Optional[YearlyTarget] = self.df_helper.get_yearly_target(yearly_targets = yearly_targets, year = year)
+
+ # Assert
+ self.assertIsNone(yearly_target)
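+ # The two cases below only establish that the target counts as met when effort exceeds it and as not met when effort falls short; the equality case is not covered here.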
+ def test_isyearlytargetmet_shouldreturntrue_whenyearlytargetismet(self):
+
+ # Arrange
+ effort : timedelta = pd.Timedelta(hours = 255, minutes = 30)
+ yearly_target : timedelta = pd.Timedelta(hours = 250)
+
+ # Act
+ actual : bool = self.df_helper.is_yearly_target_met(effort = effort, yearly_target = yearly_target)
+
+ # Assert
+ self.assertTrue(actual)
+ def test_isyearlytargetmet_shouldreturnfalse_whenyearlytargetisnotmet(self):
+
+ # Arrange
+ effort : timedelta = pd.Timedelta(hours = 249)
+ yearly_target : timedelta = pd.Timedelta(hours = 250)
+
+ # Act
+ actual : bool = self.df_helper.is_yearly_target_met(effort = effort, yearly_target = yearly_target)
+
+ # Assert
+ self.assertFalse(actual)
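+ # The descriptor format exercised below is "<name> v<version>"; anything that does not match is expected to yield "ERROR".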
+ def test_extractsoftwareprojectname_shouldreturnexpectedstring_whenproperstring(self):
+
+ # Arrange
+ descriptor : str = "NW.AutoProffLibrary v1.0.0"
+ expected : str = "NW.AutoProffLibrary"
+
+ # Act
+ actual : str = self.df_helper.extract_software_project_name(descriptor = descriptor)
+
+ # Assert
+ self.assertEqual(expected, actual)
+ def test_extractsoftwareprojectname_shouldreturnerrorstring_whenunproperstring(self):
+
+ # Arrange
+ descriptor : str = "Some gibberish"
+ expected : str = "ERROR"
+
+ # Act
+ actual : str = self.df_helper.extract_software_project_name(descriptor = descriptor)
+
+ # Assert
+ self.assertEqual(expected, actual)
+ def test_extractsoftwareprojectversion_shouldreturnexpectedstring_whenproperstring(self):
+
+ # Arrange
+ descriptor : str = "NW.AutoProffLibrary v1.0.0"
+ expected : str = "1.0.0"
+
+ # Act
+ actual : str = self.df_helper.extract_software_project_version(descriptor = descriptor)
+
+ # Assert
+ self.assertEqual(expected, actual)
+ def test_extractsoftwareprojectversion_shouldreturnerrorstring_whenunproperstring(self):
+
+ # Arrange
+ descriptor : str = "Some gibberish"
+ expected : str = "ERROR"
+
+ # Act
+ actual : str = self.df_helper.extract_software_project_version(descriptor = descriptor)
+
+ # Assert
+ self.assertEqual(expected, actual)
+
+ @parameterized.expand([
+ "07:00", "07:15", "07:30", "07:45",
+ "08:00", "08:15", "08:30", "08:45",
+ "09:00", "09:15", "09:30", "09:45",
+ "10:00", "10:15", "10:30", "10:45",
"11:00", "11:15", "11:30", "11:45",
"12:00", "12:15", "12:30", "12:45",
"13:00", "13:15", "13:30", "13:45",
@@ -913,10 +1331,11 @@ def test_createtimeobject_shouldreturnexpecteddatatime_whenday1time(self, time :
expected : datetime = datetime.strptime(dt_str, strp_format)
# Act
- actual : datetime = TimeTrackingManager()._TimeTrackingManager__create_time_object(time = time) # type: ignore
+ actual : datetime = self.df_helper.create_time_object(time = time)
# Assert
self.assertEqual(expected, actual)
+
@parameterized.expand([
"00:00", "00:15", "00:30", "00:45",
"01:00", "01:15", "01:30", "01:45",
@@ -934,10 +1353,11 @@ def test_createtimeobject_shouldreturnexpecteddatatime_whenday2time(self, time :
expected : datetime = datetime.strptime(dt_str, strp_format)
# Act
- actual : datetime = TimeTrackingManager()._TimeTrackingManager__create_time_object(time = time) # type: ignore
+ actual : datetime = self.df_helper.create_time_object(time = time)
# Assert
self.assertEqual(expected, actual)
+
@parameterized.expand([
"07:04",
"00:01",
@@ -950,10 +1370,124 @@ def test_createtimeobject_shouldraisevalueerrorexception_whennotamongtimevalues(
# Act
with self.assertRaises(ValueError) as context:
- actual : datetime = TimeTrackingManager()._TimeTrackingManager__create_time_object(time = time) # type: ignore
+ self.df_helper.create_time_object(time = time)
# Assert
self.assertTrue(expected_message in str(context.exception))
+
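+ # Expected id format: "start-end" when both times are provided, otherwise the supplied unknown_id.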
+ @parameterized.expand([
+ ["07:00", "08:00", "UNKNOWN", "07:00-08:00"],
+ ["", "08:00", "UNKNOWN", "UNKNOWN"],
+ ["07:00", "", "UNKNOWN", "UNKNOWN"]
+ ])
+ def test_createtimerangeid_shouldreturnexpectedtimerangeid_wheninvoked(
+ self,
+ start_time : str,
+ end_time : str,
+ unknown_id : str,
+ expected : str):
+
+ # Arrange
+ # Act
+ actual : str = self.df_helper.create_time_range_id(start_time = start_time, end_time = end_time, unknown_id = unknown_id)
+
+ # Assert
+ self.assertEqual(expected, actual)
+
+ def test_createeffortstatus_shouldreturnexpectobject_wheneffortiscorrect(self):
+
+ # Arrange
+ idx : int = 1
+ start_time_str : str = "07:00"
+ end_time_str : str = "08:00"
+ effort_str : str = "01h 00m"
+
+ strp_format : str = "%Y-%m-%d %H:%M"
+
+ start_time_dt : datetime = datetime.strptime(f"1900-01-01 {start_time_str}", strp_format)
+ end_time_dt : datetime = datetime.strptime(f"1900-01-01 {end_time_str}", strp_format)
+ actual_str = effort_str
+ actual_td : timedelta = pd.Timedelta(value = actual_str).to_pytimedelta()
+ expected_str : str = actual_str
+ expected_td : timedelta = actual_td
+ is_correct : bool = True
+ message : str = "The effort is correct."
+ expected : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = start_time_str,
+ start_time_dt = start_time_dt,
+ end_time_str = end_time_str,
+ end_time_dt = end_time_dt,
+ actual_str = effort_str,
+ actual_td = actual_td,
+ expected_td = expected_td,
+ expected_str = expected_str,
+ is_correct = is_correct,
+ message = message
+ )
+
+ # Act
+ actual : EffortStatus = self.df_helper.create_effort_status(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ effort_str = effort_str
+ )
+
+ # Assert
+ comparison : bool = self.sm_provider.are_effort_statuses_equal(ef1 = expected, ef2 = actual)
+ self.assertTrue(comparison)
+ def test_createeffortstatus_shouldreturnexpectobject_wheneffortisnotcorrect(self):
+
+ # Arrange
+ idx : int = 1
+ start_time_str : str = "07:00"
+ end_time_str : str = "08:00"
+ effort_str : str = "02h 00m"
+
+ strp_format : str = "%Y-%m-%d %H:%M"
+
+ start_time_dt : datetime = datetime.strptime(f"1900-01-01 {start_time_str}", strp_format)
+ end_time_dt : datetime = datetime.strptime(f"1900-01-01 {end_time_str}", strp_format)
+ actual_str = effort_str
+ actual_td : timedelta = pd.Timedelta(value = actual_str).to_pytimedelta()
+ expected_str : str = "01h 00m"
+ expected_td : timedelta = pd.Timedelta(value = expected_str).to_pytimedelta()
+ is_correct : bool = False
+ message : str = _MessageCollection.effort_status_mismatching_effort(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ actual_str = actual_str,
+ expected_str = expected_str
+ )
+
+ expected : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = start_time_str,
+ start_time_dt = start_time_dt,
+ end_time_str = end_time_str,
+ end_time_dt = end_time_dt,
+ actual_str = effort_str,
+ actual_td = actual_td,
+ expected_td = expected_td,
+ expected_str = expected_str,
+ is_correct = is_correct,
+ message = message
+ )
+
+ # Act
+ actual : EffortStatus = self.df_helper.create_effort_status(
+ idx = idx,
+ start_time_str = start_time_str,
+ end_time_str = end_time_str,
+ effort_str = effort_str
+ )
+
+ # Assert
+ comparison : bool = self.sm_provider.are_effort_statuses_equal(ef1 = expected, ef2 = actual)
+ self.assertTrue(comparison)
+
@parameterized.expand([
[1, "07:00", "", "5h 30m"],
[1, "", "07:00", "5h 30m"]
@@ -966,7 +1500,7 @@ def test_createeffortstatus_shouldreturnexpectobject_whenstarttimeorendtimeareem
effort_str : str):
# Arrange
- actual_td : timedelta = TimeTrackingManager()._TimeTrackingManager__convert_string_to_timedelta(td_str = effort_str) # type: ignore
+ actual_td : timedelta = self.df_helper.convert_string_to_timedelta(td_str = effort_str)
expected : EffortStatus = EffortStatus(
idx = idx,
start_time_str = None,
@@ -982,15 +1516,16 @@ def test_createeffortstatus_shouldreturnexpectobject_whenstarttimeorendtimeareem
)
# Act
- actual : EffortStatus = TimeTrackingManager()._TimeTrackingManager__create_effort_status(
+ actual : EffortStatus = self.df_helper.create_effort_status(
idx = idx,
start_time_str = start_time_str,
end_time_str = end_time_str,
effort_str = effort_str)
# Assert
- comparison : bool = SupportMethodProvider().are_effort_statuses_equal(ef1 = expected, ef2 = actual)
+ comparison : bool = self.sm_provider.are_effort_statuses_equal(ef1 = expected, ef2 = actual)
self.assertTrue(comparison)
+
@parameterized.expand([
["Some Gibberish", "08:00", "01h 00m"],
["07:00", "Some Gibberish", "01h 00m"],
@@ -1009,41 +1544,91 @@ def test_createeffortstatus_shouldraisevalueerrorexception_whenunproperparameter
# Act
with self.assertRaises(ValueError) as context:
- actual : EffortStatus = TimeTrackingManager()._TimeTrackingManager__create_effort_status(idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str) # type: ignore
+ self.df_helper.create_effort_status(idx = idx, start_time_str = start_time_str, end_time_str = end_time_str, effort_str = effort_str)
# Assert
- self.assertTrue(expected_message in str(context.exception))
+ self.assertTrue(expected_message in str(context.exception))
+
@parameterized.expand([
- ["07:00", "08:00", "UNKNOWN", "07:00-08:00"],
- ["", "08:00", "UNKNOWN", "UNKNOWN"],
- ["07:00", "", "UNKNOWN", "UNKNOWN"]
+ [1, "5h 30m", timedelta(hours = 5, minutes = 30)],
+ [2, "2h 00m", timedelta(hours = 2, minutes = 00)]
])
- def test_createtimerangeid_shouldreturnexpectedtimerangeid_wheninvoked(
- self,
- start_time : str,
- end_time : str,
- unknown_id : str,
- expected : str):
+ def test_createeffortstatusfornonevalues_shouldreturnexpectedobject_wheninvoked(
+ self,
+ idx : int,
+ effort_str : str,
+ actual_td : timedelta):
+
+ # Arrange
+ expected : EffortStatus = EffortStatus(
+ idx = idx,
+ start_time_str = None,
+ start_time_dt = None,
+ end_time_str = None,
+ end_time_dt = None,
+ actual_str = effort_str,
+ actual_td = actual_td,
+ expected_td = None,
+ expected_str = None,
+ is_correct = True,
+ message = "''start_time' and/or 'end_time' are empty, 'effort' can't be verified. We assume that it's correct."
+ )
+
+ # Act
+ actual : EffortStatus = self.df_helper.create_effort_status_for_none_values(idx = idx, effort_str = effort_str) # type: ignore
+
+ # Assert
+ comparison : bool = self.sm_provider.are_effort_statuses_equal(ef1 = expected, ef2 = actual)
+ self.assertTrue(comparison)
+
+ def test_createeffortstatusandcasttoany_shouldwork_withdfapply(self):
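+ # Sanity check that the helper can populate the EFFORTSTATUS column row-wise via DataFrame.apply;
+ # the "cast to any" variant presumably exists to keep type checkers happy about the object-dtype column.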
# Arrange
+ data : list[dict] = [
+ {"idx": 1, "start_time_str": "07:00", "end_time_str": "08:00", "effort_str": "01h 00m"}
+ ]
+ df : DataFrame = pd.DataFrame(data)
+
# Act
- actual : str = TimeTrackingManager()._TimeTrackingManager__create_time_range_id(start_time = start_time, end_time = end_time, unknown_id = unknown_id) # type: ignore
+ try:
+
+ df[TTCN.EFFORTSTATUS] = df.apply(
+ lambda x : self.df_helper.create_effort_status_and_cast_to_any(
+ idx = x["idx"],
+ start_time_str = x["start_time_str"],
+ end_time_str = x["end_time_str"],
+ effort_str = x["effort_str"]
+ ), axis=1)
+
+ except Exception as e:
+ self.fail(str(e))
# Assert
- self.assertEqual(expected, actual)
+ self.assertTrue(TTCN.EFFORTSTATUS in df.columns)
+class TTDataFrameFactoryTestCase(unittest.TestCase):
- def test_getsessionsdataset_shouldreturnexpecteddataframe_wheninvoked(self):
+ def setUp(self):
+ self.df_factory : TTDataFrameFactory = TTDataFrameFactory(df_helper = TTDataFrameHelper())
+ def test_createttdf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
- excel_data_df : DataFrame = ObjectMother().create_excel_data()
- setting_bag : SettingBag = ObjectMother().create_setting_bag()
- expected_column_names : list[str] = ObjectMother().create_sessions_df_column_names()
- expected_dtype_names : list[str] = ObjectMother().create_sessions_df_dtype_names()
+ excel_path : str = "/workspaces/nwtimetracking/"
+ excel_skiprows : int = 0
+ excel_nrows : int = 100
+ excel_tabname : str = "Sessions"
+ excel_data_df : DataFrame = ObjectMother().get_excel_data()
+ expected_column_names : list[str] = ObjectMother().get_tt_df_column_names()
+ expected_dtype_names : list[str] = ObjectMother().get_tt_df_dtype_names()
expected_nan : str = ""
# Act
with patch.object(pd, 'read_excel', return_value = excel_data_df) as mocked_context:
- actual : DataFrame = TimeTrackingManager().get_sessions_dataset(setting_bag = setting_bag)
+ actual : DataFrame = self.df_factory.create_tt_df(
+ excel_path = excel_path,
+ excel_skiprows = excel_skiprows,
+ excel_nrows = excel_nrows,
+ excel_tabname = excel_tabname
+ )
# Assert
self.assertEqual(expected_column_names, actual.columns.tolist())
@@ -1051,197 +1636,1014 @@ def test_getsessionsdataset_shouldreturnexpecteddataframe_wheninvoked(self):
self.assertEqual(expected_nan, actual[expected_column_names[1]][0])
self.assertEqual(expected_nan, actual[expected_column_names[2]][0])
self.assertEqual(expected_nan, actual[expected_column_names[5]][0])
- def test_getttbyyear_shouldreturnexpecteddataframe_wheninvoked(self):
+ def test_createttsbyyeardf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
years : list[int] = [2024]
yearly_targets : list[YearlyTarget] = [ YearlyTarget(year = 2024, hours = timedelta(hours = 250)) ]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_year_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_year_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_year(sessions_df = sessions_df, years = years, yearly_targets = yearly_targets)
+ actual_df : DataFrame = self.df_factory.create_tts_by_year_df(tt_df = tt_df, years = years, yearly_targets = yearly_targets)
# Assert
assert_frame_equal(expected_df , actual_df)
- def test_getttbyyearmonth_shouldreturnexpecteddataframe_wheninvoked(self):
+ def test_createttsbyyearmonthtpl_shouldreturnexpectedtuple_wheninvoked(self):
# Arrange
years : list[int] = [2024]
yearly_targets : list[YearlyTarget] = [ YearlyTarget(year = 2024, hours = timedelta(hours = 250)) ]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_year_month_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_tpl : Tuple[DataFrame, DataFrame] = ObjectMother().get_tts_by_year_month_tpl()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_year_month(sessions_df = sessions_df, years = years, yearly_targets = yearly_targets)
+ actual_tpl : Tuple[DataFrame, DataFrame] = self.df_factory.create_tts_by_year_month_tpl(
+ tt_df = tt_df,
+ years = years,
+ yearly_targets = yearly_targets,
+ display_only_years = years
+ )
# Assert
- assert_frame_equal(expected_df , actual_df)
- def test_getttbyyearmonthspnv_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_tpl[0] , actual_tpl[0])
+ assert_frame_equal(expected_tpl[1] , actual_tpl[1])
+ def test_createttsbyyearmonthspnvtpl_shouldreturnexpectedtuple_wheninvoked(self):
# Arrange
years : list[int] = [2024]
software_project_names : list[str] = ["NW.NGramTextClassification", "NW.Shared.Serialization", "NW.UnivariateForecasting", "nwreadinglistmanager"]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_year_month_spnv_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_tpl : Tuple[DataFrame, DataFrame] = ObjectMother().get_tts_by_year_month_spnv_tpl()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_year_month_spnv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
+ actual_tpl : Tuple[DataFrame, DataFrame] = self.df_factory.create_tts_by_year_month_spnv_tpl(
+ tt_df = tt_df,
+ years = years,
+ software_project_names = software_project_names,
+ software_project_name = software_project_names[0]
+ )
# Assert
- assert_frame_equal(expected_df , actual_df)
- def test_getttbyyearspnv_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_tpl[0] , actual_tpl[0])
+ assert_frame_equal(expected_tpl[1] , actual_tpl[1])
+ def test_createttsbyyearspnvtpl_shouldreturnexpectedtuple_wheninvoked(self):
# Arrange
years : list[int] = [2024]
software_project_names : list[str] = ["NW.NGramTextClassification", "NW.Shared.Serialization", "NW.UnivariateForecasting", "nwreadinglistmanager"]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_year_spnv_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_tpl : Tuple[DataFrame, DataFrame] = ObjectMother().get_tts_by_year_spnv_tpl()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_year_spnv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
+ actual_tpl : Tuple[DataFrame, DataFrame] = self.df_factory.create_tts_by_year_spnv_tpl(
+ tt_df = tt_df,
+ years = years,
+ software_project_names = software_project_names,
+ software_project_name = software_project_names[0]
+ )
# Assert
- assert_frame_equal(expected_df , actual_df)
- def test_getttbyspnspv_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_tpl[0] , actual_tpl[0])
+ assert_frame_equal(expected_tpl[1] , actual_tpl[1])
+ def test_createttsbyspnspvdf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
years : list[int] = [2024]
software_project_names : list[str] = ["NW.NGramTextClassification", "NW.Shared.Serialization", "NW.UnivariateForecasting", "nwreadinglistmanager"]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_spn_spv_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_spn_spv_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_spn_spv(sessions_df = sessions_df, years = years, software_project_names = software_project_names)
+ actual_df : DataFrame = self.df_factory.create_tts_by_spn_spv_df(
+ tt_df = tt_df,
+ years = years,
+ software_project_names = software_project_names
+ )
# Assert
- assert_frame_equal(expected_df , actual_df)
- def test_getttsbymonth_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_df , actual_df)
+ def test_createttsbymonthtpl_shouldreturnexpectedtuple_wheninvoked(self):
# Arrange
years : list[int] = [2024]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tts_by_month_df()
-
- # Act
- actual_df : DataFrame = TimeTrackingManager().get_tts_by_month(sessions_df = sessions_df, years = years)
-
- # Assert
- assert_frame_equal(expected_df, actual_df)
- def test_updatefuturemonthstoempty_shouldreturnexpecteddataframe_wheninvoked(self):
-
- # Arrange
- now : datetime = datetime(2024, 2, 27)
- tts_by_month_df : DataFrame = ObjectMother().create_tts_by_month_df()
- expected_df : DataFrame = ObjectMother().create_tts_by_month_upd_df()
+ now : datetime = datetime(2024, 11, 30)
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_tpl : Tuple[DataFrame, DataFrame] = ObjectMother().get_tts_by_month_tpl()
# Act
- actual_df : DataFrame = TimeTrackingManager().update_future_months_to_empty(tts_by_month_df = tts_by_month_df, now = now)
+ actual_tpl : Tuple[DataFrame, DataFrame] = self.df_factory.create_tts_by_month_tpl(
+ tt_df = tt_df,
+ years = years,
+ now = now
+ )
# Assert
- assert_frame_equal(expected_df, actual_df)
- def test_createtimeranges_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_tpl[0] , actual_tpl[0])
+ assert_frame_equal(expected_tpl[1] , actual_tpl[1])
+ def test_createttsbytrdf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
unknown_id : str = "Unknown"
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_time_ranges_df()
+ remove_unknown_occurrences : bool = True
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_tr_df()
expected_df.sort_values(by = "TimeRangeId", ascending = True, inplace = True)
expected_df.reset_index(drop = True, inplace = True)
# Act
- actual_df : DataFrame = TimeTrackingManager().create_time_ranges_df(sessions_df = sessions_df, unknown_id = unknown_id)
+ actual_df : DataFrame = self.df_factory.create_tts_by_tr_df(
+ tt_df = tt_df,
+ unknown_id = unknown_id,
+ remove_unknown_occurrences = remove_unknown_occurrences
+ )
actual_df.sort_values(by = "TimeRangeId", ascending = True, inplace = True)
actual_df.reset_index(drop = True, inplace = True)
# Assert
assert_frame_equal(expected_df, actual_df)
- def test_removeunknownid_shouldreturnexpecteddataframe_whencontainsunknownid(self):
+ def test_createttsbyhashtagyeardf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
- unknown_id : str = "Unknown"
- expected_df : DataFrame = ObjectMother().create_time_ranges_df()
- time_ranges_df : DataFrame = ObjectMother().create_time_ranges_df()
- time_ranges_df.loc[len(time_ranges_df.index)] = [unknown_id, 3]
+ years : list[int] = [2024]
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_hashtag_year_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().remove_unknown_id(time_ranges_df = time_ranges_df, unknown_id = unknown_id)
+ actual_df : DataFrame = self.df_factory.create_tts_by_hashtag_year_df(tt_df = tt_df, years = years)
# Assert
- assert_frame_equal(expected_df, actual_df)
- def test_removeunknownid_shouldreturnexpecteddataframe_whendoesnotcontainunknownid(self):
+ assert_frame_equal(expected_df , actual_df)
+ def test_createttsbyhashtagdf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
- unknown_id : str = "Unknown"
- expected_df : DataFrame = ObjectMother().create_time_ranges_df()
- time_ranges_df : DataFrame = ObjectMother().create_time_ranges_df()
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_hashtag_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().remove_unknown_id(time_ranges_df = time_ranges_df, unknown_id = unknown_id)
+ actual_df : DataFrame = self.df_factory.create_tts_by_hashtag_df(tt_df = tt_df)
# Assert
- assert_frame_equal(expected_df, actual_df)
- def test_getttbyyearhashtag_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_df , actual_df)
+
+ @parameterized.expand([
+ [True],
+ [False]
+ ])
+ def test_createttsbyspndf_shouldreturnexpecteddataframe_wheninvoked(self, remove_untagged : bool):
# Arrange
years : list[int] = [2024]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_year_hashtag_df()
+ software_project_names : list[str] = ["NW.NGramTextClassification", "NW.Shared.Serialization", "NW.UnivariateForecasting", "nwreadinglistmanager"]
+ tt_df : DataFrame = ObjectMother().get_tt_df()
+ expected_df : DataFrame = ObjectMother().get_tts_by_spn_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_year_hashtag(sessions_df = sessions_df, years = years)
+ actual_df : DataFrame = self.df_factory.create_tts_by_spn_df(
+ tt_df = tt_df,
+ years = years,
+ software_project_names = software_project_names,
+ remove_untagged = remove_untagged
+ )
# Assert
- assert_frame_equal(expected_df , actual_df)
- def test_getttbyhashtag_shouldreturnexpecteddataframe_wheninvoked(self):
+ assert_frame_equal(expected_df , actual_df)
+
+ def test_createdefinitionsdf_shouldreturnexpecteddataframe_wheninvoked(self):
# Arrange
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_hashtag_df()
+ expected_df : DataFrame = ObjectMother().get_definitions_df()
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_hashtag(sessions_df = sessions_df)
+ actual_df : DataFrame = self.df_factory.create_definitions_df()
# Assert
assert_frame_equal(expected_df , actual_df)
+class TTMarkdownFactoryTestCase(unittest.TestCase):
- @parameterized.expand([
- [True],
- [False]
- ])
- def test_getttbyspn_shouldreturnexpecteddataframe_wheninvoked(self, remove_untagged : bool):
+ def setUp(self) -> None:
+
+ self.md_factory : TTMarkdownFactory = TTMarkdownFactory(markdown_helper = MarkdownHelper(formatter = Formatter()))
+ def test_createttsbymonthmd_shouldreturnexpectedstring_wheninvoked(self) -> None:
+
+ # Arrange
+ paragraph_title : str = "Time Tracking By Month"
+ last_update : datetime = datetime(2024, 11, 30)
+ tts_by_month_upd_df : DataFrame = ObjectMother().get_tts_by_month_tpl()[0]
+ expected : str = ObjectMother().get_tts_by_month_md()
+
+ # Act
+ actual : str = self.md_factory.create_tts_by_month_md(
+ paragraph_title = paragraph_title,
+ last_update = last_update,
+ tts_by_month_upd_df = tts_by_month_upd_df
+ )
+
+ # Assert
+ self.assertEqual(expected, actual)
+class TTAdapterTestCase(unittest.TestCase):
+
+ def setUp(self) -> None:
+ # Without Defaults
+ self.tts_by_year_spnv_display_only_spn : Optional[str] = "nwshared"
+
+ # With Defaults
+ self.excel_path : str = "/home/nwtimetracking/nwtimetrackingmanager/data/Time Tracking.xlsx"
+ self.excel_skiprows : int = 0
+ self.excel_nrows : int = 100
+ self.excel_tabname : str = "Sessions"
+ self.years : list[int] = [2023, 2024]
+ self.yearly_targets : list[YearlyTarget] = [
+ YearlyTarget(year = 2015, hours = timedelta(hours = 0)),
+ YearlyTarget(year = 2016, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2017, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2018, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2019, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2020, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2021, hours = timedelta(hours = 500)),
+ YearlyTarget(year = 2022, hours = timedelta(hours = 400)),
+ YearlyTarget(year = 2023, hours = timedelta(hours = 250)),
+ YearlyTarget(year = 2024, hours = timedelta(hours = 500))
+ ]
+ self.now : datetime = datetime(2023, 12, 1)
+ self.software_project_names : list[str] = [ "nwshared", "nwpackageversions"]
+ self.software_project_names_by_spv : list[str] = [ "nwshared" ]
+ self.tts_by_year_month_display_only_years : Optional[list[int]] = [2024]
+ self.tts_by_spn_remove_untagged : bool = True
+ self.tts_by_efs_is_correct : bool = True
+ self.tts_by_tr_unknown_id : str = "Unknown"
+ self.tts_by_tr_remove_unknown_occurrences : bool = True
+ self.md_infos : list[MDInfo] = [
+ MDInfo(id = TTID.TTSBYMONTH, file_name = "TIMETRACKINGBYMONTH.md", paragraph_title = "Time Tracking By Month")
+ ]
+ self.md_last_update : datetime = datetime(2023, 11, 25)
+
+ # Other
+ self.paragraph_title : str = "Time Tracking By Month"
+ def test_extractfilenameandparagraphtitle_shouldreturnexpectedvalues_whenidexists(self) -> None:
+
# Arrange
- years : list[int] = [2024]
- software_project_names : list[str] = ["NW.NGramTextClassification", "NW.Shared.Serialization", "NW.UnivariateForecasting", "nwreadinglistmanager"]
- sessions_df : DataFrame = ObjectMother().create_sessions_df()
- expected_df : DataFrame = ObjectMother().create_tt_by_spn_df()
+ df_factory : TTDataFrameFactory = Mock()
+ md_factory : TTMarkdownFactory = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ id : TTID = TTID.TTSBYMONTH
+ setting_bag : SettingBag = Mock(md_infos = self.md_infos)
# Act
- actual_df : DataFrame = TimeTrackingManager().get_tt_by_spn(sessions_df = sessions_df, years = years, software_project_names = software_project_names, remove_untagged = remove_untagged)
+ actual : Tuple[str, str] = tt_adapter.extract_file_name_and_paragraph_title(id = id, setting_bag = setting_bag)
# Assert
- assert_frame_equal(expected_df , actual_df)
-class MarkdownProcessorTestCase(unittest.TestCase):
+ self.assertEqual(actual, ("TIMETRACKINGBYMONTH.md", "Time Tracking By Month"))
+ def test_extractfilenameandparagraphtitle_shouldraiseexception_wheniddoesnotexist(self) -> None:
+
+ # Arrange
+ df_factory : TTDataFrameFactory = Mock()
+ md_factory : TTMarkdownFactory = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ id : TTID = TTID.TTSBYMONTH
- def test_processttsbymonthmd_shouldlogandsave_whenshowandsavearetrue(self) -> None:
+ md_infos : list[MDInfo] = [
+ MDInfo(id = Mock(id = "other_id"), file_name = "OTHERFILE.md", paragraph_title = "Other Title")
+ ]
+ setting_bag : SettingBag = Mock(md_infos = md_infos)
- # Arrange
- file_name : str = "TIMETRACKINGBYMONTH.md"
- file_path : str = f"/home/nwtimetracking/{file_name}"
- tts_by_month_upd_df, expected = ObjectMother().create_dtos_for_ttsbymonthmd()
- component_bag, _, markdown_processor = ObjectMother().create_service_objects_for_ttsbymonthmd()
+ # Act
+ with self.assertRaises(Exception) as context:
+ adapter.extract_file_name_and_paragraph_title(id = id, setting_bag = setting_bag)
+
+ # Assert
+ self.assertEqual(str(context.exception), _MessageCollection.no_mdinfo_found(id = id))
+ def test_createttdf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.excel_path = self.excel_path
+ setting_bag.excel_skiprows = self.excel_skiprows
+ setting_bag.excel_nrows = self.excel_nrows
+ setting_bag.excel_tabname = self.excel_tabname
+
+ # Act
+ adapter.create_tt_df(setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tt_df.assert_called_once_with(
+ excel_path = self.excel_path,
+ excel_skiprows = self.excel_skiprows,
+ excel_nrows = self.excel_nrows,
+ excel_tabname = self.excel_tabname
+ )
+ def test_createttsbymonthtpl_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.now = self.now
+
+ tt_df : Mock = Mock()
+
+ # Act
+ adapter.create_tts_by_month_tpl(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_month_tpl.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ now = self.now
+ )
+ def test_createttsbyyeardf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.yearly_targets = self.yearly_targets
+
+ tt_df : Mock = Mock()
+
+ # Act
+ adapter.create_tts_by_year_df(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_year_df.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ yearly_targets = self.yearly_targets
+ )
+ def test_createttsbyyearmonthdf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.yearly_targets = self.yearly_targets
+ setting_bag.tts_by_year_month_display_only_years = self.tts_by_year_month_display_only_years
+
+ tt_df : Mock = Mock()
# Act
- markdown_processor.process_tts_by_month_md(tts_by_month_upd_df = tts_by_month_upd_df)
+ adapter.create_tts_by_year_month_tpl(tt_df = tt_df, setting_bag = setting_bag)
# Assert
- self.assertEqual(component_bag.logging_function.call_count, 2)
- component_bag.logging_function.assert_has_calls([
- call(file_name + "\n"),
- call(expected)
- ])
- component_bag.file_manager.save_content.assert_called_with(content = expected, file_path = file_path)
+ df_factory.create_tts_by_year_month_tpl.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ yearly_targets = self.yearly_targets,
+ display_only_years = self.tts_by_year_month_display_only_years
+ )
+ def test_createttsbyyearmonthspnvtpl_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.software_project_names = self.software_project_names
+ setting_bag.tts_by_year_month_spnv_display_only_spn = self.software_project_names_by_spv
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_year_month_spnv_tpl(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_year_month_spnv_tpl.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ software_project_names = self.software_project_names,
+ software_project_name = self.software_project_names_by_spv
+ )
+ def test_createttsbyyearspnvtpl_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.software_project_names = self.software_project_names
+ setting_bag.tts_by_year_spnv_display_only_spn = self.tts_by_year_spnv_display_only_spn
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_year_spnv_tpl(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_year_spnv_tpl.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ software_project_names = self.software_project_names,
+ software_project_name = self.tts_by_year_spnv_display_only_spn
+ )
+ def test_createttsbyspndf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.software_project_names = self.software_project_names
+ setting_bag.tts_by_spn_remove_untagged = self.tts_by_spn_remove_untagged
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_spn_df(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_spn_df.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ software_project_names = self.software_project_names,
+ remove_untagged = self.tts_by_spn_remove_untagged
+ )
+ def test_createttsbyspnspvdf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+ setting_bag.software_project_names = self.software_project_names
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_spn_spv_df(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_spn_spv_df.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years,
+ software_project_names = self.software_project_names
+ )
+ def test_createttsbyhashtagyeardf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.years = self.years
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_hashtag_year_df(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_hashtag_year_df.assert_called_once_with(
+ tt_df = tt_df,
+ years = self.years
+ )
+ def test_createttsbyefstpl_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.tts_by_efs_is_correct = self.tts_by_efs_is_correct
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_efs_tpl(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_efs_tpl.assert_called_once_with(
+ tt_df = tt_df,
+ is_correct = self.tts_by_efs_is_correct
+ )
+ def test_createttsbytrdf_shouldcalldffactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
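+ # Expected trend marker: "↑" when td_2 is greater than td_1, "↓" when it is smaller, "=" when they are equal.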
+
+ setting_bag : Mock = Mock()
+ setting_bag.tts_by_tr_unknown_id = self.tts_by_tr_unknown_id
+ setting_bag.tts_by_tr_remove_unknown_occurrences = self.tts_by_tr_remove_unknown_occurrences
+
+ tt_df : Mock = Mock()
+
+ # Act
+ tt_adapter.create_tts_by_tr_df(tt_df = tt_df, setting_bag = setting_bag)
+
+ # Assert
+ df_factory.create_tts_by_tr_df.assert_called_once_with(
+ tt_df = tt_df,
+ unknown_id = self.tts_by_tr_unknown_id,
+ remove_unknown_occurrences = self.tts_by_tr_remove_unknown_occurrences
+ )
+ def test_createttsbymonthmd_shouldcallmdfactorywithexpectedarguments_wheninvoked(self) -> None:
+
+ # Arrange
+ df_factory : Mock = Mock()
+ md_factory : Mock = Mock()
+ adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : Mock = Mock()
+ setting_bag.md_infos = self.md_infos
+ setting_bag.md_last_update = self.md_last_update
+
+ tts_by_month_tpl : Tuple[Mock, Mock] = (Mock(), Mock())
+
+ # Act
+ adapter.create_tts_by_month_md(tts_by_month_tpl = tts_by_month_tpl, setting_bag = setting_bag)
+
+ # Assert
+ md_factory.create_tts_by_month_md.assert_called_once_with(
+ paragraph_title = self.md_infos[0].paragraph_title,
+ last_update = self.md_last_update,
+ tts_by_month_upd_df = tts_by_month_tpl[1]
+ )
+ def test_createsummary_shouldreturnexpectedsummary_wheninvoked(self) -> None:
+
+ # Arrange
+ tt_df : DataFrame = ObjectMother.get_tt_df()
+ tts_by_month_tpl : Tuple[DataFrame, DataFrame] = ObjectMother.get_tts_by_month_tpl()
+ tts_by_year_df : DataFrame = ObjectMother.get_tts_by_year_df()
+ tts_by_year_month_tpl : Tuple[DataFrame, DataFrame] = ObjectMother.get_tts_by_year_month_tpl()
+ tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame] = ObjectMother.get_tts_by_year_month_spnv_tpl()
+ tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame] = ObjectMother.get_tts_by_year_spnv_tpl()
+ tts_by_spn_df : DataFrame = ObjectMother.get_tts_by_spn_df()
+ tts_by_spn_spv_df : DataFrame = ObjectMother.get_tts_by_spn_spv_df()
+ tts_by_hashtag_df : DataFrame = ObjectMother.get_tts_by_hashtag_df()
+ tts_by_hashtag_year_df : DataFrame = ObjectMother.get_tts_by_hashtag_year_df()
+ tts_by_efs_tpl : Tuple[DataFrame, DataFrame] = Mock() # TO UPDATE
+ tts_by_tr_df : DataFrame = ObjectMother.get_tts_by_tr_df()
+ definitions_df : DataFrame = ObjectMother.get_definitions_df()
+ tts_by_month_md : str = ObjectMother.get_tts_by_month_md()
+
+ df_factory : TTDataFrameFactory = Mock()
+ df_factory.create_tt_df.return_value = tt_df
+ df_factory.create_tts_by_month_tpl.return_value = tts_by_month_tpl
+ df_factory.create_tts_by_year_df.return_value = tts_by_year_df
+ df_factory.create_tts_by_year_month_tpl.return_value = tts_by_year_month_tpl
+ df_factory.create_tts_by_year_month_spnv_tpl.return_value = tts_by_year_month_spnv_tpl
+ df_factory.create_tts_by_year_spnv_tpl.return_value = tts_by_year_spnv_tpl
+ df_factory.create_tts_by_spn_df.return_value = tts_by_spn_df
+ df_factory.create_tts_by_spn_spv_df.return_value = tts_by_spn_spv_df
+ df_factory.create_tts_by_hashtag_df.return_value = tts_by_hashtag_df
+ df_factory.create_tts_by_hashtag_year_df.return_value = tts_by_hashtag_year_df
+ df_factory.create_tts_by_efs_tpl.return_value = tts_by_efs_tpl
+ df_factory.create_tts_by_tr_df.return_value = tts_by_tr_df
+ df_factory.create_definitions_df.return_value = definitions_df
+
+ md_factory : TTMarkdownFactory = Mock()
+ md_factory.create_tts_by_month_md.return_value = tts_by_month_md
+
+ tt_adapter : TTAdapter = TTAdapter(df_factory = df_factory, md_factory = md_factory)
+
+ setting_bag : SettingBag = ObjectMother.get_setting_bag()
+
+ # Act
+ actual : TTSummary = tt_adapter.create_summary(setting_bag=setting_bag)
+
+ # Assert
+ assert_frame_equal(actual.tt_df, tt_df)
+ assert_frame_equal(actual.tts_by_month_tpl[0], tts_by_month_tpl[0])
+ assert_frame_equal(actual.tts_by_month_tpl[1], tts_by_month_tpl[1])
+ assert_frame_equal(actual.tts_by_year_df, tts_by_year_df)
+ assert_frame_equal(actual.tts_by_year_month_tpl[0], tts_by_year_month_tpl[0])
+ assert_frame_equal(actual.tts_by_year_month_tpl[1], tts_by_year_month_tpl[1])
+ assert_frame_equal(actual.tts_by_year_month_spnv_tpl[0], tts_by_year_month_spnv_tpl[0])
+ assert_frame_equal(actual.tts_by_year_month_spnv_tpl[1], tts_by_year_month_spnv_tpl[1])
+ assert_frame_equal(actual.tts_by_year_spnv_tpl[0], tts_by_year_spnv_tpl[0])
+ assert_frame_equal(actual.tts_by_year_spnv_tpl[1], tts_by_year_spnv_tpl[1])
+ assert_frame_equal(actual.tts_by_spn_df, tts_by_spn_df)
+ assert_frame_equal(actual.tts_by_spn_spv_df, tts_by_spn_spv_df)
+ assert_frame_equal(actual.tts_by_hashtag_df, tts_by_hashtag_df)
+ assert_frame_equal(actual.tts_by_hashtag_year_df, tts_by_hashtag_year_df)
+ # assert_frame_equal(actual.tts_by_efs_tpl[0], tts_by_efs_tpl[0])
+ # assert_frame_equal(actual.tts_by_efs_tpl[1], tts_by_efs_tpl[1])
+ assert_frame_equal(actual.tts_by_tr_df, tts_by_tr_df)
+ assert_frame_equal(actual.definitions_df, definitions_df)
+ self.assertEqual(actual.tts_by_month_md, tts_by_month_md)
+class ComponentBagTestCase(unittest.TestCase):
+
+ def test_init_shouldinitializeobjectwithexpectedproperties_whendefault(self) -> None:
+
+ # Arrange
+ # Act
+ component_bag : ComponentBag = ComponentBag()
+
+ # Assert
+ self.assertIsInstance(component_bag.file_path_manager, FilePathManager)
+ self.assertIsInstance(component_bag.file_manager, FileManager)
+ self.assertIsInstance(component_bag.tt_adapter, TTAdapter)
+ self.assertIsInstance(component_bag.logging_function, FunctionType)
+ self.assertIsInstance(component_bag.displayer, Displayer)
+class TimeTrackingProcessorTestCase(unittest.TestCase):
+
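+ # Each test below stubs tt_adapter.create_summary and checks that, when the relevant options_* setting
+ # contains "display", the Displayer receives the expected dataframe (plus formatters where applicable).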
+ def test_processtt_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tt_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tt_df = tt_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tt = ["display"]
+ setting_bag.tt_head_n = 5
+ setting_bag.tt_display_head_n_with_tail = False
+ setting_bag.tt_hide_index = True
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tt()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tt_df.head(5),
+ hide_index = True
+ )
+ def test_processttsbymonth_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_month_tpl : Tuple[DataFrame, DataFrame] = (Mock(), Mock())
+
+ summary : Mock = Mock()
+ summary.tts_by_month_tpl = tts_by_month_tpl
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_month = ["display"]
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_month()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_month_tpl[1]
+ )
+ def test_processttsbyyear_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_year_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tts_by_year_df = tts_by_year_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_year = ["display"]
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_year()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_year_df
+ )
+ def test_processttsbyyearmonth_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_year_month_tpl : Tuple[DataFrame, DataFrame] = (Mock(), Mock())
+
+ summary : Mock = Mock()
+ summary.tts_by_year_month_tpl = tts_by_year_month_tpl
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_year_month = ["display"]
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_year_month()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_year_month_tpl[1]
+ )
+ def test_processttsbyyearmonthspnv_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_year_month_spnv_tpl : Tuple[DataFrame, DataFrame] = (Mock(), Mock())
+
+ summary : Mock = Mock()
+ summary.tts_by_year_month_spnv_tpl = tts_by_year_month_spnv_tpl
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_year_month_spnv = ["display"]
+ setting_bag.tts_by_year_month_spnv_formatters = {"%_DME": "{:.2f}", "%_TME": "{:.2f}"}
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_year_month_spnv()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_year_month_spnv_tpl[1],
+ formatters = setting_bag.tts_by_year_month_spnv_formatters
+ )
+ def test_processttsbyyearspnv_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_year_spnv_tpl : Tuple[DataFrame, DataFrame] = (Mock(), Mock())
+
+ summary : Mock = Mock()
+ summary.tts_by_year_spnv_tpl = tts_by_year_spnv_tpl
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_year_spnv = ["display"]
+ setting_bag.tts_by_year_spnv_formatters = {"%_DYE": "{:.2f}", "%_TYE": "{:.2f}"}
+
+ # Act
+ processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ processor.initialize()
+ processor.process_tts_by_year_spnv()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_year_spnv_tpl[1],
+ formatters = setting_bag.tts_by_year_spnv_formatters
+ )
+ def test_processttsbyspn_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_spn_df : DataFrame = Mock()
+ definitions_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tts_by_spn_df = tts_by_spn_df
+ summary.definitions_df = definitions_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_spn = ["display"]
+ setting_bag.tts_by_spn_formatters = {"%_DE" : "{:.2f}", "%_TE" : "{:.2f}"}
+
+ # Act
+ tt_processor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_spn()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_spn_df,
+ formatters = setting_bag.tts_by_spn_formatters
+ )
+ def test_processttsbyspnspv_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_spn_spv_df : DataFrame = Mock()
+ definitions_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tts_by_spn_spv_df = tts_by_spn_spv_df
+ summary.definitions_df = definitions_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_spn_spv = ["display"]
+
+ # Act
+ tt_processor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_spn_spv()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_spn_spv_df
+ )
+ def test_processttsbyhashtag_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_hashtag_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tts_by_hashtag_df = tts_by_hashtag_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_hashtag = ["display"]
+ setting_bag.tts_by_hashtag_formatters = {"Effort%" : "{:.2f}"}
+
+ # Act
+ tt_processor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_hashtag()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_hashtag_df,
+ formatters = setting_bag.tts_by_hashtag_formatters
+ )
+ def test_processttsbyefs_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_efs_tpl : Tuple[DataFrame, DataFrame] = (Mock(), Mock())
+ tts_by_efs_df : DataFrame = tts_by_efs_tpl[1]
+
+ summary : Mock = Mock()
+ summary.tts_by_efs_tpl = tts_by_efs_tpl
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_efs = ["display"]
+
+ # Act
+ tt_processor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_efs()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_efs_df
+ )
+ def test_processttsbytr_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ tts_by_tr_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.tts_by_tr_df = tts_by_tr_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_tts_by_tr = ["display"]
+ setting_bag.tts_by_tr_head_n = uint(10)
+
+ # Act
+ tt_processor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_tts_by_tr()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = tts_by_tr_df.head(10)
+ )
+ def test_processdefinitions_shoulddisplay_whenoptionisdisplay(self) -> None:
+
+ # Arrange
+ definitions_df : DataFrame = Mock()
+
+ summary : Mock = Mock()
+ summary.definitions_df = definitions_df
+
+ displayer : Mock = Mock()
+ tt_adapter : Mock = Mock()
+ tt_adapter.create_summary.return_value = summary
+
+ component_bag : Mock = Mock()
+ component_bag.displayer = displayer
+ component_bag.tt_adapter = tt_adapter
+
+ setting_bag : Mock = Mock()
+ setting_bag.options_definitions = ["display"]
+
+ # Act
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = component_bag, setting_bag = setting_bag)
+ tt_processor.initialize()
+ tt_processor.process_definitions()
+
+ # Assert
+ displayer.display.assert_called_once_with(
+ df = definitions_df
+ )
+
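+ # Every process_* method is expected to fail fast with the same message when initialize() has not been run.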
+ @parameterized.expand([
+ ["process_tt"],
+ ["process_tts_by_month"],
+ ["process_tts_by_year"],
+ ["process_tts_by_year_month"],
+ ["process_tts_by_year_month_spnv"],
+ ["process_tts_by_year_spnv"],
+ ["process_tts_by_spn"],
+ ["process_tts_by_spn_spv"],
+ ["process_tts_by_hashtag"],
+ ["process_tts_by_hashtag_year"],
+ ["process_tts_by_efs"],
+ ["process_tts_by_tr"],
+ ["process_definitions"]
+ ])
+ def test_processmethod_shouldraiseexception_wheninitializenotrun(self, method_name : str) -> None:
+
+ # Arrange
+ tt_processor : TimeTrackingProcessor = TimeTrackingProcessor(component_bag = Mock(), setting_bag = Mock())
+
+ # Act & Assert
+ with self.assertRaises(Exception) as context:
+ getattr(tt_processor, method_name)()
+
+ self.assertEqual(str(context.exception), "Please run the 'initialize' method first.")
# MAIN
if __name__ == "__main__":