add pyproject toml for backend/frontend modules
Signed-off-by: Jack Luar <[email protected]>
luarss committed Oct 26, 2024
1 parent d3fdd46 commit 5c6d07e
Showing 16 changed files with 257 additions and 234 deletions.
5 changes: 0 additions & 5 deletions .pre-commit-config.yaml
@@ -14,8 +14,3 @@ repos:
args:
- --maxkb=120
- --enforce-all

-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.1
-    hooks:
-    - id: ruff
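The ruff hook is dropped here, while ruff itself remains configured through the [tool.ruff] tables in the backend and frontend pyproject.toml files further down. The remaining arguments in the hunk (--maxkb=120, --enforce-all) presumably belong to the check-added-large-files hook from pre-commit-hooks, whose id sits outside the rendered lines. A rough sketch of what such a check enforces — not the actual hook implementation — in Python:

    # Rough analogue of check-added-large-files with --maxkb=120 and
    # --enforce-all: every listed file, staged or already tracked,
    # must be at most 120 KB.
    import pathlib
    import sys


    def oversized(paths: list[str], max_kb: int = 120) -> list[str]:
        return [p for p in paths if pathlib.Path(p).stat().st_size > max_kb * 1024]


    if __name__ == "__main__":
        too_big = oversized(sys.argv[1:])
        for path in too_big:
            print(f"{path} exceeds 120 KB")
        sys.exit(1 if too_big else 0)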
249 changes: 125 additions & 124 deletions backend/build_docs.py

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion backend/chatbot.py
@@ -45,7 +45,7 @@ def get_history_str(chat_history: list[dict[str, str]]) -> str:
urls = output[-2][tool]['urls']
chat_history.append({'User': user_question, 'AI': llm_response})

-print(f'LLM: {llm_response} \nSources: {srcs} \nURLs: {urls}\n\n')
+print(f"LLM: {llm_response} \nSources: {srcs} \nURLs: {urls}\n\n")

else:
print('LLM response extraction failed')
10 changes: 5 additions & 5 deletions backend/pyproject.toml
@@ -3,16 +3,16 @@ requires = ['setuptools>=60', 'Cython==3.0.7', 'wheel==0.42.0']
build-backend = "setuptools.build_meta"

[project]
name = "webconsole"
name = "ora-backend"
version = "1.0.0"
dynamic = ["dependencies", "optional-dependencies"]
requires-python = ">=3.10"
requires-python = ">=3.12"
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3 :: Only",
]

Expand All @@ -21,7 +21,7 @@ dependencies = { file = ["requirements.txt"] }
optional-dependencies = { test = { file = ["requirements-test.txt"] } }

[tool.mypy]
python_version = "3.10"
python_version = "3.12"
warn_unused_configs = true
warn_return_any = true
warn_unused_ignores = true
@@ -80,7 +80,7 @@ unfixable = []
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[tool.ruff.format]
quote-style = "single"
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
2 changes: 1 addition & 1 deletion backend/src/agents/retriever_graph.py
@@ -166,7 +166,7 @@ def agent(self, state: AgentState) -> dict[str, list[str]]:
tool_calls = response.get('tool_names', [])
for tool in tool_calls:
if tool not in self.tool_names:
-logging.warning(f'Tool {tool} not found in tool list.')
+logging.warning(f"Tool {tool} not found in tool list.")
tool_calls.remove(tool)
else:
logging.warning('Tool selection failed. Returning empty tool list.')
4 changes: 2 additions & 2 deletions backend/src/tools/format_docs.py
@@ -16,13 +16,13 @@ def format_docs(docs: list[Document]) -> tuple[str, list[str], list[str]]:
if 'man1' in doc_src or 'man2' in doc_src:
doc_text = f"Command Name: {doc_src.split('/')[-1].replace('.md', '')}\n\n{doc.page_content}"
elif 'gh_discussions' in doc_src:
-doc_text = f'{gh_discussion_prompt_template}\n\n{doc.page_content}'
+doc_text = f"{gh_discussion_prompt_template}\n\n{doc.page_content}"
else:
doc_text = doc.page_content

if 'url' in doc.metadata:
doc_urls.append(doc.metadata['url'])

-doc_texts += f'\n\n- - - - - - - - - - - - - - - \n\n{doc_text}'
+doc_texts += f"\n\n- - - - - - - - - - - - - - - \n\n{doc_text}"

return doc_texts, doc_srcs, doc_urls
2 changes: 1 addition & 1 deletion backend/src/tools/process_html.py
@@ -33,7 +33,7 @@ def process_html(
For processing OR/ORFS docs
"""
if not os.path.exists(folder_path) or not os.listdir(folder_path):
-logging.error(f'{folder_path} is not populated, returning empty list.')
+logging.error(f"{folder_path} is not populated, returning empty list.")
return []

with open('data/source_list.json') as f:
4 changes: 2 additions & 2 deletions backend/src/tools/process_json.py
@@ -24,7 +24,7 @@ def generate_knowledge_base(file_paths: list[str]) -> list[Document]:
for file_path in file_paths:
try:
with open(file_path, 'r') as file:
-logging.debug(f'Processing {file_path}...')
+logging.debug(f"Processing {file_path}...")
for line in file:
try:
json_object = json.loads(line)
Expand All @@ -37,6 +37,6 @@ def generate_knowledge_base(file_paths: list[str]) -> list[Document]:
except json.JSONDecodeError:
logging.error('Error: Invalid JSON format line:', line)
except FileNotFoundError:
-logging.error(f'{file_path} not found.')
+logging.error(f"{file_path} not found.")

return json_knowledge_base
2 changes: 1 addition & 1 deletion backend/src/tools/process_md.py
@@ -51,7 +51,7 @@ def process_md(
"""
# if no files in the directory
if not os.path.exists(folder_path) or not os.listdir(folder_path):
-logging.error(f'{folder_path} is not populated, returning empty list.')
+logging.error(f"{folder_path} is not populated, returning empty list.")
return []

with open('data/source_list.json') as f:
2 changes: 1 addition & 1 deletion backend/src/tools/process_pdf.py
@@ -30,7 +30,7 @@ def process_pdf_docs(file_path: str) -> list[Document]:
try:
documents = loader.load_and_split(text_splitter=text_splitter)
except PdfStreamError:
-logging.error(f'Error processing PDF: {file_path} is corrupted or incomplete.')
+logging.error(f"Error processing PDF: {file_path} is corrupted or incomplete.")

for doc in documents:
try:
18 changes: 9 additions & 9 deletions backend/src/vectorstores/faiss.py
@@ -91,7 +91,7 @@ def add_md_docs(
processed_mddocs: list[Document] = []

for folder_path in folder_paths:
-logging.debug(f'Processing [{folder_path}]...')
+logging.debug(f"Processing [{folder_path}]...")
processed_mddocs.extend(
process_md(
folder_path=folder_path,
Expand All @@ -101,7 +101,7 @@ def add_md_docs(
)

if processed_mddocs:
-logging.info(f'Adding {folder_paths} to FAISS database...\n')
+logging.info(f"Adding {folder_paths} to FAISS database...\n")
self._add_to_db(documents=processed_mddocs)
self.processed_docs.extend(processed_mddocs)
else:
Expand All @@ -119,15 +119,15 @@ def add_md_manpages(

processed_manpages: list[Document] = []
for folder_path in folder_paths:
-logging.debug(f'Processing [{folder_path}]...')
+logging.debug(f"Processing [{folder_path}]...")
processed_manpages.extend(
process_md(
folder_path=folder_path, split_text=False, chunk_size=chunk_size
)
)

if processed_manpages:
-logging.info(f'Adding {folder_paths} to FAISS database...\n')
+logging.info(f"Adding {folder_paths} to FAISS database...\n")
self._add_to_db(documents=processed_manpages)
self.processed_docs.extend(processed_manpages)
else:
Expand All @@ -145,19 +145,19 @@ def add_html(

processed_html_docs: list[Document] = []
for folder_path in folder_paths:
-logging.debug(f'Processing [{folder_path}]...')
+logging.debug(f"Processing [{folder_path}]...")
processed_html_docs.extend(
process_html(
folder_path=folder_path, split_text=True, chunk_size=chunk_size
)
)

if processed_html_docs:
-logging.info(f'Adding {folder_paths} to FAISS database...\n')
+logging.info(f"Adding {folder_paths} to FAISS database...\n")
self._add_to_db(documents=processed_html_docs)
self.processed_docs.extend(processed_html_docs)
else:
-raise ValueError(f'Could not add {folder_paths}. No HTML docs processed.')
+raise ValueError(f"Could not add {folder_paths}. No HTML docs processed.")

if return_docs:
return processed_html_docs
Expand All @@ -172,14 +172,14 @@ def add_documents(
processed_otherdocs: list[Document] = []

for file_path in file_paths:
-logging.debug(f'Processing [{file_path}]...')
+logging.debug(f"Processing [{file_path}]...")
if file_type == 'pdf':
processed_otherdocs.extend(process_pdf_docs(file_path=file_path))
else:
raise ValueError('File type not supported.')

if processed_otherdocs:
-logging.info(f'Adding [{file_paths}] to FAISS database...\n')
+logging.info(f"Adding [{file_paths}] to FAISS database...\n")
self._add_to_db(documents=processed_otherdocs)
self.processed_docs.extend(processed_otherdocs)
else:
9 changes: 0 additions & 9 deletions frontend/mypy.ini

This file was deleted.

88 changes: 88 additions & 0 deletions frontend/pyproject.toml
@@ -0,0 +1,88 @@
[build-system]
requires = ['setuptools>=60', 'Cython==3.0.7', 'wheel==0.42.0']
build-backend = "setuptools.build_meta"

[project]
name = "ora-frontend"
version = "1.0.0"
dynamic = ["dependencies", "optional-dependencies"]
requires-python = ">=3.12"
classifiers = [
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3 :: Only",
]

[tool.setuptools.dynamic]
dependencies = { file = ["requirements.txt"] }
optional-dependencies = { test = { file = ["requirements-test.txt"] } }

[tool.mypy]
python_version = "3.12"
warn_unused_configs = true
warn_return_any = true
warn_unused_ignores = true
strict_optional = true
disable_error_code = ["call-arg"]
exclude = "src/post_install.py"

[[tool.mypy.overrides]]
module = "transformers.*"
ignore_missing_imports = true

[tool.ruff]
exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
]
line-length = 88
indent-width = 4
target-version = "py310"

[tool.ruff.lint]
select = ["E4", "E7", "E9","E301","E304","E305","E401","E223","E224","E242", "E", "F" ,"N", "W", "C90"]
extend-select = ["D203", "D204"]
ignore = ["E501", "C901"]
preview = true

# Allow fix for all enabled rules (when `--fix` is provided).
fixable = ["ALL"]
unfixable = []

# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

[tool.ruff.format]
quote-style = "double"
indent-style = "space"
skip-magic-trailing-comma = false
line-ending = "auto"
docstring-code-format = false
docstring-code-line-length = "dynamic"
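For context on the dummy-variable regex above (ruff's default pattern): it lets underscore-prefixed locals go unused without tripping the unused-variable check. A minimal sketch, with hypothetical names:

    def summarize(entries: list[dict]) -> int:
        # `_skipped` matches the dummy-variable regex, so ruff's unused-variable
        # rule (F841, enabled through the "F" selector above) does not flag it
        # even though it is never read; a name like `skipped` would be reported.
        _skipped = [e for e in entries if not e.get("keep")]
        return len([e for e in entries if e.get("keep")])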
56 changes: 0 additions & 56 deletions frontend/ruff.toml

This file was deleted.

(Diffs for the remaining changed files were not rendered.)
