Skip to content

Commit

Permalink
Merge branch 'dev' into pgvector-add-normalization
Browse files Browse the repository at this point in the history
  • Loading branch information
borisarzentar authored Jan 27, 2025
2 parents 89d4b7a + bd4980c commit 8da81c1
Show file tree
Hide file tree
Showing 12 changed files with 2,054 additions and 936 deletions.
24 changes: 24 additions & 0 deletions .github/workflows/clean_stale_pr.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
name: clean | remove stale PRs

on:
# Run this action periodically (daily at 0:00 UTC in this example).
schedule:
- cron: "0 0 * * *"
# Optionally, also run when pull requests are labeled, unlabeled, synchronized, or reopened
# to update the stale timer as needed. Uncomment if desired.
# pull_request:
# types: [labeled, unlabeled, synchronize, reopened]

jobs:
stale:
runs-on: ubuntu-latest
steps:
- name: Mark and Close Stale
uses: actions/stale@v6
with:
# Number of days of inactivity before the pull request is marked stale
days-before-stale: 60
# Number of days of inactivity after being marked stale before the pull request is closed
days-before-close: 7
# Comment to post when marking as stale
      stale-pr-message: "This pull request has been automatically marked as stale due to inactivity."
5 changes: 3 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -217,10 +217,11 @@ Cognee supports a variety of tools and services for different operations:

## Demo

Check out our demo notebook [here](https://github.com/topoteretes/cognee/blob/main/notebooks/cognee_demo.ipynb)
Check out our demo notebook [here](https://github.com/topoteretes/cognee/blob/main/notebooks/cognee_demo.ipynb) or watch the YouTube video below


[<img src="https://i3.ytimg.com/vi/-ARUfIzhzC4/maxresdefault.jpg" width="100%">](https://www.youtube.com/watch?v=BDFt4xVPmro "Learn about cognee: 55")
[<img src="https://img.youtube.com/vi/fI4hDzguN5k/maxresdefault.jpg" width="100%">](https://www.youtube.com/watch?v=fI4hDzguN5k "Learn about cognee: 55")



## Get Started
Expand Down
6 changes: 4 additions & 2 deletions cognee-mcp/src/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from .server import mcp
from .server import main as server_main


def main():
"""Main entry point for the package."""
mcp.run(transport="stdio")
import asyncio

asyncio.run(server_main())
13 changes: 9 additions & 4 deletions cognee-mcp/src/client.py
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,8 @@

# Create server parameters for stdio connection
server_params = StdioServerParameters(
command="mcp", # Executable
args=["run", "src/server.py"], # Optional command line arguments
command="uv", # Executable
args=["--directory", ".", "run", "cognee"], # Optional command line arguments
env=None, # Optional environment variables
)

Expand Down Expand Up @@ -33,10 +33,15 @@ async def run():
async with ClientSession(read, write, timedelta(minutes=3)) as session:
await session.initialize()

toolResult = await session.list_tools()

toolResult = await session.call_tool("prune", arguments={})

toolResult = await session.call_tool("cognify", arguments={"text": text})
# toolResult = await session.call_tool("search", arguments={"search_query": "AI"})

print(f"Cognify result: {toolResult}")
toolResult = await session.call_tool("search", arguments={"search_query": "AI"})

print(f"Cognify result: {toolResult.content}")


if __name__ == "__main__":
Expand Down
120 changes: 110 additions & 10 deletions cognee-mcp/src/server.py
100644 → 100755
Original file line number Diff line number Diff line change
@@ -1,16 +1,97 @@
import os
import cognee
import logging
import importlib.util
from contextlib import redirect_stderr, redirect_stdout

# from PIL import Image as PILImage
from mcp.server.fastmcp import FastMCP
import mcp.types as types
from mcp.server import Server, NotificationOptions
from mcp.server.models import InitializationOptions
from cognee.api.v1.search import SearchType
from cognee.shared.data_models import KnowledgeGraph

mcp = FastMCP("cognee", timeout=120000)
mcp = Server("cognee")

logger = logging.getLogger(__name__)


@mcp.list_tools()
async def list_tools() -> list[types.Tool]:
    """Advertise the tools this MCP server exposes to connected clients.

    Returns the three cognee tools — cognify, search, and prune — each with
    the JSON schema describing its accepted arguments.
    """
    # JSON schema for the "cognify" tool: only "text" is required.
    cognify_schema = {
        "type": "object",
        "properties": {
            "text": {
                "type": "string",
                "description": "The text to cognify",
            },
            "graph_model_file": {
                "type": "string",
                "description": "The path to the graph model file",
            },
            "graph_model_name": {
                "type": "string",
                "description": "The name of the graph model",
            },
        },
        "required": ["text"],
    }

    # JSON schema for the "search" tool: a single required query string.
    search_schema = {
        "type": "object",
        "properties": {
            "search_query": {
                "type": "string",
                "description": "The query to search for",
            },
        },
        "required": ["search_query"],
    }

    # "prune" takes no arguments at all.
    prune_schema = {
        "type": "object",
        "properties": {},
    }

    return [
        types.Tool(
            name="cognify",
            description="Cognifies text into knowledge graph",
            inputSchema=cognify_schema,
        ),
        types.Tool(
            name="search",
            description="Searches for information in knowledge graph",
            inputSchema=search_schema,
        ),
        types.Tool(
            name="prune",
            description="Prunes knowledge graph",
            inputSchema=prune_schema,
        ),
    ]


@mcp.call_tool()
async def call_tools(name: str, arguments: dict) -> list[types.TextContent]:
    """Dispatch an MCP tool call to the matching local coroutine.

    Args:
        name: Tool name as advertised by ``list_tools`` ("cognify",
            "search", or "prune").
        arguments: Tool arguments matching the tool's input schema; may be
            None/empty for tools without arguments.

    Returns:
        A single TextContent item with either the tool's result or an
        error description. Never raises: all failures are logged and
        reported back as text so the MCP session stays alive.
    """
    try:
        # Some clients send no arguments object for zero-argument tools.
        arguments = arguments or {}

        with open(os.devnull, "w") as fnull:
            # Cognee's internals print to stdout/stderr; on a stdio MCP
            # transport that would corrupt the protocol stream, so silence
            # both for the duration of the call.
            with redirect_stdout(fnull), redirect_stderr(fnull):
                if name == "cognify":
                    await cognify(
                        text=arguments["text"],
                        graph_model_file=arguments.get("graph_model_file", None),
                        graph_model_name=arguments.get("graph_model_name", None),
                    )

                    return [types.TextContent(type="text", text="Ingested")]
                elif name == "search":
                    search_results = await search(arguments["search_query"])

                    return [types.TextContent(type="text", text=search_results)]
                elif name == "prune":
                    await prune()

                    return [types.TextContent(type="text", text="Pruned")]
                else:
                    # Bug fix: an unknown tool name previously fell through
                    # every branch and the coroutine implicitly returned
                    # None, which the MCP layer cannot serialize. Report it
                    # explicitly instead.
                    return [types.TextContent(type="text", text=f"Unknown tool: {name}")]
    except Exception as e:
        logger.error(f"Error calling tool '{name}': {str(e)}")
        return [types.TextContent(type="text", text=f"Error calling tool '{name}': {str(e)}")]


@mcp.tool()
async def cognify(text: str, graph_model_file: str = None, graph_model_name: str = None) -> str:
"""Build knowledge graph from the input text"""
if graph_model_file and graph_model_name:
Expand All @@ -25,10 +106,7 @@ async def cognify(text: str, graph_model_file: str = None, graph_model_name: str
except Exception as e:
raise ValueError(f"Failed to cognify: {str(e)}")

return "Ingested"


@mcp.tool()
async def search(search_query: str) -> str:
"""Search the knowledge graph"""
search_results = await cognee.search(SearchType.INSIGHTS, query_text=search_query)
Expand All @@ -38,16 +116,36 @@ async def search(search_query: str) -> str:
return results


@mcp.tool()
async def prune() -> str:
    """Reset the knowledge graph.

    Removes all ingested data and then resets the system state (including
    metadata). This is irreversible.
    """
    await cognee.prune.prune_data()
    # metadata=True also clears system-level metadata, not just stored data.
    await cognee.prune.prune_system(metadata=True)

    return "Pruned"

async def main():
    """Run the cognee MCP server over the stdio transport.

    Opens a stdio read/write stream pair and serves the module-level
    ``mcp`` Server on it until the client disconnects. Any startup or
    runtime failure is logged with a traceback and re-raised.
    """
    try:
        # Imported lazily so an import failure is caught and logged below.
        from mcp.server.stdio import stdio_server

        init_options = InitializationOptions(
            server_name="cognee",
            server_version="0.1.0",
            capabilities=mcp.get_capabilities(
                notification_options=NotificationOptions(),
                experimental_capabilities={},
            ),
        )

        async with stdio_server() as (read_stream, write_stream):
            await mcp.run(
                read_stream=read_stream,
                write_stream=write_stream,
                initialization_options=init_options,
                # Surface handler exceptions instead of swallowing them.
                raise_exceptions=True,
            )

    except Exception as e:
        logger.error(f"Server failed to start: {str(e)}", exc_info=True)
        raise


# @mcp.tool()
# async def visualize() -> Image:
# """Visualize the knowledge graph"""
# try:
Expand Down Expand Up @@ -116,4 +214,6 @@ def load_class(model_file, model_name):

if __name__ == "__main__":
# Initialize and run the server
mcp.run(transport="stdio")
import asyncio

asyncio.run(main())
Loading

0 comments on commit 8da81c1

Please sign in to comment.