Commit

Merge pull request #109 from FYP-2024-IQMA/SCRUM-164-Update-feedback-details-to-S3

Scrum 164 update feedback details to s3
rrachea authored Oct 26, 2024
2 parents 586942c + 7cd2348 commit 42acba2
Showing 11 changed files with 480 additions and 32 deletions.
3 changes: 3 additions & 0 deletions backend/src/app.ts
@@ -16,6 +16,8 @@ import quizRouter from "./routes/quizRouter";
import resultRouter from "./routes/resultRouter";
import sectionRouter from "./routes/sectionRouter";
import unitRouter from "./routes/unitRouter";
import accountsGamificationRouter from "./routes/accountsGamificationRouter";
import feedbackRouter from "./routes/feedbackRouter";

const app = express();
const port = 3000;
@@ -40,6 +42,7 @@ app.use("/lesson", lessonRouter);
app.use("/section", sectionRouter);
app.use("/clickstream", clickstreamRouter);
app.use("/accounts", accountsGamificationRouter);
app.use("/feedback", feedbackRouter);

// Start the Express server
app.listen(port, () => {
64 changes: 32 additions & 32 deletions backend/src/chatbot/app.py
@@ -57,38 +57,38 @@ async def generate_text(prompt: Prompt):

# @app.post("/langchain")
# async def langchain_text(prompt: Prompt):
# """
# Generate a response from Agent-integrated chain based on the role and prompt.
# """
# logger.info("Endpoint '/langchain' has been called with prompt: %s", prompt)
# try:
# # llm = ChatOpenAI(
# # model="gpt-4o-mini",
# # api_key=os.environ.get("OPENAI_API_KEY"),
# # )
# config = {
# "configurable": {
# "session_id": "abc123",
# }
# }
# if prompt.history:
# # format response for langchain
# langchain_format = convert_openai_messages(prompt.history)
# logger.info("Converted langchain messages: %s", langchain_format)
# input = {
# "input": prompt.content,
# "history": langchain_format,
# }
# response = full_chain_w_history.invoke(input, config=config)
# else:
# langchain_format = convert_openai_messages([{"role": prompt.role, "content": prompt.content}])
# logger.info("Converted langchain messages: %s", langchain_format)
# input = {
# "input": prompt.content,
# "history": langchain_format,
# }
# logger.info("Prompt: %s", prompt)
# response = full_chain_w_history.invoke({"input": input}, config=config)

# return {
# "role": "assistant",
1 change: 1 addition & 0 deletions backend/src/chatbot/requirements.txt
@@ -1,3 +1,4 @@
# chromadb
fastapi
# langchain
# langchain-chroma
17 changes: 17 additions & 0 deletions backend/src/controllers/feedbackController.ts
@@ -0,0 +1,17 @@
import { Request, Response } from "express";
import * as feedbackService from "../services/feedbackService";
import handleError from "../errors/errorHandling";

export const sendFeedback = async (req: Request, res: Response) => {
    const messageBody = req.body;

    try {
        // sendMessage resolves with no value, so there is nothing to capture here
        await feedbackService.sendMessage(messageBody);
        res.status(200).json({ message: "Published message successfully" });
    } catch (error: any) {
        const errorResponse = handleError(error);
        if (errorResponse) {
            res.status(errorResponse.status).json(errorResponse);
        }
    }
};
7 changes: 7 additions & 0 deletions backend/src/models/feedbackModel.ts
@@ -0,0 +1,7 @@
export interface Feedback {
    userID: string;
    timestamp: Date;
    eventType: "feedback" | "bug" | "suggestion"; // must match the RabbitMQ queue names
    rating: number;
    message: string;
}
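
For reference, a minimal sketch of a payload satisfying this interface — all values below are illustrative, not taken from the app, and the import path assumes the example lives next to the model:

import { Feedback } from "./feedbackModel";

// Illustrative values only; the user ID and message are made up.
const example: Feedback = {
    userID: "auth0|1234567890",
    timestamp: new Date("2024-10-26T08:30:00Z"),
    eventType: "suggestion", // doubles as the target queue name
    rating: 4,
    message: "Would love a dark mode for the lesson viewer.",
};

// Caveat: after a JSON.stringify/JSON.parse round trip through HTTP or RabbitMQ,
// timestamp arrives as the ISO string "2024-10-26T08:30:00.000Z", not a Date,
// even though the static type says Date.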
17 changes: 17 additions & 0 deletions backend/src/rabbitmq/rabbitmq_logs.json
@@ -0,0 +1,17 @@
[
{
"timeTaken": 1818
},
{
"timeTaken": 1718
},
{
"feedback": 1618
},
{
"bug": 1518
},
{
"suggestion": 1418
}
]
9 changes: 9 additions & 0 deletions backend/src/routes/feedbackRouter.ts
@@ -0,0 +1,9 @@
import * as feedbackController from '../controllers/feedbackController';
import { Router } from 'express';
import verifyToken from '../middleware/authMiddleware';

const router = Router();

router.post('/sendFeedback', verifyToken, feedbackController.sendFeedback);

export default router;
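
With the router mounted at /feedback in app.ts (which listens on port 3000), the endpoint can be exercised as sketched below. This is a hypothetical client call: the bearer-token scheme is an assumption, since verifyToken is not shown in this diff, and localhost is just a local-development placeholder.

// Hypothetical client call; adjust host and auth to your environment.
async function submitFeedback(token: string): Promise<void> {
    const res = await fetch("http://localhost:3000/feedback/sendFeedback", {
        method: "POST",
        headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${token}`, // assumed scheme for verifyToken
        },
        body: JSON.stringify({
            userID: "auth0|1234567890",
            timestamp: new Date().toISOString(),
            eventType: "bug",
            rating: 2,
            message: "Quiz progress resets after backgrounding the app.",
        }),
    });
    console.log(res.status); // 200 with body { message: "Published message successfully" }
}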
78 changes: 78 additions & 0 deletions backend/src/services/feedbackService.ts
@@ -0,0 +1,78 @@
import amqp from "amqplib";
import { Feedback } from "../models/feedbackModel";
import { s3 } from "../config/awsConfig"; // Assuming you already have an AWS config for S3

// Append a feedback record to the per-queue, per-user object in S3
async function uploadToS3(queue: string, newFeedback: Feedback) {
    const key = `${queue}/${newFeedback.userID}.json`;
    const params = {
        Bucket: "isb-raw-data-athena",
        Key: key,
    };
    let existingFeedback: any[] = [];

    try {
        // Read the existing object and parse its newline-delimited JSON records
        const existingData = await s3.getObject(params).promise();
        const fileContent = existingData.Body!.toString("utf-8");
        existingFeedback = fileContent
            .split("\n")
            .filter((line: string) => line.trim().length > 0)
            .map((line: string) => JSON.parse(line));
    } catch (error: any) {
        if (error.code === "NoSuchKey") {
            console.log("Creating new file");
        } else {
            // Rethrow so a transient read failure cannot overwrite existing records
            console.error("Error reading from S3", error);
            throw error;
        }
    }

    existingFeedback.push(newFeedback);
    const lineDelimitedJson = existingFeedback
        .map((item) => JSON.stringify(item))
        .join("\n");
    // Await the upload so failures propagate to the consumer and trigger a nack
    await s3
        .putObject({
            ...params,
            Body: lineDelimitedJson,
            ContentType: "application/json",
        })
        .promise();
}

// Define your queues (you can add more if necessary)
const QUEUE_NAMES = ["feedback", "bug", "suggestion"];

// Create a function to consume messages
async function consumeMessage() {
    try {
        const conn = await amqp.connect(process.env.RABBITMQ_URL!);
        const channel = await conn.createChannel();

        for (const queue of QUEUE_NAMES) {
            await channel.assertQueue(queue);
            channel.consume(queue, async (message) => {
                if (message !== null) {
                    const data = message.content.toString();
                    const parsedData: Feedback = JSON.parse(data);
                    try {
                        await uploadToS3(queue, parsedData);
                        channel.ack(message);
                        console.log(message);
                    } catch (error) {
                        console.error(`Error processing message from ${queue}: `, error);
                        channel.nack(message);
                    }
                }
            });
        }
    } catch (err) {
        console.error(`Error: ${err}`);
    }
}

export async function sendMessage(feedback: Feedback) {
    // The event type doubles as the queue name ("feedback" | "bug" | "suggestion")
    const queue = feedback.eventType;
    const conn = await amqp.connect(process.env.RABBITMQ_URL!);
    const channel = await conn.createChannel();
    await channel.assertQueue(queue);
    channel.sendToQueue(queue, Buffer.from(JSON.stringify(feedback)));
    await consumeMessage();
}
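
A note on the storage format used above: each S3 object is newline-delimited JSON keyed as <queue>/<userID>.json, one record per line — a layout Athena can query directly, which is presumably why the bucket is named isb-raw-data-athena. A minimal sketch of the round trip uploadToS3 performs (the stored strings are illustrative):

// Illustrative object body after two submissions from the same user:
const stored = [
    '{"userID":"auth0|123","eventType":"bug","rating":2,"message":"first"}',
    '{"userID":"auth0|123","eventType":"bug","rating":1,"message":"second"}',
].join("\n");

// The same parse step uploadToS3 runs before appending a new record:
const records = stored
    .split("\n")
    .filter((line) => line.trim().length > 0)
    .map((line) => JSON.parse(line));
console.log(records.length); // 2

Two caveats of this design: the read-modify-write pattern means two consumers handling messages for the same user and queue concurrently could interleave getObject and putObject and silently drop a record, and sendMessage opens a fresh connection and re-registers consumers on every call, so a longer-lived connection would avoid accumulating channels.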
3 changes: 3 additions & 0 deletions frontend/iQMA-Skills-Builder/app/screens/Home.tsx
@@ -21,13 +21,15 @@ import React, {useEffect, useRef, useState} from 'react';
import AsyncStorage from '@react-native-async-storage/async-storage';
import {AuthContext} from '@/context/AuthContext';
import {Colors} from '@/constants/Colors';
import FeedbackComponent from '@/components/Feedback';
import {Ionicons} from '@expo/vector-icons';
import {LoadingIndicator} from '@/components/LoadingIndicator';
import {SafeAreaView} from 'react-native-safe-area-context';
import SectionCard from '@/components/SectionCard';
import TopStats from '@/components/TopStats';
import {router} from 'expo-router';
import {useContext} from 'react';
import { packageFeedback } from '@/helpers/feedbackEndpoints';
import { globalStyles } from '@/constants/styles';

const HomeScreen: React.FC = () => {
@@ -465,6 +467,7 @@ const HomeScreen: React.FC = () => {
                    <Ionicons name="arrow-up" size={24} color="#7654F2" />
                </TouchableOpacity>
            )}
            <FeedbackComponent userID={currentUser.sub} />
        </SafeAreaView>
    );
};