This repository has been archived by the owner on Apr 2, 2024. It is now read-only.

Commit

Creates chat and finalises the dashboard API, #68
Moves the websocket handler from main.go to its own package, chat, and moves the dashboard and chat startup logs into their respective packages.
hugolgst committed Mar 21, 2020
1 parent 087043c commit a72920e
Showing 4 changed files with 139 additions and 116 deletions.
123 changes: 123 additions & 0 deletions chat/websocket.go
@@ -0,0 +1,123 @@
package chat

import (
"encoding/json"
"fmt"
"net/http"
"reflect"
"time"

"github.com/olivia-ai/olivia/network"

"github.com/gookit/color"
"github.com/gorilla/websocket"
"github.com/olivia-ai/olivia/analysis"
"github.com/olivia-ai/olivia/user"
"github.com/olivia-ai/olivia/util"
gocache "github.com/patrickmn/go-cache"
)

var (
// The neural network, stored at package level so every handler can use it
neuralNetwork network.Network
// Initializes the cache with a 5-minute lifetime
cache = gocache.New(5*time.Minute, 5*time.Minute)
)

// Configure the upgrader
var upgrader = websocket.Upgrader{
CheckOrigin: func(r *http.Request) bool {
return true
},
}

// RequestMessage is the structure of the messages sent by clients over the websocket
type RequestMessage struct {
Content string `json:"content"`
Token string `json:"user_token"`
Information user.Information `json:"information"`
}

// ResponseMessage is the structure used to reply to the user through the websocket
type ResponseMessage struct {
Content string `json:"content"`
Tag string `json:"tag"`
Information user.Information `json:"information"`
}

// Serve serves the websocket on the given port
func Serve(_neuralNetwork network.Network, port string) {
// Store the given network in the package-level variable
neuralNetwork = _neuralNetwork

http.HandleFunc("/", Handle)

magenta := color.FgMagenta.Render
fmt.Printf("\nChat Websocket listening on the port %s...\n", magenta(port))

// Serves the chat
err := http.ListenAndServe(":"+port, nil)
if err != nil {
panic(err)
}
}

// Handle manages incoming connections and replies using the neural network
func Handle(w http.ResponseWriter, r *http.Request) {
conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
return
}
fmt.Println(color.FgGreen.Render("A new connection has been opened"))

for {
// Read message from browser
msgType, msg, err := conn.ReadMessage()
if err != nil {
// Stop reading once the connection is closed or errors out
break
}

// Deserialize the JSON content of the message
var request RequestMessage
if err = json.Unmarshal(msg, &request); err != nil {
continue
}

// Store the client's information if none has been saved for this token yet
if reflect.DeepEqual(user.GetUserInformation(request.Token), user.Information{}) {
user.SetUserInformation(request.Token, request.Information)
}

// Write message back to browser
response := Reply(request)
if err = conn.WriteMessage(msgType, response); err != nil {
continue
}
}
}

// Reply takes the incoming message and returns the answer as a byte slice
func Reply(request RequestMessage) []byte {
var responseSentence, responseTag string

// Respond with a message from res/messages.json if the request content is too long
if len(request.Content) > 500 {
responseTag = "too long"
responseSentence = util.GetMessage(responseTag)
} else {
responseTag, responseSentence = analysis.NewSentence(
request.Content,
).Calculate(*cache, neuralNetwork, request.Token)
}

// Marshal the response to JSON
response := ResponseMessage{
Content: responseSentence,
Tag: responseTag,
Information: user.GetUserInformation(request.Token),
}

bytes, err := json.Marshal(response)
if err != nil {
panic(err)
}

return bytes
}
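
For context, clients of this new package exchange the RequestMessage and ResponseMessage JSON defined above over the websocket. The following is a minimal, hypothetical client sketch using the same gorilla/websocket library; the address, port, and token value are placeholders for illustration, not part of this commit.

package main

import (
	"fmt"
	"log"

	"github.com/gorilla/websocket"
)

// Local mirrors of the chat package's payloads, reduced to the fields used here.
type requestMessage struct {
	Content string `json:"content"`
	Token   string `json:"user_token"`
}

type responseMessage struct {
	Content string `json:"content"`
	Tag     string `json:"tag"`
}

func main() {
	// Connect to the websocket served by chat.Serve on port 8080 (host assumed to be localhost).
	conn, _, err := websocket.DefaultDialer.Dial("ws://localhost:8080/", nil)
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	// Send one message; the token is a placeholder.
	request := requestMessage{Content: "Hello", Token: "example-token"}
	if err := conn.WriteJSON(request); err != nil {
		log.Fatal(err)
	}

	// Read the reply produced by Reply on the server side.
	var response responseMessage
	if err := conn.ReadJSON(&response); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("[%s] %s\n", response.Tag, response.Content)
}

Since Handle echoes back the frame type it received, WriteJSON (which sends text frames) and ReadJSON round-trip cleanly here.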
11 changes: 9 additions & 2 deletions dashboard/api.go
@@ -2,9 +2,12 @@ package dashboard

import (
"encoding/json"
"fmt"
"log"
"net/http"

"github.com/gookit/color"

"github.com/gorilla/mux"
"github.com/olivia-ai/olivia/network"
)
@@ -29,7 +32,7 @@ type Training struct {
}

// Serve serves the dashboard REST API on the given port (8081 from main by default).
func Serve(_neuralNetwork network.Network) {
func Serve(_neuralNetwork network.Network, port string) {
// Store the given network in the package-level variable
neuralNetwork = _neuralNetwork

@@ -38,7 +41,11 @@ func Serve(_neuralNetwork network.Network) {
// Create the routes
router.HandleFunc("/dashboard", GetDashboardData).Methods("GET")

log.Fatal(http.ListenAndServe(":8081", router))
magenta := color.FgMagenta.Render
fmt.Printf("Dashboard API listening on the port %s...\n", magenta(port))

// Serves the dashboard
log.Fatal(http.ListenAndServe(":"+port, router))
}

// GetDashboardData encodes the json for the dashboard data
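
With Serve now taking the port as an argument, the /dashboard route registered above is reachable on whatever port the caller passes (main.go passes "8081"). Below is a minimal sketch of querying it, assuming the default host and port.

package main

import (
	"fmt"
	"io"
	"log"
	"net/http"
)

func main() {
	// Fetch the JSON produced by GetDashboardData on the /dashboard route.
	resp, err := http.Get("http://localhost:8081/dashboard")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	body, err := io.ReadAll(resp.Body)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(body))
}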
119 changes: 6 additions & 113 deletions main.go
@@ -1,129 +1,22 @@
package main

import (
"encoding/json"
"fmt"
"net/http"
"os"
"reflect"
"time"

"github.com/olivia-ai/olivia/chat"
"github.com/olivia-ai/olivia/dashboard"

"github.com/gookit/color"
"github.com/gorilla/websocket"
"github.com/olivia-ai/olivia/analysis"
"github.com/olivia-ai/olivia/training"
"github.com/olivia-ai/olivia/user"
"github.com/olivia-ai/olivia/util"
gocache "github.com/patrickmn/go-cache"
)

var (
model = training.CreateNeuralNetwork()
cache = gocache.New(5*time.Minute, 5*time.Minute)
// Initialize the neural network by training it
neuralNetwork = training.CreateNeuralNetwork()
)

// Configure the upgrader
var upgrader = websocket.Upgrader{
CheckOrigin: func(r *http.Request) bool {
return true
},
}

type RequestMessage struct {
Content string `json:"content"`
Token string `json:"user_token"`
Information user.Information `json:"information"`
}

type ResponseMessage struct {
Content string `json:"content"`
Tag string `json:"tag"`
Information user.Information `json:"information"`
}

func main() {
http.HandleFunc("/", Handle)

port := "8080"
if os.Getenv("PORT") != "" {
port = os.Getenv("PORT")
}

magenta := color.FgMagenta.Render

// Serve the REST API inside a go routine
go func() {
fmt.Printf("Dashboard API listening on the port %s...\n", magenta(8081))

// Serve the API
dashboard.Serve(model)
dashboard.Serve(neuralNetwork, "8081")
}()

fmt.Printf("\nChat Websocket listening on the port %s...\n", magenta(port))

// Serves the websocket
err := http.ListenAndServe(":"+port, nil)
if err != nil {
panic(err)
}
}

func Handle(w http.ResponseWriter, r *http.Request) {
conn, _ := upgrader.Upgrade(w, r, nil)
fmt.Println(color.FgGreen.Render("A new connection has been opened"))

for {
// Read message from browser
msgType, msg, err := conn.ReadMessage()
if err != nil {
continue
}

// Deserialize the JSON content of the message
var request RequestMessage
if err = json.Unmarshal(msg, &request); err != nil {
continue
}

// Store the client's information if none has been saved for this token yet
if reflect.DeepEqual(user.GetUserInformation(request.Token), user.Information{}) {
user.SetUserInformation(request.Token, request.Information)
}

// Write message back to browser
response := Reply(request)
if err = conn.WriteMessage(msgType, response); err != nil {
continue
}
}
}

func Reply(request RequestMessage) []byte {
var responseSentence, responseTag string

// Respond with a message from res/messages.json if the request content is too long
if len(request.Content) > 500 {
responseTag = "too long"
responseSentence = util.GetMessage(responseTag)
} else {
responseTag, responseSentence = analysis.NewSentence(
request.Content,
).Calculate(*cache, model, request.Token)
}

// Marshal the response to JSON
response := ResponseMessage{
Content: responseSentence,
Tag: responseTag,
Information: user.GetUserInformation(request.Token),
}

bytes, err := json.Marshal(response)
if err != nil {
panic(err)
}

return bytes
// Serves the chat
chat.Serve(neuralNetwork, "8080")
}
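
For reference, after this commit the entry point reduces to wiring the two packages together. Reconstructed from the kept lines above, the new main.go is roughly the following (exact blank lines and comments may differ):

package main

import (
	"github.com/olivia-ai/olivia/chat"
	"github.com/olivia-ai/olivia/dashboard"
	"github.com/olivia-ai/olivia/training"
)

var (
	// Initialize the neural network by training it
	neuralNetwork = training.CreateNeuralNetwork()
)

func main() {
	// Serve the REST API inside a go routine
	go func() {
		dashboard.Serve(neuralNetwork, "8081")
	}()

	// Serves the chat
	chat.Serve(neuralNetwork, "8080")
}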
2 changes: 1 addition & 1 deletion res/training.json

Large diffs are not rendered by default.
