# NOTE(review): GitHub page chrome ("You signed in with another tab or window…")
# was captured during extraction and is not part of this source file.
require "tasker"
require "mutex"
require "openai"
require "placeos-driver/storage"

module PlaceOS::Api
  # Manages ChatGPT-backed chat sessions over WebSockets for a control system.
  #
  # Responsibilities visible in this class:
  #  * tracks one socket per `ws.object_id` along with its OpenAI client,
  #    completion request (rolling message history) and function executor
  #  * keeps sockets alive with a 10s ping task
  #  * persists chat history via `PlaceOS::Model::ChatMessage`
  #  * drives the OpenAI function-calling loop, compressing history once a
  #    task completes to keep token usage down
  class ChatGPT::ChatManager
    Log = ::Log.for(self)

    alias RemoteDriver = ::PlaceOS::Driver::Proxy::RemoteDriver

    # socket id => {socket, chat id, OpenAI client, rolling request, executor}
    private getter ws_sockets = {} of UInt64 => {HTTP::WebSocket, String, OpenAI::Client, OpenAI::ChatCompletionRequest, OpenAI::FunctionExecutor}
    # socket id => keep-alive ping task (cancelled on close)
    private getter ws_ping_tasks : Hash(UInt64, Tasker::Repeat(Nil)) = {} of UInt64 => Tasker::Repeat(Nil)
    # reentrant: manage_chat may run while start_session still holds the lock
    private getter ws_lock = Mutex.new(protection: :reentrant)
    private getter app : ChatGPT

    # module slug + status key where the system prompt payload is published
    LLM_DRIVER        = "LLM"
    LLM_DRIVER_PROMPT = "prompt"

    def initialize(@app)
    end

    # Registers a WebSocket chat session for `system_id`.
    # If `existing_chat` is provided the OpenAI state is rebuilt immediately
    # (resume); otherwise state is created lazily on the first message in
    # `manage_chat`. Closes any previous socket sharing the same object id.
    def start_session(ws : HTTP::WebSocket, existing_chat : PlaceOS::Model::Chat?, system_id : String)
      ws_lock.synchronize do
        ws_id = ws.object_id
        if existing_socket = ws_sockets[ws_id]?
          existing_socket[0].close rescue nil
        end

        if chat = existing_chat
          Log.debug { {chat_id: chat.id, message: "resuming chat session"} }
          # nil payload => build_prompt replays history from the database
          client, executor, chat_completion = setup(chat, nil)
          ws_sockets[ws_id] = {ws, chat.id.as(String), client, chat_completion, executor}
        else
          Log.debug { {message: "starting new chat session"} }
        end

        # keep-alive; failures are ignored (socket close is handled below)
        ws_ping_tasks[ws_id] = Tasker.every(10.seconds) do
          ws.ping rescue nil
          nil
        end

        ws.on_message { |message| manage_chat(ws, message, system_id) }

        ws.on_close do
          if task = ws_ping_tasks.delete(ws_id)
            task.cancel
          end
          ws_sockets.delete(ws_id)
        end
      end
    end

    # Handles a single inbound user message: prefixes the local timestamp
    # (when the system has a timezone), lazily creates the chat + OpenAI
    # state on first use, then runs the interaction loop, streaming each
    # response/progress frame back over the socket as JSON.
    # Any failure is logged, reported to the client, and closes the socket.
    private def manage_chat(ws : HTTP::WebSocket, message : String, system_id : String)
      if timezone = Model::ControlSystem.find!(system_id).timezone
        now = Time.local(timezone)
        message = "sent at: #{now}\nday of week: #{now.day_of_week}\n#{message}"
      end

      ws_lock.synchronize do
        ws_id = ws.object_id
        _, chat_id, client, completion_req, executor = ws_sockets[ws_id]? || begin
          # first message of a brand new chat: persist it and build state
          # using the driver-published system prompt
          chat = PlaceOS::Model::Chat.create!(user_id: app.current_user.id.as(String), system_id: system_id, summary: message)
          id = chat.id.as(String)
          c, e, req = setup(chat, driver_prompt(chat))
          ws_sockets[ws_id] = {ws, id, c, req, e}
          {ws, id, c, req, e}
        end

        openai_interaction(client, completion_req, executor, message, chat_id) do |resp|
          ws.send(resp.to_json)
        end
      end
    rescue error
      Log.warn(exception: error) { "failure processing chat message" }
      ws.send({message: "error: #{error}"}.to_json)
      ws.close
    end

    # Builds the per-chat OpenAI state: client, function executor and the
    # completion request seeded with the prompt (or replayed history when
    # `chat_payload` is nil).
    private def setup(chat, chat_payload)
      client = build_client
      executor = build_executor(chat)
      chat_completion = build_completion(build_prompt(chat, chat_payload), executor.functions)
      {client, executor, chat_completion}
    end

    # Azure config when an api_base is set, otherwise OpenAI defaults.
    private def build_client
      app_config = app.config
      config = if base = app_config.api_base
                 OpenAI::Client::Config.azure(api_key: app_config.api_key, api_base: base)
               else
                 OpenAI::Client::Config.default(api_key: app_config.api_key)
               end
      OpenAI::Client.new(config)
    end

    private def build_completion(messages, functions)
      OpenAI::ChatCompletionRequest.new(
        model: OpenAI::GPT4, # required for competent use of functions
        messages: messages,
        functions: functions,
        function_call: "auto"
      )
    end

    # last reported total token usage for the active request history
    @total_tokens : Int32 = 0

    # Core interaction loop. Appends the user message, then repeatedly calls
    # the chat completion endpoint, executing any requested functions, until
    # the model produces a plain response for the user. Yields NamedTuples
    # (type: :progress or :response) for the caller to forward to the client.
    #
    # Token accounting: `discardable_tokens` accumulates usage attributable to
    # function-call traffic; on `task_complete` (and on the final response)
    # those messages are pruned via `cleanup_messages` and the estimate of
    # remaining history size is reduced accordingly.
    private def openai_interaction(client, request, executor, message, chat_id, &) : Nil
      request.messages << OpenAI::ChatMessage.new(role: :user, content: message)
      save_history(chat_id, :user, message)

      # track token usage
      discardable_tokens = 0
      tracking_total = 0
      calculate_discard = false

      loop do
        # ensure new request will fit here
        # cleanup old messages, saving first system prompt and then removing messages beyond that until we're within the limit
        # we could also restore messages once a task has been completed if there is space
        # TODO::

        # track token usage
        resp = client.chat_completion(request)
        @total_tokens = resp.usage.total_tokens
        if calculate_discard
          # prompt growth since last round came from function results
          discardable_tokens += resp.usage.prompt_tokens - tracking_total
          calculate_discard = false
        end
        tracking_total = @total_tokens

        # save relevant history
        msg = resp.choices.first.message
        request.messages << msg
        save_history(chat_id, msg) unless msg.function_call || (msg.role.function? && msg.name != "task_complete")

        # perform function calls until we get a response for the user
        if func_call = msg.function_call
          discardable_tokens += resp.usage.completion_tokens

          # handle the AI not providing a valid function name, we want it to retry
          func_res = begin
            executor.execute(func_call)
          rescue ex
            Log.error(exception: ex) { "executing function call" }
            reply = "Encountered error: #{ex.message}"
            result = DriverResponse.new(reply).as(JSON::Serializable)
            request.messages << OpenAI::ChatMessage.new(:function, result.to_pretty_json, func_call.name)
            next
          end

          # process the function result
          case func_res.name
          when "task_complete"
            # task finished: compress history and reset the discard counter
            cleanup_messages(request, discardable_tokens)
            discardable_tokens = 0
            summary = TaskCompleted.from_json func_call.arguments.as_s
            yield({chat_id: chat_id, message: "condensing progress: #{summary.details}", type: :progress, function: func_res.name, usage: resp.usage, compressed_usage: @total_tokens})
          when "list_function_schemas"
            calculate_discard = true
            discover = FunctionDiscovery.from_json func_call.arguments.as_s
            yield({chat_id: chat_id, message: "checking #{discover.id} capabilities", type: :progress, function: func_res.name, usage: resp.usage})
          when "call_function"
            calculate_discard = true
            execute = FunctionExecutor.from_json func_call.arguments.as_s
            yield({chat_id: chat_id, message: "performing action: #{execute.id}.#{execute.function}(#{execute.parameters})", type: :progress, function: func_res.name, usage: resp.usage})
          end
          request.messages << func_res
          next
        end

        # plain assistant message: final answer for this user message
        cleanup_messages(request, discardable_tokens)
        yield({chat_id: chat_id, message: msg.content, type: :response, usage: resp.usage, compressed_usage: @total_tokens})
        break
      end
    end

    # Drops function-call chatter from the rolling request history and lowers
    # the token estimate by the amount attributed to those messages.
    private def cleanup_messages(request, discardable_tokens)
      # keep task summaries
      request.messages.reject! { |mess| mess.function_call || (mess.role.function? && mess.name != "task_complete") }

      # a good estimate of the total tokens once the cleanup is complete
      @total_tokens = @total_tokens - discardable_tokens
    end

    # Persists one message of chat history to the database.
    private def save_history(chat_id : String, role : PlaceOS::Model::ChatMessage::Role, message : String, func_name : String? = nil, func_args : JSON::Any? = nil) : Nil
      PlaceOS::Model::ChatMessage.create!(role: role, chat_id: chat_id, content: message, function_name: func_name, function_args: func_args)
    end

    # Convenience overload mapping an OpenAI message onto the model fields.
    private def save_history(chat_id : String, msg : OpenAI::ChatMessage)
      save_history(chat_id, PlaceOS::Model::ChatMessage::Role.parse(msg.role.to_s), msg.content || "", msg.name, msg.function_call.try &.arguments)
    end

    # Builds the initial message list for a completion request.
    # With a payload (new chat): constructs the system prompt from the driver
    # payload plus user identity details, and persists it as history.
    # Without a payload (resumed chat): replays the stored history verbatim.
    private def build_prompt(chat : PlaceOS::Model::Chat, chat_payload : Payload?)
      messages = [] of OpenAI::ChatMessage

      if payload = chat_payload
        user = Model::User.find!(chat.user_id)
        messages << OpenAI::ChatMessage.new(role: :system, content: String.build { |str|
          str << payload.prompt
          str << "\n\nrequest function lists and call functions as required to fulfil requests.\n"
          str << "make sure to interpret results and reply appropriately once you have all the information.\n"
          str << "remember to only use valid capability ids, they can be found in this JSON:\n```json\n#{payload.capabilities.to_json}\n```\n\n"
          str << "my name is: #{user.name}\n"
          str << "my email is: #{user.email}\n"
          str << "my phone number is: #{user.phone}\n" if user.phone.presence
          str << "my swipe card number is: #{user.card_number}\n" if user.card_number.presence
          str << "my user_id is: #{user.id}\n"
          str << "use these details in function calls as required.\n"
          str << "perform one task at a time, making as many function calls as required to complete a task. Once a task is complete call the task_complete function with details of the progress you've made.\n"
          str << "the chat client prepends the date-time each message was sent at in the following format YYYY-MM-DD HH:mm:ss +ZZ:ZZ:ZZ"
        })

        messages.each { |m| save_history(chat.id.as(String), m) }
      else
        chat.messages.each do |hist|
          func_call = nil
          if hist.role.to_s == "function"
            if name = hist.function_name
              args = hist.function_args || JSON::Any.new(nil)
              func_call = OpenAI::ChatFunctionCall.new(name, args)
            end
          end
          messages << OpenAI::ChatMessage.new(
            role: OpenAI::ChatMessageRole.parse(hist.role.to_s),
            content: hist.content,
            name: hist.function_name,
            function_call: func_call
          )
        end
      end

      messages
    end

    # Fetches the system prompt payload published by the LLM driver.
    private def driver_prompt(chat : PlaceOS::Model::Chat) : Payload
      Payload.from_json grab_driver_status(chat, LLM_DRIVER, LLM_DRIVER_PROMPT)
    end

    # Registers the three functions exposed to the model:
    # schema discovery, driver function execution, and task completion.
    private def build_executor(chat)
      executor = OpenAI::FunctionExecutor.new

      executor.add(
        name: "list_function_schemas",
        description: "if a request could benefit from a capability, obtain the list of function schemas by providing the id string",
        clz: FunctionDiscovery
      ) do |call|
        request = call.as(FunctionDiscovery)
        reply = "No response received"
        begin
          reply = grab_driver_status(chat, request.id, "function_schemas")
        rescue ex
          Log.error(exception: ex) { {id: request.id, status: "function_schemas"} }
          reply = "Encountered error: #{ex.message}"
        end
        DriverResponse.new(reply).as(JSON::Serializable)
      end

      executor.add(
        name: "call_function",
        description: "Executes functionality offered by a capability, you'll need to obtain the function schema to perform requests",
        clz: FunctionExecutor
      ) do |call|
        request = call.as(FunctionExecutor)
        reply = "No response received"
        begin
          resp, code = exec_driver_func(chat, request.id, request.function, request.parameters)
          # only surface the body on 2xx; otherwise keep the default reply
          reply = resp if 200 <= code <= 299
        rescue ex
          Log.error(exception: ex) { {id: request.id, function: request.function, args: request.parameters.to_s} }
          reply = "Encountered error: #{ex.message}"
        end
        DriverResponse.new(reply).as(JSON::Serializable)
      end

      executor.add(
        name: "task_complete",
        description: "Once a task is complete, call this function with the details that are relevant to the conversion. Provide enough detail so you don't perform the actions again and can formulate a response to the user",
        clz: TaskCompleted
      ) do |call|
        request = call.as(TaskCompleted)
        # echoed back as the function result; also triggers history cleanup
        request.as(JSON::Serializable)
      end

      executor
    end

    # Executes `method` on `module_name` (index 1) of the chat's system via a
    # remote driver proxy, using the requesting user's security clearance.
    # Returns whatever `RemoteDriver#exec` returns (response body and code).
    private def exec_driver_func(chat, module_name, method, args = nil)
      remote_driver = RemoteDriver.new(
        sys_id: chat.system_id,
        module_name: module_name,
        index: 1,
        discovery: app.class.core_discovery,
        user_id: chat.user_id,
      ) { |module_id|
        Model::Module.find!(module_id).edge_id.as(String)
      }

      remote_driver.exec(
        security: app.driver_clearance(app.user_token),
        function: method,
        args: args
      )
    end

    # Reads status `key` from `module_slug` (e.g. "LLM_1") on the chat's
    # system directly from redis storage. Raises if the module isn't present.
    private def grab_driver_status(chat, module_slug : String, key : String) : String
      module_name, index = RemoteDriver.get_parts(module_slug)
      module_id = ::PlaceOS::Driver::Proxy::System.module_id?(
        system_id: chat.system_id,
        module_name: module_name,
        index: index
      )

      if module_id
        storage = Driver::RedisStorage.new(module_id)
        storage[key]
      else
        raise "error obtaining chat prompt, #{module_slug} not found on system #{chat.system_id}"
      end
    end

    # Schema for the model's "call_function" requests.
    private struct FunctionExecutor
      extend OpenAI::FuncMarker
      include JSON::Serializable

      @[JSON::Field(description: "The ID of the capability, exactly as provided in the capability list")]
      getter id : String

      @[JSON::Field(description: "The name of the function")]
      getter function : String

      @[JSON::Field(description: "a JSON hash representing the named arguments of the function, as per the JSON schema provided")]
      getter parameters : JSON::Any?
    end

    # Schema for the model's "list_function_schemas" requests.
    private struct FunctionDiscovery
      extend OpenAI::FuncMarker
      include JSON::Serializable

      @[JSON::Field(description: "The ID of the capability, exactly as provided in the capability list")]
      getter id : String
    end

    # Schema for the model's "task_complete" requests.
    private struct TaskCompleted
      extend OpenAI::FuncMarker
      include JSON::Serializable

      @[JSON::Field(description: "the details of the task that are relevant to continuing the conversion")]
      getter details : String
    end

    # Wrapper used to return driver replies/errors to the model as JSON.
    private record DriverResponse, body : String do
      include JSON::Serializable
    end

    # Prompt payload published by the LLM driver (see driver_prompt).
    struct Payload
      include JSON::Serializable

      getter prompt : String
      getter capabilities : Array(Capabilities)
      getter system_id : String

      record Capabilities, id : String, capability : String do
        include JSON::Serializable
      end
    end
  end
end
# NOTE(review): trailing GitHub issue-page residue removed. Provenance from the
# captured permalink: rest-api/src/placeos-rest-api/controllers/chat_gpt/chat_manager.cr,
# line 122 at commit 0130973.