Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: handle case thread when original model deleted #4280

Merged
merged 2 commits into from
Dec 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
51 changes: 43 additions & 8 deletions web/containers/ModelDropdown/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
useClickOutside,
} from '@janhq/joi'

import { useAtom, useAtomValue, useSetAtom } from 'jotai'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'

import {
ChevronDownIcon,
Expand All @@ -37,6 +37,7 @@

import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'

import { manualRecommendationModel } from '@/utils/model'
import {
getLogoEngine,
getTitleByEngine,
Expand Down Expand Up @@ -65,16 +66,21 @@
disabled?: boolean
}

export const modelDropdownStateAtom = atom(false)

const ModelDropdown = ({
disabled,
chatInputMode,
strictedThread = true,
}: Props) => {
const { downloadModel } = useDownloadModel()
const [modelDropdownState, setModelDropdownState] = useAtom(
modelDropdownStateAtom
)

const [searchFilter, setSearchFilter] = useState('local')
const [searchText, setSearchText] = useState('')
const [open, setOpen] = useState(false)
const [open, setOpen] = useState<boolean>(modelDropdownState)
const activeThread = useAtomValue(activeThreadAtom)
const activeAssistant = useAtomValue(activeAssistantAtom)
const downloadingModels = useAtomValue(getDownloadingModelAtom)
Expand All @@ -84,24 +90,40 @@
const [dropdownOptions, setDropdownOptions] = useState<HTMLDivElement | null>(
null
)

const downloadStates = useAtomValue(modelDownloadStateAtom)
const setThreadModelParams = useSetAtom(setThreadModelParamsAtom)
const { updateModelParameter } = useUpdateModelParameters()
const searchInputRef = useRef<HTMLInputElement>(null)
const configuredModels = useAtomValue(configuredModelsAtom)
const featuredModel = configuredModels.filter((x) =>
x.metadata?.tags?.includes('Featured')

const featuredModel = configuredModels.filter(
(x) =>
manualRecommendationModel.includes(x.id) &&
x.metadata?.tags?.includes('Featured') &&
x.metadata?.size < 5000000000
)
const { updateThreadMetadata } = useCreateNewThread()

useClickOutside(() => setOpen(false), null, [dropdownOptions, toggle])
useClickOutside(() => handleChangeStateOpen(false), null, [
dropdownOptions,
toggle,
])

const [showEngineListModel, setShowEngineListModel] = useAtom(
showEngineListModelAtom
)

const handleChangeStateOpen = useCallback(
(state: boolean) => {
setOpen(state)
setModelDropdownState(state)

Check warning on line 120 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

119-120 lines are not covered with tests
},
[setModelDropdownState]
)

const isModelSupportRagAndTools = useCallback((model: Model) => {
return (

Check warning on line 126 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

126 line is not covered with tests
model?.engine === InferenceEngine.openai ||
isLocalEngine(model?.engine as InferenceEngine)
)
Expand All @@ -112,7 +134,7 @@
configuredModels
.concat(
downloadedModels.filter(
(e) => !configuredModels.some((x) => x.id === e.id)

Check warning on line 137 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

137 line is not covered with tests
)
)
.filter((e) =>
Expand All @@ -122,29 +144,35 @@
if (searchFilter === 'local') {
return isLocalEngine(e.engine)
}
if (searchFilter === 'remote') {
return !isLocalEngine(e.engine)

Check warning on line 148 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

147-148 lines are not covered with tests
}
})
.sort((a, b) => a.name.localeCompare(b.name))

Check warning on line 151 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

151 line is not covered with tests
.sort((a, b) => {
const aInDownloadedModels = downloadedModels.some(
(item) => item.id === a.id

Check warning on line 154 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

153-154 lines are not covered with tests
)
const bInDownloadedModels = downloadedModels.some(
(item) => item.id === b.id

Check warning on line 157 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

156-157 lines are not covered with tests
)
if (aInDownloadedModels && !bInDownloadedModels) {
return -1
} else if (!aInDownloadedModels && bInDownloadedModels) {
return 1

Check warning on line 162 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

159-162 lines are not covered with tests
} else {
return 0

Check warning on line 164 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

164 line is not covered with tests
}
}),
[configuredModels, searchText, searchFilter, downloadedModels]
)

useEffect(() => {
if (modelDropdownState && chatInputMode) {
setOpen(modelDropdownState)
}
}, [chatInputMode, modelDropdownState])

useEffect(() => {
if (open && searchInputRef.current) {
searchInputRef.current.focus()
Expand All @@ -157,7 +185,7 @@

let model = downloadedModels.find((model) => model.id === modelId)
if (!model) {
model = recommendedModel
model = undefined
}
setSelectedModel(model)
}, [
Expand All @@ -170,10 +198,10 @@

const onClickModelItem = useCallback(
async (modelId: string) => {
if (!activeAssistant) return
const model = downloadedModels.find((m) => m.id === modelId)
setSelectedModel(model)
setOpen(false)

Check warning on line 204 in web/containers/ModelDropdown/index.tsx

View workflow job for this annotation

GitHub Actions / coverage-check

201-204 lines are not covered with tests

if (activeThread) {
// Change assistant tools based on whether the model supports RAG
Expand Down Expand Up @@ -343,14 +371,21 @@
'inline-block max-w-[200px] cursor-pointer overflow-hidden text-ellipsis whitespace-nowrap',
open && 'border border-transparent'
)}
onClick={() => setOpen(!open)}
onClick={() => handleChangeStateOpen(!open)}
>
<span>{selectedModel?.name}</span>
<span
className={twMerge(
!selectedModel && 'text-[hsla(var(--text-tertiary))]'
)}
>
{selectedModel?.name || 'Select Model'}
</span>
</Badge>
) : (
<Input
value={selectedModel?.name || ''}
className="cursor-pointer"
placeholder="Select Model"
disabled={disabled}
readOnly
suffixIcon={
Expand Down
5 changes: 1 addition & 4 deletions web/hooks/useCreateNewThread.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import { toaster } from '@/containers/Toast'
import { isLocalEngine } from '@/utils/modelEngine'

import { useActiveModel } from './useActiveModel'
import useRecommendedModel from './useRecommendedModel'

import useSetActiveThread from './useSetActiveThread'

Expand Down Expand Up @@ -71,8 +70,6 @@ export const useCreateNewThread = () => {
const experimentalEnabled = useAtomValue(experimentalFeatureEnabledAtom)
const setIsGeneratingResponse = useSetAtom(isGeneratingResponseAtom)

const { recommendedModel, downloadedModels } = useRecommendedModel()

const threads = useAtomValue(threadsAtom)
const { stopInference } = useActiveModel()

Expand All @@ -84,7 +81,7 @@ export const useCreateNewThread = () => {
setIsGeneratingResponse(false)
stopInference()

const defaultModel = model ?? recommendedModel ?? downloadedModels[0]
const defaultModel = model

if (!model) {
// If a model was passed in, the user is creating a new thread from the Model Hub. Allow it.
Expand Down
7 changes: 7 additions & 0 deletions web/hooks/useSendChatMessage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ import {
import { extractInferenceParams, extractModelLoadParams } from '@janhq/core'
import { atom, useAtom, useAtomValue, useSetAtom } from 'jotai'

import { modelDropdownStateAtom } from '@/containers/ModelDropdown'
import {
currentPromptAtom,
editPromptAtom,
Expand Down Expand Up @@ -73,6 +74,7 @@ export default function useSendChatMessage() {
const activeThreadRef = useRef<Thread | undefined>()
const activeAssistantRef = useRef<ThreadAssistantInfo | undefined>()
const setTokenSpeed = useSetAtom(tokenSpeedAtom)
const setModelDropdownState = useSetAtom(modelDropdownStateAtom)

const selectedModelRef = useRef<Model | undefined>()

Expand Down Expand Up @@ -122,6 +124,11 @@ export default function useSendChatMessage() {
return
}

if (selectedModelRef.current?.id === undefined) {
setModelDropdownState(true)
return
}

if (engineParamsUpdate) setReloadModel(true)
setTokenSpeed(undefined)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ import { modelDownloadStateAtom } from '@/hooks/useDownloadState'
import { useStarterScreen } from '@/hooks/useStarterScreen'

import { formatDownloadPercentage, toGibibytes } from '@/utils/converter'
import { manualRecommendationModel } from '@/utils/model'
import {
getLogoEngine,
getTitleByEngine,
Expand Down Expand Up @@ -56,15 +57,16 @@ const OnDeviceStarterScreen = ({ isShowStarterScreen }: Props) => {
const configuredModels = useAtomValue(configuredModelsAtom)
const setMainViewState = useSetAtom(mainViewStateAtom)

const recommendModel = ['llama3.2-1b-instruct', 'llama3.2-3b-instruct']

const featuredModel = configuredModels.filter((x) => {
const manualRecommendModel = configuredModels.filter((x) =>
recommendModel.includes(x.id)
manualRecommendationModel.includes(x.id)
)

if (manualRecommendModel.length === 2) {
return x.id === recommendModel[0] || x.id === recommendModel[1]
return (
x.id === manualRecommendationModel[0] ||
x.id === manualRecommendationModel[1]
)
} else {
return (
x.metadata?.tags?.includes('Featured') && x.metadata?.size < 5000000000
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ import useSendChatMessage from '@/hooks/useSendChatMessage'

import { getCurrentChatMessagesAtom } from '@/helpers/atoms/ChatMessage.atom'

import { selectedModelAtom } from '@/helpers/atoms/Model.atom'
import {
getActiveThreadIdAtom,
activeSettingInputBoxAtom,
Expand Down Expand Up @@ -78,7 +79,7 @@ const RichTextEditor = ({
const messages = useAtomValue(getCurrentChatMessagesAtom)
const { sendChatMessage } = useSendChatMessage()
const { stopInference } = useActiveModel()

const selectedModel = useAtomValue(selectedModelAtom)
const largeContentThreshold = 1000

// The decorate function identifies code blocks and marks the ranges
Expand Down Expand Up @@ -233,7 +234,9 @@ const RichTextEditor = ({
event.preventDefault()
if (messages[messages.length - 1]?.status !== MessageStatus.Pending) {
sendChatMessage(currentPrompt)
resetEditor()
if (selectedModel) {
resetEditor()
}
} else onStopInferenceClick()
}
},
Expand Down
105 changes: 55 additions & 50 deletions web/screens/Thread/ThreadCenterPanel/ChatInput/index.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -90,6 +90,12 @@ const ChatInput = () => {
}
}, [activeThreadId])

useEffect(() => {
if (!selectedModel && !activeSettingInputBox) {
setActiveSettingInputBox(true)
}
}, [activeSettingInputBox, selectedModel, setActiveSettingInputBox])

const onStopInferenceClick = async () => {
stopInference()
}
Expand Down Expand Up @@ -297,6 +303,7 @@ const ChatInput = () => {
</Button>
</div>
)}

{messages[messages.length - 1]?.status !== MessageStatus.Pending &&
!isGeneratingResponse &&
!isStreamingResponse ? (
Expand Down Expand Up @@ -340,55 +347,53 @@ const ChatInput = () => {
</div>
</div>

{activeSettingInputBox && (
<div
className={twMerge(
'absolute bottom-[5px] left-[1px] flex w-[calc(100%-10px)] items-center justify-between rounded-b-lg bg-[hsla(var(--center-panel-bg))] p-3 pr-0',
!activeThread && 'bg-transparent',
stateModel.loading && 'bg-transparent'
)}
>
<div className="flex items-center gap-x-2">
<ModelDropdown chatInputMode />
<Badge
theme="secondary"
className={twMerge(
'flex cursor-pointer items-center gap-x-1',
activeTabThreadRightPanel === 'model' &&
'border border-transparent'
)}
variant={
activeTabThreadRightPanel === 'model' ? 'solid' : 'outline'
<div
className={twMerge(
'absolute bottom-[5px] left-[1px] flex w-[calc(100%-10px)] items-center justify-between rounded-b-lg bg-[hsla(var(--center-panel-bg))] p-3 pr-0',
!activeThread && 'bg-transparent',
!activeSettingInputBox && 'hidden',
stateModel.loading && 'bg-transparent'
)}
>
<div className="flex items-center gap-x-2">
<ModelDropdown chatInputMode />
<Badge
theme="secondary"
className={twMerge(
'flex cursor-pointer items-center gap-x-1',
activeTabThreadRightPanel === 'model' &&
'border border-transparent'
)}
variant={
activeTabThreadRightPanel === 'model' ? 'solid' : 'outline'
}
onClick={() => {
// TODO @faisal: should be refactored later for a better experience between the tab and toggle button
if (showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
onClick={() => {
// TODO @faisal: should be refactor later and better experience beetwen tab and toggle button
if (showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (showRightPanel && activeTabThreadRightPanel === 'model') {
setShowRightPanel(false)
setActiveTabThreadRightPanel(undefined)
}
if (activeTabThreadRightPanel === undefined) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (
!showRightPanel &&
activeTabThreadRightPanel !== 'model'
) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
}}
>
<Settings2Icon
size={16}
className="flex-shrink-0 cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Badge>
</div>
if (showRightPanel && activeTabThreadRightPanel === 'model') {
setShowRightPanel(false)
setActiveTabThreadRightPanel(undefined)
}
if (activeTabThreadRightPanel === undefined) {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
if (!showRightPanel && activeTabThreadRightPanel !== 'model') {
setShowRightPanel(true)
setActiveTabThreadRightPanel('model')
}
}}
>
<Settings2Icon
size={16}
className="flex-shrink-0 cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Badge>
</div>
{selectedModel && (
<Button
theme="icon"
onClick={() => setActiveSettingInputBox(false)}
Expand All @@ -398,8 +403,8 @@ const ChatInput = () => {
className="cursor-pointer text-[hsla(var(--text-secondary))]"
/>
</Button>
</div>
)}
)}
</div>
</div>

<input
Expand Down
5 changes: 5 additions & 0 deletions web/utils/model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,3 +7,8 @@
export const normalizeModelId = (downloadUrl: string): string => {
  // The model id is the final path segment of the download URL.
  const segments = downloadUrl.split('/')
  const lastSegment = segments[segments.length - 1]
  // Fall back to the full input if no segment could be extracted.
  return lastSegment ?? downloadUrl
}

// Model ids that are manually surfaced as recommended/featured models
// (consumed by ModelDropdown and the on-device starter screen).
export const manualRecommendationModel = [
  'llama3.2-1b-instruct',
  'llama3.2-3b-instruct',
]
Loading