diff --git a/.env.example b/.env.example
index 523d0308e..e118ce98c 100644
--- a/.env.example
+++ b/.env.example
@@ -2,6 +2,7 @@
 NEXTAUTH_URL=http://localhost:4002
 # You can use openssl to generate a random 32 character key: openssl rand -base64 32
 NEXTAUTH_SECRET=rZTFtfNuSMajLnfFrWT2PZ3lX8WZv7W/Xs2H8hkEY6g=
+NEXTAUTH_ADMIN_CREDENTIALS=super@boxyhq.com:999login
 
 # SMTP / Email settings
 SMTP_HOST=
@@ -11,7 +12,7 @@ SMTP_PASSWORD=
 SMTP_FROM=
 
 # If you are using Docker, you can retrieve the values from: docker-compose.yml
-DATABASE_URL=postgresql://:@localhost:5432/
+DATABASE_URL=postgresql://admin:admin@localhost:5432/saas-starter-kit
 
 APP_URL=http://localhost:4002
 
@@ -29,7 +30,7 @@ RETRACED_API_KEY=
 RETRACED_PROJECT_ID=
 
 # Hide landing page and redirect to login page
-HIDE_LANDING_PAGE=false
+HIDE_LANDING_PAGE=true
 
 # SSO groups can be prefixed with this identifier in order to avoid conflicts with other groups.
 # For example boxyhq-admin would be resolved to admin, boxyhq-member would be resolved to member, etc.
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..51ca18f19
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,76 @@
+ARG NODEJS_IMAGE=node:20.18.1-alpine3.19
+FROM --platform=$BUILDPLATFORM $NODEJS_IMAGE AS base
+
+# Install dependencies only when needed
+FROM base AS deps
+# Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed.
+RUN apk add --no-cache libc6-compat
+WORKDIR /app
+
+# Install dependencies based on the preferred package manager
+COPY package.json package-lock.json ./
+COPY prisma ./prisma
+RUN npm install
+RUN npm rebuild --arch=x64 --platform=linux --libc=musl sharp
+
+# Generate prisma client for production
+RUN npx prisma generate
+
+# Rebuild the source code only when needed
+FROM base AS builder
+WORKDIR /app
+
+COPY --from=deps /app/node_modules ./node_modules
+COPY . .
+
+ENV NEXT_TELEMETRY_DISABLED=1
+ENV NEXT_PUBLIC_TERMS_URL=https://boxyhq.com/terms.html
+ENV NEXT_PUBLIC_PRIVACY_URL=https://boxyhq.com/privacy.html
+ENV NEXT_PUBLIC_DARK_MODE=false
+ENV NEXT_PUBLIC_RECAPTCHA_SITE_KEY=6LfsAbMoAAAAAIl_yao0rxsz1IWk0UaYp2ofpNiy
+ENV NEXT_PUBLIC_MIXPANEL_TOKEN=64202bc81e38778793e3959c16aa9704
+ENV NEXT_PUBLIC_SUPPORT_URL="mailto:support@boxyhq.com"
+
+RUN npm run build-ci
+
+
+# Production image, copy all the files and run next
+FROM $NODEJS_IMAGE AS runner
+WORKDIR /app
+
+ENV NODE_OPTIONS="--max-http-header-size=81920 --dns-result-order=ipv4first"
+
+
+ENV NODE_ENV=production
+# Disable telemetry during runtime.
+ENV NEXT_TELEMETRY_DISABLED=1 + +RUN addgroup --system --gid 1001 nodejs +RUN adduser --system --uid 1001 nextjs + + +COPY --from=builder /app/public ./public +COPY --from=builder /app/prisma ./prisma + +COPY --from=builder /app/sync-stripe.js ./sync-stripe.js +COPY --from=builder /app/delete-team.js ./delete-team.js + +# Automatically leverage output traces to reduce image size +# https://nextjs.org/docs/advanced-features/output-file-tracing +COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./ +COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static +RUN npm i -g json +# Print the value of devDependencies.prisma +RUN echo "Prisma Version: $(cat ./package.json| json devDependencies.prisma)" + +RUN npm i -g prisma@$(cat ./package.json| json devDependencies.prisma) + +RUN apk add --no-cache postgresql-client + +ENV PORT=4002 + +USER nextjs + +EXPOSE 4002 + +CMD ["node", "server.js"] \ No newline at end of file diff --git a/components/account/ManageSessions.tsx b/components/account/ManageSessions.tsx index e391f2866..e75b69512 100644 --- a/components/account/ManageSessions.tsx +++ b/components/account/ManageSessions.tsx @@ -1,7 +1,8 @@ import useSWR from 'swr'; import { useState } from 'react'; import { useTranslation } from 'next-i18next'; -import { ComputerDesktopIcon } from '@heroicons/react/24/outline'; +import { Laptop } from 'lucide-react'; + import toast from 'react-hot-toast'; import fetcher from '@/lib/fetcher'; @@ -75,7 +76,7 @@ const ManageSessions = () => { wrap: true, element: ( - + {session.isCurrent ? t('this-browser') : t('other')} ), diff --git a/components/account/UpdateAccount.tsx b/components/account/UpdateAccount.tsx index cc95a6e06..73ee43196 100644 --- a/components/account/UpdateAccount.tsx +++ b/components/account/UpdateAccount.tsx @@ -12,7 +12,7 @@ interface UpdateAccountProps { const UpdateAccount = ({ user, allowEmailChange }: UpdateAccountProps) => { return ( -
+
diff --git a/components/account/UpdateTheme.tsx b/components/account/UpdateTheme.tsx index a1048a528..ebeee133a 100644 --- a/components/account/UpdateTheme.tsx +++ b/components/account/UpdateTheme.tsx @@ -1,4 +1,4 @@ -import { ChevronUpDownIcon } from '@heroicons/react/24/outline'; +import { ChevronsUpDown } from 'lucide-react'; import { Card } from '@/components/shared'; import useTheme from 'hooks/useTheme'; @@ -23,7 +23,7 @@ const UpdateTheme = () => {
{selectedTheme.name}
- +
    }) => { : 'group-hover:bg-gray-50' }`} > - { rel="noopener noreferrer" > {t('contact-support')} - +
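Across the component diffs above and below (ManageSessions, UpdateTheme, LinkToPortal), this PR applies the same mechanical change: the Heroicons outline import is replaced by the equivalent lucide-react icon and the JSX usage is renamed in place. A minimal sketch of the pattern using the ManageSessions icons; the className and the wrapper component name are illustrative assumptions, not taken from the PR:

// Before: Heroicons outline icon
// import { ComputerDesktopIcon } from '@heroicons/react/24/outline';
// <ComputerDesktopIcon className="w-5 h-5" />

// After: the lucide-react equivalent used by this PR (hypothetical wrapper for illustration)
import { Laptop } from 'lucide-react';

const SessionDeviceIcon = () => <Laptop className="w-5 h-5" />;

export default SessionDeviceIcon;

Both libraries render plain SVG components that accept className, so the swap is normally just the import plus the element name.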
diff --git a/components/billing/LinkToPortal.tsx b/components/billing/LinkToPortal.tsx index dcd546539..a29a99ca4 100644 --- a/components/billing/LinkToPortal.tsx +++ b/components/billing/LinkToPortal.tsx @@ -1,7 +1,7 @@ import toast from 'react-hot-toast'; import { Button } from 'react-daisyui'; import { useState } from 'react'; -import { ArrowTopRightOnSquareIcon } from '@heroicons/react/24/outline'; +import { SquareArrowOutUpRight } from 'lucide-react'; import { useTranslation } from 'next-i18next'; import { Card } from '@/components/shared'; @@ -57,7 +57,7 @@ const LinkToPortal = ({ team }: LinkToPortalProps) => { onClick={() => openStripePortal()} > {t('billing-portal')} - + diff --git a/components/chats/chat/Chat.tsx b/components/chats/chat/Chat.tsx new file mode 100644 index 000000000..c5e190118 --- /dev/null +++ b/components/chats/chat/Chat.tsx @@ -0,0 +1,441 @@ +import { useContext, useEffect, useRef, useState } from 'react'; +import { useTranslation } from 'next-i18next'; +import { useAutoResizeTextArea, useFetch } from '../hooks'; +import { LLMChat, LLMModel, LLMProvidersOptionsType } from './types'; +import { ApiSuccess } from '../types'; +import { ChatContext } from '../provider'; +import { ConversationContext } from './ChatUI'; +import { defaultHeaders } from '../utils'; +import DynamicChatInput from './DynamicChatInput'; +import ConversationArea from './ConversationArea'; +import { Error } from '@/components/shared'; +import toast from 'react-hot-toast'; + +interface ChatProps { + setShowSettings: (value: boolean) => void; + conversationId?: string; + setConversationId: (value: string) => void; +} + +const Chat = ({ + setShowSettings, + conversationId, + setConversationId, +}: ChatProps) => { + const { t } = useTranslation('common'); + const [errorMessage, setErrorMessage] = useState(''); + const [message, setMessage] = useState(''); + const textAreaRef = useAutoResizeTextArea(); + const bottomOfChatRef = useRef(null); + + // Get the provider/model plus loading state from the context + const { provider, model, onError, urls } = useContext(ChatContext); + const selectedConversation = + useContext(ConversationContext)?.selectedConversation; + let isChatWithPDFProvider = + useContext(ConversationContext)?.isChatWithPDFProvider; + if (selectedConversation) { + isChatWithPDFProvider = selectedConversation.isChatWithPDFProvider; + } + const [selectedProvider, setSelectedProvider] = useState(''); + const [selectedModel, setSelectedModel] = useState(''); + + const [requestInProgress, setRequestInProgress] = useState(false); + const [isArchived, setIsArchived] = useState(false); + + // Fetch conversation thread + const { + data: conversationThreadData, + isLoading: isLoadingConversationThread, + error: errorLoadingThread, + refetch: reloadConversationThread, + } = useFetch>({ + url: conversationId ? 
`${urls?.conversation}/${conversationId}` : undefined, + }); + + const conversationThread = conversationThreadData?.data; + + useEffect(() => { + if (!isLoadingConversationThread && errorLoadingThread) { + onError?.(errorLoadingThread.message); + } + }, [isLoadingConversationThread, errorLoadingThread]); + + const { + data: providersData, + isLoading: isLoadingProviders, + error: errorLoadingProviders, + } = useFetch>({ + url: urls?.llmProviders, + }); + + const providers = providersData?.data; + + const showCreateLLMConfigMessage = + !isChatWithPDFProvider && + Array.isArray(providers) && + providers?.length === 0; + const showProviderSelection = + !isChatWithPDFProvider && + !showCreateLLMConfigMessage && + !provider && + Array.isArray(providers) && + providers?.length > 0 && + (selectedProvider === '' || selectedModel === ''); + + const { + data: modelsData, + isLoading: isLoadingModels, + error: errorLoadingModels, + } = useFetch>({ + url: selectedProvider + ? `${urls?.llmProviders}/${selectedProvider}/models` + : undefined, + }); + const models = modelsData?.data; + + useEffect(() => { + if (errorLoadingProviders || errorLoadingModels) { + onError?.(errorLoadingProviders?.message || errorLoadingModels?.message); + } + }, [errorLoadingProviders, errorLoadingModels]); + + useEffect(() => { + setSelectedProvider(selectedConversation?.provider || ''); + setSelectedModel(selectedConversation?.model || ''); + }, [selectedConversation]); + + useEffect(() => { + if (selectedConversation && !isChatWithPDFProvider) { + if ( + providers?.findIndex((p) => p.id === selectedConversation.provider) === + -1 || + models?.findIndex((m) => m.id === selectedConversation.model) === -1 + ) { + setIsArchived(true); + } else { + setIsArchived(false); + } + } + }, [selectedConversation, providers, models, isChatWithPDFProvider]); + + useEffect(() => { + if (textAreaRef.current) { + textAreaRef.current.style.height = '24px'; + textAreaRef.current.style.height = `${textAreaRef.current.scrollHeight}px`; + } + }, [message, textAreaRef]); + + function isRefInView(ref) { + if (!ref.current) return false; + + const rect = ref.current.getBoundingClientRect(); + return ( + rect.top >= 0 && + rect.left >= 0 && + rect.bottom <= + (window.innerHeight || document.documentElement.clientHeight) && + rect.right <= (window.innerWidth || document.documentElement.clientWidth) + ); + } + + const [trailingThread, setTrailingThread] = useState< + { content: string | null; role: string }[] + >([]); + + useEffect(() => { + if (bottomOfChatRef.current) { + if (isRefInView(bottomOfChatRef)) { + bottomOfChatRef.current.scrollIntoView({ + behavior: 'auto', + }); + } + } + }, [conversationThread, trailingThread]); + + const sendMessage = async (e: any) => { + try { + setRequestInProgress(true); + e.preventDefault(); + const _model = models?.find((m) => m.id === (model || selectedModel)); + + if (!isChatWithPDFProvider) { + if (!provider && !selectedProvider) { + setErrorMessage('Please select a Provider'); + return; + } + if (!_model) { + setErrorMessage('Please select a Model'); + return; + } + } + if (message.length < 1) { + setErrorMessage('Please enter a message.'); + return; + } else { + setErrorMessage(''); + } + + // Add the message to the conversation + setTrailingThread([ + { content: message, role: 'user' }, + { content: null, role: 'assistant' }, + ]); + + // Clear the message & remove empty chat + setMessage(''); + + if (!urls?.chat) { + toast.error('Missing API path for LLM chat'); + return; + } + + const response = await 
fetch(urls.chat, {
+        method: 'POST',
+        headers: defaultHeaders,
+        body: JSON.stringify({
+          messages: [
+            ...(conversationThread ?? []),
+            { content: message, role: 'user' },
+          ],
+          model: _model,
+          provider: provider || selectedProvider,
+          conversationId,
+          isChatWithPDFProvider,
+        }),
+      });
+
+      if (response.ok) {
+        const reader = response.body?.getReader();
+        const decoder = new TextDecoder('utf-8');
+        let receivedData = '';
+        if (reader) {
+          let done = false;
+          let value;
+          do {
+            const op = await reader.read();
+            done = op.done;
+            value = op.value;
+            if (done) break;
+            const dt = decoder.decode(value, { stream: true });
+            const jsonData = dt.split('\n').map((d) => {
+              if (!d) {
+                return {
+                  choices: [
+                    {
+                      delta: {
+                        content: '',
+                      },
+                    },
+                  ],
+                };
+              }
+              return JSON.parse(d);
+            });
+            for (const data of jsonData) {
+              if (data.conversationId) {
+                // last chunk
+                if (conversationId !== data.conversationId) {
+                  setConversationId(data.conversationId);
+                }
+                setTrailingThread([]);
+                reloadConversationThread();
+              } else if (data.choices) {
+                // new chunks get appended
+                if (data.choices[0]?.delta?.content) {
+                  receivedData += data.choices[0]?.delta?.content || '';
+                  setTrailingThread([
+                    { content: message, role: 'user' },
+                    { content: receivedData, role: 'assistant' },
+                  ]);
+                }
+              } else if (data?.error?.message) {
+                setErrorMessage(data?.error?.message);
+              }
+            }
+          } while (!done);
+        }
+      } else {
+        const data = await response.json();
+        setErrorMessage(data?.error?.message);
+      }
+    } catch (error: any) {
+      setErrorMessage(error.message);
+
+      // setIsLoading(false);
+    } finally {
+      setRequestInProgress(false);
+    }
+  };
+
+  const handleKeypress = (e: any) => {
+    // It's triggered by pressing the Enter key
+    if (e.keyCode == 13 && !e.shiftKey) {
+      sendMessage(e);
+      e.preventDefault();
+    }
+  };
+
+  const [isUploadingFile, setIsUploadingFile] = useState(false);
+  const handleFileChange = async (e: React.ChangeEvent) => {
+    if (!urls?.fileUpload) {
+      toast.error('Missing API path for file upload');
+      return;
+    }
+    const files = e.target.files;
+    if (files && files.length > 0) {
+      const formData = new FormData();
+      formData.append('file', files[0]);
+      setIsUploadingFile(true);
+      const response = await fetch(urls?.fileUpload, {
+        method: 'POST',
+        body: formData,
+      });
+      setIsUploadingFile(false);
+      if (!response.ok) {
+        const json = await response.json();
+        onError?.(json.error.message);
+        return;
+      }
+    }
+  };
+
+  const providerName = providers?.find(
+    (p) => p.id === (provider || selectedProvider)
+  )?.name;
+  const modelName = models?.find(
+    (m) => m.id === (model || selectedModel)
+  )?.name;
+
+  return (
+
+
+
+ {showProviderSelection && !conversationId && ( +
+
+
+ +
+
+ {Array.isArray(models) && models.length > 0 ? ( + + ) : ( + { + setSelectedModel(e.target.value); + }} + value={selectedModel} + /> + )} +
+
+
+ )} + {selectedProvider && selectedModel && ( +
+ {t('bui-chat-provider')}: {providerName} | {t('bui-chat-model')}:{' '} + {modelName || ''} + {isArchived && ( + + ({t('bui-chat-archived')}) + + )} +
+ )} + {errorMessage && } + + + {showCreateLLMConfigMessage && ( +
+
+ {t('bui-chat-no-chat-configs-found')} +
+
+ {t('bui-chat-goto')}{' '} + { + setShowSettings(true); + }} + > + {t('settings')} + {' '} + {t('bui-chat-to-create-new-config')} +
+
+ )} + + +
+
+
+ ); +}; + +export default Chat; diff --git a/components/chats/chat/ChatDisabled.tsx b/components/chats/chat/ChatDisabled.tsx new file mode 100644 index 000000000..ff35d7cbf --- /dev/null +++ b/components/chats/chat/ChatDisabled.tsx @@ -0,0 +1,20 @@ +import Card from '@/components/shared/Card'; +import { useTranslation } from 'next-i18next'; + +export default function ChatDisabled() { + const { t } = useTranslation('common'); + return ( +
+ + + + {t('bui-chat-disabled-title')} + + {t('bui-chat-disabled-description')} + + + + +
+ ); +} diff --git a/components/chats/chat/ChatDrawer.tsx b/components/chats/chat/ChatDrawer.tsx new file mode 100644 index 000000000..2ad8e5ac9 --- /dev/null +++ b/components/chats/chat/ChatDrawer.tsx @@ -0,0 +1,46 @@ +import MobileSidebar from './MobileSidebar'; +import Sidebar from './Sidebar'; +import { LLMConversation } from './types'; + +type ChatDrawerProps = { + isChatDrawerVisible: boolean; + toggleChatDrawerVisibility: () => void; + setShowSettings: (value: boolean) => void; + conversations?: LLMConversation[]; + conversationId?: string; + setConversationId: (value: string) => void; +}; + +export default function ChatDrawer(props: ChatDrawerProps) { + const { + isChatDrawerVisible, + toggleChatDrawerVisibility, + setShowSettings, + conversations, + conversationId, + setConversationId, + } = props; + return ( + <> + {isChatDrawerVisible ? ( + + ) : null} +
+
+ +
+
+ + ); +} diff --git a/components/chats/chat/ChatSettings.tsx b/components/chats/chat/ChatSettings.tsx new file mode 100644 index 000000000..6447254b9 --- /dev/null +++ b/components/chats/chat/ChatSettings.tsx @@ -0,0 +1,431 @@ +import { useTranslation } from 'next-i18next'; +import { useContext, useEffect, useState } from 'react'; +import { Button, Input } from 'react-daisyui'; +import { + Table, + Card, + Badge, + ConfirmationModal, + InputWithLabel, + Loading, +} from '../shared'; +import { LLMConfig, LLMModel, LLMProvidersOptionsType } from './types'; +import { ChatContext } from '../provider'; +import { useFetch } from '../hooks'; +import { ApiSuccess } from '../types'; +import { defaultHeaders } from '../utils'; + +export default function ChatSettings() { + const { t } = useTranslation('common'); + const [selectedProvider, setSelectedProvider] = useState< + LLMProvidersOptionsType[number]['id'] | '' + >('openai'); + const [selectedModel, setSelectedModel] = useState([]); + const [apiKey, setApiKey] = useState(''); + const [baseURL, setBaseURL] = useState(''); + const [loading, setLoading] = useState(false); + const [confirmationDialogVisible, setConfirmationDialogVisible] = + useState(false); + const [selectedConfig, setSelectedConfig] = useState(null); + const [isChatWithPDFProvider, setIsChatWithPDFProvider] = useState(false); + + const [view, switchView] = useState<'list' | 'create' | 'edit'>('list'); + + const { urls, onError, onSuccess } = useContext(ChatContext); + + const { + data: llmConfigsData, + isLoading: isLoadingConfigs, + refetch: reloadConfigs, + error: errorLoadingConfigs, + } = useFetch>({ + url: urls?.llmConfig, + }); + const llmConfigs = llmConfigsData?.data || []; + + const { + data: providersData, + isLoading: isLoadingProviders, + error: errorLoadingProviders, + } = useFetch>({ + url: `${urls?.llmProviders}?filterByTenant=false`, + }); + + const providers = providersData?.data || []; + + const { + data: modelsData, + isLoading: isLoadingModels, + error: errorLoadingModels, + } = useFetch>({ + url: selectedProvider + ? `${urls?.llmProviders}/${selectedProvider}/models?filterByTenant=false` + : undefined, + }); + const models = modelsData?.data || []; + + useEffect(() => { + if (errorLoadingConfigs || errorLoadingProviders || errorLoadingModels) { + onError?.( + errorLoadingConfigs?.message || + errorLoadingProviders?.message || + errorLoadingModels?.message + ); + } + }, [errorLoadingConfigs, errorLoadingProviders, errorLoadingModels]); + + const createLLMConfig = async (e: React.FormEvent) => { + e.preventDefault(); + + setLoading(true); + + const response = await fetch(`${urls?.llmConfig}`, { + method: 'POST', + headers: defaultHeaders, + body: JSON.stringify({ + provider: isChatWithPDFProvider ? 'openai' : selectedProvider, + models: isChatWithPDFProvider ? [] : selectedModel, + apiKey: apiKey ?? 
undefined, + baseURL, + isChatWithPDFProvider, + }), + }); + setLoading(false); + + const result = await response.json(); + + if (!response.ok) { + onError?.(result.error.message); + return; + } + + onSuccess?.(t('bui-chat-config-created')); + reloadConfigs(); + resetForm(); + }; + + const deleteConfig = async (config: any) => { + if (!config) { + return; + } + + const response = await fetch(`${urls?.llmConfig}/${config.id}`, { + method: 'DELETE', + headers: defaultHeaders, + }); + + if (!response.ok) { + const json = await response.json(); + setConfirmationDialogVisible(false); + onError?.(json.error.message); + return; + } + + setSelectedConfig(null); + reloadConfigs(); + setConfirmationDialogVisible(false); + onSuccess?.(t('bui-chat-config-deleted')); + }; + + const updateLLMConfig = async (e: React.FormEvent) => { + e.preventDefault(); + + setLoading(true); + + const response = await fetch(`${urls?.llmConfig}/${selectedConfig?.id}`, { + method: 'PUT', + headers: defaultHeaders, + body: JSON.stringify({ + provider: selectedProvider, + models: selectedModel, + apiKey: apiKey ?? undefined, + isChatWithPdfProvider: selectedConfig.isChatWithPdfProvider, + baseURL, + }), + }); + + setLoading(false); + if (!response.ok) { + const json = await response.json(); + onError?.(json.error.message); + return; + } + + onSuccess?.(t('bui-chat-config-updated')); + reloadConfigs(); + resetForm(); + }; + + const resetForm = () => { + setSelectedProvider(''); + setSelectedModel([]); + setApiKey(''); + setBaseURL(''); + switchView('list'); + }; + + return ( + <> + + + + + {t('settings')} + + + +

{t('bui-chat-llm-providers')}

+ {view === 'list' && ( + + )} + {isLoadingConfigs && } + {view === 'list' && llmConfigs.length > 0 && ( + { + const providerName = providers.find( + (p) => p.id === config.provider + )?.name; + return { + id: config.id, + cells: [ + { + element: config.isChatWithPDFProvider ? ( + + {t('bui-chat-with-pdf-provider')} + + ) : ( + {providerName} + ), + }, + { + wrap: true, + text: + config.models && typeof config.models !== 'string' + ? // .map((a: string) => { + // // const modelName = models.find((m) => m.id === a)?.name; + // // return modelName; + // // ollama is a special case where the model is open ended + // // config.models + // }) + config.models.join(', ') || '*' + : config.models, + }, + { + wrap: true, + text: new Date(config.createdAt).toDateString(), + minWidth: 160, + }, + { + buttons: [ + { + text: t('bui-shared-edit'), + onClick: () => { + setSelectedConfig(config); + // setIsEdit(true); + switchView('edit'); + setSelectedProvider(config.provider); + setSelectedModel(config.models); + // setApiKey(config.apiKey || ''); + setBaseURL(config.baseURL || ''); + }, + }, + { + color: 'error', + text: t('bui-chat-remove'), + onClick: () => { + setSelectedConfig(config); + setConfirmationDialogVisible(true); + }, + }, + ], + }, + ], + }; + })} + >
+ )} + {(view === 'edit' || view === 'create') && ( +
+
+ {view === 'create' && ( +
+ + setIsChatWithPDFProvider(e.target.checked) + } + /> + +
+ )} + {((view === 'create' && !isChatWithPDFProvider) || + (view === 'edit' && + !selectedConfig.isChatWithPDFProvider)) && ( + <> +
+
+ + {t('bui-chat-provider')} + +
+ +
+
+
+ + {t('bui-chat-model')} + +
+ +
+ + )} +
+ setApiKey('')} + onChange={(e) => setApiKey(e.target.value)} + /> +
+
+ setBaseURL(e.target.value)} + /> +
+ {(view === 'edit' || view === 'create') && ( +
+ + +
+ )} +
+
+ )} +
+
+
+ setConfirmationDialogVisible(false)} + onConfirm={() => deleteConfig(selectedConfig)} + title={t('bui-chat-config-deletion-title')} + description={t('bui-chat-config-deletion-description')} + > + + ); +} diff --git a/components/chats/chat/ChatUI.tsx b/components/chats/chat/ChatUI.tsx new file mode 100644 index 000000000..9a1f2d66c --- /dev/null +++ b/components/chats/chat/ChatUI.tsx @@ -0,0 +1,138 @@ +import { createContext, useContext, useEffect, useState } from 'react'; +import Chat from './Chat'; +import ChatSettings from './ChatSettings'; +import ChatDrawer from './ChatDrawer'; +import { useRouter } from 'next/router'; +import { useFetch } from '../hooks'; +import { ApiSuccess } from '../types'; +import { ChatContext } from '../provider'; +import { LLMConversation } from './types'; +import useTeam from 'hooks/useTeam'; +import { Loading } from '../shared'; +import { Menu, Plus } from 'lucide-react'; +import { useTranslation } from 'next-i18next'; + +interface ConversationContextType { + selectedConversation?: LLMConversation; + isLoadingConversations: boolean; + isChatWithPDFProvider: boolean; + setIsChatWithPDFProvider: (value: boolean) => void; +} + +export const ConversationContext = + createContext(null); + +export function ChatUI({ slug }) { + const { t } = useTranslation('common'); + const router = useRouter(); + const conversationId = router.query.conversationId?.[0] as string; + + const setConversationId = (newConversationId: string) => { + const basePath = router.pathname.split('/[[...conversationId]]')[0]; + const conversationRoute = basePath.split('/[slug]').join(`/${slug}`); + + if (newConversationId === '') { + router.push(conversationRoute); + } else { + router.push(`${conversationRoute}/${newConversationId}`); + } + }; + + const { urls } = useContext(ChatContext); + + const { + data: conversationsData, + isLoading: isLoadingConversations, + refetch: reloadConversations, + } = useFetch>({ url: urls?.conversation }); + + const conversations = conversationsData?.data; + + useEffect(() => { + if (conversationId) { + reloadConversations(); + } + }, [conversationId, reloadConversations]); + + const [isChatDrawerVisible, setIsChatDrawerVisible] = useState(false); + const [showSettings, setShowSettings] = useState(false); + const [isChatWithPDFProvider, setIsChatWithPDFProvider] = useState(false); + + const toggleChatDrawerVisibility = () => { + setIsChatDrawerVisible(!isChatDrawerVisible); + }; + + const selectedConversation = conversations?.find( + (c) => c.id === conversationId + ); + + const { isLoading } = useTeam(); + + if (isLoading) { + return ; + } + + return ( + +
+ +
+
+ +

+ {showSettings + ? t('settings') + : selectedConversation?.title || t('bui-chat-new-chat')} +

+ +
+
+ {showSettings ? ( +
+ +
+ ) : ( +
+ +
+ )} +
+
+
+
+ ); +} diff --git a/components/chats/chat/ConversationArea.tsx b/components/chats/chat/ConversationArea.tsx new file mode 100644 index 000000000..9753021a9 --- /dev/null +++ b/components/chats/chat/ConversationArea.tsx @@ -0,0 +1,36 @@ +import React, { useRef, useEffect } from 'react'; +import Message from './Message'; // Ensure you have a Message component imported + +const ConversationArea = ({ + conversationThread, + trailingThread, + className, +}) => { + const bottomOfChatRef = useRef(null); + + // Auto-scroll to bottom when messages update + useEffect(() => { + if (bottomOfChatRef.current) { + bottomOfChatRef.current.scrollIntoView({ behavior: 'smooth' }); + } + }, [conversationThread, trailingThread]); + + // Combine and render messages + const allMessages = [...(conversationThread ?? []), ...trailingThread]; + + return ( +
+
+ {allMessages?.map((message, index) => ( + + ))} +
+
+
+ ); +}; + +export default ConversationArea; diff --git a/components/chats/chat/DynamicChatInput.tsx b/components/chats/chat/DynamicChatInput.tsx new file mode 100644 index 000000000..d77d6f1f5 --- /dev/null +++ b/components/chats/chat/DynamicChatInput.tsx @@ -0,0 +1,132 @@ +import React, { useState, useRef, useEffect } from 'react'; +import { SendHorizontal, Paperclip } from 'lucide-react'; + +const DynamicChatInput = ({ + message, + setMessage, + sendMessage, + handleKeypress, + handleFileChange, + isChatWithPDFProvider, + isUploadingFile, + provider, + selectedProvider, + model, + selectedModel, + requestInProgress, + isArchived, +}) => { + const textareaRef = useRef(null); + const [inputHeight, setInputHeight] = useState('auto'); + + // Auto-resize textarea + useEffect(() => { + const textarea = textareaRef.current; + if (textarea) { + textarea.style.height = 'auto'; + const scrollHeight = textarea.scrollHeight; + const newHeight = Math.min(Math.max(scrollHeight, 48), 200); // Min 48px, Max 200px + + textarea.style.height = `${newHeight}px`; + setInputHeight(`${newHeight}px`); + } + }, [message]); + + // Handle input change with dynamic expansion + const handleInputChange = (e: React.ChangeEvent) => { + setMessage(e.target.value); + }; + + // Handle key events for send functionality + const handleKeyDown = (e: React.KeyboardEvent) => { + if (e.key === 'Enter' && !e.shiftKey) { + e.preventDefault(); + sendMessage(); + } + handleKeypress(e); + }; + + return ( +
+
+
+ {isChatWithPDFProvider && ( +
+ + +
+ )} + +