llama.cpp

{#if (filteredConversations.length > 0 && isSearchModeActive) || !isSearchModeActive}
  {isSearchModeActive ? 'Search results' : 'Conversations'}
{/if}

{#each filteredConversations as conversation (conversation.id)}
  <!-- conversation list item -->
{/each}

{#if filteredConversations.length === 0}
  {searchQuery.length > 0
    ? 'No results found'
    : isSearchModeActive
      ? 'Start typing to see results'
      : 'No conversations yet'}
{/if}
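The derivation of filteredConversations is not part of this fragment; the following is a minimal TypeScript sketch of how it could be computed from the names used above (searchQuery and isSearchModeActive come from the template, while the unfiltered conversations list and the case-insensitive name match are assumptions).

    interface Conversation {
      id: string;
      name: string;
    }

    // Sketch of the sidebar filtering implied by the template above.
    // Assumption: matching is a case-insensitive substring test on the
    // conversation name; the actual component may search more fields.
    function filterConversations(
      conversations: Conversation[],
      searchQuery: string,
      isSearchModeActive: boolean
    ): Conversation[] {
      // Outside search mode the full list is shown under the "Conversations" header.
      if (!isSearchModeActive) return conversations;
      // In search mode with an empty query the list stays empty, so the
      // template shows "Start typing to see results".
      if (searchQuery.length === 0) return [];
      const query = searchQuery.toLowerCase();
      // No matches leaves an empty list, which shows "No results found".
      return conversations.filter((c) => c.name.toLowerCase().includes(query));
    }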
<!-- Delete conversation dialog: dismissing it clears the selection -->
{() => {
  showDeleteDialog = false;
  selectedConversation = null;
}} />

<!-- Edit conversation name dialog -->
Edit Conversation Name
<input
  type="text"
  placeholder="Enter a new name"
  bind:value={editedName}
  onkeydown={(e) => {
    if (e.key === 'Enter') {
      e.preventDefault();
      handleConfirmEdit();
    }
  }}
/>
<button onclick={() => { showEditDialog = false; selectedConversation = null; }}>Cancel</button>
<button onclick={handleConfirmEdit}>Save</button>

<!-- Clear all conversations dialog -->
{() => {
  showClearAllDialog = false;
}} />

<!-- Memory Manager dialog -->
💾 Memory Manager
View and edit the AI's persistent memory. Stored in llama_memory.txt (max 5KB).
{#if memoryLoading}
  Loading memory...
{:else}
  The AI will read this memory at the start of each new conversation to personalize responses.
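Two behaviors follow from the dialog text above: memory is stored in llama_memory.txt with a 5 KB cap, and it is read in at the start of every new conversation to personalize responses. The TypeScript below is a rough sketch of both, where MemoryStore, saveMemory, and buildSystemPrompt are hypothetical names and the storage mechanism itself is an assumption.

    const MEMORY_FILE = 'llama_memory.txt';
    const MEMORY_LIMIT_BYTES = 5 * 1024; // "max 5KB" from the dialog text

    // Hypothetical storage interface; in practice this could be a server
    // endpoint that reads/writes llama_memory.txt, or browser storage.
    interface MemoryStore {
      read(file: string): Promise<string>;
      write(file: string, text: string): Promise<void>;
    }

    // Sketch: refuse to persist memory that exceeds the 5 KB limit.
    async function saveMemory(store: MemoryStore, text: string): Promise<void> {
      const bytes = new TextEncoder().encode(text).length;
      if (bytes > MEMORY_LIMIT_BYTES) {
        throw new Error(`Memory is ${bytes} bytes; the limit is ${MEMORY_LIMIT_BYTES}.`);
      }
      await store.write(MEMORY_FILE, text);
    }

    // Sketch: prepend the stored memory to the system prompt when a new
    // conversation starts, so responses can be personalized.
    async function buildSystemPrompt(store: MemoryStore, basePrompt: string): Promise<string> {
      const memory = (await store.read(MEMORY_FILE)).trim();
      return memory ? `${basePrompt}\n\nPersistent memory:\n${memory}` : basePrompt;
    }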