summaryrefslogtreecommitdiff
path: root/llama.cpp/tools/server/webui/src/routes/+page.svelte
diff options
context:
space:
mode:
authorMitja Felicijan <mitja.felicijan@gmail.com>2026-02-12 20:57:17 +0100
committerMitja Felicijan <mitja.felicijan@gmail.com>2026-02-12 20:57:17 +0100
commitb333b06772c89d96aacb5490d6a219fba7c09cc6 (patch)
tree211df60083a5946baa2ed61d33d8121b7e251b06 /llama.cpp/tools/server/webui/src/routes/+page.svelte
downloadllmnpc-b333b06772c89d96aacb5490d6a219fba7c09cc6.tar.gz
Engage!
Diffstat (limited to 'llama.cpp/tools/server/webui/src/routes/+page.svelte')
-rw-r--r--llama.cpp/tools/server/webui/src/routes/+page.svelte91
1 file changed, 91 insertions, 0 deletions
diff --git a/llama.cpp/tools/server/webui/src/routes/+page.svelte b/llama.cpp/tools/server/webui/src/routes/+page.svelte
new file mode 100644
index 0000000..32a7c2e
--- /dev/null
+++ b/llama.cpp/tools/server/webui/src/routes/+page.svelte
@@ -0,0 +1,91 @@
+<script lang="ts">
+ import { ChatScreen, DialogModelNotAvailable } from '$lib/components/app';
+ import { chatStore } from '$lib/stores/chat.svelte';
+ import { conversationsStore, isConversationsInitialized } from '$lib/stores/conversations.svelte';
+ import { modelsStore, modelOptions } from '$lib/stores/models.svelte';
+ import { onMount } from 'svelte';
+ import { page } from '$app/state';
+ import { replaceState } from '$app/navigation';
+
+ // Deep-link query params (reactive: re-derived whenever the URL changes).
+ // ?q=<text>       -> auto-create a conversation and send <text> as the first message
+ // ?model=<name>   -> pre-select the named model before sending
+ // ?new_chat=true  -> land on a fresh, empty chat
+ let qParam = $derived(page.url.searchParams.get('q'));
+ let modelParam = $derived(page.url.searchParams.get('model'));
+ let newChatParam = $derived(page.url.searchParams.get('new_chat'));
+
+ // Dialog state for model not available error
+ let showModelNotAvailable = $state(false);
+ // Name of the model the user asked for via ?model= (shown in the dialog).
+ let requestedModelName = $state('');
+ // Model names currently offered by the server, fed to the dialog as suggestions.
+ let availableModelNames = $derived(modelOptions().map((m) => m.model));
+
+ /**
+  * Remove the deep-link query params from the address bar so that a page
+  * refresh does not replay the same message send / model selection.
+  */
+ function clearUrlParams() {
+   const url = new URL(page.url);
+
+   for (const param of ['q', 'model', 'new_chat']) {
+     url.searchParams.delete(param);
+   }
+
+   replaceState(url.toString(), {});
+ }
+
+ /**
+  * Process the deep-link query params: optionally select a model, then
+  * optionally auto-send a first message. On any model failure the error
+  * dialog is opened and the params are left intact (so the state is visible
+  * on refresh); on success the params are cleared.
+  */
+ async function handleUrlParams() {
+   await modelsStore.fetch();
+
+   if (modelParam) {
+     const matched = modelsStore.findModelByName(modelParam);
+
+     // Unknown model name: surface the dialog and bail out early.
+     if (!matched) {
+       requestedModelName = modelParam;
+       showModelNotAvailable = true;
+
+       return;
+     }
+
+     try {
+       await modelsStore.selectModelById(matched.id);
+     } catch (error) {
+       // Model exists but could not be activated: same dialog, same early exit.
+       console.error('Failed to select model:', error);
+       requestedModelName = modelParam;
+       showModelNotAvailable = true;
+
+       return;
+     }
+   }
+
+   // ?q= present: start a fresh conversation, send the message, then scrub
+   // the params so a refresh does not re-send it.
+   if (qParam !== null) {
+     await conversationsStore.createConversation();
+     await chatStore.sendMessage(qParam);
+     clearUrlParams();
+
+     return;
+   }
+
+   // No message to send, but ?model= / ?new_chat=true were consumed: scrub them.
+   if (modelParam || newChatParam === 'true') {
+     clearUrlParams();
+   }
+ }
+
+ onMount(async () => {
+   // Lazily initialize the conversations store on first visit to this route.
+   if (!isConversationsInitialized()) {
+     await conversationsStore.initialize();
+   }
+
+   // The root route always starts from a blank chat.
+   conversationsStore.clearActiveConversation();
+   chatStore.clearUIState();
+
+   // Run the deep-link flow only when at least one recognized param is set.
+   const hasDeepLinkParams =
+     qParam !== null || modelParam !== null || newChatParam === 'true';
+
+   if (hasDeepLinkParams) {
+     await handleUrlParams();
+   }
+ });
+</script>
+
+<svelte:head>
+ <title>llama.cpp - AI Chat Interface</title>
+</svelte:head>
+
+<!-- Main chat surface; centered empty state since no conversation is active on load -->
+<ChatScreen showCenteredEmpty={true} />
+
+<!-- Error dialog opened when ?model= names a model the server does not offer -->
+<DialogModelNotAvailable
+ bind:open={showModelNotAvailable}
+ modelName={requestedModelName}
+ availableModels={availableModelNames}
+/>