summaryrefslogtreecommitdiff
path: root/llama.cpp/tools/server/webui/src/lib/services/props.ts
blob: 01fead9fa3e38798e04749f25242442c358ecadd (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
import { getAuthHeaders } from '$lib/utils';

/**
 * PropsService - Server properties management
 *
 * This service handles communication with the /props endpoint to retrieve
 * server configuration, model information, and capabilities.
 *
 * **Responsibilities:**
 * - Fetch server properties from /props endpoint
 * - Handle API authentication
 * - Parse and validate server response
 *
 * **Used by:**
 * - serverStore: Primary consumer for server state management
 */
export class PropsService {
	// ─────────────────────────────────────────────────────────────────────────────
	// Fetching
	// ─────────────────────────────────────────────────────────────────────────────

	/**
	 * Fetches server properties from the /props endpoint.
	 *
	 * @param autoload - If false, prevents automatic model loading (default: false)
	 * @returns Server properties
	 * @throws Error if the request fails with a non-2xx status
	 */
	static async fetch(autoload = false): Promise<ApiLlamaCppServerProps> {
		return PropsService.requestProps(undefined, autoload, 'server');
	}

	/**
	 * Fetches server properties for a specific model (ROUTER mode).
	 *
	 * @param modelId - The model ID to fetch properties for
	 * @param autoload - If false, prevents automatic model loading (default: false)
	 * @returns Server properties for the model
	 * @throws Error if the request fails with a non-2xx status
	 */
	static async fetchForModel(modelId: string, autoload = false): Promise<ApiLlamaCppServerProps> {
		return PropsService.requestProps(modelId, autoload, 'model');
	}

	/**
	 * Shared request logic for both public entry points.
	 *
	 * @param modelId - Optional model to scope the request to (ROUTER mode)
	 * @param autoload - If false, appends `autoload=false` to suppress model loading
	 * @param subject - Word used in the error message ('server' | 'model'),
	 *                  kept so both callers' original messages are preserved
	 */
	private static async requestProps(
		modelId: string | undefined,
		autoload: boolean,
		subject: 'server' | 'model'
	): Promise<ApiLlamaCppServerProps> {
		// Resolve relative to the current page so the webui works behind a path prefix.
		const url = new URL('./props', window.location.href);
		if (modelId !== undefined) {
			url.searchParams.set('model', modelId);
		}
		if (!autoload) {
			url.searchParams.set('autoload', 'false');
		}

		const response = await fetch(url.toString(), {
			headers: getAuthHeaders()
		});

		if (!response.ok) {
			throw new Error(
				`Failed to fetch ${subject} properties: ${response.status} ${response.statusText}`
			);
		}

		// NOTE(review): the response body is trusted as-is — no runtime validation
		// that it actually matches ApiLlamaCppServerProps.
		return (await response.json()) as ApiLlamaCppServerProps;
	}
}