#include "models.h"

ggml_cgraph * clip_graph_kimivl::build() {
    // 2D input positions
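    // pos_h and pos_w are runtime inputs holding the row and column index of
    // each patch; they feed the 2D RoPE inside the attention layers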
    ggml_tensor * pos_h = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_patches);
    ggml_set_name(pos_h, "pos_h");
    ggml_set_input(pos_h);

    ggml_tensor * pos_w = ggml_new_tensor_1d(ctx0, GGML_TYPE_I32, n_patches);
    ggml_set_name(pos_w, "pos_w");
    ggml_set_input(pos_w);

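    // learned absolute position embeddings, resized to the current image's
    // patch grid; build_vit adds them to the patch embeddings before the
    // transformer layers, complementing the per-layer 2D RoPE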
    ggml_tensor * learned_pos_embd = resize_position_embeddings();

    // build ViT with 2D position embeddings
    auto add_pos = [&](ggml_tensor * cur, const clip_layer &) {
        // first half is X axis and second half is Y axis
        return build_rope_2d(ctx0, cur, pos_w, pos_h, hparams.rope_theta, false);
    };

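    // embed the input patches, then run the transformer stack;
    // add_pos is applied to Q and K in each attention layer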
    ggml_tensor * inp = build_inp();
    ggml_tensor * cur = build_vit(
        inp, n_patches,
        NORM_TYPE_NORMAL,
        hparams.ffn_op,
        learned_pos_embd,
        add_pos);

    cb(cur, "vit_out", -1);

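    // multimodal projector: merge neighboring patches, re-normalize, then map
    // the result into the language model's embedding space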
    {
        // patch_merger
        const int scale_factor = model.hparams.n_merge;
        cur = build_patch_merge_permute(cur, scale_factor);
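        // after the merge, each scale_factor x scale_factor group of patches
        // is flattened into one row: ne[0] == n_embd * scale_factor^2 and the
        // number of rows shrinks by the same factor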

        // projection norm
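        // the norm weights span a single patch embedding (n_embd), so view
        // the merged rows as [n_embd, n_rows * scale_factor^2], apply the
        // LayerNorm, then restore the merged shape below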
        const int64_t proj_inp_dim = cur->ne[0];
        cur = ggml_view_2d(ctx0, cur,
            n_embd, cur->ne[1] * scale_factor * scale_factor,
            ggml_row_size(cur->type, n_embd), 0);
        cur = ggml_norm(ctx0, cur, 1e-5f); // default nn.LayerNorm eps
        cur = ggml_mul(ctx0, cur, model.mm_input_norm_w);
        cur = ggml_add(ctx0, cur, model.mm_input_norm_b);
        cur = ggml_view_2d(ctx0, cur,
            proj_inp_dim, cur->ne[1] / scale_factor / scale_factor,
            ggml_row_size(cur->type, proj_inp_dim), 0);
        cb(cur, "proj_inp_normed", -1);

        // projection mlp: mm_1 -> GELU -> mm_2 (no gate)
        cur = build_ffn(cur,
            model.mm_1_w, model.mm_1_b,
            nullptr, nullptr,
            model.mm_2_w, model.mm_2_b,
            FFN_GELU,
            -1);
        cb(cur, "proj_out", -1);
    }

    // build the graph
    ggml_build_forward_expand(gf, cur);

    return gf;
}