1#!/usr/bin/env bash
 2
 3set -e
 4
 5MODEL_PATH="${1:-"$MODEL_PATH"}"
 6MODEL_NAME="${2:-$(basename "$MODEL_PATH")}"
 7
 8CONVERTED_MODEL_PATH="${1:-"$CONVERTED_MODEL"}"
 9CONVERTED_MODEL_NAME="${2:-$(basename "$CONVERTED_MODEL_PATH" ".gguf")}"
10
11if [ -t 0 ]; then
12    CPP_EMBEDDINGS="data/llamacpp-${CONVERTED_MODEL_NAME}-embeddings.bin"
13else
14    # Process piped JSON data and convert to binary (matching logits.cpp format)
15    TEMP_FILE=$(mktemp /tmp/tmp.XXXXXX.binn)
16    python3 -c "
17import json
18import sys
19import struct
20
21data = json.load(sys.stdin)
22
23# Flatten all embeddings completely
24flattened = []
25for item in data:
26    embedding = item['embedding']
27    for token_embedding in embedding:
28        flattened.extend(token_embedding)
29
30print(f'Total embedding values: {len(flattened)}', file=sys.stderr)
31
32# Write as binary floats - matches logitc.cpp fwrite format
33with open('$TEMP_FILE', 'wb') as f:
34    for value in flattened:
35        f.write(struct.pack('f', value))
36"
37    CPP_EMBEDDINGS="$TEMP_FILE"
38    trap "rm -f $TEMP_FILE" EXIT
39fi
40
41python scripts/utils/semantic_check.py --model-path $MODEL_PATH \
42    --python-embeddings data/pytorch-${MODEL_NAME}-embeddings.bin \
43    --cpp-embeddings $CPP_EMBEDDINGS \
44    --prompt "Hello world today" \
45    --causal
46