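/* Display the contents of a GGUF file: general header information,
 * all key-value metadata pairs and the list of tensors.
 *
 * Example run (assuming the program was compiled to a binary named
 * gguf-show; the actual name depends on how it is built):
 *
 *   ./gguf-show model.gguf
 */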
#include <stdio.h>
#include <stdlib.h>
#include "gguflib.h"

int main(int argc, char **argv) {
    if (argc != 2) {
        printf("Usage: %s <filename>\n",argv[0]);
        exit(1);
    }
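    /* Open the GGUF file and create the parsing context. gguf_init()
     * returns NULL on failure, and the cause is reported via perror(). */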
    gguf_ctx *ctx = gguf_init(argv[1]);
    if (ctx == NULL) {
        perror("Opening GGUF file");
        exit(1);
    }

    /* Show general information about the neural network. */
    printf("%s (ver %d): %llu key-value pairs, %llu tensors\n",
        argv[1],
        (int)ctx->header->version,
        (unsigned long long)ctx->header->metadata_kv_count,
        (unsigned long long)ctx->header->tensor_count);

    /* Show all the key-value pairs. */
    gguf_key key;
    while (gguf_get_key(ctx,&key)) {
        /* Print the key name (namelen bytes: it may not be NUL-terminated),
         * the type of the associated value, then the value itself. */
        printf("%.*s: [%s] ", (int)key.namelen, key.name, gguf_get_value_type_name(key.type));
        gguf_print_value(ctx,key.type,key.val,0);
        printf("\n");
    }
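
    /* Show all the tensors stored in the file. */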
    gguf_tensor tensor;
    while (gguf_get_tensor(ctx,&tensor)) {
        printf("%s tensor %.*s @%llu, %llu weights, %llu bytes\n",
            gguf_get_tensor_type_name(tensor.type),
            (int)tensor.namelen,
            tensor.name,
            (unsigned long long)tensor.offset,
            (unsigned long long)tensor.num_weights,
            (unsigned long long)tensor.bsize);
    }
    return 0;
}