diff --git a/.gitattributes b/.gitattributes
index 698eee657af35852c0e81531501cedd29f8ab20e..0314e170d5c0c0aa90fb6c517a59391463b70590 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -150,3 +150,4 @@ kud-llama3.3-70b_biology_leaf_relearn_klr_gdr_biology_1_512_2e-5/checkpoint-1/to
 kud-llama3.3-70b_biology_leaf_relearn_klr_gdr_biology_1_512_2e-5/tokenizer.json filter=lfs diff=lfs merge=lfs -text
 kud-llama3.3-70b_biology_leaf_relearn_klr_gdr_biology_10_512_2e-5/checkpoint-1/tokenizer.json filter=lfs diff=lfs merge=lfs -text
 kud-llama3.3-70b_biology_leaf_relearn_klr_gdr_biology_10_512_2e-5/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/config.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..d5c14f51ef67b42c2d23d2add0a3fe1af264a65d
--- /dev/null
+++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/config.json
@@ -0,0 +1,39 @@
+{
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "bos_token_id": 128000,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "head_dim": 128,
+  "hidden_act": "silu",
+  "hidden_size": 8192,
+  "initializer_range": 0.02,
+  "intermediate_size": 28672,
+  "max_position_embeddings": 131072,
+  "mlp_bias": false,
+  "model_type": "llama",
+  "num_attention_heads": 64,
+  "num_hidden_layers": 80,
+  "num_key_value_heads": 8,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "factor": 8.0,
+    "high_freq_factor": 4.0,
+    "low_freq_factor": 1.0,
+    "original_max_position_embeddings": 8192,
+    "rope_type": "llama3"
+  },
+  "rope_theta": 500000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float32",
+  "transformers_version": "4.51.0",
+  "use_cache": true,
+  "vocab_size": 128256
+}
diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/generation_config.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8328e1918ffd060901f14c2f56c0f69ac85341fc
--- /dev/null
+++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/generation_config.json
@@ -0,0 +1,12 @@
+{
+  "bos_token_id": 128000,
+  "do_sample": true,
+  "eos_token_id": [
+    128001,
+    128008,
+    128009
+  ],
+  "temperature": 0.6,
+  "top_p": 0.9,
+  "transformers_version": "4.51.0"
+}
diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00001-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00001-of-00062.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b820487cd0a7a33553838277b08c6dfaac9e3494
--- /dev/null
+++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00001-of-00062.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab8760b57daa47a88f5e2d8f4beb1f7957d6f22bc251a188fd36773d22022a44
+size 4806672984
diff --git 
a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00002-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00002-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e4646bed41193c0c29c08446c0c9695ed6475bb8 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00002-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11e6d95eea8daa94a6aeb0bb3794bce82c2f7e02908d32f17e4450c27d4ba178 +size 4362142864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00003-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00003-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..94166ec96aed78f63f065ef3e2a5d922caaea8b3 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00003-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:863f478048f60de0a6c80f898290f8d4c0645679c66ee0d8df270c708ad538d8 +size 4362142864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00004-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00004-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..020642a6472f5c98c0b93575164fae91689529b5 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00004-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:846697b30627f924e14de749879665d9ab0a9c288480be3ee2ce8889975cff64 +size 4966188864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00005-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00005-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..68166143a680b5c4f8cb15026db867a9f3ce4943 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00005-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d720ec4fa58eb8ea4caa8724bedb1797ae2069bc30b6fa4fb0b87f61b7962fe0 +size 4362142864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00006-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00006-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..39d8f64d9d58b2f074d47196670ffaf4227f5115 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00006-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a423dcb8bd0a72cc95e5b232cbbfd8dc3e6a8b0208e1e3cd07e792eab7b18884 +size 4362142864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00007-of-00062.safetensors 
b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00007-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2a8ed7e4ac36a889c7fea360f18e840feede9b01 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00007-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e1205cc6a0494010b4ee729f1477431a8c2e5a954bdac498cf42767d3f2d077 +size 4966188864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00008-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00008-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7705fd813ec0c1d35aa7c15bf01f4da04108c7a6 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00008-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:341aae64f6f692f6376dfa089a41c27099de668487ac6459405d5b08c595828e +size 4362142864 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00009-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00009-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c27c2e7a74580a58ca724f8563e4217e2ab26e3e --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00009-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:54103b1c2e7b751ddd66d7b130b4a866509dd886ed1cf843eee0e88443a2146f +size 4362142880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00010-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00010-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..b6b9d72079bf85a77edbe3df7652e1b3a63b45ba --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00010-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bdc56f71aac4f12d91ab4a89d6361effef2e9a1ac9b795680834c77c5eb9a14f +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00011-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00011-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..bcc8761e4542d1b74ea7f45c7c3e76d2095f0b73 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00011-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79f7b7a720826bc464f01881f25dad3cbec0159bca6fb51a95ea438031400e82 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00012-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00012-of-00062.safetensors new file mode 100644 
index 0000000000000000000000000000000000000000..63d91f0ba03e8c150af8721ab06e869e8a058131 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00012-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e0a2b680b622b1324cdbc42009e05a3176bc13dcab8e2a23b83aeb30b221babd +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00013-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00013-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..245ea304f0e2cb7cb24d7995685d012052bdf42a --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00013-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:418ee77d381c8551cf6d2f56654903a69e79d7f7db9664ff249588c17c484827 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00014-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00014-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4751f29b8ddd7d5467aed538384e231b4ea05f20 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00014-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3cd846be5a0e952ddd0559ce2ec69a11d3f0087f2a5e505886d1f26a0ef43cfc +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00015-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00015-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..605e07b3ba3e2217bee442c6edbd77e0ddcaea9d --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00015-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17c04e25eba3d8adc9d0bb10b2b5e6b07c71eb6fb620561a172fe58b4571d9fe +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00016-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00016-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0047101f7db9e54ba29dee3dcbdf079a815419ed --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00016-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:69a95d6d924e13561a0647afb3afa9108581a0f7e27856ce4bbb96b9aace23a9 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00017-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00017-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ff7c2b93235a078bee2f4f6473909134034c1509 --- /dev/null +++ 
b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00017-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f18ab0f52874637926d73372179b58a0710f9c780f90d49c2ab3c1265e2618cf +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00018-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00018-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ccdc3ad2ca498ada4f508628c14891b972127b9a --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00018-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2a11a51abbac1d7f2fa27263ee8c2488cb82368dbadbff0ab2509fa18222684 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00019-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00019-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2f24b8bd0e88033d25539ee2821b1e9b08e708aa --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00019-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16b1325b2272f4eeba5caff59c353b7bd76e300a02c2c2d5ab29dc5b9de27ae5 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00020-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00020-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c2878ad54282e522802b04db7be3982cb3768be0 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00020-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bbfb4c73df9f7e63376e2a36836d15a38707147bc2331a2401ea2082936248af +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00021-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00021-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c7903b941c1d0cfb16da0f4df35904a7dbe892e0 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00021-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e337a2a22db38640ded42255c725fd39d9ced4e2d2815578e4001d5fdd947646 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00022-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00022-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d608bf5b928bf4eb2da5066aaa96fa4d9793beb4 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00022-of-00062.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:b8906e6ab5e8c39c6549a06a37ac37f974d1faa5fb22c3a0239bf352d1b8f017 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00023-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00023-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..724a536eee0f735e062d30ec33183aa000a8b838 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00023-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fb57b2f1f57fd6a9072383d5e0118adfecdff1a9a568c108120c5fa46faf2b70 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00024-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00024-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ec9d52cb6f309e46d45fbfbf3a6497af91bbfa61 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00024-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a97d8d8812560d4aa90d5470c841bbcbb3f82ffc24920960fb03f1f110374fff +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00025-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00025-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..95b873c60a7dcc7b0fa84fbb8c66b35da9a9125e --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00025-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb98edee2bb755f367a4c55221376551afc70aa4aab02be2104b61f603d4e6b6 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00026-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00026-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8b69ab6781cdfdaada326400a5048d10385432a2 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00026-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d64edc6bfdcbf2eb87b1ae7a5ab97e7b3c9c5661d3aff7fbb948e30f6f63d4b +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00027-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00027-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..32b6e8bc1a2178c49854ca611b3958f2804e4ac3 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00027-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:599afebc5a4582a0f42d93dbf9a2b42083c1714762cc14f6f4becdcbada90d57 +size 4362142872 diff --git 
a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00028-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00028-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c32d56af55d6a7ac025eeb8d568c45d98ebb434c --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00028-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58f653671a01ce3b3f6d4941a152b229402aca491c989dd10c8ccca4307cc0a2 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00029-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00029-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fc31ab6273f00314bb57ede019c84145ca75792c --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00029-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a57bcab2c26c59be85a49ec9e0505ff3948b52453e82c1e9bd35cd9d0de4c0a5 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00030-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00030-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..33e157fb8fb8e157b490b1de4643680918cf4e2e --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00030-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:068e6407c53f010f334e7012f17c2671e131b080dd9252b9e7a11590edf98340 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00031-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00031-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7f9a9d6f50519db34db3278f51885cf2e198e9a0 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00031-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2a7fbd100427bdcc996fd4ec5c202004ca39410b5d53c2fc4c67202ec9b7f15 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00032-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00032-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6fe941d72b906efdf9ff4df1752a4c1fa5553db9 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00032-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:624f42db0fc067e6c7e6fd505c5e16ef88b6c5325767a3bb6e6752f72c204c70 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00033-of-00062.safetensors 
b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00033-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..65fc66f6ff11267915461368aca2dfbe3a6458a7 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00033-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5541b70eb08131392b128720ad687875bf0e3bc6ade9d2cc7b24f2eb6a28de29 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00034-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00034-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4290d98b35098d5fa4c09fe427d9efe53eaeb742 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00034-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79895877421507febb66cf43bc3fb8662b3775a652ae84d88aa5f1bd2ba55326 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00035-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00035-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5743da18f194742989fa54ce1fd1b286d85be4e0 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00035-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9018f3055282147860a087627e7bdf57187f9a41e97f4e804833bc8b649af46d +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00036-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00036-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..962292b2e15951c03af0dfe2e5019967ab00dbb1 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00036-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2840f6aee3cf841f6dd1379b36485d3ed3161f7c0e630b1c27bec4adb47665d9 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00037-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00037-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5cc18a71e82c62120f1cd25cd9d64b81dd0368fc --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00037-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:020a17c50b60dcb3b01491c651285c3b9ebd76af126fd17c23ba249bd1b27f11 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00038-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00038-of-00062.safetensors new file mode 100644 
index 0000000000000000000000000000000000000000..324e92376c33c8782e95738208f0f11ec5b3c5e9 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00038-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87330900bfa02c6a4d54d5e693dbfb0044d9a9cae41f2c4c82d790e4bd5a2b41 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00039-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00039-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8d29999ed038eaab86b5ac16d0bdd32af165c975 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00039-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e05289f90da632c426dce5c6ec9faed6e2d139fee0cfaca8693e798c84cfe236 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00040-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00040-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..90e52fe693fb5ab4f9b4100bdaed3bfc83e5a4e3 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00040-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2324cb9bc2ce4cbf955c4360032cdda2fe2003856721d834783359ca9bab18e5 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00041-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00041-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..76e888745411f3b0a43a44198f2fc850a20e7cfb --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00041-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f70749d4091e550548364f275e620dc3d7742477ae75e7e7239e1494aa880ad +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00042-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00042-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e4d5231283d32aff4a400d2f22b3598cd276ed4a --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00042-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:93eeb5af13001151698ca6700c507061f123d45e81ed840d325084c0d82ecc2a +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00043-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00043-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ea7f0b7397a34d26f039821df15ff6663b697b0f --- /dev/null +++ 
b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00043-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7ad00977bc50bcb91b9aa64bdf584fcf0df19f32b93868a31896e6e6c2e30073 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00044-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00044-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..65a95c63124a7944c36ff65423bb9916a2a00033 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00044-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:59c63984af21ce45427ef3d25d92d6e0326b27b1fea133b7b6231fd0c1d11b5a +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00045-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00045-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f2def40dea997828310455ee76bff499c1438ac9 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00045-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f65352030885661ece8223e5b9e1d79a7f4b147ef6b59ab787d0063ea5cac152 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00046-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00046-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c0ac8d1b7193b75c033082065884021d91eab799 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00046-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd50676af6d253ed68495785424fcdc2517ea050baff2184d99cf30c2b42016e +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00047-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00047-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c879141056b4dadf1b4435797a1a6b9408655f9a --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00047-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11f867eb6b955a77ed6cc79fcd3f7a9607f56529497eeb9336f961a287a844b4 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00048-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00048-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8bfeecfb6bb8b652dac79ed3cc2a7e87eb4fffdc --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00048-of-00062.safetensors @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:706a5a23ad8735aa64a951054aa282213d5143f92c46094779253e06428993c1 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00049-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00049-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..cb82b7b93ecdc398e59a899933e63a3642b8a166 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00049-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:530729dc87fc8754a4663114477fb134de069930a83936d3f7224e5f865ca4e9 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00050-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00050-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..465dc1ebf449b0cf332d1f070f01095e9b4dbeb9 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00050-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c681df381544b33ec6bf759515612e3e60abd557d79b9c18c053a64587f91f8 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00051-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00051-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2c87b66c36ba4f18495149df2eb285a8f91779ac --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00051-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2ef6aab9bce4f9c229d47eb54bb74290b42698f7b6d69eb45677c86da797ae6d +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00052-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00052-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..875198483fe5ddd112c46fce34b13110a31d74fc --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00052-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:131d9218d7103a670f0f81f5caa6749bde11de09835af5a4edfc27d9b68660cd +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00053-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00053-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d7930110a7dcb34b6210487db96b15340c4cfabc --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00053-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f451e86dff20665277dbc77a1e759894170d6e8008eeb5e530d3f1a1a2e199d4 +size 4362142872 diff --git 
a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00054-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00054-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..881c1fd34f506ecf186dbbaa0f1f9c9e06393cb3 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00054-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd04541f673900803274781a40b507f4fa36f5f4e37d51bf5995a08c1837e790 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00055-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00055-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ff0777c3a4344121f1efa905463510a8062290e4 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00055-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:987e9aed04baa4f7f8df77f69a868315f38f1b6630aa6b5f2cc0866963121539 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00056-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00056-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5eacfc0be34aecaa9aa9d7c7d9c665fce44f961c --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00056-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19cb1bc9684576f143a5ffef0835bbefa20c95a92631acebedb98dea4e8bc311 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00057-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00057-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..159aa68d965f744732aa9d09eede9a49d28c4c78 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00057-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e2af5da683f026ef415d8d0f9e390857218059f68a100b98bb32158074cac3ce +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00058-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00058-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..f84a893c771e0ecdcda98f5b8e947dc52b4dbfd6 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00058-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be9dec50041273a481a1030a7e35c42a572c71e81c667152fcdf6ee990c20b32 +size 4966188880 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00059-of-00062.safetensors 
b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00059-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..3c315c3a49cd9e1f062a1c3537f848b8d7a67676 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00059-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a76ff062a5f472ece48bead5cc3b2e88851d65f8731b961fe5b2ed98ff1b9645 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00060-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00060-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8d659f8b44c45e1cfad404155bc003c6e616bacc --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00060-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:517d0e0de1042468c36313cafb001912df12d656a069657413bf2f1c28fd9e13 +size 4362142872 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00061-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00061-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..c674fde4150249a6019ad2b6ad2fa714ab23043e --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00061-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:205d536a72f572942a571a52dc15e5a578678cdbd1c8aa07b385e2e9ed620bd6 +size 4362241496 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00062-of-00062.safetensors b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00062-of-00062.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..908a6bd1df871d81d23f41bd765c1d5c2abf525a --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model-00062-of-00062.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37adba49b605e4b59b81ef2930475973ff37b20bd4ddd0fbc20c302fc3c112d5 +size 4202692736 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model.safetensors.index.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..8aab69e95f0d7da106bad45a4e7c35897ecf3d80 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/model.safetensors.index.json @@ -0,0 +1,730 @@ +{ + "metadata": { + "total_size": 282214825984 + }, + "weight_map": { + "lm_head.weight": "model-00062-of-00062.safetensors", + "model.embed_tokens.weight": "model-00001-of-00062.safetensors", + "model.layers.0.input_layernorm.weight": "model-00002-of-00062.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00062.safetensors", + 
"model.layers.0.mlp.up_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00062.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00062.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00062.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00062.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00062.safetensors", + "model.layers.1.input_layernorm.weight": "model-00003-of-00062.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00003-of-00062.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00062.safetensors", + "model.layers.10.input_layernorm.weight": "model-00010-of-00062.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00010-of-00062.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.11.input_layernorm.weight": "model-00010-of-00062.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00010-of-00062.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.12.input_layernorm.weight": "model-00011-of-00062.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00011-of-00062.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00010-of-00062.safetensors", + "model.layers.13.input_layernorm.weight": "model-00012-of-00062.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00012-of-00062.safetensors", + 
"model.layers.13.mlp.gate_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00012-of-00062.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00011-of-00062.safetensors", + "model.layers.14.input_layernorm.weight": "model-00013-of-00062.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00013-of-00062.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00012-of-00062.safetensors", + "model.layers.15.input_layernorm.weight": "model-00013-of-00062.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00013-of-00062.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.16.input_layernorm.weight": "model-00014-of-00062.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00014-of-00062.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00013-of-00062.safetensors", + "model.layers.17.input_layernorm.weight": "model-00015-of-00062.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00015-of-00062.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00014-of-00062.safetensors", + "model.layers.18.input_layernorm.weight": "model-00016-of-00062.safetensors", + 
"model.layers.18.mlp.down_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00016-of-00062.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00015-of-00062.safetensors", + "model.layers.19.input_layernorm.weight": "model-00016-of-00062.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00016-of-00062.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.2.input_layernorm.weight": "model-00004-of-00062.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00004-of-00062.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00003-of-00062.safetensors", + "model.layers.20.input_layernorm.weight": "model-00017-of-00062.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00017-of-00062.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00016-of-00062.safetensors", + "model.layers.21.input_layernorm.weight": "model-00018-of-00062.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00018-of-00062.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00017-of-00062.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00017-of-00062.safetensors", + 
"model.layers.22.input_layernorm.weight": "model-00019-of-00062.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00019-of-00062.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00018-of-00062.safetensors", + "model.layers.23.input_layernorm.weight": "model-00019-of-00062.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00019-of-00062.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.24.input_layernorm.weight": "model-00020-of-00062.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00020-of-00062.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00019-of-00062.safetensors", + "model.layers.25.input_layernorm.weight": "model-00021-of-00062.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00021-of-00062.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00020-of-00062.safetensors", + "model.layers.26.input_layernorm.weight": "model-00022-of-00062.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00022-of-00062.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00021-of-00062.safetensors", + 
"model.layers.26.self_attn.v_proj.weight": "model-00021-of-00062.safetensors", + "model.layers.27.input_layernorm.weight": "model-00022-of-00062.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00022-of-00062.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.28.input_layernorm.weight": "model-00023-of-00062.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00023-of-00062.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00022-of-00062.safetensors", + "model.layers.29.input_layernorm.weight": "model-00024-of-00062.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00024-of-00062.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00023-of-00062.safetensors", + "model.layers.3.input_layernorm.weight": "model-00004-of-00062.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00062.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.30.input_layernorm.weight": "model-00025-of-00062.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00025-of-00062.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00024-of-00062.safetensors", + 
"model.layers.30.self_attn.q_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00024-of-00062.safetensors", + "model.layers.31.input_layernorm.weight": "model-00025-of-00062.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00025-of-00062.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.32.input_layernorm.weight": "model-00026-of-00062.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00026-of-00062.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00025-of-00062.safetensors", + "model.layers.33.input_layernorm.weight": "model-00027-of-00062.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00027-of-00062.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00026-of-00062.safetensors", + "model.layers.34.input_layernorm.weight": "model-00028-of-00062.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00028-of-00062.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00027-of-00062.safetensors", + "model.layers.35.input_layernorm.weight": "model-00028-of-00062.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00028-of-00062.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00028-of-00062.safetensors", + 
"model.layers.35.self_attn.o_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.36.input_layernorm.weight": "model-00029-of-00062.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00029-of-00062.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00028-of-00062.safetensors", + "model.layers.37.input_layernorm.weight": "model-00030-of-00062.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00030-of-00062.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00029-of-00062.safetensors", + "model.layers.38.input_layernorm.weight": "model-00031-of-00062.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00031-of-00062.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00030-of-00062.safetensors", + "model.layers.39.input_layernorm.weight": "model-00031-of-00062.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00031-of-00062.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.4.input_layernorm.weight": "model-00005-of-00062.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00005-of-00062.safetensors", + 
"model.layers.4.self_attn.k_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00062.safetensors", + "model.layers.40.input_layernorm.weight": "model-00032-of-00062.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00032-of-00062.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00031-of-00062.safetensors", + "model.layers.41.input_layernorm.weight": "model-00033-of-00062.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00033-of-00062.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00032-of-00062.safetensors", + "model.layers.42.input_layernorm.weight": "model-00034-of-00062.safetensors", + "model.layers.42.mlp.down_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.42.mlp.gate_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.42.mlp.up_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.42.post_attention_layernorm.weight": "model-00034-of-00062.safetensors", + "model.layers.42.self_attn.k_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.42.self_attn.o_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.42.self_attn.q_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.42.self_attn.v_proj.weight": "model-00033-of-00062.safetensors", + "model.layers.43.input_layernorm.weight": "model-00034-of-00062.safetensors", + "model.layers.43.mlp.down_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.mlp.gate_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.mlp.up_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.post_attention_layernorm.weight": "model-00034-of-00062.safetensors", + "model.layers.43.self_attn.k_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.self_attn.o_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.self_attn.q_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.43.self_attn.v_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.44.input_layernorm.weight": "model-00035-of-00062.safetensors", + "model.layers.44.mlp.down_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.44.mlp.gate_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.44.mlp.up_proj.weight": "model-00035-of-00062.safetensors", + 
"model.layers.44.post_attention_layernorm.weight": "model-00035-of-00062.safetensors", + "model.layers.44.self_attn.k_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.44.self_attn.o_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.44.self_attn.q_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.44.self_attn.v_proj.weight": "model-00034-of-00062.safetensors", + "model.layers.45.input_layernorm.weight": "model-00036-of-00062.safetensors", + "model.layers.45.mlp.down_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.45.mlp.gate_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.45.mlp.up_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.45.post_attention_layernorm.weight": "model-00036-of-00062.safetensors", + "model.layers.45.self_attn.k_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.45.self_attn.o_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.45.self_attn.q_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.45.self_attn.v_proj.weight": "model-00035-of-00062.safetensors", + "model.layers.46.input_layernorm.weight": "model-00037-of-00062.safetensors", + "model.layers.46.mlp.down_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.46.mlp.gate_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.46.mlp.up_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.46.post_attention_layernorm.weight": "model-00037-of-00062.safetensors", + "model.layers.46.self_attn.k_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.46.self_attn.o_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.46.self_attn.q_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.46.self_attn.v_proj.weight": "model-00036-of-00062.safetensors", + "model.layers.47.input_layernorm.weight": "model-00037-of-00062.safetensors", + "model.layers.47.mlp.down_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.mlp.gate_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.mlp.up_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.post_attention_layernorm.weight": "model-00037-of-00062.safetensors", + "model.layers.47.self_attn.k_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.self_attn.o_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.self_attn.q_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.47.self_attn.v_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.48.input_layernorm.weight": "model-00038-of-00062.safetensors", + "model.layers.48.mlp.down_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.48.mlp.gate_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.48.mlp.up_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.48.post_attention_layernorm.weight": "model-00038-of-00062.safetensors", + "model.layers.48.self_attn.k_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.48.self_attn.o_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.48.self_attn.q_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.48.self_attn.v_proj.weight": "model-00037-of-00062.safetensors", + "model.layers.49.input_layernorm.weight": "model-00039-of-00062.safetensors", + "model.layers.49.mlp.down_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.49.mlp.gate_proj.weight": "model-00038-of-00062.safetensors", + 
"model.layers.49.mlp.up_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.49.post_attention_layernorm.weight": "model-00039-of-00062.safetensors", + "model.layers.49.self_attn.k_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.49.self_attn.o_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.49.self_attn.q_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.49.self_attn.v_proj.weight": "model-00038-of-00062.safetensors", + "model.layers.5.input_layernorm.weight": "model-00006-of-00062.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00006-of-00062.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00005-of-00062.safetensors", + "model.layers.50.input_layernorm.weight": "model-00040-of-00062.safetensors", + "model.layers.50.mlp.down_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.50.mlp.gate_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.50.mlp.up_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.50.post_attention_layernorm.weight": "model-00040-of-00062.safetensors", + "model.layers.50.self_attn.k_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.50.self_attn.o_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.50.self_attn.q_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.50.self_attn.v_proj.weight": "model-00039-of-00062.safetensors", + "model.layers.51.input_layernorm.weight": "model-00040-of-00062.safetensors", + "model.layers.51.mlp.down_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.mlp.gate_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.mlp.up_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.post_attention_layernorm.weight": "model-00040-of-00062.safetensors", + "model.layers.51.self_attn.k_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.self_attn.o_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.self_attn.q_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.51.self_attn.v_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.52.input_layernorm.weight": "model-00041-of-00062.safetensors", + "model.layers.52.mlp.down_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.52.mlp.gate_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.52.mlp.up_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.52.post_attention_layernorm.weight": "model-00041-of-00062.safetensors", + "model.layers.52.self_attn.k_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.52.self_attn.o_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.52.self_attn.q_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.52.self_attn.v_proj.weight": "model-00040-of-00062.safetensors", + "model.layers.53.input_layernorm.weight": "model-00042-of-00062.safetensors", + "model.layers.53.mlp.down_proj.weight": "model-00042-of-00062.safetensors", + 
"model.layers.53.mlp.gate_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.53.mlp.up_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.53.post_attention_layernorm.weight": "model-00042-of-00062.safetensors", + "model.layers.53.self_attn.k_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.53.self_attn.o_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.53.self_attn.q_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.53.self_attn.v_proj.weight": "model-00041-of-00062.safetensors", + "model.layers.54.input_layernorm.weight": "model-00043-of-00062.safetensors", + "model.layers.54.mlp.down_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.54.mlp.gate_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.54.mlp.up_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.54.post_attention_layernorm.weight": "model-00043-of-00062.safetensors", + "model.layers.54.self_attn.k_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.54.self_attn.o_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.54.self_attn.q_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.54.self_attn.v_proj.weight": "model-00042-of-00062.safetensors", + "model.layers.55.input_layernorm.weight": "model-00043-of-00062.safetensors", + "model.layers.55.mlp.down_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.mlp.gate_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.mlp.up_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.post_attention_layernorm.weight": "model-00043-of-00062.safetensors", + "model.layers.55.self_attn.k_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.self_attn.o_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.self_attn.q_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.55.self_attn.v_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.56.input_layernorm.weight": "model-00044-of-00062.safetensors", + "model.layers.56.mlp.down_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.56.mlp.gate_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.56.mlp.up_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.56.post_attention_layernorm.weight": "model-00044-of-00062.safetensors", + "model.layers.56.self_attn.k_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.56.self_attn.o_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.56.self_attn.q_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.56.self_attn.v_proj.weight": "model-00043-of-00062.safetensors", + "model.layers.57.input_layernorm.weight": "model-00045-of-00062.safetensors", + "model.layers.57.mlp.down_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.57.mlp.gate_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.57.mlp.up_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.57.post_attention_layernorm.weight": "model-00045-of-00062.safetensors", + "model.layers.57.self_attn.k_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.57.self_attn.o_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.57.self_attn.q_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.57.self_attn.v_proj.weight": "model-00044-of-00062.safetensors", + "model.layers.58.input_layernorm.weight": "model-00046-of-00062.safetensors", + 
"model.layers.58.mlp.down_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.58.mlp.gate_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.58.mlp.up_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.58.post_attention_layernorm.weight": "model-00046-of-00062.safetensors", + "model.layers.58.self_attn.k_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.58.self_attn.o_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.58.self_attn.q_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.58.self_attn.v_proj.weight": "model-00045-of-00062.safetensors", + "model.layers.59.input_layernorm.weight": "model-00046-of-00062.safetensors", + "model.layers.59.mlp.down_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.mlp.gate_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.mlp.up_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.post_attention_layernorm.weight": "model-00046-of-00062.safetensors", + "model.layers.59.self_attn.k_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.self_attn.o_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.self_attn.q_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.59.self_attn.v_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.6.input_layernorm.weight": "model-00007-of-00062.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00007-of-00062.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00006-of-00062.safetensors", + "model.layers.60.input_layernorm.weight": "model-00047-of-00062.safetensors", + "model.layers.60.mlp.down_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.60.mlp.gate_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.60.mlp.up_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.60.post_attention_layernorm.weight": "model-00047-of-00062.safetensors", + "model.layers.60.self_attn.k_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.60.self_attn.o_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.60.self_attn.q_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.60.self_attn.v_proj.weight": "model-00046-of-00062.safetensors", + "model.layers.61.input_layernorm.weight": "model-00048-of-00062.safetensors", + "model.layers.61.mlp.down_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.61.mlp.gate_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.61.mlp.up_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.61.post_attention_layernorm.weight": "model-00048-of-00062.safetensors", + "model.layers.61.self_attn.k_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.61.self_attn.o_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.61.self_attn.q_proj.weight": "model-00047-of-00062.safetensors", + "model.layers.61.self_attn.v_proj.weight": "model-00047-of-00062.safetensors", + 
"model.layers.62.input_layernorm.weight": "model-00049-of-00062.safetensors", + "model.layers.62.mlp.down_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.62.mlp.gate_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.62.mlp.up_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.62.post_attention_layernorm.weight": "model-00049-of-00062.safetensors", + "model.layers.62.self_attn.k_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.62.self_attn.o_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.62.self_attn.q_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.62.self_attn.v_proj.weight": "model-00048-of-00062.safetensors", + "model.layers.63.input_layernorm.weight": "model-00049-of-00062.safetensors", + "model.layers.63.mlp.down_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.mlp.gate_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.mlp.up_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.post_attention_layernorm.weight": "model-00049-of-00062.safetensors", + "model.layers.63.self_attn.k_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.self_attn.o_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.self_attn.q_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.63.self_attn.v_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.64.input_layernorm.weight": "model-00050-of-00062.safetensors", + "model.layers.64.mlp.down_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.64.mlp.gate_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.64.mlp.up_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.64.post_attention_layernorm.weight": "model-00050-of-00062.safetensors", + "model.layers.64.self_attn.k_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.64.self_attn.o_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.64.self_attn.q_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.64.self_attn.v_proj.weight": "model-00049-of-00062.safetensors", + "model.layers.65.input_layernorm.weight": "model-00051-of-00062.safetensors", + "model.layers.65.mlp.down_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.65.mlp.gate_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.65.mlp.up_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.65.post_attention_layernorm.weight": "model-00051-of-00062.safetensors", + "model.layers.65.self_attn.k_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.65.self_attn.o_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.65.self_attn.q_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.65.self_attn.v_proj.weight": "model-00050-of-00062.safetensors", + "model.layers.66.input_layernorm.weight": "model-00052-of-00062.safetensors", + "model.layers.66.mlp.down_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.66.mlp.gate_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.66.mlp.up_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.66.post_attention_layernorm.weight": "model-00052-of-00062.safetensors", + "model.layers.66.self_attn.k_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.66.self_attn.o_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.66.self_attn.q_proj.weight": "model-00051-of-00062.safetensors", + 
"model.layers.66.self_attn.v_proj.weight": "model-00051-of-00062.safetensors", + "model.layers.67.input_layernorm.weight": "model-00052-of-00062.safetensors", + "model.layers.67.mlp.down_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.mlp.gate_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.mlp.up_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.post_attention_layernorm.weight": "model-00052-of-00062.safetensors", + "model.layers.67.self_attn.k_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.self_attn.o_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.self_attn.q_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.67.self_attn.v_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.68.input_layernorm.weight": "model-00053-of-00062.safetensors", + "model.layers.68.mlp.down_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.68.mlp.gate_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.68.mlp.up_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.68.post_attention_layernorm.weight": "model-00053-of-00062.safetensors", + "model.layers.68.self_attn.k_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.68.self_attn.o_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.68.self_attn.q_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.68.self_attn.v_proj.weight": "model-00052-of-00062.safetensors", + "model.layers.69.input_layernorm.weight": "model-00054-of-00062.safetensors", + "model.layers.69.mlp.down_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.69.mlp.gate_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.69.mlp.up_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.69.post_attention_layernorm.weight": "model-00054-of-00062.safetensors", + "model.layers.69.self_attn.k_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.69.self_attn.o_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.69.self_attn.q_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.69.self_attn.v_proj.weight": "model-00053-of-00062.safetensors", + "model.layers.7.input_layernorm.weight": "model-00007-of-00062.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00007-of-00062.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.70.input_layernorm.weight": "model-00055-of-00062.safetensors", + "model.layers.70.mlp.down_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.70.mlp.gate_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.70.mlp.up_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.70.post_attention_layernorm.weight": "model-00055-of-00062.safetensors", + "model.layers.70.self_attn.k_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.70.self_attn.o_proj.weight": "model-00054-of-00062.safetensors", + 
"model.layers.70.self_attn.q_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.70.self_attn.v_proj.weight": "model-00054-of-00062.safetensors", + "model.layers.71.input_layernorm.weight": "model-00055-of-00062.safetensors", + "model.layers.71.mlp.down_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.mlp.gate_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.mlp.up_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.post_attention_layernorm.weight": "model-00055-of-00062.safetensors", + "model.layers.71.self_attn.k_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.self_attn.o_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.self_attn.q_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.71.self_attn.v_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.72.input_layernorm.weight": "model-00056-of-00062.safetensors", + "model.layers.72.mlp.down_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.72.mlp.gate_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.72.mlp.up_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.72.post_attention_layernorm.weight": "model-00056-of-00062.safetensors", + "model.layers.72.self_attn.k_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.72.self_attn.o_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.72.self_attn.q_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.72.self_attn.v_proj.weight": "model-00055-of-00062.safetensors", + "model.layers.73.input_layernorm.weight": "model-00057-of-00062.safetensors", + "model.layers.73.mlp.down_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.73.mlp.gate_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.73.mlp.up_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.73.post_attention_layernorm.weight": "model-00057-of-00062.safetensors", + "model.layers.73.self_attn.k_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.73.self_attn.o_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.73.self_attn.q_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.73.self_attn.v_proj.weight": "model-00056-of-00062.safetensors", + "model.layers.74.input_layernorm.weight": "model-00058-of-00062.safetensors", + "model.layers.74.mlp.down_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.74.mlp.gate_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.74.mlp.up_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.74.post_attention_layernorm.weight": "model-00058-of-00062.safetensors", + "model.layers.74.self_attn.k_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.74.self_attn.o_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.74.self_attn.q_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.74.self_attn.v_proj.weight": "model-00057-of-00062.safetensors", + "model.layers.75.input_layernorm.weight": "model-00058-of-00062.safetensors", + "model.layers.75.mlp.down_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.75.mlp.gate_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.75.mlp.up_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.75.post_attention_layernorm.weight": "model-00058-of-00062.safetensors", + "model.layers.75.self_attn.k_proj.weight": "model-00058-of-00062.safetensors", + 
"model.layers.75.self_attn.o_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.75.self_attn.q_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.75.self_attn.v_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.76.input_layernorm.weight": "model-00059-of-00062.safetensors", + "model.layers.76.mlp.down_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.76.mlp.gate_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.76.mlp.up_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.76.post_attention_layernorm.weight": "model-00059-of-00062.safetensors", + "model.layers.76.self_attn.k_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.76.self_attn.o_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.76.self_attn.q_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.76.self_attn.v_proj.weight": "model-00058-of-00062.safetensors", + "model.layers.77.input_layernorm.weight": "model-00060-of-00062.safetensors", + "model.layers.77.mlp.down_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.77.mlp.gate_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.77.mlp.up_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.77.post_attention_layernorm.weight": "model-00060-of-00062.safetensors", + "model.layers.77.self_attn.k_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.77.self_attn.o_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.77.self_attn.q_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.77.self_attn.v_proj.weight": "model-00059-of-00062.safetensors", + "model.layers.78.input_layernorm.weight": "model-00061-of-00062.safetensors", + "model.layers.78.mlp.down_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.78.mlp.gate_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.78.mlp.up_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.78.post_attention_layernorm.weight": "model-00061-of-00062.safetensors", + "model.layers.78.self_attn.k_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.78.self_attn.o_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.78.self_attn.q_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.78.self_attn.v_proj.weight": "model-00060-of-00062.safetensors", + "model.layers.79.input_layernorm.weight": "model-00061-of-00062.safetensors", + "model.layers.79.mlp.down_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.mlp.gate_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.mlp.up_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.post_attention_layernorm.weight": "model-00061-of-00062.safetensors", + "model.layers.79.self_attn.k_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.self_attn.o_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.self_attn.q_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.79.self_attn.v_proj.weight": "model-00061-of-00062.safetensors", + "model.layers.8.input_layernorm.weight": "model-00008-of-00062.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00008-of-00062.safetensors", + 
"model.layers.8.self_attn.k_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00007-of-00062.safetensors", + "model.layers.9.input_layernorm.weight": "model-00009-of-00062.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00009-of-00062.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00009-of-00062.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00008-of-00062.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00008-of-00062.safetensors", + "model.norm.weight": "model-00061-of-00062.safetensors" + } +} diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/special_tokens_map.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..3c1d04911c269b925af977a3151c9704e990e4d0 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/special_tokens_map.json @@ -0,0 +1,23 @@ +{ + "bos_token": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2 --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b +size 17209920 diff --git a/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer_config.json b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..cc6faec1a4b11893eb15700e2a5743aa79fa107e --- /dev/null +++ b/tmp/kud-llama3.3-70b_history_leaf_relearn_klr_gdr_history_10000_512_2e-5/checkpoint-1250-full/tokenizer_config.json @@ -0,0 +1,2064 @@ +{ + "added_tokens_decoder": { + "128000": { + "content": "<|begin_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128001": { + "content": "<|end_of_text|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128002": { + "content": 
"<|reserved_special_token_0|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128003": { + "content": "<|reserved_special_token_1|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128004": { + "content": "<|finetune_right_pad_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128005": { + "content": "<|reserved_special_token_2|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128006": { + "content": "<|start_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128007": { + "content": "<|end_header_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128008": { + "content": "<|eom_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128009": { + "content": "<|eot_id|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128010": { + "content": "<|python_tag|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128011": { + "content": "<|reserved_special_token_3|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128012": { + "content": "<|reserved_special_token_4|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128013": { + "content": "<|reserved_special_token_5|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128014": { + "content": "<|reserved_special_token_6|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128015": { + "content": "<|reserved_special_token_7|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128016": { + "content": "<|reserved_special_token_8|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128017": { + "content": "<|reserved_special_token_9|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128018": { + "content": "<|reserved_special_token_10|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128019": { + "content": "<|reserved_special_token_11|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128020": { + "content": "<|reserved_special_token_12|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128021": { + "content": "<|reserved_special_token_13|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128022": { + "content": "<|reserved_special_token_14|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128023": { + "content": "<|reserved_special_token_15|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": 
false, + "special": true + }, + "128024": { + "content": "<|reserved_special_token_16|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128025": { + "content": "<|reserved_special_token_17|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128026": { + "content": "<|reserved_special_token_18|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128027": { + "content": "<|reserved_special_token_19|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128028": { + "content": "<|reserved_special_token_20|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128029": { + "content": "<|reserved_special_token_21|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128030": { + "content": "<|reserved_special_token_22|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128031": { + "content": "<|reserved_special_token_23|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128032": { + "content": "<|reserved_special_token_24|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128033": { + "content": "<|reserved_special_token_25|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128034": { + "content": "<|reserved_special_token_26|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128035": { + "content": "<|reserved_special_token_27|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128036": { + "content": "<|reserved_special_token_28|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128037": { + "content": "<|reserved_special_token_29|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128038": { + "content": "<|reserved_special_token_30|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128039": { + "content": "<|reserved_special_token_31|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128040": { + "content": "<|reserved_special_token_32|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128041": { + "content": "<|reserved_special_token_33|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128042": { + "content": "<|reserved_special_token_34|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128043": { + "content": "<|reserved_special_token_35|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128044": { + "content": "<|reserved_special_token_36|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + 
}, + "128045": { + "content": "<|reserved_special_token_37|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128046": { + "content": "<|reserved_special_token_38|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128047": { + "content": "<|reserved_special_token_39|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128048": { + "content": "<|reserved_special_token_40|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128049": { + "content": "<|reserved_special_token_41|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128050": { + "content": "<|reserved_special_token_42|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128051": { + "content": "<|reserved_special_token_43|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128052": { + "content": "<|reserved_special_token_44|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128053": { + "content": "<|reserved_special_token_45|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128054": { + "content": "<|reserved_special_token_46|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128055": { + "content": "<|reserved_special_token_47|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128056": { + "content": "<|reserved_special_token_48|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128057": { + "content": "<|reserved_special_token_49|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128058": { + "content": "<|reserved_special_token_50|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128059": { + "content": "<|reserved_special_token_51|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128060": { + "content": "<|reserved_special_token_52|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128061": { + "content": "<|reserved_special_token_53|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128062": { + "content": "<|reserved_special_token_54|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128063": { + "content": "<|reserved_special_token_55|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128064": { + "content": "<|reserved_special_token_56|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128065": { + "content": "<|reserved_special_token_57|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128066": { + 
"content": "<|reserved_special_token_58|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128067": { + "content": "<|reserved_special_token_59|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128068": { + "content": "<|reserved_special_token_60|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128069": { + "content": "<|reserved_special_token_61|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128070": { + "content": "<|reserved_special_token_62|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128071": { + "content": "<|reserved_special_token_63|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128072": { + "content": "<|reserved_special_token_64|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128073": { + "content": "<|reserved_special_token_65|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128074": { + "content": "<|reserved_special_token_66|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128075": { + "content": "<|reserved_special_token_67|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128076": { + "content": "<|reserved_special_token_68|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128077": { + "content": "<|reserved_special_token_69|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128078": { + "content": "<|reserved_special_token_70|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128079": { + "content": "<|reserved_special_token_71|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128080": { + "content": "<|reserved_special_token_72|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128081": { + "content": "<|reserved_special_token_73|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128082": { + "content": "<|reserved_special_token_74|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128083": { + "content": "<|reserved_special_token_75|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128084": { + "content": "<|reserved_special_token_76|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128085": { + "content": "<|reserved_special_token_77|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128086": { + "content": "<|reserved_special_token_78|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128087": { + "content": 
"<|reserved_special_token_79|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128088": { + "content": "<|reserved_special_token_80|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128089": { + "content": "<|reserved_special_token_81|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128090": { + "content": "<|reserved_special_token_82|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128091": { + "content": "<|reserved_special_token_83|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128092": { + "content": "<|reserved_special_token_84|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128093": { + "content": "<|reserved_special_token_85|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128094": { + "content": "<|reserved_special_token_86|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128095": { + "content": "<|reserved_special_token_87|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128096": { + "content": "<|reserved_special_token_88|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128097": { + "content": "<|reserved_special_token_89|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128098": { + "content": "<|reserved_special_token_90|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128099": { + "content": "<|reserved_special_token_91|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128100": { + "content": "<|reserved_special_token_92|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128101": { + "content": "<|reserved_special_token_93|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128102": { + "content": "<|reserved_special_token_94|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128103": { + "content": "<|reserved_special_token_95|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128104": { + "content": "<|reserved_special_token_96|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128105": { + "content": "<|reserved_special_token_97|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128106": { + "content": "<|reserved_special_token_98|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128107": { + "content": "<|reserved_special_token_99|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128108": { + "content": 
"<|reserved_special_token_100|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128109": { + "content": "<|reserved_special_token_101|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128110": { + "content": "<|reserved_special_token_102|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128111": { + "content": "<|reserved_special_token_103|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128112": { + "content": "<|reserved_special_token_104|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128113": { + "content": "<|reserved_special_token_105|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128114": { + "content": "<|reserved_special_token_106|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128115": { + "content": "<|reserved_special_token_107|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128116": { + "content": "<|reserved_special_token_108|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128117": { + "content": "<|reserved_special_token_109|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128118": { + "content": "<|reserved_special_token_110|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128119": { + "content": "<|reserved_special_token_111|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128120": { + "content": "<|reserved_special_token_112|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128121": { + "content": "<|reserved_special_token_113|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128122": { + "content": "<|reserved_special_token_114|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128123": { + "content": "<|reserved_special_token_115|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128124": { + "content": "<|reserved_special_token_116|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128125": { + "content": "<|reserved_special_token_117|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128126": { + "content": "<|reserved_special_token_118|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128127": { + "content": "<|reserved_special_token_119|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128128": { + "content": "<|reserved_special_token_120|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128129": { + "content": 
"<|reserved_special_token_121|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128130": { + "content": "<|reserved_special_token_122|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128131": { + "content": "<|reserved_special_token_123|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128132": { + "content": "<|reserved_special_token_124|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128133": { + "content": "<|reserved_special_token_125|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128134": { + "content": "<|reserved_special_token_126|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128135": { + "content": "<|reserved_special_token_127|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128136": { + "content": "<|reserved_special_token_128|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128137": { + "content": "<|reserved_special_token_129|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128138": { + "content": "<|reserved_special_token_130|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128139": { + "content": "<|reserved_special_token_131|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128140": { + "content": "<|reserved_special_token_132|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128141": { + "content": "<|reserved_special_token_133|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128142": { + "content": "<|reserved_special_token_134|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128143": { + "content": "<|reserved_special_token_135|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128144": { + "content": "<|reserved_special_token_136|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128145": { + "content": "<|reserved_special_token_137|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128146": { + "content": "<|reserved_special_token_138|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128147": { + "content": "<|reserved_special_token_139|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128148": { + "content": "<|reserved_special_token_140|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128149": { + "content": "<|reserved_special_token_141|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128150": { + "content": 
"<|reserved_special_token_142|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128151": { + "content": "<|reserved_special_token_143|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128152": { + "content": "<|reserved_special_token_144|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128153": { + "content": "<|reserved_special_token_145|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128154": { + "content": "<|reserved_special_token_146|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128155": { + "content": "<|reserved_special_token_147|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128156": { + "content": "<|reserved_special_token_148|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128157": { + "content": "<|reserved_special_token_149|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128158": { + "content": "<|reserved_special_token_150|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128159": { + "content": "<|reserved_special_token_151|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128160": { + "content": "<|reserved_special_token_152|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128161": { + "content": "<|reserved_special_token_153|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128162": { + "content": "<|reserved_special_token_154|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128163": { + "content": "<|reserved_special_token_155|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128164": { + "content": "<|reserved_special_token_156|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128165": { + "content": "<|reserved_special_token_157|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128166": { + "content": "<|reserved_special_token_158|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128167": { + "content": "<|reserved_special_token_159|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128168": { + "content": "<|reserved_special_token_160|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128169": { + "content": "<|reserved_special_token_161|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128170": { + "content": "<|reserved_special_token_162|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128171": { + "content": 
"<|reserved_special_token_163|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128172": { + "content": "<|reserved_special_token_164|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128173": { + "content": "<|reserved_special_token_165|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128174": { + "content": "<|reserved_special_token_166|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128175": { + "content": "<|reserved_special_token_167|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128176": { + "content": "<|reserved_special_token_168|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128177": { + "content": "<|reserved_special_token_169|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128178": { + "content": "<|reserved_special_token_170|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128179": { + "content": "<|reserved_special_token_171|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128180": { + "content": "<|reserved_special_token_172|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128181": { + "content": "<|reserved_special_token_173|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128182": { + "content": "<|reserved_special_token_174|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128183": { + "content": "<|reserved_special_token_175|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128184": { + "content": "<|reserved_special_token_176|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128185": { + "content": "<|reserved_special_token_177|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128186": { + "content": "<|reserved_special_token_178|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128187": { + "content": "<|reserved_special_token_179|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128188": { + "content": "<|reserved_special_token_180|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128189": { + "content": "<|reserved_special_token_181|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128190": { + "content": "<|reserved_special_token_182|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128191": { + "content": "<|reserved_special_token_183|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128192": { + "content": 
"<|reserved_special_token_184|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128193": { + "content": "<|reserved_special_token_185|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128194": { + "content": "<|reserved_special_token_186|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128195": { + "content": "<|reserved_special_token_187|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128196": { + "content": "<|reserved_special_token_188|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128197": { + "content": "<|reserved_special_token_189|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128198": { + "content": "<|reserved_special_token_190|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128199": { + "content": "<|reserved_special_token_191|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128200": { + "content": "<|reserved_special_token_192|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128201": { + "content": "<|reserved_special_token_193|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128202": { + "content": "<|reserved_special_token_194|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128203": { + "content": "<|reserved_special_token_195|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128204": { + "content": "<|reserved_special_token_196|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128205": { + "content": "<|reserved_special_token_197|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128206": { + "content": "<|reserved_special_token_198|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128207": { + "content": "<|reserved_special_token_199|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128208": { + "content": "<|reserved_special_token_200|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128209": { + "content": "<|reserved_special_token_201|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128210": { + "content": "<|reserved_special_token_202|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128211": { + "content": "<|reserved_special_token_203|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128212": { + "content": "<|reserved_special_token_204|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128213": { + "content": 
"<|reserved_special_token_205|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128214": { + "content": "<|reserved_special_token_206|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128215": { + "content": "<|reserved_special_token_207|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128216": { + "content": "<|reserved_special_token_208|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128217": { + "content": "<|reserved_special_token_209|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128218": { + "content": "<|reserved_special_token_210|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128219": { + "content": "<|reserved_special_token_211|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128220": { + "content": "<|reserved_special_token_212|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128221": { + "content": "<|reserved_special_token_213|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128222": { + "content": "<|reserved_special_token_214|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128223": { + "content": "<|reserved_special_token_215|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128224": { + "content": "<|reserved_special_token_216|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128225": { + "content": "<|reserved_special_token_217|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128226": { + "content": "<|reserved_special_token_218|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128227": { + "content": "<|reserved_special_token_219|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128228": { + "content": "<|reserved_special_token_220|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128229": { + "content": "<|reserved_special_token_221|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128230": { + "content": "<|reserved_special_token_222|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128231": { + "content": "<|reserved_special_token_223|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128232": { + "content": "<|reserved_special_token_224|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128233": { + "content": "<|reserved_special_token_225|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128234": { + "content": 
"<|reserved_special_token_226|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128235": { + "content": "<|reserved_special_token_227|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128236": { + "content": "<|reserved_special_token_228|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128237": { + "content": "<|reserved_special_token_229|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128238": { + "content": "<|reserved_special_token_230|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128239": { + "content": "<|reserved_special_token_231|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128240": { + "content": "<|reserved_special_token_232|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128241": { + "content": "<|reserved_special_token_233|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128242": { + "content": "<|reserved_special_token_234|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128243": { + "content": "<|reserved_special_token_235|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128244": { + "content": "<|reserved_special_token_236|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128245": { + "content": "<|reserved_special_token_237|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128246": { + "content": "<|reserved_special_token_238|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128247": { + "content": "<|reserved_special_token_239|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128248": { + "content": "<|reserved_special_token_240|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128249": { + "content": "<|reserved_special_token_241|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128250": { + "content": "<|reserved_special_token_242|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128251": { + "content": "<|reserved_special_token_243|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128252": { + "content": "<|reserved_special_token_244|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128253": { + "content": "<|reserved_special_token_245|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128254": { + "content": "<|reserved_special_token_246|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "128255": { + "content": 
"<|reserved_special_token_247|>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + } + }, + "bos_token": "<|begin_of_text|>", + "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n", + "clean_up_tokenization_spaces": true, + "eos_token": "<|eot_id|>", + "extra_special_tokens": {}, + "model_input_names": [ + "input_ids", + "attention_mask" + ], + "model_max_length": 131072, + "pad_token": "<|finetune_right_pad_id|>", + "tokenizer_class": "PreTrainedTokenizer" +}