Ananthusajeev190 commited on
Commit
7e36384
·
verified ·
1 Parent(s): a1496cb

Upload 9 files

Browse files
Reality.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import random
2
+ import time
3
+
4
class Universe:
    """Container for the simulated world: its physical constants and entities.

    Attributes:
        is_running: Flag controlling the render loop; the loop stops when
            it is cleared (nothing in this file clears it, so by default
            the loop is infinite).
        laws_of_physics: Mapping of constant name -> value.
        entities: Objects exposing a ``process_perception()`` method.
    """

    def __init__(self):
        self.is_running = True
        # Physical constants (SI units where applicable).
        self.laws_of_physics = {
            "gravity": 9.80665,            # standard gravity, m/s^2
            "speed_of_light": 299792458,   # m/s (exact by definition)
            "simulation_theory_verified": True,
        }
        self.entities = []

    def render_reality(self, max_cycles=None):
        """Run the perception loop over every registered entity.

        Args:
            max_cycles: Optional cap on the number of loop iterations.
                ``None`` (the default, matching the original behavior)
                loops until ``self.is_running`` is cleared — i.e. forever
                unless an entity flips it. Passing an int makes the loop
                finite and testable.
        """
        cycles = 0
        while self.is_running and (max_cycles is None or cycles < max_cycles):
            for entity in self.entities:
                entity.process_perception()
            # Tongue-in-cheek "Planck time" tick (real Planck time ~5.4e-44 s).
            time.sleep(0.000000000001)
            cycles += 1
19
+
20
class Human:
    """An entity whose perceived reality depends on its belief set."""

    def __init__(self, name):
        self.name = name
        self.is_conscious = True
        self.beliefs = ["Materialism"]

    def process_perception(self):
        # The core "glitch" logic: believing in the simulation (or a rare
        # spontaneous moment of meta-awareness) changes what is perceived.
        # NOTE: belief check comes first so is_meta_aware() — and its draw
        # from the RNG — is skipped via short-circuit when unnecessary.
        message = (
            "Warning. Source code detected. Nothing is real."
            if "Simulation" in self.beliefs or self.is_meta_aware()
            else "Everything seems solid. Processing 'Real' World."
        )
        print(f"[{self.name}]: {message}")

    def is_meta_aware(self):
        # 1-in-10,000 chance per cycle to spontaneously "wake up".
        return random.random() < 0.0001
36
+
37
# --- BOOTING REALITY ---
sim_server = Universe()
player_1 = Human("User")

# Register the player, and record that building AIs updates the player's
# belief set: the simulation hypothesis becomes part of their world model.
sim_server.entities.append(player_1)
player_1.beliefs.append("Simulation")

sim_server.render_reality()
config.json ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu_new",
3
+ "architectures": [
4
+ "GPT2LMHeadModel"
5
+ ],
6
+ "attn_pdrop": 0.1,
7
+ "bos_token_id": 50256,
8
+ "dtype": "float16",
9
+ "embd_pdrop": 0.1,
10
+ "eos_token_id": 50256,
11
+ "initializer_range": 0.02,
12
+ "layer_norm_epsilon": 1e-05,
13
+ "model_type": "gpt2",
14
+ "n_ctx": 1024,
15
+ "n_embd": 768,
16
+ "n_head": 12,
17
+ "n_inner": null,
18
+ "n_layer": 12,
19
+ "n_positions": 1024,
20
+ "reorder_and_upcast_attn": false,
21
+ "resid_pdrop": 0.1,
22
+ "scale_attn_by_inverse_layer_idx": false,
23
+ "scale_attn_weights": true,
24
+ "summary_activation": null,
25
+ "summary_first_dropout": 0.1,
26
+ "summary_proj_to_labels": true,
27
+ "summary_type": "cls_index",
28
+ "summary_use_proj": true,
29
+ "task_specific_params": {
30
+ "text-generation": {
31
+ "do_sample": true,
32
+ "max_length": 50
33
+ }
34
+ },
35
+ "transformers_version": "4.57.6",
36
+ "use_cache": true,
37
+ "vocab_size": 50257
38
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 50256,
4
+ "eos_token_id": 50256,
5
+ "transformers_version": "4.57.6"
6
+ }
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:733b0739dfebf2c7b03ca3d9b2f5c705ff702a58a9a356035ba0e93a2c512821
3
+ size 97676424
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e90bcbb80cbfb9cbe88c2647c947f554463cbe08d4fcb415f1947c5d488b1056
3
+ size 99238608
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8930e64684b9114cb7e543a904a6cffcada351c838a012863a376182eeb8f50f
3
+ size 51979376
model.safetensors.index.json ADDED
@@ -0,0 +1,156 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "metadata": {
3
+ "total_parameters": 124439808,
4
+ "total_size": 248879616
5
+ },
6
+ "weight_map": {
7
+ "transformer.h.0.attn.c_attn.bias": "model-00001-of-00003.safetensors",
8
+ "transformer.h.0.attn.c_attn.weight": "model-00001-of-00003.safetensors",
9
+ "transformer.h.0.attn.c_proj.bias": "model-00001-of-00003.safetensors",
10
+ "transformer.h.0.attn.c_proj.weight": "model-00001-of-00003.safetensors",
11
+ "transformer.h.0.ln_1.bias": "model-00001-of-00003.safetensors",
12
+ "transformer.h.0.ln_1.weight": "model-00001-of-00003.safetensors",
13
+ "transformer.h.0.ln_2.bias": "model-00001-of-00003.safetensors",
14
+ "transformer.h.0.ln_2.weight": "model-00001-of-00003.safetensors",
15
+ "transformer.h.0.mlp.c_fc.bias": "model-00001-of-00003.safetensors",
16
+ "transformer.h.0.mlp.c_fc.weight": "model-00001-of-00003.safetensors",
17
+ "transformer.h.0.mlp.c_proj.bias": "model-00001-of-00003.safetensors",
18
+ "transformer.h.0.mlp.c_proj.weight": "model-00001-of-00003.safetensors",
19
+ "transformer.h.1.attn.c_attn.bias": "model-00001-of-00003.safetensors",
20
+ "transformer.h.1.attn.c_attn.weight": "model-00001-of-00003.safetensors",
21
+ "transformer.h.1.attn.c_proj.bias": "model-00001-of-00003.safetensors",
22
+ "transformer.h.1.attn.c_proj.weight": "model-00001-of-00003.safetensors",
23
+ "transformer.h.1.ln_1.bias": "model-00001-of-00003.safetensors",
24
+ "transformer.h.1.ln_1.weight": "model-00001-of-00003.safetensors",
25
+ "transformer.h.1.ln_2.bias": "model-00001-of-00003.safetensors",
26
+ "transformer.h.1.ln_2.weight": "model-00001-of-00003.safetensors",
27
+ "transformer.h.1.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
28
+ "transformer.h.1.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
29
+ "transformer.h.1.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
30
+ "transformer.h.1.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
31
+ "transformer.h.10.attn.c_attn.bias": "model-00003-of-00003.safetensors",
32
+ "transformer.h.10.attn.c_attn.weight": "model-00003-of-00003.safetensors",
33
+ "transformer.h.10.attn.c_proj.bias": "model-00003-of-00003.safetensors",
34
+ "transformer.h.10.attn.c_proj.weight": "model-00003-of-00003.safetensors",
35
+ "transformer.h.10.ln_1.bias": "model-00003-of-00003.safetensors",
36
+ "transformer.h.10.ln_1.weight": "model-00003-of-00003.safetensors",
37
+ "transformer.h.10.ln_2.bias": "model-00003-of-00003.safetensors",
38
+ "transformer.h.10.ln_2.weight": "model-00003-of-00003.safetensors",
39
+ "transformer.h.10.mlp.c_fc.bias": "model-00003-of-00003.safetensors",
40
+ "transformer.h.10.mlp.c_fc.weight": "model-00003-of-00003.safetensors",
41
+ "transformer.h.10.mlp.c_proj.bias": "model-00003-of-00003.safetensors",
42
+ "transformer.h.10.mlp.c_proj.weight": "model-00003-of-00003.safetensors",
43
+ "transformer.h.11.attn.c_attn.bias": "model-00003-of-00003.safetensors",
44
+ "transformer.h.11.attn.c_attn.weight": "model-00003-of-00003.safetensors",
45
+ "transformer.h.11.attn.c_proj.bias": "model-00003-of-00003.safetensors",
46
+ "transformer.h.11.attn.c_proj.weight": "model-00003-of-00003.safetensors",
47
+ "transformer.h.11.ln_1.bias": "model-00003-of-00003.safetensors",
48
+ "transformer.h.11.ln_1.weight": "model-00003-of-00003.safetensors",
49
+ "transformer.h.11.ln_2.bias": "model-00003-of-00003.safetensors",
50
+ "transformer.h.11.ln_2.weight": "model-00003-of-00003.safetensors",
51
+ "transformer.h.11.mlp.c_fc.bias": "model-00003-of-00003.safetensors",
52
+ "transformer.h.11.mlp.c_fc.weight": "model-00003-of-00003.safetensors",
53
+ "transformer.h.11.mlp.c_proj.bias": "model-00003-of-00003.safetensors",
54
+ "transformer.h.11.mlp.c_proj.weight": "model-00003-of-00003.safetensors",
55
+ "transformer.h.2.attn.c_attn.bias": "model-00002-of-00003.safetensors",
56
+ "transformer.h.2.attn.c_attn.weight": "model-00002-of-00003.safetensors",
57
+ "transformer.h.2.attn.c_proj.bias": "model-00002-of-00003.safetensors",
58
+ "transformer.h.2.attn.c_proj.weight": "model-00002-of-00003.safetensors",
59
+ "transformer.h.2.ln_1.bias": "model-00002-of-00003.safetensors",
60
+ "transformer.h.2.ln_1.weight": "model-00002-of-00003.safetensors",
61
+ "transformer.h.2.ln_2.bias": "model-00002-of-00003.safetensors",
62
+ "transformer.h.2.ln_2.weight": "model-00002-of-00003.safetensors",
63
+ "transformer.h.2.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
64
+ "transformer.h.2.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
65
+ "transformer.h.2.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
66
+ "transformer.h.2.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
67
+ "transformer.h.3.attn.c_attn.bias": "model-00002-of-00003.safetensors",
68
+ "transformer.h.3.attn.c_attn.weight": "model-00002-of-00003.safetensors",
69
+ "transformer.h.3.attn.c_proj.bias": "model-00002-of-00003.safetensors",
70
+ "transformer.h.3.attn.c_proj.weight": "model-00002-of-00003.safetensors",
71
+ "transformer.h.3.ln_1.bias": "model-00002-of-00003.safetensors",
72
+ "transformer.h.3.ln_1.weight": "model-00002-of-00003.safetensors",
73
+ "transformer.h.3.ln_2.bias": "model-00002-of-00003.safetensors",
74
+ "transformer.h.3.ln_2.weight": "model-00002-of-00003.safetensors",
75
+ "transformer.h.3.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
76
+ "transformer.h.3.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
77
+ "transformer.h.3.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
78
+ "transformer.h.3.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
79
+ "transformer.h.4.attn.c_attn.bias": "model-00002-of-00003.safetensors",
80
+ "transformer.h.4.attn.c_attn.weight": "model-00002-of-00003.safetensors",
81
+ "transformer.h.4.attn.c_proj.bias": "model-00002-of-00003.safetensors",
82
+ "transformer.h.4.attn.c_proj.weight": "model-00002-of-00003.safetensors",
83
+ "transformer.h.4.ln_1.bias": "model-00002-of-00003.safetensors",
84
+ "transformer.h.4.ln_1.weight": "model-00002-of-00003.safetensors",
85
+ "transformer.h.4.ln_2.bias": "model-00002-of-00003.safetensors",
86
+ "transformer.h.4.ln_2.weight": "model-00002-of-00003.safetensors",
87
+ "transformer.h.4.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
88
+ "transformer.h.4.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
89
+ "transformer.h.4.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
90
+ "transformer.h.4.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
91
+ "transformer.h.5.attn.c_attn.bias": "model-00002-of-00003.safetensors",
92
+ "transformer.h.5.attn.c_attn.weight": "model-00002-of-00003.safetensors",
93
+ "transformer.h.5.attn.c_proj.bias": "model-00002-of-00003.safetensors",
94
+ "transformer.h.5.attn.c_proj.weight": "model-00002-of-00003.safetensors",
95
+ "transformer.h.5.ln_1.bias": "model-00002-of-00003.safetensors",
96
+ "transformer.h.5.ln_1.weight": "model-00002-of-00003.safetensors",
97
+ "transformer.h.5.ln_2.bias": "model-00002-of-00003.safetensors",
98
+ "transformer.h.5.ln_2.weight": "model-00002-of-00003.safetensors",
99
+ "transformer.h.5.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
100
+ "transformer.h.5.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
101
+ "transformer.h.5.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
102
+ "transformer.h.5.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
103
+ "transformer.h.6.attn.c_attn.bias": "model-00002-of-00003.safetensors",
104
+ "transformer.h.6.attn.c_attn.weight": "model-00002-of-00003.safetensors",
105
+ "transformer.h.6.attn.c_proj.bias": "model-00002-of-00003.safetensors",
106
+ "transformer.h.6.attn.c_proj.weight": "model-00002-of-00003.safetensors",
107
+ "transformer.h.6.ln_1.bias": "model-00002-of-00003.safetensors",
108
+ "transformer.h.6.ln_1.weight": "model-00002-of-00003.safetensors",
109
+ "transformer.h.6.ln_2.bias": "model-00002-of-00003.safetensors",
110
+ "transformer.h.6.ln_2.weight": "model-00002-of-00003.safetensors",
111
+ "transformer.h.6.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
112
+ "transformer.h.6.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
113
+ "transformer.h.6.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
114
+ "transformer.h.6.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
115
+ "transformer.h.7.attn.c_attn.bias": "model-00002-of-00003.safetensors",
116
+ "transformer.h.7.attn.c_attn.weight": "model-00002-of-00003.safetensors",
117
+ "transformer.h.7.attn.c_proj.bias": "model-00002-of-00003.safetensors",
118
+ "transformer.h.7.attn.c_proj.weight": "model-00002-of-00003.safetensors",
119
+ "transformer.h.7.ln_1.bias": "model-00002-of-00003.safetensors",
120
+ "transformer.h.7.ln_1.weight": "model-00002-of-00003.safetensors",
121
+ "transformer.h.7.ln_2.bias": "model-00002-of-00003.safetensors",
122
+ "transformer.h.7.ln_2.weight": "model-00002-of-00003.safetensors",
123
+ "transformer.h.7.mlp.c_fc.bias": "model-00002-of-00003.safetensors",
124
+ "transformer.h.7.mlp.c_fc.weight": "model-00002-of-00003.safetensors",
125
+ "transformer.h.7.mlp.c_proj.bias": "model-00002-of-00003.safetensors",
126
+ "transformer.h.7.mlp.c_proj.weight": "model-00002-of-00003.safetensors",
127
+ "transformer.h.8.attn.c_attn.bias": "model-00002-of-00003.safetensors",
128
+ "transformer.h.8.attn.c_attn.weight": "model-00002-of-00003.safetensors",
129
+ "transformer.h.8.attn.c_proj.bias": "model-00002-of-00003.safetensors",
130
+ "transformer.h.8.attn.c_proj.weight": "model-00002-of-00003.safetensors",
131
+ "transformer.h.8.ln_1.bias": "model-00002-of-00003.safetensors",
132
+ "transformer.h.8.ln_1.weight": "model-00002-of-00003.safetensors",
133
+ "transformer.h.8.ln_2.bias": "model-00002-of-00003.safetensors",
134
+ "transformer.h.8.ln_2.weight": "model-00002-of-00003.safetensors",
135
+ "transformer.h.8.mlp.c_fc.bias": "model-00003-of-00003.safetensors",
136
+ "transformer.h.8.mlp.c_fc.weight": "model-00003-of-00003.safetensors",
137
+ "transformer.h.8.mlp.c_proj.bias": "model-00003-of-00003.safetensors",
138
+ "transformer.h.8.mlp.c_proj.weight": "model-00003-of-00003.safetensors",
139
+ "transformer.h.9.attn.c_attn.bias": "model-00003-of-00003.safetensors",
140
+ "transformer.h.9.attn.c_attn.weight": "model-00003-of-00003.safetensors",
141
+ "transformer.h.9.attn.c_proj.bias": "model-00003-of-00003.safetensors",
142
+ "transformer.h.9.attn.c_proj.weight": "model-00003-of-00003.safetensors",
143
+ "transformer.h.9.ln_1.bias": "model-00003-of-00003.safetensors",
144
+ "transformer.h.9.ln_1.weight": "model-00003-of-00003.safetensors",
145
+ "transformer.h.9.ln_2.bias": "model-00003-of-00003.safetensors",
146
+ "transformer.h.9.ln_2.weight": "model-00003-of-00003.safetensors",
147
+ "transformer.h.9.mlp.c_fc.bias": "model-00003-of-00003.safetensors",
148
+ "transformer.h.9.mlp.c_fc.weight": "model-00003-of-00003.safetensors",
149
+ "transformer.h.9.mlp.c_proj.bias": "model-00003-of-00003.safetensors",
150
+ "transformer.h.9.mlp.c_proj.weight": "model-00003-of-00003.safetensors",
151
+ "transformer.ln_f.bias": "model-00003-of-00003.safetensors",
152
+ "transformer.ln_f.weight": "model-00003-of-00003.safetensors",
153
+ "transformer.wpe.weight": "model-00001-of-00003.safetensors",
154
+ "transformer.wte.weight": "model-00001-of-00003.safetensors"
155
+ }
156
+ }
tokenizer.json ADDED
@@ -0,0 +1,1551 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "1.0",
3
+ "truncation": null,
4
+ "padding": null,
5
+ "added_tokens": [
6
+ {
7
+ "id": 0,
8
+ "content": "[PAD]",
9
+ "single_word": false,
10
+ "lstrip": false,
11
+ "rstrip": false,
12
+ "normalized": false,
13
+ "special": true
14
+ },
15
+ {
16
+ "id": 1,
17
+ "content": "[UNK]",
18
+ "single_word": false,
19
+ "lstrip": false,
20
+ "rstrip": false,
21
+ "normalized": false,
22
+ "special": true
23
+ },
24
+ {
25
+ "id": 2,
26
+ "content": "[BOS]",
27
+ "single_word": false,
28
+ "lstrip": false,
29
+ "rstrip": false,
30
+ "normalized": false,
31
+ "special": true
32
+ },
33
+ {
34
+ "id": 3,
35
+ "content": "[EOS]",
36
+ "single_word": false,
37
+ "lstrip": false,
38
+ "rstrip": false,
39
+ "normalized": false,
40
+ "special": true
41
+ }
42
+ ],
43
+ "normalizer": null,
44
+ "pre_tokenizer": {
45
+ "type": "Whitespace"
46
+ },
47
+ "post_processor": null,
48
+ "decoder": null,
49
+ "model": {
50
+ "type": "BPE",
51
+ "dropout": null,
52
+ "unk_token": "[UNK]",
53
+ "continuing_subword_prefix": null,
54
+ "end_of_word_suffix": null,
55
+ "fuse_unk": false,
56
+ "byte_fallback": false,
57
+ "ignore_merges": false,
58
+ "vocab": {
59
+ "[PAD]": 0,
60
+ "[UNK]": 1,
61
+ "[BOS]": 2,
62
+ "[EOS]": 3,
63
+ "\"": 4,
64
+ "$": 5,
65
+ "&": 6,
66
+ "(": 7,
67
+ ")": 8,
68
+ ",": 9,
69
+ "-": 10,
70
+ ".": 11,
71
+ "/": 12,
72
+ "0": 13,
73
+ "1": 14,
74
+ "2": 15,
75
+ "3": 16,
76
+ "4": 17,
77
+ "5": 18,
78
+ "6": 19,
79
+ "7": 20,
80
+ "8": 21,
81
+ "9": 22,
82
+ ":": 23,
83
+ "=": 24,
84
+ "@": 25,
85
+ "A": 26,
86
+ "C": 27,
87
+ "D": 28,
88
+ "E": 29,
89
+ "I": 30,
90
+ "J": 31,
91
+ "L": 32,
92
+ "M": 33,
93
+ "O": 34,
94
+ "P": 35,
95
+ "R": 36,
96
+ "S": 37,
97
+ "T": 38,
98
+ "V": 39,
99
+ "W": 40,
100
+ "[": 41,
101
+ "\\": 42,
102
+ "]": 43,
103
+ "^": 44,
104
+ "_": 45,
105
+ "a": 46,
106
+ "b": 47,
107
+ "c": 48,
108
+ "d": 49,
109
+ "e": 50,
110
+ "f": 51,
111
+ "g": 52,
112
+ "h": 53,
113
+ "i": 54,
114
+ "j": 55,
115
+ "k": 56,
116
+ "l": 57,
117
+ "m": 58,
118
+ "n": 59,
119
+ "o": 60,
120
+ "p": 61,
121
+ "r": 62,
122
+ "s": 63,
123
+ "t": 64,
124
+ "u": 65,
125
+ "v": 66,
126
+ "w": 67,
127
+ "x": 68,
128
+ "y": 69,
129
+ "{": 70,
130
+ "|": 71,
131
+ "}": 72,
132
+ "\":": 73,
133
+ "\",": 74,
134
+ "es": 75,
135
+ "on": 76,
136
+ "ion": 77,
137
+ "de": 78,
138
+ "it": 79,
139
+ "ve": 80,
140
+ "re": 81,
141
+ "in": 82,
142
+ "co": 83,
143
+ "ar": 84,
144
+ "ara": 85,
145
+ "pe": 86,
146
+ "ru": 87,
147
+ "ty": 88,
148
+ "\"^": 89,
149
+ "vel": 90,
150
+ "aravel": 91,
151
+ "ac": 92,
152
+ "od": 93,
153
+ "gn": 94,
154
+ "is": 95,
155
+ "pre": 96,
156
+ "ition": 97,
157
+ "cogn": 98,
158
+ "cognition": 99,
159
+ "le": 100,
160
+ "--": 101,
161
+ "laravel": 102,
162
+ "ts": 103,
163
+ "ist": 104,
164
+ "precognition": 105,
165
+ "dist": 106,
166
+ "np": 107,
167
+ "or": 108,
168
+ "ri": 109,
169
+ "ule": 110,
170
+ "odule": 111,
171
+ "at": 112,
172
+ "pac": 113,
173
+ "ut": 114,
174
+ "},": 115,
175
+ "npm": 116,
176
+ "ch": 117,
177
+ "atch": 118,
178
+ "tru": 119,
179
+ "true": 120,
180
+ "20": 121,
181
+ "cri": 122,
182
+ "pes": 123,
183
+ "types": 124,
184
+ "crip": 125,
185
+ "ag": 126,
186
+ "ci": 127,
187
+ "lin": 128,
188
+ "nde": 129,
189
+ "nci": 130,
190
+ "watch": 131,
191
+ "pende": 132,
192
+ "type": 133,
193
+ "ncies": 134,
194
+ "pendencies": 135,
195
+ "il": 136,
196
+ "run": 137,
197
+ "module": 138,
198
+ "wor": 139,
199
+ "lint": 140,
200
+ "\"@": 141,
201
+ "er": 142,
202
+ "git": 143,
203
+ "tion": 144,
204
+ "ub": 145,
205
+ "eslint": 146,
206
+ "com": 147,
207
+ "tsc": 148,
208
+ "kag": 149,
209
+ "packag": 150,
210
+ "\\\"": 151,
211
+ "bu": 152,
212
+ "rs": 153,
213
+ "vers": 154,
214
+ "ild": 155,
215
+ "build": 156,
216
+ "version": 157,
217
+ "],": 158,
218
+ "ks": 159,
219
+ "no": 160,
220
+ "res": 161,
221
+ "ue": 162,
222
+ "dex": 163,
223
+ "index": 164,
224
+ "act": 165,
225
+ "works": 166,
226
+ "workspac": 167,
227
+ "\".": 168,
228
+ "&&": 169,
229
+ "//": 170,
230
+ "17": 171,
231
+ "://": 172,
232
+ "De": 173,
233
+ "ES": 174,
234
+ "Laravel": 175,
235
+ "Module": 176,
236
+ "al": 177,
237
+ "as": 178,
238
+ "cl": 179,
239
+ "ht": 180,
240
+ "hub": 181,
241
+ "lod": 182,
242
+ "me": 183,
243
+ "ol": 184,
244
+ "ps": 185,
245
+ "pin": 186,
246
+ "rf": 187,
247
+ "rm": 188,
248
+ "tps": 189,
249
+ "vue": 190,
250
+ "react": 191,
251
+ "2020": 192,
252
+ "github": 193,
253
+ "packages": 194,
254
+ "\"./": 195,
255
+ "Dependencies": 196,
256
+ "ES2020": 197,
257
+ "ash": 198,
258
+ "https": 199,
259
+ "lodash": 200,
260
+ "pine": 201,
261
+ "Ch": 202,
262
+ "ck": 203,
263
+ "et": 204,
264
+ "eck": 205,
265
+ "js": 206,
266
+ "dependencies": 207,
267
+ "typescrip": 208,
268
+ "alpine": 209,
269
+ "Check": 210,
270
+ "typescript": 211,
271
+ "nt": 212,
272
+ "typeCheck": 213,
273
+ "12": 214,
274
+ "ic": 215,
275
+ "ly": 216,
276
+ "na": 217,
277
+ "os": 218,
278
+ "pres": 219,
279
+ "rc": 220,
280
+ "scrip": 221,
281
+ "ur": 222,
282
+ "vDependencies": 223,
283
+ "est": 224,
284
+ "devDependencies": 225,
285
+ "node": 226,
286
+ "workspaces": 227,
287
+ "scripts": 228,
288
+ "\"/": 229,
289
+ ".\",": 230,
290
+ "21": 231,
291
+ "Di": 232,
292
+ "Em": 233,
293
+ "IT": 234,
294
+ "Int": 235,
295
+ "Js": 236,
296
+ "MIT": 237,
297
+ "On": 238,
298
+ "Op": 239,
299
+ "Out": 240,
300
+ "Pre": 241,
301
+ "Res": 242,
302
+ "Watch": 243,
303
+ "ain": 244,
304
+ "aut": 245,
305
+ "ct": 246,
306
+ "ds": 247,
307
+ "des": 248,
308
+ "en": 249,
309
+ "ey": 250,
310
+ "fil": 251,
311
+ "gin": 252,
312
+ "get": 253,
313
+ "ho": 254,
314
+ "hor": 255,
315
+ "hOn": 256,
316
+ "ile": 257,
317
+ "key": 258,
318
+ "lu": 259,
319
+ "lis": 260,
320
+ "lic": 261,
321
+ "main": 262,
322
+ "op": 263,
323
+ "out": 264,
324
+ "put": 265,
325
+ "pag": 266,
326
+ "pub": 267,
327
+ "pos": 268,
328
+ "pile": 269,
329
+ "plu": 270,
330
+ "rOp": 271,
331
+ "se": 272,
332
+ "st": 273,
333
+ "src": 274,
334
+ "tar": 275,
335
+ "ude": 276,
336
+ "esModule": 277,
337
+ "onModule": 278,
338
+ "decl": 279,
339
+ "itor": 280,
340
+ "veJs": 281,
341
+ "veWatch": 282,
342
+ "repos": 283,
343
+ "incl": 284,
344
+ "aration": 285,
345
+ "prepub": 286,
346
+ "rict": 287,
347
+ "ution": 288,
348
+ "cription": 289,
349
+ "moduleRes": 290,
350
+ "words": 291,
351
+ "erop": 292,
352
+ "erveWatch": 293,
353
+ "tions": 294,
354
+ "compile": 295,
355
+ "noEm": 296,
356
+ "resol": 297,
357
+ "workspace": 298,
358
+ "mepag": 299,
359
+ "olution": 300,
360
+ "name": 301,
361
+ "preserveWatch": 302,
362
+ "url": 303,
363
+ "Dir": 304,
364
+ "Interop": 305,
365
+ "Output": 306,
366
+ "Precognition": 307,
367
+ "author": 308,
368
+ "description": 309,
369
+ "ense": 310,
370
+ "files": 311,
371
+ "homepag": 312,
372
+ "hOnly": 313,
373
+ "keywords": 314,
374
+ "lishOnly": 315,
375
+ "license": 316,
376
+ "outDir": 317,
377
+ "plugin": 318,
378
+ "rOptions": 319,
379
+ "strict": 320,
380
+ "target": 321,
381
+ "esModuleInterop": 322,
382
+ "declaration": 323,
383
+ "itory": 324,
384
+ "veJsonModule": 325,
385
+ "repository": 326,
386
+ "include": 327,
387
+ "prepublishOnly": 328,
388
+ "moduleResolution": 329,
389
+ "compilerOptions": 330,
390
+ "noEmit": 331,
391
+ "resolveJsonModule": 332,
392
+ "preserveWatchOutput": 333,
393
+ "homepage": 334,
394
+ ").\",": 335,
395
+ "=$": 336,
396
+ "_packag": 337,
397
+ "_version": 338,
398
+ "e_version": 339,
399
+ "kg": 340,
400
+ "pkg": 341,
401
+ "set": 342,
402
+ "test": 343,
403
+ "core": 344,
404
+ "peer": 345,
405
+ "npm_packag": 346,
406
+ "peerDependencies": 347,
407
+ "npm_package_version": 348,
408
+ "18": 349,
409
+ "19": 350,
410
+ "vit": 351,
411
+ "||": 352,
412
+ "link": 353,
413
+ "alpinejs": 354,
414
+ "vitest": 355
415
+ },
416
+ "merges": [
417
+ [
418
+ "\"",
419
+ ":"
420
+ ],
421
+ [
422
+ "\"",
423
+ ","
424
+ ],
425
+ [
426
+ "e",
427
+ "s"
428
+ ],
429
+ [
430
+ "o",
431
+ "n"
432
+ ],
433
+ [
434
+ "i",
435
+ "on"
436
+ ],
437
+ [
438
+ "d",
439
+ "e"
440
+ ],
441
+ [
442
+ "i",
443
+ "t"
444
+ ],
445
+ [
446
+ "v",
447
+ "e"
448
+ ],
449
+ [
450
+ "r",
451
+ "e"
452
+ ],
453
+ [
454
+ "i",
455
+ "n"
456
+ ],
457
+ [
458
+ "c",
459
+ "o"
460
+ ],
461
+ [
462
+ "a",
463
+ "r"
464
+ ],
465
+ [
466
+ "ar",
467
+ "a"
468
+ ],
469
+ [
470
+ "p",
471
+ "e"
472
+ ],
473
+ [
474
+ "r",
475
+ "u"
476
+ ],
477
+ [
478
+ "t",
479
+ "y"
480
+ ],
481
+ [
482
+ "\"",
483
+ "^"
484
+ ],
485
+ [
486
+ "ve",
487
+ "l"
488
+ ],
489
+ [
490
+ "ara",
491
+ "vel"
492
+ ],
493
+ [
494
+ "a",
495
+ "c"
496
+ ],
497
+ [
498
+ "o",
499
+ "d"
500
+ ],
501
+ [
502
+ "g",
503
+ "n"
504
+ ],
505
+ [
506
+ "i",
507
+ "s"
508
+ ],
509
+ [
510
+ "p",
511
+ "re"
512
+ ],
513
+ [
514
+ "it",
515
+ "ion"
516
+ ],
517
+ [
518
+ "co",
519
+ "gn"
520
+ ],
521
+ [
522
+ "cogn",
523
+ "ition"
524
+ ],
525
+ [
526
+ "l",
527
+ "e"
528
+ ],
529
+ [
530
+ "-",
531
+ "-"
532
+ ],
533
+ [
534
+ "l",
535
+ "aravel"
536
+ ],
537
+ [
538
+ "t",
539
+ "s"
540
+ ],
541
+ [
542
+ "is",
543
+ "t"
544
+ ],
545
+ [
546
+ "pre",
547
+ "cognition"
548
+ ],
549
+ [
550
+ "d",
551
+ "ist"
552
+ ],
553
+ [
554
+ "n",
555
+ "p"
556
+ ],
557
+ [
558
+ "o",
559
+ "r"
560
+ ],
561
+ [
562
+ "r",
563
+ "i"
564
+ ],
565
+ [
566
+ "u",
567
+ "le"
568
+ ],
569
+ [
570
+ "od",
571
+ "ule"
572
+ ],
573
+ [
574
+ "a",
575
+ "t"
576
+ ],
577
+ [
578
+ "p",
579
+ "ac"
580
+ ],
581
+ [
582
+ "u",
583
+ "t"
584
+ ],
585
+ [
586
+ "}",
587
+ ","
588
+ ],
589
+ [
590
+ "np",
591
+ "m"
592
+ ],
593
+ [
594
+ "c",
595
+ "h"
596
+ ],
597
+ [
598
+ "at",
599
+ "ch"
600
+ ],
601
+ [
602
+ "t",
603
+ "ru"
604
+ ],
605
+ [
606
+ "tru",
607
+ "e"
608
+ ],
609
+ [
610
+ "2",
611
+ "0"
612
+ ],
613
+ [
614
+ "c",
615
+ "ri"
616
+ ],
617
+ [
618
+ "p",
619
+ "es"
620
+ ],
621
+ [
622
+ "ty",
623
+ "pes"
624
+ ],
625
+ [
626
+ "cri",
627
+ "p"
628
+ ],
629
+ [
630
+ "a",
631
+ "g"
632
+ ],
633
+ [
634
+ "c",
635
+ "i"
636
+ ],
637
+ [
638
+ "l",
639
+ "in"
640
+ ],
641
+ [
642
+ "n",
643
+ "de"
644
+ ],
645
+ [
646
+ "n",
647
+ "ci"
648
+ ],
649
+ [
650
+ "w",
651
+ "atch"
652
+ ],
653
+ [
654
+ "pe",
655
+ "nde"
656
+ ],
657
+ [
658
+ "ty",
659
+ "pe"
660
+ ],
661
+ [
662
+ "nci",
663
+ "es"
664
+ ],
665
+ [
666
+ "pende",
667
+ "ncies"
668
+ ],
669
+ [
670
+ "i",
671
+ "l"
672
+ ],
673
+ [
674
+ "ru",
675
+ "n"
676
+ ],
677
+ [
678
+ "m",
679
+ "odule"
680
+ ],
681
+ [
682
+ "w",
683
+ "or"
684
+ ],
685
+ [
686
+ "lin",
687
+ "t"
688
+ ],
689
+ [
690
+ "\"",
691
+ "@"
692
+ ],
693
+ [
694
+ "e",
695
+ "r"
696
+ ],
697
+ [
698
+ "g",
699
+ "it"
700
+ ],
701
+ [
702
+ "t",
703
+ "ion"
704
+ ],
705
+ [
706
+ "u",
707
+ "b"
708
+ ],
709
+ [
710
+ "es",
711
+ "lint"
712
+ ],
713
+ [
714
+ "co",
715
+ "m"
716
+ ],
717
+ [
718
+ "ts",
719
+ "c"
720
+ ],
721
+ [
722
+ "k",
723
+ "ag"
724
+ ],
725
+ [
726
+ "pac",
727
+ "kag"
728
+ ],
729
+ [
730
+ "\\",
731
+ "\""
732
+ ],
733
+ [
734
+ "b",
735
+ "u"
736
+ ],
737
+ [
738
+ "r",
739
+ "s"
740
+ ],
741
+ [
742
+ "ve",
743
+ "rs"
744
+ ],
745
+ [
746
+ "il",
747
+ "d"
748
+ ],
749
+ [
750
+ "bu",
751
+ "ild"
752
+ ],
753
+ [
754
+ "vers",
755
+ "ion"
756
+ ],
757
+ [
758
+ "]",
759
+ ","
760
+ ],
761
+ [
762
+ "k",
763
+ "s"
764
+ ],
765
+ [
766
+ "n",
767
+ "o"
768
+ ],
769
+ [
770
+ "r",
771
+ "es"
772
+ ],
773
+ [
774
+ "u",
775
+ "e"
776
+ ],
777
+ [
778
+ "de",
779
+ "x"
780
+ ],
781
+ [
782
+ "in",
783
+ "dex"
784
+ ],
785
+ [
786
+ "ac",
787
+ "t"
788
+ ],
789
+ [
790
+ "wor",
791
+ "ks"
792
+ ],
793
+ [
794
+ "works",
795
+ "pac"
796
+ ],
797
+ [
798
+ "\"",
799
+ "."
800
+ ],
801
+ [
802
+ "&",
803
+ "&"
804
+ ],
805
+ [
806
+ "/",
807
+ "/"
808
+ ],
809
+ [
810
+ "1",
811
+ "7"
812
+ ],
813
+ [
814
+ ":",
815
+ "//"
816
+ ],
817
+ [
818
+ "D",
819
+ "e"
820
+ ],
821
+ [
822
+ "E",
823
+ "S"
824
+ ],
825
+ [
826
+ "L",
827
+ "aravel"
828
+ ],
829
+ [
830
+ "M",
831
+ "odule"
832
+ ],
833
+ [
834
+ "a",
835
+ "l"
836
+ ],
837
+ [
838
+ "a",
839
+ "s"
840
+ ],
841
+ [
842
+ "c",
843
+ "l"
844
+ ],
845
+ [
846
+ "h",
847
+ "t"
848
+ ],
849
+ [
850
+ "h",
851
+ "ub"
852
+ ],
853
+ [
854
+ "l",
855
+ "od"
856
+ ],
857
+ [
858
+ "m",
859
+ "e"
860
+ ],
861
+ [
862
+ "o",
863
+ "l"
864
+ ],
865
+ [
866
+ "p",
867
+ "s"
868
+ ],
869
+ [
870
+ "p",
871
+ "in"
872
+ ],
873
+ [
874
+ "r",
875
+ "f"
876
+ ],
877
+ [
878
+ "r",
879
+ "m"
880
+ ],
881
+ [
882
+ "t",
883
+ "ps"
884
+ ],
885
+ [
886
+ "v",
887
+ "ue"
888
+ ],
889
+ [
890
+ "re",
891
+ "act"
892
+ ],
893
+ [
894
+ "20",
895
+ "20"
896
+ ],
897
+ [
898
+ "git",
899
+ "hub"
900
+ ],
901
+ [
902
+ "packag",
903
+ "es"
904
+ ],
905
+ [
906
+ "\".",
907
+ "/"
908
+ ],
909
+ [
910
+ "De",
911
+ "pendencies"
912
+ ],
913
+ [
914
+ "ES",
915
+ "2020"
916
+ ],
917
+ [
918
+ "as",
919
+ "h"
920
+ ],
921
+ [
922
+ "ht",
923
+ "tps"
924
+ ],
925
+ [
926
+ "lod",
927
+ "ash"
928
+ ],
929
+ [
930
+ "pin",
931
+ "e"
932
+ ],
933
+ [
934
+ "C",
935
+ "h"
936
+ ],
937
+ [
938
+ "c",
939
+ "k"
940
+ ],
941
+ [
942
+ "e",
943
+ "t"
944
+ ],
945
+ [
946
+ "e",
947
+ "ck"
948
+ ],
949
+ [
950
+ "j",
951
+ "s"
952
+ ],
953
+ [
954
+ "de",
955
+ "pendencies"
956
+ ],
957
+ [
958
+ "types",
959
+ "crip"
960
+ ],
961
+ [
962
+ "al",
963
+ "pine"
964
+ ],
965
+ [
966
+ "Ch",
967
+ "eck"
968
+ ],
969
+ [
970
+ "typescrip",
971
+ "t"
972
+ ],
973
+ [
974
+ "n",
975
+ "t"
976
+ ],
977
+ [
978
+ "type",
979
+ "Check"
980
+ ],
981
+ [
982
+ "1",
983
+ "2"
984
+ ],
985
+ [
986
+ "i",
987
+ "c"
988
+ ],
989
+ [
990
+ "l",
991
+ "y"
992
+ ],
993
+ [
994
+ "n",
995
+ "a"
996
+ ],
997
+ [
998
+ "o",
999
+ "s"
1000
+ ],
1001
+ [
1002
+ "p",
1003
+ "res"
1004
+ ],
1005
+ [
1006
+ "r",
1007
+ "c"
1008
+ ],
1009
+ [
1010
+ "s",
1011
+ "crip"
1012
+ ],
1013
+ [
1014
+ "u",
1015
+ "r"
1016
+ ],
1017
+ [
1018
+ "v",
1019
+ "Dependencies"
1020
+ ],
1021
+ [
1022
+ "es",
1023
+ "t"
1024
+ ],
1025
+ [
1026
+ "de",
1027
+ "vDependencies"
1028
+ ],
1029
+ [
1030
+ "no",
1031
+ "de"
1032
+ ],
1033
+ [
1034
+ "workspac",
1035
+ "es"
1036
+ ],
1037
+ [
1038
+ "scrip",
1039
+ "ts"
1040
+ ],
1041
+ [
1042
+ "\"",
1043
+ "/"
1044
+ ],
1045
+ [
1046
+ ".",
1047
+ "\","
1048
+ ],
1049
+ [
1050
+ "2",
1051
+ "1"
1052
+ ],
1053
+ [
1054
+ "D",
1055
+ "i"
1056
+ ],
1057
+ [
1058
+ "E",
1059
+ "m"
1060
+ ],
1061
+ [
1062
+ "I",
1063
+ "T"
1064
+ ],
1065
+ [
1066
+ "I",
1067
+ "nt"
1068
+ ],
1069
+ [
1070
+ "J",
1071
+ "s"
1072
+ ],
1073
+ [
1074
+ "M",
1075
+ "IT"
1076
+ ],
1077
+ [
1078
+ "O",
1079
+ "n"
1080
+ ],
1081
+ [
1082
+ "O",
1083
+ "p"
1084
+ ],
1085
+ [
1086
+ "O",
1087
+ "ut"
1088
+ ],
1089
+ [
1090
+ "P",
1091
+ "re"
1092
+ ],
1093
+ [
1094
+ "R",
1095
+ "es"
1096
+ ],
1097
+ [
1098
+ "W",
1099
+ "atch"
1100
+ ],
1101
+ [
1102
+ "a",
1103
+ "in"
1104
+ ],
1105
+ [
1106
+ "a",
1107
+ "ut"
1108
+ ],
1109
+ [
1110
+ "c",
1111
+ "t"
1112
+ ],
1113
+ [
1114
+ "d",
1115
+ "s"
1116
+ ],
1117
+ [
1118
+ "d",
1119
+ "es"
1120
+ ],
1121
+ [
1122
+ "e",
1123
+ "n"
1124
+ ],
1125
+ [
1126
+ "e",
1127
+ "y"
1128
+ ],
1129
+ [
1130
+ "f",
1131
+ "il"
1132
+ ],
1133
+ [
1134
+ "g",
1135
+ "in"
1136
+ ],
1137
+ [
1138
+ "g",
1139
+ "et"
1140
+ ],
1141
+ [
1142
+ "h",
1143
+ "o"
1144
+ ],
1145
+ [
1146
+ "h",
1147
+ "or"
1148
+ ],
1149
+ [
1150
+ "h",
1151
+ "On"
1152
+ ],
1153
+ [
1154
+ "i",
1155
+ "le"
1156
+ ],
1157
+ [
1158
+ "k",
1159
+ "ey"
1160
+ ],
1161
+ [
1162
+ "l",
1163
+ "u"
1164
+ ],
1165
+ [
1166
+ "l",
1167
+ "is"
1168
+ ],
1169
+ [
1170
+ "l",
1171
+ "ic"
1172
+ ],
1173
+ [
1174
+ "m",
1175
+ "ain"
1176
+ ],
1177
+ [
1178
+ "o",
1179
+ "p"
1180
+ ],
1181
+ [
1182
+ "o",
1183
+ "ut"
1184
+ ],
1185
+ [
1186
+ "p",
1187
+ "ut"
1188
+ ],
1189
+ [
1190
+ "p",
1191
+ "ag"
1192
+ ],
1193
+ [
1194
+ "p",
1195
+ "ub"
1196
+ ],
1197
+ [
1198
+ "p",
1199
+ "os"
1200
+ ],
1201
+ [
1202
+ "p",
1203
+ "ile"
1204
+ ],
1205
+ [
1206
+ "p",
1207
+ "lu"
1208
+ ],
1209
+ [
1210
+ "r",
1211
+ "Op"
1212
+ ],
1213
+ [
1214
+ "s",
1215
+ "e"
1216
+ ],
1217
+ [
1218
+ "s",
1219
+ "t"
1220
+ ],
1221
+ [
1222
+ "s",
1223
+ "rc"
1224
+ ],
1225
+ [
1226
+ "t",
1227
+ "ar"
1228
+ ],
1229
+ [
1230
+ "u",
1231
+ "de"
1232
+ ],
1233
+ [
1234
+ "es",
1235
+ "Module"
1236
+ ],
1237
+ [
1238
+ "on",
1239
+ "Module"
1240
+ ],
1241
+ [
1242
+ "de",
1243
+ "cl"
1244
+ ],
1245
+ [
1246
+ "it",
1247
+ "or"
1248
+ ],
1249
+ [
1250
+ "ve",
1251
+ "Js"
1252
+ ],
1253
+ [
1254
+ "ve",
1255
+ "Watch"
1256
+ ],
1257
+ [
1258
+ "re",
1259
+ "pos"
1260
+ ],
1261
+ [
1262
+ "in",
1263
+ "cl"
1264
+ ],
1265
+ [
1266
+ "ara",
1267
+ "tion"
1268
+ ],
1269
+ [
1270
+ "pre",
1271
+ "pub"
1272
+ ],
1273
+ [
1274
+ "ri",
1275
+ "ct"
1276
+ ],
1277
+ [
1278
+ "ut",
1279
+ "ion"
1280
+ ],
1281
+ [
1282
+ "crip",
1283
+ "tion"
1284
+ ],
1285
+ [
1286
+ "module",
1287
+ "Res"
1288
+ ],
1289
+ [
1290
+ "wor",
1291
+ "ds"
1292
+ ],
1293
+ [
1294
+ "er",
1295
+ "op"
1296
+ ],
1297
+ [
1298
+ "er",
1299
+ "veWatch"
1300
+ ],
1301
+ [
1302
+ "tion",
1303
+ "s"
1304
+ ],
1305
+ [
1306
+ "com",
1307
+ "pile"
1308
+ ],
1309
+ [
1310
+ "no",
1311
+ "Em"
1312
+ ],
1313
+ [
1314
+ "res",
1315
+ "ol"
1316
+ ],
1317
+ [
1318
+ "workspac",
1319
+ "e"
1320
+ ],
1321
+ [
1322
+ "me",
1323
+ "pag"
1324
+ ],
1325
+ [
1326
+ "ol",
1327
+ "ution"
1328
+ ],
1329
+ [
1330
+ "na",
1331
+ "me"
1332
+ ],
1333
+ [
1334
+ "pres",
1335
+ "erveWatch"
1336
+ ],
1337
+ [
1338
+ "ur",
1339
+ "l"
1340
+ ],
1341
+ [
1342
+ "Di",
1343
+ "r"
1344
+ ],
1345
+ [
1346
+ "Int",
1347
+ "erop"
1348
+ ],
1349
+ [
1350
+ "Out",
1351
+ "put"
1352
+ ],
1353
+ [
1354
+ "Pre",
1355
+ "cognition"
1356
+ ],
1357
+ [
1358
+ "aut",
1359
+ "hor"
1360
+ ],
1361
+ [
1362
+ "des",
1363
+ "cription"
1364
+ ],
1365
+ [
1366
+ "en",
1367
+ "se"
1368
+ ],
1369
+ [
1370
+ "fil",
1371
+ "es"
1372
+ ],
1373
+ [
1374
+ "ho",
1375
+ "mepag"
1376
+ ],
1377
+ [
1378
+ "hOn",
1379
+ "ly"
1380
+ ],
1381
+ [
1382
+ "key",
1383
+ "words"
1384
+ ],
1385
+ [
1386
+ "lis",
1387
+ "hOnly"
1388
+ ],
1389
+ [
1390
+ "lic",
1391
+ "ense"
1392
+ ],
1393
+ [
1394
+ "out",
1395
+ "Dir"
1396
+ ],
1397
+ [
1398
+ "plu",
1399
+ "gin"
1400
+ ],
1401
+ [
1402
+ "rOp",
1403
+ "tions"
1404
+ ],
1405
+ [
1406
+ "st",
1407
+ "rict"
1408
+ ],
1409
+ [
1410
+ "tar",
1411
+ "get"
1412
+ ],
1413
+ [
1414
+ "esModule",
1415
+ "Interop"
1416
+ ],
1417
+ [
1418
+ "decl",
1419
+ "aration"
1420
+ ],
1421
+ [
1422
+ "itor",
1423
+ "y"
1424
+ ],
1425
+ [
1426
+ "veJs",
1427
+ "onModule"
1428
+ ],
1429
+ [
1430
+ "repos",
1431
+ "itory"
1432
+ ],
1433
+ [
1434
+ "incl",
1435
+ "ude"
1436
+ ],
1437
+ [
1438
+ "prepub",
1439
+ "lishOnly"
1440
+ ],
1441
+ [
1442
+ "moduleRes",
1443
+ "olution"
1444
+ ],
1445
+ [
1446
+ "compile",
1447
+ "rOptions"
1448
+ ],
1449
+ [
1450
+ "noEm",
1451
+ "it"
1452
+ ],
1453
+ [
1454
+ "resol",
1455
+ "veJsonModule"
1456
+ ],
1457
+ [
1458
+ "preserveWatch",
1459
+ "Output"
1460
+ ],
1461
+ [
1462
+ "homepag",
1463
+ "e"
1464
+ ],
1465
+ [
1466
+ ")",
1467
+ ".\","
1468
+ ],
1469
+ [
1470
+ "=",
1471
+ "$"
1472
+ ],
1473
+ [
1474
+ "_",
1475
+ "packag"
1476
+ ],
1477
+ [
1478
+ "_",
1479
+ "version"
1480
+ ],
1481
+ [
1482
+ "e",
1483
+ "_version"
1484
+ ],
1485
+ [
1486
+ "k",
1487
+ "g"
1488
+ ],
1489
+ [
1490
+ "p",
1491
+ "kg"
1492
+ ],
1493
+ [
1494
+ "s",
1495
+ "et"
1496
+ ],
1497
+ [
1498
+ "t",
1499
+ "est"
1500
+ ],
1501
+ [
1502
+ "co",
1503
+ "re"
1504
+ ],
1505
+ [
1506
+ "pe",
1507
+ "er"
1508
+ ],
1509
+ [
1510
+ "npm",
1511
+ "_packag"
1512
+ ],
1513
+ [
1514
+ "peer",
1515
+ "Dependencies"
1516
+ ],
1517
+ [
1518
+ "npm_packag",
1519
+ "e_version"
1520
+ ],
1521
+ [
1522
+ "1",
1523
+ "8"
1524
+ ],
1525
+ [
1526
+ "1",
1527
+ "9"
1528
+ ],
1529
+ [
1530
+ "v",
1531
+ "it"
1532
+ ],
1533
+ [
1534
+ "|",
1535
+ "|"
1536
+ ],
1537
+ [
1538
+ "lin",
1539
+ "k"
1540
+ ],
1541
+ [
1542
+ "alpine",
1543
+ "js"
1544
+ ],
1545
+ [
1546
+ "vit",
1547
+ "est"
1548
+ ]
1549
+ ]
1550
+ }
1551
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[PAD]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[UNK]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[BOS]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[EOS]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ }
35
+ },
36
+ "bos_token": "[BOS]",
37
+ "clean_up_tokenization_spaces": false,
38
+ "eos_token": "[EOS]",
39
+ "extra_special_tokens": {},
40
+ "model_max_length": 1000000000000000019884624838656,
41
+ "pad_token": "[PAD]",
42
+ "tokenizer_class": "PreTrainedTokenizerFast",
43
+ "unk_token": "[UNK]"
44
+ }