update viz util

Cong Thanh Vu 2023-06-24 02:59:32 +00:00
parent 53443a4026
commit 45ce0c995b
73 changed files with 1374675 additions and 304886 deletions

@@ -0,0 +1,15 @@
# triples: 86517
# entities: 7128
# relations: 12409
# timesteps: 208
# test triples: 8218
# valid triples: 8193
# train triples: 70106
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 0
Search method: N/A
filter_dupes: both
nonames: False

File diff suppressed because it is too large

File diff suppressed because it is too large

8218
data/icews14_both/test.txt Normal file

File diff suppressed because it is too large

@@ -0,0 +1,209 @@
0 0 2
1 3 5
2 6 7
3 8 9
4 10 12
5 13 14
6 15 16
7 17 19
8 20 21
9 22 23
10 24 26
11 27 28
12 29 30
13 31 33
14 34 35
15 36 37
16 38 40
17 41 42
18 43 44
19 45 46
20 47 48
21 49 49
22 50 50
23 51 51
24 52 53
25 54 54
26 55 55
27 56 57
28 58 59
29 60 61
30 62 62
31 63 63
32 64 65
33 66 68
34 69 70
35 71 71
36 72 72
37 73 74
38 75 76
39 77 78
40 79 80
41 81 82
42 83 84
43 85 85
44 86 87
45 88 89
46 90 91
47 92 93
48 94 96
49 97 97
50 98 99
51 100 101
52 102 103
53 104 105
54 106 107
55 108 110
56 111 112
57 113 114
58 115 116
59 117 118
60 119 119
61 120 121
62 122 124
63 125 125
64 126 127
65 128 129
66 130 131
67 132 133
68 134 135
69 136 138
70 139 139
71 140 140
72 141 141
73 142 143
74 144 145
75 146 147
76 148 148
77 149 150
78 151 152
79 153 154
80 155 155
81 156 157
82 158 159
83 160 161
84 162 163
85 164 166
86 167 167
87 168 168
88 169 169
89 170 170
90 171 173
91 174 175
92 176 177
93 178 180
94 181 182
95 183 183
96 184 185
97 186 187
98 188 188
99 189 190
100 191 192
101 193 194
102 195 195
103 196 197
104 198 199
105 200 201
106 202 203
107 204 205
108 206 208
109 209 210
110 211 212
111 213 215
112 216 217
113 218 219
114 220 221
115 222 222
116 223 224
117 225 226
118 227 229
119 230 231
120 232 233
121 234 236
122 237 238
123 239 239
124 240 241
125 242 243
126 244 245
127 246 246
128 247 248
129 249 250
130 251 251
131 252 252
132 253 253
133 254 254
134 255 256
135 257 257
136 258 259
137 260 261
138 262 263
139 264 264
140 265 265
141 266 266
142 267 267
143 268 269
144 270 271
145 272 272
146 273 273
147 274 274
148 275 276
149 277 278
150 279 279
151 280 281
152 282 283
153 284 285
154 286 286
155 287 287
156 288 288
157 289 289
158 290 291
159 292 292
160 293 293
161 294 294
162 295 295
163 296 297
164 298 299
165 300 300
166 301 301
167 302 303
168 304 305
169 306 307
170 308 309
171 310 310
172 311 312
173 313 313
174 314 314
175 315 315
176 316 316
177 317 317
178 318 319
179 320 320
180 321 321
181 322 322
182 323 323
183 324 324
184 325 326
185 327 327
186 328 328
187 329 329
188 330 330
189 331 332
190 333 334
191 335 335
192 336 336
193 337 338
194 339 340
195 341 342
196 343 343
197 344 344
198 345 346
199 347 348
200 349 349
201 350 350
202 351 352
203 353 355
204 356 357
205 358 359
206 360 362
207 363 365
208 366 366
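Note: a time_map.dict file like the one above maps each coarsened timestep id to an inclusive start/end range of raw time indices (days for icews14; the wikidata12k and yago maps further down use years). A minimal loading sketch, assuming whitespace-separated integer columns; load_time_map is an illustrative helper, not a function from this repo:

def load_time_map(path):
    # Each row is "<timestep_id> <range_start> <range_end>"; ranges are inclusive.
    time_map = {}
    with open(path) as f:
        for line in f:
            parts = line.split()
            if len(parts) != 3:
                continue  # skip blank or malformed rows
            ts, start, end = (int(p) for p in parts)
            time_map[ts] = (start, end)
    return time_map

# load_time_map('data/icews14_both/time_map.dict')[10] -> (24, 26):
# timestep 10 covers raw time indices 24 through 26 (row "10 24 26" above).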

70106
data/icews14_both/train.txt Normal file

File diff suppressed because it is too large

8193
data/icews14_both/valid.txt Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,15 @@
# triples: 231529
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 16195
# valid triples: 16707
# train triples: 198627
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: both
nonames: False

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,423 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
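Note: each row above pairs an integer id with a temporally bucketed relation label such as P131[0-0] or P512[4-69], i.e. a base Wikidata property restricted to an inclusive timestep interval. An illustrative parser (parse_relation_label is not a repo function); the same pattern also covers the yago labels like <wasBornIn>[35-45] later in this commit:

import re

LABEL_RE = re.compile(r'^(.+)\[(\d+)-(\d+)\]$')

def parse_relation_label(label):
    # "P512[4-69]" -> ('P512', 4, 69); "<wasBornIn>[35-45]" -> ('<wasBornIn>', 35, 45)
    m = LABEL_RE.match(label)
    if m is None:
        raise ValueError('unexpected relation label: ' + label)
    return m.group(1), int(m.group(2)), int(m.group(3))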

16195
data/wikidata12k_both/test.txt Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021

198627
data/wikidata12k_both/train.txt Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,15 @@
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,24 @@
P1376 0
P512 4
P579 3
P150 18
P190 5
P551 19
P131 1
P793 21
P1435 13
P39 14
P17 6
P54 22
P31 15
P6 7
P1411 20
P2962 2
P463 9
P1346 16
P108 10
P69 23
P166 17
P102 11
P27 12
P26 8

File diff suppressed because it is too large

@@ -0,0 +1,423 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]

19271
data/wikidata12k_old/test.txt Normal file

File diff suppressed because it is too large

@@ -0,0 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021

252339
data/wikidata12k_old/train.txt Normal file

File diff suppressed because it is too large

20208
data/wikidata12k_old/valid.txt Normal file

File diff suppressed because it is too large

15
data/yago/about.txt Normal file
@@ -0,0 +1,15 @@
# triples: 78032
# entities: 10526
# relations: 177
# timesteps: 46
# test triples: 6909
# valid triples: 7198
# train triples: 63925
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 5.0
Search method: N/A
filter_dupes: inter
nonames: False

10526
data/yago/entities.dict Normal file

File diff suppressed because it is too large

177
data/yago/relations.dict Normal file
@@ -0,0 +1,177 @@
0 <wasBornIn>[0-2]
1 <wasBornIn>[2-5]
2 <wasBornIn>[5-7]
3 <wasBornIn>[7-10]
4 <wasBornIn>[10-12]
5 <wasBornIn>[12-15]
6 <wasBornIn>[15-17]
7 <wasBornIn>[17-20]
8 <wasBornIn>[20-22]
9 <wasBornIn>[22-25]
10 <wasBornIn>[25-27]
11 <wasBornIn>[27-30]
12 <wasBornIn>[30-32]
13 <wasBornIn>[32-35]
14 <wasBornIn>[35-45]
15 <wasBornIn>[52-52]
16 <diedIn>[0-3]
17 <diedIn>[3-5]
18 <diedIn>[5-7]
19 <diedIn>[7-10]
20 <diedIn>[10-12]
21 <diedIn>[12-14]
22 <diedIn>[14-17]
23 <diedIn>[17-19]
24 <diedIn>[19-21]
25 <diedIn>[21-23]
26 <diedIn>[23-25]
27 <diedIn>[25-27]
28 <diedIn>[27-29]
29 <diedIn>[29-32]
30 <diedIn>[32-34]
31 <diedIn>[34-36]
32 <diedIn>[36-38]
33 <diedIn>[38-40]
34 <diedIn>[40-42]
35 <diedIn>[42-44]
36 <diedIn>[44-47]
37 <diedIn>[47-49]
38 <diedIn>[49-51]
39 <diedIn>[51-53]
40 <diedIn>[53-55]
41 <diedIn>[55-57]
42 <diedIn>[59-59]
43 <worksAt>[0-3]
44 <worksAt>[3-5]
45 <worksAt>[5-7]
46 <worksAt>[7-10]
47 <worksAt>[10-12]
48 <worksAt>[12-14]
49 <worksAt>[14-17]
50 <worksAt>[17-19]
51 <worksAt>[19-21]
52 <worksAt>[21-23]
53 <worksAt>[23-25]
54 <worksAt>[25-27]
55 <worksAt>[27-29]
56 <worksAt>[29-32]
57 <worksAt>[32-34]
58 <worksAt>[34-36]
59 <worksAt>[36-40]
60 <worksAt>[40-42]
61 <worksAt>[42-47]
62 <worksAt>[47-53]
63 <worksAt>[59-59]
64 <playsFor>[0-3]
65 <playsFor>[3-5]
66 <playsFor>[5-23]
67 <playsFor>[23-25]
68 <playsFor>[25-27]
69 <playsFor>[27-29]
70 <playsFor>[29-32]
71 <playsFor>[32-34]
72 <playsFor>[34-36]
73 <playsFor>[36-38]
74 <playsFor>[38-40]
75 <playsFor>[40-42]
76 <playsFor>[42-44]
77 <playsFor>[44-47]
78 <playsFor>[47-51]
79 <playsFor>[59-59]
80 <hasWonPrize>[1-4]
81 <hasWonPrize>[4-6]
82 <hasWonPrize>[6-8]
83 <hasWonPrize>[8-11]
84 <hasWonPrize>[11-15]
85 <hasWonPrize>[15-18]
86 <hasWonPrize>[18-22]
87 <hasWonPrize>[22-26]
88 <hasWonPrize>[26-30]
89 <hasWonPrize>[30-33]
90 <hasWonPrize>[33-37]
91 <hasWonPrize>[37-47]
92 <hasWonPrize>[47-53]
93 <hasWonPrize>[59-59]
94 <isMarriedTo>[0-3]
95 <isMarriedTo>[3-5]
96 <isMarriedTo>[5-7]
97 <isMarriedTo>[7-10]
98 <isMarriedTo>[10-12]
99 <isMarriedTo>[12-14]
100 <isMarriedTo>[14-17]
101 <isMarriedTo>[17-19]
102 <isMarriedTo>[19-21]
103 <isMarriedTo>[21-23]
104 <isMarriedTo>[23-25]
105 <isMarriedTo>[25-27]
106 <isMarriedTo>[27-29]
107 <isMarriedTo>[29-32]
108 <isMarriedTo>[32-34]
109 <isMarriedTo>[34-38]
110 <isMarriedTo>[38-42]
111 <isMarriedTo>[42-47]
112 <isMarriedTo>[47-51]
113 <isMarriedTo>[51-55]
114 <isMarriedTo>[59-59]
115 <owns>[0-10]
116 <owns>[10-17]
117 <owns>[17-19]
118 <owns>[19-23]
119 <owns>[23-36]
120 <owns>[36-38]
121 <owns>[59-59]
122 <graduatedFrom>[0-3]
123 <graduatedFrom>[3-5]
124 <graduatedFrom>[5-7]
125 <graduatedFrom>[7-10]
126 <graduatedFrom>[10-14]
127 <graduatedFrom>[14-17]
128 <graduatedFrom>[17-19]
129 <graduatedFrom>[19-21]
130 <graduatedFrom>[21-23]
131 <graduatedFrom>[23-27]
132 <graduatedFrom>[27-32]
133 <graduatedFrom>[32-34]
134 <graduatedFrom>[34-38]
135 <graduatedFrom>[38-42]
136 <graduatedFrom>[59-59]
137 <isAffiliatedTo>[1-4]
138 <isAffiliatedTo>[4-6]
139 <isAffiliatedTo>[6-8]
140 <isAffiliatedTo>[8-11]
141 <isAffiliatedTo>[11-13]
142 <isAffiliatedTo>[13-15]
143 <isAffiliatedTo>[15-18]
144 <isAffiliatedTo>[18-20]
145 <isAffiliatedTo>[20-22]
146 <isAffiliatedTo>[22-24]
147 <isAffiliatedTo>[24-26]
148 <isAffiliatedTo>[26-28]
149 <isAffiliatedTo>[28-30]
150 <isAffiliatedTo>[30-33]
151 <isAffiliatedTo>[33-35]
152 <isAffiliatedTo>[35-37]
153 <isAffiliatedTo>[37-40]
154 <isAffiliatedTo>[40-42]
155 <isAffiliatedTo>[42-44]
156 <isAffiliatedTo>[44-47]
157 <isAffiliatedTo>[47-49]
158 <isAffiliatedTo>[49-51]
159 <isAffiliatedTo>[51-53]
160 <isAffiliatedTo>[53-55]
161 <isAffiliatedTo>[55-57]
162 <isAffiliatedTo>[59-59]
163 <created>[0-3]
164 <created>[3-5]
165 <created>[5-10]
166 <created>[10-12]
167 <created>[12-17]
168 <created>[17-19]
169 <created>[19-25]
170 <created>[25-29]
171 <created>[29-32]
172 <created>[32-36]
173 <created>[36-42]
174 <created>[42-47]
175 <created>[47-53]
176 <created>[59-59]

6909
data/yago/test.txt Normal file

File diff suppressed because it is too large

60
data/yago/time_map.dict Normal file
@@ -0,0 +1,60 @@
0 -431 1782
1 1783 1848
2 1849 1870
3 1871 1888
4 1889 1899
5 1900 1906
6 1907 1912
7 1913 1917
8 1918 1922
9 1923 1926
10 1927 1930
11 1931 1934
12 1935 1938
13 1939 1941
14 1942 1944
15 1945 1947
16 1948 1950
17 1951 1953
18 1954 1956
19 1957 1959
20 1960 1962
21 1963 1965
22 1966 1967
23 1968 1969
24 1970 1971
25 1972 1973
26 1974 1975
27 1976 1977
28 1978 1979
29 1980 1981
30 1982 1983
31 1984 1985
32 1986 1987
33 1988 1989
34 1990 1991
35 1992 1993
36 1994 1994
37 1995 1996
38 1997 1997
39 1998 1998
40 1999 1999
41 2000 2000
42 2001 2001
43 2002 2002
44 2003 2003
45 2004 2004
46 2005 2005
47 2006 2006
48 2007 2007
49 2008 2008
50 2009 2009
51 2010 2010
52 2011 2011
53 2012 2012
54 2013 2013
55 2014 2014
56 2015 2015
57 2016 2016
58 2017 2017
59 2018 2018

63925
data/yago/train.txt Normal file

File diff suppressed because it is too large

7198
data/yago/valid.txt Normal file

File diff suppressed because it is too large

@@ -0,0 +1,793 @@
0
2
4
9
11
12
16
17
19
27
29
34
35
37
38
41
42
45
49
51
52
54
56
57
61
64
65
67
69
70
72
76
78
79
83
86
87
89
101
102
103
108
111
112
119
121
122
126
128
129
132
134
138
141
144
146
153
154
155
156
158
159
160
161
162
164
165
166
168
173
175
176
177
182
184
185
186
187
188
190
192
193
201
202
208
209
211
213
215
216
217
222
227
229
235
239
240
242
243
245
246
247
251
252
254
257
261
263
266
268
271
279
282
292
300
303
305
308
309
311
313
316
319
322
324
325
329
331
332
333
334
337
339
342
343
346
347
348
349
350
352
353
355
357
361
362
363
367
371
373
378
379
383
384
385
389
392
394
395
396
397
399
400
402
403
407
409
415
416
420
421
422
428
429
432
433
440
442
443
450
452
459
463
464
466
471
472
476
480
484
489
490
493
494
495
500
503
507
509
515
519
520
521
525
528
529
533
534
539
541
542
548
550
556
559
563
566
567
569
573
575
576
579
582
585
588
592
593
594
596
597
598
599
603
604
605
606
607
613
614
616
617
618
619
621
623
624
625
628
638
641
642
648
651
659
660
661
663
664
676
677
678
680
682
686
688
689
691
694
698
704
707
708
712
713
716
719
723
724
726
728
732
741
742
743
744
745
746
750
752
755
759
762
764
767
768
770
772
775
777
780
782
785
789
799
800
801
802
804
805
810
811
816
822
823
826
829
832
834
835
838
839
842
847
848
850
851
852
856
861
862
865
867
868
869
874
876
882
883
884
885
891
893
898
899
906
909
910
911
912
920
923
924
926
928
934
938
941
942
943
944
945
951
954
956
957
958
960
961
963
964
968
970
975
976
977
979
981
988
989
992
993
995
997
1005
1008
1009
1012
1013
1014
1015
1023
1029
1032
1038
1044
1045
1052
1053
1055
1057
1060
1061
1065
1066
1074
1077
1079
1080
1082
1083
1085
1086
1089
1090
1091
1095
1104
1107
1111
1114
1121
1124
1126
1127
1128
1131
1132
1139
1140
1142
1143
1145
1148
1150
1157
1163
1164
1168
1170
1171
1172
1173
1179
1182
1186
1189
1190
1191
1194
1196
1198
1201
1204
1206
1208
1217
1220
1223
1228
1231
1232
1235
1236
1237
1238
1240
1246
1247
1249
1252
1258
1260
1265
1266
1273
1274
1278
1279
1280
1284
1286
1287
1288
1289
1290
1293
1294
1295
1297
1298
1301
1303
1304
1305
1307
1308
1309
1314
1318
1319
1323
1325
1327
1328
1333
1337
1340
1341
1343
1345
1346
1347
1349
1350
1351
1358
1364
1365
1367
1368
1369
1370
1373
1375
1376
1378
1380
1381
1382
1385
1387
1390
1391
1394
1396
1397
1399
1400
1406
1409
1412
1416
1417
1418
1420
1423
1425
1428
1430
1431
1432
1437
1438
1439
1444
1447
1450
1454
1456
1457
1460
1464
1465
1469
1473
1474
1475
1477
1479
1488
1490
1493
1494
1497
1500
1502
1503
1504
1505
1507
1508
1510
1514
1515
1520
1522
1523
1526
1547
1549
1553
1556
1557
1558
1562
1563
1564
1565
1570
1571
1574
1575
1579
1591
1592
1594
1601
1604
1605
1606
1608
1609
1613
1618
1619
1620
1621
1632
1634
1635
1636
1642
1643
1648
1650
1652
1653
1660
1661
1662
1666
1669
1670
1676
1677
1682
1683
1690
1692
1693
1697
1698
1702
1703
1706
1709
1711
1713
1715
1717
1721
1724
1725
1729
1730
1733
1734
1735
1736
1741
1745
1746
1748
1749
1751
1755
1761
1763
1766
1767
1768
1769
1773
1775
1777
1778
1783
1789
1790
1792
1793
1795
1800
1803
1805
1809
1812
1815
1816
1819
1820
1822
1823
1824
1825
1828
1831
1833
1834
1835
1836
1837
1842
1848
1849
1852
1853
1854
1856
1857
1858
1859
1861
1864
1865
1869
1873
1874
1876
1877
1882
1883
1884
1885
1888
1889
1890
1892
1894
1896
1899
1902
1903
1905
1908
1910
1913
1914
1915
1920
1928
1931
1936
1938
1941
1942
1944
1946
1947
1948
1954
1956
1958
1961
1966
1968
1969
1971
1972
1977
1979
1985
1986
1987
1988
1989
1990
1999
2001
2005
2009
2010
2012
2013
2014
2015
2017
2018
2022
2023
2028
2032
2036
2037
2038
2041
2042
2043
2044
2045
2046
2048

File diff suppressed because it is too large

@@ -0,0 +1,809 @@
7
9
12
15
17
22
24
25
28
29
32
37
38
41
43
49
52
54
57
58
59
60
66
69
72
76
78
81
83
84
85
88
89
94
100
102
105
106
107
108
109
115
116
121
123
124
127
128
133
135
137
138
141
144
156
157
159
161
168
171
172
174
175
176
181
182
186
188
189
190
191
195
197
198
200
201
204
208
212
215
216
217
218
219
220
222
224
225
227
229
230
233
236
239
240
242
243
244
246
247
250
251
253
254
255
256
257
261
265
266
271
273
274
275
276
279
280
282
284
287
289
292
296
297
299
300
302
308
311
312
315
316
317
320
321
322
326
331
333
335
336
337
339
344
345
346
347
351
352
353
354
355
358
359
362
364
366
368
373
376
388
390
392
393
394
395
397
398
401
403
406
407
409
410
412
413
415
416
418
420
421
425
435
437
443
444
445
446
448
453
455
456
462
463
468
470
471
477
478
481
485
490
491
493
498
505
510
513
514
515
516
521
523
527
529
531
534
535
536
537
538
543
547
550
555
557
558
559
560
561
565
566
567
570
575
577
580
581
582
593
594
595
597
599
602
605
609
612
614
616
618
620
625
628
632
633
635
636
643
644
645
646
647
648
652
662
663
669
670
672
673
681
682
685
687
689
693
697
700
702
707
711
716
717
719
727
728
729
730
732
733
734
738
740
744
751
754
755
756
760
761
762
763
766
768
770
771
772
773
774
775
776
777
779
781
782
784
786
788
789
792
795
799
800
805
808
810
812
815
817
820
821
825
827
829
831
832
833
834
836
837
840
841
842
848
849
850
851
852
855
858
860
861
866
869
871
872
877
878
880
881
882
884
885
886
891
893
897
899
901
908
911
913
919
921
925
927
929
934
937
938
945
947
948
950
957
962
963
965
969
971
976
977
978
985
986
989
990
991
993
996
997
998
1003
1005
1006
1007
1010
1011
1013
1014
1019
1022
1024
1025
1028
1031
1032
1034
1038
1040
1042
1043
1048
1049
1050
1051
1056
1057
1059
1062
1063
1065
1066
1071
1072
1074
1075
1076
1080
1081
1082
1085
1086
1087
1090
1095
1096
1098
1099
1101
1105
1108
1109
1110
1112
1117
1119
1121
1122
1123
1125
1129
1133
1139
1143
1145
1147
1151
1153
1154
1158
1159
1162
1165
1167
1169
1171
1175
1177
1178
1180
1181
1182
1185
1187
1188
1193
1197
1199
1217
1218
1220
1221
1225
1227
1230
1232
1233
1235
1236
1237
1238
1239
1244
1246
1247
1248
1262
1263
1264
1266
1267
1269
1271
1272
1273
1276
1277
1278
1280
1284
1291
1292
1293
1295
1296
1297
1299
1300
1301
1305
1307
1309
1312
1322
1325
1330
1334
1335
1337
1338
1340
1342
1346
1347
1350
1351
1356
1357
1358
1359
1366
1368
1370
1371
1373
1376
1379
1382
1383
1384
1385
1386
1387
1389
1393
1394
1398
1403
1404
1409
1411
1413
1415
1420
1421
1423
1426
1429
1431
1433
1434
1439
1444
1446
1447
1456
1457
1461
1467
1469
1472
1477
1481
1493
1494
1496
1498
1499
1501
1503
1506
1507
1508
1510
1511
1512
1516
1521
1523
1527
1528
1529
1530
1535
1537
1540
1541
1542
1546
1552
1559
1562
1563
1564
1567
1572
1574
1577
1581
1587
1589
1594
1601
1603
1613
1614
1616
1617
1618
1619
1620
1621
1623
1624
1625
1626
1631
1632
1634
1635
1636
1638
1640
1642
1643
1645
1647
1648
1650
1651
1652
1655
1656
1658
1661
1662
1663
1665
1666
1667
1668
1669
1674
1675
1685
1686
1695
1697
1698
1699
1706
1713
1716
1717
1718
1723
1724
1725
1729
1730
1731
1732
1733
1734
1738
1741
1744
1749
1761
1762
1769
1772
1774
1776
1777
1781
1784
1787
1789
1790
1791
1798
1804
1805
1807
1808
1811
1812
1816
1821
1824
1827
1831
1834
1835
1836
1842
1847
1852
1855
1856
1859
1861
1862
1865
1866
1871
1872
1874
1876
1877
1878
1879
1880
1881
1885
1887
1888
1889
1891
1892
1894
1895
1899
1900
1907
1911
1913
1914
1915
1917
1919
1920
1923
1924
1925
1927
1934
1939
1940
1941
1948
1951
1953
1957
1958
1959
1961
1962
1965
1966
1967
1971
1973
1975
1979
1981
1983
1989
1996
1997
2000
2004
2007
2009
2011
2012
2016
2017
2019
2021
2024
2029
2040
2042
2044
2048

@@ -0,0 +1,15 @@
# triples: 78032
# entities: 10526
# relations: 177
# timesteps: 46
# test triples: 6909
# valid triples: 7198
# train triples: 63925
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 5.0
Search method: N/A
filter_dupes: both
nonames: False

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,177 @@
0 <wasBornIn>[0-2]
1 <wasBornIn>[2-5]
2 <wasBornIn>[5-7]
3 <wasBornIn>[7-10]
4 <wasBornIn>[10-12]
5 <wasBornIn>[12-15]
6 <wasBornIn>[15-17]
7 <wasBornIn>[17-20]
8 <wasBornIn>[20-22]
9 <wasBornIn>[22-25]
10 <wasBornIn>[25-27]
11 <wasBornIn>[27-30]
12 <wasBornIn>[30-32]
13 <wasBornIn>[32-35]
14 <wasBornIn>[35-45]
15 <wasBornIn>[52-52]
16 <diedIn>[0-3]
17 <diedIn>[3-5]
18 <diedIn>[5-7]
19 <diedIn>[7-10]
20 <diedIn>[10-12]
21 <diedIn>[12-14]
22 <diedIn>[14-17]
23 <diedIn>[17-19]
24 <diedIn>[19-21]
25 <diedIn>[21-23]
26 <diedIn>[23-25]
27 <diedIn>[25-27]
28 <diedIn>[27-29]
29 <diedIn>[29-32]
30 <diedIn>[32-34]
31 <diedIn>[34-36]
32 <diedIn>[36-38]
33 <diedIn>[38-40]
34 <diedIn>[40-42]
35 <diedIn>[42-44]
36 <diedIn>[44-47]
37 <diedIn>[47-49]
38 <diedIn>[49-51]
39 <diedIn>[51-53]
40 <diedIn>[53-55]
41 <diedIn>[55-57]
42 <diedIn>[59-59]
43 <worksAt>[0-3]
44 <worksAt>[3-5]
45 <worksAt>[5-7]
46 <worksAt>[7-10]
47 <worksAt>[10-12]
48 <worksAt>[12-14]
49 <worksAt>[14-17]
50 <worksAt>[17-19]
51 <worksAt>[19-21]
52 <worksAt>[21-23]
53 <worksAt>[23-25]
54 <worksAt>[25-27]
55 <worksAt>[27-29]
56 <worksAt>[29-32]
57 <worksAt>[32-34]
58 <worksAt>[34-36]
59 <worksAt>[36-40]
60 <worksAt>[40-42]
61 <worksAt>[42-47]
62 <worksAt>[47-53]
63 <worksAt>[59-59]
64 <playsFor>[0-3]
65 <playsFor>[3-5]
66 <playsFor>[5-23]
67 <playsFor>[23-25]
68 <playsFor>[25-27]
69 <playsFor>[27-29]
70 <playsFor>[29-32]
71 <playsFor>[32-34]
72 <playsFor>[34-36]
73 <playsFor>[36-38]
74 <playsFor>[38-40]
75 <playsFor>[40-42]
76 <playsFor>[42-44]
77 <playsFor>[44-47]
78 <playsFor>[47-51]
79 <playsFor>[59-59]
80 <hasWonPrize>[1-4]
81 <hasWonPrize>[4-6]
82 <hasWonPrize>[6-8]
83 <hasWonPrize>[8-11]
84 <hasWonPrize>[11-15]
85 <hasWonPrize>[15-18]
86 <hasWonPrize>[18-22]
87 <hasWonPrize>[22-26]
88 <hasWonPrize>[26-30]
89 <hasWonPrize>[30-33]
90 <hasWonPrize>[33-37]
91 <hasWonPrize>[37-47]
92 <hasWonPrize>[47-53]
93 <hasWonPrize>[59-59]
94 <isMarriedTo>[0-3]
95 <isMarriedTo>[3-5]
96 <isMarriedTo>[5-7]
97 <isMarriedTo>[7-10]
98 <isMarriedTo>[10-12]
99 <isMarriedTo>[12-14]
100 <isMarriedTo>[14-17]
101 <isMarriedTo>[17-19]
102 <isMarriedTo>[19-21]
103 <isMarriedTo>[21-23]
104 <isMarriedTo>[23-25]
105 <isMarriedTo>[25-27]
106 <isMarriedTo>[27-29]
107 <isMarriedTo>[29-32]
108 <isMarriedTo>[32-34]
109 <isMarriedTo>[34-38]
110 <isMarriedTo>[38-42]
111 <isMarriedTo>[42-47]
112 <isMarriedTo>[47-51]
113 <isMarriedTo>[51-55]
114 <isMarriedTo>[59-59]
115 <owns>[0-10]
116 <owns>[10-17]
117 <owns>[17-19]
118 <owns>[19-23]
119 <owns>[23-36]
120 <owns>[36-38]
121 <owns>[59-59]
122 <graduatedFrom>[0-3]
123 <graduatedFrom>[3-5]
124 <graduatedFrom>[5-7]
125 <graduatedFrom>[7-10]
126 <graduatedFrom>[10-14]
127 <graduatedFrom>[14-17]
128 <graduatedFrom>[17-19]
129 <graduatedFrom>[19-21]
130 <graduatedFrom>[21-23]
131 <graduatedFrom>[23-27]
132 <graduatedFrom>[27-32]
133 <graduatedFrom>[32-34]
134 <graduatedFrom>[34-38]
135 <graduatedFrom>[38-42]
136 <graduatedFrom>[59-59]
137 <isAffiliatedTo>[1-4]
138 <isAffiliatedTo>[4-6]
139 <isAffiliatedTo>[6-8]
140 <isAffiliatedTo>[8-11]
141 <isAffiliatedTo>[11-13]
142 <isAffiliatedTo>[13-15]
143 <isAffiliatedTo>[15-18]
144 <isAffiliatedTo>[18-20]
145 <isAffiliatedTo>[20-22]
146 <isAffiliatedTo>[22-24]
147 <isAffiliatedTo>[24-26]
148 <isAffiliatedTo>[26-28]
149 <isAffiliatedTo>[28-30]
150 <isAffiliatedTo>[30-33]
151 <isAffiliatedTo>[33-35]
152 <isAffiliatedTo>[35-37]
153 <isAffiliatedTo>[37-40]
154 <isAffiliatedTo>[40-42]
155 <isAffiliatedTo>[42-44]
156 <isAffiliatedTo>[44-47]
157 <isAffiliatedTo>[47-49]
158 <isAffiliatedTo>[49-51]
159 <isAffiliatedTo>[51-53]
160 <isAffiliatedTo>[53-55]
161 <isAffiliatedTo>[55-57]
162 <isAffiliatedTo>[59-59]
163 <created>[0-3]
164 <created>[3-5]
165 <created>[5-10]
166 <created>[10-12]
167 <created>[12-17]
168 <created>[17-19]
169 <created>[19-25]
170 <created>[25-29]
171 <created>[29-32]
172 <created>[32-36]
173 <created>[36-42]
174 <created>[42-47]
175 <created>[47-53]
176 <created>[59-59]

6909
data/yago11k_both/test.txt Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,60 @@
0 -431 1782
1 1783 1848
2 1849 1870
3 1871 1888
4 1889 1899
5 1900 1906
6 1907 1912
7 1913 1917
8 1918 1922
9 1923 1926
10 1927 1930
11 1931 1934
12 1935 1938
13 1939 1941
14 1942 1944
15 1945 1947
16 1948 1950
17 1951 1953
18 1954 1956
19 1957 1959
20 1960 1962
21 1963 1965
22 1966 1967
23 1968 1969
24 1970 1971
25 1972 1973
26 1974 1975
27 1976 1977
28 1978 1979
29 1980 1981
30 1982 1983
31 1984 1985
32 1986 1987
33 1988 1989
34 1990 1991
35 1992 1993
36 1994 1994
37 1995 1996
38 1997 1997
39 1998 1998
40 1999 1999
41 2000 2000
42 2001 2001
43 2002 2002
44 2003 2003
45 2004 2004
46 2005 2005
47 2006 2006
48 2007 2007
49 2008 2008
50 2009 2009
51 2010 2010
52 2011 2011
53 2012 2012
54 2013 2013
55 2014 2014
56 2015 2015
57 2016 2016
58 2017 2017
59 2018 2018

63925
data/yago11k_both/train.txt Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

7198
data/yago11k_both/valid.txt Normal file

File diff suppressed because it is too large

9483
icews14.out Normal file

File diff suppressed because it is too large

4331
icews14_l2_1e-5.out Normal file

File diff suppressed because it is too large

@@ -1964,3 +1964,25 @@
2023-05-04 08:27:31,384 - fb_one_to_x - [INFO] - [E:34| 1500]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:29:20,404 - fb_one_to_x - [INFO] - [E:34| 1600]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:31:12,139 - fb_one_to_x - [INFO] - [E:34| 1700]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:55:56,065 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True}
2023-05-04 08:56:07,953 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0] fb_one_to_x
2023-05-04 08:56:53,173 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100] fb_one_to_x
2023-05-04 08:57:20,187 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0] fb_one_to_x
2023-05-04 08:58:08,090 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100] fb_one_to_x
2023-05-04 08:58:36,338 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:
MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142
MR: Tail : 149.91, Head : 288.48, Avg : 219.2
Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223
Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292
Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024
2023-05-04 09:03:55,555 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True}
2023-05-04 09:04:07,491 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0] fb_one_to_x
2023-05-04 09:04:52,620 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100] fb_one_to_x
2023-05-04 09:05:19,645 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0] fb_one_to_x
2023-05-04 09:06:07,591 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100] fb_one_to_x
2023-05-04 09:06:35,660 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:
MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142
MR: Tail : 149.91, Head : 288.48, Avg : 219.2
Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223
Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292
Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024
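Note: the MRR/MR/Hit-k lines in these logs are the standard link-prediction metrics, computed separately for tail and head prediction and then averaged. An illustrative computation, assuming ranks holds the 1-based rank of each correct answer (this is not the repo's evaluation code):

import numpy as np

def summarize(ranks):
    # One number per metric from a list of 1-based ranks.
    r = np.asarray(ranks, dtype=float)
    return {'MRR': (1.0 / r).mean(), 'MR': r.mean(),
            'Hit-1': (r <= 1).mean(), 'Hit-3': (r <= 3).mean(),
            'Hit-10': (r <= 10).mean()}

# The "Avg" column is the mean of the tail- and head-batch values, e.g.
# (0.43029 + 0.23256) / 2 ≈ 0.33142 for the MRR above.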

27
main.py
@@ -3,6 +3,7 @@ import uuid
 import argparse
 import logging
 import logging.config
+import time
 import torch
 import numpy as np
@@ -76,7 +77,7 @@ class Main(object):
         ent_set, rel_set = OrderedSet(), OrderedSet()
         for split in ['train', 'test', 'valid']:
             for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
-                sub, rel, obj = map(str.lower, line.strip().split('\t'))
+                sub, rel, obj, *_ = map(str.lower, line.strip().split('\t'))
                 ent_set.add(sub)
                 rel_set.add(rel)
                 ent_set.add(obj)
@@ -107,14 +108,20 @@ class Main(object):
         sr2o = ddict(set)
         for split in ['train', 'test', 'valid']:
-            for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
-                sub, rel, obj = map(str.lower, line.strip().split('\t'))
+            samples = 0
+            for i, line in enumerate(open('./data/{}/{}.txt'.format(self.p.dataset, split))):
+                sub, rel, obj, rel_type, *_ = map(str.lower, line.strip().split('\t'))
+                if (split == 'test' and self.p.rel_type is not None):
+                    if rel_type != self.p.rel_type:
+                        continue
                 sub, rel, obj = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj]
                 self.data[split].append((sub, rel, obj))
                 if split == 'train':
                     sr2o[(sub, rel)].add(obj)
                     sr2o[(obj, rel+self.p.num_rel)].add(sub)
+                samples += 1
+            print(split.capitalize() + ': ' + str(samples) + ' samples')
         self.data = dict(self.data)
         self.sr2o = {k: list(v) for k, v in sr2o.items()}
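Note: the starred unpacking introduced above is what lets the same loader read the temporal datasets in this commit, whose rows carry extra tab-separated columns after sub/rel/obj. A made-up example line:

line = 'q30\tP131[0-0]\tq60\t1-1\n'  # hypothetical four-column row
sub, rel, obj, *rest = map(str.lower, line.strip().split('\t'))
# sub='q30', rel='p131[0-0]', obj='q60', rest=['1-1']; with a plain
# three-column row, rest is simply [].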
@@ -152,6 +159,8 @@ class Main(object):
                 {'triple': (obj, rel_inv, sub), 'label': self.sr2o_all[(obj, rel_inv)]})
         self.triples = dict(self.triples)
+        print(len(self.triples['test_head']))
+        print(len(self.triples['test_tail']))

         def get_data_loader(dataset_class, split, batch_size, shuffle=True):
             return DataLoader(
@@ -626,6 +635,7 @@ if __name__ == "__main__":
     parser.add_argument('--test_only', action='store_true', default=False)
     parser.add_argument('--grid_search', action='store_true', default=False)
+    parser.add_argument('--rel_type', default=None, type=str)

     args = parser.parse_args()
@@ -690,4 +700,13 @@ if __name__ == "__main__":
         model.load_model(save_path)
         model.evaluate('test')
     else:
-        model.fit()
+        while True:
+            try:
+                model.fit()
+            except Exception as e:
+                print(e)
+                time.sleep(30)
+                del model
+                model = Main(args)
+                continue
+            break
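The retry block above keeps training alive across crashes: on any exception it waits, rebuilds the Main object, and tries again, leaving the loop only after a clean fit(). The same pattern as a standalone sketch (fit_with_restart is illustrative; Main and args are the names from main.py). Catching bare Exception is deliberately broad and best reserved for transient failures such as out-of-memory errors:

import time

def fit_with_restart(make_runner, args, wait_seconds=30):
    # Retry until fit() completes without raising; rebuild the runner each time.
    runner = make_runner(args)
    while True:
        try:
            runner.fit()
        except Exception as e:
            print(e)
            time.sleep(wait_seconds)
            del runner
            runner = make_runner(args)
            continue
        break
    return runner

# Usage sketch: model = fit_with_restart(Main, args)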

14
run.sh
@@ -24,3 +24,17 @@ PID: 4503
 test: testrun_d542676f
 ---
 nohup python main.py --gpu 3 --data WN18RR --drop 0.0 --drop_path 0.0 >run_log/fnet-wn.log 2>&1 &
+---
+nohup python main.py --name ice0003 --lr 0.0003 --data icews14 --gpu 1 >run_log/ice0003.log 2>&1 &
+PID: 3076
+tail -f -n 200 run_log/ice0003.log
+---
+nohup python main.py --name ice0003_2 --lr 0.00003 --data icews14 --gpu 3 >run_log/ice0003_2.log 2>&1 &
+PID: 3390
+tail -f -n 200 run_log/ice0003_2.log
+---
+nohup python main.py --name ice00001 --lr 0.00001 --data icews14 --gpu 2 >run_log/ice00001.log 2>&1 &
+PID:
+---
+nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 &

10708
run_log/icews14/0.00001.log Normal file

File diff suppressed because it is too large

6653
run_log/icews14/0.00003.log Normal file

File diff suppressed because it is too large

9511
run_log/icews14/0.0001.out Normal file

File diff suppressed because it is too large

4950
run_log/icews14/0.0003.log Normal file

File diff suppressed because it is too large

6249
run_log/icews14/0.001.log Normal file

File diff suppressed because it is too large

@@ -0,0 +1,425 @@
nohup: ignoring input
2023-05-27 04:41:18,497 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.001', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
{'batch_size': 128,
'bias': False,
'config_dir': './config/',
'dataset': 'wikidata12k',
'drop': 0.0,
'drop_path': 0.0,
'embed_dim': 400,
'ent_vec_dim': 400,
'expansion_factor': 4,
'expansion_factor_token': 0.5,
'feat_drop': 0.2,
'filt_h': 1,
'filt_w': 9,
'form': 'plain',
'gpu': '3',
'grid_search': False,
'hid_drop': 0.5,
'image_h': 128,
'image_w': 128,
'in_channels': 1,
'inp_drop': 0.2,
'k_h': 20,
'k_w': 10,
'ker_sz': 9,
'l2': 0.0,
'lbl_smooth': 0.1,
'log_dir': './log/',
'lr': 0.001,
'max_epochs': 500,
'mixer_depth': 16,
'mixer_dim': 256,
'mixer_dropout': 0.2,
'name': 'wikidata12k_0.001',
'neg_num': 1000,
'num_filt': 96,
'num_workers': 0,
'opt': 'adam',
'out_channels': 32,
'patch_size': 8,
'perm': 1,
'rel_vec_dim': 400,
'restore': False,
'seed': 42,
'test_only': False,
'train_strategy': 'one_to_n'}
2023-05-27 04:41:28,635 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:42:32,570 - [INFO] - [E:0| 100]: Train Loss:0.053587, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:43:36,618 - [INFO] - [E:0| 200]: Train Loss:0.028724, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:44:40,687 - [INFO] - [E:0| 300]: Train Loss:0.020033, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:45:44,799 - [INFO] - [E:0| 400]: Train Loss:0.015589, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:46:48,901 - [INFO] - [E:0| 500]: Train Loss:0.012878, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:47:53,124 - [INFO] - [E:0| 600]: Train Loss:0.011054, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:48:57,224 - [INFO] - [E:0| 700]: Train Loss:0.0097532, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:50:01,352 - [INFO] - [E:0| 800]: Train Loss:0.008763, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:51:05,445 - [INFO] - [E:0| 900]: Train Loss:0.0079929, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:52:09,559 - [INFO] - [E:0| 1000]: Train Loss:0.0073745, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:53:13,624 - [INFO] - [E:0| 1100]: Train Loss:0.0068693, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:54:17,823 - [INFO] - [E:0| 1200]: Train Loss:0.0064497, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:55:21,967 - [INFO] - [E:0| 1300]: Train Loss:0.0060945, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:56:26,129 - [INFO] - [E:0| 1400]: Train Loss:0.0057879, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:57:30,256 - [INFO] - [E:0| 1500]: Train Loss:0.0055195, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:58:34,350 - [INFO] - [E:0| 1600]: Train Loss:0.0052845, Val MRR:0.0, wikidata12k_0.001
2023-05-27 04:59:16,259 - [INFO] - [Epoch:0]: Training Loss:0.005147
2023-05-27 04:59:16,481 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 04:59:38,187 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 04:59:50,745 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 05:00:12,609 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 05:00:25,062 - [INFO] - [Evaluating Epoch 0 valid]:
MRR: Tail : 0.08049, Head : 0.01947, Avg : 0.04998
2023-05-27 05:00:26,469 - [INFO] - [Epoch 0]: Training Loss: 0.0051469, Valid MRR: 0.04998,
2023-05-27 05:00:27,127 - [INFO] - [E:1| 0]: Train Loss:0.0016275, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:01:31,277 - [INFO] - [E:1| 100]: Train Loss:0.0017991, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:02:35,390 - [INFO] - [E:1| 200]: Train Loss:0.0017846, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:03:39,590 - [INFO] - [E:1| 300]: Train Loss:0.0017789, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:04:43,748 - [INFO] - [E:1| 400]: Train Loss:0.001772, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:05:47,967 - [INFO] - [E:1| 500]: Train Loss:0.0017692, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:06:52,036 - [INFO] - [E:1| 600]: Train Loss:0.0017597, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:07:56,215 - [INFO] - [E:1| 700]: Train Loss:0.0017589, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:09:00,363 - [INFO] - [E:1| 800]: Train Loss:0.0017555, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:10:04,516 - [INFO] - [E:1| 900]: Train Loss:0.0017507, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:11:08,719 - [INFO] - [E:1| 1000]: Train Loss:0.0017476, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:12:12,940 - [INFO] - [E:1| 1100]: Train Loss:0.0017427, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:13:17,076 - [INFO] - [E:1| 1200]: Train Loss:0.0017384, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:14:21,295 - [INFO] - [E:1| 1300]: Train Loss:0.0017345, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:15:25,462 - [INFO] - [E:1| 1400]: Train Loss:0.0017307, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:16:29,614 - [INFO] - [E:1| 1500]: Train Loss:0.0017243, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:17:33,705 - [INFO] - [E:1| 1600]: Train Loss:0.001719, Val MRR:0.04998, wikidata12k_0.001
2023-05-27 05:18:15,618 - [INFO] - [Epoch:1]: Training Loss:0.001714
2023-05-27 05:18:15,839 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 05:18:37,583 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 05:18:50,191 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 05:19:12,067 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 05:19:24,503 - [INFO] - [Evaluating Epoch 1 valid]:
MRR: Tail : 0.1748, Head : 0.04108, Avg : 0.10794
2023-05-27 05:19:25,566 - [INFO] - [Epoch 1]: Training Loss: 0.0017143, Valid MRR: 0.10794,
2023-05-27 05:19:26,219 - [INFO] - [E:2| 0]: Train Loss:0.0016961, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:20:30,344 - [INFO] - [E:2| 100]: Train Loss:0.0016227, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:21:34,535 - [INFO] - [E:2| 200]: Train Loss:0.0016161, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:22:38,770 - [INFO] - [E:2| 300]: Train Loss:0.0016161, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:23:43,004 - [INFO] - [E:2| 400]: Train Loss:0.0016106, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:24:47,137 - [INFO] - [E:2| 500]: Train Loss:0.0016058, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:25:51,362 - [INFO] - [E:2| 600]: Train Loss:0.0016067, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:26:55,499 - [INFO] - [E:2| 700]: Train Loss:0.0016013, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:27:59,761 - [INFO] - [E:2| 800]: Train Loss:0.0015978, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:29:03,935 - [INFO] - [E:2| 900]: Train Loss:0.0015935, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:30:08,210 - [INFO] - [E:2| 1000]: Train Loss:0.0015896, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:31:12,398 - [INFO] - [E:2| 1100]: Train Loss:0.0015856, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:32:16,608 - [INFO] - [E:2| 1200]: Train Loss:0.0015814, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:33:20,836 - [INFO] - [E:2| 1300]: Train Loss:0.0015758, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:34:25,014 - [INFO] - [E:2| 1400]: Train Loss:0.001571, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:35:29,265 - [INFO] - [E:2| 1500]: Train Loss:0.001565, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:36:33,450 - [INFO] - [E:2| 1600]: Train Loss:0.0015589, Val MRR:0.10794, wikidata12k_0.001
2023-05-27 05:37:15,383 - [INFO] - [Epoch:2]: Training Loss:0.001556
2023-05-27 05:37:15,603 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 05:37:37,308 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 05:37:49,874 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 05:38:11,738 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 05:38:24,173 - [INFO] - [Evaluating Epoch 2 valid]:
MRR: Tail : 0.28305, Head : 0.07818, Avg : 0.18062
2023-05-27 05:38:25,157 - [INFO] - [Epoch 2]: Training Loss: 0.001556, Valid MRR: 0.18062,
2023-05-27 05:38:25,813 - [INFO] - [E:3| 0]: Train Loss:0.0013897, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:39:30,024 - [INFO] - [E:3| 100]: Train Loss:0.0014599, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:40:34,122 - [INFO] - [E:3| 200]: Train Loss:0.0014516, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:41:38,261 - [INFO] - [E:3| 300]: Train Loss:0.0014552, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:42:42,459 - [INFO] - [E:3| 400]: Train Loss:0.0014541, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:43:46,707 - [INFO] - [E:3| 500]: Train Loss:0.0014521, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:44:50,829 - [INFO] - [E:3| 600]: Train Loss:0.0014476, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:45:54,979 - [INFO] - [E:3| 700]: Train Loss:0.0014439, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:46:59,115 - [INFO] - [E:3| 800]: Train Loss:0.0014396, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:48:03,341 - [INFO] - [E:3| 900]: Train Loss:0.0014367, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:49:07,419 - [INFO] - [E:3| 1000]: Train Loss:0.0014329, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:50:11,647 - [INFO] - [E:3| 1100]: Train Loss:0.0014308, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:51:15,783 - [INFO] - [E:3| 1200]: Train Loss:0.0014276, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:52:19,915 - [INFO] - [E:3| 1300]: Train Loss:0.0014245, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:53:24,121 - [INFO] - [E:3| 1400]: Train Loss:0.0014212, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:54:28,236 - [INFO] - [E:3| 1500]: Train Loss:0.0014184, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:55:32,482 - [INFO] - [E:3| 1600]: Train Loss:0.0014147, Val MRR:0.18062, wikidata12k_0.001
2023-05-27 05:56:14,438 - [INFO] - [Epoch:3]: Training Loss:0.001413
2023-05-27 05:56:14,658 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 05:56:36,372 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 05:56:48,954 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 05:57:10,881 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 05:57:23,328 - [INFO] - [Evaluating Epoch 3 valid]:
MRR: Tail : 0.31549, Head : 0.09979, Avg : 0.20764
2023-05-27 05:57:24,420 - [INFO] - [Epoch 3]: Training Loss: 0.001413, Valid MRR: 0.20764,
2023-05-27 05:57:25,077 - [INFO] - [E:4| 0]: Train Loss:0.0014323, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 05:58:29,238 - [INFO] - [E:4| 100]: Train Loss:0.0013524, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 05:59:33,410 - [INFO] - [E:4| 200]: Train Loss:0.0013439, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:00:37,566 - [INFO] - [E:4| 300]: Train Loss:0.0013507, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:01:41,692 - [INFO] - [E:4| 400]: Train Loss:0.0013525, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:02:45,877 - [INFO] - [E:4| 500]: Train Loss:0.0013497, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:03:50,088 - [INFO] - [E:4| 600]: Train Loss:0.0013468, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:04:54,238 - [INFO] - [E:4| 700]: Train Loss:0.0013447, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:05:58,490 - [INFO] - [E:4| 800]: Train Loss:0.0013417, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:07:02,645 - [INFO] - [E:4| 900]: Train Loss:0.001339, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:08:06,755 - [INFO] - [E:4| 1000]: Train Loss:0.0013377, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:09:10,902 - [INFO] - [E:4| 1100]: Train Loss:0.0013348, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:10:15,038 - [INFO] - [E:4| 1200]: Train Loss:0.0013326, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:11:19,143 - [INFO] - [E:4| 1300]: Train Loss:0.0013302, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:12:23,347 - [INFO] - [E:4| 1400]: Train Loss:0.0013283, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:13:27,477 - [INFO] - [E:4| 1500]: Train Loss:0.0013269, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:14:31,542 - [INFO] - [E:4| 1600]: Train Loss:0.0013247, Val MRR:0.20764, wikidata12k_0.001
2023-05-27 06:15:13,457 - [INFO] - [Epoch:4]: Training Loss:0.001323
2023-05-27 06:15:13,677 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 06:15:35,362 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 06:15:47,916 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 06:16:09,784 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 06:16:22,221 - [INFO] - [Evaluating Epoch 4 valid]:
MRR: Tail : 0.36022, Head : 0.1037, Avg : 0.23196
2023-05-27 06:16:23,220 - [INFO] - [Epoch 4]: Training Loss: 0.0013235, Valid MRR: 0.23196,
2023-05-27 06:16:23,875 - [INFO] - [E:5| 0]: Train Loss:0.0013387, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:17:28,154 - [INFO] - [E:5| 100]: Train Loss:0.0012781, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:18:32,286 - [INFO] - [E:5| 200]: Train Loss:0.0012786, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:19:36,495 - [INFO] - [E:5| 300]: Train Loss:0.0012809, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:20:40,588 - [INFO] - [E:5| 400]: Train Loss:0.0012857, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:21:44,792 - [INFO] - [E:5| 500]: Train Loss:0.0012853, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:22:49,006 - [INFO] - [E:5| 600]: Train Loss:0.0012833, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:23:53,190 - [INFO] - [E:5| 700]: Train Loss:0.0012812, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:24:57,311 - [INFO] - [E:5| 800]: Train Loss:0.0012813, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:26:01,510 - [INFO] - [E:5| 900]: Train Loss:0.0012801, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:27:05,756 - [INFO] - [E:5| 1000]: Train Loss:0.0012789, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:28:09,936 - [INFO] - [E:5| 1100]: Train Loss:0.0012769, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:29:14,145 - [INFO] - [E:5| 1200]: Train Loss:0.0012746, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:30:18,293 - [INFO] - [E:5| 1300]: Train Loss:0.0012721, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:31:22,538 - [INFO] - [E:5| 1400]: Train Loss:0.0012703, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:32:26,694 - [INFO] - [E:5| 1500]: Train Loss:0.0012689, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:33:30,913 - [INFO] - [E:5| 1600]: Train Loss:0.0012677, Val MRR:0.23196, wikidata12k_0.001
2023-05-27 06:34:12,771 - [INFO] - [Epoch:5]: Training Loss:0.001267
2023-05-27 06:34:12,992 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 06:34:34,725 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 06:34:47,309 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 06:35:09,233 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 06:35:21,676 - [INFO] - [Evaluating Epoch 5 valid]:
MRR: Tail : 0.39017, Head : 0.12832, Avg : 0.25924
2023-05-27 06:35:22,811 - [INFO] - [Epoch 5]: Training Loss: 0.0012668, Valid MRR: 0.25924,
2023-05-27 06:35:23,469 - [INFO] - [E:6| 0]: Train Loss:0.0011894, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:36:27,594 - [INFO] - [E:6| 100]: Train Loss:0.0012342, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:37:31,786 - [INFO] - [E:6| 200]: Train Loss:0.0012378, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:38:35,956 - [INFO] - [E:6| 300]: Train Loss:0.0012388, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:39:40,226 - [INFO] - [E:6| 400]: Train Loss:0.0012378, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:40:44,423 - [INFO] - [E:6| 500]: Train Loss:0.0012438, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:41:48,645 - [INFO] - [E:6| 600]: Train Loss:0.0012421, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:42:52,773 - [INFO] - [E:6| 700]: Train Loss:0.0012408, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:43:56,948 - [INFO] - [E:6| 800]: Train Loss:0.0012416, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:45:01,063 - [INFO] - [E:6| 900]: Train Loss:0.001242, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:46:05,216 - [INFO] - [E:6| 1000]: Train Loss:0.0012397, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:47:09,350 - [INFO] - [E:6| 1100]: Train Loss:0.0012386, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:48:13,445 - [INFO] - [E:6| 1200]: Train Loss:0.0012373, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:49:17,622 - [INFO] - [E:6| 1300]: Train Loss:0.0012363, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:50:21,832 - [INFO] - [E:6| 1400]: Train Loss:0.0012346, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:51:26,056 - [INFO] - [E:6| 1500]: Train Loss:0.0012342, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:52:30,214 - [INFO] - [E:6| 1600]: Train Loss:0.001233, Val MRR:0.25924, wikidata12k_0.001
2023-05-27 06:53:12,160 - [INFO] - [Epoch:6]: Training Loss:0.001232
2023-05-27 06:53:12,380 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 06:53:34,088 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 06:53:46,650 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 06:54:08,519 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 06:54:20,952 - [INFO] - [Evaluating Epoch 6 valid]:
MRR: Tail : 0.37877, Head : 0.18554, Avg : 0.28215
2023-05-27 06:54:22,025 - [INFO] - [Epoch 6]: Training Loss: 0.0012324, Valid MRR: 0.28215,
2023-05-27 06:54:22,682 - [INFO] - [E:7| 0]: Train Loss:0.0011315, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 06:55:26,826 - [INFO] - [E:7| 100]: Train Loss:0.001205, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 06:56:30,996 - [INFO] - [E:7| 200]: Train Loss:0.0012037, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 06:57:35,173 - [INFO] - [E:7| 300]: Train Loss:0.0012034, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 06:58:39,365 - [INFO] - [E:7| 400]: Train Loss:0.0012073, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 06:59:43,659 - [INFO] - [E:7| 500]: Train Loss:0.0012094, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:00:47,839 - [INFO] - [E:7| 600]: Train Loss:0.0012093, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:01:51,994 - [INFO] - [E:7| 700]: Train Loss:0.0012077, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:02:56,159 - [INFO] - [E:7| 800]: Train Loss:0.0012085, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:04:00,272 - [INFO] - [E:7| 900]: Train Loss:0.0012086, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:05:04,432 - [INFO] - [E:7| 1000]: Train Loss:0.0012104, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:06:08,565 - [INFO] - [E:7| 1100]: Train Loss:0.00121, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:07:12,766 - [INFO] - [E:7| 1200]: Train Loss:0.0012097, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:08:16,920 - [INFO] - [E:7| 1300]: Train Loss:0.0012101, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:09:21,081 - [INFO] - [E:7| 1400]: Train Loss:0.0012095, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:10:25,247 - [INFO] - [E:7| 1500]: Train Loss:0.0012082, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:11:29,494 - [INFO] - [E:7| 1600]: Train Loss:0.0012075, Val MRR:0.28215, wikidata12k_0.001
2023-05-27 07:12:11,381 - [INFO] - [Epoch:7]: Training Loss:0.001208
2023-05-27 07:12:11,602 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 07:12:33,359 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 07:12:45,946 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 07:13:07,852 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 07:13:20,334 - [INFO] - [Evaluating Epoch 7 valid]:
MRR: Tail : 0.40626, Head : 0.21375, Avg : 0.31001
2023-05-27 07:13:21,326 - [INFO] - [Epoch 7]: Training Loss: 0.0012077, Valid MRR: 0.31001,
2023-05-27 07:13:21,980 - [INFO] - [E:8| 0]: Train Loss:0.0012363, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:14:26,096 - [INFO] - [E:8| 100]: Train Loss:0.0011868, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:15:30,354 - [INFO] - [E:8| 200]: Train Loss:0.0011847, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:16:34,466 - [INFO] - [E:8| 300]: Train Loss:0.0011814, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:17:38,565 - [INFO] - [E:8| 400]: Train Loss:0.0011847, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:18:42,799 - [INFO] - [E:8| 500]: Train Loss:0.0011887, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:19:46,964 - [INFO] - [E:8| 600]: Train Loss:0.0011901, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:20:51,144 - [INFO] - [E:8| 700]: Train Loss:0.0011897, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:21:55,282 - [INFO] - [E:8| 800]: Train Loss:0.0011913, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:22:59,411 - [INFO] - [E:8| 900]: Train Loss:0.0011918, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:24:03,538 - [INFO] - [E:8| 1000]: Train Loss:0.0011908, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:25:07,761 - [INFO] - [E:8| 1100]: Train Loss:0.0011915, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:26:11,872 - [INFO] - [E:8| 1200]: Train Loss:0.0011925, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:27:16,041 - [INFO] - [E:8| 1300]: Train Loss:0.0011918, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:28:20,210 - [INFO] - [E:8| 1400]: Train Loss:0.0011905, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:29:24,336 - [INFO] - [E:8| 1500]: Train Loss:0.0011898, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:30:28,566 - [INFO] - [E:8| 1600]: Train Loss:0.0011888, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:31:10,538 - [INFO] - [Epoch:8]: Training Loss:0.001189
2023-05-27 07:31:10,758 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 07:31:32,478 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 07:31:45,038 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 07:32:06,913 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 07:32:19,354 - [INFO] - [Evaluating Epoch 8 valid]:
MRR: Tail : 0.41408, Head : 0.20141, Avg : 0.30774
2023-05-27 07:32:19,354 - [INFO] - [Epoch 8]: Training Loss: 0.0011888, Valid MRR: 0.31001,
2023-05-27 07:32:20,011 - [INFO] - [E:9| 0]: Train Loss:0.0011748, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:33:24,159 - [INFO] - [E:9| 100]: Train Loss:0.0011746, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:34:28,351 - [INFO] - [E:9| 200]: Train Loss:0.0011787, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:35:32,472 - [INFO] - [E:9| 300]: Train Loss:0.0011761, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:36:36,656 - [INFO] - [E:9| 400]: Train Loss:0.0011729, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:37:40,796 - [INFO] - [E:9| 500]: Train Loss:0.0011725, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:38:44,981 - [INFO] - [E:9| 600]: Train Loss:0.0011741, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:39:49,133 - [INFO] - [E:9| 700]: Train Loss:0.001173, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:40:53,329 - [INFO] - [E:9| 800]: Train Loss:0.0011736, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:41:57,558 - [INFO] - [E:9| 900]: Train Loss:0.0011731, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:43:01,737 - [INFO] - [E:9| 1000]: Train Loss:0.0011729, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:44:05,854 - [INFO] - [E:9| 1100]: Train Loss:0.001173, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:45:10,080 - [INFO] - [E:9| 1200]: Train Loss:0.0011727, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:46:14,191 - [INFO] - [E:9| 1300]: Train Loss:0.0011718, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:47:18,385 - [INFO] - [E:9| 1400]: Train Loss:0.001171, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:48:22,543 - [INFO] - [E:9| 1500]: Train Loss:0.0011709, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:49:26,748 - [INFO] - [E:9| 1600]: Train Loss:0.0011712, Val MRR:0.31001, wikidata12k_0.001
2023-05-27 07:50:08,734 - [INFO] - [Epoch:9]: Training Loss:0.001171
2023-05-27 07:50:08,954 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 07:50:30,672 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 07:50:43,251 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 07:51:05,138 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 07:51:17,628 - [INFO] - [Evaluating Epoch 9 valid]:
MRR: Tail : 0.42849, Head : 0.23814, Avg : 0.33331
MR: Tail : 655.47, Head : 840.42, Avg : 747.94
Hit-1: Tail : 0.35832, Head : 0.15504, Avg : 0.25668
Hit-3: Tail : 0.45838, Head : 0.2739, Avg : 0.36614
Hit-10: Tail : 0.55785, Head : 0.39074, Avg : 0.47429
2023-05-27 07:51:18,545 - [INFO] - [Epoch 9]: Training Loss: 0.0011709, Valid MRR: 0.33331,
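For reference: MRR, MR, and Hit-k above are the standard rank-based link-prediction metrics, computed separately over tail and head queries and then averaged. A minimal sketch of how they follow from a list of ranks (illustrative only; the actual evaluation code lives in main.py, which this diff does not show):

import numpy as np

def rank_metrics(ranks):
    # ranks: 1-based rank of the gold entity for each evaluation query
    ranks = np.asarray(ranks, dtype=float)
    return {
        'MRR': (1.0 / ranks).mean(),    # mean reciprocal rank
        'MR': ranks.mean(),             # mean rank
        'Hit-1': (ranks <= 1).mean(),   # fraction of queries ranked first
        'Hit-3': (ranks <= 3).mean(),
        'Hit-10': (ranks <= 10).mean(),
    }

# The 'Avg' column is simply the mean of the tail-batch and head-batch value of each metric.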
2023-05-27 07:51:19,204 - [INFO] - [E:10| 0]: Train Loss:0.00113, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:52:23,358 - [INFO] - [E:10| 100]: Train Loss:0.0011531, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:53:27,523 - [INFO] - [E:10| 200]: Train Loss:0.0011557, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:54:31,758 - [INFO] - [E:10| 300]: Train Loss:0.0011545, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:55:36,004 - [INFO] - [E:10| 400]: Train Loss:0.0011554, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:56:40,140 - [INFO] - [E:10| 500]: Train Loss:0.001154, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:57:44,301 - [INFO] - [E:10| 600]: Train Loss:0.0011525, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:58:48,517 - [INFO] - [E:10| 700]: Train Loss:0.0011538, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 07:59:52,698 - [INFO] - [E:10| 800]: Train Loss:0.0011536, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:00:56,912 - [INFO] - [E:10| 900]: Train Loss:0.0011541, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:02:01,143 - [INFO] - [E:10| 1000]: Train Loss:0.0011546, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:03:05,293 - [INFO] - [E:10| 1100]: Train Loss:0.0011542, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:04:09,471 - [INFO] - [E:10| 1200]: Train Loss:0.0011539, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:05:13,701 - [INFO] - [E:10| 1300]: Train Loss:0.0011531, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:06:17,887 - [INFO] - [E:10| 1400]: Train Loss:0.0011534, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:07:22,089 - [INFO] - [E:10| 1500]: Train Loss:0.0011546, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:08:26,239 - [INFO] - [E:10| 1600]: Train Loss:0.0011552, Val MRR:0.33331, wikidata12k_0.001
2023-05-27 08:09:08,153 - [INFO] - [Epoch:10]: Training Loss:0.001156
2023-05-27 08:09:08,373 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 08:09:30,456 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 08:09:43,084 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 08:10:05,005 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 08:10:17,448 - [INFO] - [Evaluating Epoch 10 valid]:
MRR: Tail : 0.45191, Head : 0.21626, Avg : 0.33409
2023-05-27 08:10:18,436 - [INFO] - [Epoch 10]: Training Loss: 0.0011556, Valid MRR: 0.33409,
2023-05-27 08:10:19,090 - [INFO] - [E:11| 0]: Train Loss:0.0011363, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:11:23,530 - [INFO] - [E:11| 100]: Train Loss:0.0011426, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:12:27,950 - [INFO] - [E:11| 200]: Train Loss:0.0011483, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:13:32,143 - [INFO] - [E:11| 300]: Train Loss:0.0011472, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:14:36,469 - [INFO] - [E:11| 400]: Train Loss:0.0011477, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:15:40,641 - [INFO] - [E:11| 500]: Train Loss:0.0011474, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:16:44,731 - [INFO] - [E:11| 600]: Train Loss:0.0011465, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:17:48,900 - [INFO] - [E:11| 700]: Train Loss:0.0011469, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:18:53,113 - [INFO] - [E:11| 800]: Train Loss:0.0011469, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:19:57,285 - [INFO] - [E:11| 900]: Train Loss:0.0011457, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:21:01,406 - [INFO] - [E:11| 1000]: Train Loss:0.0011445, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:22:05,596 - [INFO] - [E:11| 1100]: Train Loss:0.0011434, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:23:09,693 - [INFO] - [E:11| 1200]: Train Loss:0.0011431, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:24:13,830 - [INFO] - [E:11| 1300]: Train Loss:0.001143, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:25:18,076 - [INFO] - [E:11| 1400]: Train Loss:0.0011426, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:26:22,160 - [INFO] - [E:11| 1500]: Train Loss:0.0011422, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:27:26,373 - [INFO] - [E:11| 1600]: Train Loss:0.0011418, Val MRR:0.33409, wikidata12k_0.001
2023-05-27 08:28:08,368 - [INFO] - [Epoch:11]: Training Loss:0.001142
2023-05-27 08:28:08,589 - [INFO] - [Valid, Tail_Batch Step 0] wikidata12k_0.001
2023-05-27 08:28:30,301 - [INFO] - [Valid, Tail_Batch Step 100] wikidata12k_0.001
2023-05-27 08:28:42,888 - [INFO] - [Valid, Head_Batch Step 0] wikidata12k_0.001
2023-05-27 08:29:04,760 - [INFO] - [Valid, Head_Batch Step 100] wikidata12k_0.001
2023-05-27 08:29:17,200 - [INFO] - [Evaluating Epoch 11 valid]:
MRR: Tail : 0.4433, Head : 0.23916, Avg : 0.34123
2023-05-27 08:29:18,266 - [INFO] - [Epoch 11]: Training Loss: 0.0011416, Valid MRR: 0.34123,
2023-05-27 08:29:18,927 - [INFO] - [E:12| 0]: Train Loss:0.0010957, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:30:23,063 - [INFO] - [E:12| 100]: Train Loss:0.0011303, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:31:27,243 - [INFO] - [E:12| 200]: Train Loss:0.001132, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:32:31,360 - [INFO] - [E:12| 300]: Train Loss:0.0011321, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:33:35,484 - [INFO] - [E:12| 400]: Train Loss:0.0011313, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:34:39,656 - [INFO] - [E:12| 500]: Train Loss:0.0011302, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:35:43,783 - [INFO] - [E:12| 600]: Train Loss:0.0011318, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:36:47,900 - [INFO] - [E:12| 700]: Train Loss:0.0011316, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:37:52,082 - [INFO] - [E:12| 800]: Train Loss:0.0011323, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:38:56,174 - [INFO] - [E:12| 900]: Train Loss:0.001132, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:40:00,316 - [INFO] - [E:12| 1000]: Train Loss:0.0011317, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:41:04,530 - [INFO] - [E:12| 1100]: Train Loss:0.0011322, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:42:08,648 - [INFO] - [E:12| 1200]: Train Loss:0.0011318, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:43:12,819 - [INFO] - [E:12| 1300]: Train Loss:0.0011314, Val MRR:0.34123, wikidata12k_0.001
2023-05-27 08:44:18,052 - [INFO] - [E:12| 1400]: Train Loss:0.0011312, Val MRR:0.34123, wikidata12k_0.001

53
visualization_util.py Normal file
View File

@ -0,0 +1,53 @@
import argparse
import os
import re
from datetime import datetime

import matplotlib.pyplot as plt
import numpy as np


def extract_learning_curves(args):
    # --log_path is either a comma-separated list of log files or a single
    # directory, in which case every regular file inside it is treated as a log.
    paths = args.log_path.split(',')
    if len(paths) == 1 and os.path.isdir(paths[0]):
        paths = [os.path.join(paths[0], f) for f in os.listdir(paths[0])
                 if os.path.isfile(os.path.join(paths[0], f))]
    learning_curves = {}
    for path in paths:
        learning_curve = []
        with open(path, 'r') as log_file:
            lines = log_file.readlines()
        for line in lines:
            # Matches the per-epoch summary lines, e.g.
            # "... - [INFO] - [Epoch 3]: Training Loss: 0.001413, Valid MRR: 0.20764,"
            matched = re.match(r'[0-9\- :,]*\[INFO\] - \[Epoch ([0-9]+)\].*Valid MRR: ([0-9\.]+).*', line)
            if matched:
                learning_curve.append(float(matched.group(2)))
                if int(matched.group(1)) >= args.num_epochs:
                    break
        learning_curves[os.path.basename(path)] = learning_curve
    return learning_curves


def draw_learning_curves(args, learning_curves):
    for name in learning_curves.keys():
        epochs = np.arange(len(learning_curves[name]))
        # Use the file name without its extension as the legend label.
        matched = re.match(r'(.*)\..*', name)
        label = matched.group(1) if matched else name
        plt.plot(epochs, learning_curves[name], label=label)
    plt.xlabel("Epochs")
    plt.ylabel("MRR")
    plt.legend(title=args.legend_title)
    # Millisecond timestamp as the file name, so repeated runs never overwrite each other.
    out_file = str(round(datetime.utcnow().timestamp() * 1000)) + '.' + args.fig_filetype
    plt.savefig(os.path.join(args.out_path, out_file))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description="Parser For Arguments", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('--task', type=str, default=None)
    parser.add_argument('--log_path', type=str, default=None)
    # Default to the current directory so os.path.join always receives a valid path.
    parser.add_argument('--out_path', type=str, default='.')
    parser.add_argument('--num_epochs', type=int, default=200)
    parser.add_argument('--legend_title', type=str, default="Learning rate")
    parser.add_argument('--fig_filetype', type=str, default="svg")
    args = parser.parse_args()
    if args.task == 'learning_curve':
        draw_learning_curves(args, extract_learning_curves(args))
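A typical invocation of the utility above, using only the flags its argparse block defines (the ./log/ and ./figs/ paths are placeholders):

python visualization_util.py --task learning_curve --log_path ./log/ --out_path ./figs/ --num_epochs 200 --fig_filetype svg

Every log file found under --log_path contributes one curve; the legend label is the file's base name without its extension, e.g. wikidata12k_0.001 for the run logged above.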

1072
wikidata12k.log Normal file

File diff suppressed because it is too large Load Diff

15209
wikidata12k_1n.out Normal file

File diff suppressed because it is too large Load Diff

75
wikidata12k_at.out Normal file
View File

@ -0,0 +1,75 @@
nohup: ignoring input
2023-05-27 08:51:48,116 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_at', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
{'batch_size': 128,
'bias': False,
'config_dir': './config/',
'dataset': 'wikidata12k',
'drop': 0.0,
'drop_path': 0.0,
'embed_dim': 400,
'ent_vec_dim': 400,
'expansion_factor': 4,
'expansion_factor_token': 0.5,
'feat_drop': 0.2,
'filt_h': 1,
'filt_w': 9,
'form': 'plain',
'gpu': '3',
'grid_search': False,
'hid_drop': 0.5,
'image_h': 128,
'image_w': 128,
'in_channels': 1,
'inp_drop': 0.2,
'k_h': 20,
'k_w': 10,
'ker_sz': 9,
'l2': 0.0,
'lbl_smooth': 0.1,
'log_dir': './log/',
'lr': 0.0001,
'max_epochs': 500,
'mixer_depth': 16,
'mixer_dim': 256,
'mixer_dropout': 0.2,
'name': 'wikidata12k_at',
'neg_num': 1000,
'num_filt': 96,
'num_workers': 0,
'opt': 'adam',
'out_channels': 32,
'patch_size': 8,
'perm': 1,
'rel_vec_dim': 400,
'restore': False,
'seed': 42,
'test_only': False,
'train_strategy': 'one_to_n'}
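'train_strategy': 'one_to_n' together with 'lbl_smooth': 0.1 is the usual ConvE-style 1-N setup: each (subject, relation) query is scored against every entity, and the multi-hot target vector is smoothed before the binary cross-entropy loss. A hedged sketch of that smoothing (the actual loss code lives in main.py and is not shown here; the entity count and indices below are placeholders):

import torch
import torch.nn.functional as F

def smooth_labels(y, lbl_smooth):
    # y: (batch, num_entities) multi-hot targets for each (subject, relation) query
    num_entities = y.size(1)
    return (1.0 - lbl_smooth) * y + lbl_smooth / num_entities

y = torch.zeros(4, 1000)      # hypothetical batch of 4 queries over 1000 entities
y[0, 7] = 1.0                 # hypothetical gold tail entity
targets = smooth_labels(y, lbl_smooth=0.1)
scores = torch.randn(4, 1000) # stand-in for model logits
loss = F.binary_cross_entropy_with_logits(scores, targets)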
Traceback (most recent call last):
File "main.py", line 693, in <module>
model.fit()
File "main.py", line 492, in fit
train_loss = self.run_epoch(epoch)
File "main.py", line 458, in run_epoch
pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy)
File "/root/kg_374/Thesis_split/models.py", line 558, in forward
z = self.forward_tokens(z)
File "/root/kg_374/Thesis_split/models.py", line 547, in forward_tokens
x = block(x)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/container.py", line 139, in forward
input = module(input)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/root/kg_374/Thesis_split/models.py", line 757, in forward
* self.mlp(self.norm2(x)))
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/root/kg_374/Thesis_split/models.py", line 821, in forward
x = self.act(x)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1130, in _call_impl
return forward_call(*input, **kwargs)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/nn/modules/activation.py", line 681, in forward
return F.gelu(input, approximate=self.approximate)
RuntimeError: CUDA out of memory. Tried to allocate 800.00 MiB (GPU 0; 31.72 GiB total capacity; 10.92 GiB already allocated; 669.94 MiB free; 10.98 GiB reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
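The run above therefore died with a CUDA out-of-memory error inside the GELU of the mixer MLP. The usual first-line mitigations follow the allocator's own hint; for example (the environment variable is the one named in the error message, while the main.py flag names are assumed to mirror the config keys printed above, with illustrative values):

# Reduce fragmentation, as the error message suggests:
export PYTORCH_CUDA_ALLOC_CONF=max_split_size_mb:128
# And/or shrink the activation footprint with a smaller batch or mixer width:
python main.py --dataset wikidata12k --name wikidata12k_at --gpu 3 --batch_size 64 --mixer_dim 128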

9207
yago11k.out Normal file

File diff suppressed because it is too large Load Diff