36 Commits

Author SHA1 Message Date
3f0018fedc add rel 2024-06-22 20:42:41 +07:00
9502c8d009 test 2024-06-19 00:24:20 +07:00
2637f53848 test 2024-06-19 00:20:44 +07:00
975a0a77c2 test 2024-06-19 00:19:31 +07:00
a064d12763 test 2024-06-19 00:16:05 +07:00
6d43b88599 test 2024-06-19 00:12:43 +07:00
7448528eec test 2024-06-19 00:11:37 +07:00
7194f8046c test 2024-06-19 00:10:51 +07:00
417a38d2e5 test 2024-06-19 00:08:27 +07:00
03f42561c6 test 2024-06-19 00:05:57 +07:00
936c37d0f6 test 2024-06-19 00:03:14 +07:00
39734013c4 test 2024-06-18 23:46:20 +07:00
bb9856ecd1 test 2024-06-18 21:11:18 +07:00
c2b17ec1ba try to add attn 2024-06-16 19:09:47 +07:00
f8e969cbd1 try swin 2024-04-27 11:52:23 +07:00
ae0f43ab4d try swin 2024-04-27 11:51:35 +07:00
dda7f13dbd try swin 2024-04-27 11:49:07 +07:00
1dd423edf0 try swin 2024-04-27 11:48:25 +07:00
a1bf2d7389 try swin 2024-04-27 11:46:32 +07:00
c31588cc5f try swin 2024-04-27 11:45:24 +07:00
c03e24f4c2 try swin 2024-04-27 11:43:15 +07:00
a47a60f6a1 try swin 2024-04-27 11:40:27 +07:00
ba388148d4 try swin 2024-04-27 11:27:38 +07:00
1b816fed50 try swin 2024-04-27 11:24:57 +07:00
32962bf421 try swin 2024-04-27 11:23:28 +07:00
b9efe68d3c try swin 2024-04-27 11:12:52 +07:00
465f98bef8 try swin 2024-04-27 11:08:46 +07:00
d4ac470c54 try swin 2024-04-27 11:07:48 +07:00
28a8352044 try swin 2024-04-27 10:59:11 +07:00
b77c79708e try swin 2024-04-27 10:56:10 +07:00
22d44d1a99 try swin 2024-04-27 10:32:08 +07:00
63ccb4ec75 try swin 2024-04-27 10:26:58 +07:00
6ec566505f try swin 2024-04-27 10:18:48 +07:00
30805a0af9 try swin 2024-04-27 10:04:41 +07:00
2e2b12571a update viz util 2023-06-24 04:11:17 +00:00
d4b29eec2c auto retry 2023-06-08 06:40:16 +00:00
103 changed files with 514568 additions and 779496 deletions

View File

@ -12407,3 +12407,233 @@
12406 Carry out roadside bombing[65]
12407 Appeal for target to allow international involvement (non-mediation)[1]
12408 Reject request for change in leadership[179]
12409 Criticize or denounce
12410 Express intent to meet or negotiate
12411 Consult
12412 Make an appeal or request
12413 Abduct, hijack, or take hostage
12414 Praise or endorse
12415 Engage in negotiation
12416 Use unconventional violence
12417 Make statement
12418 Arrest, detain, or charge with legal action
12419 Use conventional military force
12420 Complain officially
12421 Impose administrative sanctions
12422 Express intent to cooperate
12423 Make a visit
12424 Appeal for de-escalation of military engagement
12425 Sign formal agreement
12426 Attempt to assassinate
12427 Host a visit
12428 Increase military alert status
12429 Impose embargo, boycott, or sanctions
12430 Provide economic aid
12431 Demonstrate or rally
12432 Express intent to engage in diplomatic cooperation (such as policy support)
12433 Appeal for intelligence
12434 Demand
12435 Carry out suicide bombing
12436 Threaten
12437 Express intent to provide material aid
12438 Grant diplomatic recognition
12439 Meet at a 'third' location
12440 Accuse
12441 Investigate
12442 Reject
12443 Appeal for diplomatic cooperation (such as policy support)
12444 Engage in symbolic act
12445 Defy norms, law
12446 Consider policy option
12447 Provide aid
12448 Sexually assault
12449 Make empathetic comment
12450 Bring lawsuit against
12451 Impose blockade, restrict movement
12452 Make pessimistic comment
12453 Protest violently, riot
12454 Reduce or break diplomatic relations
12455 Grant asylum
12456 Engage in diplomatic cooperation
12457 Make optimistic comment
12458 Torture
12459 Refuse to yield
12460 Appeal for change in leadership
12461 Cooperate militarily
12462 Mobilize or increase armed forces
12463 fight with small arms and light weapons
12464 Ease administrative sanctions
12465 Appeal for political reform
12466 Return, release person(s)
12467 Discuss by telephone
12468 Demonstrate for leadership change
12469 Impose restrictions on political freedoms
12470 Reduce relations
12471 Investigate crime, corruption
12472 Engage in material cooperation
12473 Appeal to others to meet or negotiate
12474 Provide humanitarian aid
12475 Use tactics of violent repression
12476 Occupy territory
12477 Demand humanitarian aid
12478 Threaten non-force
12479 Express intent to cooperate economically
12480 Conduct suicide, car, or other non-military bombing
12481 Demand diplomatic cooperation (such as policy support)
12482 Demand meeting, negotiation
12483 Deny responsibility
12484 Express intent to change institutions, regime
12485 Give ultimatum
12486 Appeal for judicial cooperation
12487 Rally support on behalf of
12488 Obstruct passage, block
12489 Share intelligence or information
12490 Expel or deport individuals
12491 Confiscate property
12492 Accuse of aggression
12493 Physically assault
12494 Retreat or surrender militarily
12495 Veto
12496 Kill by physical assault
12497 Assassinate
12498 Appeal for change in institutions, regime
12499 Forgive
12500 Reject proposal to meet, discuss, or negotiate
12501 Express intent to provide humanitarian aid
12502 Appeal for release of persons or property
12503 Acknowledge or claim responsibility
12504 Ease economic sanctions, boycott, embargo
12505 Express intent to cooperate militarily
12506 Cooperate economically
12507 Express intent to provide economic aid
12508 Mobilize or increase police power
12509 Employ aerial weapons
12510 Accuse of human rights abuses
12511 Conduct strike or boycott
12512 Appeal for policy change
12513 Demonstrate military or police power
12514 Provide military aid
12515 Reject plan, agreement to settle dispute
12516 Yield
12517 Appeal for easing of administrative sanctions
12518 Mediate
12519 Apologize
12520 Express intent to release persons or property
12521 Express intent to de-escalate military engagement
12522 Accede to demands for rights
12523 Demand economic aid
12524 Impose state of emergency or martial law
12525 Receive deployment of peacekeepers
12526 Demand de-escalation of military engagement
12527 Declare truce, ceasefire
12528 Reduce or stop humanitarian assistance
12529 Appeal to others to settle dispute
12530 Reject request for military aid
12531 Threaten with political dissent, protest
12532 Appeal to engage in or accept mediation
12533 Express intent to ease economic sanctions, boycott, or embargo
12534 Coerce
12535 fight with artillery and tanks
12536 Express intent to cooperate on intelligence
12537 Express intent to settle dispute
12538 Express accord
12539 Decline comment
12540 Rally opposition against
12541 Halt negotiations
12542 Demand that target yields
12543 Appeal for military aid
12544 Threaten with military force
12545 Express intent to provide military protection or peacekeeping
12546 Threaten with sanctions, boycott, embargo
12547 Express intent to provide military aid
12548 Demand change in leadership
12549 Appeal for economic aid
12550 Refuse to de-escalate military engagement
12551 Refuse to release persons or property
12552 Increase police alert status
12553 Return, release property
12554 Ease military blockade
12555 Appeal for material cooperation
12556 Express intent to cooperate on judicial matters
12557 Appeal for economic cooperation
12558 Demand settling of dispute
12559 Accuse of crime, corruption
12560 Defend verbally
12561 Provide military protection or peacekeeping
12562 Accuse of espionage, treason
12563 Seize or damage property
12564 Accede to requests or demands for political reform
12565 Appeal for easing of economic sanctions, boycott, or embargo
12566 Threaten to reduce or stop aid
12567 Engage in judicial cooperation
12568 Appeal to yield
12569 Demand military aid
12570 Refuse to ease administrative sanctions
12571 Demand release of persons or property
12572 Accede to demands for change in leadership
12573 Appeal for humanitarian aid
12574 Threaten with repression
12575 Demand change in institutions, regime
12576 Demonstrate for policy change
12577 Appeal for aid
12578 Appeal for rights
12579 Engage in violent protest for rights
12580 Express intent to mediate
12581 Expel or withdraw peacekeepers
12582 Appeal for military protection or peacekeeping
12583 Engage in mass killings
12584 Accuse of war crimes
12585 Reject military cooperation
12586 Threaten to halt negotiations
12587 Ban political parties or politicians
12588 Express intent to change leadership
12589 Demand material cooperation
12590 Express intent to institute political reform
12591 Demand easing of administrative sanctions
12592 Express intent to engage in material cooperation
12593 Reduce or stop economic assistance
12594 Express intent to ease administrative sanctions
12595 Demand intelligence cooperation
12596 Ease curfew
12597 Receive inspectors
12598 Demand rights
12599 Demand political reform
12600 Demand judicial cooperation
12601 Engage in political dissent
12602 Detonate nuclear weapons
12603 Violate ceasefire
12604 Express intent to accept mediation
12605 Refuse to ease economic sanctions, boycott, or embargo
12606 Demand mediation
12607 Obstruct passage to demand leadership change
12608 Express intent to yield
12609 Conduct hunger strike
12610 Threaten to halt mediation
12611 Reject judicial cooperation
12612 Reduce or stop military assistance
12613 Ease political dissent
12614 Threaten to reduce or break relations
12615 Demobilize armed forces
12616 Use as human shield
12617 Demand policy change
12618 Accede to demands for change in institutions, regime
12619 Reject economic cooperation
12620 Reject material cooperation
12621 Halt mediation
12622 Accede to demands for change in policy
12623 Investigate war crimes
12624 Threaten with administrative sanctions
12625 Reduce or stop material aid
12626 Destroy property
12627 Express intent to change policy
12628 Use chemical, biological, or radiological weapons
12629 Reject request for military protection or peacekeeping
12630 Demand material aid
12631 Engage in mass expulsion
12632 Investigate human rights abuses
12633 Carry out car bombing
12634 Expel or withdraw
12635 Ease state of emergency or martial law
12636 Carry out roadside bombing
12637 Appeal for target to allow international involvement (non-mediation)
12638 Reject request for change in leadership

View File

@ -1,15 +1,15 @@
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False

File diff suppressed because it is too large Load Diff

View File

@ -1,423 +1,423 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]

File diff suppressed because it is too large Load Diff

View File

@ -1,71 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -421,3 +421,27 @@
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
423 P131
424 P1435
425 P39
426 P54
427 P31
428 P463
429 P512
430 P190
431 P150
432 P1376
433 P166
434 P2962
435 P108
436 P17
437 P793
438 P69
439 P26
440 P579
441 P1411
442 P6
443 P1346
444 P102
445 P27
446 P551

View File

@ -1,15 +0,0 @@
# triples: 291818
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 19271
# valid triples: 20208
# train triples: 252339
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: inter
nonames: False

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,24 +0,0 @@
P1376 0
P512 4
P579 3
P150 18
P190 5
P551 19
P131 1
P793 21
P1435 13
P39 14
P17 6
P54 22
P31 15
P6 7
P1411 20
P2962 2
P463 9
P1346 16
P108 10
P69 23
P166 17
P102 11
P27 12
P26 8

File diff suppressed because it is too large Load Diff

View File

@ -1,423 +0,0 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]

File diff suppressed because it is too large Load Diff

View File

@ -1,71 +0,0 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,15 +0,0 @@
# triples: 78032
# entities: 10526
# relations: 177
# timesteps: 46
# test triples: 6909
# valid triples: 7198
# train triples: 63925
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 5.0
Search method: N/A
filter_dupes: inter
nonames: False

File diff suppressed because it is too large Load Diff

View File

@ -1,177 +0,0 @@
0 <wasBornIn>[0-2]
1 <wasBornIn>[2-5]
2 <wasBornIn>[5-7]
3 <wasBornIn>[7-10]
4 <wasBornIn>[10-12]
5 <wasBornIn>[12-15]
6 <wasBornIn>[15-17]
7 <wasBornIn>[17-20]
8 <wasBornIn>[20-22]
9 <wasBornIn>[22-25]
10 <wasBornIn>[25-27]
11 <wasBornIn>[27-30]
12 <wasBornIn>[30-32]
13 <wasBornIn>[32-35]
14 <wasBornIn>[35-45]
15 <wasBornIn>[52-52]
16 <diedIn>[0-3]
17 <diedIn>[3-5]
18 <diedIn>[5-7]
19 <diedIn>[7-10]
20 <diedIn>[10-12]
21 <diedIn>[12-14]
22 <diedIn>[14-17]
23 <diedIn>[17-19]
24 <diedIn>[19-21]
25 <diedIn>[21-23]
26 <diedIn>[23-25]
27 <diedIn>[25-27]
28 <diedIn>[27-29]
29 <diedIn>[29-32]
30 <diedIn>[32-34]
31 <diedIn>[34-36]
32 <diedIn>[36-38]
33 <diedIn>[38-40]
34 <diedIn>[40-42]
35 <diedIn>[42-44]
36 <diedIn>[44-47]
37 <diedIn>[47-49]
38 <diedIn>[49-51]
39 <diedIn>[51-53]
40 <diedIn>[53-55]
41 <diedIn>[55-57]
42 <diedIn>[59-59]
43 <worksAt>[0-3]
44 <worksAt>[3-5]
45 <worksAt>[5-7]
46 <worksAt>[7-10]
47 <worksAt>[10-12]
48 <worksAt>[12-14]
49 <worksAt>[14-17]
50 <worksAt>[17-19]
51 <worksAt>[19-21]
52 <worksAt>[21-23]
53 <worksAt>[23-25]
54 <worksAt>[25-27]
55 <worksAt>[27-29]
56 <worksAt>[29-32]
57 <worksAt>[32-34]
58 <worksAt>[34-36]
59 <worksAt>[36-40]
60 <worksAt>[40-42]
61 <worksAt>[42-47]
62 <worksAt>[47-53]
63 <worksAt>[59-59]
64 <playsFor>[0-3]
65 <playsFor>[3-5]
66 <playsFor>[5-23]
67 <playsFor>[23-25]
68 <playsFor>[25-27]
69 <playsFor>[27-29]
70 <playsFor>[29-32]
71 <playsFor>[32-34]
72 <playsFor>[34-36]
73 <playsFor>[36-38]
74 <playsFor>[38-40]
75 <playsFor>[40-42]
76 <playsFor>[42-44]
77 <playsFor>[44-47]
78 <playsFor>[47-51]
79 <playsFor>[59-59]
80 <hasWonPrize>[1-4]
81 <hasWonPrize>[4-6]
82 <hasWonPrize>[6-8]
83 <hasWonPrize>[8-11]
84 <hasWonPrize>[11-15]
85 <hasWonPrize>[15-18]
86 <hasWonPrize>[18-22]
87 <hasWonPrize>[22-26]
88 <hasWonPrize>[26-30]
89 <hasWonPrize>[30-33]
90 <hasWonPrize>[33-37]
91 <hasWonPrize>[37-47]
92 <hasWonPrize>[47-53]
93 <hasWonPrize>[59-59]
94 <isMarriedTo>[0-3]
95 <isMarriedTo>[3-5]
96 <isMarriedTo>[5-7]
97 <isMarriedTo>[7-10]
98 <isMarriedTo>[10-12]
99 <isMarriedTo>[12-14]
100 <isMarriedTo>[14-17]
101 <isMarriedTo>[17-19]
102 <isMarriedTo>[19-21]
103 <isMarriedTo>[21-23]
104 <isMarriedTo>[23-25]
105 <isMarriedTo>[25-27]
106 <isMarriedTo>[27-29]
107 <isMarriedTo>[29-32]
108 <isMarriedTo>[32-34]
109 <isMarriedTo>[34-38]
110 <isMarriedTo>[38-42]
111 <isMarriedTo>[42-47]
112 <isMarriedTo>[47-51]
113 <isMarriedTo>[51-55]
114 <isMarriedTo>[59-59]
115 <owns>[0-10]
116 <owns>[10-17]
117 <owns>[17-19]
118 <owns>[19-23]
119 <owns>[23-36]
120 <owns>[36-38]
121 <owns>[59-59]
122 <graduatedFrom>[0-3]
123 <graduatedFrom>[3-5]
124 <graduatedFrom>[5-7]
125 <graduatedFrom>[7-10]
126 <graduatedFrom>[10-14]
127 <graduatedFrom>[14-17]
128 <graduatedFrom>[17-19]
129 <graduatedFrom>[19-21]
130 <graduatedFrom>[21-23]
131 <graduatedFrom>[23-27]
132 <graduatedFrom>[27-32]
133 <graduatedFrom>[32-34]
134 <graduatedFrom>[34-38]
135 <graduatedFrom>[38-42]
136 <graduatedFrom>[59-59]
137 <isAffiliatedTo>[1-4]
138 <isAffiliatedTo>[4-6]
139 <isAffiliatedTo>[6-8]
140 <isAffiliatedTo>[8-11]
141 <isAffiliatedTo>[11-13]
142 <isAffiliatedTo>[13-15]
143 <isAffiliatedTo>[15-18]
144 <isAffiliatedTo>[18-20]
145 <isAffiliatedTo>[20-22]
146 <isAffiliatedTo>[22-24]
147 <isAffiliatedTo>[24-26]
148 <isAffiliatedTo>[26-28]
149 <isAffiliatedTo>[28-30]
150 <isAffiliatedTo>[30-33]
151 <isAffiliatedTo>[33-35]
152 <isAffiliatedTo>[35-37]
153 <isAffiliatedTo>[37-40]
154 <isAffiliatedTo>[40-42]
155 <isAffiliatedTo>[42-44]
156 <isAffiliatedTo>[44-47]
157 <isAffiliatedTo>[47-49]
158 <isAffiliatedTo>[49-51]
159 <isAffiliatedTo>[51-53]
160 <isAffiliatedTo>[53-55]
161 <isAffiliatedTo>[55-57]
162 <isAffiliatedTo>[59-59]
163 <created>[0-3]
164 <created>[3-5]
165 <created>[5-10]
166 <created>[10-12]
167 <created>[12-17]
168 <created>[17-19]
169 <created>[19-25]
170 <created>[25-29]
171 <created>[29-32]
172 <created>[32-36]
173 <created>[36-42]
174 <created>[42-47]
175 <created>[47-53]
176 <created>[59-59]

File diff suppressed because it is too large Load Diff

View File

@ -1,60 +0,0 @@
0 -431 1782
1 1783 1848
2 1849 1870
3 1871 1888
4 1889 1899
5 1900 1906
6 1907 1912
7 1913 1917
8 1918 1922
9 1923 1926
10 1927 1930
11 1931 1934
12 1935 1938
13 1939 1941
14 1942 1944
15 1945 1947
16 1948 1950
17 1951 1953
18 1954 1956
19 1957 1959
20 1960 1962
21 1963 1965
22 1966 1967
23 1968 1969
24 1970 1971
25 1972 1973
26 1974 1975
27 1976 1977
28 1978 1979
29 1980 1981
30 1982 1983
31 1984 1985
32 1986 1987
33 1988 1989
34 1990 1991
35 1992 1993
36 1994 1994
37 1995 1996
38 1997 1997
39 1998 1998
40 1999 1999
41 2000 2000
42 2001 2001
43 2002 2002
44 2003 2003
45 2004 2004
46 2005 2005
47 2006 2006
48 2007 2007
49 2008 2008
50 2009 2009
51 2010 2010
52 2011 2011
53 2012 2012
54 2013 2013
55 2014 2014
56 2015 2015
57 2016 2016
58 2017 2017
59 2018 2018

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,793 +0,0 @@
0
2
4
9
11
12
16
17
19
27
29
34
35
37
38
41
42
45
49
51
52
54
56
57
61
64
65
67
69
70
72
76
78
79
83
86
87
89
101
102
103
108
111
112
119
121
122
126
128
129
132
134
138
141
144
146
153
154
155
156
158
159
160
161
162
164
165
166
168
173
175
176
177
182
184
185
186
187
188
190
192
193
201
202
208
209
211
213
215
216
217
222
227
229
235
239
240
242
243
245
246
247
251
252
254
257
261
263
266
268
271
279
282
292
300
303
305
308
309
311
313
316
319
322
324
325
329
331
332
333
334
337
339
342
343
346
347
348
349
350
352
353
355
357
361
362
363
367
371
373
378
379
383
384
385
389
392
394
395
396
397
399
400
402
403
407
409
415
416
420
421
422
428
429
432
433
440
442
443
450
452
459
463
464
466
471
472
476
480
484
489
490
493
494
495
500
503
507
509
515
519
520
521
525
528
529
533
534
539
541
542
548
550
556
559
563
566
567
569
573
575
576
579
582
585
588
592
593
594
596
597
598
599
603
604
605
606
607
613
614
616
617
618
619
621
623
624
625
628
638
641
642
648
651
659
660
661
663
664
676
677
678
680
682
686
688
689
691
694
698
704
707
708
712
713
716
719
723
724
726
728
732
741
742
743
744
745
746
750
752
755
759
762
764
767
768
770
772
775
777
780
782
785
789
799
800
801
802
804
805
810
811
816
822
823
826
829
832
834
835
838
839
842
847
848
850
851
852
856
861
862
865
867
868
869
874
876
882
883
884
885
891
893
898
899
906
909
910
911
912
920
923
924
926
928
934
938
941
942
943
944
945
951
954
956
957
958
960
961
963
964
968
970
975
976
977
979
981
988
989
992
993
995
997
1005
1008
1009
1012
1013
1014
1015
1023
1029
1032
1038
1044
1045
1052
1053
1055
1057
1060
1061
1065
1066
1074
1077
1079
1080
1082
1083
1085
1086
1089
1090
1091
1095
1104
1107
1111
1114
1121
1124
1126
1127
1128
1131
1132
1139
1140
1142
1143
1145
1148
1150
1157
1163
1164
1168
1170
1171
1172
1173
1179
1182
1186
1189
1190
1191
1194
1196
1198
1201
1204
1206
1208
1217
1220
1223
1228
1231
1232
1235
1236
1237
1238
1240
1246
1247
1249
1252
1258
1260
1265
1266
1273
1274
1278
1279
1280
1284
1286
1287
1288
1289
1290
1293
1294
1295
1297
1298
1301
1303
1304
1305
1307
1308
1309
1314
1318
1319
1323
1325
1327
1328
1333
1337
1340
1341
1343
1345
1346
1347
1349
1350
1351
1358
1364
1365
1367
1368
1369
1370
1373
1375
1376
1378
1380
1381
1382
1385
1387
1390
1391
1394
1396
1397
1399
1400
1406
1409
1412
1416
1417
1418
1420
1423
1425
1428
1430
1431
1432
1437
1438
1439
1444
1447
1450
1454
1456
1457
1460
1464
1465
1469
1473
1474
1475
1477
1479
1488
1490
1493
1494
1497
1500
1502
1503
1504
1505
1507
1508
1510
1514
1515
1520
1522
1523
1526
1547
1549
1553
1556
1557
1558
1562
1563
1564
1565
1570
1571
1574
1575
1579
1591
1592
1594
1601
1604
1605
1606
1608
1609
1613
1618
1619
1620
1621
1632
1634
1635
1636
1642
1643
1648
1650
1652
1653
1660
1661
1662
1666
1669
1670
1676
1677
1682
1683
1690
1692
1693
1697
1698
1702
1703
1706
1709
1711
1713
1715
1717
1721
1724
1725
1729
1730
1733
1734
1735
1736
1741
1745
1746
1748
1749
1751
1755
1761
1763
1766
1767
1768
1769
1773
1775
1777
1778
1783
1789
1790
1792
1793
1795
1800
1803
1805
1809
1812
1815
1816
1819
1820
1822
1823
1824
1825
1828
1831
1833
1834
1835
1836
1837
1842
1848
1849
1852
1853
1854
1856
1857
1858
1859
1861
1864
1865
1869
1873
1874
1876
1877
1882
1883
1884
1885
1888
1889
1890
1892
1894
1896
1899
1902
1903
1905
1908
1910
1913
1914
1915
1920
1928
1931
1936
1938
1941
1942
1944
1946
1947
1948
1954
1956
1958
1961
1966
1968
1969
1971
1972
1977
1979
1985
1986
1987
1988
1989
1990
1999
2001
2005
2009
2010
2012
2013
2014
2015
2017
2018
2022
2023
2028
2032
2036
2037
2038
2041
2042
2043
2044
2045
2046
2048

File diff suppressed because it is too large Load Diff

View File

@ -1,809 +0,0 @@
7
9
12
15
17
22
24
25
28
29
32
37
38
41
43
49
52
54
57
58
59
60
66
69
72
76
78
81
83
84
85
88
89
94
100
102
105
106
107
108
109
115
116
121
123
124
127
128
133
135
137
138
141
144
156
157
159
161
168
171
172
174
175
176
181
182
186
188
189
190
191
195
197
198
200
201
204
208
212
215
216
217
218
219
220
222
224
225
227
229
230
233
236
239
240
242
243
244
246
247
250
251
253
254
255
256
257
261
265
266
271
273
274
275
276
279
280
282
284
287
289
292
296
297
299
300
302
308
311
312
315
316
317
320
321
322
326
331
333
335
336
337
339
344
345
346
347
351
352
353
354
355
358
359
362
364
366
368
373
376
388
390
392
393
394
395
397
398
401
403
406
407
409
410
412
413
415
416
418
420
421
425
435
437
443
444
445
446
448
453
455
456
462
463
468
470
471
477
478
481
485
490
491
493
498
505
510
513
514
515
516
521
523
527
529
531
534
535
536
537
538
543
547
550
555
557
558
559
560
561
565
566
567
570
575
577
580
581
582
593
594
595
597
599
602
605
609
612
614
616
618
620
625
628
632
633
635
636
643
644
645
646
647
648
652
662
663
669
670
672
673
681
682
685
687
689
693
697
700
702
707
711
716
717
719
727
728
729
730
732
733
734
738
740
744
751
754
755
756
760
761
762
763
766
768
770
771
772
773
774
775
776
777
779
781
782
784
786
788
789
792
795
799
800
805
808
810
812
815
817
820
821
825
827
829
831
832
833
834
836
837
840
841
842
848
849
850
851
852
855
858
860
861
866
869
871
872
877
878
880
881
882
884
885
886
891
893
897
899
901
908
911
913
919
921
925
927
929
934
937
938
945
947
948
950
957
962
963
965
969
971
976
977
978
985
986
989
990
991
993
996
997
998
1003
1005
1006
1007
1010
1011
1013
1014
1019
1022
1024
1025
1028
1031
1032
1034
1038
1040
1042
1043
1048
1049
1050
1051
1056
1057
1059
1062
1063
1065
1066
1071
1072
1074
1075
1076
1080
1081
1082
1085
1086
1087
1090
1095
1096
1098
1099
1101
1105
1108
1109
1110
1112
1117
1119
1121
1122
1123
1125
1129
1133
1139
1143
1145
1147
1151
1153
1154
1158
1159
1162
1165
1167
1169
1171
1175
1177
1178
1180
1181
1182
1185
1187
1188
1193
1197
1199
1217
1218
1220
1221
1225
1227
1230
1232
1233
1235
1236
1237
1238
1239
1244
1246
1247
1248
1262
1263
1264
1266
1267
1269
1271
1272
1273
1276
1277
1278
1280
1284
1291
1292
1293
1295
1296
1297
1299
1300
1301
1305
1307
1309
1312
1322
1325
1330
1334
1335
1337
1338
1340
1342
1346
1347
1350
1351
1356
1357
1358
1359
1366
1368
1370
1371
1373
1376
1379
1382
1383
1384
1385
1386
1387
1389
1393
1394
1398
1403
1404
1409
1411
1413
1415
1420
1421
1423
1426
1429
1431
1433
1434
1439
1444
1446
1447
1456
1457
1461
1467
1469
1472
1477
1481
1493
1494
1496
1498
1499
1501
1503
1506
1507
1508
1510
1511
1512
1516
1521
1523
1527
1528
1529
1530
1535
1537
1540
1541
1542
1546
1552
1559
1562
1563
1564
1567
1572
1574
1577
1581
1587
1589
1594
1601
1603
1613
1614
1616
1617
1618
1619
1620
1621
1623
1624
1625
1626
1631
1632
1634
1635
1636
1638
1640
1642
1643
1645
1647
1648
1650
1651
1652
1655
1656
1658
1661
1662
1663
1665
1666
1667
1668
1669
1674
1675
1685
1686
1695
1697
1698
1699
1706
1713
1716
1717
1718
1723
1724
1725
1729
1730
1731
1732
1733
1734
1738
1741
1744
1749
1761
1762
1769
1772
1774
1776
1777
1781
1784
1787
1789
1790
1791
1798
1804
1805
1807
1808
1811
1812
1816
1821
1824
1827
1831
1834
1835
1836
1842
1847
1852
1855
1856
1859
1861
1862
1865
1866
1871
1872
1874
1876
1877
1878
1879
1880
1881
1885
1887
1888
1889
1891
1892
1894
1895
1899
1900
1907
1911
1913
1914
1915
1917
1919
1920
1923
1924
1925
1927
1934
1939
1940
1941
1948
1951
1953
1957
1958
1959
1961
1962
1965
1966
1967
1971
1973
1975
1979
1981
1983
1989
1996
1997
2000
2004
2007
2009
2011
2012
2016
2017
2019
2021
2024
2029
2040
2042
2044
2048

3
icews14_both.log Normal file
View File

@ -0,0 +1,3 @@
nohup: ignoring input
2023-06-20 09:22:51,618 - [INFO] - {'dataset': 'icews14_both', 'name': 'icews14_both', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-20 09:22:57,979 - [INFO] - [E:0| 0]: Train Loss:0.70005, Val MRR:0.0, icews14_both

14945
log/ice00001 Normal file

File diff suppressed because it is too large Load Diff

4904
log/ice0003 Normal file

File diff suppressed because it is too large Load Diff

6607
log/ice0003_2 Normal file

File diff suppressed because it is too large Load Diff

6205
log/ice001 Normal file

File diff suppressed because it is too large Load Diff

9541
log/ice14ws_128 Normal file

File diff suppressed because it is too large Load Diff

4154
log/iceboth Normal file

File diff suppressed because it is too large Load Diff

9482
log/icews14 Normal file

File diff suppressed because it is too large Load Diff

1
log/icews14_128 Normal file
View File

@ -0,0 +1 @@
2023-05-13 03:52:44,141 - icews14_128 - [INFO] - {'dataset': 'icews14', 'name': 'icews14_128', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True, 'filtered': False}

10670
log/icews14_both Normal file

File diff suppressed because it is too large Load Diff

2
log/poofnet.log Normal file
View File

@ -0,0 +1,2 @@
nohup: ignoring input
python: can't open file 'run.py': [Errno 2] No such file or directory

1
log/testrun_227cb2f9 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:54:57,988 - testrun_227cb2f9 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_227cb2f9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_30d70322 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:23:34,181 - testrun_30d70322 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_30d70322', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_3212b281 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:53:01,668 - testrun_3212b281 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_3212b281', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_3dbc9e89 Normal file
View File

@ -0,0 +1 @@
2023-05-06 08:35:38,753 - testrun_3dbc9e89 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_3dbc9e89', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

1
log/testrun_43389ddf Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:38:00,469 - testrun_43389ddf - [INFO] - {'dataset': 'icews14', 'name': 'testrun_43389ddf', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_47ede3b9 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:13:02,952 - testrun_47ede3b9 - [INFO] - {'dataset': 'FB15k-237', 'name': 'testrun_47ede3b9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_49495af8 Normal file
View File

@ -0,0 +1 @@
2023-05-06 08:37:18,939 - testrun_49495af8 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_49495af8', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

7877
log/testrun_4a235016 Normal file

File diff suppressed because it is too large Load Diff

1
log/testrun_4f5d8391 Normal file
View File

@ -0,0 +1 @@
2023-05-06 08:35:13,356 - testrun_4f5d8391 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_4f5d8391', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

1
log/testrun_540f6a03 Normal file
View File

@ -0,0 +1 @@
2023-05-06 08:34:55,992 - testrun_540f6a03 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_540f6a03', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

1
log/testrun_5a901712 Normal file
View File

@ -0,0 +1 @@
2023-05-17 07:04:56,051 - testrun_5a901712 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5a901712', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

44
log/testrun_5cafe61a Normal file
View File

@ -0,0 +1,44 @@
2023-05-17 06:48:57,396 - testrun_5cafe61a - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5cafe61a', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
2023-05-17 06:49:44,802 - concurrent.futures - [ERROR] - exception calling callback for <Future at 0x7efb51b74160 state=finished raised BrokenProcessPool>
joblib.externals.loky.process_executor._RemoteTraceback:
"""
Traceback (most recent call last):
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 391, in _process_worker
call_item = call_queue.get(block=True, timeout=timeout)
File "/opt/conda/envs/kgs2s/lib/python3.8/multiprocessing/queues.py", line 116, in get
return _ForkingPickler.loads(res)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/storage.py", line 222, in _load_from_bytes
return torch.load(io.BytesIO(b))
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 713, in load
return _legacy_load(opened_file, map_location, pickle_module, **pickle_load_args)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 930, in _legacy_load
result = unpickler.load()
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 876, in persistent_load
wrap_storage=restore_location(obj, location),
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 175, in default_restore_location
result = fn(storage, location)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 155, in _cuda_deserialize
return torch._UntypedStorage(obj.nbytes(), device=torch.device(location))
RuntimeError: CUDA out of memory. Tried to allocate 678.00 MiB (GPU 0; 31.72 GiB total capacity; 0 bytes already allocated; 593.94 MiB free; 0 bytes reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
"""
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/_base.py", line 26, in _invoke_callbacks
callback(self)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 385, in __call__
self.parallel.dispatch_next()
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 834, in dispatch_next
if not self.dispatch_one_batch(self._original_iterator):
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 901, in dispatch_one_batch
self._dispatch(tasks)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 819, in _dispatch
job = self._backend.apply_async(batch, callback=cb)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 556, in apply_async
future = self._workers.submit(SafeFunction(func))
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/reusable_executor.py", line 176, in submit
return super().submit(fn, *args, **kwargs)
File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 1129, in submit
raise self._flags.broken
joblib.externals.loky.process_executor.BrokenProcessPool: A task has failed to un-serialize. Please ensure that the arguments of the function are all picklable.

1
log/testrun_6fd94d59 Normal file
View File

@ -0,0 +1 @@
2023-05-06 08:34:33,652 - testrun_6fd94d59 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_6fd94d59', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

1
log/testrun_7c096a18 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:56:35,124 - testrun_7c096a18 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7c096a18', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_7fb885ee Normal file
View File

@ -0,0 +1 @@
2023-05-17 07:13:14,777 - testrun_7fb885ee - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7fb885ee', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_8f32040f Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:59:35,220 - testrun_8f32040f - [INFO] - {'dataset': 'icews14', 'name': 'testrun_8f32040f', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_958ef154 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:16:45,427 - testrun_958ef154 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_958ef154', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

2
log/testrun_9acdfb58 Normal file
View File

@ -0,0 +1,2 @@
2023-05-06 08:36:46,668 - testrun_9acdfb58 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_9acdfb58', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:36:57,409 - testrun_9acdfb58 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_9acdfb58

1
log/testrun_a051cf32 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:36:14,606 - testrun_a051cf32 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a051cf32', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_a06d39d0 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:13:16,274 - testrun_a06d39d0 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a06d39d0', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_aca2b734 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:41:20,654 - testrun_aca2b734 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_aca2b734', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_ad7a0edb Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:45:54,332 - testrun_ad7a0edb - [INFO] - {'dataset': 'icews14', 'name': 'testrun_ad7a0edb', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

7958
log/testrun_ae6f81ee Normal file

File diff suppressed because it is too large Load Diff

1
log/testrun_b381870f Normal file
View File

@ -0,0 +1 @@
2023-05-30 17:54:20,857 - testrun_b381870f - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b381870f', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}

2
log/testrun_b396dcde Normal file
View File

@ -0,0 +1,2 @@
2023-05-30 17:56:25,430 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-05-30 17:57:00,673 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False, 'num_ent': 12554, 'num_rel': 423}

1
log/testrun_bbf65ab5 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:21:14,228 - testrun_bbf65ab5 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bbf65ab5', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_bfaa042b Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:50:58,251 - testrun_bfaa042b - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bfaa042b', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_c77a8ec3 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:37:11,288 - testrun_c77a8ec3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_c77a8ec3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_cb3528f3 Normal file
View File

@ -0,0 +1 @@
2023-05-17 07:08:13,688 - testrun_cb3528f3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cb3528f3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_cd333c33 Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:25:12,047 - testrun_cd333c33 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cd333c33', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

2
log/testrun_d0367b19 Normal file
View File

@ -0,0 +1,2 @@
2023-05-06 08:37:25,129 - testrun_d0367b19 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_d0367b19', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:37:36,239 - testrun_d0367b19 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_d0367b19

9001
log/testrun_d2ab6391 Normal file

File diff suppressed because it is too large Load Diff

11836
log/testrun_e1726b98 Normal file

File diff suppressed because it is too large Load Diff

1
log/testrun_f0394b3c Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:47:48,537 - testrun_f0394b3c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_f0394b3c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_f42f568c Normal file
View File

@ -0,0 +1 @@
2023-05-30 17:55:52,461 - testrun_f42f568c - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_f42f568c', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}

1
log/testrun_fdb0e82c Normal file
View File

@ -0,0 +1 @@
2023-05-17 06:39:01,301 - testrun_fdb0e82c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_fdb0e82c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1116
log/wikidata12k Normal file

File diff suppressed because it is too large Load Diff

2
log/wikidata12k_0.00003 Normal file
View File

@ -0,0 +1,2 @@
2023-06-04 17:05:45,012 - wikidata12k_0.00003 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.00003', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-04 17:06:06,702 - wikidata12k_0.00003 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, wikidata12k_0.00003

4918
log/wikidata12k_0.001 Normal file

File diff suppressed because it is too large Load Diff

15357
log/wikidata12k_1n Normal file

File diff suppressed because it is too large Load Diff

11565
log/wikidata12k_both Normal file

File diff suppressed because it is too large Load Diff

9241
log/yago11k Normal file

File diff suppressed because it is too large Load Diff

9654
log/yago11k_0.00003 Normal file

File diff suppressed because it is too large Load Diff

9599
log/yago11k_0.0003 Normal file

File diff suppressed because it is too large Load Diff

7233
log/yago11k_0.001 Normal file

File diff suppressed because it is too large Load Diff

18847
log/yago11k_0.001.log Normal file

File diff suppressed because it is too large Load Diff

9169
log/yago11k_both Normal file

File diff suppressed because it is too large Load Diff

9162
log/yago11k_both_0.001 Normal file

File diff suppressed because it is too large Load Diff

133
main.py
View File

@ -3,10 +3,12 @@ import uuid
import argparse
import logging
import logging.config
import time
import pandas as pd
import sys
import torch
import numpy as np
import time
from collections import defaultdict as ddict
from pprint import pprint
@ -18,11 +20,12 @@ from data_loader import TrainDataset, TestDataset
from utils import get_logger, get_combined_results, set_gpu, prepare_env, set_seed
from models import ComplEx, ConvE, HypER, InteractE, FouriER, TuckER
import traceback
class Main(object):
def __init__(self, params):
def __init__(self, params, logger):
"""
Constructor of the runner class
Parameters
@ -35,11 +38,9 @@ class Main(object):
"""
self.p = params
self.logger = get_logger(
self.p.name, self.p.log_dir, self.p.config_dir)
self.logger = logger
self.logger.info(vars(self.p))
pprint(vars(self.p))
if self.p.gpu != '-1' and torch.cuda.is_available():
self.device = torch.device('cuda')
@ -84,15 +85,17 @@ class Main(object):
self.ent2id = {}
for line in open('./data/{}/{}'.format(self.p.dataset, "entities.dict")):
id, ent = map(str.lower, line.strip().split('\t'))
id, ent = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
self.ent2id[ent] = int(id)
self.rel2id = {}
for line in open('./data/{}/{}'.format(self.p.dataset, "relations.dict")):
id, rel = map(str.lower, line.strip().split('\t'))
self.rel2id[rel] = int(id)
rel_set.add(rel)
# self.ent2id = {ent: idx for idx, ent in enumerate(ent_set)}
# self.rel2id = {rel: idx for idx, rel in enumerate(rel_set)}
self.rel2id.update({rel+'_reverse': idx+len(self.rel2id)
for idx, rel in enumerate(rel_set)})
@ -108,59 +111,52 @@ class Main(object):
sr2o = ddict(set)
for split in ['train', 'test', 'valid']:
samples = 0
for i, line in enumerate(open('./data/{}/{}.txt'.format(self.p.dataset, split))):
sub, rel, obj, rel_type, *_ = map(str.lower, line.strip().split('\t'))
if (split == 'test' and self.p.rel_type is not None):
if rel_type != self.p.rel_type:
continue
sub, rel, obj = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj]
self.data[split].append((sub, rel, obj))
for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
sub, rel, obj, *_ = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
nt_rel = rel.split('[')[0]
sub, rel, obj, nt_rel = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj], self.rel2id[nt_rel]
self.data[split].append((sub, rel, obj, nt_rel))
if split == 'train':
sr2o[(sub, rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel)].add(sub)
samples += 1
print(split.capitalize() + ': ' + str(samples) + ' samples')
sr2o[(sub, rel, nt_rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel, nt_rel + self.p.num_rel)].add(sub)
self.data = dict(self.data)
self.sr2o = {k: list(v) for k, v in sr2o.items()}
for split in ['test', 'valid']:
for sub, rel, obj in self.data[split]:
sr2o[(sub, rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel)].add(sub)
for sub, rel, obj, nt_rel in self.data[split]:
sr2o[(sub, rel, nt_rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel, nt_rel + self.p.num_rel)].add(sub)
self.sr2o_all = {k: list(v) for k, v in sr2o.items()}
self.triples = ddict(list)
if self.p.train_strategy == 'one_to_n':
for (sub, rel), obj in self.sr2o.items():
for (sub, rel, nt_rel), obj in self.sr2o.items():
self.triples['train'].append(
{'triple': (sub, rel, -1), 'label': self.sr2o[(sub, rel)], 'sub_samp': 1})
{'triple': (sub, rel, -1, nt_rel), 'label': self.sr2o[(sub, rel, nt_rel)], 'sub_samp': 1})
else:
for sub, rel, obj in self.data['train']:
for sub, rel, obj, nt_rel in self.data['train']:
rel_inv = rel + self.p.num_rel
sub_samp = len(self.sr2o[(sub, rel)]) + \
len(self.sr2o[(obj, rel_inv)])
sub_samp = len(self.sr2o[(sub, rel, nt_rel)]) + \
len(self.sr2o[(obj, rel_inv, nt_rel + self.p.num_rel)])
sub_samp = np.sqrt(1/sub_samp)
self.triples['train'].append({'triple': (
sub, rel, obj), 'label': self.sr2o[(sub, rel)], 'sub_samp': sub_samp})
sub, rel, obj, nt_rel), 'label': self.sr2o[(sub, rel, nt_rel)], 'sub_samp': sub_samp})
self.triples['train'].append({'triple': (
obj, rel_inv, sub), 'label': self.sr2o[(obj, rel_inv)], 'sub_samp': sub_samp})
obj, rel_inv, sub, nt_rel + self.p.num_rel), 'label': self.sr2o[(obj, rel_inv, nt_rel + self.p.num_rel)], 'sub_samp': sub_samp})
for split in ['test', 'valid']:
for sub, rel, obj in self.data[split]:
for sub, rel, obj, nt_rel in self.data[split]:
rel_inv = rel + self.p.num_rel
self.triples['{}_{}'.format(split, 'tail')].append(
{'triple': (sub, rel, obj), 'label': self.sr2o_all[(sub, rel)]})
{'triple': (sub, rel, obj, nt_rel), 'label': self.sr2o_all[(sub, rel, nt_rel)]})
self.triples['{}_{}'.format(split, 'head')].append(
{'triple': (obj, rel_inv, sub), 'label': self.sr2o_all[(obj, rel_inv)]})
{'triple': (obj, rel_inv, sub, nt_rel + self.p.num_rel), 'label': self.sr2o_all[(obj, rel_inv, nt_rel + self.p.num_rel)]})
self.triples = dict(self.triples)
print(len(self.triples['test_head']))
print(len(self.triples['test_tail']))
def get_data_loader(dataset_class, split, batch_size, shuffle=True):
return DataLoader(
@ -282,13 +278,13 @@ class Main(object):
if self.p.train_strategy == 'one_to_x':
triple, label, neg_ent, sub_samp = [
_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label, neg_ent, sub_samp
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label, neg_ent, sub_samp
else:
triple, label = [_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label, None, None
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label, None, None
else:
triple, label = [_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label
def save_model(self, save_path):
"""
@ -415,16 +411,35 @@ class Main(object):
train_iter = iter(
self.data_iter['{}_{}'.format(split, mode.split('_')[0])])
sub_all = []
obj_all = []
rel_all = []
target_score = []
target_rank = []
obj_pred = []
obj_pred_score = []
for step, batch in enumerate(train_iter):
sub, rel, obj, label = self.read_batch(batch, split)
pred = self.model.forward(sub, rel, None, 'one_to_n')
sub, rel, obj, nt_rel, label = self.read_batch(batch, split)
pred = self.model.forward(sub, rel, nt_rel, None, 'one_to_n')
b_range = torch.arange(pred.size()[0], device=self.device)
target_pred = pred[b_range, obj]
pred = torch.where(label.byte(), torch.zeros_like(pred), pred)
pred[b_range, obj] = target_pred
highest = torch.argsort(pred, dim=1, descending=True)[:,0]
highest_score = pred[b_range, highest]
ranks = 1 + torch.argsort(torch.argsort(pred, dim=1,
descending=True), dim=1, descending=False)[b_range, obj]
sub_all.extend(sub.cpu().numpy())
obj_all.extend(obj.cpu().numpy())
rel_all.extend(rel.cpu().numpy())
target_score.extend(target_pred.cpu().numpy())
target_rank.extend(ranks.cpu().numpy())
obj_pred.extend(highest.cpu().numpy())
obj_pred_score.extend(highest_score.cpu().numpy())
ranks = ranks.float()
results['count'] = torch.numel(
ranks) + results.get('count', 0.0)
@ -439,7 +454,8 @@ class Main(object):
if step % 100 == 0:
self.logger.info('[{}, {} Step {}]\t{}'.format(
split.title(), mode.title(), step, self.p.name))
df = pd.DataFrame({"sub":sub_all,"rel":rel_all,"obj":obj_all, "rank": target_rank,"score":target_score, "pred":obj_pred,"pred_score":obj_pred_score})
df.to_csv(f"{self.p.name}_result.csv",header=True, index=False)
return results
def run_epoch(self, epoch):
@ -461,10 +477,10 @@ class Main(object):
for step, batch in enumerate(train_iter):
self.optimizer.zero_grad()
sub, rel, obj, label, neg_ent, sub_samp = self.read_batch(
sub, rel, obj, nt_rel, label, neg_ent, sub_samp = self.read_batch(
batch, 'train')
pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy)
pred = self.model.forward(sub, rel, nt_rel, neg_ent, self.p.train_strategy)
loss = self.model.loss(pred, label, sub_samp)
loss.backward()
@ -635,7 +651,6 @@ if __name__ == "__main__":
parser.add_argument('--test_only', action='store_true', default=False)
parser.add_argument('--grid_search', action='store_true', default=False)
parser.add_argument('--rel_type', default=None, type=str)
args = parser.parse_args()
@ -644,9 +659,10 @@ if __name__ == "__main__":
set_gpu(args.gpu)
set_seed(args.seed)
model = Main(args)
if (args.grid_search):
model = Main(args)
from sklearn.model_selection import GridSearchCV
from skorch import NeuralNet
@ -677,7 +693,7 @@ if __name__ == "__main__":
collate_fn=TrainDataset.collate_fn
))
for step, batch in enumerate(dataloader):
sub, rel, obj, label, neg_ent, sub_samp = model.read_batch(
sub, rel, obj, nt_rel, label, neg_ent, sub_samp = model.read_batch(
batch, 'train')
if (neg_ent is None):
@ -695,18 +711,27 @@ if __name__ == "__main__":
search = grid.fit(inputs, label)
print("BEST SCORE: ", search.best_score_)
print("BEST PARAMS: ", search.best_params_)
logger = get_logger(
args.name, args.log_dir, args.config_dir)
if (args.test_only):
model = Main(args, logger)
save_path = os.path.join('./torch_saved', args.name)
model.load_model(save_path)
model.evaluate('test')
else:
while True:
try:
model.fit()
except Exception as e:
print(e)
time.sleep(30)
del model
model = Main(args)
continue
break
model = Main(args, logger)
model.fit()
# while True:
# try:
# model = Main(args, logger)
# model.fit()
# except Exception as e:
# print(e)
# traceback.print_exc()
# try:
# del model
# except Exception:
# pass
# time.sleep(30)
# continue
# break

201
models.py
View File

@ -9,7 +9,7 @@ from layers import *
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.models.layers import DropPath, trunc_normal_
from timm.models.registry import register_model
from timm.models.layers.helpers import to_2tuple
from timm.layers.helpers import to_2tuple
class ConvE(torch.nn.Module):
@ -466,6 +466,10 @@ class FouriER(torch.nn.Module):
self.p.ent_vec_dim, image_h*image_w)
torch.nn.init.xavier_normal_(self.ent_fusion.weight)
self.ent_attn = torch.nn.Linear(
128, 128)
torch.nn.init.xavier_normal_(self.ent_attn.weight)
self.rel_fusion = torch.nn.Linear(
self.p.rel_vec_dim, image_h*image_w)
torch.nn.init.xavier_normal_(self.rel_fusion.weight)
@ -547,8 +551,15 @@ class FouriER(torch.nn.Module):
x = block(x)
# output only the features of last layer for image classification
return x
def fuse_attention(self, s_embedding, l_embedding):
w1 = self.ent_attn(torch.tanh(s_embedding))
w2 = self.ent_attn(torch.tanh(l_embedding))
aff = F.softmax(torch.cat((w1,w2),1), 1)
en_embedding = aff[:,0].unsqueeze(1) * s_embedding + aff[:, 1].unsqueeze(1) * l_embedding
return en_embedding
def forward(self, sub, rel, neg_ents, strategy='one_to_x'):
def forward(self, sub, rel, nt_rel, neg_ents, strategy='one_to_x'):
sub_emb = self.ent_fusion(self.ent_embed(sub))
rel_emb = self.rel_fusion(self.rel_embed(rel))
comb_emb = torch.stack([sub_emb.view(-1, self.p.image_h, self.p.image_w), rel_emb.view(-1, self.p.image_h, self.p.image_w)], dim=1)
@ -557,6 +568,17 @@ class FouriER(torch.nn.Module):
z = self.forward_embeddings(y)
z = self.forward_tokens(z)
z = z.mean([-2, -1])
nt_rel_emb = self.rel_fusion(self.rel_embed(nt_rel))
comb_emb_1 = torch.stack([sub_emb.view(-1, self.p.image_h, self.p.image_w), nt_rel_emb.view(-1, self.p.image_h, self.p.image_w)], dim=1)
y_1 = comb_emb_1.view(-1, 2, self.p.image_h, self.p.image_w)
y_1 = self.bn0(y_1)
z_1 = self.forward_embeddings(y_1)
z_1 = self.forward_tokens(z_1)
z_1 = z_1.mean([-2, -1])
z = self.fuse_attention(z, z_1)
z = self.norm(z)
x = self.head(z)
x = self.hidden_drop(x)
@ -707,6 +729,166 @@ def basic_blocks(dim, index, layers,
return blocks
def window_partition(x, window_size):
    """
    Split a channels-first feature map into non-overlapping square windows.

    Args:
        x: (B, C, H, W) feature map, channels-first as produced by the
            surrounding blocks. H and W must be divisible by window_size.
        window_size (int): side length of each square window.

    Returns:
        windows: (num_windows*B, window_size, window_size, C) channels-last
            windows, ready to be flattened to (nW*B, ws*ws, C) for attention.
    """
    B, C, H, W = x.shape
    # Move channels last before carving windows. The previous code applied
    # .view(B, H//ws, ws, W//ws, ws, C) directly to the (B, C, H, W) layout,
    # which has the right element count but silently mixes channel and
    # spatial dimensions instead of reshaping them.
    x = x.permute(0, 2, 3, 1).contiguous()  # (B, H, W, C)
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)
    return windows
class WindowAttention(nn.Module):
    r""" Window based multi-head self attention (W-MSA) module with relative position bias.
    It supports both of shifted and non-shifted window.

    Uses Swin-v2-style cosine attention with a learnable per-head temperature
    and a continuous relative position bias produced by a small MLP.

    Args:
        dim (int): Number of input channels.
        window_size (tuple[int]): The height and width of the window.
        num_heads (int): Number of attention heads.
        qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
        attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
        proj_drop (float, optional): Dropout ratio of output. Default: 0.0
        pretrained_window_size (tuple[int]): The height and width of the window in pre-training.
    """

    def __init__(self, dim, window_size, num_heads, qkv_bias=True, attn_drop=0., proj_drop=0.,
                 pretrained_window_size=[0, 0]):
        super().__init__()
        self.dim = dim
        self.window_size = window_size  # Wh, Ww
        self.pretrained_window_size = pretrained_window_size
        self.num_heads = num_heads

        # Learnable per-head temperature for cosine attention, stored in log space.
        self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True)

        # mlp to generate continuous relative position bias
        self.cpb_mlp = nn.Sequential(nn.Linear(2, 512, bias=True),
                                     nn.ReLU(inplace=True),
                                     nn.Linear(512, num_heads, bias=False))

        # get relative_coords_table
        relative_coords_h = torch.arange(-(self.window_size[0] - 1), self.window_size[0], dtype=torch.float32)
        relative_coords_w = torch.arange(-(self.window_size[1] - 1), self.window_size[1], dtype=torch.float32)
        relative_coords_table = torch.stack(
            torch.meshgrid([relative_coords_h,
                            relative_coords_w])).permute(1, 2, 0).contiguous().unsqueeze(0)  # 1, 2*Wh-1, 2*Ww-1, 2
        if pretrained_window_size[0] > 0:
            relative_coords_table[:, :, :, 0] /= (pretrained_window_size[0] - 1)
            relative_coords_table[:, :, :, 1] /= (pretrained_window_size[1] - 1)
        else:
            relative_coords_table[:, :, :, 0] /= (self.window_size[0] - 1)
            relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1)
        relative_coords_table *= 8  # normalize to -8, 8
        # Log-spaced coordinates so the learned bias extrapolates across window sizes.
        relative_coords_table = torch.sign(relative_coords_table) * torch.log2(
            torch.abs(relative_coords_table) + 1.0) / np.log2(8)
        self.register_buffer("relative_coords_table", relative_coords_table)

        # get pair-wise relative position index for each token inside the window
        coords_h = torch.arange(self.window_size[0])
        coords_w = torch.arange(self.window_size[1])
        coords = torch.stack(torch.meshgrid([coords_h, coords_w]))  # 2, Wh, Ww
        coords_flatten = torch.flatten(coords, 1)  # 2, Wh*Ww
        relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :]  # 2, Wh*Ww, Wh*Ww
        relative_coords = relative_coords.permute(1, 2, 0).contiguous()  # Wh*Ww, Wh*Ww, 2
        relative_coords[:, :, 0] += self.window_size[0] - 1  # shift to start from 0
        relative_coords[:, :, 1] += self.window_size[1] - 1
        relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1
        relative_position_index = relative_coords.sum(-1)  # Wh*Ww, Wh*Ww
        self.register_buffer("relative_position_index", relative_position_index)

        self.qkv = nn.Linear(dim, dim * 3, bias=False)
        if qkv_bias:
            # Swin v2 uses biases on q and v only; k gets a zero (non-learnable) bias.
            self.q_bias = nn.Parameter(torch.zeros(dim))
            self.v_bias = nn.Parameter(torch.zeros(dim))
        else:
            self.q_bias = None
            self.v_bias = None
        self.attn_drop = nn.Dropout(attn_drop)
        self.proj = nn.Linear(dim, dim)
        self.proj_drop = nn.Dropout(proj_drop)
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, x, mask=None):
        """
        Args:
            x: input features with shape of (num_windows*B, N, C)
            mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
        """
        B_, N, C = x.shape
        qkv_bias = None
        if self.q_bias is not None:
            qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias))
        qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias)
        qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
        q, k, v = qkv[0], qkv[1], qkv[2]  # make torchscript happy (cannot use tensor as tuple)

        # cosine attention
        attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
        # Clamp the log temperature at log(1/0.01). The previous code built the
        # bound with .cuda(), which crashed on CPU and broke device placement;
        # keep the bound on the parameter's own device instead.
        logit_scale = torch.clamp(
            self.logit_scale,
            max=torch.log(torch.tensor(1. / 0.01, device=self.logit_scale.device))).exp()
        attn = attn * logit_scale

        relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)
        relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view(
            self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1)  # Wh*Ww,Wh*Ww,nH
        relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous()  # nH, Wh*Ww, Wh*Ww
        relative_position_bias = 16 * torch.sigmoid(relative_position_bias)
        attn = attn + relative_position_bias.unsqueeze(0)

        if mask is not None:
            nW = mask.shape[0]
            attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
            attn = attn.view(-1, self.num_heads, N, N)
            attn = self.softmax(attn)
        else:
            attn = self.softmax(attn)

        attn = self.attn_drop(attn)

        x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
        x = self.proj(x)
        x = self.proj_drop(x)
        return x

    def extra_repr(self) -> str:
        return f'dim={self.dim}, window_size={self.window_size}, ' \
               f'pretrained_window_size={self.pretrained_window_size}, num_heads={self.num_heads}'

    def flops(self, N):
        # calculate flops for 1 window with token length of N
        flops = 0
        # qkv = self.qkv(x)
        flops += N * self.dim * 3 * self.dim
        # attn = (q @ k.transpose(-2, -1))
        flops += self.num_heads * N * (self.dim // self.num_heads) * N
        # x = (attn @ v)
        flops += self.num_heads * N * N * (self.dim // self.num_heads)
        # x = self.proj(x)
        flops += N * self.dim * self.dim
        return flops
def window_reverse(windows, window_size, H, W):
    """
    Inverse of window_partition: reassemble windows into a feature map.

    Args:
        windows: (num_windows*B, window_size, window_size, C) channels-last
            windows (the layout window attention operates on).
        window_size (int): Window size
        H (int): Height of image
        W (int): Width of image

    Returns:
        x: (B, C, H, W) channels-first feature map, matching the layout of
            the residual stream it is added back into.
    """
    B = int(windows.shape[0] / (H * W / window_size / window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    # Undo the window permutation to (B, H, W, C), then move channels first.
    # The previous code viewed straight to (B, -1, H, W) without relocating
    # the channel dimension, which scrambled the spatial layout.
    x = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(B, H, W, -1)
    x = x.permute(0, 3, 1, 2).contiguous()
    return x
class PoolFormerBlock(nn.Module):
"""
@ -731,7 +913,10 @@ class PoolFormerBlock(nn.Module):
self.norm1 = norm_layer(dim)
#self.token_mixer = Pooling(pool_size=pool_size)
self.token_mixer = FNetBlock()
# self.token_mixer = FNetBlock()
self.window_size = 4
self.attn_mask = None
self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(self.window_size), num_heads=4)
self.norm2 = norm_layer(dim)
mlp_hidden_dim = int(dim * mlp_ratio)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
@ -748,15 +933,21 @@ class PoolFormerBlock(nn.Module):
layer_scale_init_value * torch.ones((dim)), requires_grad=True)
def forward(self, x):
B, C, H, W = x.shape
x_windows = window_partition(x, self.window_size)
x_windows = x_windows.view(-1, self.window_size * self.window_size, C)
attn_windows = self.token_mixer(x_windows, mask=self.attn_mask)
attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
x_attn = window_reverse(attn_windows, self.window_size, H, W)
if self.use_layer_scale:
x = x + self.drop_path(
self.layer_scale_1.unsqueeze(-1).unsqueeze(-1)
* self.token_mixer(self.norm1(x)))
* x_attn)
x = x + self.drop_path(
self.layer_scale_2.unsqueeze(-1).unsqueeze(-1)
* self.mlp(self.norm2(x)))
else:
x = x + self.drop_path(self.token_mixer(self.norm1(x)))
x = x + self.drop_path(x_attn)
x = x + self.drop_path(self.mlp(self.norm2(x)))
return x
class PatchEmbed(nn.Module):

View File

@ -1,4 +1,6 @@
torch==1.12.1+cu116
ordered-set==4.1.0
numpy==1.21.5
einops==0.4.1
einops==0.4.1
pandas
timm==0.9.16

5
run.sh
View File

@ -37,4 +37,7 @@ nohup python main.py --name ice00001 --lr 0.00001 --data icews14 --gpu 2 >run_lo
PID:
___
nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 &
nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 &
___
nohup python main.py --name iceboth --data icews14_both --gpu 0 >run_log/iceboth.log 2>&1 &
PID: 21984

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More