36 Commits

SHA1 Message Date
3f0018fedc add rel 2024-06-22 20:42:41 +07:00
9502c8d009 test 2024-06-19 00:24:20 +07:00
2637f53848 test 2024-06-19 00:20:44 +07:00
975a0a77c2 test 2024-06-19 00:19:31 +07:00
a064d12763 test 2024-06-19 00:16:05 +07:00
6d43b88599 test 2024-06-19 00:12:43 +07:00
7448528eec test 2024-06-19 00:11:37 +07:00
7194f8046c test 2024-06-19 00:10:51 +07:00
417a38d2e5 test 2024-06-19 00:08:27 +07:00
03f42561c6 test 2024-06-19 00:05:57 +07:00
936c37d0f6 test 2024-06-19 00:03:14 +07:00
39734013c4 test 2024-06-18 23:46:20 +07:00
bb9856ecd1 test 2024-06-18 21:11:18 +07:00
c2b17ec1ba try to add attn 2024-06-16 19:09:47 +07:00
f8e969cbd1 try swin 2024-04-27 11:52:23 +07:00
ae0f43ab4d try swin 2024-04-27 11:51:35 +07:00
dda7f13dbd try swin 2024-04-27 11:49:07 +07:00
1dd423edf0 try swin 2024-04-27 11:48:25 +07:00
a1bf2d7389 try swin 2024-04-27 11:46:32 +07:00
c31588cc5f try swin 2024-04-27 11:45:24 +07:00
c03e24f4c2 try swin 2024-04-27 11:43:15 +07:00
a47a60f6a1 try swin 2024-04-27 11:40:27 +07:00
ba388148d4 try swin 2024-04-27 11:27:38 +07:00
1b816fed50 try swin 2024-04-27 11:24:57 +07:00
32962bf421 try swin 2024-04-27 11:23:28 +07:00
b9efe68d3c try swin 2024-04-27 11:12:52 +07:00
465f98bef8 try swin 2024-04-27 11:08:46 +07:00
d4ac470c54 try swin 2024-04-27 11:07:48 +07:00
28a8352044 try swin 2024-04-27 10:59:11 +07:00
b77c79708e try swin 2024-04-27 10:56:10 +07:00
22d44d1a99 try swin 2024-04-27 10:32:08 +07:00
63ccb4ec75 try swin 2024-04-27 10:26:58 +07:00
6ec566505f try swin 2024-04-27 10:18:48 +07:00
30805a0af9 try swin 2024-04-27 10:04:41 +07:00
2e2b12571a update viz util 2023-06-24 04:11:17 +00:00
d4b29eec2c auto retry 2023-06-08 06:40:16 +00:00
98 changed files with 804902 additions and 41 deletions

@@ -12407,3 +12407,233 @@
12406 Carry out roadside bombing[65]
12407 Appeal for target to allow international involvement (non-mediation)[1]
12408 Reject request for change in leadership[179]
12409 Criticize or denounce
12410 Express intent to meet or negotiate
12411 Consult
12412 Make an appeal or request
12413 Abduct, hijack, or take hostage
12414 Praise or endorse
12415 Engage in negotiation
12416 Use unconventional violence
12417 Make statement
12418 Arrest, detain, or charge with legal action
12419 Use conventional military force
12420 Complain officially
12421 Impose administrative sanctions
12422 Express intent to cooperate
12423 Make a visit
12424 Appeal for de-escalation of military engagement
12425 Sign formal agreement
12426 Attempt to assassinate
12427 Host a visit
12428 Increase military alert status
12429 Impose embargo, boycott, or sanctions
12430 Provide economic aid
12431 Demonstrate or rally
12432 Express intent to engage in diplomatic cooperation (such as policy support)
12433 Appeal for intelligence
12434 Demand
12435 Carry out suicide bombing
12436 Threaten
12437 Express intent to provide material aid
12438 Grant diplomatic recognition
12439 Meet at a 'third' location
12440 Accuse
12441 Investigate
12442 Reject
12443 Appeal for diplomatic cooperation (such as policy support)
12444 Engage in symbolic act
12445 Defy norms, law
12446 Consider policy option
12447 Provide aid
12448 Sexually assault
12449 Make empathetic comment
12450 Bring lawsuit against
12451 Impose blockade, restrict movement
12452 Make pessimistic comment
12453 Protest violently, riot
12454 Reduce or break diplomatic relations
12455 Grant asylum
12456 Engage in diplomatic cooperation
12457 Make optimistic comment
12458 Torture
12459 Refuse to yield
12460 Appeal for change in leadership
12461 Cooperate militarily
12462 Mobilize or increase armed forces
12463 fight with small arms and light weapons
12464 Ease administrative sanctions
12465 Appeal for political reform
12466 Return, release person(s)
12467 Discuss by telephone
12468 Demonstrate for leadership change
12469 Impose restrictions on political freedoms
12470 Reduce relations
12471 Investigate crime, corruption
12472 Engage in material cooperation
12473 Appeal to others to meet or negotiate
12474 Provide humanitarian aid
12475 Use tactics of violent repression
12476 Occupy territory
12477 Demand humanitarian aid
12478 Threaten non-force
12479 Express intent to cooperate economically
12480 Conduct suicide, car, or other non-military bombing
12481 Demand diplomatic cooperation (such as policy support)
12482 Demand meeting, negotiation
12483 Deny responsibility
12484 Express intent to change institutions, regime
12485 Give ultimatum
12486 Appeal for judicial cooperation
12487 Rally support on behalf of
12488 Obstruct passage, block
12489 Share intelligence or information
12490 Expel or deport individuals
12491 Confiscate property
12492 Accuse of aggression
12493 Physically assault
12494 Retreat or surrender militarily
12495 Veto
12496 Kill by physical assault
12497 Assassinate
12498 Appeal for change in institutions, regime
12499 Forgive
12500 Reject proposal to meet, discuss, or negotiate
12501 Express intent to provide humanitarian aid
12502 Appeal for release of persons or property
12503 Acknowledge or claim responsibility
12504 Ease economic sanctions, boycott, embargo
12505 Express intent to cooperate militarily
12506 Cooperate economically
12507 Express intent to provide economic aid
12508 Mobilize or increase police power
12509 Employ aerial weapons
12510 Accuse of human rights abuses
12511 Conduct strike or boycott
12512 Appeal for policy change
12513 Demonstrate military or police power
12514 Provide military aid
12515 Reject plan, agreement to settle dispute
12516 Yield
12517 Appeal for easing of administrative sanctions
12518 Mediate
12519 Apologize
12520 Express intent to release persons or property
12521 Express intent to de-escalate military engagement
12522 Accede to demands for rights
12523 Demand economic aid
12524 Impose state of emergency or martial law
12525 Receive deployment of peacekeepers
12526 Demand de-escalation of military engagement
12527 Declare truce, ceasefire
12528 Reduce or stop humanitarian assistance
12529 Appeal to others to settle dispute
12530 Reject request for military aid
12531 Threaten with political dissent, protest
12532 Appeal to engage in or accept mediation
12533 Express intent to ease economic sanctions, boycott, or embargo
12534 Coerce
12535 fight with artillery and tanks
12536 Express intent to cooperate on intelligence
12537 Express intent to settle dispute
12538 Express accord
12539 Decline comment
12540 Rally opposition against
12541 Halt negotiations
12542 Demand that target yields
12543 Appeal for military aid
12544 Threaten with military force
12545 Express intent to provide military protection or peacekeeping
12546 Threaten with sanctions, boycott, embargo
12547 Express intent to provide military aid
12548 Demand change in leadership
12549 Appeal for economic aid
12550 Refuse to de-escalate military engagement
12551 Refuse to release persons or property
12552 Increase police alert status
12553 Return, release property
12554 Ease military blockade
12555 Appeal for material cooperation
12556 Express intent to cooperate on judicial matters
12557 Appeal for economic cooperation
12558 Demand settling of dispute
12559 Accuse of crime, corruption
12560 Defend verbally
12561 Provide military protection or peacekeeping
12562 Accuse of espionage, treason
12563 Seize or damage property
12564 Accede to requests or demands for political reform
12565 Appeal for easing of economic sanctions, boycott, or embargo
12566 Threaten to reduce or stop aid
12567 Engage in judicial cooperation
12568 Appeal to yield
12569 Demand military aid
12570 Refuse to ease administrative sanctions
12571 Demand release of persons or property
12572 Accede to demands for change in leadership
12573 Appeal for humanitarian aid
12574 Threaten with repression
12575 Demand change in institutions, regime
12576 Demonstrate for policy change
12577 Appeal for aid
12578 Appeal for rights
12579 Engage in violent protest for rights
12580 Express intent to mediate
12581 Expel or withdraw peacekeepers
12582 Appeal for military protection or peacekeeping
12583 Engage in mass killings
12584 Accuse of war crimes
12585 Reject military cooperation
12586 Threaten to halt negotiations
12587 Ban political parties or politicians
12588 Express intent to change leadership
12589 Demand material cooperation
12590 Express intent to institute political reform
12591 Demand easing of administrative sanctions
12592 Express intent to engage in material cooperation
12593 Reduce or stop economic assistance
12594 Express intent to ease administrative sanctions
12595 Demand intelligence cooperation
12596 Ease curfew
12597 Receive inspectors
12598 Demand rights
12599 Demand political reform
12600 Demand judicial cooperation
12601 Engage in political dissent
12602 Detonate nuclear weapons
12603 Violate ceasefire
12604 Express intent to accept mediation
12605 Refuse to ease economic sanctions, boycott, or embargo
12606 Demand mediation
12607 Obstruct passage to demand leadership change
12608 Express intent to yield
12609 Conduct hunger strike
12610 Threaten to halt mediation
12611 Reject judicial cooperation
12612 Reduce or stop military assistance
12613 Ease political dissent
12614 Threaten to reduce or break relations
12615 Demobilize armed forces
12616 Use as human shield
12617 Demand policy change
12618 Accede to demands for change in institutions, regime
12619 Reject economic cooperation
12620 Reject material cooperation
12621 Halt mediation
12622 Accede to demands for change in policy
12623 Investigate war crimes
12624 Threaten with administrative sanctions
12625 Reduce or stop material aid
12626 Destroy property
12627 Express intent to change policy
12628 Use chemical, biological, or radiological weapons
12629 Reject request for military protection or peacekeeping
12630 Demand material aid
12631 Engage in mass expulsion
12632 Investigate human rights abuses
12633 Carry out car bombing
12634 Expel or withdraw
12635 Ease state of emergency or martial law
12636 Carry out roadside bombing
12637 Appeal for target to allow international involvement (non-mediation)
12638 Reject request for change in leadership
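
The added lines extend the relation vocabulary to id 12638, one "id name" pair per line, where the names themselves contain spaces. A minimal loader sketch (the path and the split-on-first-whitespace rule are assumptions on my part, not something the repo pins down):

    def load_relation_names(path):
        """Map integer relation ids to event names, e.g. 12409 -> 'Criticize or denounce'."""
        id2name = {}
        with open(path, encoding="utf-8") as f:
            for line in f:
                line = line.rstrip("\n")
                if not line:
                    continue
                idx, name = line.split(maxsplit=1)  # names contain spaces, so split only once
                id2name[int(idx)] = name
        return id2name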

@@ -0,0 +1,15 @@
# triples: 86517
# entities: 7128
# relations: 12409
# timesteps: 208
# test triples: 8218
# valid triples: 8193
# train triples: 70106
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 0
Search method: N/A
filter_dupes: both
nonames: False
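
This 15-line header summarizes the icews14_both split; the counts are internally consistent (70106 + 8193 + 8218 = 86517 triples). A tolerant parser sketch, assuming exactly this "key: value" layout:

    def parse_stats(path):
        """Read a dataset stats header into a dict of raw string values."""
        stats = {}
        with open(path, encoding="utf-8") as f:
            for line in f:
                line = line.lstrip("#").strip()  # accepts both '# triples: ...' and 'Target Size : 0'
                key, sep, value = line.partition(":")
                if sep:
                    stats[key.strip()] = value.strip()
        return stats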

File diff suppressed because it is too large.

File diff suppressed because it is too large.

data/icews14_both/test.txt (new file, 8218 lines): diff suppressed because it is too large.

@@ -0,0 +1,209 @@
0 0 2
1 3 5
2 6 7
3 8 9
4 10 12
5 13 14
6 15 16
7 17 19
8 20 21
9 22 23
10 24 26
11 27 28
12 29 30
13 31 33
14 34 35
15 36 37
16 38 40
17 41 42
18 43 44
19 45 46
20 47 48
21 49 49
22 50 50
23 51 51
24 52 53
25 54 54
26 55 55
27 56 57
28 58 59
29 60 61
30 62 62
31 63 63
32 64 65
33 66 68
34 69 70
35 71 71
36 72 72
37 73 74
38 75 76
39 77 78
40 79 80
41 81 82
42 83 84
43 85 85
44 86 87
45 88 89
46 90 91
47 92 93
48 94 96
49 97 97
50 98 99
51 100 101
52 102 103
53 104 105
54 106 107
55 108 110
56 111 112
57 113 114
58 115 116
59 117 118
60 119 119
61 120 121
62 122 124
63 125 125
64 126 127
65 128 129
66 130 131
67 132 133
68 134 135
69 136 138
70 139 139
71 140 140
72 141 141
73 142 143
74 144 145
75 146 147
76 148 148
77 149 150
78 151 152
79 153 154
80 155 155
81 156 157
82 158 159
83 160 161
84 162 163
85 164 166
86 167 167
87 168 168
88 169 169
89 170 170
90 171 173
91 174 175
92 176 177
93 178 180
94 181 182
95 183 183
96 184 185
97 186 187
98 188 188
99 189 190
100 191 192
101 193 194
102 195 195
103 196 197
104 198 199
105 200 201
106 202 203
107 204 205
108 206 208
109 209 210
110 211 212
111 213 215
112 216 217
113 218 219
114 220 221
115 222 222
116 223 224
117 225 226
118 227 229
119 230 231
120 232 233
121 234 236
122 237 238
123 239 239
124 240 241
125 242 243
126 244 245
127 246 246
128 247 248
129 249 250
130 251 251
131 252 252
132 253 253
133 254 254
134 255 256
135 257 257
136 258 259
137 260 261
138 262 263
139 264 264
140 265 265
141 266 266
142 267 267
143 268 269
144 270 271
145 272 272
146 273 273
147 274 274
148 275 276
149 277 278
150 279 279
151 280 281
152 282 283
153 284 285
154 286 286
155 287 287
156 288 288
157 289 289
158 290 291
159 292 292
160 293 293
161 294 294
162 295 295
163 296 297
164 298 299
165 300 300
166 301 301
167 302 303
168 304 305
169 306 307
170 308 309
171 310 310
172 311 312
173 313 313
174 314 314
175 315 315
176 316 316
177 317 317
178 318 319
179 320 320
180 321 321
181 322 322
182 323 323
183 324 324
184 325 326
185 327 327
186 328 328
187 329 329
188 330 330
189 331 332
190 333 334
191 335 335
192 336 336
193 337 338
194 339 340
195 341 342
196 343 343
197 344 344
198 345 346
199 347 348
200 349 349
201 350 350
202 351 352
203 353 355
204 356 357
205 358 359
206 360 362
207 363 365
208 366 366
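
Each row is "bucket_id first last": 209 variable-width buckets covering raw timestep indices 0-366, so sparse timesteps share a bucket while dense ones stand alone. A loading sketch (the column meaning is inferred from the data; consecutive ids are an assumption):

    def load_buckets(path):
        """Return a list of (first, last) raw-timestep ranges indexed by bucket id."""
        buckets = []
        with open(path, encoding="utf-8") as f:
            for line in f:
                parts = line.split()
                if len(parts) != 3:
                    continue
                idx, first, last = map(int, parts)
                assert idx == len(buckets), "bucket ids assumed consecutive from 0"
                buckets.append((first, last))
        return buckets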

data/icews14_both/train.txt (new file, 70106 lines): diff suppressed because it is too large.

data/icews14_both/valid.txt (new file, 8193 lines): diff suppressed because it is too large.

@@ -0,0 +1,15 @@
# triples: 231529
# entities: 12554
# relations: 423
# timesteps: 70
# test triples: 16195
# valid triples: 16707
# train triples: 198627
Measure method: N/A
Target Size : 423
Grow Factor: 0
Shrink Factor: 4.0
Epsilon Factor: 0
Search method: N/A
filter_dupes: both
nonames: False

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -0,0 +1,447 @@
0 P131[0-0]
1 P131[1-1]
2 P131[2-2]
3 P131[3-3]
4 P131[4-4]
5 P131[5-5]
6 P131[6-6]
7 P131[7-7]
8 P131[8-8]
9 P131[9-9]
10 P131[10-10]
11 P131[11-11]
12 P131[12-12]
13 P131[13-13]
14 P131[14-14]
15 P131[15-15]
16 P131[16-16]
17 P131[17-17]
18 P131[18-18]
19 P131[19-19]
20 P131[20-20]
21 P131[21-21]
22 P131[22-22]
23 P131[23-23]
24 P131[24-24]
25 P131[25-25]
26 P131[26-26]
27 P131[27-27]
28 P131[28-28]
29 P131[29-29]
30 P131[30-30]
31 P131[31-31]
32 P131[32-32]
33 P131[33-33]
34 P131[34-34]
35 P131[35-35]
36 P131[36-36]
37 P131[37-37]
38 P131[38-38]
39 P131[39-39]
40 P131[40-40]
41 P131[41-41]
42 P131[42-42]
43 P131[43-43]
44 P131[44-44]
45 P131[45-45]
46 P131[46-46]
47 P131[47-47]
48 P131[48-48]
49 P131[49-49]
50 P131[50-50]
51 P131[51-51]
52 P131[52-52]
53 P131[53-53]
54 P131[54-54]
55 P131[55-55]
56 P131[56-56]
57 P131[57-57]
58 P131[58-58]
59 P131[59-59]
60 P131[60-60]
61 P131[61-61]
62 P131[62-62]
63 P131[63-63]
64 P131[64-64]
65 P131[65-65]
66 P131[66-66]
67 P131[67-67]
68 P131[68-68]
69 P131[69-69]
70 P1435[65-65]
71 P39[49-49]
72 P39[50-50]
73 P39[51-51]
74 P39[52-52]
75 P39[53-53]
76 P39[54-54]
77 P39[55-55]
78 P39[56-56]
79 P39[57-57]
80 P39[58-58]
81 P39[59-59]
82 P39[60-60]
83 P39[61-61]
84 P39[62-62]
85 P39[63-63]
86 P39[64-64]
87 P39[65-65]
88 P39[66-66]
89 P39[67-67]
90 P39[68-68]
91 P39[69-69]
92 P54[40-40]
93 P54[41-41]
94 P54[42-42]
95 P54[43-43]
96 P54[44-44]
97 P54[45-45]
98 P54[46-46]
99 P54[47-47]
100 P54[48-48]
101 P54[49-49]
102 P54[50-50]
103 P54[51-51]
104 P54[52-52]
105 P54[53-53]
106 P54[54-54]
107 P54[55-55]
108 P54[56-56]
109 P54[57-57]
110 P54[58-58]
111 P54[59-59]
112 P54[60-60]
113 P54[61-61]
114 P54[62-62]
115 P54[63-63]
116 P54[64-64]
117 P54[65-65]
118 P54[66-66]
119 P54[67-67]
120 P54[68-68]
121 P54[69-69]
122 P31[0-0]
123 P31[1-1]
124 P31[2-2]
125 P31[3-3]
126 P31[4-4]
127 P31[5-5]
128 P31[6-6]
129 P31[7-7]
130 P31[8-8]
131 P31[9-9]
132 P31[10-10]
133 P31[11-11]
134 P31[12-12]
135 P31[13-13]
136 P31[14-14]
137 P31[15-15]
138 P31[16-16]
139 P31[17-17]
140 P31[18-18]
141 P31[19-19]
142 P31[20-20]
143 P31[21-21]
144 P31[22-22]
145 P31[23-23]
146 P31[24-24]
147 P31[25-25]
148 P31[26-26]
149 P31[27-27]
150 P31[28-28]
151 P31[29-29]
152 P31[30-30]
153 P31[31-31]
154 P31[32-32]
155 P31[33-33]
156 P31[34-34]
157 P31[35-35]
158 P31[36-36]
159 P31[37-37]
160 P31[38-38]
161 P31[39-39]
162 P31[40-40]
163 P31[41-41]
164 P31[42-42]
165 P31[43-43]
166 P31[44-44]
167 P31[45-45]
168 P31[46-46]
169 P31[47-47]
170 P31[48-48]
171 P31[49-49]
172 P31[50-50]
173 P31[51-51]
174 P31[52-52]
175 P31[53-53]
176 P31[54-54]
177 P31[55-55]
178 P31[56-56]
179 P31[57-57]
180 P31[58-58]
181 P31[59-59]
182 P31[60-60]
183 P31[61-61]
184 P31[62-62]
185 P31[63-63]
186 P31[64-64]
187 P31[65-65]
188 P31[66-66]
189 P31[67-67]
190 P31[68-68]
191 P31[69-69]
192 P463[26-26]
193 P463[27-27]
194 P463[28-28]
195 P463[29-29]
196 P463[30-30]
197 P463[31-31]
198 P463[32-32]
199 P463[33-33]
200 P463[34-34]
201 P463[35-35]
202 P463[36-36]
203 P463[37-37]
204 P463[38-38]
205 P463[39-39]
206 P463[40-40]
207 P463[41-41]
208 P463[42-42]
209 P463[43-43]
210 P463[44-44]
211 P463[45-45]
212 P463[46-46]
213 P463[47-47]
214 P463[48-48]
215 P463[49-49]
216 P463[50-50]
217 P463[51-51]
218 P463[52-52]
219 P463[53-53]
220 P463[54-54]
221 P463[55-55]
222 P463[56-56]
223 P463[57-57]
224 P463[58-58]
225 P463[59-59]
226 P463[60-60]
227 P463[61-61]
228 P463[62-62]
229 P463[63-63]
230 P463[64-64]
231 P463[65-65]
232 P463[66-66]
233 P463[67-67]
234 P463[68-68]
235 P463[69-69]
236 P512[4-69]
237 P190[0-29]
238 P150[0-3]
239 P1376[39-47]
240 P463[0-7]
241 P166[0-7]
242 P2962[18-30]
243 P108[29-36]
244 P39[0-3]
245 P17[47-48]
246 P166[21-23]
247 P793[46-69]
248 P69[32-41]
249 P17[57-58]
250 P190[42-45]
251 P2962[39-42]
252 P54[0-18]
253 P26[56-61]
254 P150[14-17]
255 P463[16-17]
256 P26[39-46]
257 P579[36-43]
258 P579[16-23]
259 P2962[59-60]
260 P1411[59-61]
261 P26[20-27]
262 P6[4-69]
263 P1435[33-34]
264 P166[52-53]
265 P108[49-57]
266 P150[10-13]
267 P1346[47-68]
268 P150[18-21]
269 P1346[13-46]
270 P69[20-23]
271 P39[31-32]
272 P1411[32-37]
273 P166[62-63]
274 P150[44-47]
275 P2962[61-62]
276 P150[48-51]
277 P150[52-55]
278 P1411[62-67]
279 P1435[35-36]
280 P1411[48-51]
281 P150[22-25]
282 P2962[63-64]
283 P2962[65-66]
284 P166[58-59]
285 P190[46-49]
286 P54[34-35]
287 P1435[4-16]
288 P463[18-19]
289 P150[31-34]
290 P150[35-38]
291 P39[35-36]
292 P26[62-69]
293 P1411[56-58]
294 P1435[37-38]
295 P166[60-61]
296 P39[33-34]
297 P102[24-31]
298 P2962[43-46]
299 P108[37-48]
300 P190[50-53]
301 P39[4-6]
302 P1435[39-40]
303 P793[0-45]
304 P150[64-69]
305 P39[19-22]
306 P27[30-38]
307 P2962[31-38]
308 P1411[24-31]
309 P102[40-45]
310 P39[37-38]
311 P463[8-11]
312 P1435[41-42]
313 P27[52-59]
314 P69[16-19]
315 P17[16-18]
316 P190[54-57]
317 P1435[43-44]
318 P166[8-15]
319 P166[45-47]
320 P2962[47-50]
321 P39[39-40]
322 P1411[52-55]
323 P108[58-69]
324 P463[20-21]
325 P39[41-42]
326 P150[26-30]
327 P150[39-43]
328 P1435[45-46]
329 P26[28-38]
330 P54[27-30]
331 P190[58-61]
332 P17[59-61]
333 P54[36-37]
334 P166[16-20]
335 P166[37-40]
336 P1435[47-48]
337 P17[0-3]
338 P26[47-55]
339 P1435[49-50]
340 P1435[25-28]
341 P150[4-9]
342 P102[63-69]
343 P26[0-19]
344 P1435[17-24]
345 P39[23-26]
346 P1435[51-52]
347 P39[7-11]
348 P69[12-15]
349 P69[24-31]
350 P102[0-23]
351 P39[43-44]
352 P579[24-35]
353 P190[62-65]
354 P1435[53-54]
355 P1376[0-18]
356 P27[0-14]
357 P463[12-15]
358 P166[33-36]
359 P102[32-39]
360 P17[4-7]
361 P190[30-41]
362 P166[24-28]
363 P190[66-69]
364 P69[42-69]
365 P1435[55-56]
366 P54[31-33]
367 P39[45-46]
368 P17[12-15]
369 P1435[57-58]
370 P54[19-26]
371 P2962[51-54]
372 P2962[67-69]
373 P1435[59-60]
374 P579[44-56]
375 P1435[61-62]
376 P166[41-44]
377 P17[19-22]
378 P1376[19-38]
379 P17[23-26]
380 P1376[48-69]
381 P463[22-23]
382 P17[27-30]
383 P1435[63-64]
384 P69[0-3]
385 P1435[66-67]
386 P17[35-38]
387 P69[8-11]
388 P1435[68-69]
389 P17[31-34]
390 P102[46-53]
391 P27[60-69]
392 P579[57-69]
393 P69[4-7]
394 P1411[7-14]
395 P551[0-35]
396 P108[0-28]
397 P17[8-11]
398 P1411[38-47]
399 P17[43-46]
400 P17[49-52]
401 P166[64-69]
402 P1435[29-32]
403 P54[38-39]
404 P39[27-30]
405 P2962[55-58]
406 P463[24-25]
407 P17[39-42]
408 P17[53-56]
409 P17[66-69]
410 P17[62-65]
411 P1411[15-23]
412 P166[48-51]
413 P27[15-29]
414 P150[56-63]
415 P27[39-51]
416 P39[47-48]
417 P166[29-32]
418 P39[12-18]
419 P166[54-57]
420 P551[36-69]
421 P579[0-15]
422 P102[54-62]
423 P131
424 P1435
425 P39
426 P54
427 P31
428 P463
429 P512
430 P190
431 P150
432 P1376
433 P166
434 P2962
435 P108
436 P17
437 P793
438 P69
439 P26
440 P579
441 P1411
442 P6
443 P1346
444 P102
445 P27
446 P551
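
Ids 0-422 are Wikidata properties stamped with a [start-end] timestep window, and ids 423-446 are the 24 bare base properties. A parsing sketch for these tokens (the regex is mine; a bare property comes back with an empty window), which also covers the yago11k tokens such as <wasBornIn>[0-2] further down:

    import re

    TOKEN = re.compile(r"^(?P<rel>[^\[\]]+?)(?:\[(?P<lo>-?\d+)-(?P<hi>-?\d+)\])?$")

    def parse_token(token):
        """'P131[0-0]' -> ('P131', 0, 0); bare 'P131' -> ('P131', None, None)."""
        m = TOKEN.match(token)
        if m is None:
            raise ValueError(f"unparseable relation token: {token!r}")
        lo, hi = m.group("lo"), m.group("hi")
        return m.group("rel"), (int(lo) if lo is not None else None), (int(hi) if hi is not None else None)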

data/wikidata12k_both/test.txt (new file, 16195 lines): diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -0,0 +1,71 @@
0 19 19
1 20 1643
2 1644 1790
3 1791 1816
4 1817 1855
5 1856 1871
6 1872 1893
7 1894 1905
8 1906 1913
9 1914 1918
10 1919 1920
11 1921 1924
12 1925 1929
13 1930 1933
14 1934 1937
15 1938 1941
16 1942 1945
17 1946 1948
18 1949 1950
19 1951 1953
20 1954 1956
21 1957 1959
22 1960 1961
23 1962 1963
24 1964 1965
25 1966 1967
26 1968 1968
27 1969 1970
28 1971 1972
29 1973 1974
30 1975 1976
31 1977 1978
32 1979 1980
33 1981 1982
34 1983 1983
35 1984 1984
36 1985 1985
37 1986 1986
38 1987 1987
39 1988 1988
40 1989 1989
41 1990 1990
42 1991 1991
43 1992 1992
44 1993 1993
45 1994 1994
46 1995 1995
47 1996 1996
48 1997 1997
49 1998 1998
50 1999 1999
51 2000 2000
52 2001 2001
53 2002 2002
54 2003 2003
55 2004 2004
56 2005 2005
57 2006 2006
58 2007 2007
59 2008 2008
60 2009 2009
61 2010 2010
62 2011 2011
63 2012 2012
64 2013 2013
65 2014 2014
66 2015 2015
67 2016 2016
68 2017 2017
69 2018 2020
70 2021 2021
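
Unlike icews14's day buckets, wikidata12k buckets raw years, with widths shrinking from 1624 years (20-1643) down to single years near the present. Given the (first, last) list produced by the loader sketched earlier, a reverse lookup with bisect (the function name is mine):

    from bisect import bisect_right

    def year_to_bucket(year, buckets):
        """Return the bucket id whose [first, last] range contains year, else None."""
        starts = [first for first, _ in buckets]
        i = bisect_right(starts, year) - 1
        if i >= 0 and buckets[i][0] <= year <= buckets[i][1]:
            return i
        return None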

data/wikidata12k_both/train.txt (new file, 198627 lines): diff suppressed because it is too large.

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -0,0 +1,15 @@
# triples: 78032
# entities: 10526
# relations: 177
# timesteps: 46
# test triples: 6909
# valid triples: 7198
# train triples: 63925
Measure method: N/A
Target Size : 0
Grow Factor: 0
Shrink Factor: 0
Epsilon Factor: 5.0
Search method: N/A
filter_dupes: both
nonames: False

File diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -0,0 +1,177 @@
0 <wasBornIn>[0-2]
1 <wasBornIn>[2-5]
2 <wasBornIn>[5-7]
3 <wasBornIn>[7-10]
4 <wasBornIn>[10-12]
5 <wasBornIn>[12-15]
6 <wasBornIn>[15-17]
7 <wasBornIn>[17-20]
8 <wasBornIn>[20-22]
9 <wasBornIn>[22-25]
10 <wasBornIn>[25-27]
11 <wasBornIn>[27-30]
12 <wasBornIn>[30-32]
13 <wasBornIn>[32-35]
14 <wasBornIn>[35-45]
15 <wasBornIn>[52-52]
16 <diedIn>[0-3]
17 <diedIn>[3-5]
18 <diedIn>[5-7]
19 <diedIn>[7-10]
20 <diedIn>[10-12]
21 <diedIn>[12-14]
22 <diedIn>[14-17]
23 <diedIn>[17-19]
24 <diedIn>[19-21]
25 <diedIn>[21-23]
26 <diedIn>[23-25]
27 <diedIn>[25-27]
28 <diedIn>[27-29]
29 <diedIn>[29-32]
30 <diedIn>[32-34]
31 <diedIn>[34-36]
32 <diedIn>[36-38]
33 <diedIn>[38-40]
34 <diedIn>[40-42]
35 <diedIn>[42-44]
36 <diedIn>[44-47]
37 <diedIn>[47-49]
38 <diedIn>[49-51]
39 <diedIn>[51-53]
40 <diedIn>[53-55]
41 <diedIn>[55-57]
42 <diedIn>[59-59]
43 <worksAt>[0-3]
44 <worksAt>[3-5]
45 <worksAt>[5-7]
46 <worksAt>[7-10]
47 <worksAt>[10-12]
48 <worksAt>[12-14]
49 <worksAt>[14-17]
50 <worksAt>[17-19]
51 <worksAt>[19-21]
52 <worksAt>[21-23]
53 <worksAt>[23-25]
54 <worksAt>[25-27]
55 <worksAt>[27-29]
56 <worksAt>[29-32]
57 <worksAt>[32-34]
58 <worksAt>[34-36]
59 <worksAt>[36-40]
60 <worksAt>[40-42]
61 <worksAt>[42-47]
62 <worksAt>[47-53]
63 <worksAt>[59-59]
64 <playsFor>[0-3]
65 <playsFor>[3-5]
66 <playsFor>[5-23]
67 <playsFor>[23-25]
68 <playsFor>[25-27]
69 <playsFor>[27-29]
70 <playsFor>[29-32]
71 <playsFor>[32-34]
72 <playsFor>[34-36]
73 <playsFor>[36-38]
74 <playsFor>[38-40]
75 <playsFor>[40-42]
76 <playsFor>[42-44]
77 <playsFor>[44-47]
78 <playsFor>[47-51]
79 <playsFor>[59-59]
80 <hasWonPrize>[1-4]
81 <hasWonPrize>[4-6]
82 <hasWonPrize>[6-8]
83 <hasWonPrize>[8-11]
84 <hasWonPrize>[11-15]
85 <hasWonPrize>[15-18]
86 <hasWonPrize>[18-22]
87 <hasWonPrize>[22-26]
88 <hasWonPrize>[26-30]
89 <hasWonPrize>[30-33]
90 <hasWonPrize>[33-37]
91 <hasWonPrize>[37-47]
92 <hasWonPrize>[47-53]
93 <hasWonPrize>[59-59]
94 <isMarriedTo>[0-3]
95 <isMarriedTo>[3-5]
96 <isMarriedTo>[5-7]
97 <isMarriedTo>[7-10]
98 <isMarriedTo>[10-12]
99 <isMarriedTo>[12-14]
100 <isMarriedTo>[14-17]
101 <isMarriedTo>[17-19]
102 <isMarriedTo>[19-21]
103 <isMarriedTo>[21-23]
104 <isMarriedTo>[23-25]
105 <isMarriedTo>[25-27]
106 <isMarriedTo>[27-29]
107 <isMarriedTo>[29-32]
108 <isMarriedTo>[32-34]
109 <isMarriedTo>[34-38]
110 <isMarriedTo>[38-42]
111 <isMarriedTo>[42-47]
112 <isMarriedTo>[47-51]
113 <isMarriedTo>[51-55]
114 <isMarriedTo>[59-59]
115 <owns>[0-10]
116 <owns>[10-17]
117 <owns>[17-19]
118 <owns>[19-23]
119 <owns>[23-36]
120 <owns>[36-38]
121 <owns>[59-59]
122 <graduatedFrom>[0-3]
123 <graduatedFrom>[3-5]
124 <graduatedFrom>[5-7]
125 <graduatedFrom>[7-10]
126 <graduatedFrom>[10-14]
127 <graduatedFrom>[14-17]
128 <graduatedFrom>[17-19]
129 <graduatedFrom>[19-21]
130 <graduatedFrom>[21-23]
131 <graduatedFrom>[23-27]
132 <graduatedFrom>[27-32]
133 <graduatedFrom>[32-34]
134 <graduatedFrom>[34-38]
135 <graduatedFrom>[38-42]
136 <graduatedFrom>[59-59]
137 <isAffiliatedTo>[1-4]
138 <isAffiliatedTo>[4-6]
139 <isAffiliatedTo>[6-8]
140 <isAffiliatedTo>[8-11]
141 <isAffiliatedTo>[11-13]
142 <isAffiliatedTo>[13-15]
143 <isAffiliatedTo>[15-18]
144 <isAffiliatedTo>[18-20]
145 <isAffiliatedTo>[20-22]
146 <isAffiliatedTo>[22-24]
147 <isAffiliatedTo>[24-26]
148 <isAffiliatedTo>[26-28]
149 <isAffiliatedTo>[28-30]
150 <isAffiliatedTo>[30-33]
151 <isAffiliatedTo>[33-35]
152 <isAffiliatedTo>[35-37]
153 <isAffiliatedTo>[37-40]
154 <isAffiliatedTo>[40-42]
155 <isAffiliatedTo>[42-44]
156 <isAffiliatedTo>[44-47]
157 <isAffiliatedTo>[47-49]
158 <isAffiliatedTo>[49-51]
159 <isAffiliatedTo>[51-53]
160 <isAffiliatedTo>[53-55]
161 <isAffiliatedTo>[55-57]
162 <isAffiliatedTo>[59-59]
163 <created>[0-3]
164 <created>[3-5]
165 <created>[5-10]
166 <created>[10-12]
167 <created>[12-17]
168 <created>[17-19]
169 <created>[19-25]
170 <created>[25-29]
171 <created>[29-32]
172 <created>[32-36]
173 <created>[36-42]
174 <created>[42-47]
175 <created>[47-53]
176 <created>[59-59]

data/yago11k_both/test.txt (new file, 6909 lines): diff suppressed because it is too large.

File diff suppressed because it is too large.

@@ -0,0 +1,60 @@
0 -431 1782
1 1783 1848
2 1849 1870
3 1871 1888
4 1889 1899
5 1900 1906
6 1907 1912
7 1913 1917
8 1918 1922
9 1923 1926
10 1927 1930
11 1931 1934
12 1935 1938
13 1939 1941
14 1942 1944
15 1945 1947
16 1948 1950
17 1951 1953
18 1954 1956
19 1957 1959
20 1960 1962
21 1963 1965
22 1966 1967
23 1968 1969
24 1970 1971
25 1972 1973
26 1974 1975
27 1976 1977
28 1978 1979
29 1980 1981
30 1982 1983
31 1984 1985
32 1986 1987
33 1988 1989
34 1990 1991
35 1992 1993
36 1994 1994
37 1995 1996
38 1997 1997
39 1998 1998
40 1999 1999
41 2000 2000
42 2001 2001
43 2002 2002
44 2003 2003
45 2004 2004
46 2005 2005
47 2006 2006
48 2007 2007
49 2008 2008
50 2009 2009
51 2010 2010
52 2011 2011
53 2012 2012
54 2013 2013
55 2014 2014
56 2015 2015
57 2016 2016
58 2017 2017
59 2018 2018

data/yago11k_both/train.txt (new file, 63925 lines): diff suppressed because it is too large.

File diff suppressed because it is too large.

data/yago11k_both/valid.txt (new file, 7198 lines): diff suppressed because it is too large.

icews14.out (new file, 9483 lines): diff suppressed because it is too large.

icews14_both.log (new file, 3 lines)

@@ -0,0 +1,3 @@
nohup: ignoring input
2023-06-20 09:22:51,618 - [INFO] - {'dataset': 'icews14_both', 'name': 'icews14_both', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-20 09:22:57,979 - [INFO] - [E:0| 0]: Train Loss:0.70005, Val MRR:0.0, icews14_both
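
Each run logs its full hyperparameter dict as a Python literal, so a config can be recovered straight from the log file. A sketch (it assumes the dict is the tail of the line and holds only literals, which is true of the lines shown here):

    import ast

    def config_from_log_line(line):
        """Extract the hyperparameter dict from a log line like the one above."""
        return ast.literal_eval(line[line.index("{"):])

For the first INFO line above, config_from_log_line(line)["lr"] returns 0.0001.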

icews14_l2_1e-5.out (new file, 4331 lines): diff suppressed because it is too large.

@@ -1964,3 +1964,25 @@
2023-05-04 08:27:31,384 - fb_one_to_x - [INFO] - [E:34| 1500]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:29:20,404 - fb_one_to_x - [INFO] - [E:34| 1600]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:31:12,139 - fb_one_to_x - [INFO] - [E:34| 1700]: Train Loss:0.0027362, Val MRR:0.33574, fb_one_to_x
2023-05-04 08:55:56,065 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True}
2023-05-04 08:56:07,953 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0] fb_one_to_x
2023-05-04 08:56:53,173 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100] fb_one_to_x
2023-05-04 08:57:20,187 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0] fb_one_to_x
2023-05-04 08:58:08,090 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100] fb_one_to_x
2023-05-04 08:58:36,338 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:
MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142
MR: Tail : 149.91, Head : 288.48, Avg : 219.2
Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223
Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292
Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024
2023-05-04 09:03:55,555 - fb_one_to_x - [INFO] - {'dataset': 'FB15k-237', 'name': 'fb_one_to_x', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.1, 'drop': 0.2, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True}
2023-05-04 09:04:07,491 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 0] fb_one_to_x
2023-05-04 09:04:52,620 - fb_one_to_x - [INFO] - [Test, Tail_Batch Step 100] fb_one_to_x
2023-05-04 09:05:19,645 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 0] fb_one_to_x
2023-05-04 09:06:07,591 - fb_one_to_x - [INFO] - [Test, Head_Batch Step 100] fb_one_to_x
2023-05-04 09:06:35,660 - fb_one_to_x - [INFO] - [Evaluating Epoch 0 test]:
MRR: Tail : 0.43029, Head : 0.23256, Avg : 0.33142
MR: Tail : 149.91, Head : 288.48, Avg : 219.2
Hit-1: Tail : 0.33563, Head : 0.14883, Avg : 0.24223
Hit-3: Tail : 0.47068, Head : 0.25515, Avg : 0.36292
Hit-10: Tail : 0.61952, Head : 0.40096, Avg : 0.51024

log/ice00001 (new file, 14945 lines): diff suppressed because it is too large.

log/ice0003 (new file, 4904 lines): diff suppressed because it is too large.

log/ice0003_2 (new file, 6607 lines): diff suppressed because it is too large.

log/ice001 (new file, 6205 lines): diff suppressed because it is too large.

log/ice14ws_128 (new file, 9541 lines): diff suppressed because it is too large.

log/iceboth (new file, 4154 lines): diff suppressed because it is too large.

log/icews14 (new file, 9482 lines): diff suppressed because it is too large.

log/icews14_128 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-13 03:52:44,141 - icews14_128 - [INFO] - {'dataset': 'icews14', 'name': 'icews14_128', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': True, 'filtered': False}

log/icews14_both (new file, 10670 lines): diff suppressed because it is too large.

log/poofnet.log (new file, 2 lines)

@@ -0,0 +1,2 @@
nohup: ignoring input
python: can't open file 'run.py': [Errno 2] No such file or directory

log/testrun_227cb2f9 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:54:57,988 - testrun_227cb2f9 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_227cb2f9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_30d70322 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:23:34,181 - testrun_30d70322 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_30d70322', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_3212b281 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:53:01,668 - testrun_3212b281 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_3212b281', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_3dbc9e89 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-06 08:35:38,753 - testrun_3dbc9e89 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_3dbc9e89', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

log/testrun_43389ddf (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:38:00,469 - testrun_43389ddf - [INFO] - {'dataset': 'icews14', 'name': 'testrun_43389ddf', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_47ede3b9 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:13:02,952 - testrun_47ede3b9 - [INFO] - {'dataset': 'FB15k-237', 'name': 'testrun_47ede3b9', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_49495af8 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-06 08:37:18,939 - testrun_49495af8 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_49495af8', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

log/testrun_4a235016 (new file, 7877 lines): diff suppressed because it is too large.

log/testrun_4f5d8391 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-06 08:35:13,356 - testrun_4f5d8391 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_4f5d8391', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

log/testrun_540f6a03 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-06 08:34:55,992 - testrun_540f6a03 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_540f6a03', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

log/testrun_5a901712 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 07:04:56,051 - testrun_5a901712 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5a901712', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_5cafe61a (new file, 44 lines)

@@ -0,0 +1,44 @@
2023-05-17 06:48:57,396 - testrun_5cafe61a - [INFO] - {'dataset': 'icews14', 'name': 'testrun_5cafe61a', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}
2023-05-17 06:49:44,802 - concurrent.futures - [ERROR] - exception calling callback for <Future at 0x7efb51b74160 state=finished raised BrokenProcessPool>
joblib.externals.loky.process_executor._RemoteTraceback:
"""
Traceback (most recent call last):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 391, in _process_worker
    call_item = call_queue.get(block=True, timeout=timeout)
  File "/opt/conda/envs/kgs2s/lib/python3.8/multiprocessing/queues.py", line 116, in get
    return _ForkingPickler.loads(res)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/storage.py", line 222, in _load_from_bytes
    return torch.load(io.BytesIO(b))
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 713, in load
    return _legacy_load(opened_file, map_location, pickle_module, **pickle_load_args)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 930, in _legacy_load
    result = unpickler.load()
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 876, in persistent_load
    wrap_storage=restore_location(obj, location),
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 175, in default_restore_location
    result = fn(storage, location)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/torch/serialization.py", line 155, in _cuda_deserialize
    return torch._UntypedStorage(obj.nbytes(), device=torch.device(location))
RuntimeError: CUDA out of memory. Tried to allocate 678.00 MiB (GPU 0; 31.72 GiB total capacity; 0 bytes already allocated; 593.94 MiB free; 0 bytes reserved in total by PyTorch) If reserved memory is >> allocated memory try setting max_split_size_mb to avoid fragmentation. See documentation for Memory Management and PYTORCH_CUDA_ALLOC_CONF
"""

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/_base.py", line 26, in _invoke_callbacks
    callback(self)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 385, in __call__
    self.parallel.dispatch_next()
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 834, in dispatch_next
    if not self.dispatch_one_batch(self._original_iterator):
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 901, in dispatch_one_batch
    self._dispatch(tasks)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/parallel.py", line 819, in _dispatch
    job = self._backend.apply_async(batch, callback=cb)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/_parallel_backends.py", line 556, in apply_async
    future = self._workers.submit(SafeFunction(func))
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/reusable_executor.py", line 176, in submit
    return super().submit(fn, *args, **kwargs)
  File "/opt/conda/envs/kgs2s/lib/python3.8/site-packages/joblib/externals/loky/process_executor.py", line 1129, in submit
    raise self._flags.broken
joblib.externals.loky.process_executor.BrokenProcessPool: A task has failed to un-serialize. Please ensure that the arguments of the function are all picklable.
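
The grid-search worker died while unpickling a CUDA tensor onto GPU 0, which had almost no free memory. The error text itself points at the allocator's max_split_size_mb option; a hedged sketch (the value 128 is illustrative, and the variable must be set before CUDA is initialized):

    import os
    os.environ.setdefault("PYTORCH_CUDA_ALLOC_CONF", "max_split_size_mb:128")  # set before importing torch
    import torch

When loading a checkpoint directly, torch.load(path, map_location="cpu") also sidesteps deserializing onto a full GPU; here the load happens inside joblib's worker process, so the environment variable is the more accessible knob.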

log/testrun_6fd94d59 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-06 08:34:33,652 - testrun_6fd94d59 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_6fd94d59', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}

log/testrun_7c096a18 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:56:35,124 - testrun_7c096a18 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7c096a18', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_7fb885ee (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 07:13:14,777 - testrun_7fb885ee - [INFO] - {'dataset': 'icews14', 'name': 'testrun_7fb885ee', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_8f32040f (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:59:35,220 - testrun_8f32040f - [INFO] - {'dataset': 'icews14', 'name': 'testrun_8f32040f', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_958ef154 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:16:45,427 - testrun_958ef154 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_958ef154', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_9acdfb58 (new file, 2 lines)

@@ -0,0 +1,2 @@
2023-05-06 08:36:46,668 - testrun_9acdfb58 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_9acdfb58', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:36:57,409 - testrun_9acdfb58 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_9acdfb58

log/testrun_a051cf32 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:36:14,606 - testrun_a051cf32 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a051cf32', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_a06d39d0 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:13:16,274 - testrun_a06d39d0 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_a06d39d0', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_aca2b734 (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:41:20,654 - testrun_aca2b734 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_aca2b734', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_ad7a0edb (new file, 1 line)

@@ -0,0 +1 @@
2023-05-17 06:45:54,332 - testrun_ad7a0edb - [INFO] - {'dataset': 'icews14', 'name': 'testrun_ad7a0edb', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

log/testrun_ae6f81ee (new file, 7958 lines): diff suppressed because it is too large.

log/testrun_b381870f (new file, 1 line)

@@ -0,0 +1 @@
2023-05-30 17:54:20,857 - testrun_b381870f - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b381870f', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}

2
log/testrun_b396dcde Normal file

@ -0,0 +1,2 @@
2023-05-30 17:56:25,430 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-05-30 17:57:00,673 - testrun_b396dcde - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_b396dcde', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False, 'num_ent': 12554, 'num_rel': 423}

1
log/testrun_bbf65ab5 Normal file

@ -0,0 +1 @@
2023-05-17 06:21:14,228 - testrun_bbf65ab5 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bbf65ab5', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_bfaa042b Normal file

@ -0,0 +1 @@
2023-05-17 06:50:58,251 - testrun_bfaa042b - [INFO] - {'dataset': 'icews14', 'name': 'testrun_bfaa042b', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_c77a8ec3 Normal file

@ -0,0 +1 @@
2023-05-17 06:37:11,288 - testrun_c77a8ec3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_c77a8ec3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_cb3528f3 Normal file

@ -0,0 +1 @@
2023-05-17 07:08:13,688 - testrun_cb3528f3 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cb3528f3', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_cd333c33 Normal file

@ -0,0 +1 @@
2023-05-17 06:25:12,047 - testrun_cd333c33 - [INFO] - {'dataset': 'icews14', 'name': 'testrun_cd333c33', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

2
log/testrun_d0367b19 Normal file

@ -0,0 +1,2 @@
2023-05-06 08:37:25,129 - testrun_d0367b19 - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_d0367b19', 'gpu': '3', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False}
2023-05-06 08:37:36,239 - testrun_d0367b19 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, testrun_d0367b19

9001
log/testrun_d2ab6391 Normal file

File diff suppressed because it is too large

11836
log/testrun_e1726b98 Normal file

File diff suppressed because it is too large

1
log/testrun_f0394b3c Normal file

@ -0,0 +1 @@
2023-05-17 06:47:48,537 - testrun_f0394b3c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_f0394b3c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1
log/testrun_f42f568c Normal file

@ -0,0 +1 @@
2023-05-30 17:55:52,461 - testrun_f42f568c - [INFO] - {'dataset': 'wikidata12k', 'name': 'testrun_f42f568c', 'gpu': '0', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0003, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}

1
log/testrun_fdb0e82c Normal file

@ -0,0 +1 @@
2023-05-17 06:39:01,301 - testrun_fdb0e82c - [INFO] - {'dataset': 'icews14', 'name': 'testrun_fdb0e82c', 'gpu': '1', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': True}

1116
log/wikidata12k Normal file

File diff suppressed because it is too large

2
log/wikidata12k_0.00003 Normal file

@ -0,0 +1,2 @@
2023-06-04 17:05:45,012 - wikidata12k_0.00003 - [INFO] - {'dataset': 'wikidata12k', 'name': 'wikidata12k_0.00003', 'gpu': '2', 'train_strategy': 'one_to_n', 'opt': 'adam', 'neg_num': 1000, 'batch_size': 128, 'l2': 0.0, 'lr': 0.0001, 'max_epochs': 500, 'num_workers': 0, 'seed': 42, 'restore': False, 'lbl_smooth': 0.1, 'embed_dim': 400, 'ent_vec_dim': 400, 'rel_vec_dim': 400, 'bias': False, 'form': 'plain', 'k_w': 10, 'k_h': 20, 'num_filt': 96, 'ker_sz': 9, 'perm': 1, 'hid_drop': 0.5, 'feat_drop': 0.2, 'inp_drop': 0.2, 'drop_path': 0.0, 'drop': 0.0, 'in_channels': 1, 'out_channels': 32, 'filt_h': 1, 'filt_w': 9, 'image_h': 128, 'image_w': 128, 'patch_size': 8, 'mixer_dim': 256, 'expansion_factor': 4, 'expansion_factor_token': 0.5, 'mixer_depth': 16, 'mixer_dropout': 0.2, 'log_dir': './log/', 'config_dir': './config/', 'test_only': False, 'grid_search': False}
2023-06-04 17:06:06,702 - wikidata12k_0.00003 - [INFO] - [E:0| 0]: Train Loss:0.69813, Val MRR:0.0, wikidata12k_0.00003

4918
log/wikidata12k_0.001 Normal file

File diff suppressed because it is too large

15357
log/wikidata12k_1n Normal file

File diff suppressed because it is too large

11565
log/wikidata12k_both Normal file

File diff suppressed because it is too large

9241
log/yago11k Normal file

File diff suppressed because it is too large

9654
log/yago11k_0.00003 Normal file

File diff suppressed because it is too large

9599
log/yago11k_0.0003 Normal file

File diff suppressed because it is too large

7233
log/yago11k_0.001 Normal file

File diff suppressed because it is too large

18847
log/yago11k_0.001.log Normal file

File diff suppressed because it is too large

9169
log/yago11k_both Normal file

File diff suppressed because it is too large

9162
log/yago11k_both_0.001 Normal file

File diff suppressed because it is too large

112
main.py

@ -3,9 +3,12 @@ import uuid
import argparse
import logging
import logging.config
import pandas as pd
import sys
import torch
import numpy as np
import time
from collections import defaultdict as ddict
from pprint import pprint
@ -17,11 +20,12 @@ from data_loader import TrainDataset, TestDataset
from utils import get_logger, get_combined_results, set_gpu, prepare_env, set_seed
from models import ComplEx, ConvE, HypER, InteractE, FouriER, TuckER
import traceback
class Main(object):
def __init__(self, params):
def __init__(self, params, logger):
"""
Constructor of the runner class
Parameters
@ -34,11 +38,9 @@ class Main(object):
"""
self.p = params
self.logger = get_logger(
self.p.name, self.p.log_dir, self.p.config_dir)
self.logger = logger
self.logger.info(vars(self.p))
pprint(vars(self.p))
if self.p.gpu != '-1' and torch.cuda.is_available():
self.device = torch.device('cuda')
@ -76,22 +78,24 @@ class Main(object):
ent_set, rel_set = OrderedSet(), OrderedSet()
for split in ['train', 'test', 'valid']:
for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
sub, rel, obj = map(str.lower, line.strip().split('\t'))
sub, rel, obj, *_ = map(str.lower, line.strip().split('\t'))
ent_set.add(sub)
rel_set.add(rel)
ent_set.add(obj)
self.ent2id = {}
for line in open('./data/{}/{}'.format(self.p.dataset, "entities.dict")):
id, ent = map(str.lower, line.strip().split('\t'))
id, ent = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
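            # the replace('\xa0', '') strips non-breaking spaces that survive
            # strip() and would otherwise corrupt the tab-separated split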
self.ent2id[ent] = int(id)
self.rel2id = {}
for line in open('./data/{}/{}'.format(self.p.dataset, "relations.dict")):
id, rel = map(str.lower, line.strip().split('\t'))
self.rel2id[rel] = int(id)
rel_set.add(rel)
# self.ent2id = {ent: idx for idx, ent in enumerate(ent_set)}
# self.rel2id = {rel: idx for idx, rel in enumerate(rel_set)}
self.rel2id.update({rel+'_reverse': idx+len(self.rel2id)
for idx, rel in enumerate(rel_set)})
@ -108,48 +112,49 @@ class Main(object):
for split in ['train', 'test', 'valid']:
for line in open('./data/{}/{}.txt'.format(self.p.dataset, split)):
sub, rel, obj = map(str.lower, line.strip().split('\t'))
sub, rel, obj = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj]
self.data[split].append((sub, rel, obj))
sub, rel, obj, *_ = map(str.lower, line.replace('\xa0', '').strip().split('\t'))
nt_rel = rel.split('[')[0]
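                # nt_rel (presumably the non-temporal base relation): rel with its
                # bracketed qualifier removed, so it can be looked up in rel2id on its own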
sub, rel, obj, nt_rel = self.ent2id[sub], self.rel2id[rel], self.ent2id[obj], self.rel2id[nt_rel]
self.data[split].append((sub, rel, obj, nt_rel))
if split == 'train':
sr2o[(sub, rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel)].add(sub)
sr2o[(sub, rel, nt_rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel, nt_rel + self.p.num_rel)].add(sub)
self.data = dict(self.data)
self.sr2o = {k: list(v) for k, v in sr2o.items()}
for split in ['test', 'valid']:
for sub, rel, obj in self.data[split]:
sr2o[(sub, rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel)].add(sub)
for sub, rel, obj, nt_rel in self.data[split]:
sr2o[(sub, rel, nt_rel)].add(obj)
sr2o[(obj, rel+self.p.num_rel, nt_rel + self.p.num_rel)].add(sub)
self.sr2o_all = {k: list(v) for k, v in sr2o.items()}
self.triples = ddict(list)
if self.p.train_strategy == 'one_to_n':
for (sub, rel), obj in self.sr2o.items():
for (sub, rel, nt_rel), obj in self.sr2o.items():
self.triples['train'].append(
{'triple': (sub, rel, -1), 'label': self.sr2o[(sub, rel)], 'sub_samp': 1})
{'triple': (sub, rel, -1, nt_rel), 'label': self.sr2o[(sub, rel, nt_rel)], 'sub_samp': 1})
else:
for sub, rel, obj in self.data['train']:
for sub, rel, obj, nt_rel in self.data['train']:
rel_inv = rel + self.p.num_rel
sub_samp = len(self.sr2o[(sub, rel)]) + \
len(self.sr2o[(obj, rel_inv)])
sub_samp = len(self.sr2o[(sub, rel, nt_rel)]) + \
len(self.sr2o[(obj, rel_inv, nt_rel + self.p.num_rel)])
sub_samp = np.sqrt(1/sub_samp)
self.triples['train'].append({'triple': (
sub, rel, obj), 'label': self.sr2o[(sub, rel)], 'sub_samp': sub_samp})
sub, rel, obj, nt_rel), 'label': self.sr2o[(sub, rel, nt_rel)], 'sub_samp': sub_samp})
self.triples['train'].append({'triple': (
obj, rel_inv, sub), 'label': self.sr2o[(obj, rel_inv)], 'sub_samp': sub_samp})
obj, rel_inv, sub, nt_rel + self.p.num_rel), 'label': self.sr2o[(obj, rel_inv, nt_rel + self.p.num_rel)], 'sub_samp': sub_samp})
for split in ['test', 'valid']:
for sub, rel, obj in self.data[split]:
for sub, rel, obj, nt_rel in self.data[split]:
rel_inv = rel + self.p.num_rel
self.triples['{}_{}'.format(split, 'tail')].append(
{'triple': (sub, rel, obj), 'label': self.sr2o_all[(sub, rel)]})
{'triple': (sub, rel, obj, nt_rel), 'label': self.sr2o_all[(sub, rel, nt_rel)]})
self.triples['{}_{}'.format(split, 'head')].append(
{'triple': (obj, rel_inv, sub), 'label': self.sr2o_all[(obj, rel_inv)]})
{'triple': (obj, rel_inv, sub, nt_rel + self.p.num_rel), 'label': self.sr2o_all[(obj, rel_inv, nt_rel + self.p.num_rel)]})
self.triples = dict(self.triples)
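        # Worked example of the parsing above; the bracketed-qualifier format is
        # inferred from rel.split('[')[0] and the concrete strings are made up:
        #   line   = "e1\tmake statement[2014-06-18]\te2"
        #   rel    = "make statement[2014-06-18]"
        #   nt_rel = "make statement"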
@ -273,13 +278,13 @@ class Main(object):
if self.p.train_strategy == 'one_to_x':
triple, label, neg_ent, sub_samp = [
_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label, neg_ent, sub_samp
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label, neg_ent, sub_samp
else:
triple, label = [_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label, None, None
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label, None, None
else:
triple, label = [_.to(self.device) for _ in batch]
return triple[:, 0], triple[:, 1], triple[:, 2], label
return triple[:, 0], triple[:, 1], triple[:, 2], triple[:, 3], label
def save_model(self, save_path):
"""
@ -406,16 +411,35 @@ class Main(object):
train_iter = iter(
self.data_iter['{}_{}'.format(split, mode.split('_')[0])])
sub_all = []
obj_all = []
rel_all = []
target_score = []
target_rank = []
obj_pred = []
obj_pred_score = []
for step, batch in enumerate(train_iter):
sub, rel, obj, label = self.read_batch(batch, split)
pred = self.model.forward(sub, rel, None, 'one_to_n')
sub, rel, obj, nt_rel, label = self.read_batch(batch, split)
pred = self.model.forward(sub, rel, nt_rel, None, 'one_to_n')
b_range = torch.arange(pred.size()[0], device=self.device)
target_pred = pred[b_range, obj]
pred = torch.where(label.byte(), torch.zeros_like(pred), pred)
pred[b_range, obj] = target_pred
highest = torch.argsort(pred, dim=1, descending=True)[:,0]
highest_score = pred[b_range, highest]
ranks = 1 + torch.argsort(torch.argsort(pred, dim=1,
descending=True), dim=1, descending=False)[b_range, obj]
sub_all.extend(sub.cpu().numpy())
obj_all.extend(obj.cpu().numpy())
rel_all.extend(rel.cpu().numpy())
target_score.extend(target_pred.cpu().numpy())
target_rank.extend(ranks.cpu().numpy())
obj_pred.extend(highest.cpu().numpy())
obj_pred_score.extend(highest_score.cpu().numpy())
ranks = ranks.float()
results['count'] = torch.numel(
ranks) + results.get('count', 0.0)
@ -430,7 +454,8 @@ class Main(object):
if step % 100 == 0:
self.logger.info('[{}, {} Step {}]\t{}'.format(
split.title(), mode.title(), step, self.p.name))
df = pd.DataFrame({"sub":sub_all,"rel":rel_all,"obj":obj_all, "rank": target_rank,"score":target_score, "pred":obj_pred,"pred_score":obj_pred_score})
df.to_csv(f"{self.p.name}_result.csv",header=True, index=False)
return results
def run_epoch(self, epoch):
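    # Aside: the double argsort in evaluate() above turns scores into ranks.
    # A minimal check with dummy scores:
    #   pred  = torch.tensor([[0.1, 0.9, 0.4]])
    #   order = torch.argsort(pred, dim=1, descending=True)   # [[1, 2, 0]]
    #   ranks = 1 + torch.argsort(order, dim=1)                # [[3, 1, 2]]
    # so the top-scoring entity (index 1) gets rank 1, and the target's filtered
    # rank is read off with ranks[b_range, obj].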
@ -452,10 +477,10 @@ class Main(object):
for step, batch in enumerate(train_iter):
self.optimizer.zero_grad()
sub, rel, obj, label, neg_ent, sub_samp = self.read_batch(
sub, rel, obj, nt_rel, label, neg_ent, sub_samp = self.read_batch(
batch, 'train')
pred = self.model.forward(sub, rel, neg_ent, self.p.train_strategy)
pred = self.model.forward(sub, rel, nt_rel, neg_ent, self.p.train_strategy)
loss = self.model.loss(pred, label, sub_samp)
loss.backward()
@ -634,9 +659,10 @@ if __name__ == "__main__":
set_gpu(args.gpu)
set_seed(args.seed)
model = Main(args)
if (args.grid_search):
model = Main(args)
from sklearn.model_selection import GridSearchCV
from skorch import NeuralNet
@ -667,7 +693,7 @@ if __name__ == "__main__":
collate_fn=TrainDataset.collate_fn
))
for step, batch in enumerate(dataloader):
sub, rel, obj, label, neg_ent, sub_samp = model.read_batch(
sub, rel, obj, nt_rel, label, neg_ent, sub_samp = model.read_batch(
batch, 'train')
if (neg_ent is None):
@ -685,9 +711,27 @@ if __name__ == "__main__":
search = grid.fit(inputs, label)
print("BEST SCORE: ", search.best_score_)
print("BEST PARAMS: ", search.best_params_)
logger = get_logger(
args.name, args.log_dir, args.config_dir)
if (args.test_only):
model = Main(args, logger)
save_path = os.path.join('./torch_saved', args.name)
model.load_model(save_path)
model.evaluate('test')
else:
model = Main(args, logger)
model.fit()
# while True:
# try:
# model = Main(args, logger)
# model.fit()
# except Exception as e:
# print(e)
# traceback.print_exc()
# try:
# del model
# except Exception:
# pass
# time.sleep(30)
# continue
# break

201
models.py

@ -9,7 +9,7 @@ from layers import *
from timm.data import IMAGENET_DEFAULT_MEAN, IMAGENET_DEFAULT_STD
from timm.models.layers import DropPath, trunc_normal_
from timm.models.registry import register_model
from timm.models.layers.helpers import to_2tuple
from timm.layers.helpers import to_2tuple
class ConvE(torch.nn.Module):
@ -466,6 +466,10 @@ class FouriER(torch.nn.Module):
self.p.ent_vec_dim, image_h*image_w)
torch.nn.init.xavier_normal_(self.ent_fusion.weight)
self.ent_attn = torch.nn.Linear(
128, 128)
torch.nn.init.xavier_normal_(self.ent_attn.weight)
self.rel_fusion = torch.nn.Linear(
self.p.rel_vec_dim, image_h*image_w)
torch.nn.init.xavier_normal_(self.rel_fusion.weight)
@ -548,7 +552,14 @@ class FouriER(torch.nn.Module):
# output only the features of last layer for image classification
return x
def forward(self, sub, rel, neg_ents, strategy='one_to_x'):
def fuse_attention(self, s_embedding, l_embedding):
w1 = self.ent_attn(torch.tanh(s_embedding))
w2 = self.ent_attn(torch.tanh(l_embedding))
aff = F.softmax(torch.cat((w1,w2),1), 1)
en_embedding = aff[:,0].unsqueeze(1) * s_embedding + aff[:, 1].unsqueeze(1) * l_embedding
return en_embedding
def forward(self, sub, rel, nt_rel, neg_ents, strategy='one_to_x'):
sub_emb = self.ent_fusion(self.ent_embed(sub))
rel_emb = self.rel_fusion(self.rel_embed(rel))
comb_emb = torch.stack([sub_emb.view(-1, self.p.image_h, self.p.image_w), rel_emb.view(-1, self.p.image_h, self.p.image_w)], dim=1)
@ -557,6 +568,17 @@ class FouriER(torch.nn.Module):
z = self.forward_embeddings(y)
z = self.forward_tokens(z)
z = z.mean([-2, -1])
nt_rel_emb = self.rel_fusion(self.rel_embed(nt_rel))
comb_emb_1 = torch.stack([sub_emb.view(-1, self.p.image_h, self.p.image_w), nt_rel_emb.view(-1, self.p.image_h, self.p.image_w)], dim=1)
y_1 = comb_emb_1.view(-1, 2, self.p.image_h, self.p.image_w)
y_1 = self.bn0(y_1)
z_1 = self.forward_embeddings(y_1)
z_1 = self.forward_tokens(z_1)
z_1 = z_1.mean([-2, -1])
z = self.fuse_attention(z, z_1)
z = self.norm(z)
x = self.head(z)
x = self.hidden_drop(x)
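        # Shape sketch of fuse_attention above (dummy tensors; 128 matches the
        # width of the ent_attn layer):
        #   z, z_1 : (B, 128) outputs of the (sub, rel) and (sub, nt_rel) branches
        #   w1, w2 = ent_attn(tanh(z)), ent_attn(tanh(z_1))   # each (B, 128)
        #   aff    = softmax(cat(w1, w2), dim=1)              # (B, 256); note that
        #   only columns 0 and 1 are used as the two mixing weights
        #   fused  = aff[:, 0:1] * z + aff[:, 1:2] * z_1      # (B, 128)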
@ -707,6 +729,166 @@ def basic_blocks(dim, index, layers,
return blocks
def window_partition(x, window_size):
    """
    Args:
        x: (B, C, H, W)
        window_size (int): window size
    Returns:
        windows: (num_windows*B, window_size, window_size, C)
    """
    B, C, H, W = x.shape
    # move channels last before carving out windows, so the views below group
    # spatially contiguous pixels rather than slices of the channel axis
    x = x.permute(0, 2, 3, 1).contiguous()
    x = x.view(B, H // window_size, window_size, W // window_size, window_size, C)
    windows = x.permute(0, 1, 3, 2, 4, 5).contiguous().view(-1, window_size, window_size, C)
    return windows
class WindowAttention(nn.Module):
r""" Window based multi-head self attention (W-MSA) module with relative position bias.
It supports both of shifted and non-shifted window.
Args:
dim (int): Number of input channels.
window_size (tuple[int]): The height and width of the window.
num_heads (int): Number of attention heads.
qkv_bias (bool, optional): If True, add a learnable bias to query, key, value. Default: True
attn_drop (float, optional): Dropout ratio of attention weight. Default: 0.0
proj_drop (float, optional): Dropout ratio of output. Default: 0.0
pretrained_window_size (tuple[int]): The height and width of the window in pre-training.
"""
def __init__(self, dim, window_size, num_heads, qkv_bias=True, attn_drop=0., proj_drop=0.,
pretrained_window_size=[0, 0]):
super().__init__()
self.dim = dim
self.window_size = window_size # Wh, Ww
self.pretrained_window_size = pretrained_window_size
self.num_heads = num_heads
self.logit_scale = nn.Parameter(torch.log(10 * torch.ones((num_heads, 1, 1))), requires_grad=True)
# mlp to generate continuous relative position bias
self.cpb_mlp = nn.Sequential(nn.Linear(2, 512, bias=True),
nn.ReLU(inplace=True),
nn.Linear(512, num_heads, bias=False))
# get relative_coords_table
relative_coords_h = torch.arange(-(self.window_size[0] - 1), self.window_size[0], dtype=torch.float32)
relative_coords_w = torch.arange(-(self.window_size[1] - 1), self.window_size[1], dtype=torch.float32)
relative_coords_table = torch.stack(
torch.meshgrid([relative_coords_h,
relative_coords_w])).permute(1, 2, 0).contiguous().unsqueeze(0) # 1, 2*Wh-1, 2*Ww-1, 2
if pretrained_window_size[0] > 0:
relative_coords_table[:, :, :, 0] /= (pretrained_window_size[0] - 1)
relative_coords_table[:, :, :, 1] /= (pretrained_window_size[1] - 1)
else:
relative_coords_table[:, :, :, 0] /= (self.window_size[0] - 1)
relative_coords_table[:, :, :, 1] /= (self.window_size[1] - 1)
relative_coords_table *= 8 # normalize to -8, 8
relative_coords_table = torch.sign(relative_coords_table) * torch.log2(
torch.abs(relative_coords_table) + 1.0) / np.log2(8)
self.register_buffer("relative_coords_table", relative_coords_table)
# get pair-wise relative position index for each token inside the window
coords_h = torch.arange(self.window_size[0])
coords_w = torch.arange(self.window_size[1])
coords = torch.stack(torch.meshgrid([coords_h, coords_w])) # 2, Wh, Ww
coords_flatten = torch.flatten(coords, 1) # 2, Wh*Ww
relative_coords = coords_flatten[:, :, None] - coords_flatten[:, None, :] # 2, Wh*Ww, Wh*Ww
relative_coords = relative_coords.permute(1, 2, 0).contiguous() # Wh*Ww, Wh*Ww, 2
relative_coords[:, :, 0] += self.window_size[0] - 1 # shift to start from 0
relative_coords[:, :, 1] += self.window_size[1] - 1
relative_coords[:, :, 0] *= 2 * self.window_size[1] - 1
relative_position_index = relative_coords.sum(-1) # Wh*Ww, Wh*Ww
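        # e.g. for a 2x2 window this index table is 4x4 with values in [0, 9),
        # one bin per relative (dy, dx) offset in {-1, 0, 1}^2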
self.register_buffer("relative_position_index", relative_position_index)
self.qkv = nn.Linear(dim, dim * 3, bias=False)
if qkv_bias:
self.q_bias = nn.Parameter(torch.zeros(dim))
self.v_bias = nn.Parameter(torch.zeros(dim))
else:
self.q_bias = None
self.v_bias = None
self.attn_drop = nn.Dropout(attn_drop)
self.proj = nn.Linear(dim, dim)
self.proj_drop = nn.Dropout(proj_drop)
self.softmax = nn.Softmax(dim=-1)
def forward(self, x, mask=None):
"""
Args:
x: input features with shape of (num_windows*B, N, C)
mask: (0/-inf) mask with shape of (num_windows, Wh*Ww, Wh*Ww) or None
"""
B_, N, C = x.shape
qkv_bias = None
if self.q_bias is not None:
qkv_bias = torch.cat((self.q_bias, torch.zeros_like(self.v_bias, requires_grad=False), self.v_bias))
qkv = F.linear(input=x, weight=self.qkv.weight, bias=qkv_bias)
qkv = qkv.reshape(B_, N, 3, self.num_heads, -1).permute(2, 0, 3, 1, 4)
q, k, v = qkv[0], qkv[1], qkv[2] # make torchscript happy (cannot use tensor as tuple)
# cosine attention
attn = (F.normalize(q, dim=-1) @ F.normalize(k, dim=-1).transpose(-2, -1))
logit_scale = torch.clamp(self.logit_scale, max=torch.log(torch.tensor(1. / 0.01)).cuda()).exp()
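        # note: the .cuda() above hard-codes GPU execution; this line would fail on CPU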
attn = attn * logit_scale
relative_position_bias_table = self.cpb_mlp(self.relative_coords_table).view(-1, self.num_heads)
relative_position_bias = relative_position_bias_table[self.relative_position_index.view(-1)].view(
self.window_size[0] * self.window_size[1], self.window_size[0] * self.window_size[1], -1) # Wh*Ww,Wh*Ww,nH
relative_position_bias = relative_position_bias.permute(2, 0, 1).contiguous() # nH, Wh*Ww, Wh*Ww
relative_position_bias = 16 * torch.sigmoid(relative_position_bias)
attn = attn + relative_position_bias.unsqueeze(0)
if mask is not None:
nW = mask.shape[0]
attn = attn.view(B_ // nW, nW, self.num_heads, N, N) + mask.unsqueeze(1).unsqueeze(0)
attn = attn.view(-1, self.num_heads, N, N)
attn = self.softmax(attn)
else:
attn = self.softmax(attn)
attn = self.attn_drop(attn)
x = (attn @ v).transpose(1, 2).reshape(B_, N, C)
x = self.proj(x)
x = self.proj_drop(x)
return x
def extra_repr(self) -> str:
return f'dim={self.dim}, window_size={self.window_size}, ' \
f'pretrained_window_size={self.pretrained_window_size}, num_heads={self.num_heads}'
def flops(self, N):
# calculate flops for 1 window with token length of N
flops = 0
# qkv = self.qkv(x)
flops += N * self.dim * 3 * self.dim
# attn = (q @ k.transpose(-2, -1))
flops += self.num_heads * N * (self.dim // self.num_heads) * N
# x = (attn @ v)
flops += self.num_heads * N * N * (self.dim // self.num_heads)
# x = self.proj(x)
flops += N * self.dim * self.dim
return flops
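# Smoke test for WindowAttention (illustrative sizes; needs a CUDA device
# because of the .cuda() call in forward):
#   attn = WindowAttention(dim=64, window_size=to_2tuple(4), num_heads=4).cuda()
#   windows = torch.randn(8, 16, 64).cuda()   # (num_windows*B, window_size**2, C)
#   out = attn(windows)                       # same shape back: (8, 16, 64)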
def window_reverse(windows, window_size, H, W):
    """
    Args:
        windows: (num_windows*B, window_size, window_size, C)
        window_size (int): Window size
        H (int): Height of image
        W (int): Width of image
    Returns:
        x: (B, C, H, W)
    """
    B = int(windows.shape[0] / (H * W / window_size / window_size))
    x = windows.view(B, H // window_size, W // window_size, window_size, window_size, -1)
    # permute to (B, C, H//ws, ws, W//ws, ws) so the final view restores (B, C, H, W),
    # the exact inverse of window_partition above
    x = x.permute(0, 5, 1, 3, 2, 4).contiguous().view(B, -1, H, W)
    return x
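# Round-trip check for the two helpers above (H and W must be divisible by
# window_size; equality is exact since both are pure permute/view ops):
#   x = torch.randn(2, 64, 16, 16)            # (B, C, H, W)
#   back = window_reverse(window_partition(x, 4), 4, 16, 16)
#   assert torch.equal(back, x)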
class PoolFormerBlock(nn.Module):
"""
@ -731,7 +913,10 @@ class PoolFormerBlock(nn.Module):
self.norm1 = norm_layer(dim)
#self.token_mixer = Pooling(pool_size=pool_size)
self.token_mixer = FNetBlock()
# self.token_mixer = FNetBlock()
self.window_size = 4
self.attn_mask = None
self.token_mixer = WindowAttention(dim=dim, window_size=to_2tuple(self.window_size), num_heads=4)
self.norm2 = norm_layer(dim)
mlp_hidden_dim = int(dim * mlp_ratio)
self.mlp = Mlp(in_features=dim, hidden_features=mlp_hidden_dim,
@ -748,15 +933,21 @@ class PoolFormerBlock(nn.Module):
layer_scale_init_value * torch.ones((dim)), requires_grad=True)
def forward(self, x):
B, C, H, W = x.shape
x_windows = window_partition(x, self.window_size)
x_windows = x_windows.view(-1, self.window_size * self.window_size, C)
attn_windows = self.token_mixer(x_windows, mask=self.attn_mask)
attn_windows = attn_windows.view(-1, self.window_size, self.window_size, C)
x_attn = window_reverse(attn_windows, self.window_size, H, W)
if self.use_layer_scale:
x = x + self.drop_path(
self.layer_scale_1.unsqueeze(-1).unsqueeze(-1)
* self.token_mixer(self.norm1(x)))
* x_attn)
x = x + self.drop_path(
self.layer_scale_2.unsqueeze(-1).unsqueeze(-1)
* self.mlp(self.norm2(x)))
else:
x = x + self.drop_path(self.token_mixer(self.norm1(x)))
x = x + self.drop_path(x_attn)
x = x + self.drop_path(self.mlp(self.norm2(x)))
return x
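# Net effect of the PoolFormerBlock changes: the FNet Fourier token mixer is
# swapped for SwinV2-style windowed attention over fixed 4x4 windows, while the
# MLP branch and the layer-scale residual structure are left unchanged.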
class PatchEmbed(nn.Module):

requirements.txt

@ -2,3 +2,5 @@ torch==1.12.1+cu116
ordered-set==4.1.0
numpy==1.21.5
einops==0.4.1
pandas
timm==0.9.16

17
run.sh

@ -24,3 +24,20 @@ PID: 4503
test: testrun_d542676f
---
nohup python main.py --gpu 3 --data WN18RR --drop 0.0 --drop_path 0.0 >run_log/fnet-wn.log 2>&1 &
---
nohup python main.py --name ice0003 --lr 0.0003 --data icews14 --gpu 1 >run_log/ice0003.log 2>&1 &
PID: 3076
tail -f -n 200 run_log/ice0003.log
---
nohup python main.py --name ice0003_2 --lr 0.00003 --data icews14 --gpu 3 >run_log/ice0003_2.log 2>&1 &
PID: 3390
tail -f -n 200 run_log/ice0003_2.log
---
nohup python main.py --name ice00001 --lr 0.00001 --data icews14 --gpu 2 >run_log/ice00001.log 2>&1 &
PID:
---
nohup python main.py --name ice001 --lr 0.001 --data icews14 --gpu 3 >run_log/0.001.log 2>&1 &
---
nohup python main.py --name iceboth --data icews14_both --gpu 0 >run_log/iceboth.log 2>&1 &
PID: 21984

74
visualization_util.py Normal file

@ -0,0 +1,74 @@
import argparse
import re
import os
import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime
def extract_learning_curves(args):
paths = args.log_path.split(',')
if len(paths) == 1 and os.path.isdir(paths[0]):
paths = [os.path.join(paths[0], f) for f in os.listdir(paths[0]) if os.path.isfile(os.path.join(paths[0], f))]
learning_curves = {}
print(paths)
for path in paths:
print(path)
learning_curve = []
lines = open(path, 'r').readlines()
last_epoch = -1
stacked_epoch = -1
max_epoch = -1
for line in lines:
matched = re.match(r'[0-9\- :,]*\[INFO\] - \[Epoch ([0-9]+)\].*Valid MRR: ([0-9\.]+).*', line)
# matched = re.match(r'\tMRR: Tail : [0-9\.]+, Head : [0-9\.]+, Avg : ([0-9\.]+)', line)
if matched:
this_epoch = int(matched.group(1))
if (this_epoch > max_epoch):
learning_curve.append(float(matched.group(2)))
max_epoch = this_epoch
stacked_epoch = this_epoch
elif (this_epoch < max_epoch and this_epoch > last_epoch):
last_epoch = this_epoch
max_epoch = stacked_epoch + 1 + this_epoch
learning_curve.append(float(matched.group(2)))
if max_epoch >= args.num_epochs:
break
# if matched:
# max_epoch += 1
# learning_curve.append(float(matched.group(1)))
# if max_epoch >= args.num_epochs:
# break
while len(learning_curve) < args.num_epochs:
learning_curve.append(learning_curve[-1])
learning_curves[os.path.basename(path)] = learning_curve
return learning_curves
def draw_learning_curves(args, learning_curves):
for name in learning_curves.keys():
epochs = np.arange(len(learning_curves[name]))
matched = re.match(r'(.*)\..*', name)
if matched:
label = matched.group(1)
else:
label = name
plt.plot(epochs, learning_curves[name], label = label)
plt.xlabel("Epochs")
plt.ylabel("Best Valid MRR")
plt.legend(title=args.legend_title)
plt.savefig(os.path.join(args.out_path, str(round(datetime.utcnow().timestamp() * 1000)) + '.' + args.fig_filetype))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Parser For Arguments", formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--task', default = None, type=str)
parser.add_argument('--log_path', type=str, default=None)
parser.add_argument('--out_path', type=str, default=None)
parser.add_argument('--num_epochs', type=int, default=200)
parser.add_argument('--legend_title', type=str, default="Learning rate")
parser.add_argument('--fig_filetype', type=str, default="svg")
args = parser.parse_args()
if (args.task == 'learning_curve'):
draw_learning_curves(args, extract_learning_curves(args))
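# Example invocation (paths are illustrative):
#   python visualization_util.py --task learning_curve --log_path ./log --out_path . --num_epochs 500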

1072
wikidata12k.log Normal file

File diff suppressed because it is too large

15209
wikidata12k_1n.out Normal file

File diff suppressed because it is too large

9207
yago11k.out Normal file

File diff suppressed because it is too large