diff --git a/data/TUDataset/MUTAG/processed/data.pt b/data/TUDataset/MUTAG/processed/data.pt
deleted file mode 100644
index c9891b3..0000000
Binary files a/data/TUDataset/MUTAG/processed/data.pt and /dev/null differ
diff --git a/data/TUDataset/MUTAG/processed/pre_filter.pt b/data/TUDataset/MUTAG/processed/pre_filter.pt
deleted file mode 100644
index cf16036..0000000
Binary files a/data/TUDataset/MUTAG/processed/pre_filter.pt and /dev/null differ
diff --git a/data/TUDataset/MUTAG/processed/pre_transform.pt b/data/TUDataset/MUTAG/processed/pre_transform.pt
deleted file mode 100644
index cf16036..0000000
Binary files a/data/TUDataset/MUTAG/processed/pre_transform.pt and /dev/null differ
diff --git a/data/TUDataset/MUTAG/raw/MUTAG_A.txt b/data/TUDataset/MUTAG/raw/MUTAG_A.txt
deleted file mode 100644
index b5cabc1..0000000
--- a/data/TUDataset/MUTAG/raw/MUTAG_A.txt
+++ /dev/null
@@ -1,7442 +0,0 @@
-2, 1
-1, 2
-3, 2
-2, 3
-4, 3
-3, 4
[... remaining deleted lines of MUTAG_A.txt omitted: the file is 7,442 lines of "row, col" node-index pairs forming the MUTAG adjacency list, and the hunk is truncated here ...]
-2699, 2698 -2698, 2699 -2700, 2699 -2699, 2700 -2701, 2700 -2700, 2701 -2702, 2701 -2701, 2702 -2702, 2697 -2697, 2702 -2703, 2701 -2701, 2703 -2704, 2703 -2703, 2704 -2705, 2704 -2704, 2705 -2706, 2705 -2705, 2706 -2707, 2706 -2706, 2707 -2708, 2707 -2707, 2708 -2708, 2703 -2703, 2708 -2709, 2708 -2708, 2709 -2709, 2700 -2700, 2709 -2710, 2709 -2709, 2710 -2711, 2710 -2710, 2711 -2712, 2711 -2711, 2712 -2712, 2707 -2707, 2712 -2713, 2711 -2711, 2713 -2714, 2713 -2713, 2714 -2715, 2713 -2713, 2715 -2716, 2705 -2705, 2716 -2717, 2716 -2716, 2717 -2718, 2716 -2716, 2718 -2720, 2719 -2719, 2720 -2721, 2720 -2720, 2721 -2722, 2721 -2721, 2722 -2723, 2722 -2722, 2723 -2724, 2723 -2723, 2724 -2725, 2724 -2724, 2725 -2726, 2725 -2725, 2726 -2727, 2726 -2726, 2727 -2727, 2722 -2722, 2727 -2728, 2727 -2727, 2728 -2728, 2719 -2719, 2728 -2729, 2726 -2726, 2729 -2730, 2729 -2729, 2730 -2731, 2729 -2729, 2731 -2732, 2723 -2723, 2732 -2733, 2732 -2732, 2733 -2733, 2721 -2721, 2733 -2735, 2734 -2734, 2735 -2736, 2735 -2735, 2736 -2737, 2736 -2736, 2737 -2738, 2737 -2737, 2738 -2739, 2738 -2738, 2739 -2740, 2739 -2739, 2740 -2740, 2736 -2736, 2740 -2741, 2739 -2739, 2741 -2742, 2741 -2741, 2742 -2743, 2741 -2741, 2743 -2744, 2737 -2737, 2744 -2745, 2744 -2744, 2745 -2745, 2734 -2734, 2745 -2747, 2746 -2746, 2747 -2748, 2747 -2747, 2748 -2749, 2748 -2748, 2749 -2750, 2749 -2749, 2750 -2751, 2750 -2750, 2751 -2752, 2751 -2751, 2752 -2753, 2752 -2752, 2753 -2753, 2748 -2748, 2753 -2754, 2753 -2753, 2754 -2755, 2754 -2754, 2755 -2756, 2754 -2754, 2756 -2757, 2751 -2751, 2757 -2758, 2757 -2757, 2758 -2759, 2757 -2757, 2759 -2761, 2760 -2760, 2761 -2762, 2761 -2761, 2762 -2763, 2762 -2762, 2763 -2764, 2763 -2763, 2764 -2765, 2764 -2764, 2765 -2765, 2760 -2760, 2765 -2766, 2765 -2765, 2766 -2767, 2766 -2766, 2767 -2768, 2766 -2766, 2768 -2769, 2763 -2763, 2769 -2770, 2769 -2769, 2770 -2771, 2770 -2770, 2771 -2771, 2762 -2762, 2771 -2773, 2772 -2772, 2773 -2774, 2773 -2773, 2774 -2775, 2774 -2774, 2775 -2776, 2775 -2775, 2776 -2777, 2776 -2776, 2777 -2778, 2777 -2777, 2778 -2779, 2778 -2778, 2779 -2779, 2774 -2774, 2779 -2780, 2779 -2779, 2780 -2781, 2780 -2780, 2781 -2781, 2772 -2772, 2781 -2782, 2781 -2781, 2782 -2783, 2782 -2782, 2783 -2784, 2783 -2783, 2784 -2785, 2784 -2784, 2785 -2785, 2780 -2780, 2785 -2786, 2785 -2785, 2786 -2787, 2786 -2786, 2787 -2787, 2778 -2778, 2787 -2788, 2784 -2784, 2788 -2789, 2775 -2775, 2789 -2790, 2789 -2789, 2790 -2791, 2789 -2789, 2791 -2793, 2792 -2792, 2793 -2794, 2793 -2793, 2794 -2795, 2793 -2793, 2795 -2796, 2795 -2795, 2796 -2797, 2796 -2796, 2797 -2798, 2797 -2797, 2798 -2799, 2798 -2798, 2799 -2800, 2799 -2799, 2800 -2801, 2800 -2800, 2801 -2801, 2796 -2796, 2801 -2802, 2800 -2800, 2802 -2803, 2802 -2802, 2803 -2804, 2803 -2803, 2804 -2804, 2799 -2799, 2804 -2805, 2804 -2804, 2805 -2806, 2805 -2805, 2806 -2807, 2806 -2806, 2807 -2808, 2807 -2807, 2808 -2808, 2803 -2803, 2808 -2809, 2807 -2807, 2809 -2810, 2809 -2809, 2810 -2811, 2809 -2809, 2811 -2813, 2812 -2812, 2813 -2814, 2813 -2813, 2814 -2815, 2814 -2814, 2815 -2816, 2815 -2815, 2816 -2817, 2816 -2816, 2817 -2818, 2817 -2817, 2818 -2819, 2818 -2818, 2819 -2819, 2814 -2814, 2819 -2820, 2819 -2819, 2820 -2821, 2820 -2820, 2821 -2821, 2812 -2812, 2821 -2822, 2821 -2821, 2822 -2823, 2822 -2822, 2823 -2824, 2823 -2823, 2824 -2825, 2824 -2824, 2825 -2825, 2820 -2820, 2825 -2826, 2825 -2825, 2826 -2827, 2826 -2826, 2827 -2827, 2818 -2818, 2827 -2828, 2824 -2824, 2828 -2829, 2828 -2828, 2829 -2830, 2828 -2828, 2830 
-2831, 2817 -2817, 2831 -2833, 2832 -2832, 2833 -2834, 2833 -2833, 2834 -2835, 2834 -2834, 2835 -2836, 2835 -2835, 2836 -2837, 2836 -2836, 2837 -2837, 2832 -2832, 2837 -2838, 2837 -2837, 2838 -2839, 2838 -2838, 2839 -2840, 2838 -2838, 2840 -2841, 2836 -2836, 2841 -2842, 2835 -2835, 2842 -2843, 2833 -2833, 2843 -2844, 2843 -2843, 2844 -2845, 2843 -2843, 2845 -2847, 2846 -2846, 2847 -2848, 2847 -2847, 2848 -2849, 2848 -2848, 2849 -2850, 2849 -2849, 2850 -2851, 2850 -2850, 2851 -2851, 2846 -2846, 2851 -2852, 2850 -2850, 2852 -2853, 2852 -2852, 2853 -2854, 2853 -2853, 2854 -2855, 2854 -2854, 2855 -2856, 2855 -2855, 2856 -2857, 2856 -2856, 2857 -2857, 2852 -2852, 2857 -2858, 2857 -2857, 2858 -2859, 2858 -2858, 2859 -2859, 2849 -2849, 2859 -2860, 2859 -2859, 2860 -2861, 2860 -2860, 2861 -2862, 2861 -2861, 2862 -2863, 2862 -2862, 2863 -2863, 2858 -2858, 2863 -2864, 2863 -2863, 2864 -2865, 2864 -2864, 2865 -2865, 2856 -2856, 2865 -2866, 2860 -2860, 2866 -2867, 2866 -2866, 2867 -2868, 2866 -2866, 2868 -2869, 2855 -2855, 2869 -2870, 2869 -2869, 2870 -2871, 2869 -2869, 2871 -2873, 2872 -2872, 2873 -2874, 2873 -2873, 2874 -2875, 2874 -2874, 2875 -2876, 2875 -2875, 2876 -2877, 2876 -2876, 2877 -2877, 2872 -2872, 2877 -2878, 2877 -2877, 2878 -2879, 2878 -2878, 2879 -2880, 2879 -2879, 2880 -2881, 2880 -2880, 2881 -2882, 2881 -2881, 2882 -2883, 2882 -2882, 2883 -2883, 2878 -2878, 2883 -2884, 2882 -2882, 2884 -2885, 2884 -2884, 2885 -2886, 2884 -2884, 2886 -2887, 2881 -2881, 2887 -2888, 2887 -2887, 2888 -2889, 2887 -2887, 2889 -2890, 2875 -2875, 2890 -2891, 2890 -2890, 2891 -2892, 2890 -2890, 2892 -2893, 2874 -2874, 2893 -2894, 2893 -2893, 2894 -2895, 2893 -2893, 2895 -2897, 2896 -2896, 2897 -2898, 2897 -2897, 2898 -2899, 2898 -2898, 2899 -2900, 2899 -2899, 2900 -2901, 2900 -2900, 2901 -2902, 2901 -2901, 2902 -2903, 2902 -2902, 2903 -2903, 2898 -2898, 2903 -2904, 2903 -2903, 2904 -2905, 2904 -2904, 2905 -2905, 2896 -2896, 2905 -2906, 2905 -2905, 2906 -2907, 2906 -2906, 2907 -2908, 2907 -2907, 2908 -2909, 2908 -2908, 2909 -2909, 2904 -2904, 2909 -2910, 2909 -2909, 2910 -2911, 2910 -2910, 2911 -2911, 2902 -2902, 2911 -2912, 2907 -2907, 2912 -2913, 2912 -2912, 2913 -2914, 2912 -2912, 2914 -2915, 2900 -2900, 2915 -2916, 2915 -2915, 2916 -2917, 2915 -2915, 2917 -2919, 2918 -2918, 2919 -2920, 2919 -2919, 2920 -2921, 2920 -2920, 2921 -2922, 2921 -2921, 2922 -2923, 2922 -2922, 2923 -2924, 2923 -2923, 2924 -2925, 2924 -2924, 2925 -2926, 2925 -2925, 2926 -2927, 2926 -2926, 2927 -2928, 2927 -2927, 2928 -2928, 2923 -2923, 2928 -2929, 2928 -2928, 2929 -2929, 2920 -2920, 2929 -2930, 2929 -2929, 2930 -2931, 2930 -2930, 2931 -2931, 2918 -2918, 2931 -2932, 2930 -2930, 2932 -2933, 2932 -2932, 2933 -2933, 2927 -2927, 2933 -2934, 2926 -2926, 2934 -2935, 2934 -2934, 2935 -2936, 2934 -2934, 2936 -2937, 2924 -2924, 2937 -2938, 2937 -2937, 2938 -2939, 2937 -2937, 2939 -2941, 2940 -2940, 2941 -2942, 2941 -2941, 2942 -2943, 2942 -2942, 2943 -2944, 2943 -2943, 2944 -2945, 2944 -2944, 2945 -2946, 2945 -2945, 2946 -2947, 2946 -2946, 2947 -2947, 2942 -2942, 2947 -2948, 2947 -2947, 2948 -2949, 2948 -2948, 2949 -2949, 2940 -2940, 2949 -2950, 2948 -2948, 2950 -2951, 2950 -2950, 2951 -2951, 2946 -2946, 2951 -2952, 2951 -2951, 2952 -2953, 2952 -2952, 2953 -2954, 2953 -2953, 2954 -2955, 2954 -2954, 2955 -2955, 2950 -2950, 2955 -2956, 2953 -2953, 2956 -2957, 2956 -2956, 2957 -2958, 2956 -2956, 2958 -2960, 2959 -2959, 2960 -2961, 2960 -2960, 2961 -2962, 2960 -2960, 2962 -2963, 2962 -2962, 2963 -2964, 2963 -2963, 2964 -2965, 2964 -2964, 2965 
-2966, 2965 -2965, 2966 -2967, 2966 -2966, 2967 -2968, 2967 -2967, 2968 -2969, 2968 -2968, 2969 -2970, 2969 -2969, 2970 -2971, 2970 -2970, 2971 -2972, 2971 -2971, 2972 -2973, 2972 -2972, 2973 -2974, 2973 -2973, 2974 -2974, 2969 -2969, 2974 -2975, 2974 -2974, 2975 -2975, 2966 -2966, 2975 -2976, 2975 -2975, 2976 -2976, 2963 -2963, 2976 -2977, 2976 -2976, 2977 -2978, 2977 -2977, 2978 -2978, 2973 -2973, 2978 -2979, 2965 -2965, 2979 -2980, 2979 -2979, 2980 -2981, 2979 -2979, 2981 -2983, 2982 -2982, 2983 -2984, 2983 -2983, 2984 -2985, 2984 -2984, 2985 -2986, 2985 -2985, 2986 -2987, 2986 -2986, 2987 -2988, 2987 -2987, 2988 -2989, 2988 -2988, 2989 -2989, 2984 -2984, 2989 -2990, 2989 -2989, 2990 -2991, 2990 -2990, 2991 -2991, 2982 -2982, 2991 -2992, 2991 -2991, 2992 -2993, 2992 -2992, 2993 -2994, 2993 -2993, 2994 -2995, 2994 -2994, 2995 -2995, 2990 -2990, 2995 -2996, 2995 -2995, 2996 -2997, 2996 -2996, 2997 -2997, 2988 -2988, 2997 -2998, 2994 -2994, 2998 -2999, 2998 -2998, 2999 -3000, 2998 -2998, 3000 -3001, 2985 -2985, 3001 -3002, 3001 -3001, 3002 -3003, 3001 -3001, 3003 -3005, 3004 -3004, 3005 -3006, 3005 -3005, 3006 -3007, 3006 -3006, 3007 -3008, 3007 -3007, 3008 -3009, 3008 -3008, 3009 -3010, 3009 -3009, 3010 -3011, 3010 -3010, 3011 -3011, 3006 -3006, 3011 -3012, 3009 -3009, 3012 -3013, 3012 -3012, 3013 -3014, 3012 -3012, 3014 -3016, 3015 -3015, 3016 -3017, 3016 -3016, 3017 -3018, 3017 -3017, 3018 -3019, 3018 -3018, 3019 -3020, 3019 -3019, 3020 -3020, 3015 -3015, 3020 -3021, 3020 -3020, 3021 -3022, 3021 -3021, 3022 -3023, 3021 -3021, 3023 -3024, 3018 -3018, 3024 -3025, 3024 -3024, 3025 -3026, 3024 -3024, 3026 -3027, 3017 -3017, 3027 -3029, 3028 -3028, 3029 -3030, 3029 -3029, 3030 -3031, 3030 -3030, 3031 -3032, 3031 -3031, 3032 -3033, 3032 -3032, 3033 -3033, 3028 -3028, 3033 -3034, 3032 -3032, 3034 -3035, 3034 -3034, 3035 -3036, 3035 -3035, 3036 -3037, 3036 -3036, 3037 -3038, 3037 -3037, 3038 -3039, 3038 -3038, 3039 -3040, 3039 -3039, 3040 -3040, 3035 -3035, 3040 -3041, 3040 -3040, 3041 -3041, 3031 -3031, 3041 -3042, 3039 -3039, 3042 -3043, 3042 -3042, 3043 -3044, 3042 -3042, 3044 -3046, 3045 -3045, 3046 -3047, 3046 -3046, 3047 -3048, 3047 -3047, 3048 -3049, 3048 -3048, 3049 -3050, 3049 -3049, 3050 -3050, 3045 -3045, 3050 -3051, 3050 -3050, 3051 -3052, 3051 -3051, 3052 -3053, 3051 -3051, 3053 -3054, 3048 -3048, 3054 -3055, 3054 -3054, 3055 -3056, 3054 -3054, 3056 -3057, 3047 -3047, 3057 -3058, 3057 -3057, 3058 -3059, 3058 -3058, 3059 -3060, 3059 -3059, 3060 -3061, 3060 -3060, 3061 -3062, 3061 -3061, 3062 -3062, 3057 -3057, 3062 -3063, 3062 -3062, 3063 -3064, 3063 -3063, 3064 -3065, 3063 -3063, 3065 -3066, 3060 -3060, 3066 -3067, 3066 -3066, 3067 -3068, 3066 -3066, 3068 -3070, 3069 -3069, 3070 -3071, 3070 -3070, 3071 -3072, 3071 -3071, 3072 -3073, 3072 -3072, 3073 -3074, 3073 -3073, 3074 -3075, 3074 -3074, 3075 -3076, 3075 -3075, 3076 -3077, 3076 -3076, 3077 -3077, 3073 -3073, 3077 -3077, 3069 -3069, 3077 -3078, 3071 -3071, 3078 -3079, 3078 -3078, 3079 -3080, 3078 -3078, 3080 -3082, 3081 -3081, 3082 -3083, 3082 -3082, 3083 -3084, 3083 -3083, 3084 -3085, 3084 -3084, 3085 -3086, 3085 -3085, 3086 -3086, 3081 -3081, 3086 -3087, 3086 -3086, 3087 -3088, 3087 -3087, 3088 -3089, 3088 -3088, 3089 -3090, 3089 -3089, 3090 -3091, 3090 -3090, 3091 -3092, 3091 -3091, 3092 -3092, 3087 -3087, 3092 -3093, 3091 -3091, 3093 -3094, 3093 -3093, 3094 -3095, 3093 -3093, 3095 -3096, 3090 -3090, 3096 -3097, 3096 -3096, 3097 -3098, 3096 -3096, 3098 -3099, 3083 -3083, 3099 -3100, 3099 -3099, 3100 -3101, 3099 -3099, 3101 
-3103, 3102 -3102, 3103 -3104, 3103 -3103, 3104 -3105, 3104 -3104, 3105 -3106, 3105 -3105, 3106 -3107, 3106 -3106, 3107 -3108, 3107 -3107, 3108 -3109, 3108 -3108, 3109 -3109, 3104 -3104, 3109 -3110, 3109 -3109, 3110 -3111, 3110 -3110, 3111 -3111, 3102 -3102, 3111 -3112, 3110 -3110, 3112 -3113, 3112 -3112, 3113 -3113, 3108 -3108, 3113 -3114, 3113 -3113, 3114 -3115, 3114 -3114, 3115 -3116, 3115 -3115, 3116 -3117, 3116 -3116, 3117 -3117, 3112 -3112, 3117 -3118, 3115 -3115, 3118 -3119, 3118 -3118, 3119 -3120, 3118 -3118, 3120 -3121, 3105 -3105, 3121 -3122, 3121 -3121, 3122 -3123, 3121 -3121, 3123 -3125, 3124 -3124, 3125 -3126, 3125 -3125, 3126 -3127, 3126 -3126, 3127 -3128, 3127 -3127, 3128 -3129, 3128 -3128, 3129 -3129, 3124 -3124, 3129 -3130, 3129 -3129, 3130 -3131, 3128 -3128, 3131 -3132, 3126 -3126, 3132 -3133, 3132 -3132, 3133 -3134, 3133 -3133, 3134 -3135, 3134 -3134, 3135 -3136, 3135 -3135, 3136 -3137, 3136 -3136, 3137 -3138, 3137 -3137, 3138 -3138, 3133 -3133, 3138 -3139, 3138 -3138, 3139 -3140, 3137 -3137, 3140 -3141, 3136 -3136, 3141 -3142, 3141 -3141, 3142 -3143, 3141 -3141, 3143 -3144, 3135 -3135, 3144 -3145, 3134 -3134, 3145 -3145, 3125 -3125, 3145 -3147, 3146 -3146, 3147 -3148, 3147 -3147, 3148 -3149, 3148 -3148, 3149 -3150, 3149 -3149, 3150 -3151, 3150 -3150, 3151 -3151, 3146 -3146, 3151 -3152, 3151 -3151, 3152 -3153, 3152 -3152, 3153 -3154, 3152 -3152, 3154 -3155, 3149 -3149, 3155 -3156, 3155 -3155, 3156 -3157, 3156 -3156, 3157 -3157, 3148 -3148, 3157 -3159, 3158 -3158, 3159 -3160, 3159 -3159, 3160 -3161, 3160 -3160, 3161 -3162, 3161 -3161, 3162 -3163, 3162 -3162, 3163 -3163, 3158 -3158, 3163 -3164, 3162 -3162, 3164 -3165, 3164 -3164, 3165 -3166, 3164 -3164, 3166 -3167, 3161 -3161, 3167 -3168, 3167 -3167, 3168 -3169, 3168 -3168, 3169 -3170, 3169 -3169, 3170 -3170, 3160 -3160, 3170 -3171, 3169 -3169, 3171 -3172, 3171 -3171, 3172 -3173, 3172 -3172, 3173 -3174, 3173 -3173, 3174 -3174, 3168 -3168, 3174 -3175, 3174 -3174, 3175 -3176, 3175 -3175, 3176 -3177, 3175 -3175, 3177 -3179, 3178 -3178, 3179 -3180, 3179 -3179, 3180 -3181, 3180 -3180, 3181 -3182, 3181 -3181, 3182 -3183, 3182 -3182, 3183 -3183, 3179 -3179, 3183 -3184, 3182 -3182, 3184 -3185, 3184 -3184, 3185 -3186, 3185 -3185, 3186 -3187, 3186 -3186, 3187 -3187, 3181 -3181, 3187 -3188, 3187 -3187, 3188 -3189, 3188 -3188, 3189 -3190, 3188 -3188, 3190 -3192, 3191 -3191, 3192 -3193, 3192 -3192, 3193 -3194, 3193 -3193, 3194 -3195, 3194 -3194, 3195 -3196, 3195 -3195, 3196 -3196, 3191 -3191, 3196 -3197, 3196 -3196, 3197 -3198, 3197 -3197, 3198 -3199, 3198 -3198, 3199 -3200, 3199 -3199, 3200 -3201, 3199 -3199, 3201 -3202, 3201 -3201, 3202 -3203, 3202 -3202, 3203 -3204, 3203 -3203, 3204 -3205, 3204 -3204, 3205 -3206, 3205 -3205, 3206 -3206, 3201 -3201, 3206 -3207, 3204 -3204, 3207 -3208, 3207 -3207, 3208 -3209, 3207 -3207, 3209 -3210, 3193 -3193, 3210 -3211, 3210 -3210, 3211 -3212, 3210 -3210, 3212 -3214, 3213 -3213, 3214 -3215, 3214 -3214, 3215 -3216, 3215 -3215, 3216 -3217, 3216 -3216, 3217 -3218, 3217 -3217, 3218 -3219, 3218 -3218, 3219 -3220, 3219 -3219, 3220 -3221, 3220 -3220, 3221 -3222, 3221 -3221, 3222 -3223, 3222 -3222, 3223 -3223, 3218 -3218, 3223 -3224, 3223 -3223, 3224 -3224, 3215 -3215, 3224 -3225, 3224 -3224, 3225 -3226, 3225 -3225, 3226 -3226, 3213 -3213, 3226 -3227, 3226 -3226, 3227 -3228, 3227 -3227, 3228 -3229, 3228 -3228, 3229 -3230, 3229 -3229, 3230 -3230, 3225 -3225, 3230 -3231, 3230 -3230, 3231 -3231, 3222 -3222, 3231 -3232, 3231 -3231, 3232 -3233, 3232 -3232, 3233 -3234, 3233 -3233, 3234 -3234, 3229 -3229, 3234 
-3235, 3232 -3232, 3235 -3236, 3235 -3235, 3236 -3237, 3235 -3235, 3237 -3238, 3219 -3219, 3238 -3239, 3238 -3238, 3239 -3240, 3238 -3238, 3240 -3242, 3241 -3241, 3242 -3243, 3242 -3242, 3243 -3244, 3243 -3243, 3244 -3245, 3244 -3244, 3245 -3246, 3245 -3245, 3246 -3246, 3241 -3241, 3246 -3247, 3246 -3246, 3247 -3248, 3244 -3244, 3248 -3249, 3248 -3248, 3249 -3250, 3248 -3248, 3250 -3251, 3243 -3243, 3251 -3253, 3252 -3252, 3253 -3254, 3253 -3253, 3254 -3255, 3254 -3254, 3255 -3256, 3255 -3255, 3256 -3257, 3256 -3256, 3257 -3258, 3257 -3257, 3258 -3259, 3258 -3258, 3259 -3260, 3259 -3259, 3260 -3261, 3260 -3260, 3261 -3261, 3256 -3256, 3261 -3262, 3261 -3261, 3262 -3262, 3253 -3253, 3262 -3263, 3260 -3260, 3263 -3264, 3263 -3263, 3264 -3265, 3263 -3263, 3265 -3267, 3266 -3266, 3267 -3268, 3267 -3267, 3268 -3269, 3268 -3268, 3269 -3270, 3269 -3269, 3270 -3271, 3270 -3270, 3271 -3272, 3271 -3271, 3272 -3273, 3272 -3272, 3273 -3273, 3268 -3268, 3273 -3274, 3273 -3273, 3274 -3275, 3274 -3274, 3275 -3275, 3266 -3266, 3275 -3276, 3275 -3275, 3276 -3277, 3276 -3276, 3277 -3278, 3277 -3277, 3278 -3279, 3278 -3278, 3279 -3279, 3274 -3274, 3279 -3280, 3279 -3279, 3280 -3281, 3280 -3280, 3281 -3281, 3272 -3272, 3281 -3282, 3278 -3278, 3282 -3283, 3282 -3282, 3283 -3284, 3282 -3282, 3284 -3285, 3271 -3271, 3285 -3286, 3285 -3285, 3286 -3287, 3285 -3285, 3287 -3289, 3288 -3288, 3289 -3290, 3289 -3289, 3290 -3291, 3290 -3290, 3291 -3292, 3291 -3291, 3292 -3293, 3292 -3292, 3293 -3294, 3293 -3293, 3294 -3295, 3294 -3294, 3295 -3295, 3290 -3290, 3295 -3296, 3295 -3295, 3296 -3297, 3296 -3296, 3297 -3297, 3288 -3288, 3297 -3298, 3296 -3296, 3298 -3299, 3298 -3298, 3299 -3299, 3294 -3294, 3299 -3300, 3299 -3299, 3300 -3301, 3300 -3300, 3301 -3302, 3301 -3301, 3302 -3303, 3302 -3302, 3303 -3303, 3298 -3298, 3303 -3304, 3303 -3303, 3304 -3305, 3304 -3304, 3305 -3306, 3304 -3304, 3306 -3307, 3291 -3291, 3307 -3308, 3307 -3307, 3308 -3309, 3307 -3307, 3309 -3311, 3310 -3310, 3311 -3312, 3311 -3311, 3312 -3313, 3312 -3312, 3313 -3314, 3313 -3313, 3314 -3315, 3314 -3314, 3315 -3315, 3310 -3310, 3315 -3316, 3315 -3315, 3316 -3317, 3313 -3313, 3317 -3318, 3317 -3317, 3318 -3319, 3317 -3317, 3319 -3320, 3312 -3312, 3320 -3321, 3320 -3320, 3321 -3322, 3320 -3320, 3322 -3324, 3323 -3323, 3324 -3325, 3324 -3324, 3325 -3326, 3325 -3325, 3326 -3327, 3326 -3326, 3327 -3328, 3327 -3327, 3328 -3328, 3323 -3323, 3328 -3329, 3328 -3328, 3329 -3330, 3329 -3329, 3330 -3331, 3329 -3329, 3331 -3332, 3326 -3326, 3332 -3333, 3332 -3332, 3333 -3334, 3333 -3333, 3334 -3334, 3325 -3325, 3334 -3336, 3335 -3335, 3336 -3337, 3336 -3336, 3337 -3338, 3337 -3337, 3338 -3339, 3338 -3338, 3339 -3340, 3339 -3339, 3340 -3340, 3335 -3335, 3340 -3341, 3339 -3339, 3341 -3342, 3341 -3341, 3342 -3343, 3342 -3342, 3343 -3344, 3343 -3343, 3344 -3345, 3344 -3344, 3345 -3346, 3345 -3345, 3346 -3346, 3341 -3341, 3346 -3347, 3346 -3346, 3347 -3348, 3347 -3347, 3348 -3348, 3338 -3338, 3348 -3349, 3348 -3348, 3349 -3350, 3349 -3349, 3350 -3351, 3350 -3350, 3351 -3352, 3351 -3351, 3352 -3352, 3347 -3347, 3352 -3353, 3351 -3351, 3353 -3354, 3353 -3353, 3354 -3355, 3353 -3353, 3355 -3357, 3356 -3356, 3357 -3358, 3357 -3357, 3358 -3359, 3358 -3358, 3359 -3360, 3359 -3359, 3360 -3361, 3360 -3360, 3361 -3362, 3361 -3361, 3362 -3363, 3362 -3362, 3363 -3364, 3363 -3363, 3364 -3365, 3364 -3364, 3365 -3366, 3365 -3365, 3366 -3366, 3361 -3361, 3366 -3367, 3366 -3366, 3367 -3367, 3359 -3359, 3367 -3368, 3367 -3367, 3368 -3368, 3356 -3356, 3368 -3369, 3364 -3364, 3369 
-3370, 3369 -3369, 3370 -3371, 3369 -3369, 3371 diff --git a/data/TUDataset/MUTAG/raw/MUTAG_edge_labels.txt b/data/TUDataset/MUTAG/raw/MUTAG_edge_labels.txt deleted file mode 100644 index 66a74dc..0000000 --- a/data/TUDataset/MUTAG/raw/MUTAG_edge_labels.txt +++ /dev/null @@ -1,7442 +0,0 @@ -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -3 -3 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 
-0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 
-1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 
-2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 
-0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 
-0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 
-1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -0 -0 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -1 -1 -2 -2 -1 -1 -1 -1 -2 -2 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -0 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -2 -2 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -1 -1 diff --git a/data/TUDataset/MUTAG/raw/MUTAG_graph_indicator.txt b/data/TUDataset/MUTAG/raw/MUTAG_graph_indicator.txt deleted file mode 100644 index 74f6d67..0000000 --- a/data/TUDataset/MUTAG/raw/MUTAG_graph_indicator.txt +++ /dev/null @@ -1,3371 +0,0 @@ -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -2 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -3 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -4 -5 -5 -5 -5 -5 -5 -5 -5 -5 -5 -5 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -6 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -7 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -8 -9 -9 -9 -9 -9 -9 -9 -9 -9 -9 -9 -9 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -10 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -11 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -12 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -13 -14 -14 -14 -14 -14 -14 -14 -14 -14 -14 -14 -14 -14 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -15 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -16 -17 -17 -17 -17 -17 -17 -17 -17 -17 -17 -17 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -18 -19 -19 -19 -19 -19 -19 -19 -19 -19 -19 -19 -19 -19 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -20 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -21 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -22 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -23 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -24 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -25 -26 -26 -26 -26 -26 -26 -26 -26 -26 -26 -26 -26 -26 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -27 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -28 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -29 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -30 -31 
-31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -31 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -32 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -33 -34 -34 -34 -34 -34 -34 -34 -34 -34 -34 -34 -34 -34 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -35 -36 -36 -36 -36 -36 -36 -36 -36 -36 -36 -36 -36 -36 -36 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -37 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -38 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -39 -40 -40 -40 -40 -40 -40 -40 -40 -40 -40 -40 -40 -40 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -41 -42 -42 -42 -42 -42 -42 -42 -42 -42 -42 -42 -42 -42 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -43 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -44 -45 -45 -45 -45 -45 -45 -45 -45 -45 -45 -45 -45 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -46 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -47 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -48 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -49 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -50 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -51 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -52 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -53 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -54 -55 -55 -55 -55 -55 -55 -55 -55 -55 -55 -55 -55 -55 -55 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -56 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -57 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -58 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -59 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -60 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -61 -62 -62 -62 -62 -62 -62 -62 -62 -62 -62 -62 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -63 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -64 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -65 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -66 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -67 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -68 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -69 -70 -70 -70 -70 -70 -70 -70 -70 -70 -70 -70 -70 -70 -70 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -71 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -72 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -73 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -74 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -75 -76 -76 -76 -76 -76 -76 -76 -76 -76 -76 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -77 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -78 -79 -79 -79 -79 -79 -79 -79 -79 -79 -79 -79 
-79 -79 -79 -79 -79 -79 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -80 -81 -81 -81 -81 -81 -81 -81 -81 -81 -81 -81 -81 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -82 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -83 -84 -84 -84 -84 -84 -84 -84 -84 -84 -84 -84 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -85 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -86 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -87 -88 -88 -88 -88 -88 -88 -88 -88 -88 -88 -88 -88 -89 -89 -89 -89 -89 -89 -89 -89 -89 -89 -89 -89 -89 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -90 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -91 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -92 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -93 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -94 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -95 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -96 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -97 -98 -98 -98 -98 -98 -98 -98 -98 -98 -98 -98 -98 -98 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -99 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -100 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -101 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -102 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -103 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -104 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -105 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -106 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -107 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -108 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -109 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -110 -111 -111 -111 -111 -111 -111 -111 -111 -111 -111 -111 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -112 -113 -113 -113 -113 -113 -113 -113 -113 -113 -113 -113 -113 -113 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -114 -115 -115 -115 -115 -115 -115 -115 -115 -115 -115 -115 -115 -116 -116 -116 -116 -116 -116 -116 -116 -116 -116 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -117 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -118 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -119 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -120 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 -121 
[... MUTAG_graph_indicator.txt (deleted): remaining per-node graph identifiers (graph_id values 121 through 188) omitted for brevity ...]
diff --git a/data/TUDataset/MUTAG/raw/MUTAG_graph_labels.txt b/data/TUDataset/MUTAG/raw/MUTAG_graph_labels.txt deleted file mode 100644 index cd9a9c8..0000000 --- a/data/TUDataset/MUTAG/raw/MUTAG_graph_labels.txt +++ /dev/null @@ -1,188 +0,0 @@
[... 188 graph class labels (1 or -1, one per graph) omitted ...]
diff --git a/data/TUDataset/MUTAG/raw/MUTAG_node_labels.txt b/data/TUDataset/MUTAG/raw/MUTAG_node_labels.txt deleted file mode 100644 index cc11ed9..0000000 --- a/data/TUDataset/MUTAG/raw/MUTAG_node_labels.txt +++ /dev/null @@ -1,3371 +0,0 @@
[... 3371 node labels (atom-type indices 0-6, one per node) omitted ...]
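The raw files removed in this part of the diff (MUTAG_A.txt, MUTAG_graph_indicator.txt, MUTAG_graph_labels.txt, MUTAG_node_labels.txt) follow the standard TU-dataset layout documented in the deleted README below. For readers unfamiliar with that layout, here is a minimal sketch of how such raw files could be grouped into one record per graph; the load_tu_raw helper and the example path are illustrative assumptions and are not part of this PR.

```python
from collections import defaultdict


def load_tu_raw(prefix: str) -> list[dict]:
    """Group the TU-format raw files under `prefix` into one record per graph."""
    # DS_A.txt: one "row, col" edge per line (1-based node ids)
    with open(f"{prefix}_A.txt") as f:
        edges = [tuple(int(v) for v in line.split(",")) for line in f if line.strip()]
    # DS_graph_indicator.txt: line i holds the graph_id of node i
    with open(f"{prefix}_graph_indicator.txt") as f:
        graph_of_node = {i + 1: int(line) for i, line in enumerate(f)}
    # DS_graph_labels.txt: one class label per graph
    with open(f"{prefix}_graph_labels.txt") as f:
        graph_labels = [int(line) for line in f]
    # DS_node_labels.txt: line i holds the (atom-type) label of node i
    with open(f"{prefix}_node_labels.txt") as f:
        node_labels = {i + 1: int(line) for i, line in enumerate(f)}

    graphs: dict[int, dict] = defaultdict(lambda: {"edges": [], "node_labels": {}})
    for u, v in edges:
        graphs[graph_of_node[u]]["edges"].append((u, v))
    for node_id, label in node_labels.items():
        graphs[graph_of_node[node_id]]["node_labels"][node_id] = label
    # attach the class label and return the graphs in graph_id order
    return [{"y": graph_labels[g - 1], **graphs[g]} for g in sorted(graphs)]


# Illustrative usage (path is an assumption based on this repository's layout):
# graphs = load_tu_raw("data/TUDataset/MUTAG/raw/MUTAG")
```

In practice the same data can be obtained programmatically, e.g. with torch_geometric.datasets.TUDataset(root="data/TUDataset", name="MUTAG"), which is presumably why the checked-in copies are being removed here.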
diff --git a/data/TUDataset/MUTAG/raw/README.txt b/data/TUDataset/MUTAG/raw/README.txt deleted file mode 100644 index 7c1cafa..0000000 --- a/data/TUDataset/MUTAG/raw/README.txt +++ /dev/null @@ -1,85 +0,0 @@ -README for dataset MUTAG - - -=== Usage === - -This folder contains the following comma separated text files -(replace DS by the name of the dataset): - -n = total number of nodes -m = total number of edges -N = number of graphs - -(1) DS_A.txt (m lines) - sparse (block diagonal) adjacency matrix for all graphs, - each line corresponds to (row, col) resp.
(node_id, node_id) - -(2) DS_graph_indicator.txt (n lines) - column vector of graph identifiers for all nodes of all graphs, - the value in the i-th line is the graph_id of the node with node_id i - -(3) DS_graph_labels.txt (N lines) - class labels for all graphs in the dataset, - the value in the i-th line is the class label of the graph with graph_id i - -(4) DS_node_labels.txt (n lines) - column vector of node labels, - the value in the i-th line corresponds to the node with node_id i - -There are OPTIONAL files if the respective information is available: - -(5) DS_edge_labels.txt (m lines; same size as DS_A_sparse.txt) - labels for the edges in DD_A_sparse.txt - -(6) DS_edge_attributes.txt (m lines; same size as DS_A.txt) - attributes for the edges in DS_A.txt - -(7) DS_node_attributes.txt (n lines) - matrix of node attributes, - the comma seperated values in the i-th line is the attribute vector of the node with node_id i - -(8) DS_graph_attributes.txt (N lines) - regression values for all graphs in the dataset, - the value in the i-th line is the attribute of the graph with graph_id i - - -=== Description of the dataset === - -The MUTAG dataset consists of 188 chemical compounds divided into two -classes according to their mutagenic effect on a bacterium. - -The chemical data was obtained form http://cdb.ics.uci.edu and converted -to graphs, where vertices represent atoms and edges represent chemical -bonds. Explicit hydrogen atoms have been removed and vertices are labeled -by atom type and edges by bond type (single, double, triple or aromatic). -Chemical data was processed using the Chemistry Development Kit (v1.4). - -Node labels: - - 0 C - 1 N - 2 O - 3 F - 4 I - 5 Cl - 6 Br - -Edge labels: - - 0 aromatic - 1 single - 2 double - 3 triple - - -=== Previous Use of the Dataset === - -Kriege, N., Mutzel, P.: Subgraph matching kernels for attributed graphs. In: Proceedings -of the 29th International Conference on Machine Learning (ICML-2012) (2012). - - -=== References === - -Debnath, A.K., Lopez de Compadre, R.L., Debnath, G., Shusterman, A.J., and Hansch, C. -Structure-activity relationship of mutagenic aromatic and heteroaromatic nitro compounds. -Correlation with molecular orbital energies and hydrophobicity. J. Med. Chem. 34(2):786-797 (1991). diff --git a/src/data_process.py b/src/data_process.py index 92bbd65..6af1599 100644 --- a/src/data_process.py +++ b/src/data_process.py @@ -1,19 +1,25 @@ # setting of data generation -import torch +import pickle as pkl import random import sys -import pickle as pkl + +import networkx as nx import numpy as np import scipy.sparse as sp -import networkx as nx +import torch import torch_geometric import torch_sparse -def generate_data(number_of_nodes: int, class_num: int, link_inclass_prob: float, link_outclass_prob: float) -> tuple: - ''' - This function generates a synthetic graph dataset returns components of the graph dataset - features, +def generate_data( + number_of_nodes: int, + class_num: int, + link_inclass_prob: float, + link_outclass_prob: float, +) -> tuple: + """ + This function generates a synthetic graph dataset returns components of the graph dataset - features, adjacency matrix, labels, and indices for training, validation, and testing. 
Args: @@ -31,62 +37,60 @@ def generate_data(number_of_nodes: int, class_num: int, link_inclass_prob: float idx_test: (torch.LongTensor) - Indices of nodes used for test dataset Notes: - ''' - - - adj=torch.zeros(number_of_nodes,number_of_nodes) #n*n adj matrix + """ - labels=torch.randint(0,class_num,(number_of_nodes,)) #assign random label with equal probability - labels=labels.to(dtype=torch.long) - #label_node, speed up the generation of edges - label_node_dict=dict() + adj = torch.zeros(number_of_nodes, number_of_nodes) # n*n adj matrix - #Create an empty dictionary for the labels + labels = torch.randint( + 0, class_num, (number_of_nodes,) + ) # assign random label with equal probability + labels = labels.to(dtype=torch.long) + # label_node, speed up the generation of edges + label_node_dict: dict[int, list[int]] = dict() + + # Create an empty dictionary for the labels for j in range(class_num): - label_node_dict[j]=[] + label_node_dict[j] = [] - #Populating the above dictionary - for each label with a list of node indices having that label + # Populating the above dictionary - for each label with a list of node indices having that label for i in range(len(labels)): - label_node_dict[int(labels[i])]+=[int(i)] - + label_node_dict[int(labels[i])] += [int(i)] - #generate graph + # generate graph for node_id in range(number_of_nodes): - j=labels[node_id] - for l in label_node_dict: - if l==j: #same class - for z in label_node_dict[l]: #z>node_id, symmetrix matrix, no repeat - if z>node_id and random.random()node_id and random.random()node_id, symmetric matrix, no repeat + if z > node_id and random.random() < link_inclass_prob: + adj[node_id, z] = 1 + adj[z, node_id] = 1 + else: # different class + for z in label_node_dict[l]: + if z > node_id and random.random() < link_outclass_prob: + adj[node_id, z] = 1 + adj[z, node_id] = 1 - #generate feature use eye matrix - features=torch.eye(number_of_nodes,number_of_nodes) + adj = torch_sparse.tensor.SparseTensor.from_dense(adj.float()) - #seprate train,val,test - idx_train = torch.LongTensor(range(number_of_nodes//5)) - idx_val = torch.LongTensor(range(number_of_nodes//5, number_of_nodes//2)) - idx_test = torch.LongTensor(range(number_of_nodes//2, number_of_nodes)) + # generate feature use eye matrix + features = torch.eye(number_of_nodes, number_of_nodes) + # separate train,val,test + idx_train = torch.LongTensor(range(number_of_nodes // 5)) + idx_val = torch.LongTensor(range(number_of_nodes // 5, number_of_nodes // 2)) + idx_test = torch.LongTensor(range(number_of_nodes // 2, number_of_nodes)) return features.float(), adj, labels, idx_train, idx_val, idx_test - def parse_index_file(filename: str) -> list: """ This function reads and parses an index file - Args: + Args: filename: (str) - name or path of the file to parse - + Return: index: (list) - list of integers, each integer in the list represents int of the lines lines of the input file. """ @@ -95,39 +99,37 @@ def parse_index_file(filename: str) -> list: index.append(int(line.strip())) return index - def normalize(mx: sp.csc_matrix) -> sp.csr_matrix: """ This function is to row-normalize sparse matrix for efficient computation of the graph - + Argument: - mx: (sparse matrix) - Input sparse matrix to row-normalize. + mx: (sparse matrix) - Input sparse matrix to row-normalize. Return: mx: (sparse matrix) - Returns the row-normalized sparse matrix. 
Note: - Row-normalizing is usually done in graph algorithms to enable equal node contributions regardless of the node's degree + Row-normalizing is usually done in graph algorithms to enable equal node contributions regardless of the node's degree and to stabilize, ease numerical computations """ rowsum = np.array(mx.sum(1)) r_inv = np.power(rowsum, -1).flatten() - r_inv[np.isinf(r_inv)] = 0. + r_inv[np.isinf(r_inv)] = 0.0 r_mat_inv = sp.diags(r_inv) mx = r_mat_inv.dot(mx) return mx - def load_data(dataset_str: str) -> tuple: - ''' + """ This function loads input data from gcn/data directory Argument: dataset_str: Dataset name - Return: + Return: All data input files loaded (as well as the training/test data). Note: @@ -143,31 +145,35 @@ def load_data(dataset_str: str) -> tuple: ind.dataset_str.test.index => the indices of test instances in graph, for the inductive setting as list object. All objects above must be saved using python pickle module. - ''' + """ - if dataset_str in ['cora', 'citeseer', 'pubmed']: - names = ['x', 'y', 'tx', 'ty', 'allx', 'ally', 'graph'] + if dataset_str in ["cora", "citeseer", "pubmed"]: + names = ["x", "y", "tx", "ty", "allx", "ally", "graph"] objects = [] for i in range(len(names)): - with open("data/ind.{}.{}".format(dataset_str, names[i]), 'rb') as f: + with open("data/ind.{}.{}".format(dataset_str, names[i]), "rb") as f: if sys.version_info > (3, 0): - objects.append(pkl.load(f, encoding='latin1')) + objects.append(pkl.load(f, encoding="latin1")) else: objects.append(pkl.load(f)) x, y, tx, ty, allx, ally, graph = tuple(objects) - test_idx_reorder = parse_index_file("data/ind.{}.test.index".format(dataset_str)) + test_idx_reorder = parse_index_file( + "data/ind.{}.test.index".format(dataset_str) + ) test_idx_range = np.sort(test_idx_reorder) - if dataset_str == 'citeseer': + if dataset_str == "citeseer": # Fix citeseer dataset (there are some isolated nodes in the graph) # Find isolated nodes, add them as zero-vecs into the right position - test_idx_range_full = range(min(test_idx_reorder), max(test_idx_reorder)+1) + test_idx_range_full = range( + min(test_idx_reorder), max(test_idx_reorder) + 1 + ) tx_extended = sp.lil_matrix((len(test_idx_range_full), x.shape[1])) - tx_extended[test_idx_range-min(test_idx_range), :] = tx + tx_extended[test_idx_range - min(test_idx_range), :] = tx tx = tx_extended ty_extended = np.zeros((len(test_idx_range_full), y.shape[1])) - ty_extended[test_idx_range-min(test_idx_range), :] = ty + ty_extended[test_idx_range - min(test_idx_range), :] = ty ty = ty_extended features = sp.vstack((allx, tx)).tolil() @@ -177,72 +183,68 @@ def load_data(dataset_str: str) -> tuple: labels = np.vstack((ally, ty)) labels[test_idx_reorder, :] = labels[test_idx_range, :] - number_of_nodes=adj.shape[0] + idx_test = torch.LongTensor(test_idx_range.tolist()) + idx_train = torch.LongTensor(range(len(y))) + idx_val = torch.LongTensor(range(len(y), len(y) + 500)) + # features = normalize(features) + # adj = normalize(adj) # no normalize adj here, normalize it in the training process - idx_test = test_idx_range.tolist() - idx_train = range(len(y)) - idx_val = range(len(y), len(y)+500) - - idx_train = torch.LongTensor(idx_train) - idx_val = torch.LongTensor(idx_val) - idx_test = torch.LongTensor(idx_test) - - #features = normalize(features) #cannot converge if use SGD, why?????????? 
- #adj = normalize(adj) # no normalize adj here, normalize it in the training process - - - features=torch.tensor(features.toarray()).float() + features = torch.tensor(features.toarray()).float() adj = torch.tensor(adj.toarray()).float() adj = torch_sparse.tensor.SparseTensor.from_dense(adj) - labels=torch.tensor(labels) - labels=torch.argmax(labels,dim=1) - - elif dataset_str in ['ogbn-arxiv', 'ogbn-products', 'ogbn-mag', 'ogbn-papers100M']: #'ogbn-mag' is heteregeneous + labels = torch.tensor(labels) + labels = torch.argmax(labels, dim=1) + + elif dataset_str in [ + "ogbn-arxiv", + "ogbn-products", + "ogbn-mag", + "ogbn-papers100M", + ]: #'ogbn-mag' is heteregeneous from ogb.nodeproppred import PygNodePropPredDataset # Download and process data at './dataset/.' - dataset = PygNodePropPredDataset(name=dataset_str, - transform=torch_geometric.transforms.ToSparseTensor()) + dataset = PygNodePropPredDataset( + name=dataset_str, transform=torch_geometric.transforms.ToSparseTensor() + ) split_idx = dataset.get_idx_split() - idx_train, idx_val, idx_test = split_idx["train"], split_idx["valid"], split_idx["test"] + idx_train, idx_val, idx_test = ( + split_idx["train"], + split_idx["valid"], + split_idx["test"], + ) idx_train = torch.LongTensor(idx_train) idx_val = torch.LongTensor(idx_val) idx_test = torch.LongTensor(idx_test) data = dataset[0] - + features = data.x labels = data.y.reshape(-1) - if dataset_str == 'ogbn-arxiv': + if dataset_str == "ogbn-arxiv": adj = data.adj_t.to_symmetric() else: adj = data.adj_t - - elif dataset_str == 'reddit': + + elif dataset_str == "reddit": from dgl.data import RedditDataset + data = RedditDataset() g = data[0] - num_classes = data.num_classes - + adj = torch_sparse.tensor.SparseTensor.from_edge_index(g.edges()) - features = g.ndata['feat'] - train_mask = g.ndata['train_mask'] - val_mask = g.ndata['val_mask'] - test_mask = g.ndata['test_mask'] - + features = g.ndata["feat"] + train_mask = g.ndata["train_mask"] + val_mask = g.ndata["val_mask"] + test_mask = g.ndata["test_mask"] + idx_train = (train_mask == True).nonzero().view(-1) idx_val = (val_mask == True).nonzero().view(-1) idx_test = (test_mask == True).nonzero().view(-1) - - labels = g.ndata['label'] - - return features.float(), adj, labels, idx_train, idx_val, idx_test - - - - + labels = g.ndata["label"] + return features.float(), adj, labels, idx_train, idx_val, idx_test diff --git a/src/fedgcn_run.py b/src/fedgcn_run.py index f03bd95..84f3f7b 100644 --- a/src/fedgcn_run.py +++ b/src/fedgcn_run.py @@ -1,316 +1,81 @@ -# --- -# jupyter: -# jupytext: -# formats: py:light -# text_representation: -# extension: .py -# format_name: light -# format_version: '1.5' -# jupytext_version: 1.14.4 -# kernelspec: -# display_name: Python 3 (ipykernel) -# language: python -# name: python3 -# --- - -# + -from time import sleep -import ray -import os import argparse +import os from pathlib import Path -import yaml - -try: - import ogb -except: - os.system("pip install ogb") - -try: - import tensorboard -except: - os.system("pip install tensorboard") - +from typing import Any import numpy as np +import ray import torch -import torch_geometric -from torch import Tensor -from torch_geometric.loader import DataLoader - +import yaml from torch.utils.tensorboard import SummaryWriter -import os - print(os.getcwd()) print(os.listdir()) -print(os.listdir('..')) +print(os.listdir("..")) import sys -#adding additional module folders -sys.path.append(os.path.join(sys.path[0],'src','utility')) 
-sys.path.append(os.path.join(sys.path[0],'src','data')) - - -remote = False #false for local simulation - -if remote: - print(os.listdir('modules')) - # GCN model - from gnn_models import GCN_Graph_Classification, GCN, GCN_arxiv, SAGE_products, GCN_products - from train import test, train, Lhop_Block_matrix_train - from utils import label_dirichlet_partition, parition_non_iid, get_in_comm_indexes, get_in_comm_indexes_BDS_GCN, increment_dir, setdiff1d - from data_process import generate_data, load_data - -else: - from gnn_models import GCN_Graph_Classification, GCN, GCN_arxiv, SAGE_products, GCN_products - from train import test, train, Lhop_Block_matrix_train - from utils import label_dirichlet_partition, parition_non_iid, get_in_comm_indexes, get_in_comm_indexes_BDS_GCN, increment_dir, setdiff1d - from data_process import generate_data, load_data - - -# - - - -class Trainer_General: - def __init__(self, rank, communicate_index, adj, labels, features, idx_train, idx_test, local_steps, num_layers, args_hidden, class_num, learning_rate, device): - # from gnn_models import GCN_Graph_Classification - torch.manual_seed(rank) - - # seems that new trainer process will not inherit sys.path from parent, need to reimport! - if args.dataset == "ogbn-arxiv": - self.model = GCN_arxiv(nfeat=features.shape[1], - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers).to(device) - elif args.dataset == "ogbn-products": - self.model = SAGE_products(nfeat=features.shape[1], - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers).to(device) - else: - self.model = GCN(nfeat=in_feat, - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers).to(device) - - self.rank = rank #rank = client ID - - self.device = device - - self.optimizer = torch.optim.SGD(self.model.parameters(), - lr=learning_rate, weight_decay=5e-4) - - self.criterion = torch.nn.CrossEntropyLoss() - - self.train_losses = [] - self.train_accs = [] - - self.test_losses = [] - self.test_accs = [] - - self.adj = adj.to(device) - self.labels = labels.to(device) - self.features = features.to(device) - self.idx_train = idx_train.to(device) - self.idx_test = idx_test.to(device) - - self.local_steps = local_steps - - - @torch.no_grad() - def update_params(self, params, current_global_epoch): - #load global parameter from global server - self.model.to('cpu') - for p, mp, in zip(params, self.model.parameters()): - mp.data = p - self.model.to(self.device) - - def train(self, current_global_round): - #clean cache - torch.cuda.empty_cache() - for iteration in range(self.local_steps): - - self.model.train() - - - loss_train, acc_train = train(iteration, self.model, self.optimizer, - self.features, self.adj, self.labels, self.idx_train) - self.train_losses.append(loss_train) - self.train_accs.append(acc_train) - - - loss_test, acc_test = local_test_loss, local_test_acc = self.local_test() - self.test_losses.append(loss_test) - self.test_accs.append(acc_test) - - - def local_test(self): - local_test_loss, local_test_acc = test(self.model, self.features, self.adj, self.labels, self.idx_test) - return [local_test_loss, local_test_acc] - - def get_params(self): - self.optimizer.zero_grad(set_to_none=True) - return tuple(self.model.parameters()) - def get_all_loss_accuray(self): - return [np.array(self.train_losses), np.array(self.train_accs), np.array(self.test_losses), np.array(self.test_accs)] - def get_rank(self): - return self.rank - - - -class Server: - def __init__(self): - #server 
model on cpu - if args.dataset == "ogbn-arxiv": - self.model = GCN_arxiv(nfeat=features.shape[1], - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers) - elif args.dataset == "ogbn-products": - self.model = SAGE_products(nfeat=features.shape[1], - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers) - else: #CORA, CITESEER, PUBMED, REDDIT - self.model = GCN(nfeat=in_feat, - nhid=args_hidden, - nclass=class_num, - dropout=0.5, - NumLayers=args.num_layers) - - - if device.type == 'cpu': - @ray.remote(num_cpus=0.1, scheduling_strategy='SPREAD') - class Trainer(Trainer_General): - def __init__(self, rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device): - super().__init__(rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device) - - elif args.dataset == "ogbn-arxiv": - @ray.remote(num_gpus=0.5, num_cpus=5, scheduling_strategy='SPREAD') - class Trainer(Trainer_General): - def __init__(self, rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device): - - super().__init__(rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device) - else: - @ray.remote(num_gpus=1, num_cpus=10, scheduling_strategy='SPREAD') - class Trainer(Trainer_General): - def __init__(self, rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device): - - super().__init__(rank, communicate_index, adj, labels, features, idx_train, idx_test, local_step, num_layers, args_hidden, class_num, learning_rate, device) - - if args.fedtype == 'fedsage+': - print("running fedsage+") - features_in_clients = [] - #assume the linear generator learnt the optimal (the average of features of neighbor nodes) - #gaussian noise - - for i in range(args.n_trainer): - #orignial features of outside neighbors of nodes in client i - original_feature_i = features[setdiff1d(split_data_indexes[i], communicate_indexes[i])].clone() - - #add gaussian noise to the communicated feature - gaussian_feature_i = original_feature_i + torch.normal(0, 0.1, original_feature_i.shape).cpu() - - copy_feature = features.clone() - - copy_feature[setdiff1d(split_data_indexes[i], communicate_indexes[i])] = gaussian_feature_i - - features_in_clients.append(copy_feature[communicate_indexes[i]]) - - self.trainers = [Trainer.remote(i, communicate_indexes[i], edge_indexes_clients[i], - labels[communicate_indexes[i]], features_in_clients[i], - in_com_train_data_indexes[i], in_com_test_data_indexes[i], args.local_step, args.num_layers, args_hidden, class_num, args.learning_rate, device) for i in range(args.n_trainer)] - - else: - self.trainers = [Trainer.remote(i, communicate_indexes[i], edge_indexes_clients[i], - labels[communicate_indexes[i]], features[communicate_indexes[i]], - in_com_train_data_indexes[i], in_com_test_data_indexes[i], args.local_step, args.num_layers, args_hidden, class_num, args.learning_rate, device) for i in range(args.n_trainer)] +# adding additional module folders +sys.path.append(os.path.join(sys.path[0], "src", "utility")) +sys.path.append(os.path.join(sys.path[0], "src", "data")) - self.broadcast_params(-1) - @torch.no_grad() - def zero_params(self): - for p in self.model.parameters(): - 
p.zero_() - @torch.no_grad() - def train(self, current_global_epoch): +ray.init() - for trainer in self.trainers: - trainer.train.remote(i) - params = [trainer.get_params.remote() for trainer in self.trainers] - self.zero_params() +from data_process import generate_data, load_data +from gnn_models import GCN, GCN_arxiv, SAGE_products +from server_class import Server +from trainer_class import Trainer_General +from utils import ( + get_in_comm_indexes, + get_in_comm_indexes_BDS_GCN, + increment_dir, + label_dirichlet_partition, + parition_non_iid, + setdiff1d, +) - while True: - ready, left = ray.wait(params, num_returns=1, timeout=None) - if ready: - for t in ready: - for p, mp in zip(ray.get(t), self.model.parameters()): - mp.data += p.cpu() - params = left - if not params: - break - - for p in self.model.parameters(): - p /= args.n_trainer - self.broadcast_params(current_global_epoch) +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("-d", "--dataset", default="cora", type=str) + parser.add_argument("-f", "--fedtype", default="fedgcn", type=str) - def broadcast_params(self, current_global_epoch): - for trainer in self.trainers: - trainer.update_params.remote(tuple(self.model.parameters()), current_global_epoch) # run in submit order -# - + parser.add_argument("-c", "--global_rounds", default=100, type=int) + parser.add_argument("-i", "--local_step", default=3, type=int) + parser.add_argument("-lr", "--learning_rate", default=0.5, type=float) + parser.add_argument("-n", "--n_trainer", default=5, type=int) + parser.add_argument("-nl", "--num_layers", default=2, type=int) + parser.add_argument("-nhop", "--num_hops", default=2, type=int) + parser.add_argument("-g", "--gpu", action="store_true") # if -g, use gpu + parser.add_argument("-iid_b", "--iid_beta", default=10000, type=float) + parser.add_argument("-l", "--logdir", default="./runs", type=str) -# + -if __name__=="__main__": - parser = argparse.ArgumentParser() - parser.add_argument('-d', '--dataset', default='cora', type=str) - - parser.add_argument('-f', '--fedtype', default='fedgcn', type=str) - - parser.add_argument('-c', '--global_rounds',default=100, type=int) - parser.add_argument('-i', '--local_step',default=3, type=int) - parser.add_argument('-lr', '--learning_rate', default=0.5, type=float) - - parser.add_argument('-n', '--n_trainer', default=5, type=int) - parser.add_argument('-nl', '--num_layers', default=2, type=int) - parser.add_argument('-nhop', '--num_hops', default=2, type=int) - parser.add_argument('-g', '--gpu', action='store_true') #if -g, use gpu - parser.add_argument('-iid_b', '--iid_beta', default=10000, type=float) - - parser.add_argument('-l', '--logdir', default='./runs', type=str) - - - parser.add_argument('-r', '--repeat_time', default=10, type=int) + parser.add_argument("-r", "--repeat_time", default=10, type=int) args = parser.parse_args() print(args) - - #'cora', 'citeseer', 'pubmed' #simulate #other dataset twitter, - #'ogbn-arxiv', reddit, "ogbn-products" + + # 'cora', 'citeseer', 'pubmed' #simulate #other dataset twitter, + # 'ogbn-arxiv', reddit, "ogbn-products" np.random.seed(42) torch.manual_seed(42) - - #load data to cpu - if args.dataset == 'simulate': - number_of_nodes=200 - class_num=3 - link_inclass_prob=10/number_of_nodes - link_outclass_prob=link_inclass_prob/20 - features, adj, labels, idx_train, idx_val, idx_test = generate_data(number_of_nodes, class_num, link_inclass_prob, link_outclass_prob) + + # load data to cpu + if args.dataset == "simulate": + 
number_of_nodes = 200 + class_num = 3 + link_inclass_prob = 10 / number_of_nodes + link_outclass_prob = link_inclass_prob / 20 + features, adj, labels, idx_train, idx_val, idx_test = generate_data( + number_of_nodes, class_num, link_inclass_prob, link_outclass_prob + ) else: features, adj, labels, idx_train, idx_val, idx_test = load_data(args.dataset) class_num = labels.max().item() + 1 - in_feat = features.shape[1] - if args.dataset in ['simulate', 'cora', 'citeseer', 'pubmed', "reddit"]: + if args.dataset in ["simulate", "cora", "citeseer", "pubmed", "reddit"]: args_hidden = 16 else: args_hidden = 256 @@ -318,118 +83,273 @@ def broadcast_params(self, current_global_epoch): row, col, edge_attr = adj.coo() edge_index = torch.stack([row, col], dim=0) - - #specifying a target GPU + # specifying a target GPU if args.gpu: - device = torch.device('cuda') - #running on a local machine with multiple gpu - if args.dataset == 'ogbn-products': + device = torch.device("cuda") + # running on a local machine with multiple gpu + if args.dataset == "ogbn-products": edge_index = edge_index.to("cuda:7") else: edge_index = edge_index.to("cuda:0") else: - device = torch.device('cpu') - - - - #repeat experiments + device = torch.device("cpu") + + if device.type == "cpu": + num_cpus = 0.1 + num_gpus = 0.0 + elif args.dataset == "ogbn-arxiv": + num_cpus = 5.0 + num_gpus = 0.5 + else: + num_cpus = 10 + num_gpus = 1.0 + + # repeat experiments average_final_test_loss_repeats = [] average_final_test_accuracy_repeats = [] - + for repeat in range(args.repeat_time): + # load data to cpu - #load data to cpu + # beta = 0.0001 extremely Non-IID, beta = 10000, IID + split_data_indexes = label_dirichlet_partition( + labels, len(labels), class_num, args.n_trainer, beta=args.iid_beta + ) - #beta = 0.0001 extremly Non-IID, beta = 10000, IID - split_data_indexes = label_dirichlet_partition(labels, len(labels), class_num, args.n_trainer, beta = args.iid_beta) - for i in range(args.n_trainer): split_data_indexes[i] = np.array(split_data_indexes[i]) split_data_indexes[i].sort() split_data_indexes[i] = torch.tensor(split_data_indexes[i]) - - if args.fedtype == 'bds-gcn': + + if args.fedtype == "bds-gcn": print("running bds-gcn") - #No args.num_hops - communicate_indexes, in_com_train_data_indexes, in_com_test_data_indexes, edge_indexes_clients = get_in_comm_indexes_BDS_GCN(edge_index, split_data_indexes, args.n_trainer, idx_train, idx_test) + # No args.num_hops + ( + communicate_indexes, + in_com_train_data_indexes, + in_com_test_data_indexes, + edge_indexes_clients, + ) = get_in_comm_indexes_BDS_GCN( + edge_index, split_data_indexes, args.n_trainer, idx_train, idx_test + ) else: - communicate_indexes, in_com_train_data_indexes, in_com_test_data_indexes, edge_indexes_clients = get_in_comm_indexes(edge_index, split_data_indexes, args.n_trainer, args.num_hops, idx_train, idx_test) - + ( + communicate_indexes, + in_com_train_data_indexes, + in_com_test_data_indexes, + edge_indexes_clients, + ) = get_in_comm_indexes( + edge_index, + split_data_indexes, + args.n_trainer, + args.num_hops, + idx_train, + idx_test, + ) + + # determine the resources for each trainer + @ray.remote( + num_gpus=num_gpus, + num_cpus=num_cpus, + scheduling_strategy="SPREAD", + ) + class Trainer(Trainer_General): + def __init__(self, *args: Any, **kwds: Any): + super().__init__(*args, **kwds) + + if args.fedtype == "fedsage+": + print("running fedsage+") + features_in_clients = [] + # assume the linear generator learnt the optimal (the average of features of 
neighbor nodes) + # gaussian noise + + for i in range(args.n_trainer): + # original features of outside neighbors of nodes in client i + original_feature_i = features[ + setdiff1d(split_data_indexes[i], communicate_indexes[i]) + ].clone() + + # add gaussian noise to the communicated feature + gaussian_feature_i = ( + original_feature_i + + torch.normal(0, 0.1, original_feature_i.shape).cpu() + ) + + copy_feature = features.clone() + + copy_feature[ + setdiff1d(split_data_indexes[i], communicate_indexes[i]) + ] = gaussian_feature_i - args.log_dir = increment_dir(Path(args.logdir) / 'exp') + features_in_clients.append(copy_feature[communicate_indexes[i]]) + trainers = [ + Trainer.remote( + i, + edge_indexes_clients[i], + labels[communicate_indexes[i]], + features_in_clients[i], + in_com_train_data_indexes[i], + in_com_test_data_indexes[i], + args_hidden, + class_num, + device, + args, + ) + for i in range(args.n_trainer) + ] + else: + trainers = [ + Trainer.remote( + i, + edge_indexes_clients[i], + labels[communicate_indexes[i]], + features[communicate_indexes[i]], + in_com_train_data_indexes[i], + in_com_test_data_indexes[i], + args_hidden, + class_num, + device, + args, + ) + for i in range(args.n_trainer) + ] + + args.log_dir = increment_dir(Path(args.logdir) / "exp") os.makedirs(args.log_dir) yaml_file = str(Path(args.log_dir) / "args.yaml") - with open(yaml_file, 'w') as out: + with open(yaml_file, "w") as out: yaml.dump(args.__dict__, out, default_flow_style=False) - writer = SummaryWriter(args.log_dir) - #clear cache + # clear cache torch.cuda.empty_cache() - server = Server() + server = Server( + features.shape[1], args_hidden, class_num, device, trainers, args + ) print("global_rounds", args.global_rounds) for i in range(args.global_rounds): server.train(i) - results = [trainer.get_all_loss_accuray.remote() for trainer in server.trainers] results = np.array([ray.get(result) for result in results]) client_id = 0 for result in results: for iteration in range(len(result[0])): - writer.add_scalar('Train Loss/Client_{}'.format(client_id), result[0][iteration], iteration) + writer.add_scalar( + "Train Loss/Client_{}".format(client_id), + result[0][iteration], + iteration, + ) for iteration in range(len(result[1])): - writer.add_scalar('Train Accuracy/Client_{}'.format(client_id), result[1][iteration], iteration) + writer.add_scalar( + "Train Accuracy/Client_{}".format(client_id), + result[1][iteration], + iteration, + ) for iteration in range(len(result[2])): - writer.add_scalar('Test Loss/Client_{}.format(client_id)', result[2][iteration], iteration) + writer.add_scalar( + "Test Loss/Client_{}.format(client_id)", + result[2][iteration], + iteration, + ) for iteration in range(len(result[3])): - writer.add_scalar('Test Accuracy/Client_{}'.format(client_id), result[3][iteration], iteration) + writer.add_scalar( + "Test Accuracy/Client_{}".format(client_id), + result[3][iteration], + iteration, + ) client_id += 1 - #print('finished') train_data_weights = [len(i) for i in in_com_train_data_indexes] test_data_weights = [len(i) for i in in_com_test_data_indexes] - average_train_loss = np.average(results[:,0], weights = train_data_weights, axis = 0) - average_train_accuracy = np.average(results[:,1], weights = train_data_weights, axis = 0) - average_test_loss = np.average(results[:,2], weights = test_data_weights, axis = 0) - average_test_accuracy = np.average(results[:,3], weights = test_data_weights, axis = 0) - for iteration in range(len(results[0][0])): - writer.add_scalar('Train 
Loss/Clients_Overall'.format(), average_train_loss[iteration], iteration) - writer.add_scalar('Train Accuracy/Clients_Overall'.format(), average_train_accuracy[iteration], iteration) - writer.add_scalar('Test Loss/Clients_Overall'.format(), average_test_loss[iteration], iteration) - writer.add_scalar('Train Accuracy/Clients_Overall'.format(), average_test_accuracy[iteration], iteration) + average_train_loss = np.average( + [row[0] for row in results], weights=train_data_weights, axis=0 + ) + average_train_accuracy = np.average( + [row[1] for row in results], weights=train_data_weights, axis=0 + ) + average_test_loss = np.average( + [row[2] for row in results], weights=test_data_weights, axis=0 + ) + average_test_accuracy = np.average( + [row[3] for row in results], weights=test_data_weights, axis=0 + ) - + for iteration in range(len(results[0][0])): + writer.add_scalar( + "Train Loss/Clients_Overall".format(), + average_train_loss[iteration], + iteration, + ) + writer.add_scalar( + "Train Accuracy/Clients_Overall".format(), + average_train_accuracy[iteration], + iteration, + ) + writer.add_scalar( + "Test Loss/Clients_Overall".format(), + average_test_loss[iteration], + iteration, + ) + writer.add_scalar( + "Train Accuracy/Clients_Overall".format(), + average_test_accuracy[iteration], + iteration, + ) - results = [trainer.local_test.remote() for trainer in server.trainers] results = np.array([ray.get(result) for result in results]) - - average_final_test_loss = np.average(results[:,0], weights = test_data_weights, axis = 0) - average_final_test_accuracy = np.average(results[:,1], weights = test_data_weights, axis = 0) - - print(average_final_test_loss, average_final_test_accuracy) - - #sleep(5) # wait for print message from remote workers - filename = args.dataset + "_" + args.fedtype + "_" + str(args.num_layers) + "_layer_" + str(args.num_hops) + "_hop_iid_beta_" + str(args.iid_beta) + "_n_trainer_" + str(args.n_trainer) + "_local_step_" + str(args.local_step) + ".txt" - with open(filename, 'a+') as a: - a.write(f'{average_final_test_loss} {average_final_test_accuracy}\n') - average_final_test_loss_repeats.append(average_final_test_loss) - average_final_test_accuracy_repeats.append(average_final_test_accuracy) - - #finish experiments - with open(f'{args.dataset}_{args.fedtype}_{args.num_layers}_layer_{args.num_hops}_hop_iid_beta_{args.iid_beta}_n_trainer_{args.n_trainer}_local_step_{args.local_step}.txt', 'a+') as a: - a.write(f'average_testing_loss {np.average(average_final_test_loss_repeats)} std {np.std(average_final_test_loss_repeats)}\n') - a.write(f'average_testing_accuracy {np.average(average_final_test_accuracy_repeats)} std {np.std(average_final_test_accuracy_repeats)}\n') - - print(f'average_testing_loss {np.average(average_final_test_loss_repeats)} std {np.std(average_final_test_loss_repeats)}') - print(f'average_testing_accuracy {np.average(average_final_test_accuracy_repeats)} std {np.std(average_final_test_accuracy_repeats)}') -ray.shutdown() + average_final_test_loss = np.average( + [row[0] for row in results], weights=test_data_weights, axis=0 + ) + average_final_test_accuracy = np.average( + [row[1] for row in results], weights=test_data_weights, axis=0 + ) + print(average_final_test_loss, average_final_test_accuracy) + # sleep(5) # wait for print message from remote workers + filename = ( + args.dataset + + "_" + + args.fedtype + + "_" + + str(args.num_layers) + + "_layer_" + + str(args.num_hops) + + "_hop_iid_beta_" + + str(args.iid_beta) + + "_n_trainer_" + + 
str(args.n_trainer) + + "_local_step_" + + str(args.local_step) + + ".txt" + ) + with open(filename, "a+") as a: + a.write(f"{average_final_test_loss} {average_final_test_accuracy}\n") + average_final_test_loss_repeats.append(average_final_test_loss) + average_final_test_accuracy_repeats.append(average_final_test_accuracy) -# - + # finish experiments + with open( + f"{args.dataset}_{args.fedtype}_{args.num_layers}_layer_{args.num_hops}_hop_iid_beta_{args.iid_beta}_n_trainer_{args.n_trainer}_local_step_{args.local_step}.txt", + "a+", + ) as a: + a.write( + f"average_testing_loss {np.average(average_final_test_loss_repeats)} std {np.std(average_final_test_loss_repeats)}\n" + ) + a.write( + f"average_testing_accuracy {np.average(average_final_test_accuracy_repeats)} std {np.std(average_final_test_accuracy_repeats)}\n" + ) + + print( + f"average_testing_loss {np.average(average_final_test_loss_repeats)} std {np.std(average_final_test_loss_repeats)}" + ) + print( + f"average_testing_accuracy {np.average(average_final_test_accuracy_repeats)} std {np.std(average_final_test_accuracy_repeats)}" + ) +ray.shutdown() diff --git a/src/gnn_models.py b/src/gnn_models.py index 7ae3193..0b70dda 100644 --- a/src/gnn_models.py +++ b/src/gnn_models.py @@ -1,12 +1,14 @@ import torch -from torch.nn import Linear import torch.nn.functional as F -from torch_geometric.nn import GCNConv, SAGEConv -from torch_geometric.nn import global_mean_pool +from torch.nn import Linear +from torch_geometric.nn import GCNConv, SAGEConv, global_mean_pool + class GCN(torch.nn.Module): - def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int): - ''' + def __init__( + self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int + ): + """ This constructor method initializes the GCN model Arguments: @@ -14,30 +16,28 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers nhid: (int) - Number of hidden features in the hidden layers of the network nclass: (int) - Number of output classes dropout: (float) - Dropout probability - NumLayers: (int) - Number of GCN layers in the network. - ''' + NumLayers: (int) - Number of GCN layers in the network. + """ super(GCN, self).__init__() self.convs = torch.nn.ModuleList() - self.convs.append( - GCNConv(nfeat, nhid, normalize=True, cached=True)) + self.convs.append(GCNConv(nfeat, nhid, normalize=True, cached=True)) for _ in range(NumLayers - 2): - self.convs.append( - GCNConv(nhid, nhid, normalize=True, cached=True)) - self.convs.append( - GCNConv(nhid, nclass, normalize=True, cached=True)) + self.convs.append(GCNConv(nhid, nhid, normalize=True, cached=True)) + self.convs.append(GCNConv(nhid, nclass, normalize=True, cached=True)) self.dropout = dropout - def reset_parameters(self): - ''' - This function is available to cater to weight initialization requirements as necessary. - ''' + def reset_parameters(self) -> None: + """ + This function is available to cater to weight initialization requirements as necessary. 
+ """ for conv in self.convs: conv.reset_parameters() + return None def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: - ''' + """ This function represents the forward pass computation of a GCN Arguments: @@ -47,7 +47,7 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: Returns: The output of the forward pass, a PyTorch tensor - ''' + """ for conv in self.convs[:-1]: x = conv(x, adj_t) x = F.relu(x) @@ -55,9 +55,12 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: x = self.convs[-1](x, adj_t) return torch.log_softmax(x, dim=-1) + class GCN_products(torch.nn.Module): - def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int): - ''' + def __init__( + self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int + ): + """ This constructor method initializes the GCN_products model Arguments: @@ -65,31 +68,29 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers nhid: (int) - Number of hidden features in the hidden layers of the network nclass: (int) - Number of output classes dropout: (float) - Dropout probability - NumLayers: (int) - Number of GCN layers in the network. - ''' + NumLayers: (int) - Number of GCN layers in the network. + """ super(GCN_products, self).__init__() - + self.convs = torch.nn.ModuleList() - self.convs.append( - GCNConv(nfeat, nhid, normalize=False)) + self.convs.append(GCNConv(nfeat, nhid, normalize=False)) for _ in range(NumLayers - 2): - self.convs.append( - GCNConv(nhid, nhid, normalize=False)) - self.convs.append( - GCNConv(nhid, nclass, normalize=False)) + self.convs.append(GCNConv(nhid, nhid, normalize=False)) + self.convs.append(GCNConv(nhid, nclass, normalize=False)) self.dropout = dropout - def reset_parameters(self): - ''' - This function is available to cater to weight initialization requirements as necessary. - ''' + def reset_parameters(self) -> None: + """ + This function is available to cater to weight initialization requirements as necessary. 
+ """ for conv in self.convs: conv.reset_parameters() + return None def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: - ''' + """ This function represents the forward pass computation of a GCN Arguments: @@ -98,18 +99,21 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: Returns: The output of the forward pass, a PyTorch tensor - - ''' + + """ for conv in self.convs[:-1]: x = conv(x, adj_t) x = F.relu(x) x = F.dropout(x, p=self.dropout, training=self.training) x = self.convs[-1](x, adj_t) return torch.log_softmax(x, dim=-1) - + + class SAGE_products(torch.nn.Module): - def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int): - ''' + def __init__( + self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int + ): + """ This constructor method initializes the Graph Sage model Arguments: @@ -117,8 +121,8 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers nhid: (int) - Number of hidden features in the hidden layers of the network nclass: (int) - Number of output classes dropout: (float) - Dropout probability - NumLayers: (int) - Number of Graph Sage layers in the network - ''' + NumLayers: (int) - Number of Graph Sage layers in the network + """ super(SAGE_products, self).__init__() self.convs = torch.nn.ModuleList() @@ -129,15 +133,16 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers self.dropout = dropout - def reset_parameters(self): - ''' - This function is available to cater to weight initialization requirements as necessary. - ''' + def reset_parameters(self) -> None: + """ + This function is available to cater to weight initialization requirements as necessary. + """ for conv in self.convs: conv.reset_parameters() + return None def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: - ''' + """ This function represents the forward pass computation of a GCN Arguments: @@ -146,19 +151,22 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: Returns: The output of the forward pass, a PyTorch tensor - - ''' + + """ for conv in self.convs[:-1]: x = conv(x, adj_t) x = F.relu(x) x = F.dropout(x, p=self.dropout, training=self.training) x = self.convs[-1](x, adj_t) return torch.log_softmax(x, dim=-1) - + + # + class GCN_arxiv(torch.nn.Module): - def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int): - ''' + def __init__( + self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers: int + ): + """ This constructor method initializes the Graph Sage model Arguments: @@ -166,8 +174,8 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers nhid: (int) - Number of hidden features in the hidden layers of the network nclass: (int) - Number of output classes dropout: (float) - Dropout probability - NumLayers: (int) - Number of Graph Sage layers in the network - ''' + NumLayers: (int) - Number of Graph Sage layers in the network + """ super(GCN_arxiv, self).__init__() self.convs = torch.nn.ModuleList() @@ -175,24 +183,24 @@ def __init__(self, nfeat: int, nhid: int, nclass: int, dropout: float, NumLayers self.bns = torch.nn.ModuleList() self.bns.append(torch.nn.BatchNorm1d(nhid)) for _ in range(NumLayers - 2): - self.convs.append( - GCNConv(nhid, nhid, cached=True)) + self.convs.append(GCNConv(nhid, nhid, cached=True)) self.bns.append(torch.nn.BatchNorm1d(nhid)) self.convs.append(GCNConv(nhid, nclass, cached=True)) self.dropout = dropout - def 
reset_parameters(self): - ''' - This function is available to cater to weight initialization requirements as necessary. - ''' + def reset_parameters(self) -> None: + """ + This function is available to cater to weight initialization requirements as necessary. + """ for conv in self.convs: conv.reset_parameters() for bn in self.bns: bn.reset_parameters() + return None def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: - ''' + """ This function represents the forward pass computation of a GCN Arguments: @@ -201,8 +209,8 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: Returns: The output of the forward pass, a PyTorch tensor - - ''' + + """ for i, conv in enumerate(self.convs[:-1]): x = conv(x, adj_t) x = self.bns[i](x) @@ -210,34 +218,3 @@ def forward(self, x: torch.Tensor, adj_t: torch.Tensor) -> torch.Tensor: x = F.dropout(x, p=self.dropout, training=self.training) x = self.convs[-1](x, adj_t) return x.log_softmax(dim=-1) - - - -# - -#REDUNDANT - -class GCN_Graph_Classification(torch.nn.Module): - def __init__(self, num_node_features, nhid, num_classes): - super(GCN_Graph_Classification, self).__init__() - torch.manual_seed(12345) - self.conv1 = GCNConv(num_node_features, nhid) - self.conv2 = GCNConv(nhid, nhid) - self.conv3 = GCNConv(nhid, nhid) - self.lin = Linear(nhid, num_classes) - - def forward(self, x, edge_index, batch): - # 1. Obtain node embeddings - x = self.conv1(x, edge_index) - x = x.relu() - x = self.conv2(x, edge_index) - x = x.relu() - x = self.conv3(x, edge_index) - - # 2. Readout layer - x = global_mean_pool(x, batch) # [batch_size, nhid] - - # 3. Apply a final classifier - x = F.dropout(x, p=0.5, training=self.training) - x = self.lin(x) - - return x diff --git a/src/server_class.py b/src/server_class.py new file mode 100644 index 0000000..1a2be2c --- /dev/null +++ b/src/server_class.py @@ -0,0 +1,79 @@ +from typing import Any + +import ray +import torch +from gnn_models import GCN, GCN_arxiv, SAGE_products +from trainer_class import Trainer_General + + +class Server: + def __init__( + self, + feature_dim: int, + args_hidden: int, + class_num: int, + device: torch.device, + trainers: list[Trainer_General], + args: Any, + ) -> None: + # server model on cpu + if args.dataset == "ogbn-arxiv": + self.model = GCN_arxiv( + nfeat=feature_dim, + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ) + elif args.dataset == "ogbn-products": + self.model = SAGE_products( + nfeat=feature_dim, + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ) + else: # CORA, CITESEER, PUBMED, REDDIT + self.model = GCN( + nfeat=feature_dim, + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ) + + self.trainers = trainers + self.num_of_trainers = len(trainers) + self.broadcast_params(-1) + + @torch.no_grad() + def zero_params(self) -> None: + for p in self.model.parameters(): + p.zero_() + + @torch.no_grad() + def train(self, current_global_epoch: int) -> None: + for trainer in self.trainers: + trainer.train.remote(current_global_epoch) + params = [trainer.get_params.remote() for trainer in self.trainers] + self.zero_params() + + while True: + ready, left = ray.wait(params, num_returns=1, timeout=None) + if ready: + for t in ready: + for p, mp in zip(ray.get(t), self.model.parameters()): + mp.data += p.cpu() + params = left + if not params: + break + + for p in self.model.parameters(): + p /= self.num_of_trainers + 
self.broadcast_params(current_global_epoch) + + def broadcast_params(self, current_global_epoch: int) -> None: + for trainer in self.trainers: + trainer.update_params.remote( + tuple(self.model.parameters()), current_global_epoch + ) # run in submit order diff --git a/src/train.py b/src/train_func.py similarity index 64% rename from src/train.py rename to src/train_func.py index acb440d..90482f1 100644 --- a/src/train.py +++ b/src/train_func.py @@ -1,9 +1,9 @@ import torch import torch.nn.functional as F -from sklearn import metrics -def accuracy(output: torch.Tensor, labels: torch.Tensor) -> float: - ''' + +def accuracy(output: torch.Tensor, labels: torch.Tensor) -> torch.Tensor: + """ This function returns the accuracy of the output with respect to the ground truth given Arguments: @@ -13,17 +13,24 @@ def accuracy(output: torch.Tensor, labels: torch.Tensor) -> float: Returns: The accuracy of the model (float) - ''' + """ preds = output.max(1)[1].type_as(labels) correct = preds.eq(labels).double() correct = correct.sum() return correct / len(labels) -def test(model: torch.nn.Module, features: torch.Tensor, adj: torch.Tensor, labels: torch.Tensor, idx_test: torch.Tensor) -> tuple: - ''' - This function tests the model and calculates the loss and accuracy - + +def test( + model: torch.nn.Module, + features: torch.Tensor, + adj: torch.Tensor, + labels: torch.Tensor, + idx_test: torch.Tensor, +) -> tuple: + """ + This function tests the model and calculates the loss and accuracy + Arguments: model: (torch.nn.Module) - Specific model passed features: (torch.Tensor) - Tensor representing the input features @@ -33,22 +40,29 @@ def test(model: torch.nn.Module, features: torch.Tensor, adj: torch.Tensor, labe Returns: The loss and accuracy of the model - - ''' + + """ model.eval() output = model(features, adj) - pred_labels=torch.argmax(output,axis=1) + pred_labels = torch.argmax(output, axis=1) loss_test = F.nll_loss(output[idx_test], labels[idx_test]) acc_test = accuracy(output[idx_test], labels[idx_test]) - - return loss_test.item(), acc_test.item()#, f1_test, auc_test + + return loss_test.item(), acc_test.item() # , f1_test, auc_test +def train( + epoch: int, + model: torch.nn.Module, + optimizer: torch.optim.Optimizer, + features: torch.Tensor, + adj: torch.Tensor, + labels: torch.Tensor, + idx_train: torch.Tensor, +) -> tuple: # Centralized or new FL + """ + This function trains the model and returns the loss and accuracy -def train(epoch: int, model: torch.nn.Module, optimizer: torch.optim.Optimizer, features: torch.Tensor, adj: torch.Tensor, labels: torch.Tensor, idx_train: torch.Tensor) -> tuple: #Centralized or new FL - ''' - This function trains the model and returns the loss and accuracy - Arguments: model: (torch.nn.Module) - Specific model passed features: (torch.FloatTensor) - Tensor representing the input features @@ -60,24 +74,33 @@ def train(epoch: int, model: torch.nn.Module, optimizer: torch.optim.Optimizer, Returns: The loss and accuracy of the model - - ''' - + + """ + model.train() optimizer.zero_grad() - + output = model(features, adj) loss_train = F.nll_loss(output[idx_train], labels[idx_train]) acc_train = accuracy(output[idx_train], labels[idx_train]) loss_train.backward() optimizer.step() optimizer.zero_grad() - + return loss_train.item(), acc_train.item() -def Lhop_Block_matrix_train(epoch: int, model: torch.nn.Module, optimizer: torch.optim.Optimizer, features: torch.Tensor, adj: torch.Tensor, labels: torch.Tensor, communicate_index: torch.Tensor, 
in_com_train_data_index: torch.Tensor) -> tuple: - ''' +def Lhop_Block_matrix_train( + epoch: int, + model: torch.nn.Module, + optimizer: torch.optim.Optimizer, + features: torch.Tensor, + adj: torch.Tensor, + labels: torch.Tensor, + communicate_index: torch.Tensor, + in_com_train_data_index: torch.Tensor, +) -> tuple: + """ Arguments: model: (model type) - Specific model passed features: (torch.FloatTensor) - Tensor representing the input features @@ -90,30 +113,45 @@ def Lhop_Block_matrix_train(epoch: int, model: torch.nn.Module, optimizer: torch Returns: The loss and accuracy of the model - - ''' + + """ model.train() optimizer.zero_grad() - output = model(features[communicate_index], adj[communicate_index][:,communicate_index]) - - - loss_train = F.nll_loss(output[in_com_train_data_index], labels[communicate_index][in_com_train_data_index]) - - - acc_train = accuracy(output[in_com_train_data_index], labels[communicate_index][in_com_train_data_index]) - + output = model( + features[communicate_index], adj[communicate_index][:, communicate_index] + ) + + loss_train = F.nll_loss( + output[in_com_train_data_index], + labels[communicate_index][in_com_train_data_index], + ) + + acc_train = accuracy( + output[in_com_train_data_index], + labels[communicate_index][in_com_train_data_index], + ) loss_train.backward() optimizer.step() optimizer.zero_grad() return loss_train.item(), acc_train.item() -def FedSage_train(epoch: int, model: torch.nn.Module, optimizer: torch.optim.Optimizer, features: torch.Tensor, adj: torch.Tensor, labels: torch.Tensor, communicate_index: torch.Tensor, in_com_train_data_index: torch.Tensor) -> tuple: - ''' + +def FedSage_train( + epoch: int, + model: torch.nn.Module, + optimizer: torch.optim.Optimizer, + features: torch.Tensor, + adj: torch.Tensor, + labels: torch.Tensor, + communicate_index: torch.Tensor, + in_com_train_data_index: torch.Tensor, +) -> tuple: + """ This function is to train the FedSage model - + Arguments: model: (model type) - Specific model passed features: (torch.FloatTensor) - Tensor representing the input features @@ -126,20 +164,24 @@ def FedSage_train(epoch: int, model: torch.nn.Module, optimizer: torch.optim.Opt Returns: The loss and accuracy of the model - - ''' - + + """ + model.train() optimizer.zero_grad() - #print(features.shape) - - output = model(features, adj[communicate_index][:,communicate_index]) - - loss_train = F.nll_loss(output[in_com_train_data_index], labels[communicate_index][in_com_train_data_index]) - - - acc_train = accuracy(output[in_com_train_data_index], labels[communicate_index][in_com_train_data_index]) - + # print(features.shape) + + output = model(features, adj[communicate_index][:, communicate_index]) + + loss_train = F.nll_loss( + output[in_com_train_data_index], + labels[communicate_index][in_com_train_data_index], + ) + + acc_train = accuracy( + output[in_com_train_data_index], + labels[communicate_index][in_com_train_data_index], + ) loss_train.backward() optimizer.step() diff --git a/src/trainer_class.py b/src/trainer_class.py new file mode 100644 index 0000000..5f4deeb --- /dev/null +++ b/src/trainer_class.py @@ -0,0 +1,128 @@ +from typing import Any + +import numpy as np +import torch +from gnn_models import GCN, GCN_arxiv, SAGE_products +from train_func import test, train + + +class Trainer_General: + def __init__( + self, + rank: int, + adj: torch.Tensor, + labels: torch.Tensor, + features: torch.Tensor, + idx_train: torch.Tensor, + idx_test: torch.Tensor, + args_hidden: int, + class_num: int, + device: 
torch.device, + args: Any, + ): + # from gnn_models import GCN_Graph_Classification + torch.manual_seed(rank) + + # seems that new trainer process will not inherit sys.path from parent, need to reimport! + if args.dataset == "ogbn-arxiv": + self.model = GCN_arxiv( + nfeat=features.shape[1], + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ).to(device) + elif args.dataset == "ogbn-products": + self.model = SAGE_products( + nfeat=features.shape[1], + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ).to(device) + else: + self.model = GCN( + nfeat=features.shape[1], + nhid=args_hidden, + nclass=class_num, + dropout=0.5, + NumLayers=args.num_layers, + ).to(device) + + self.rank = rank # rank = client ID + + self.device = device + + self.optimizer = torch.optim.SGD( + self.model.parameters(), lr=args.learning_rate, weight_decay=5e-4 + ) + + self.criterion = torch.nn.CrossEntropyLoss() + + self.train_losses: list = [] + self.train_accs: list = [] + + self.test_losses: list = [] + self.test_accs: list = [] + + self.adj = adj.to(device) + self.labels = labels.to(device) + self.features = features.to(device) + self.idx_train = idx_train.to(device) + self.idx_test = idx_test.to(device) + + self.local_step = args.local_step + + @torch.no_grad() + def update_params(self, params: tuple, current_global_epoch: int) -> None: + # load global parameter from global server + self.model.to("cpu") + for ( + p, + mp, + ) in zip(params, self.model.parameters()): + mp.data = p + self.model.to(self.device) + + def train(self, current_global_round: int) -> None: + # clean cache + torch.cuda.empty_cache() + for iteration in range(self.local_step): + self.model.train() + + loss_train, acc_train = train( + iteration, + self.model, + self.optimizer, + self.features, + self.adj, + self.labels, + self.idx_train, + ) + self.train_losses.append(loss_train) + self.train_accs.append(acc_train) + + loss_test, acc_test = self.local_test() + self.test_losses.append(loss_test) + self.test_accs.append(acc_test) + + def local_test(self) -> list: + local_test_loss, local_test_acc = test( + self.model, self.features, self.adj, self.labels, self.idx_test + ) + return [local_test_loss, local_test_acc] + + def get_params(self) -> tuple: + self.optimizer.zero_grad(set_to_none=True) + return tuple(self.model.parameters()) + + def get_all_loss_accuray(self) -> list: + return [ + np.array(self.train_losses), + np.array(self.train_accs), + np.array(self.test_losses), + np.array(self.test_accs), + ] + + def get_rank(self) -> int: + return self.rank diff --git a/src/utils.py b/src/utils.py index bf7e3bc..4130e7d 100644 --- a/src/utils.py +++ b/src/utils.py @@ -1,13 +1,15 @@ -import torch -import numpy as np -import torch_geometric -from pathlib import Path import glob import re +from pathlib import Path + +import numpy as np +import torch +import torch_geometric import torch_sparse + def intersect1d(t1: torch.Tensor, t2: torch.Tensor) -> torch.Tensor: - ''' + """ This function concatenates the two input tensors, finding common elements between these two Argument: @@ -16,7 +18,7 @@ def intersect1d(t1: torch.Tensor, t2: torch.Tensor) -> torch.Tensor: Return: intersection: (PyTorch tensor) - Intersection of the two input tensors - ''' + """ combined = torch.cat((t1, t2)) uniques, counts = combined.unique(return_counts=True) intersection = uniques[counts > 1] @@ -24,26 +26,28 @@ def intersect1d(t1: torch.Tensor, t2: torch.Tensor) -> torch.Tensor: def setdiff1d(t1: 
torch.Tensor, t2: torch.Tensor) -> torch.Tensor: - ''' - This function computes the set difference between the two input tensors + """ + This function computes the set difference between the two input tensors Arguments: t1: (PyTorch tensor) - The first input tensor for the operation t2: (PyTorch tensor) - The second input tensor for the operation Return: - difference: (PyTorch tensor) - Difference in elements of the two input tensors + difference: (PyTorch tensor) - Difference in elements of the two input tensors + + """ - ''' - combined = torch.cat((t1, t2)) uniques, counts = combined.unique(return_counts=True) difference = uniques[counts == 1] return difference -def label_dirichlet_partition(labels: np.array, N: int, K: int, n_parties: int, beta: float) -> list: - ''' +def label_dirichlet_partition( + labels: np.array, N: int, K: int, n_parties: int, beta: float +) -> list: + """ This function partitions data based on labels by using the Dirichlet distribution, to ensure even distribution of samples Arguments: @@ -56,26 +60,34 @@ def label_dirichlet_partition(labels: np.array, N: int, K: int, n_parties: int, Return: split_data_indexes (list) - list indices of data points assigned into groups - ''' + """ min_size = 0 min_require_size = 10 split_data_indexes = [] while min_size < min_require_size: - idx_batch = [[] for _ in range(n_parties)] + idx_batch: list[list[int]] = [[] for _ in range(n_parties)] for k in range(K): idx_k = np.where(labels == k)[0] np.random.shuffle(idx_k) proportions = np.random.dirichlet(np.repeat(beta, n_parties)) - - proportions = np.array([p * (len(idx_j) < N / n_parties) for p, idx_j in zip(proportions, idx_batch)]) - + + proportions = np.array( + [ + p * (len(idx_j) < N / n_parties) + for p, idx_j in zip(proportions, idx_batch) + ] + ) + proportions = proportions / proportions.sum() - + proportions = (np.cumsum(proportions) * len(idx_k)).astype(int)[:-1] - - idx_batch = [idx_j + idx.tolist() for idx_j, idx in zip(idx_batch, np.split(idx_k, proportions))] + + idx_batch = [ + idx_j + idx.tolist() + for idx_j, idx in zip(idx_batch, np.split(idx_k, proportions)) + ] min_size = min([len(idx_j) for idx_j in idx_batch]) for j in range(n_parties): @@ -84,7 +96,13 @@ def label_dirichlet_partition(labels: np.array, N: int, K: int, n_parties: int, return split_data_indexes -def parition_non_iid(non_iid_percent: float, labels: torch.Tensor, num_clients: int, nclass: int, args_cuda: bool) -> list: +def parition_non_iid( + non_iid_percent: float, + labels: torch.Tensor, + num_clients: int, + nclass: int, + args_cuda: bool, +) -> list: """ This function partitions data into non-IID subsets. 
@@ -100,41 +118,67 @@ def parition_non_iid(non_iid_percent: float, labels: torch.Tensor, num_clients: """ split_data_indexes = [] - iid_indexes = [] #random assign - shuffle_labels = [] #make train data points split into different devices + iid_indexes = [] # random assign + shuffle_labels = [] # make train data points split into different devices for i in range(num_clients): current = torch.nonzero(labels == i).reshape(-1) - current = current[np.random.permutation(len(current))] #shuffle + current = current[np.random.permutation(len(current))] # shuffle shuffle_labels.append(current) average_device_of_class = num_clients // nclass - if num_clients % nclass != 0: #for non-iid + if num_clients % nclass != 0: # for non-iid average_device_of_class += 1 for i in range(num_clients): - label_i= i // average_device_of_class + label_i = i // average_device_of_class labels_class = shuffle_labels[label_i] - average_num= int(len(labels_class)//average_device_of_class * non_iid_percent) - split_data_indexes.append((labels_class[average_num * (i % average_device_of_class):average_num * (i % average_device_of_class + 1)])) + average_num = int( + len(labels_class) // average_device_of_class * non_iid_percent + ) + split_data_indexes.append( + ( + labels_class[ + average_num + * (i % average_device_of_class) : average_num + * (i % average_device_of_class + 1) + ] + ) + ) if args_cuda: - iid_indexes = setdiff1d(torch.tensor(range(len(labels))).cuda(), torch.cat(split_data_indexes)) + iid_indexes = setdiff1d( + torch.tensor(range(len(labels))).cuda(), torch.cat(split_data_indexes) + ) else: - iid_indexes = setdiff1d(torch.tensor(range(len(labels))), torch.cat(split_data_indexes)) + iid_indexes = setdiff1d( + torch.tensor(range(len(labels))), torch.cat(split_data_indexes) + ) iid_indexes = iid_indexes[np.random.permutation(len(iid_indexes))] - for i in range(num_clients): #for iid - label_i= i // average_device_of_class + for i in range(num_clients): # for iid + label_i = i // average_device_of_class labels_class = shuffle_labels[label_i] - average_num= int(len(labels_class)//average_device_of_class * (1 - non_iid_percent)) - split_data_indexes[i] = list(split_data_indexes[i]) + list(iid_indexes[:average_num]) + average_num = int( + len(labels_class) // average_device_of_class * (1 - non_iid_percent) + ) + split_data_indexes[i] = list(split_data_indexes[i]) + list( + iid_indexes[:average_num] + ) iid_indexes = iid_indexes[average_num:] return split_data_indexes -def get_in_comm_indexes(edge_index: torch.Tensor, split_data_indexes: list, num_clients: int, L_hop: int, idx_train: torch.Tensor, idx_test: torch.Tensor) -> tuple: - ''' + +def get_in_comm_indexes( + edge_index: torch.Tensor, + split_data_indexes: list, + num_clients: int, + L_hop: int, + idx_train: torch.Tensor, + idx_test: torch.Tensor, +) -> tuple: + """ This function is used to extract and preprocess data indices and edge information Arguments: @@ -149,90 +193,153 @@ def get_in_comm_indexes(edge_index: torch.Tensor, split_data_indexes: list, num_ communicate_indexes: (list) - A list of indices assigned to a particular client in_com_train_data_indexes: (list) - A list of tensors where each tensor contains the indices of training data points available to each client edge_indexes_clients: (list) - A list of edge tensors representing the edges between nodes within each client's subgraph - ''' + """ communicate_indexes = [] in_com_train_data_indexes = [] edge_indexes_clients = [] - - for i in range(num_clients): + + for i in range(num_clients): 
communicate_index = split_data_indexes[i] - + if L_hop == 0: - communicate_index, current_edge_index, _, __ = torch_geometric.utils.k_hop_subgraph(communicate_index,0,edge_index, relabel_nodes=True) + ( + communicate_index, + current_edge_index, + _, + __, + ) = torch_geometric.utils.k_hop_subgraph( + communicate_index, 0, edge_index, relabel_nodes=True + ) del _ del __ for hop in range(L_hop): - if hop != L_hop-1: - communicate_index = torch_geometric.utils.k_hop_subgraph(communicate_index,1,edge_index, relabel_nodes=True)[0] + if hop != L_hop - 1: + communicate_index = torch_geometric.utils.k_hop_subgraph( + communicate_index, 1, edge_index, relabel_nodes=True + )[0] else: - communicate_index, current_edge_index, _, __ = torch_geometric.utils.k_hop_subgraph(communicate_index,1,edge_index, relabel_nodes=True) + ( + communicate_index, + current_edge_index, + _, + __, + ) = torch_geometric.utils.k_hop_subgraph( + communicate_index, 1, edge_index, relabel_nodes=True + ) del _ del __ - communicate_index = communicate_index.to('cpu') - current_edge_index = current_edge_index.to('cpu') + communicate_index = communicate_index.to("cpu") + current_edge_index = current_edge_index.to("cpu") communicate_indexes.append(communicate_index) - - current_edge_index = torch_sparse.SparseTensor(row=current_edge_index[0], col=current_edge_index[1], sparse_sizes=(len(communicate_index), len(communicate_index))) - - edge_indexes_clients.append(current_edge_index) - inter = intersect1d(split_data_indexes[i], idx_train) ###only count the train data of nodes in current server(not communicate nodes) + current_edge_index = torch_sparse.SparseTensor( + row=current_edge_index[0], + col=current_edge_index[1], + sparse_sizes=(len(communicate_index), len(communicate_index)), + ) + edge_indexes_clients.append(current_edge_index) - in_com_train_data_indexes.append(torch.searchsorted(communicate_indexes[i], inter).clone()) #local id in block matrix + inter = intersect1d( + split_data_indexes[i], idx_train + ) ###only count the train data of nodes in current server(not communicate nodes) + + in_com_train_data_indexes.append( + torch.searchsorted(communicate_indexes[i], inter).clone() + ) # local id in block matrix in_com_test_data_indexes = [] for i in range(num_clients): - inter = intersect1d(split_data_indexes[i], idx_test) - in_com_test_data_indexes.append(torch.searchsorted(communicate_indexes[i], inter).clone()) - return communicate_indexes, in_com_train_data_indexes, in_com_test_data_indexes, edge_indexes_clients - - -def get_in_comm_indexes_BDS_GCN(edge_index: torch.Tensor, split_data_indexes: list, num_clients: int, idx_train: torch.Tensor, idx_test: torch.Tensor, sample_rate: float = 0.5) -> tuple: + inter = intersect1d(split_data_indexes[i], idx_test) + in_com_test_data_indexes.append( + torch.searchsorted(communicate_indexes[i], inter).clone() + ) + return ( + communicate_indexes, + in_com_train_data_indexes, + in_com_test_data_indexes, + edge_indexes_clients, + ) + + +def get_in_comm_indexes_BDS_GCN( + edge_index: torch.Tensor, + split_data_indexes: list, + num_clients: int, + idx_train: torch.Tensor, + idx_test: torch.Tensor, + sample_rate: float = 0.5, +) -> tuple: communicate_indexes = [] in_com_train_data_indexes = [] edge_indexes_clients = [] - - for i in range(num_clients): + + for i in range(num_clients): communicate_index = split_data_indexes[i] - - communicate_index = torch_geometric.utils.k_hop_subgraph(communicate_index,1,edge_index)[0].cpu() - + + communicate_index = 
torch_geometric.utils.k_hop_subgraph( + communicate_index, 1, edge_index + )[0].cpu() + diff = setdiff1d(split_data_indexes[i], communicate_index) - sample_index = torch.cat((split_data_indexes[i], diff[torch.randperm(len(diff))[:int(len(diff) * sample_rate)]])).clone() + sample_index = torch.cat( + ( + split_data_indexes[i], + diff[torch.randperm(len(diff))[: int(len(diff) * sample_rate)]], + ) + ).clone() sample_index = sample_index.sort()[0] - - #get edge_index with relabel_nodes - communicate_index, current_edge_index, _, __ = torch_geometric.utils.k_hop_subgraph(sample_index,0,edge_index, relabel_nodes=True) + + # get edge_index with relabel_nodes + ( + communicate_index, + current_edge_index, + _, + __, + ) = torch_geometric.utils.k_hop_subgraph( + sample_index, 0, edge_index, relabel_nodes=True + ) del _ del __ - communicate_index = communicate_index.to('cpu') - current_edge_index = current_edge_index.to('cpu') + communicate_index = communicate_index.to("cpu") + current_edge_index = current_edge_index.to("cpu") communicate_indexes.append(communicate_index) - - current_edge_index = torch_sparse.SparseTensor(row=current_edge_index[0], col=current_edge_index[1], sparse_sizes=(len(communicate_index), len(communicate_index))) - - edge_indexes_clients.append(current_edge_index) - - inter = intersect1d(split_data_indexes[i], idx_train) ###only count the train data of nodes in current server(not communicate nodes) + current_edge_index = torch_sparse.SparseTensor( + row=current_edge_index[0], + col=current_edge_index[1], + sparse_sizes=(len(communicate_index), len(communicate_index)), + ) - in_com_train_data_indexes.append(torch.searchsorted(communicate_indexes[i], inter).clone()) #local id in block matrix + edge_indexes_clients.append(current_edge_index) + inter = intersect1d( + split_data_indexes[i], idx_train + ) ###only count the train data of nodes in current server(not communicate nodes) + in_com_train_data_indexes.append( + torch.searchsorted(communicate_indexes[i], inter).clone() + ) # local id in block matrix in_com_test_data_indexes = [] for i in range(num_clients): - inter = intersect1d(split_data_indexes[i], idx_test) - in_com_test_data_indexes.append(torch.searchsorted(communicate_indexes[i], inter).clone()) - return communicate_indexes, in_com_train_data_indexes, in_com_test_data_indexes, edge_indexes_clients - - -def increment_dir(dir: str, comment: str ='') -> str: - ''' + inter = intersect1d(split_data_indexes[i], idx_test) + in_com_test_data_indexes.append( + torch.searchsorted(communicate_indexes[i], inter).clone() + ) + return ( + communicate_indexes, + in_com_train_data_indexes, + in_com_test_data_indexes, + edge_indexes_clients, + ) + + +def increment_dir(dir: str, comment: str = "") -> str: + """ This function is used to create a new directory path by incrementing a numeric suffix in the original directory path Arguments: @@ -242,14 +349,14 @@ def increment_dir(dir: str, comment: str ='') -> str: Returns: Returns a string with the path of the new directory - ''' + """ # Increments a directory runs/exp1 --> runs/exp2_comment n = 0 # number dir = str(Path(dir)) # os-agnostic - dirs = sorted(glob.glob(dir + '*')) # directories + dirs = sorted(glob.glob(dir + "*")) # directories if dirs: matches = [re.search(r"exp(\d+)", d) for d in dirs] idxs = [int(m.groups()[0]) for m in matches if m] if idxs: n = max(idxs) + 1 # increment - return dir + str(n) + ('_' + comment if comment else '') + return dir + str(n) + ("_" + comment if comment else "")
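
Reviewer note: the aggregation step added in src/server_class.py is an unweighted parameter average. Server.train zeroes the global model, sums the parameter tuples returned by each Ray trainer as they become ready, divides by the number of trainers, and broadcasts the result back via broadcast_params. The sketch below is a minimal local illustration of that arithmetic only; it does not use Ray, and the tensors are hypothetical stand-ins for what trainer.get_params would return.

import torch

# Hypothetical stand-ins for the parameter tuples each trainer would return
# via get_params(); in the real code these arrive as Ray object refs.
trainer_params = [
    [torch.ones(2, 2) * (i + 1), torch.ones(3) * (i + 1)] for i in range(3)
]

# Global model parameters, zeroed as in Server.zero_params().
global_params = [torch.zeros(2, 2), torch.zeros(3)]

# Sum each trainer's parameters into the global copy
# (mirrors the ray.wait loop in Server.train; the diff also moves each tensor to CPU).
for params in trainer_params:
    for p, gp in zip(params, global_params):
        gp += p

# Unweighted average over trainers; broadcast_params() would then push this back out.
num_trainers = len(trainer_params)
for gp in global_params:
    gp /= num_trainers

print(global_params[0])  # every entry is (1 + 2 + 3) / 3 = 2.0

Because the divisor is simply the trainer count, this is FedAvg-style averaging without weighting by each client's local sample size; clients with few training nodes contribute as much to the global model as large ones.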