first
This commit is contained in:

737 gcdata/DSJC0125.1.txt Normal file
@@ -0,0 +1,737 @@
125 736 5
4 0
5 1
7 3
8 3
8 5
10 1
12 4
13 6
13 8
13 12
14 7
15 9
15 11
16 1
17 11
18 4
18 7
18 10
20 6
20 7
21 16
22 12
23 20
24 2
24 9
(remaining 711 edge lines omitted here; the list continues in the same "u v" format and ends with "124 109")
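The file above is a plain edge list: a header line, then one "u v" pair per line. A minimal loader sketch follows, assuming the header fields are the vertex count, the edge count, and a third value that is not interpreted here; load_gc_file is an illustrative helper, not a function shipped in this commit. It builds the 0/1 adjacency matrix that gcdata/gc1.py (below) expects.

import numpy as np

def load_gc_file(path):
    # Hypothetical helper (not part of this commit): read a gcdata edge-list file
    # into the symmetric 0/1 adjacency matrix consumed by graph_coloring_v8.
    with open(path) as f:
        lines = [line.split() for line in f if line.strip()]
    # Header assumed to be "<vertices> <edges> <k>"; the third field is ignored here.
    n_vertices, n_edges = int(lines[0][0]), int(lines[0][1])
    adj = np.zeros((n_vertices, n_vertices), dtype=int)
    for u_str, v_str in lines[1:]:
        u, v = int(u_str), int(v_str)
        adj[u, v] = adj[v, u] = 1  # undirected edge
    assert len(lines) - 1 == n_edges
    return adj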
3892 gcdata/DSJC0125.5.txt Normal file
File diff suppressed because it is too large

6962 gcdata/DSJC0125.9.txt Normal file
File diff suppressed because it is too large

3219 gcdata/DSJC0250.1.txt Normal file
File diff suppressed because it is too large

15669 gcdata/DSJC0250.5.txt Normal file
File diff suppressed because it is too large

27898 gcdata/DSJC0250.9.txt Normal file
File diff suppressed because it is too large

12459 gcdata/DSJC0500.1.txt Normal file
File diff suppressed because it is too large

62625 gcdata/DSJC0500.5.txt Normal file
File diff suppressed because it is too large

112438 gcdata/DSJC0500.9.txt Normal file
File diff suppressed because it is too large

49630 gcdata/DSJC1000.1.txt Normal file
File diff suppressed because it is too large

249827 gcdata/DSJC1000.5.txt Normal file
File diff suppressed because it is too large

449450 gcdata/DSJC1000.9.txt Normal file
File diff suppressed because it is too large

0 gcdata/__init__.py Normal file

BIN gcdata/__pycache__/__init__.cpython-311.pyc Normal file
Binary file not shown.

BIN gcdata/__pycache__/gc.cpython-311.pyc Normal file
Binary file not shown.

BIN gcdata/__pycache__/gc1.cpython-311.pyc Normal file
Binary file not shown.

1640 gcdata/gc.py Normal file
File diff suppressed because it is too large

666 gcdata/gc1.py Normal file
@@ -0,0 +1,666 @@
def graph_coloring_v8(adj_matrix):
    """Advanced hybrid graph coloring algorithm.

    Combines the strengths of several algorithms, using mathematical optimization
    and a multi-stage strategy to reduce the number of colors needed as far as possible:
    1. Graph decomposition and recombination
    2. Multi-stage hybrid strategy
    3. Advanced preprocessing and structure identification
    4. Iterative color reduction
    5. Adaptive parameter tuning
    """
    import numpy as np
    import random
    import heapq
    import time
    from collections import defaultdict, deque
    from itertools import combinations

    start_time = time.time()
    max_time = 60  # maximum running time (seconds)

    n = adj_matrix.shape[0]
    best_coloring = np.full(n, -1)
    best_color_count = n

    # Build adjacency lists to speed up computation
    adj_lists = [np.where(adj_matrix[i] == 1)[0] for i in range(n)]

    # Compute vertex degrees
    degrees = np.sum(adj_matrix, axis=1)
    max_degree = np.max(degrees)

    # 1. Advanced preprocessing and structure identification

    def find_max_clique():
        """Find a large clique using the Bron-Kerbosch algorithm."""
        def bron_kerbosch(r, p, x, max_clique):
            if len(p) == 0 and len(x) == 0:
                if len(r) > len(max_clique[0]):
                    max_clique[0] = r.copy()
                return

            # Choose a pivot to prune the branching
            if p:
                pivot = max(p, key=lambda v: len(set(adj_lists[v]) & p))
                p_minus_n_pivot = p - set(adj_lists[pivot])
            else:
                p_minus_n_pivot = p

            for v in list(p_minus_n_pivot):
                neighbors_v = set(adj_lists[v])
                bron_kerbosch(r | {v}, p & neighbors_v, x & neighbors_v, max_clique)
                p.remove(v)
                x.add(v)

                # Early termination condition
                if time.time() - start_time > max_time / 10:
                    return

        max_clique = [set()]
        vertices = set(range(n))

        # Speed up with a degree-ordering heuristic
        sorted_vertices = sorted(range(n), key=lambda v: degrees[v], reverse=True)
        for v in sorted_vertices[:min(100, n)]:  # limit the number of starting vertices
            if time.time() - start_time > max_time / 10:
                break
            r = {v}
            p = set(adj_lists[v]) & vertices
            x = vertices - p - r
            bron_kerbosch(r, p, x, max_clique)

        return list(max_clique[0])

    def identify_independent_sets():
        """Identify large independent sets."""
        independent_sets = []
        remaining = set(range(n))

        while remaining and time.time() - start_time < max_time / 10:
            # Start from the vertex of minimum degree
            start_vertex = min(remaining, key=lambda v: degrees[v])
            ind_set = {start_vertex}
            candidates = remaining - {start_vertex} - set(adj_lists[start_vertex])

            # Greedily extend the independent set
            while candidates:
                # Add the candidate of minimum degree
                v = min(candidates, key=lambda v: degrees[v])
                ind_set.add(v)
                candidates -= {v} | set(adj_lists[v])

            independent_sets.append(ind_set)
            remaining -= ind_set

        return independent_sets

    def find_bipartite_subgraphs():
        """Identify bipartite substructures."""
        bipartite_subgraphs = []
        visited = np.zeros(n, dtype=bool)

        def bfs_bipartite(start):
            queue = deque([(start, 0)])  # (vertex, color)
            coloring = {start: 0}
            component = {start}

            while queue and time.time() - start_time < max_time / 10:
                v, color = queue.popleft()
                next_color = 1 - color

                for u in adj_lists[v]:
                    if u not in coloring:
                        coloring[u] = next_color
                        component.add(u)
                        queue.append((u, next_color))
                    elif coloring[u] == color:  # conflict: not bipartite
                        return None

            # Split into the two sides
            part0 = {v for v, c in coloring.items() if c == 0}
            part1 = {v for v, c in coloring.items() if c == 1}
            return (part0, part1)

        # Find all connected bipartite subgraphs
        for i in range(n):
            if not visited[i] and time.time() - start_time < max_time / 10:
                result = bfs_bipartite(i)
                if result:
                    part0, part1 = result
                    bipartite_subgraphs.append((part0, part1))
                    for v in part0 | part1:
                        visited[v] = True

        return bipartite_subgraphs

    # 2. Graph decomposition techniques

    def decompose_graph():
        """Decompose the graph into more tractable components."""
        # Find articulation points (and bridges)
        articulation_points = find_articulation_points()

        # Connected components after removing the articulation points
        components = []
        visited = np.zeros(n, dtype=bool)

        def dfs(v, component):
            visited[v] = True
            component.add(v)
            for u in adj_lists[v]:
                if not visited[u] and u not in articulation_points:
                    dfs(u, component)

        # Find all connected components
        for i in range(n):
            if not visited[i] and i not in articulation_points:
                component = set()
                dfs(i, component)
                if component:
                    components.append(component)

        # If no useful decomposition was found, return the whole graph
        if not components:
            return [set(range(n))]

        return components

    def find_articulation_points():
        """Find articulation points with Tarjan's algorithm."""
        disc = [-1] * n
        low = [-1] * n
        visited = [False] * n
        ap = set()
        timer = [0]

        def dfs(u, parent):
            children = 0
            visited[u] = True
            disc[u] = low[u] = timer[0]
            timer[0] += 1

            for v in adj_lists[u]:
                if not visited[v]:
                    children += 1
                    dfs(v, u)
                    low[u] = min(low[u], low[v])

                    # Check whether u is an articulation point
                    if parent != -1 and low[v] >= disc[u]:
                        ap.add(u)
                elif v != parent:
                    low[u] = min(low[u], disc[v])

            # u is an articulation point if it is the root and has more than one child
            if parent == -1 and children > 1:
                ap.add(u)

        for i in range(n):
            if not visited[i]:
                dfs(i, -1)

        return ap

    # 3. Core coloring algorithms

    def dsatur_coloring(vertices=None, max_colors=None):
        """Enhanced DSATUR algorithm."""
        if vertices is None:
            vertices = set(range(n))
        if max_colors is None:
            max_colors = n

        n_sub = len(vertices)
        vertices_list = list(vertices)
        vertex_map = {v: i for i, v in enumerate(vertices_list)}
        reverse_map = {i: v for v, i in vertex_map.items()}

        # Build adjacency lists for the subgraph
        sub_adj_lists = [[] for _ in range(n_sub)]
        for i, v in enumerate(vertices_list):
            for u in adj_lists[v]:
                if u in vertex_map:
                    sub_adj_lists[i].append(vertex_map[u])

        colors = np.full(n_sub, -1)
        saturation = np.zeros(n_sub, dtype=int)
        adj_colors = [set() for _ in range(n_sub)]

        # Compute degrees within the subgraph
        sub_degrees = np.zeros(n_sub, dtype=int)
        for i in range(n_sub):
            sub_degrees[i] = len(sub_adj_lists[i])

        # Initialize the priority queue: (-saturation, -degree, vertex id)
        vertex_heap = [(0, -sub_degrees[i], i) for i in range(n_sub)]
        heapq.heapify(vertex_heap)

        colored_count = 0
        colored_vertices = np.zeros(n_sub, dtype=bool)

        while colored_count < n_sub and vertex_heap:
            # Pop the highest-priority uncolored vertex
            _, _, vertex = heapq.heappop(vertex_heap)

            # Skip vertices that are already colored
            if colored_vertices[vertex]:
                continue

            # Mark the colors already used by neighbors
            used = [False] * max_colors
            for u in sub_adj_lists[vertex]:
                if colors[u] != -1:
                    used[colors[u]] = True

            # Find the smallest available color
            color = -1
            for c in range(max_colors):
                if not used[c]:
                    color = c
                    break

            if color == -1:
                # No color available
                return None

            colors[vertex] = color
            colored_vertices[vertex] = True
            colored_count += 1

            # Update the saturation of the neighbors
            for u in sub_adj_lists[vertex]:
                if not colored_vertices[u]:
                    if color not in adj_colors[u]:
                        adj_colors[u].add(color)
                        saturation[u] += 1
                        # Re-push to update the priority
                        heapq.heappush(vertex_heap, (-saturation[u], -sub_degrees[u], u))

        # Map the subgraph coloring back to the original graph
        result = np.full(n, -1)
        for i in range(n_sub):
            result[reverse_map[i]] = colors[i]

        return result

    def kempe_chain_interchange(coloring, v, c1, c2):
        """Kempe chain interchange."""
        if coloring[v] != c1 or c1 == c2:
            return False

        # Save the original coloring
        original = coloring.copy()

        # Build the Kempe chain
        chain = {v}
        queue = deque([v])

        while queue:
            current = queue.popleft()
            for u in adj_lists[current]:
                if u not in chain and coloring[u] in (c1, c2):
                    chain.add(u)
                    queue.append(u)

        # Swap the colors
        for u in chain:
            if coloring[u] == c1:
                coloring[u] = c2
            elif coloring[u] == c2:
                coloring[u] = c1

        # Verify that the coloring is still valid after the swap
        for u in range(n):
            for v in adj_lists[u]:
                if coloring[u] == coloring[v] and coloring[u] != -1:
                    # Restore the original coloring
                    coloring[:] = original
                    return False

        return True

    def iterated_greedy(initial_coloring=None, iterations=100):
        """Iterated greedy algorithm."""
        if initial_coloring is None:
            # Generate an initial solution with DSATUR
            coloring = dsatur_coloring()
        else:
            coloring = initial_coloring.copy()

        best = coloring.copy()
        best_colors_used = len(set(coloring))

        for _ in range(iterations):
            if time.time() - start_time > max_time:
                break

            # Randomize the vertex order
            vertices = list(range(n))
            random.shuffle(vertices)

            # Temporarily uncolor the vertices
            temp_coloring = coloring.copy()
            for v in vertices:
                temp_coloring[v] = -1

            # Recolor
            for v in vertices:
                used = [False] * n
                for u in adj_lists[v]:
                    if temp_coloring[u] != -1:
                        used[temp_coloring[u]] = True

                # Find the smallest available color
                for c in range(n):
                    if not used[c]:
                        temp_coloring[v] = c
                        break

            # Update the best solution
            colors_used = len(set(temp_coloring))
            if colors_used < best_colors_used:
                best = temp_coloring.copy()
                best_colors_used = colors_used
                coloring = temp_coloring.copy()
            elif random.random() < 0.3:  # occasionally accept a worse solution to escape local optima
                coloring = temp_coloring.copy()

        return best

    def tabu_search(initial_coloring, max_iterations=1000):
        """Tabu search optimization."""
        coloring = initial_coloring.copy()
        best_coloring = coloring.copy()
        best_colors_used = len(set(coloring))

        # Initialize the tabu list
        tabu_list = {}
        tabu_tenure = 10
        iteration = 0

        # Count conflicts
        def count_conflicts():
            conflicts = 0
            for v in range(n):
                for u in adj_lists[v]:
                    if u > v and coloring[u] == coloring[v]:
                        conflicts += 1
            return conflicts

        # Find the best move
        def find_best_move():
            best_delta = float('inf')
            best_moves = []

            for v in range(n):
                current_color = coloring[v]

                # Conflicts under the current color
                current_conflicts = sum(1 for u in adj_lists[v] if coloring[u] == current_color)

                # Try every possible color
                for c in range(best_colors_used + 1):
                    if c != current_color:
                        # Conflicts under the new color
                        new_conflicts = sum(1 for u in adj_lists[v] if coloring[u] == c)
                        delta = new_conflicts - current_conflicts

                        # Check tabu status
                        move_key = (v, c)
                        is_tabu = move_key in tabu_list and iteration < tabu_list[move_key]

                        # Aspiration criterion: allow a tabu move only if it leads to a new best solution
                        if is_tabu and not (new_conflicts == 0 and c < best_colors_used):
                            continue

                        if delta <= best_delta:
                            if delta < best_delta:
                                best_moves = []
                                best_delta = delta
                            best_moves.append((v, c))

            if not best_moves:
                return None, None

            # Pick one of the best moves at random
            v, c = random.choice(best_moves)
            return v, c

        # Main loop
        while iteration < max_iterations and time.time() - start_time < max_time:
            v, c = find_best_move()
            if v is None:
                break

            # Apply the move
            old_color = coloring[v]
            coloring[v] = c

            # Update the tabu list
            tabu_list[(v, old_color)] = iteration + tabu_tenure

            # Update the best solution
            colors_used = len(set(coloring))
            if colors_used < best_colors_used:
                best_coloring = coloring.copy()
                best_colors_used = colors_used

            iteration += 1

        return best_coloring

    # 4. Multi-stage hybrid strategy

    def hybrid_coloring():
        """Multi-stage hybrid coloring strategy."""
        # Stage 1: preprocessing and structure identification
        max_clique = find_max_clique()
        clique_size = len(max_clique)

        # Stage 2: initial solution generation
        # Generate an initial solution with DSATUR
        initial_coloring = dsatur_coloring()
        if initial_coloring is None:
            initial_coloring = np.zeros(n, dtype=int)
            for i in range(n):
                used = set()
                for j in adj_lists[i]:
                    if j < i:
                        used.add(initial_coloring[j])
                for c in range(n):
                    if c not in used:
                        initial_coloring[i] = c
                        break

        # Stage 3: iterative improvement
        # Apply iterated greedy
        improved_coloring = iterated_greedy(initial_coloring, iterations=50)
        colors_used = len(set(improved_coloring))

        # Fix: declare best_color_count as a variable of the enclosing scope
        nonlocal best_color_count

        if colors_used < best_color_count:
            best_coloring[:] = improved_coloring
            best_color_count = colors_used

        # Stage 4: tabu search optimization
        if time.time() - start_time < max_time * 0.7:
            tabu_coloring = tabu_search(improved_coloring, max_iterations=500)
            tabu_colors_used = len(set(tabu_coloring))

            if tabu_colors_used < best_color_count:
                best_coloring[:] = tabu_coloring
                best_color_count = tabu_colors_used

        # Stage 5: color merging
        if time.time() - start_time < max_time * 0.9:
            merged_coloring = color_merging(best_coloring.copy())
            merged_colors_used = len(set(merged_coloring))

            if merged_colors_used < best_color_count:
                best_coloring[:] = merged_coloring
                best_color_count = merged_colors_used

    def color_merging(coloring):
        """Try to merge color classes."""
        colors_used = sorted(set(coloring))

        # Try to merge every pair of colors
        for c1, c2 in combinations(colors_used, 2):
            # Check whether the two classes can be merged
            can_merge = True
            vertices_with_c1 = np.where(coloring == c1)[0]

            for v in vertices_with_c1:
                for u in adj_lists[v]:
                    if coloring[u] == c2:
                        can_merge = False
                        break
                if not can_merge:
                    break

            if can_merge:
                # Merge the colors
                coloring[vertices_with_c1] = c2

                # Recurse to try further merges
                return color_merging(coloring)

        # Remap colors so they are consecutive
        color_map = {}
        new_coloring = np.zeros_like(coloring)
        next_color = 0

        for i, c in enumerate(coloring):
            if c not in color_map:
                color_map[c] = next_color
                next_color += 1
            new_coloring[i] = color_map[c]

        return new_coloring

    # 5. Graph decomposition and recombination

    def solve_by_decomposition():
        """Solve via graph decomposition."""
        # Decompose the graph
        components = decompose_graph()

        if len(components) > 1:
            # Handle each component separately
            component_colorings = []
            max_colors = 0

            for component in components:
                # Color the component
                comp_coloring = dsatur_coloring(component)

                # Remap the colors
                color_map = {}
                for v in component:
                    if comp_coloring[v] not in color_map:
                        color_map[comp_coloring[v]] = len(color_map)
                    comp_coloring[v] = color_map[comp_coloring[v]]

                component_colorings.append((component, comp_coloring))
                max_colors = max(max_colors, max(comp_coloring[list(component)]) + 1)

            # Recombine the solutions
            combined_coloring = np.full(n, -1)
            for component, comp_coloring in component_colorings:
                for v in component:
                    combined_coloring[v] = comp_coloring[v]

            return combined_coloring
        else:
            return None

    # 6. Iterative color reduction

    def iterative_color_reduction(coloring):
        """Iteratively try to reduce the number of colors."""
        colors_used = len(set(coloring))

        # If the color count already equals the clique size, no further reduction is possible
        max_clique = find_max_clique()
        if colors_used <= len(max_clique):
            return coloring

        # Try to drop one color
        target_colors = colors_used - 1

        # Remap colors so they are consecutive
        color_map = {}
        for i, c in enumerate(coloring):
            if c not in color_map:
                color_map[c] = len(color_map)
            coloring[i] = color_map[c]

        # Try to remove the highest color
        highest_color = target_colors
        vertices_with_highest = np.where(coloring == highest_color)[0]

        # Temporarily uncolor these vertices
        temp_coloring = coloring.copy()
        temp_coloring[vertices_with_highest] = -1

        # Try to recolor them with fewer colors
        for v in vertices_with_highest:
            used = [False] * target_colors
            for u in adj_lists[v]:
                if temp_coloring[u] != -1:
                    used[temp_coloring[u]] = True

            # Look for an available color
            available_color = -1
            for c in range(target_colors):
                if not used[c]:
                    available_color = c
                    break

            if available_color != -1:
                temp_coloring[v] = available_color
            else:
                # Cannot reduce the color count
                return coloring

        # Recurse to try a further reduction
        if time.time() - start_time < max_time * 0.95:
            return iterative_color_reduction(temp_coloring)
        else:
            return temp_coloring

    # Main algorithm flow

    # 1. Try to solve via graph decomposition
    decomp_coloring = solve_by_decomposition()
    if decomp_coloring is not None:
        decomp_colors_used = len(set(decomp_coloring))
        if decomp_colors_used < best_color_count:
            best_coloring = decomp_coloring
            best_color_count = decomp_colors_used

    # 2. Apply the multi-stage hybrid strategy
    hybrid_coloring()

    # 3. Iterative color reduction
    if time.time() - start_time < max_time * 0.95:
        reduced_coloring = iterative_color_reduction(best_coloring.copy())
        reduced_colors_used = len(set(reduced_coloring))

        if reduced_colors_used < best_color_count:
            best_coloring = reduced_coloring
            best_color_count = reduced_colors_used

    # Ensure color labels are consecutive and start at 0
    color_map = {}
    final_coloring = np.zeros_like(best_coloring)
    next_color = 0

    for i, c in enumerate(best_coloring):
        if c not in color_map:
            color_map[c] = next_color
            next_color += 1
        final_coloring[i] = color_map[c]

    return final_coloring
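For reference, a small driver sketch showing how graph_coloring_v8 could be run on the first instance above and the result sanity-checked. load_gc_file is the hypothetical loader sketched after the edge list earlier, not a function in this repository; the conflict check simply verifies that no edge joins two vertices of the same color.

# Illustrative driver (not part of gc1.py)
adj = load_gc_file("gcdata/DSJC0125.1.txt")
coloring = graph_coloring_v8(adj)

# A coloring is valid if no edge is monochromatic.
n = adj.shape[0]
conflicts = sum(
    1
    for u in range(n)
    for v in range(u + 1, n)
    if adj[u, v] == 1 and coloring[u] == coloring[v]
)
print("colors used:", len(set(coloring)), "conflicts:", conflicts)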
BIN gcdata/mcppareto.png Normal file
Binary file not shown. After: 176 KiB

831 gcdata/mctest.ipynb Normal file
File diff suppressed because one or more lines are too long

BIN gcdata/plot/3color.png Normal file
Binary file not shown. After: 42 KiB

BIN gcdata/plot/5color.png Normal file
Binary file not shown. After: 43 KiB