backtracking lab finished

This commit is contained in:
2026-01-19 01:17:31 +08:00
parent 785f306726
commit e8e22876d2
19 changed files with 11769 additions and 0 deletions

View File

@@ -0,0 +1,434 @@
n,level,cost,true_val,ratio
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,0,1680,1680,1
5,1,1680,1680,1
5,2,1680,1680,1
5,3,1680,1680,1
5,4,1680,1680,1
5,5,1680,1680,1
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,0,2266.67,2244,1.0101
10,1,2256.83,2244,1.00572
10,2,2246.94,2244,1.00131
10,3,2246.39,2244,1.00107
10,4,2245.89,2244,1.00084
10,5,2245.44,2244,1.00064
10,6,2245.33,2244,1.00059
10,7,2245.23,2244,1.00055
10,8,2244.54,2244,1.00024
10,9,2244.06,2244,1.00003
10,10,2244,2244,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,0,4400,4400,1
15,1,4400,4400,1
15,2,4400,4400,1
15,3,4400,4400,1
15,4,4400,4400,1
15,5,4400,4400,1
15,6,4400,4400,1
15,7,4400,4400,1
15,8,4400,4400,1
15,9,4400,4400,1
15,10,4400,4400,1
15,11,4400,4400,1
15,12,4400,4400,1
15,13,4400,4400,1
15,14,4400,4400,1
15,15,4400,4400,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,0,2200,2200,1
20,1,2200,2200,1
20,2,2200,2200,1
20,3,2200,2200,1
20,4,2200,2200,1
20,5,2200,2200,1
20,6,2200,2200,1
20,7,2200,2200,1
20,8,2200,2200,1
20,9,2200,2200,1
20,10,2200,2200,1
20,11,2200,2200,1
20,12,2200,2200,1
20,13,2200,2200,1
20,14,2200,2200,1
20,15,2200,2200,1
20,16,2200,2200,1
20,17,2200,2200,1
20,18,2200,2200,1
20,19,2200,2200,1
20,20,2200,2200,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,0,3600,3600,1
25,1,3600,3600,1
25,2,3600,3600,1
25,3,3600,3600,1
25,4,3600,3600,1
25,5,3600,3600,1
25,6,3600,3600,1
25,7,3600,3600,1
25,8,3600,3600,1
25,9,3600,3600,1
25,10,3600,3600,1
25,11,3600,3600,1
25,12,3600,3600,1
25,13,3600,3600,1
25,14,3600,3600,1
25,15,3600,3600,1
25,16,3600,3600,1
25,17,3600,3600,1
25,18,3600,3600,1
25,19,3600,3600,1
25,20,3600,3600,1
25,21,3600,3600,1
25,22,3600,3600,1
25,23,3600,3600,1
25,24,3600,3600,1
25,25,3600,3600,1
1 n level cost true_val ratio
2 5 0 1680 1680 1
3 5 0 1680 1680 1
4 5 0 1680 1680 1
5 5 0 1680 1680 1
6 5 0 1680 1680 1
7 5 0 1680 1680 1
8 5 0 1680 1680 1
9 5 0 1680 1680 1
10 5 0 1680 1680 1
11 5 0 1680 1680 1
12 5 0 1680 1680 1
13 5 0 1680 1680 1
14 5 0 1680 1680 1
15 5 0 1680 1680 1
16 5 0 1680 1680 1
17 5 0 1680 1680 1
18 5 0 1680 1680 1
19 5 0 1680 1680 1
20 5 0 1680 1680 1
21 5 0 1680 1680 1
22 5 0 1680 1680 1
23 5 1 1680 1680 1
24 5 2 1680 1680 1
25 5 3 1680 1680 1
26 5 4 1680 1680 1
27 5 5 1680 1680 1
28 10 0 2266.67 2244 1.0101
29 10 0 2266.67 2244 1.0101
30 10 0 2266.67 2244 1.0101
31 10 0 2266.67 2244 1.0101
32 10 0 2266.67 2244 1.0101
33 10 0 2266.67 2244 1.0101
34 10 0 2266.67 2244 1.0101
35 10 0 2266.67 2244 1.0101
36 10 0 2266.67 2244 1.0101
37 10 0 2266.67 2244 1.0101
38 10 0 2266.67 2244 1.0101
39 10 0 2266.67 2244 1.0101
40 10 0 2266.67 2244 1.0101
41 10 0 2266.67 2244 1.0101
42 10 0 2266.67 2244 1.0101
43 10 0 2266.67 2244 1.0101
44 10 0 2266.67 2244 1.0101
45 10 0 2266.67 2244 1.0101
46 10 0 2266.67 2244 1.0101
47 10 0 2266.67 2244 1.0101
48 10 0 2266.67 2244 1.0101
49 10 0 2266.67 2244 1.0101
50 10 0 2266.67 2244 1.0101
51 10 0 2266.67 2244 1.0101
52 10 0 2266.67 2244 1.0101
53 10 0 2266.67 2244 1.0101
54 10 0 2266.67 2244 1.0101
55 10 0 2266.67 2244 1.0101
56 10 0 2266.67 2244 1.0101
57 10 0 2266.67 2244 1.0101
58 10 0 2266.67 2244 1.0101
59 10 0 2266.67 2244 1.0101
60 10 0 2266.67 2244 1.0101
61 10 0 2266.67 2244 1.0101
62 10 1 2256.83 2244 1.00572
63 10 2 2246.94 2244 1.00131
64 10 3 2246.39 2244 1.00107
65 10 4 2245.89 2244 1.00084
66 10 5 2245.44 2244 1.00064
67 10 6 2245.33 2244 1.00059
68 10 7 2245.23 2244 1.00055
69 10 8 2244.54 2244 1.00024
70 10 9 2244.06 2244 1.00003
71 10 10 2244 2244 1
72 15 0 4400 4400 1
73 15 0 4400 4400 1
74 15 0 4400 4400 1
75 15 0 4400 4400 1
76 15 0 4400 4400 1
77 15 0 4400 4400 1
78 15 0 4400 4400 1
79 15 0 4400 4400 1
80 15 0 4400 4400 1
81 15 0 4400 4400 1
82 15 0 4400 4400 1
83 15 0 4400 4400 1
84 15 0 4400 4400 1
85 15 0 4400 4400 1
86 15 0 4400 4400 1
87 15 0 4400 4400 1
88 15 0 4400 4400 1
89 15 0 4400 4400 1
90 15 0 4400 4400 1
91 15 0 4400 4400 1
92 15 0 4400 4400 1
93 15 0 4400 4400 1
94 15 0 4400 4400 1
95 15 0 4400 4400 1
96 15 0 4400 4400 1
97 15 0 4400 4400 1
98 15 0 4400 4400 1
99 15 0 4400 4400 1
100 15 0 4400 4400 1
101 15 0 4400 4400 1
102 15 0 4400 4400 1
103 15 0 4400 4400 1
104 15 0 4400 4400 1
105 15 0 4400 4400 1
106 15 0 4400 4400 1
107 15 0 4400 4400 1
108 15 0 4400 4400 1
109 15 0 4400 4400 1
110 15 0 4400 4400 1
111 15 0 4400 4400 1
112 15 0 4400 4400 1
113 15 0 4400 4400 1
114 15 0 4400 4400 1
115 15 0 4400 4400 1
116 15 0 4400 4400 1
117 15 0 4400 4400 1
118 15 0 4400 4400 1
119 15 0 4400 4400 1
120 15 0 4400 4400 1
121 15 0 4400 4400 1
122 15 0 4400 4400 1
123 15 0 4400 4400 1
124 15 0 4400 4400 1
125 15 0 4400 4400 1
126 15 0 4400 4400 1
127 15 0 4400 4400 1
128 15 0 4400 4400 1
129 15 0 4400 4400 1
130 15 0 4400 4400 1
131 15 0 4400 4400 1
132 15 0 4400 4400 1
133 15 0 4400 4400 1
134 15 0 4400 4400 1
135 15 0 4400 4400 1
136 15 0 4400 4400 1
137 15 0 4400 4400 1
138 15 0 4400 4400 1
139 15 0 4400 4400 1
140 15 0 4400 4400 1
141 15 0 4400 4400 1
142 15 0 4400 4400 1
143 15 0 4400 4400 1
144 15 0 4400 4400 1
145 15 0 4400 4400 1
146 15 0 4400 4400 1
147 15 0 4400 4400 1
148 15 0 4400 4400 1
149 15 0 4400 4400 1
150 15 0 4400 4400 1
151 15 0 4400 4400 1
152 15 0 4400 4400 1
153 15 0 4400 4400 1
154 15 0 4400 4400 1
155 15 0 4400 4400 1
156 15 0 4400 4400 1
157 15 0 4400 4400 1
158 15 0 4400 4400 1
159 15 0 4400 4400 1
160 15 0 4400 4400 1
161 15 0 4400 4400 1
162 15 0 4400 4400 1
163 15 0 4400 4400 1
164 15 0 4400 4400 1
165 15 0 4400 4400 1
166 15 0 4400 4400 1
167 15 0 4400 4400 1
168 15 0 4400 4400 1
169 15 0 4400 4400 1
170 15 0 4400 4400 1
171 15 0 4400 4400 1
172 15 0 4400 4400 1
173 15 1 4400 4400 1
174 15 2 4400 4400 1
175 15 3 4400 4400 1
176 15 4 4400 4400 1
177 15 5 4400 4400 1
178 15 6 4400 4400 1
179 15 7 4400 4400 1
180 15 8 4400 4400 1
181 15 9 4400 4400 1
182 15 10 4400 4400 1
183 15 11 4400 4400 1
184 15 12 4400 4400 1
185 15 13 4400 4400 1
186 15 14 4400 4400 1
187 15 15 4400 4400 1
188 20 0 2200 2200 1
189 20 0 2200 2200 1
190 20 0 2200 2200 1
191 20 0 2200 2200 1
192 20 0 2200 2200 1
193 20 0 2200 2200 1
194 20 0 2200 2200 1
195 20 0 2200 2200 1
196 20 0 2200 2200 1
197 20 0 2200 2200 1
198 20 0 2200 2200 1
199 20 0 2200 2200 1
200 20 0 2200 2200 1
201 20 0 2200 2200 1
202 20 0 2200 2200 1
203 20 0 2200 2200 1
204 20 0 2200 2200 1
205 20 0 2200 2200 1
206 20 0 2200 2200 1
207 20 0 2200 2200 1
208 20 0 2200 2200 1
209 20 0 2200 2200 1
210 20 0 2200 2200 1
211 20 0 2200 2200 1
212 20 0 2200 2200 1
213 20 0 2200 2200 1
214 20 0 2200 2200 1
215 20 0 2200 2200 1
216 20 0 2200 2200 1
217 20 0 2200 2200 1
218 20 0 2200 2200 1
219 20 0 2200 2200 1
220 20 0 2200 2200 1
221 20 0 2200 2200 1
222 20 0 2200 2200 1
223 20 0 2200 2200 1
224 20 0 2200 2200 1
225 20 0 2200 2200 1
226 20 0 2200 2200 1
227 20 0 2200 2200 1
228 20 0 2200 2200 1
229 20 0 2200 2200 1
230 20 0 2200 2200 1
231 20 0 2200 2200 1
232 20 0 2200 2200 1
233 20 0 2200 2200 1
234 20 0 2200 2200 1
235 20 0 2200 2200 1
236 20 0 2200 2200 1
237 20 0 2200 2200 1
238 20 0 2200 2200 1
239 20 0 2200 2200 1
240 20 0 2200 2200 1
241 20 0 2200 2200 1
242 20 0 2200 2200 1
243 20 0 2200 2200 1
244 20 0 2200 2200 1
245 20 0 2200 2200 1
246 20 0 2200 2200 1
247 20 0 2200 2200 1
248 20 0 2200 2200 1
249 20 0 2200 2200 1
250 20 0 2200 2200 1
251 20 0 2200 2200 1
252 20 0 2200 2200 1
253 20 0 2200 2200 1
254 20 0 2200 2200 1
255 20 0 2200 2200 1
256 20 0 2200 2200 1
257 20 0 2200 2200 1
258 20 0 2200 2200 1
259 20 0 2200 2200 1
260 20 0 2200 2200 1
261 20 0 2200 2200 1
262 20 0 2200 2200 1
263 20 0 2200 2200 1
264 20 0 2200 2200 1
265 20 0 2200 2200 1
266 20 0 2200 2200 1
267 20 0 2200 2200 1
268 20 0 2200 2200 1
269 20 0 2200 2200 1
270 20 0 2200 2200 1
271 20 0 2200 2200 1
272 20 0 2200 2200 1
273 20 0 2200 2200 1
274 20 0 2200 2200 1
275 20 0 2200 2200 1
276 20 0 2200 2200 1
277 20 0 2200 2200 1
278 20 0 2200 2200 1
279 20 0 2200 2200 1
280 20 0 2200 2200 1
281 20 0 2200 2200 1
282 20 0 2200 2200 1
283 20 0 2200 2200 1
284 20 0 2200 2200 1
285 20 0 2200 2200 1
286 20 0 2200 2200 1
287 20 0 2200 2200 1
288 20 0 2200 2200 1
289 20 1 2200 2200 1
290 20 2 2200 2200 1
291 20 3 2200 2200 1
292 20 4 2200 2200 1
293 20 5 2200 2200 1
294 20 6 2200 2200 1
295 20 7 2200 2200 1
296 20 8 2200 2200 1
297 20 9 2200 2200 1
298 20 10 2200 2200 1
299 20 11 2200 2200 1
300 20 12 2200 2200 1
301 20 13 2200 2200 1
302 20 14 2200 2200 1
303 20 15 2200 2200 1
304 20 16 2200 2200 1
305 20 17 2200 2200 1
306 20 18 2200 2200 1
307 20 19 2200 2200 1
308 20 20 2200 2200 1
309 25 0 3600 3600 1
310 25 0 3600 3600 1
311 25 0 3600 3600 1
312 25 0 3600 3600 1
313 25 0 3600 3600 1
314 25 0 3600 3600 1
315 25 0 3600 3600 1
316 25 0 3600 3600 1
317 25 0 3600 3600 1
318 25 0 3600 3600 1
319 25 0 3600 3600 1
320 25 0 3600 3600 1
321 25 0 3600 3600 1
322 25 0 3600 3600 1
323 25 0 3600 3600 1
324 25 0 3600 3600 1
325 25 0 3600 3600 1
326 25 0 3600 3600 1
327 25 0 3600 3600 1
328 25 0 3600 3600 1
329 25 0 3600 3600 1
330 25 0 3600 3600 1
331 25 0 3600 3600 1
332 25 0 3600 3600 1
333 25 0 3600 3600 1
334 25 0 3600 3600 1
335 25 0 3600 3600 1
336 25 0 3600 3600 1
337 25 0 3600 3600 1
338 25 0 3600 3600 1
339 25 0 3600 3600 1
340 25 0 3600 3600 1
341 25 0 3600 3600 1
342 25 0 3600 3600 1
343 25 0 3600 3600 1
344 25 0 3600 3600 1
345 25 0 3600 3600 1
346 25 0 3600 3600 1
347 25 0 3600 3600 1
348 25 0 3600 3600 1
349 25 0 3600 3600 1
350 25 0 3600 3600 1
351 25 0 3600 3600 1
352 25 0 3600 3600 1
353 25 0 3600 3600 1
354 25 0 3600 3600 1
355 25 0 3600 3600 1
356 25 0 3600 3600 1
357 25 0 3600 3600 1
358 25 0 3600 3600 1
359 25 0 3600 3600 1
360 25 0 3600 3600 1
361 25 0 3600 3600 1
362 25 0 3600 3600 1
363 25 0 3600 3600 1
364 25 0 3600 3600 1
365 25 0 3600 3600 1
366 25 0 3600 3600 1
367 25 0 3600 3600 1
368 25 0 3600 3600 1
369 25 0 3600 3600 1
370 25 0 3600 3600 1
371 25 0 3600 3600 1
372 25 0 3600 3600 1
373 25 0 3600 3600 1
374 25 0 3600 3600 1
375 25 0 3600 3600 1
376 25 0 3600 3600 1
377 25 0 3600 3600 1
378 25 0 3600 3600 1
379 25 0 3600 3600 1
380 25 0 3600 3600 1
381 25 0 3600 3600 1
382 25 0 3600 3600 1
383 25 0 3600 3600 1
384 25 0 3600 3600 1
385 25 0 3600 3600 1
386 25 0 3600 3600 1
387 25 0 3600 3600 1
388 25 0 3600 3600 1
389 25 0 3600 3600 1
390 25 0 3600 3600 1
391 25 0 3600 3600 1
392 25 0 3600 3600 1
393 25 0 3600 3600 1
394 25 0 3600 3600 1
395 25 0 3600 3600 1
396 25 0 3600 3600 1
397 25 0 3600 3600 1
398 25 0 3600 3600 1
399 25 0 3600 3600 1
400 25 0 3600 3600 1
401 25 0 3600 3600 1
402 25 0 3600 3600 1
403 25 0 3600 3600 1
404 25 0 3600 3600 1
405 25 0 3600 3600 1
406 25 0 3600 3600 1
407 25 0 3600 3600 1
408 25 0 3600 3600 1
409 25 0 3600 3600 1
410 25 1 3600 3600 1
411 25 2 3600 3600 1
412 25 3 3600 3600 1
413 25 4 3600 3600 1
414 25 5 3600 3600 1
415 25 6 3600 3600 1
416 25 7 3600 3600 1
417 25 8 3600 3600 1
418 25 9 3600 3600 1
419 25 10 3600 3600 1
420 25 11 3600 3600 1
421 25 12 3600 3600 1
422 25 13 3600 3600 1
423 25 14 3600 3600 1
424 25 15 3600 3600 1
425 25 16 3600 3600 1
426 25 17 3600 3600 1
427 25 18 3600 3600 1
428 25 19 3600 3600 1
429 25 20 3600 3600 1
430 25 21 3600 3600 1
431 25 22 3600 3600 1
432 25 23 3600 3600 1
433 25 24 3600 3600 1
434 25 25 3600 3600 1

View File

@@ -0,0 +1,6 @@
n,avg_ratio
5,1.02962
10,1.00688
15,1.0016
20,1.00249
25,1.00215
1 n avg_ratio
2 5 1.02962
3 10 1.00688
4 15 1.0016
5 20 1.00249
6 25 1.00215

Binary file not shown.

After

Width:  |  Height:  |  Size: 97 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 114 KiB

BIN
backtracking/knapsack_bnb Executable file

Binary file not shown.

View File

@@ -0,0 +1,166 @@
#let times = "Times LT Pro"
#let times = "Times New Roman"
#let song = (times, "Noto Serif CJK SC")
#let hei = (times, "Noto Sans CJK SC")
#let kai = (times, "Noto Serif CJK SC")
#let xbsong = (times, "Noto Serif CJK SC")
#let fsong = (times, "Noto Serif CJK SC")
#let code = (times, "JetBrains Mono")
#let nudtlabpaper(title: "",
author1: "",
id1: "",
advisor: "",
jobtitle: "",
lab: "",
date: "",
header_str: "",
minimal_cover: false,
body) = {
// Set the document's basic properties.
set document(author: author1, title: title)
set page(
margin: (left: 30mm, right: 30mm, top: 30mm, bottom: 30mm),
)
// If minimal_cover is requested, render an otherwise-empty first page
// that only displays the "实验时间" near the bottom center.
if minimal_cover {
v(158pt)
align(center)[
#block(text(weight: 700, size: 30pt, font: hei, tracking: 1pt, "2025秋 -《算法设计与分析》"))
]
align(center)[
#block(text(weight: 700, size: 24pt, font: song, tracking: 1pt, "回溯与分支限界算法分析实验报告"))
]
// Keep standard margins but push content down toward the bottom.
v(220pt)
align(center)[
#block(text(size: 14pt, font: song, tracking: 9pt, "实验时间"))
]
v(2pt)
align(center)[
#block(text(size: 16pt, font: song, date))
]
pagebreak()
} else {
// Title row.
v(158pt)
align(center)[
#block(text(weight: 700, size: 30pt, font: hei, tracking: 1pt, "2025秋 -《算法设计与分析》"))
]
align(center)[
#block(text(weight: 700, size: 24pt, font: song, tracking: 1pt, "回溯与分支限界算法分析实验报告"))
]
v(103pt)
pad(
left: 1em,
right: 1em,
grid(
// columns: (80pt, 1fr),
// rows: (17pt, auto),
// text(weight: 700, size: 16pt, font: song, "实验名称:"),
// align(center, text(weight: "regular", size: 16pt, font: song, title)),
// text(""),
// line(length: 100%)
)
// #block(text(weight: 700, 1.75em, title))
// underline(text(weight: 700, size: 16pt, font: song, title))
)
// Author information.
v(62.5pt)
grid(
columns: (0.25fr, 0.25fr, 0.25fr, 0.25fr),
rows: (20pt, 8pt, 20pt, 8pt, 20pt, 8pt, 20pt, 12pt),
text(size: 14pt, font: song, tracking: 9pt, "学员姓名"),
align(center, text(size: 14pt, font: song, author1)),
text(size: 14pt, font: song, tracking: 54pt, "学号"),
align(center, text(size: 14pt, font: times, id1)),
text(""),
line(length: 100%),
text(""),
line(length: 100%),
text(size: 14pt, font: song, tracking: 9pt, "指导教员"),
align(center, text(size: 14pt, font: song, advisor)),
text(size: 14pt, font: song, tracking: 54pt, "职称"),
align(center, text(size: 14pt, font: song, jobtitle)),
text(""),
line(length: 100%),
text(""),
line(length: 100%),
text(size: 14pt, font: song, tracking: 9pt, "实验室"),
align(center, text(size: 14pt, font: song, lab)),
text(size: 14pt, font: song, tracking: 9pt, "实验时间"),
align(center, text(size: 14pt, font: song, date)),
text(""),
line(length: 100%),
text(""),
line(length: 100%),
)
v(50.5pt)
align(center, text(font: hei, size: 15pt, "国防科技大学教育训练部制"))
pagebreak()
}
set page(
margin: (left: 30mm, right: 30mm, top: 30mm, bottom: 30mm),
numbering: "i",
number-align: center,
)
v(14pt)
align(center)[
#block(text(font: hei, size: 14pt, "《本科实验报告》填写说明"))
]
v(14pt)
text("")
par(first-line-indent: 2em, text(font: song, size: 12pt, "实验报告内容编排应符合以下要求:"))
par(first-line-indent: 2em, text(font: fsong, size: 12pt, "1采用A421cm×29.7cm白色复印纸单面黑字。上下左右各侧的页边距均为3cm缺省文档网格字号为小4号中文为宋体英文和阿拉伯数字为Times New Roman每页30行每行36字页脚距边界为2.5cm页码置于页脚、居中采用小5号阿拉伯数字从1开始连续编排封面不编页码。"))
par(first-line-indent: 2em, text(font: fsong, size: 12pt, "2报告正文最多可设四级标题字体均为黑体第一级标题字号为4号其余各级标题为小4号标题序号第一级用“一、”、“二、”……第二级用“”、“” ……第三级用“1.”、“2.” ……第四级用“1”、“2” ……,分别按序连续编排。"))
par(first-line-indent: 2em, text(font: fsong, size: 12pt, "3正文插图、表格中的文字字号均为5号。"))
pagebreak()
set page(
margin: (left: 30mm, right: 30mm, top: 30mm, bottom: 30mm),
numbering: "1",
number-align: center,
)
set heading(numbering: "1.1")
// set text(font: hei, lang: "zh")
show heading: it => box(width: 100%)[
#v(0.50em)
#set text(font: hei)
#counter(heading).display()
// #h(0.5em)
#it.body
]
// Main body.
set par(justify: true)
body
}
// Body paragraph helper: 2em first-line indent, 10.5pt Song typeface.
#let para(t) = par(first-line-indent: 2em, text(font: song, size: 10.5pt, t))
// Smaller (10pt) variant for nested / secondary paragraphs.
#let subpara(t) = par(first-line-indent: 2em, text(font: song, size: 10pt, t))
// Code card: monospace text (CJK fallback FangSong) on a light grey rounded box.
#let cb(t) = block(
  text(font: ("Consolas","FangSong_GB2312"), t),
  fill: luma(240),
  inset: 1pt,
  radius: 4pt,
  // width: 100%,
)

10273
backtracking/main.pdf Normal file

File diff suppressed because one or more lines are too long

257
backtracking/main.typ Normal file
View File

@@ -0,0 +1,257 @@
#import "labtemplate.typ": *
#show: nudtlabpaper.with(
author1: "程景愉",
id1: "202302723005",
advisor: "罗磊",
jobtitle: "教授",
lab: "306-707",
date: "2026.1.19",
header_str: "回溯与分支限界算法分析实验报告",
minimal_cover: true,
)
#set page(header: [
#set par(spacing: 6pt)
#align(center)[#text(size: 11pt)[《算法设计与分析》实验报告]]
#v(-0.3em)
#line(length: 100%, stroke: (thickness: 1pt))
],)
#show heading: it => box(width: 100%)[
#v(0.50em)
#set text(font: hei)
#it.body
]
#outline(title: "目录",depth: 3, indent: 1em)
// #pagebreak()
#outline(
title: [图目录],
target: figure.where(kind: image),
)
#show heading: it => box(width: 100%)[
#v(0.50em)
#set text(font: hei)
#counter(heading).display()
#it.body
]
#set enum(indent: 0.5em,body-indent: 0.5em,)
#pagebreak()
= 实验介绍
#para[
回溯法Backtracking和分支限界法Branch and Bound是求解组合优化问题的两种重要算法。回溯法通过深度优先搜索状态空间树利用剪枝函数避免无效搜索分支限界法则常采用广度优先或最佳优先策略利用代价函数Bound计算结点的上界或下界以剪除不可能产生最优解的分支从而加速搜索。本实验旨在通过完全背包问题和多重背包问题深入理解这两种算法的原理特别是代价函数的设计对算法性能的影响并掌握蒙特卡洛方法在估算搜索树规模中的应用。
]
= 实验内容
#para[
本实验主要包含以下内容:
]
+ 针对完全背包问题,实现回溯法与分支限界算法。
+ 利用蒙特卡洛方法对搜索树的分支数量进行估计。
+ 分析分支限界法中代价函数的准确性,通过与真实值(由动态规划求得)对比,分析不同层级和不同输入规模下的近似效果。
+ 设计并对比两种不同的代价函数(朴素界与分数背包界),分析其剪枝效果与计算开销。
+ (附加)针对多重背包问题,实现分支限界算法,并对比不同代价函数的性能。
= 实验要求
#para[
具体要求如下:
]
+ 以物品种类数 $n$ 为输入规模,随机生成测试样本。
+ 统计不同算法的运行时间、访问结点数。
+ 使用 Python 绘制数据图表,展示蒙特卡洛估计结果、代价函数近似比、以及不同算法的性能对比。
+ 分析实验结果,验证理论分析。
= 实验步骤
== 算法设计
=== 完全背包问题的分支限界法
#para[
完全背包问题允许每种物品选择无限次。在分支限界法中,我们构建状态空间树。为了便于剪枝,我们将物品按价值密度($v_i/w_i$)降序排列。
]
```cpp
struct Item {
int id; int weight; int value;
double density; int limit;
Item(int id, int w, int v, int l = -1) : id(id), weight(w), value(v), limit(l) {
density = (double)v / w;
}
};
bool compareItems(const Item& a, const Item& b) {
return a.density > b.density;
}
```
#para[
每个结点包含当前价值 $V_"cur"$、当前重量 $W_"cur"$ 和当前考虑的物品层级 $"level"$。我们使用二叉分支策略:
]
1. *左孩子*:选择当前物品一件,状态更新为 $("level", W_"cur"+w_i, V_"cur"+v_i)$,前提是未超重。
2. *右孩子*:不再选择当前物品,转而考虑下一件物品,状态更新为 $("level"+1, W_"cur", V_"cur")$
#para[
为了进行剪枝我们需要计算当前结点的价值上界Upper Bound, UB。如果 $"UB" <= "current_best"$,则剪枝。
我们实现了两种代价函数:
]
1. *朴素界 (Simple Bound)*:假设剩余容量全部以全局最大单位价值填充。
$ "UB" = V_"cur" + (W - W_"cur") times max(v_i/w_i) $
该界计算简单,但较为松弛。
2. *分数背包界 (Fractional Bound)*:即标准的分支限界法上界。将剩余空间用分数背包问题的贪心解填充(即优先装入密度大的物品,最后一件可分割)。由于物品已排序,该界能提供更紧密的上界。
```cpp
double bound_fractional(int level, int current_val, int rem_cap, const vector<Item>& items) {
double bound = current_val;
int w = rem_cap;
for (int i = level; i < items.size(); ++i) {
if (w >= items[i].weight) {
// Take as many as possible (for complete knapsack fractional)
bound += (double)w * items[i].density;
return bound;
}
}
return bound;
}
```
=== 蒙特卡洛方法估算搜索树规模
#para[
对于大规模问题,直接遍历搜索树是不现实的。蒙特卡洛方法通过随机采样路径来估算树的结点总数。
设路径上第 $i$ 层结点的度数为 $m_i$,则该路径代表的树规模估计值为:
]
$ N = 1 + m_0 + m_0 m_1 + m_0 m_1 m_2 + dots $
```cpp
long long monte_carlo_estimate(const vector<Item>& items, int capacity, int samples = 1000) {
long long total_nodes = 0;
for (int k = 0; k < samples; ++k) {
long long current_multiplier = 1;
// ... (traversal logic) ...
int branching_factor = moves.size();
total_nodes += current_multiplier;
current_multiplier *= branching_factor;
// ...
}
return total_nodes / samples;
}
```
#para[
通过多次采样取平均值,可得到搜索树规模的无偏估计。在完全背包问题中,由于分支因子变化较大(取决于剩余容量),该方法能有效预估问题难度。
]
=== 多重背包问题的分支限界法
#para[
多重背包问题中,每种物品的数量有限制 $k_i$。算法结构与完全背包类似,但在分支时需考虑物品数量限制。
此处同样对比了两种代价函数:
]
1. *松弛界 (Loose Bound)*:忽略数量限制,视为完全背包求分数界。
2. *紧致界 (Tight Bound)*:考虑数量限制求解分数背包问题。即在贪心填充时,不仅受容量限制,也受物品数量 $k_i$ 限制。
```cpp
double bound_mk_tight(int level, int current_val, int rem_cap, const vector<Item>& items) {
double bound = current_val;
int w = rem_cap;
for (int i = level; i < items.size(); ++i) {
if (items[i].weight == 0) continue;
int can_take_weight = items[i].limit * items[i].weight;
if (w >= can_take_weight) {
w -= can_take_weight;
bound += items[i].value * items[i].limit;
} else {
bound += (double)w * items[i].density;
return bound;
}
}
return bound;
}
```
== 实验环境
- 操作系统Linux
- 编程语言C++ (G++)
- 数据分析Python (Pandas, Seaborn, Matplotlib)
- 硬件环境:标准 PC
= 实验结果与分析
== 蒙特卡洛搜索树规模估计
#para[
1 展示了随物品种类数 $n$ 增加,完全背包问题搜索树结点数的蒙特卡洛估计值(对数坐标)。
]
#figure(
image("mc_estimation.png", width: 80%),
caption: [搜索树规模的蒙特卡洛估计],
)
#para[
结果表明,搜索树规模随 $n$ 呈指数级增长。蒙特卡洛方法能够快速给出问题规模的数量级估计,对于判断是否能在有限时间内求出精确解具有指导意义。对于整数背包问题,当 $n$ 较大时,建议先使用蒙特卡洛方法预估,若规模过大则应考虑近似算法或启发式搜索。
]
== 代价函数准确性分析
#para[
为了评估代价函数(上界)的质量,我们记录了搜索过程中各结点的上界值与该状态下的真实最优值(通过动态规划预先计算得到)的比值。比值越接近 1说明上界越紧致。
]
#figure(
image("cost_ratio_level.png", width: 80%),
caption: [不同层级下代价函数的近似比 (n=20)],
)
#figure(
image("cost_ratio_n.png", width: 80%),
caption: [平均近似比随输入规模 n 的变化],
)
#para[
从图 2 可以看出随着搜索深度的增加Level 增大),剩余问题规模变小,代价函数的近似比逐渐趋向于 1说明上界越来越精确。这是符合预期的因为随着物品确定的越多不确定性越小。
3 展示了输入规模 $n$ 对平均近似比的影响。通常情况下,平均近似比相对稳定,不会随 $n$ 剧烈波动,这表明分数背包界具有良好的鲁棒性。
]
== 不同代价函数的性能对比
#para[
我们对比了“分数背包界 (Fractional)”与“朴素界 (Simple)”在完全背包问题上的性能。
]
#figure(
image("new_cost_nodes.png", width: 80%),
caption: [不同代价函数下的访问结点数对比],
)
#figure(
image("new_cost_time.png", width: 80%),
caption: [不同代价函数下的运行时间对比],
)
#para[
实验结果显著:
]
1. *剪枝效果*分数背包界Fractional的访问结点数远少于朴素界Simple常常相差数个数量级注意图 4 为对数坐标)。这是因为分数背包界提供了更紧的上界,能更早地剪除无效分支。
2. *运行时间*:尽管分数背包界的计算复杂度略高于朴素界(需要遍历剩余物品,而朴素界仅需常数/一次乘法),但由于其极强的剪枝能力,总运行时间反而大幅降低。
#para[
这说明在分支限界法中,设计一个计算稍复杂但更紧致的代价函数通常是值得的。
]
= 附加:多重背包问题分析
#para[
在多重背包问题中,我们对比了考虑物品数量限制的“紧致界 (TightBound)”与忽略数量限制的“松弛界 (LooseBound)”。
]
#figure(
image("mk_nodes.png", width: 80%),
caption: [多重背包:不同代价函数的结点数对比],
)
#figure(
image("mk_time.png", width: 80%),
caption: [多重背包:不同代价函数的运行时间对比],
)
#para[
结果显示紧致界TightBound在性能上优于松弛界。因为忽略数量限制会导致上界过大无法有效剪除那些虽然总重量满足但单种物品数量超标的分支。通过在代价函数中精确建模约束条件可以显著提高算法效率。
]
= 实验总结
#para[
本实验通过实现和分析完全背包及多重背包问题的分支限界算法,得出以下结论:
]
1. *代价函数的重要性*:代价函数的紧致程度直接决定了分支限界法的剪枝效率。更紧的界(如分数背包界)虽然单次计算开销稍大,但能指数级减少搜索空间,从而获得更好的总性能。
2. *蒙特卡洛方法的实用性*:该方法能有效评估大规模组合优化问题的解空间大小,为算法选择提供依据。
3. *真实值对比分析*:通过与 DP 得到的真实值对比,验证了分支限界法随着搜索深度增加,对问题最优解的估计越来越准确的特性。

View File

@@ -0,0 +1,6 @@
n,estimated_nodes
5,617
10,78463
15,2375230
20,543302
25,19540360
1 n estimated_nodes
2 5 617
3 10 78463
4 15 2375230
5 20 543302
6 25 19540360

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

BIN
backtracking/mk_nodes.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 121 KiB

BIN
backtracking/mk_time.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 120 KiB

View File

@@ -0,0 +1,9 @@
n,method,nodes,time_us
5,TightBound,14,0
5,LooseBound,24,0
10,TightBound,30,0
10,LooseBound,56,1
15,TightBound,41,0
15,LooseBound,75,1
20,TightBound,40,0
20,LooseBound,145,4
1 n method nodes time_us
2 5 TightBound 14 0
3 5 LooseBound 24 0
4 10 TightBound 30 0
5 10 LooseBound 56 1
6 15 TightBound 41 0
7 15 LooseBound 75 1
8 20 TightBound 40 0
9 20 LooseBound 145 4

View File

@@ -0,0 +1,11 @@
n,method,nodes,time_us
5,Fractional,31,0
5,Simple,126,3
10,Fractional,85,1
10,Simple,674,26
15,Fractional,149,3
15,Simple,1611,64
20,Fractional,130,2
20,Simple,1962,78
25,Fractional,137,2
25,Simple,2319,87
1 n method nodes time_us
2 5 Fractional 31 0
3 5 Simple 126 3
4 10 Fractional 85 1
5 10 Simple 674 26
6 15 Fractional 149 3
7 15 Simple 1611 64
8 20 Fractional 130 2
9 20 Simple 1962 78
10 25 Fractional 137 2
11 25 Simple 2319 87

Binary file not shown.

After

Width:  |  Height:  |  Size: 124 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 120 KiB

103
backtracking/plotter.py Normal file
View File

@@ -0,0 +1,103 @@
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import os

# --- Global plotting style ---------------------------------------------------
sns.set_theme(style="whitegrid")
# SimHei supplies CJK glyphs should any label need them.
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False

output_dir = "backtracking"
os.makedirs(output_dir, exist_ok=True)  # idempotent; avoids exists()/makedirs race


def save_plot(filename):
    """Finalize the current figure and write it into output_dir at 300 dpi."""
    plt.tight_layout()
    plt.savefig(os.path.join(output_dir, filename), dpi=300)
    plt.close()


def _line_plot(df, x, y, title, xlabel, ylabel, outfile, hue=None, logy=False):
    """Draw one marker line plot (optionally grouped by `hue`, log-scaled y) and save it."""
    plt.figure(figsize=(8, 5))
    sns.lineplot(data=df, x=x, y=y, hue=hue, marker="o")
    plt.title(title)
    plt.xlabel(xlabel)
    plt.ylabel(ylabel)
    if logy:
        plt.yscale("log")
    save_plot(outfile)


# 1. Monte Carlo estimation of the search-tree size.
try:
    df_mc = pd.read_csv(os.path.join(output_dir, "mc_estimation.csv"))
    _line_plot(df_mc, "n", "estimated_nodes",
               "Monte Carlo Estimation of Search Tree Size",
               "Number of Item Types (n)", "Estimated Nodes (Log Scale)",
               "mc_estimation.png", logy=True)
except Exception as e:
    print(f"Error plotting MC: {e}")

# 2. Cost-function (upper bound) approximation analysis.
try:
    df_level = pd.read_csv(os.path.join(output_dir, "cost_approx_level.csv"))
    target_n = 20  # instance size shown in the per-level boxplot
    df_level_n = df_level[df_level["n"] == target_n]
    if not df_level_n.empty:
        plt.figure(figsize=(10, 6))
        sns.boxplot(data=df_level_n, x="level", y="ratio")
        plt.title(f"Cost Function Approximation Ratio vs Level (n={target_n})")
        plt.xlabel("Search Tree Level")
        plt.ylabel("Ratio (Bound / True Value)")
        plt.axhline(1.0, color='r', linestyle='--')  # ratio 1 = perfectly tight bound
        save_plot("cost_ratio_level.png")
    # The n-vs-average-ratio plot is independent of the target_n subset, so
    # it is produced unconditionally (previously it was skipped whenever the
    # per-level data for target_n happened to be empty).
    df_n = pd.read_csv(os.path.join(output_dir, "cost_approx_n.csv"))
    _line_plot(df_n, "n", "avg_ratio",
               "Average Cost Approximation Ratio vs Input Size",
               "Number of Item Types (n)", "Average Ratio",
               "cost_ratio_n.png")
except Exception as e:
    print(f"Error plotting Cost Analysis: {e}")

# 3. Fractional vs Simple bound on the complete knapsack.
try:
    df_new = pd.read_csv(os.path.join(output_dir, "new_cost_analysis.csv"))
    _line_plot(df_new, "n", "nodes",
               "Nodes Visited: Fractional vs Simple Bound",
               "n", "Nodes Visited (Log Scale)",
               "new_cost_nodes.png", hue="method", logy=True)
    _line_plot(df_new, "n", "time_us",
               "Execution Time: Fractional vs Simple Bound",
               "n", "Time (microseconds)",
               "new_cost_time.png", hue="method")
except Exception as e:
    print(f"Error plotting New Cost Analysis: {e}")

# 4. Multiple knapsack: Tight vs Loose bound.
try:
    df_mk = pd.read_csv(os.path.join(output_dir, "multiple_knapsack.csv"))
    _line_plot(df_mk, "n", "nodes",
               "Multiple Knapsack: Nodes Visited Comparison",
               "n", "Nodes Visited",
               "mk_nodes.png", hue="method", logy=True)
    _line_plot(df_mk, "n", "time_us",
               "Multiple Knapsack: Execution Time Comparison",
               "n", "Time (microseconds)",
               "mk_time.png", hue="method")
except Exception as e:
    print(f"Error plotting Multiple Knapsack: {e}")

491
backtracking/src/main.cpp Normal file
View File

@@ -0,0 +1,491 @@
#include <iostream>
#include <vector>
#include <algorithm>
#include <random>
#include <chrono>
#include <queue>
#include <iomanip>
#include <fstream>
#include <cmath>
#include <map>
using namespace std;
// One item type in a knapsack instance.
struct Item {
    int id;         // original index before density sorting
    int weight;
    int value;
    double density; // value per unit weight; drives sorting and the bounds
    int limit;      // copy limit for the multiple knapsack; -1 means unbounded
    Item(int id, int w, int v, int l = -1)
        : id(id), weight(w), value(v),
          density(static_cast<double>(v) / w), limit(l) {}
};
// Strict-weak ordering: higher profit density first.
bool compareItems(const Item& a, const Item& b) {
    return b.density < a.density;
}
// Global random engine with a fixed seed so every run regenerates the same
// instances (reproducible experiment results).
mt19937 rng(42);
// Builds a random instance of n item types with weights in [1, max_w] and
// values in [1, max_v]. When `multiple` is set, each type also gets a copy
// limit in [1, max_k]; otherwise the limit is -1 (unbounded).
// The result is sorted by density descending, as required by the bounds/DP.
vector<Item> generate_items(int n, int max_w, int max_v, bool multiple = false, int max_k = 5) {
    uniform_int_distribution<int> weight_gen(1, max_w);
    uniform_int_distribution<int> value_gen(1, max_v);
    uniform_int_distribution<int> count_gen(1, max_k);
    vector<Item> result;
    result.reserve(n);
    for (int idx = 0; idx < n; ++idx) {
        // Named temporaries keep the RNG draw order (weight, value, limit)
        // identical across compilers.
        int w = weight_gen(rng);
        int v = value_gen(rng);
        int lim = multiple ? count_gen(rng) : -1;
        result.emplace_back(idx, w, v, lim);
    }
    sort(result.begin(), result.end(), compareItems);
    return result;
}
// --- DP for True Value Calculation (Complete Knapsack) ---
// Returns table where table[i][c] = best value achievable using item types
// i..n-1 with capacity c (unbounded copies of each type). Row n is all zeros.
// The suffix form lets the BnB look up the exact optimum of any search node.
vector<vector<int>> compute_suffix_dp_complete(const vector<Item>& items, int capacity) {
    const int n = static_cast<int>(items.size());
    vector<vector<int>> table(n + 1, vector<int>(capacity + 1, 0));
    for (int i = n - 1; i >= 0; --i) {
        const int wt = items[i].weight;
        const int val = items[i].value;
        for (int c = 0; c <= capacity; ++c) {
            // Either skip type i entirely...
            int best = table[i + 1][c];
            // ...or take one more copy and stay at type i (unbounded count).
            if (c >= wt) {
                best = max(best, table[i][c - wt] + val);
            }
            table[i][c] = best;
        }
    }
    return table;
}
// --- Cost Functions ---
// 1. Fractional Bound (Standard)
// LP relaxation of the complete (unbounded) knapsack: because items are
// sorted by density descending, the relaxation simply fills ALL remaining
// capacity with the densest still-available type, items[level].
// The original loop had two byte-identical branches that both did exactly
// this on the first iteration; this is the equivalent closed form.
// Returns current_val when no item types remain.
double bound_fractional(int level, int current_val, int rem_cap, const vector<Item>& items) {
    double bound = current_val;
    if (level < static_cast<int>(items.size())) {
        bound += static_cast<double>(rem_cap) * items[level].density;
    }
    return bound;
}
// 2. Simple Bound (Global Max Density) - A looser bound
// Optimistically fills the remaining capacity at the best density in the
// whole instance; cheaper to evaluate than the fractional bound.
double bound_simple(int level, int current_val, int rem_cap, double max_global_density) {
    (void)level; // this bound ignores the tree level entirely
    double optimistic_fill = max_global_density * rem_cap;
    return current_val + optimistic_fill;
}
// --- Branch and Bound Node ---
// One live state of the complete-knapsack search tree.
struct Node {
int level; // Index of item being considered
int current_val;    // value accumulated along the path to this node
int current_weight; // weight accumulated along the path to this node
double bound;       // optimistic upper bound over all completions of this node
// For analysis
double true_opt;    // exact optimum reachable from this state (from suffix DP)
// std::priority_queue is a max-heap, so ordering nodes by bound pops the
// most promising node first (best-first search).
bool operator<(const Node& other) const {
return bound < other.bound;
}
};
// --- BnB Solver for Complete Knapsack ---
// Aggregated result of one branch-and-bound run. Members carry default
// initializers so a freshly constructed instance never exposes
// indeterminate values (time_us in particular was previously only set at
// the end of a run).
struct BnBStats {
    int nodes_visited = 0;  // nodes popped from the frontier
    long long time_us = 0;  // wall-clock duration in microseconds
    int max_val = 0;        // best objective value found
    // Analysis data: level -> list of (bound estimate, true optimum)
    std::map<int, std::vector<std::pair<double, double>>> analysis_data;
};
// Best-first branch and bound for the complete (unbounded) knapsack.
//   items            : item types sorted by density descending
//   capacity         : knapsack capacity
//   dp_table         : suffix DP (dp_table[i][w] = optimum over items i..
//                      with capacity w), used to record each node's true
//                      optimum when analysis is enabled
//   collect_analysis : when true, log (bound, true optimum) per tree level
//   bound_mode       : 0 = fractional bound, 1 = simple global-density bound
BnBStats solve_bnb_complete(const vector<Item>& items, int capacity, const vector<vector<int>>& dp_table, bool collect_analysis, int bound_mode = 0) {
auto start_time = chrono::high_resolution_clock::now();
BnBStats stats;
stats.nodes_visited = 0;
stats.max_val = 0;
double max_global_density = 0;
if (!items.empty()) max_global_density = items[0].density; // items sorted by density desc
priority_queue<Node> pq;
// Root node
double init_bound;
if (bound_mode == 1) init_bound = bound_simple(0, 0, capacity, max_global_density);
else init_bound = bound_fractional(0, 0, capacity, items);
pq.push({0, 0, 0, init_bound, (double)dp_table[0][capacity]});
while (!pq.empty()) {
Node u = pq.top();
pq.pop();
stats.nodes_visited++; // counts every pop, including nodes pruned just below
if (u.bound <= stats.max_val) continue; // Prune: bound cannot beat the incumbent
if (collect_analysis) {
stats.analysis_data[u.level].push_back({u.bound, u.true_opt});
}
if (u.level == items.size()) {
// Leaf: no more item types to branch on; update the incumbent.
if (u.current_val > stats.max_val) {
stats.max_val = u.current_val;
}
continue;
}
// Binary branching for the complete knapsack (keeps the branching
// factor at 2 instead of branching on an unbounded copy count):
//   Child 1: take one more copy of item[level] and STAY at this level,
//            so further copies remain possible.
//   Child 2: take no more copies of item[level]; advance to level + 1.
// Child 1: Take item[level]
if (u.current_weight + items[u.level].weight <= capacity) {
int new_w = u.current_weight + items[u.level].weight;
int new_v = u.current_val + items[u.level].value;
int rem = capacity - new_w;
double b;
if (bound_mode == 1) b = bound_simple(u.level, new_v, rem, max_global_density);
else b = bound_fractional(u.level, new_v, rem, items);
// True val for child 1 (still at u.level, since more copies are allowed)
double true_v = collect_analysis ? (new_v + dp_table[u.level][rem]) : 0;
if (b > stats.max_val) {
pq.push({u.level, new_v, new_w, b, true_v});
}
}
// Child 2: Don't take item[level] (move on to the next item type)
{
int rem = capacity - u.current_weight;
double b;
// Bound calculation starts from level + 1
if (bound_mode == 1) b = bound_simple(u.level + 1, u.current_val, rem, max_global_density);
else b = bound_fractional(u.level + 1, u.current_val, rem, items);
// True val for child 2 (at u.level + 1)
double true_v = collect_analysis ? (u.current_val + dp_table[u.level + 1][rem]) : 0;
if (b > stats.max_val) {
pq.push({u.level + 1, u.current_val, u.current_weight, b, true_v});
}
}
}
auto end_time = chrono::high_resolution_clock::now();
stats.time_us = chrono::duration_cast<chrono::microseconds>(end_time - start_time).count();
return stats;
}
// --- Monte Carlo Estimation ---
// Knuth-style estimator of the search-tree size under the same binary
// branching as the BnB ("take one copy, stay at level" vs "take no more,
// advance"). Each sample follows one random root-to-bottom path; the
// product of branching factors along the path estimates the node count at
// each depth, and the results are averaged over `samples` paths.
// Cleanups vs the original: removed the unused `path_nodes` local and the
// unreachable `branching_factor == 0` branch (move 0 is always available),
// and made the loop comparison signed-safe. Logic is otherwise unchanged.
// NOTE(review): leaves at level == n are never added to the total, so the
// estimate slightly undercounts the full tree — confirm this is intended.
long long monte_carlo_estimate(const vector<Item>& items, int capacity, int samples = 1000) {
    long long total_nodes = 0;
    for (int k = 0; k < samples; ++k) {
        int curr_w = 0;
        int level = 0;
        long long current_multiplier = 1;
        while (level < static_cast<int>(items.size())) {
            vector<int> moves; // 0 = don't take, 1 = take
            // Skipping the current item type is always legal.
            moves.push_back(0);
            // Taking one more copy is legal only if it still fits.
            if (curr_w + items[level].weight <= capacity) {
                moves.push_back(1);
            }
            int branching_factor = static_cast<int>(moves.size());
            total_nodes += current_multiplier; // estimated node count at this depth
            // Pick a uniformly random child and descend.
            int move = moves[rng() % branching_factor];
            current_multiplier *= branching_factor;
            if (move == 0) {
                level++; // move on to the next item type
            } else {
                curr_w += items[level].weight; // take a copy, stay at this level
            }
        }
    }
    return total_nodes / samples;
}
// --- Multiple Knapsack BnB ---
// Branching: at each level choose how many copies k = 0..limit[i] of item i
// to take. Two cost functions are compared:
//   MK Bound 1 (tight): fractional relaxation that KEEPS the per-item count
//                       limits (classic bounded fractional knapsack).
//   MK Bound 2 (loose): fractional relaxation that ALSO drops the count
//                       limits, i.e. the complete-knapsack bound (weaker,
//                       but cheaper to evaluate).
// MK Bound 1 (tight): fractional relaxation that respects per-item limits.
// Greedily consumes the full allowance of each type in density order; the
// first type whose allowance does not fully fit fills the remaining
// capacity fractionally, which terminates the scan.
// Fix vs original: the unused `count` local divided by items[i].weight
// BEFORE the zero-weight guard (division-by-zero UB for weight == 0); the
// dead division is removed and the guard now comes first.
double bound_mk_tight(int level, int current_val, int rem_cap, const vector<Item>& items) {
    double bound = current_val;
    int w = rem_cap;
    for (int i = level; i < static_cast<int>(items.size()); ++i) {
        if (items[i].weight == 0) continue; // guard before any use of weight
        int can_take_weight = items[i].limit * items[i].weight;
        if (w >= can_take_weight) {
            // The whole allowance of type i fits; take all of it.
            w -= can_take_weight;
            bound += items[i].value * items[i].limit;
        } else {
            // Fill what is left fractionally with type i and stop.
            bound += (double)w * items[i].density;
            return bound;
        }
    }
    return bound;
}
// MK Bound 2 (loose): like the complete-knapsack bound — count limits are
// deliberately ignored. Fills the remaining capacity fractionally with the
// densest type that still fits at least one whole copy; if nothing fits,
// no optimistic value is added.
double bound_mk_loose(int level, int current_val, int rem_cap, const vector<Item>& items) {
    const int remaining = rem_cap;
    for (int i = level; i < (int)items.size(); ++i) {
        if (items[i].weight <= remaining) {
            return current_val + static_cast<double>(remaining) * items[i].density;
        }
    }
    return current_val;
}
// Search-tree node for the multiple-knapsack branch and bound. Unlike Node,
// it carries no true-optimum field (no per-node analysis in this variant).
struct NodeMK {
int level;          // next item type to branch on
int current_val;    // value accumulated along the path so far
int current_weight; // weight accumulated along the path so far
double bound;       // optimistic upper bound over completions of this node
// Max-heap by bound: best-first exploration via std::priority_queue.
bool operator<(const NodeMK& other) const {
return bound < other.bound;
}
};
// Best-first branch and bound for the multiple knapsack (bounded copies).
// Branching: one child per exact count k = 0..k_max of item[level]; because
// the count is fixed in the child, every child advances to level + 1 and no
// per-node remaining-limit bookkeeping is needed.
//   bound_mode : 0 = tight (limit-respecting) bound, 1 = loose (limit-free).
BnBStats solve_bnb_mk(const vector<Item>& items, int capacity, int bound_mode) {
auto start_time = chrono::high_resolution_clock::now();
BnBStats stats;
stats.nodes_visited = 0;
stats.max_val = 0;
priority_queue<NodeMK> pq;
double init_bound = (bound_mode == 0) ? bound_mk_tight(0, 0, capacity, items) : bound_mk_loose(0, 0, capacity, items);
pq.push({0, 0, 0, init_bound});
while (!pq.empty()) {
NodeMK u = pq.top();
pq.pop();
stats.nodes_visited++;
if (u.bound <= stats.max_val) continue; // prune: bound cannot beat incumbent
if (u.level == items.size()) {
// Leaf: all item types decided; update the incumbent.
if (u.current_val > stats.max_val) {
stats.max_val = u.current_val;
}
continue;
}
// Largest count of item[level] that both the copy limit and the
// remaining capacity allow.
// NOTE(review): assumes weight >= 1 (guaranteed by generate_items);
// a zero weight would divide by zero here — confirm if inputs change.
int k_max = min(items[u.level].limit, (capacity - u.current_weight) / items[u.level].weight);
// One child per count k; the priority queue orders them by bound.
for (int k = 0; k <= k_max; ++k) {
int new_w = u.current_weight + k * items[u.level].weight;
int new_v = u.current_val + k * items[u.level].value;
int rem = capacity - new_w;
double b;
if (bound_mode == 0) b = bound_mk_tight(u.level + 1, new_v, rem, items);
else b = bound_mk_loose(u.level + 1, new_v, rem, items);
if (b > stats.max_val) {
pq.push({u.level + 1, new_v, new_w, b});
}
}
}
auto end_time = chrono::high_resolution_clock::now();
stats.time_us = chrono::duration_cast<chrono::microseconds>(end_time - start_time).count();
return stats;
}
// Experiment driver: runs all lab experiments and writes CSV results into
// the backtracking/ directory (which must already exist — ofstream fails
// silently otherwise; TODO confirm the run script creates it).
int main() {
int W = 100; // knapsack capacity shared by every experiment
// --- Experiment 1: Monte Carlo estimation of the search-tree size ---
ofstream mc_file("backtracking/mc_estimation.csv");
mc_file << "n,estimated_nodes\n";
cout << "Running Monte Carlo Estimation..." << endl;
for (int n = 5; n <= 25; n += 5) {
auto items = generate_items(n, 40, 100);
long long est = monte_carlo_estimate(items, W, 2000);
mc_file << n << "," << est << "\n";
}
mc_file.close();
// --- Experiments 2 & 3: Cost Function Analysis (fixed n & varying n) ---
// Records (level, bound, true value) per visited node for one sample, and
// the average bound/true ratio across samples for each input size n.
ofstream cost_level_file("backtracking/cost_approx_level.csv");
cost_level_file << "n,level,cost,true_val,ratio\n";
ofstream cost_n_file("backtracking/cost_approx_n.csv");
cost_n_file << "n,avg_ratio\n";
cout << "Running Cost Function Analysis..." << endl;
for (int n = 5; n <= 25; n += 5) {
double total_ratio_sum = 0;
long long total_nodes_count = 0;
// Run multiple samples per input size
for (int s = 0; s < 5; ++s) {
auto items = generate_items(n, 40, 100);
auto dp = compute_suffix_dp_complete(items, W);
// Collect analysis data using the standard fractional bound (mode 0)
auto res = solve_bnb_complete(items, W, dp, true, 0);
for (auto const& [lvl, data] : res.analysis_data) {
for (auto const& pair : data) {
double cost = pair.first;
double true_v = pair.second;
double ratio = (true_v > 0) ? (cost / true_v) : 1.0;
if (s == 0) { // Only log details for one sample to avoid huge files
cost_level_file << n << "," << lvl << "," << cost << "," << true_v << "," << ratio << "\n";
}
total_ratio_sum += ratio;
total_nodes_count++;
}
}
}
if (total_nodes_count > 0)
cost_n_file << n << "," << (total_ratio_sum / total_nodes_count) << "\n";
}
cost_level_file.close();
cost_n_file.close();
// --- Experiment 4: New Cost Function Analysis ---
// Compare mode 0 (fractional, tight) vs mode 1 (simple, loose) on nodes
// visited and wall-clock time, averaged over `runs` random instances.
ofstream new_cost_file("backtracking/new_cost_analysis.csv");
new_cost_file << "n,method,nodes,time_us\n";
cout << "Running New Cost Function Analysis..." << endl;
for (int n = 5; n <= 25; n += 5) {
long long nodes0 = 0, time0 = 0;
long long nodes1 = 0, time1 = 0;
int runs = 10;
for (int r = 0; r < runs; ++r) {
auto items = generate_items(n, 40, 100);
auto dp = compute_suffix_dp_complete(items, W); // required by the signature even with analysis off
auto res0 = solve_bnb_complete(items, W, dp, false, 0); // Fractional
nodes0 += res0.nodes_visited;
time0 += res0.time_us;
auto res1 = solve_bnb_complete(items, W, dp, false, 1); // Simple
nodes1 += res1.nodes_visited;
time1 += res1.time_us;
}
new_cost_file << n << ",Fractional," << nodes0/runs << "," << time0/runs << "\n";
new_cost_file << n << ",Simple," << nodes1/runs << "," << time1/runs << "\n";
}
new_cost_file.close();
// --- Experiment 5: Multiple Knapsack (Additional) ---
// Tight (limit-aware) vs loose (limit-free) bound on the bounded variant.
ofstream mk_file("backtracking/multiple_knapsack.csv");
mk_file << "n,method,nodes,time_us\n";
cout << "Running Multiple Knapsack Analysis..." << endl;
for (int n = 5; n <= 20; n += 5) { // Smaller range as it might be slower
long long nodes0 = 0, time0 = 0; // Tight
long long nodes1 = 0, time1 = 0; // Loose
int runs = 10;
for (int r = 0; r < runs; ++r) {
auto items = generate_items(n, 40, 100, true, 3); // Limit up to 3
auto res0 = solve_bnb_mk(items, W, 0);
nodes0 += res0.nodes_visited;
time0 += res0.time_us;
auto res1 = solve_bnb_mk(items, W, 1);
nodes1 += res1.nodes_visited;
time1 += res1.time_us;
}
mk_file << n << ",TightBound," << nodes0/runs << "," << time0/runs << "\n";
mk_file << n << ",LooseBound," << nodes1/runs << "," << time1/runs << "\n";
}
mk_file.close();
return 0;
}

13
backtracking/task.txt Normal file
View File

@@ -0,0 +1,13 @@
运用回溯与分支限界算法求解完全背包问题并进行分析。具体要求如下:
针对完全背包问题,实现回溯法和分支限界算法;
以物品种类数n为输入规模,固定n,随机产生大量测试样本;
用蒙特卡洛法对分支数量进行估计,根据统计结果给出应用蒙特卡洛估计整数背包问题分支数量的建议;
在分支限界法计算代价函数时,使用其他算法(如动态规划法)计算出真实值,记录结点的代价函数与真实值,分析在同一输入规模下不同层代价函数的近似效果;
改变n,分析在不同输入规模下同一层代价函数的近似效果;
针对实验结果,设计新的代价函数,并分析其有效性(近似能力、剪枝效果、增加的开销等)。
附加:运用分支限界法求解多重背包问题并进行分析,具体要求如下:
每种物品的数量有限,第i种物品的数量上限为ki个;
设计2种以上代价函数实现分支限界法并进行对比分析。