#include "chess.h"
#include "data.h"
/* last modified 12/29/15 */
/*
 *******************************************************************************
 *                                                                             *
 *   NextMove() is used to select the next move from the current move list.   *
 *                                                                             *
 *   The "excluded move" code below simply collects any moves that were       *
 *   searched without being generated (hash move and up to 4 killers).  We    *
 *   save them in the NEXT structure and make sure to exclude them when       *
 *   searching after a move generation to avoid the duplicated effort.        *
 *                                                                             *
 *******************************************************************************
 */
int NextMove(TREE * RESTRICT tree, int ply, int depth, int side, int in_check) {
  unsigned *movep, *bestp;
  int hist, bestval, possible;

/*
 ************************************************************
 *                                                          *
 *  The following "big switch" controls the finite state    *
 *  machine that selects moves.  The "phase" value in the   *
 *  next_status[ply] structure is always set after a move   *
 *  is selected, and it defines the next state of the FSM   *
 *  so that the next move is selected in the proper order.  *
 *                                                          *
 ************************************************************
 */
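/* note: the phases run in the order HASH, GENERATE_CAPTURES, CAPTURES,
   KILLER1-KILLER4 (KILLER3/4, which use the killers from two plies back, are
   skipped when ply < 3), COUNTER_MOVE1/2, MOVE_PAIR1/2, GENERATE_QUIET,
   HISTORY and finally REMAINING.  Each successful exit returns
   ++next_status[ply].order, i.e. the ordinal of the move just selected, which
   the caller presumably uses for ordering-dependent decisions such as LMR. */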
  switch (tree->next_status[ply].phase) {
/*
 ************************************************************
 *                                                          *
 *  First, try the transposition table move (which will be  *
 *  the principal variation move as we first move down the  *
 *  tree) or the best move found in this position during a  *
 *  prior search.                                           *
 *                                                          *
 ************************************************************
 */
    case HASH:
      tree->next_status[ply].order = 0;
      tree->next_status[ply].exclude = &tree->next_status[ply].done[0];
      tree->next_status[ply].phase = GENERATE_CAPTURES;
      if (tree->hash_move[ply]) {
        tree->curmv[ply] = tree->hash_move[ply];
        *(tree->next_status[ply].exclude++) = tree->curmv[ply];
        if (ValidMove(tree, ply, side, tree->curmv[ply])) {
          tree->phase[ply] = HASH;
          return ++tree->next_status[ply].order;
        }
#if defined(DEBUG)
        else
          Print(2048, "ERROR: bad move from hash table, ply=%d\n", ply);
#endif
      }
/*
 ************************************************************
 *                                                          *
 *  Generate captures and sort them based on the simple     *
 *  MVV/LVA ordering where we try to capture the most       *
 *  valuable victim piece possible, using the least         *
 *  valuable attacking piece possible.  Later we will test  *
 *  to see if the capture appears to lose material and we   *
 *  will defer searching it until later.                    *
 *                                                          *
 *  Or, if in check, generate all the legal moves that      *
 *  escape check by using GenerateCheckEvasions().  After   *
 *  we do this, we sort them using MVV/LVA to move captures *
 *  to the front of the list in the correct order.          *
 *                                                          *
 ************************************************************
 */
    case GENERATE_CAPTURES:
      tree->next_status[ply].phase = CAPTURES;
      if (!in_check)
        tree->last[ply] =
            GenerateCaptures(tree, ply, side, tree->last[ply - 1]);
      else
        tree->last[ply] =
            GenerateCheckEvasions(tree, ply, side, tree->last[ply - 1]);
/*
 ************************************************************
 *                                                          *
 *  Now make a pass over the moves to assign the sort value *
 *  for each.  We simply use MVV/LVA move order here.  A    *
 *  simple optimization is to use the pre-computed array    *
 *  MVV_LVA[victim][attacker] which returns a simple value  *
 *  that indicates MVV/LVA order.                           *
 *                                                          *
 ************************************************************
 */
      tree->next_status[ply].remaining = 0;
      for (movep = tree->last[ply - 1]; movep < tree->last[ply]; movep++)
        if (*movep == tree->hash_move[ply]) {
          *movep = 0;
          tree->next_status[ply].exclude = &tree->next_status[ply].done[0];
        } else {
          *movep += MVV_LVA[Captured(*movep)][Piece(*movep)];
          tree->next_status[ply].remaining++;
        }
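/* note: per the NextSort() header below, the low 21 bits of each list entry
   hold the compressed move and the high 11 bits hold the sort key, so adding
   the (presumably pre-scaled) MVV_LVA[victim][attacker] value above orders
   the entries without losing the move itself; Move() strips the key back off
   when a move is actually used. */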
      NextSort(tree, ply);
      tree->next_status[ply].last = tree->last[ply - 1];
      if (in_check)
        goto remaining_moves;
/*
 ************************************************************
 *                                                          *
 *  Try the capture moves, which are in order based on      *
 *  MVV/LVA ordering.  If a larger-valued piece captures a  *
 *  lesser-valued piece, and SEE() says it loses material,  *
 *  this capture will be deferred until later.              *
 *                                                          *
 *  If we are in check, we jump down to the remaining       *
 *  moves phase (we don't need to generate any more moves   *
 *  as GenerateCheckEvasions has already generated all      *
 *  legal moves).                                           *
 *                                                          *
 ************************************************************
 */
    case CAPTURES:
      while (tree->next_status[ply].remaining) {
        tree->curmv[ply] = Move(*(tree->next_status[ply].last++));
        if (!--tree->next_status[ply].remaining)
          tree->next_status[ply].phase = KILLER1;
        if (pcval[Piece(tree->curmv[ply])] <=
            pcval[Captured(tree->curmv[ply])]
            || SEE(tree, side, tree->curmv[ply]) >= 0) {
          *(tree->next_status[ply].last - 1) = 0;
          tree->phase[ply] = CAPTURES;
          return ++tree->next_status[ply].order;
        }
      }
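/* note: captures that appear to lose material (a more valuable piece takes a
   less valuable one and SEE() reports a loss) are not returned here; their
   list entries are left non-zero so the REMAINING phase picks them up later.
   Winning and even captures are zeroed as they are returned, so they are not
   searched a second time. */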
/*
 ************************************************************
 *                                                          *
 *  Now, try the killer moves.  This phase tries the two    *
 *  killers for the current ply without generating moves,   *
 *  which saves time if a cutoff occurs.  After those two   *
 *  killers are searched, we try the killers from two plies *
 *  back since they have greater depth and might produce a  *
 *  cutoff if the current two do not.                       *
 *                                                          *
 ************************************************************
 */
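/* note: killer, counter-move and move-pair candidates come from other
   positions in the tree, so each one must pass both Exclude() (to avoid
   repeating the hash move or an earlier killer) and ValidMove() before it
   can be searched. */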
    case KILLER1:
      possible = tree->killers[ply].move1;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = KILLER2;
        tree->phase[ply] = KILLER1;
        return ++tree->next_status[ply].order;
      }
    case KILLER2:
      possible = tree->killers[ply].move2;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = (ply < 3) ? COUNTER_MOVE1 : KILLER3;
        tree->phase[ply] = KILLER2;
        return ++tree->next_status[ply].order;
      }
    case KILLER3:
      possible = tree->killers[ply - 2].move1;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = KILLER4;
        tree->phase[ply] = KILLER3;
        return ++tree->next_status[ply].order;
      }
    case KILLER4:
      possible = tree->killers[ply - 2].move2;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = COUNTER_MOVE1;
        tree->phase[ply] = KILLER4;
        return ++tree->next_status[ply].order;
      }
/*
 ************************************************************
 *                                                          *
 *  Now, before we give up and generate moves, try the      *
 *  counter-move which was a move that failed high in the   *
 *  past when the move at the previous ply was played.      *
 *                                                          *
 ************************************************************
 */
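/* note: the counter-move and move-pair tables below are indexed by
   "move & 4095", i.e. the low 12 bits of the triggering move, which
   presumably hold its from and to squares. */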
    case COUNTER_MOVE1:
      possible = tree->counter_move[tree->curmv[ply - 1] & 4095].move1;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = COUNTER_MOVE2;
        tree->phase[ply] = COUNTER_MOVE1;
        return ++tree->next_status[ply].order;
      }
    case COUNTER_MOVE2:
      possible = tree->counter_move[tree->curmv[ply - 1] & 4095].move2;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = MOVE_PAIR1;
        tree->phase[ply] = COUNTER_MOVE2;
        return ++tree->next_status[ply].order;
      }
/*
 ************************************************************
 *                                                          *
 *  Finally we try paired moves, which are simply moves     *
 *  that were good when played after the other move in the  *
 *  pair was played two plies back.                         *
 *                                                          *
 ************************************************************
 */
    case MOVE_PAIR1:
      possible = tree->move_pair[tree->curmv[ply - 2] & 4095].move1;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = MOVE_PAIR2;
        tree->phase[ply] = MOVE_PAIR1;
        return ++tree->next_status[ply].order;
      }
    case MOVE_PAIR2:
      possible = tree->move_pair[tree->curmv[ply - 2] & 4095].move2;
      if (!Exclude(tree, ply, possible) &&
          ValidMove(tree, ply, side, possible)) {
        tree->curmv[ply] = possible;
        *(tree->next_status[ply].exclude++) = possible;
        tree->next_status[ply].phase = GENERATE_QUIET;
        tree->phase[ply] = MOVE_PAIR2;
        return ++tree->next_status[ply].order;
      }
/*
 ************************************************************
 *                                                          *
 *  Now, generate all non-capturing moves, which get added  *
 *  to the move list behind any captures we did not search. *
 *                                                          *
 ************************************************************
 */
    case GENERATE_QUIET:
      if (!in_check)
        tree->last[ply] =
            GenerateNoncaptures(tree, ply, side, tree->last[ply]);
      tree->next_status[ply].last = tree->last[ply - 1];
/*
 ************************************************************
 *                                                          *
 *  Now, try the history moves.  This phase takes the       *
 *  complete move list and makes a pass over it in a        *
 *  classic selection-sort, choosing the move with the      *
 *  highest history score.  This phase is only done one     *
 *  time, as it also purges the hash, killer, counter and   *
 *  paired moves from the list.                             *
 *                                                          *
 ************************************************************
 */
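/* note: history ordering is only applied when depth >= 6; at shallower depths
   the pass below just purges the already-searched moves and control falls
   through to the remaining-moves phase.  Since remaining is halved, only
   roughly the best half of the quiet moves are pulled out by history score
   before the rest are searched in list order. */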
      tree->next_status[ply].remaining = 0;
      tree->next_status[ply].phase = HISTORY;
      bestval = -99999999;
      bestp = 0;
      for (movep = tree->last[ply - 1]; movep < tree->last[ply]; movep++)
        if (*movep) {
          if (Exclude(tree, ply, *movep))
            *movep = 0;
          else if (depth >= 6) {
            tree->next_status[ply].remaining++;
            hist = history[HistoryIndex(side, *movep)];
            if (hist > bestval) {
              bestval = hist;
              bestp = movep;
            }
          }
        }
      tree->next_status[ply].remaining /= 2;
      if (bestp) {
        tree->curmv[ply] = Move(*bestp);
        *bestp = 0;
        tree->phase[ply] = HISTORY;
        return ++tree->next_status[ply].order;
      }
      goto remaining_moves;
/*
 ************************************************************
 *                                                          *
 *  Now, continue with the history moves, but since one     *
 *  pass has been made over the complete move list, there   *
 *  are no hash/killer moves left in the list, so the tests *
 *  for these can be avoided.                               *
 *                                                          *
 ************************************************************
 */
    case HISTORY:
      if (depth >= 6) {
        bestval = -99999999;
        bestp = 0;
        for (movep = tree->last[ply - 1]; movep < tree->last[ply]; movep++)
          if (*movep) {
            hist = history[HistoryIndex(side, *movep)];
            if (hist > bestval) {
              bestval = hist;
              bestp = movep;
            }
          }
        if (bestp) {
          tree->curmv[ply] = Move(*bestp);
          *bestp = 0;
          if (--(tree->next_status[ply].remaining) <= 0) {
            tree->next_status[ply].phase = REMAINING;
            tree->next_status[ply].last = tree->last[ply - 1];
          }
          tree->phase[ply] = HISTORY;
          return ++tree->next_status[ply].order;
        }
      }
/*
 ************************************************************
 *                                                          *
 *  Then we try the rest of the set of moves, and we do     *
 *  not use the Exclude() function to skip any moves we     *
 *  have already searched (hash or killers) since the       *
 *  history phase above has already done that.              *
 *                                                          *
 ************************************************************
 */
  remaining_moves:
    tree->next_status[ply].phase = REMAINING;
    tree->next_status[ply].last = tree->last[ply - 1];
    case REMAINING:
      for (; tree->next_status[ply].last < tree->last[ply];
          tree->next_status[ply].last++)
        if (*tree->next_status[ply].last) {
          tree->curmv[ply] = Move(*tree->next_status[ply].last++);
          tree->phase[ply] = REMAINING;
          return ++tree->next_status[ply].order;
        }
      return NONE;
    default:
      Print(4095, "oops! next_status.phase is bad! [phase=%d]\n",
          tree->next_status[ply].phase);
  }
  return NONE;
}
356 | |||
108 | pmbaty | 357 | /* last modified 07/03/14 */ |
33 | pmbaty | 358 | /* |
359 | ******************************************************************************* |
||
360 | * * |
||
361 | * NextRootMove() is used to select the next move from the root move list. * |
||
362 | * * |
||
108 | pmbaty | 363 | * There is one subtle trick here that must not be broken. Crafty does LMR * |
364 | * at the root, and the reduction amount is dependent on the order in which * |
||
365 | * a specific move is searched. With the recent changes dealing with this * |
||
366 | * issue in non-root moves, NextRootMove() now simply returns the move's * |
||
367 | * order within the move list. This might be a problem if the last move in * |
||
368 | * the list fails high, because it would be reduced on the re-search, which * |
||
369 | * is something we definitely don't want. The solution is found in the code * |
||
370 | * inside Iterate(). When a move fails high, it is moved to the top of the * |
||
371 | * move list so that (a) it is searched first on the re-search (more on this * |
||
372 | * in a moment) and (b) since its position in the move list is now #1, it * |
||
373 | * will get an order value of 1 which is never reduced. The only warning is * |
||
374 | * that Iterate() MUST re-sort the ply-1 move list after a fail high, even * |
||
375 | * though it seems like a very tiny computational waste. * |
||
376 | * * |
||
377 | * The other reason for doing the re-sort has to do with the parallel search * |
||
378 | * algorithm. When one thread fails high at the root, it stops the others. * |
||
379 | * they have to carefully undo the "this move has been searched" flag since * |
||
380 | * these incomplete searches need to be re-done after the fail-high move is * |
||
381 | * finished. But it is possible some of those interrupted moves appear * |
||
382 | * before the fail high move in the move list. Which would lead Crafty to * |
||
383 | * fail high, then produce a different best move's PV. By re-sorting, now * |
||
384 | * the fail-high move is always searched first since here we just start at * |
||
385 | * the top of the move list and look for the first "not yet searched" move * |
||
386 | * to return. It solves several problems, but if that re-sort is not done, * |
||
387 | * things go south quickly. The voice of experience is all I will say here. * |
||
388 | * * |
||
33 | pmbaty | 389 | ******************************************************************************* |
390 | */ |
||
int NextRootMove(TREE * RESTRICT tree, TREE * RESTRICT mytree, int side) {
  uint64_t total_nodes;
  int which, i, t;

/*
 ************************************************************
 *                                                          *
 *  First, we check to see if we only have one legal move.  *
 *  If so, and we are not pondering, we stop after a short  *
 *  search, saving time, but making sure we have something  *
 *  to ponder.                                              *
 *                                                          *
 ************************************************************
 */
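/* note: the "iteration > 10" test below presumably ensures that a reasonable
   short search has already completed, so a score and a move to ponder are
   available before the search is aborted. */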
  if (!annotate_mode && !pondering && !booking && n_root_moves == 1 &&
      iteration > 10) {
    abort_search = 1;
    return NONE;
  }
/*
 ************************************************************
 *                                                          *
 *  For the moves at the root of the tree, the list has     *
 *  already been generated and sorted.                      *
 *                                                          *
 *  We simply have to find the first move that has a zero   *
 *  "already searched" flag and choose that one.  We do set *
 *  the "already searched" flag for this move before we     *
 *  return so that it won't be searched again in another    *
 *  thread.                                                 *
 *                                                          *
 ************************************************************
 */
  for (which = 0; which < n_root_moves; which++) {
    if (!(root_moves[which].status & 8)) {
      if (search_move) {
        if (root_moves[which].move != search_move) {
          root_moves[which].status |= 8;
          continue;
        }
      }
      tree->curmv[1] = root_moves[which].move;
      root_moves[which].status |= 8;
/*
 ************************************************************
 *                                                          *
 *  We have found a move to search.  If appropriate, we     *
 *  display this move, along with the time and information  *
 *  such as which move this is in the list and how many     *
 *  are left to search before this iteration is done, and   *
 *  a "status" character that shows the state of the        *
 *  current search ("?" means we are pondering, waiting on  *
 *  a move to be entered, "*" means we are searching and    *
 *  our clock is running).  We also display the NPS for     *
 *  the search, simply for information about how fast the   *
 *  machine is running.                                     *
 *                                                          *
 ************************************************************
 */
      if (ReadClock() - start_time > noise_level && display_options & 16) {
        sprintf(mytree->remaining_moves_text, "%d/%d", which + 1,
            n_root_moves);
        end_time = ReadClock();
        Lock(lock_io);
        if (pondering)
          printf(" %2i %s%7s? ", iteration,
              Display2Times(end_time - start_time),
              mytree->remaining_moves_text);
        else
          printf(" %2i %s%7s* ", iteration,
              Display2Times(end_time - start_time),
              mytree->remaining_moves_text);
        printf("%d. ", move_number);
        if (Flip(side))
          printf("... ");
        strcpy(mytree->root_move_text, OutputMove(tree, 1, side,
                tree->curmv[1]));
        total_nodes = block[0]->nodes_searched;
        for (t = 0; t < smp_max_threads; t++)
          for (i = 0; i < 64; i++)
            if (!(thread[t].blocks & SetMask(i)))
              total_nodes += block[t * 64 + 1 + i]->nodes_searched;
        nodes_per_second = (unsigned int) (total_nodes * 100 / Max(end_time - start_time, 1)); // Pierre-Marie Baty -- added type cast
        i = strlen(mytree->root_move_text);
        i = (i < 8) ? i : 8;
        strncat(mytree->root_move_text, " ", 8 - i);
        printf("%s", mytree->root_move_text);
        printf("(%snps) \r", DisplayKMB(nodes_per_second, 0));
        fflush(stdout);
        Unlock(lock_io);
      }
/*
 ************************************************************
 *                                                          *
 *  Bit of a tricky exit.  If the move is not flagged as    *
 *  "OK to search in parallel or reduce" then we set        *
 *  tree->phase[1] to "DO_NOT_REDUCE", which will prevent   *
 *  Search() from reducing the move (LMR).  Otherwise we    *
 *  use the more common "REMAINING" value which allows LMR  *
 *  to be used on those root moves.                         *
 *                                                          *
 ************************************************************
 */
      if (root_moves[which].status & 4)
        tree->phase[1] = DO_NOT_REDUCE;
      else
        tree->phase[1] = REMAINING;
      return which + 1;
    }
  }
  return NONE;
}
503 | |||
108 | pmbaty | 504 | /* last modified 11/13/14 */ |
33 | pmbaty | 505 | /* |
506 | ******************************************************************************* |
||
507 | * * |
||
508 | * NextRootMoveParallel() is used to determine if the next root move can be * |
||
509 | * searched in parallel. If it appears to Iterate() that one of the moves * |
||
510 | * following the first move might become the best move, the 'no parallel' * |
||
511 | * flag is set to speed up finding the new best move. This flag is set if * |
||
512 | * this root move has an "age" value > 0 which indicates this move was the * |
||
108 | pmbaty | 513 | * "best move" within the previous 3 search iterations. We want to search * |
514 | * such moves as quickly as possible, prior to starting a parallel search at * |
||
515 | * the root, in case this move once again becomes the best move and provides * |
||
516 | * a better alpha bound. * |
||
33 | pmbaty | 517 | * * |
518 | ******************************************************************************* |
||
519 | */ |
||
520 | int NextRootMoveParallel(void) { |
||
521 | int which; |
||
522 | |||
523 | /* |
||
524 | ************************************************************ |
||
525 | * * |
||
526 | * Here we simply check the root_move status flag that is * |
||
527 | * set in Iterate() after each iteration is completed. A * |
||
528 | * value of "1" indicates this move has to be searched by * |
||
108 | pmbaty | 529 | * all processors together, splitting at the root must * |
530 | * wait until we have searched all moves that have been * |
||
531 | * "best" during the previous three plies. * |
||
33 | pmbaty | 532 | * * |
108 | pmbaty | 533 | * The root move list has a flag, bit 3, used to indicate * |
534 | * that this move has been best recently. If this bit is * |
||
535 | * set, we are forced to use all processors to search this * |
||
536 | * move so that it is completed quickly rather than being * |
||
537 | * searched by just one processor, and taking much longer * |
||
538 | * to get a score back. We do this to give the search the * |
||
539 | * best opportunity to fail high on this move before we * |
||
540 | * run out of time. * |
||
541 | * * |
||
33 | pmbaty | 542 | ************************************************************ |
543 | */ |
||
544 | for (which = 0; which < n_root_moves; which++) |
||
545 | if (!(root_moves[which].status & 8)) |
||
546 | break; |
||
108 | pmbaty | 547 | if (which < n_root_moves && !(root_moves[which].status & 4)) |
33 | pmbaty | 548 | return 1; |
549 | return 0; |
||
550 | } |
||
551 | |||
108 | pmbaty | 552 | /* last modified 09/11/15 */ |
33 | pmbaty | 553 | /* |
554 | ******************************************************************************* |
||
555 | * * |
||
556 | * Exclude() searches the list of moves searched prior to generating a move * |
||
557 | * list to exclude those that were searched via a hash table best move or * |
||
558 | * through the killer moves for the current ply and two plies back. * |
||
559 | * * |
||
108 | pmbaty | 560 | * The variable next_status[].excluded is the total number of non-generated * |
561 | * moves we searched. next_status[].remaining is initially set to excluded, * |
||
562 | * but each time an excluded move is found, the counter is decremented. * |
||
563 | * Once all excluded moves have been found, we avoid running through the * |
||
564 | * list of excluded moves on each call and simply return. * |
||
33 | pmbaty | 565 | * * |
566 | ******************************************************************************* |
||
567 | */ |
||
568 | int Exclude(TREE * RESTRICT tree, int ply, int move) { |
||
108 | pmbaty | 569 | unsigned *i; |
33 | pmbaty | 570 | |
108 | pmbaty | 571 | if (tree->next_status[ply].exclude > &tree->next_status[ply].done[0]) |
572 | for (i = &tree->next_status[ply].done[0]; |
||
573 | i < tree->next_status[ply].exclude; i++) |
||
574 | if (move == *i) |
||
33 | pmbaty | 575 | return 1; |
576 | return 0; |
||
577 | } |
||

/* last modified 05/20/15 */
/*
 *******************************************************************************
 *                                                                             *
 *   NextSort() is used to sort the move list.  This is a list of 32 bit      *
 *   values where the rightmost 21 bits is the compressed move, and the left- *
 *   most 11 bits are the sort key (MVV/LVA values).                          *
 *                                                                             *
 *******************************************************************************
 */
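/*
   For reference, the packing described above implies accessors along these
   lines (the real macros live elsewhere, e.g. in chess.h; treat these
   definitions as an illustrative assumption rather than the actual source):

       Move(x)  == ((x) & 0x1fffff)       low 21 bits: compressed move
       SortV(x) == ((x) >> 21)            high 11 bits: MVV/LVA sort key

   NextMove() adds MVV_LVA[victim][attacker] (pre-scaled into the key field)
   onto a list entry, NextSort() orders entries by SortV(), and Move() strips
   the key back off when the move is finally searched.
*/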
void NextSort(TREE * RESTRICT tree, int ply) {
  unsigned temp, *movep, *tmovep;

/*
 ************************************************************
 *                                                          *
 *  This is a simple insertion sort algorithm.              *
 *                                                          *
 ************************************************************
 */
  if (tree->last[ply] > tree->last[ply - 1] + 1) {
    for (movep = tree->last[ply - 1] + 1; movep < tree->last[ply]; movep++) {
      temp = *movep;
      tmovep = movep - 1;
      while (tmovep >= tree->last[ply - 1] && SortV(*tmovep) < SortV(temp)) {
        *(tmovep + 1) = *tmovep;
        tmovep--;
      }
      *(tmovep + 1) = temp;
    }
  }
}