3 Copyright (c) Scott Gasch
11 Search support routines. See also search.c, root.c, and split.c.
23 extern SCORE g_iRootScore[2];
24 extern ULONG g_uHardExtendLimit;
25 extern ULONG g_uIterateDepth;
// UpdatePV: hoist the principal variation found one ply deeper up into
// this ply's PV array.  Called when a move improves alpha at a PV node.
// NOTE(review): this excerpt is missing lines from the original file;
// the comments below annotate only the code that is visible here.
28 UpdatePV(SEARCHER_THREAD_CONTEXT *ctx, MOVE mv)
33 Update the principal variation at this depth.
37 SEARCHER_THREAD_CONTEXT *ctx,
// "this" is the PV_INFO slot for the current ply; "next" is the slot
// one ply deeper whose PV we are copying upward.
46 PLY_INFO *this = &(ctx->sPlyInfo[ctx->uPly]);
47 PLY_INFO *next = (this + 1);
50 ASSERT(u < MAX_PLY_PER_SEARCH);
// Copy the deeper PV up one level; a zero uMove terminates the line.
53 while(u < MAX_PLY_PER_SEARCH)
55 this->PV[u] = next->PV[u];
56 if (0 == this->PV[u].uMove)
// CheckInputAndTimers: periodic poll (driven by the node-count mask)
// for pending user input, soft/hard time-limit expiry, and the raw
// node-count limit.  Sets TIMER_STOPPING in g_MoveTimer.bvFlags when
// the search should unwind.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated below.
66 CheckInputAndTimers(IN SEARCHER_THREAD_CONTEXT *ctx)
71 This code is called from search when the total number of nodes
72 searched is a multiple of g_MoveTimer.uNodeCheckMask. Its job is
73 to check for user input waiting on the input queue and to check to
74 see if we are over a time limit.
82 static FLAG : TRUE if the search should terminate and unroll,
83 FALSE if the search can continue
90 // See if there's user input to process.
// Only thread 0 handles input; other searcher threads learn about a
// stop request via the shared MoveTimer flag checked below.
92 if ((ctx->uThreadNumber == 0) && (NumberOfPendingInputEvents() != 0))
97 // If the user input can be handled in the middle of the
98 // search then it will have been. If it cannot be handled
99 // without stopping the search then ParseUserInput will flip
100 // this bit in the MoveTimer to tell us (and any other
101 // searcher threads) to stop and unroll.
103 if (WE_SHOULD_STOP_SEARCHING)
110 // Also check time limits now.
// A soft limit of -1 means "no time limit" (e.g. fixed-depth search).
112 if (-1 != g_MoveTimer.dSoftTimeLimit)
114 dTimeStamp = SystemTimeStamp();
// Obvious move / won't-unblock conditions end the search immediately,
// regardless of how much time remains.
115 if ((g_MoveTimer.bvFlags & TIMER_CURRENT_OBVIOUS) ||
116 (g_MoveTimer.bvFlags & TIMER_CURRENT_WONT_UNBLOCK))
118 Trace("OBVIOUS MOVE / WON'T UNBLOCK --> stop searching now\n");
119 g_MoveTimer.bvFlags |= TIMER_STOPPING;
123 if (dTimeStamp > g_MoveTimer.dSoftTimeLimit)
126 // If we have exceeded the soft time limit, move now unless...
// ...the position looks unstable (just out of book, critical root,
// resolving a root fail-low/high, many root fail-lows).  Stopping is
// allowed only while still on the first root move — presumably because
// later root moves have not yet produced a usable score; TODO confirm.
128 if ((!(g_MoveTimer.bvFlags & TIMER_JUST_OUT_OF_BOOK)) &&
129 (!(g_MoveTimer.bvFlags & TIMER_ROOT_POSITION_CRITICAL)) &&
130 (!(g_MoveTimer.bvFlags & TIMER_RESOLVING_ROOT_FL)) &&
131 (!(g_MoveTimer.bvFlags & TIMER_RESOLVING_ROOT_FH)) &&
132 (!(g_MoveTimer.bvFlags & TIMER_MANY_ROOT_FLS)) &&
133 (g_MoveTimer.bvFlags & TIMER_SEARCHING_FIRST_MOVE))
135 Trace("SOFT TIMER (%3.1f sec) --> "
136 "stop searching now\n",
137 dTimeStamp - g_MoveTimer.dStartTime);
138 g_MoveTimer.bvFlags |= TIMER_STOPPING;
144 // If we have exceeded the hard limit, we have to move no matter
// what: the hard limit is unconditional.
147 if (dTimeStamp > g_MoveTimer.dHardTimeLimit)
149 Trace("HARD TIMER (%3.1f sec) --> "
150 "stop searching now\n",
151 dTimeStamp - g_MoveTimer.dStartTime);
152 g_MoveTimer.bvFlags |= TIMER_STOPPING;
158 // Also check raw node count limit here.
// Zero means "no node limit"; counter is this context's total nodes.
160 if (g_Options.u64MaxNodeCount != 0ULL &&
161 ctx->sCounters.tree.u64TotalNodeCount > g_Options.u64MaxNodeCount)
163 Trace("NODE COUNT LIMIT (limit %llu, searched %llu) "
164 "--> stop searching now\n", g_Options.u64MaxNodeCount,
165 ctx->sCounters.tree.u64TotalNodeCount);
166 g_MoveTimer.bvFlags |= TIMER_STOPPING;
// ThreadUnderTerminatingSplit: returns TRUE if any split point this
// thread is working under has been flagged fTerminate (the split's
// result is no longer needed, so this thread should unwind).
// NOTE(review): excerpt is missing lines from the original file.
174 ThreadUnderTerminatingSplit(IN SEARCHER_THREAD_CONTEXT *ctx)
// Scan every split-info slot attached to this context.
178 for (u = 0; u < NUM_SPLIT_PTRS_IN_CONTEXT; u++)
180 if (ctx->pSplitInfo[u] != NULL)
182 if (ctx->pSplitInfo[u]->fTerminate == TRUE) return(TRUE);
// WeShouldDoHistoryPruning: decide whether the (already-played) move mv
// is a candidate for history-based reduction at this node.  Requires a
// zero-width (scout) window, enough remaining depth, and a quiet,
// non-tactical move that is not a killer and rarely fails high.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible conditions are annotated.
191 WeShouldDoHistoryPruning(IN SCORE iRoughEval,
194 IN SEARCHER_THREAD_CONTEXT *ctx,
195 IN ULONG uRemainingDepth,
196 IN ULONG uLegalMoves,
204 Decide whether or not to do history pruning at this node for the
205 given move. Note: this function is called after the move has
206 been played on the board.
210 SEARCHER_THREAD_CONTEXT *ctx,
211 ULONG uRemainingDepth,
222 ASSERT(ctx->uPly > 0);
224 ASSERT((uMoveNum > 0) || (uLegalMoves == 0));
// Only reduce with >= two plies to go, and only at scout-window nodes
// (beta == alpha + 1), i.e. never in the PV proper.
225 if ((uRemainingDepth >= TWO_PLY) &&
226 (iBeta == (iAlpha + 1)) &&
// Disabled experiment kept for reference: also require the rough eval
// plus the move's score to be well below alpha.
229 // (iRoughEval + ComputeMoveScore(ctx, mv, uMoveNum - 1) + 200 < iAlpha) &&
// Never reduce tactical moves: check escapes/deliveries, captures,
// promotions, or killer moves at this ply or two plies shallower.
230 (!IS_ESCAPING_CHECK(mv)) &&
231 (!IS_CAPTURE_OR_PROMOTION(mv)) &&
232 (!IS_CHECKING_MOVE(mv)) &&
233 (!IS_SAME_MOVE(mv, ctx->mvKiller[ctx->uPly-1][0])) &&
234 (!IS_SAME_MOVE(mv, ctx->mvKiller[ctx->uPly-1][1])) &&
236 (!IS_SAME_MOVE(mv, ctx->mvKiller[ctx->uPly-3][0]) &&
237 !IS_SAME_MOVE(mv, ctx->mvKiller[ctx->uPly-3][1]))) &&
// Finally, the move's historical fail-high rate must be low.
238 (GetMoveFailHighPercentage(mv) <= 10))
240 ASSERT(!InCheck(&ctx->sPosition, ctx->sPosition.uToMove));
// ComputeMoveScore: estimate a move's value.  Default is material
// gained (captured + promoted piece values); if a move-stack index is
// supplied, use the generator's stored iValue instead, stripping the
// SORT_THESE_FIRST flag bits when present.  Result is clamped to >= 0.
// NOTE(review): excerpt is missing lines from the original file.
248 ComputeMoveScore(IN SEARCHER_THREAD_CONTEXT *ctx,
// Fallback score: raw material delta of the move.
252 SCORE iMoveScore = (PIECE_VALUE(mv.pCaptured) +
253 PIECE_VALUE(mv.pPromoted));
// (ULONG)-1 is the sentinel for "no move-stack entry available".
254 if (uMoveNum != (ULONG)-1)
256 ASSERT(uMoveNum < MAX_MOVE_STACK);
257 ASSERT(IS_SAME_MOVE(mv, ctx->sMoveStack.mvf[uMoveNum].mv));
258 iMoveScore = ctx->sMoveStack.mvf[uMoveNum].iValue;
// Stored values at or above SORT_THESE_FIRST carry ordering flag bits
// in the high bits; strip them to recover the plain score.
259 if (iMoveScore >= SORT_THESE_FIRST)
261 ASSERT(iMoveScore > 0);
262 iMoveScore &= STRIP_OFF_FLAGS;
// Never return a negative move score.
266 iMoveScore = MIN0(iMoveScore);
// ComputeMoveExtension: compute the per-move search extension for the
// move just played (checks, pawn pushes, replies to check, endgame and
// recapture heuristics), then clamp the total to <= ONE_PLY and scale
// it back by a ply-indexed reduction table far from the root.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
274 ComputeMoveExtension(IN SEARCHER_THREAD_CONTEXT *ctx,
280 IN OUT INT *piExtend)
285 This routine is called by Search and by HelpSearch (in split.c).
286 Its job is to assign a move-based extension for the move mv.
287 This extension should be between -ONE_PLY (reduction) and +ONE_PLY
290 Note: this code is executed after the move has already been played
295 IN SEARCHER_THREAD_CONTEXT *ctx : the searcher thread's context
296 IN SCORE iAlpha : current alpha
297 IN SCORE iBeta : current beta
298 IN ULONG uMoveNum : move number in stack
299 IN ULONG uDepth : remaining depth
300 IN OUT INT *piExtend : extension amount (in/out)
// Per-ply score window: some extensions below apply only when the
// rough eval is within _window[ply] (or 2x it) of the root score.
// Entries 0 and 1 are -1 because those plies are never consulted
// (ctx->uPly >= 2 is asserted below).
308 static const SCORE _window[MAX_PLY_PER_SEARCH] = {
309 -1, -1, 500, 500, 425, 425, 350, 350, 275, 275, 200, 200, // 0..11
310 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, // 12..23
311 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, // 24..35
312 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, // 36..47
313 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, 150, // 48..59
314 150, 150, 150, 150 // 60..63
316 POSITION *pos = &ctx->sPosition;
// mv is the move just played (ply-1); mvLast is the opponent's move
// before that (ply-2).
317 MOVE mvLast = ctx->sPlyInfo[ctx->uPly-2].mv;
318 MOVE mv = ctx->sPlyInfo[ctx->uPly-1].mv;
319 ULONG uColor = GET_COLOR(mv.pMoved);
327 ASSERT(IS_VALID_SCORE(iAlpha));
328 ASSERT(IS_VALID_SCORE(iBeta));
329 ASSERT(iAlpha < iBeta);
330 ASSERT(ctx->uPly >= 2);
331 ASSERT(ctx->uPly < MAX_PLY_PER_SEARCH);
333 ASSERT(*piExtend >= 0);
336 // Checking move extension.
337 if (IS_CHECKING_MOVE(mv))
339 ASSERT(InCheck(pos, pos->uToMove));
340 INC(ctx->sCounters.extension.uCheck);
341 *piExtend += ONE_PLY;
// With plenty of enemy non-pawn material the full one-ply extension
// stands; otherwise most of it is taken back (checks in bare-king
// endings are rarely worth a full extension).
342 if (pos->uNonPawnMaterial[FLIP(pos->uToMove)] >
343 (VALUE_KING + VALUE_BISHOP))
345 goto enforce_ceiling;
347 *piExtend -= THREE_QUARTERS_PLY;
350 // Pawn push extension
351 if (IS_PAWN(mv.pMoved))
// Extend pawns reaching the 7th rank (2nd for black); a bit more if
// the nearest enemy non-pawn piece (index 0 = king) is close by.
353 if (((GET_COLOR(mv.pMoved) == WHITE) && RANK7(mv.cTo)) ||
354 ((GET_COLOR(mv.pMoved) == BLACK) && RANK2(mv.cTo)))
356 *piExtend += THREE_QUARTERS_PLY;
357 if (DISTANCE(pos->cNonPawns[GET_COLOR(mv.pMoved)][0], mv.cTo) < 3)
359 *piExtend += QUARTER_PLY;
361 INC(ctx->sCounters.extension.uPawnPush);
362 goto enforce_ceiling;
366 // Responses to check extensions:
367 if (IS_CHECKING_MOVE(mvLast))
369 ASSERT(IS_ESCAPING_CHECK(mv));
370 iMoveScore = iRoughEval + ComputeMoveScore(ctx, mv, uMoveNum);
372 // One legal move in reply to check...
// ...with a shaky king: extend a quarter ply, plus a bonus when the
// rough score after the move is near alpha.
373 if (ONE_LEGAL_MOVE(ctx, ctx->uPly - 1) &&
374 CountKingSafetyDefects(&ctx->sPosition, uColor) > 1)
376 *piExtend += (QUARTER_PLY +
378 (iMoveScore + VALUE_KNIGHT > iAlpha));
379 INC(ctx->sCounters.extension.uOneLegalMove);
382 // Singular response to check...
// A capture that rescues an otherwise-lost position (rough eval well
// below alpha, but with the captured material roughly back to alpha).
383 if ((mv.pCaptured) &&
384 (iRoughEval + 225 < iAlpha) &&
385 (iMoveScore + 75 > iAlpha))
387 *piExtend += ONE_PLY;
388 INC(ctx->sCounters.extension.uSingularReplyToCheck);
389 goto enforce_ceiling;
393 // These last ones only happen if we are close to the root score.
394 ASSERT(_window[ctx->uPly] > 0);
395 if ((iRoughEval >= (g_iRootScore[uColor] - _window[ctx->uPly] * 2)) &&
396 (iRoughEval <= (g_iRootScore[uColor] + _window[ctx->uPly] * 2)))
// Endgame extension: the game entered a two-piece (kings-only
// non-pawn) ending during this search line.
399 if ((ctx->sPlyInfo[1].uTotalNonPawns > 2) &&
400 ((pos->uNonPawnCount[BLACK][0] +
401 pos->uNonPawnCount[WHITE][0]) == 2))
403 if ((mv.pCaptured && !IS_PAWN(mv.pCaptured)) ||
404 (mvLast.pCaptured && !IS_PAWN(mvLast.pCaptured)))
406 *piExtend += HALF_PLY;
407 INC(ctx->sCounters.extension.uEndgame);
411 // Recapture extension
// Only near the horizon (<= three plies left) and inside the tighter
// score window: extend recaptures of equal material, or captures on
// the square where the opponent just promoted.
412 if ((iRoughEval >= (g_iRootScore[uColor] - _window[ctx->uPly])) &&
413 (iRoughEval <= (g_iRootScore[uColor] + _window[ctx->uPly])))
415 if (uDepth <= THREE_PLY)
417 if ((mv.pCaptured) && !IS_PAWN(mv.pCaptured) &&
418 ((PIECE_VALUE(mv.pCaptured) ==
419 PIECE_VALUE(mvLast.pCaptured)) ||
420 ((mvLast.pPromoted) && (mv.cTo == mvLast.cTo))))
422 iMoveScore = ComputeMoveScore(ctx, mv, uMoveNum);
425 *piExtend += HALF_PLY;
426 INC(ctx->sCounters.extension.uRecapture);
435 // Don't let extensions add up to > 1 ply
// iM duplicates the clamp so the ASSERT can cross-check MINU.
437 iM = MIN(ONE_PLY, *piExtend);
439 ASSERT(!(*piExtend & 0x80000000));
440 *piExtend = MINU(ONE_PLY, *piExtend);
441 ASSERT(iM == *piExtend);
443 // Scale back extensions that are very far from the root:
445 // Iterate g_Soft g_Hard
447 // 0 1 2 3/2 3 4 (x Iterate)
448 // |-------------------|----|----|---------|----...
449 // |full full full full|-1/4|-1/2|-3/4 -3/4|none...
450 ASSERT(ctx->uPly != 0);
451 ASSERT(ctx->uPly <= MAX_PLY_PER_SEARCH);
452 *piExtend -= g_uExtensionReduction[ctx->uPly-1];
// Clamp to >= 0 after the reduction (again cross-checked via iM),
// then record the final amount in the ply info for this move.
454 iM = MAX(0, *piExtend);
456 *piExtend = MAX0(*piExtend);
457 ASSERT(iM == *piExtend);
458 ctx->sPlyInfo[ctx->uPly-1].iExtensionAmount = *piExtend;
459 ASSERT((0 <= *piExtend) && (*piExtend <= ONE_PLY));
// RescoreMovesViaSearch: "internal iterative deepening"-style move
// ordering.  With no hash move / good capture at a PV node, search all
// generated moves at reduced depth and use the resulting scores to
// order the full-depth search; the best move is flagged to sort first.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
467 RescoreMovesViaSearch(IN SEARCHER_THREAD_CONTEXT *ctx,
475 This is my implementation of "internal iterative deepening". It's
476 not really IID because it doesn't recurse on the same position.
477 This code is called when we are at a PV node and have no hash move
478 and no good capture from the generator. Instead of relying on
479 only history / killer heuristics to order the moves instead we
480 reduce the depth by two ply and search all the moves generated.
481 The scores that come out of the reduced-depth search are then used
482 to order the moves at the regular-depth search. This is somewhat
483 recursive because the first move considered by the reduced-depth
484 search may also have no hash move / good capture and invoke the
489 IN SEARCHER_THREAD_CONTEXT *ctx,
503 SCORE iBestScore = -INFINITY;
509 ASSERT(IS_VALID_SCORE(iAlpha));
510 ASSERT(IS_VALID_SCORE(iBeta));
511 ASSERT(iAlpha < iBeta);
512 ASSERT(uDepth >= (IID_R_FACTOR + ONE_PLY));
515 // Compute next depth, possibly recurse
// Reduce by the IID factor plus one ply; if still deep enough, first
// rescore recursively at an even shallower depth.
517 uDepth -= (IID_R_FACTOR + ONE_PLY);
518 if (uDepth >= (IID_R_FACTOR + ONE_PLY))
520 (void)RescoreMovesViaSearch(ctx, uDepth, iAlpha, iBeta);
522 ASSERT(uDepth < MAX_DEPTH_PER_SEARCH);
525 // Search the moves and remember the scores
527 for (x = uBest = ctx->sMoveStack.uBegin[ctx->uPly];
528 x < ctx->sMoveStack.uEnd[ctx->uPly];
// Pick the best remaining move (ignoring history) into slot x, then
// play it; WouldGiveCheck tags the move's check flag first.
531 SelectBestNoHistory(ctx, x);
532 mv = ctx->sMoveStack.mvf[x].mv;
533 mv.bvFlags |= WouldGiveCheck(ctx, mv);
534 if (TRUE == MakeMove(ctx, mv))
// PVS shape: full window for the first legal move, scout window for
// the rest with a re-search on a fail high inside the window.
536 if (-INFINITY == iBestScore)
538 iScore = -Search(ctx, -iBeta, -iAlpha, uDepth);
542 iScore = -Search(ctx, -iAlpha - 1, -iAlpha, uDepth);
545 iScore = -Search(ctx, -iBeta, -iAlpha, uDepth);
// Record the reduced-depth score as the move's ordering value.
549 ctx->sMoveStack.mvf[x].iValue = iScore;
550 if (iScore > iBestScore)
564 // Clear the values from the current move to the end of the
565 // moves in the list so that if we failed high we don't have
566 // stale move values in the list for moves we did not search
569 while(x < ctx->sMoveStack.uEnd[ctx->uPly])
571 ctx->sMoveStack.mvf[x].iValue = -INFINITY;
// Flag the best-scoring move so the full-depth search tries it first.
574 ctx->sMoveStack.mvf[uBest].iValue |= SORT_THESE_FIRST;
575 ASSERT(IS_VALID_SCORE(iBestScore));
// MateDistancePruningCutoff: tighten the alpha..beta window using the
// bounds "worst case = mated here (or in 2 plies if not in check)" and
// "best case = mate in 1 ply"; returns TRUE when the window collapses
// and the whole subtree can be pruned.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
582 MateDistancePruningCutoff(IN ULONG uPly,
584 IN OUT SCORE *piBestScore,
585 IN OUT SCORE *piAlpha,
586 IN OUT SCORE *piBeta)
591 This idea (shamelessly?) "borrowed" from Fruit. The idea is that
592 the worst score we can return from any node in the tree is if we
593 are in checkmate right now (indeed, if we're not in check right
594 now then the worst score we can return is mated-in-2-ply).
596 Likewise the best score we can return from here is mate-in-1-ply.
597 Use these facts to attempt to narrow the alpha..beta window.
598 Indeed there are some cases where we can prune this entire tree
603 ULONG uPly : ply from root
604 FLAG InCheck : are we in check?
605 SCORE *piBestScore : pointer to bestscore
606 SCORE *piAlpha : pointer to alpha
607 SCORE *piBeta : pointer to beta
611 FLAG : TRUE if we can prune the branch outright, FALSE otherwise
// Lower bound: mated at this ply, or two plies later if not in check.
615 SCORE iScore = MATED_SCORE(uPly + 2 * !fInCheck);
// Raise alpha to the lower bound; cutoff if it meets or exceeds beta.
620 if (*piAlpha < iScore)
623 if (iScore >= *piBeta)
625 *piBestScore = iScore;
// Upper bound: delivering mate at the next ply (negated mated score).
633 iScore = -1 * MATED_SCORE(uPly + 1);
// Lower beta to the upper bound; cutoff if it falls to alpha or below.
634 if (iScore < *piBeta)
637 if (iScore <= *piAlpha)
639 *piBestScore = iScore;
// DumpPlyInfoStack: debug dump of the current search line from the
// root to the present ply — move, move-ordering position, extension,
// eval, and king-safety data per ply — by replaying the line in a
// scratch context.  Not usable from an MP thread under a split.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
648 DumpPlyInfoStack(IN SEARCHER_THREAD_CONTEXT *ctx,
655 Dump some information about the current search line; mainly here as
658 Note: this will not work on an MP searcher thread that is under a
663 SEARCHER_THREAD_CONTEXT *ctx : searcher context to dump
// Replay starts from the root position in a static scratch context.
672 POSITION *pos = GetRootPosition();
673 CUMULATIVE_SEARCH_FLAGS *pf = &ctx->sSearchFlags;
674 char *p = PositionToFen(pos);
678 static SEARCHER_THREAD_CONTEXT temp;
680 InitializeSearcherContext(pos, &temp);
682 Trace("Iterate Depth: %2u\tCurrent Ply: %2u\n", g_uIterateDepth,
684 Trace("Qsearch Check: %2u\tCurrent Qdepth: %2u\n", pf->uQsearchCheckDepth,
686 Trace("Current a..b: %d .. %d\n", iAlpha, iBeta);
689 Trace("Root: %s\n", p);
693 Trace("move\t number\t exten\t eval b.keval w.keval Danger Squares\n"
694 "----------------------------------------------------------------------------\n");
696 ASSERT(ctx->uPly < MAX_PLY_PER_SEARCH);
// Walk each ply of the current line, replaying moves into temp.
697 for (u = 0; u < ctx->uPly; u++)
699 q = &ctx->sPlyInfo[u];
700 ASSERT(PositionsAreEquivalent(&q->sPosition, &temp.sPosition));
// Print the horizon FEN once, at the first qsearch ply.
702 if ((FALSE == fSeenQ) && (q->fInQsearch == TRUE))
704 p = PositionToFen(&temp.sPosition);
707 Trace("\nHorizon: %s\n\n", p);
713 Trace("%02u.%-6s ", u, MoveToSan(q->mv, &temp.sPosition));
// Find this move's position within the ply's generated-move window to
// report "n-th of m" ordering information.
714 for (v = ctx->sMoveStack.uBegin[u];
715 v < ctx->sMoveStack.uEnd[u];
718 if (IS_SAME_MOVE(q->mv, ctx->sMoveStack.mvf[v].mv))
720 Trace("%u/%u", v - ctx->sMoveStack.uBegin[u] + 1,
726 if (FALSE == MakeMove(&temp, q->mv))
728 Trace("Illegal move?!\n");
732 if (q->iExtensionAmount != 0)
734 Trace("%-3u\t", q->iExtensionAmount);
// Static eval plus material balance (in pawns) and per-side king
// safety at this point in the line.
741 iEval = Eval(&temp, -INFINITY, +INFINITY);
742 Trace("%5s (%+2d) | ", ScoreToString(iEval),
743 temp.sPosition.iMaterialBalance[temp.sPosition.uToMove] / 100);
744 Trace("%5s (%2u) | ",
745 ScoreToString(temp.sPlyInfo[temp.uPly].iKingScore[BLACK]),
746 CountKingSafetyDefects(&temp.sPosition, BLACK));
748 ScoreToString(temp.sPlyInfo[temp.uPly].iKingScore[WHITE]),
749 CountKingSafetyDefects(&temp.sPosition, WHITE));
// Finally print the position at the current ply.
753 p = PositionToFen(&ctx->sPosition);
757 Trace("Now: %s\n", p);
// CommonSearchInit: shared node-entry bookkeeping for Search and
// QSearch — node counting, input/timer polling, stop/depth checks,
// killer-table maintenance, in-check flag, draw detection, and
// mate-distance pruning.  Returns TRUE when the node should be
// aborted with *piScore; FALSE to continue (window possibly narrowed).
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
764 CommonSearchInit(IN SEARCHER_THREAD_CONTEXT *ctx,
765 IN OUT SCORE *piAlpha,
766 IN OUT SCORE *piBeta,
767 IN OUT SCORE *piScore)
772 A set of common code that is called by Search and QSearch. It does
775 1. Increment the total node counter
776 2. Clears pi->mvBest and cDanger squares
777 3. Possibly check for user input or timer expiration
778 4. Checks to see if the position is a draw
779 5. Makes sure ctx->uPly is not too deep
781 7. Does mate-distance pruning
785 IN SEARCHER_THREAD_CONTEXT *ctx,
786 IN OUT SCORE *piAlpha,
787 IN OUT SCORE *piBeta,
788 IN OUT SCORE *piScore
792 FLAG : TRUE if the search node should be aborted (return *piScore)
793 FALSE otherwise (*piAlpha, *piBeta possibly adjusted)
797 PLY_INFO *pi = &ctx->sPlyInfo[ctx->uPly];
800 ASSERT(*piAlpha < *piBeta);
803 // Increment node count and see if we need to check input / timer
// Poll input/timers only when the node count hits the mask boundary,
// keeping the common path cheap.
805 ctx->sCounters.tree.u64TotalNodeCount++;
806 ASSERT(ctx->sCounters.tree.u64TotalNodeCount > 0);
807 if ((ctx->sCounters.tree.u64TotalNodeCount &
808 g_MoveTimer.uNodeCheckMask) == 0)
810 (void)CheckInputAndTimers(ctx);
814 // Make sure that the time allotted to search in has not expired
815 // and (if this is an MP search) that we are still doing relevant work.
817 if (WE_SHOULD_STOP_SEARCHING)
// Abort: no best move, invalid score tells callers to discard.
819 pi->mvBest.uMove = 0;
820 *piScore = INVALID_SCORE;
825 // Make sure we have not exceeded max ply; note that since this
826 // is checked at the beginning of search/qsearch and if we let
827 // them past they will call MakeMove this must cut off one ply
828 // before the hard limit.
830 if ((ctx->uPly >= (MAX_PLY_PER_SEARCH - 1)) ||
831 (ctx->sSearchFlags.uQsearchDepth >= g_uHardExtendLimit))
834 DTTRACE("<toodeep/>");
839 // Erase deeper killer moves
// Clear killers two plies deeper so stale moves from a sibling line
// do not pollute ordering there.
841 if (ctx->uPly + 2 < MAX_PLY_PER_SEARCH)
843 ctx->mvKiller[ctx->uPly + 2][0].uMove = 0;
844 ctx->mvKiller[ctx->uPly + 2][1].uMove = 0;
848 // Set fInCheck in this ply info struct
// In check iff the previous ply's move was a checking move.
850 pi->fInCheck = (IS_CHECKING_MOVE((pi-1)->mv) != 0);
851 ASSERT((pi->fInCheck &&
852 (InCheck(&ctx->sPosition, ctx->sPosition.uToMove))) ||
854 (!InCheck(&ctx->sPosition, ctx->sPosition.uToMove))));
857 // This code is for a debug option where I dump paths from root
858 // to leaf along with extensions and evals at each node in the
859 // path for debugging/brainstorming purposes.
861 pi->PV[ctx->uPly].uMove = 0;
862 pi->mvBest.uMove = 0;
863 pi->iExtensionAmount = 0;
864 pi->iEval = INVALID_SCORE;
// Debug dump triggers for lines extended far beyond iterate depth.
866 if ((g_uIterateDepth > 5) &&
867 (ctx->uPly > 2.5 * g_uIterateDepth))
869 DumpPlyInfoStack(ctx, *piAlpha, *piBeta);
874 // See if this position is a draw
876 if (TRUE == IsDraw(ctx))
878 INC(ctx->sCounters.tree.u64LeafCount);
// Only update the PV if the draw score lies inside the window.
880 if ((*piAlpha < *piScore) && (*piScore < *piBeta))
882 UpdatePV(ctx, DRAWMOVE)
884 DTTRACE("<i bold=\"draw\"/>");
889 // Do mate-distance pruning
891 if (TRUE == MateDistancePruningCutoff(ctx->uPly,
897 ASSERT(*piScore != -INFINITY);
898 ASSERT((*piScore <= *piAlpha) || (*piScore >= *piBeta));
// Remember the (possibly narrowed) alpha for this ply.
902 pi->iAlpha = *piAlpha;
908 ASSERT(IS_VALID_FLAG(fRet));
// SelectNullmoveRFactor: choose the nullmove depth reduction (R).
// Visible logic selects the larger reduction when the search is deep
// or deep-ish with enough pieces on the stronger side; the ASSERT
// shows the result is either TWO_PLY or THREE_PLY.
// NOTE(review): excerpt is missing lines from the original file.
914 SelectNullmoveRFactor(SEARCHER_THREAD_CONTEXT *ctx,
// Piece count of whichever side has more non-pawn pieces.
918 ULONG uMaxPiecesPerSide = MAXU(ctx->sPosition.uNonPawnCount[WHITE][0],
919 ctx->sPosition.uNonPawnCount[BLACK][0]);
920 ASSERT(uMaxPiecesPerSide <= 16);
922 if ((uDepth > 8 * ONE_PLY) ||
923 ((uDepth > 6 * ONE_PLY) && (uMaxPiecesPerSide >= 3)))
927 ASSERT((u == TWO_PLY) || (u == THREE_PLY));
// WeShouldTryNullmovePruning: gate for attempting a nullmove at this
// node.  Requires: nullmove not disabled (no two in a row), enough
// pieces for the side to move, not in check, a scout window, and a
// rough eval not hopelessly far below alpha (depth-indexed margin).
// NOTE(review): excerpt is missing lines from the original file; only
// the visible conditions are annotated.
933 WeShouldTryNullmovePruning(SEARCHER_THREAD_CONTEXT *ctx,
// Margin below alpha (indexed by null-search depth in plies) beyond
// which trying a nullmove is considered futile.
939 static SCORE _iDistAlphaSkipNull[] = {
940 700, 950, 1110, 1150, 1190, 1230, 1270, 0
942 POSITION *pos = &ctx->sPosition;
943 PLY_INFO *pi = &ctx->sPlyInfo[ctx->uPly];
944 CUMULATIVE_SEARCH_FLAGS *pf = &ctx->sSearchFlags;
947 ASSERT(IS_VALID_SCORE(iAlpha) && IS_VALID_SCORE(iBeta));
948 ASSERT(iAlpha < iBeta);
949 ASSERT(IS_VALID_SCORE(iRoughEval));
950 ASSERT(uNullDepth < MAX_PLY_PER_SEARCH * ONE_PLY);
// > 2 non-pawn pieces guards against zugzwang-prone positions.
952 if ((FALSE == pf->fAvoidNullmove) &&
953 (pos->uNonPawnCount[pos->uToMove][0] > 2) &&
954 (FALSE == pi->fInCheck) &&
955 (iBeta != +INFINITY) &&
956 (iBeta == iAlpha + 1)) // <--- TODO: test this one please...
// The margin table only covers null depths up to six plies.
958 if (uNullDepth <= 6 * ONE_PLY)
960 u = uNullDepth / ONE_PLY;
// Skip the nullmove when the eval is too far under alpha, or halfway
// under with material already in trouble after a null.
962 if ((iRoughEval + _iDistAlphaSkipNull[u] <= iAlpha) ||
963 ((iRoughEval + _iDistAlphaSkipNull[u] / 2 <= iAlpha) &&
964 (ValueOfMaterialInTroubleAfterNull(pos, pos->uToMove))))
// TryNullmovePruning: play a nullmove and search the reduced-depth
// reply.  On a fail high, optionally run a verification search (few
// pieces) and extend on detected zugzwang; on a fail low, mine the
// refuting move for en-prise info, Botvinnik-Markoff extensions, the
// Romstad reduced-node early-bailout, and mate-threat ordering data.
// NOTE(review): excerpt is missing lines from the original file; only
// the visible code is annotated.
976 TryNullmovePruning(SEARCHER_THREAD_CONTEXT *ctx,
// Botvinnik-Markoff extension amount indexed by PIECE_TYPE of the
// piece repeatedly threatened; -1 entries mark unused piece types.
984 static INT _bm_by_piece[7] = {
985 -1, -1, QUARTER_PLY, QUARTER_PLY, HALF_PLY, HALF_PLY, -1
987 POSITION *pos = &ctx->sPosition;
988 PLY_INFO *pi = &ctx->sPlyInfo[ctx->uPly];
989 CUMULATIVE_SEARCH_FLAGS *pf = &ctx->sSearchFlags;
994 ASSERT(IS_VALID_SCORE(iAlpha) && IS_VALID_SCORE(iBeta));
995 ASSERT(iAlpha < iBeta);
997 // TODO: more experiments with quick null
1000 ASSERT(!InCheck(pos, pos->uToMove));
1001 VERIFY(MakeMove(ctx, NULLMOVE));
1002 ASSERT(pos->u64NonPawnSig != pi->sPosition.u64NonPawnSig);
1003 ASSERT(pos->uToMove != pi->sPosition.uToMove);
1004 INC(ctx->sCounters.tree.u64NullMoves);
1006 // (Reduced depth) search under the nullmove; don't allow two
1007 // nullmoves in a row.
1008 ASSERT(pf->fAvoidNullmove == FALSE);
1009 pf->fAvoidNullmove = TRUE;
// Scout search around beta: we only care whether the null fails high.
1010 *piNullScore = -Search(ctx, -iBeta, -iBeta + 1, uNullDepth);
1011 pf->fAvoidNullmove = FALSE;
1013 // Check the results
1014 if (*piNullScore >= iBeta)
1016 UnmakeMove(ctx, NULLMOVE);
1017 ASSERT(PositionsAreEquivalent(pos, &pi->sPosition));
1019 // If there are very few pieces on the board, launch a
1020 // separate "verification search".
1021 if ((pos->uNonPawnCount[pos->uToMove][0] < 4) &&
1022 (pf->fVerifyNullmove == TRUE))
1024 ASSERT(pf->fAvoidNullmove == FALSE);
// Unsigned underflow guard: if the subtraction wrapped, clamp to 0.
1025 uNullDepth -= ONE_PLY;
1026 if (uNullDepth > MAX_DEPTH_PER_SEARCH) uNullDepth = 0;
// Disable re-verification while the verify search itself runs.
1027 pf->fVerifyNullmove = FALSE;
1028 iVerifyScore = Search(ctx, iAlpha, iBeta, uNullDepth);
1029 pf->fVerifyNullmove = TRUE;
1030 if (iVerifyScore < iBeta)
1032 // The nullmove failed high but the verify search
1033 // did not; we are in zugzwang so extend.
1034 *piOrigExtend += ONE_PLY;
1035 INC(ctx->sCounters.extension.uZugzwang);
1040 // Nullmove succeeded and either no need to verify or verify
// Cap the returned score below the mate range.
1042 INC(ctx->sCounters.tree.u64NullMoveSuccess);
1043 if (*piNullScore > +NMATE) *piNullScore = +NMATE;
1047 // Nullmove failed...
// mv is the deeper search's best reply — the nullmove's refutation.
1048 mv = (pi+1)->mvBest;
1051 // See what we can learn from the move that refuted our nullmove.
1052 ASSERT(SanityCheckMove(pos, mv));
1053 ASSERT(GET_COLOR(mv.pMoved) == pos->uToMove);
1054 ASSERT(ctx->uPly >= 2);
1056 // If we make a nullmove that fails because we lose a
1057 // piece, remember that the piece in question is in danger.
1058 if ((mv.pCaptured) && !IS_PAWN(mv.pCaptured))
1060 ASSERT(GET_COLOR(mv.pCaptured) == FLIP(pos->uToMove));
1061 ASSERT(mv.pCaptured == pos->rgSquare[mv.cTo].pPiece);
1062 StoreEnprisePiece(pos, mv.cTo);
// Botvinnik-Markoff: the same piece captured the same mover from a
// different square two plies ago — a persistent threat, so extend.
1063 mvRef = ctx->mvNullmoveRefutations[ctx->uPly - 2];
1064 if ((mvRef.uMove) &&
1065 (mvRef.pCaptured == mv.pCaptured) &&
1066 (mvRef.pMoved == mv.pMoved) &&
1067 (mvRef.cTo != mv.cTo))
1069 ASSERT(GET_COLOR(mvRef.pCaptured) == FLIP(pos->uToMove));
1070 ASSERT(mvRef.pCaptured);
1071 ASSERT(_bm_by_piece[PIECE_TYPE(mv.pCaptured)] > 0);
1072 ASSERT(_bm_by_piece[PIECE_TYPE(mv.pCaptured)] <=
1074 *piOrigExtend += _bm_by_piece[PIECE_TYPE(mv.pCaptured)];
1075 INC(ctx->sCounters.extension.uBotvinnikMarkoff);
// Record this refutation for the BM test two plies deeper.
1077 ctx->mvNullmoveRefutations[ctx->uPly] = mv;
1080 // This is an idea from Tord Romstad on ccc: if we are below a
1081 // history reduced node and we try a nullmove and the nullmove
1082 // fails low and the move that refuted the nullmove involves
1083 // the same piece moved in the reduced move at ply-1 then bail
1084 // out of the reduced depth search now and research at full
1085 // depth. The move that was reduced had some tactical
1087 ASSERT(IS_ON_BOARD(mv.cFrom));
// Negative iExtensionAmount at ply-1 marks a reduced node; returning
// alpha-1 forces the caller's full-depth re-search.
1088 if ((mv.cFrom == (pi-1)->mv.cTo) && ((pi-1)->iExtensionAmount < 0))
1090 ASSERT(GET_COLOR(mv.pMoved) == GET_COLOR((pi-1)->mv.pMoved));
1091 UnmakeMove(ctx, NULLMOVE);
1092 ASSERT(PositionsAreEquivalent(pos, &pi->sPosition));
1093 *piNullScore = iAlpha - 1;
1098 // If we do nothing we are mated-in-n, therefore this is a
1099 // dangerous position so extend. This is Bruce Moreland's idea
// Also reward the quiet mate-threat refutation in move ordering.
1101 if (*piNullScore <= -NMATE)
1103 if (mv.uMove && !IS_CAPTURE_OR_PROMOTION(mv))
1105 ASSERT(SanityCheckMove(pos, mv));
1106 ASSERT(GET_COLOR(mv.pMoved) == pos->uToMove);
1107 UpdateDynamicMoveOrdering(ctx,
1108 uNullDepth + TWO_PLY,
1115 UnmakeMove(ctx, NULLMOVE);
1116 ASSERT(PositionsAreEquivalent(pos, &pi->sPosition));
1123 SanityCheckMoves(IN SEARCHER_THREAD_CONTEXT *ctx,
1128 Routine description:
1130 Support routine to sanity check that:
1132 1. The moves before uCurrent on the move stack were all marked
1133 as searched (or pruned)
1134 2. The moves after uCurrent on the move stack are all zero
1135 value moves and not checking moves.
1139 SEARCHER_THREAD_CONTEXT *ctx,
1151 if (WE_SHOULD_STOP_SEARCHING)
1157 // Check moves yet to be searched
1159 if (uType & VERIFY_AFTER)
1161 for (x = uCurrent; x < ctx->sMoveStack.uEnd[ctx->uPly]; x++)
1163 ASSERT(ctx->sMoveStack.mvf[x].iValue <= 0);
1164 ASSERT(!IS_CHECKING_MOVE(ctx->sMoveStack.mvf[x].mv));
1169 // Check moves already searched
1171 if (uType & VERIFY_BEFORE)
1173 for (x = uCurrent - 1;
1174 ((x >= ctx->sMoveStack.uBegin[ctx->uPly]) && (x != (ULONG)-1));
1177 ASSERT(ctx->sMoveStack.mvf[x].bvFlags & MVF_MOVE_SEARCHED);