//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ALGORITHM_NTH_ELEMENT_H
#define _LIBCPP___ALGORITHM_NTH_ELEMENT_H

#include <__algorithm/comp.h>
#include <__algorithm/comp_ref_type.h>
#include <__algorithm/iterator_operations.h>
#include <__algorithm/sort.h>
#include <__config>
#include <__debug>
#include <__debug_utils/randomize_range.h>
#include <__iterator/iterator_traits.h>
#include <__utility/move.h>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

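// Searches downward from __j for an element that can serve as a guard for the unguarded
// downward scan in __nth_element. Both iterators are taken by reference: the function
// returns true if some element in (__i, __j) compares less than *__m, leaving __j
// pointing at it, and false if __j reaches __i without finding one.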
template<class _Compare, class _RandomAccessIterator>
_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX14 bool
__nth_element_find_guard(_RandomAccessIterator& __i, _RandomAccessIterator& __j,
                         _RandomAccessIterator __m, _Compare __comp)
{
    // manually guard downward moving __j against __i
    while (true) {
        if (__i == --__j) {
            return false;
        }
        if (__comp(*__j, *__m)) {
            return true;  // found guard for downward moving __j, now use unguarded partition
        }
    }
}

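// Quickselect-style loop: ranges of at most __limit elements are finished directly
// (compare/swap, __sort3, or selection sort); larger ranges are partitioned around a
// median-of-three pivot, and the loop then continues on whichever side of the partition
// contains __nth (the tail recursion is written as a loop).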
template <class _AlgPolicy, class _Compare, class _RandomAccessIterator>
_LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX14 void
__nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last, _Compare __comp)
{
    using _Ops = _IterOps<_AlgPolicy>;

    // _Compare is known to be a reference type
    typedef typename iterator_traits<_RandomAccessIterator>::difference_type difference_type;
    const difference_type __limit = 7;
    while (true)
    {
        if (__nth == __last)
            return;
        difference_type __len = __last - __first;
        switch (__len)
        {
        case 0:
        case 1:
            return;
        case 2:
            if (__comp(*--__last, *__first))
                _Ops::iter_swap(__first, __last);
            return;
        case 3:
            {
            _RandomAccessIterator __m = __first;
            std::__sort3<_AlgPolicy, _Compare>(__first, ++__m, --__last, __comp);
            return;
            }
        }
        if (__len <= __limit)
        {
            std::__selection_sort<_AlgPolicy, _Compare>(__first, __last, __comp);
            return;
        }
        // __len > __limit >= 3
        _RandomAccessIterator __m = __first + __len/2;
        _RandomAccessIterator __lm1 = __last;
        unsigned __n_swaps = std::__sort3<_AlgPolicy, _Compare>(__first, __m, --__lm1, __comp);
        // *__m is median
        // partition [__first, __m) < *__m and *__m <= [__m, __last)
        // (this inhibits tossing elements equivalent to __m around unnecessarily)
        _RandomAccessIterator __i = __first;
        _RandomAccessIterator __j = __lm1;
        // __j points beyond range to be tested, *__lm1 is known to be <= *__m
        // The search going up is known to be guarded but the search coming down isn't.
        // Prime the downward search with a guard.
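        // ("Guarded" here means an element is known to stop the scan, so the inner loops
        // can advance without an explicit bounds check; an "unguarded" scan relies on
        // such a sentinel already being in place.)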
        if (!__comp(*__i, *__m))  // if *__first == *__m
        {
            // *__first == *__m, *__first doesn't go in first part
            if (_VSTD::__nth_element_find_guard<_Compare>(__i, __j, __m, __comp)) {
                _Ops::iter_swap(__i, __j);
                ++__n_swaps;
            } else {
                // *__first == *__m, *__m <= all other elements
                // Partition instead into [__first, __i) == *__first and *__first < [__i, __last)
                ++__i;  // __first + 1
                __j = __last;
                if (!__comp(*__first, *--__j)) {  // we need a guard if *__first == *(__last-1)
                    while (true) {
                        if (__i == __j) {
                            return;  // [__first, __last) all equivalent elements
                        } else if (__comp(*__first, *__i)) {
                            _Ops::iter_swap(__i, __j);
                            ++__n_swaps;
                            ++__i;
                            break;
                        }
                        ++__i;
                    }
                }
                // [__first, __i) == *__first and *__first < [__j, __last) and __j == __last - 1
                if (__i == __j) {
                    return;
                }
                while (true) {
                    while (!__comp(*__first, *__i))
                        ++__i;
                    while (__comp(*__first, *--__j))
                        ;
                    if (__i >= __j)
                        break;
                    _Ops::iter_swap(__i, __j);
                    ++__n_swaps;
                    ++__i;
                }
                // [__first, __i) == *__first and *__first < [__i, __last)
                // The first part is sorted; if __nth falls in it, we are done.
                if (__nth < __i) {
                    return;
                }
                // __nth_element the second part
                // _VSTD::__nth_element<_Compare>(__i, __nth, __last, __comp);
                __first = __i;
                continue;
            }
        }
        ++__i;
        // __j points beyond range to be tested, *__lm1 is known to be <= *__m
        // if not yet partitioned...
        if (__i < __j)
        {
            // known that *(__i - 1) < *__m
            while (true)
            {
                // __m still guards upward moving __i
                while (__comp(*__i, *__m))
                    ++__i;
                // It is now known that a guard exists for downward moving __j
                while (!__comp(*--__j, *__m))
                    ;
                if (__i >= __j)
                    break;
                _Ops::iter_swap(__i, __j);
                ++__n_swaps;
                // It is known that __m != __j
                // If __m just moved, follow it
                if (__m == __i)
                    __m = __j;
                ++__i;
            }
        }
        // [__first, __i) < *__m and *__m <= [__i, __last)
        if (__i != __m && __comp(*__m, *__i))
        {
            _Ops::iter_swap(__i, __m);
            ++__n_swaps;
        }
        // [__first, __i) < *__i and *__i <= [__i+1, __last)
        if (__nth == __i)
            return;
        if (__n_swaps == 0)
        {
            // We were given a perfectly partitioned sequence.  Coincidence?
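            // No swaps were performed, so the half that contains __nth may already be
            // sorted; scan it once and return early if it is.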
            if (__nth < __i)
            {
                // Check for [__first, __i) already sorted
                __j = __m = __first;
                while (true) {
                    if (++__j == __i) {
                        // [__first, __i) sorted
                        return;
                    }
                    if (__comp(*__j, *__m)) {
                        // not yet sorted, so sort
                        break;
                    }
                    __m = __j;
                }
            }
            else
            {
                // Check for [__i, __last) already sorted
                __j = __m = __i;
                while (true) {
                    if (++__j == __last) {
                        // [__i, __last) sorted
                        return;
                    }
                    if (__comp(*__j, *__m)) {
                        // not yet sorted, so sort
                        break;
                    }
                    __m = __j;
                }
            }
        }
        // __nth_element on range containing __nth
        if (__nth < __i)
        {
            // _VSTD::__nth_element<_Compare>(__first, __nth, __i, __comp);
            __last = __i;
        }
        else
        {
            // _VSTD::__nth_element<_Compare>(__i+1, __nth, __last, __comp);
            __first = ++__i;
        }
    }
}

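// Dispatch helper for the public overloads below: forwards to __nth_element with
// __comp_ref_type as the comparator type and, when libc++'s debug randomization of
// unspecified behavior is enabled, shuffles the input beforehand and each side of the
// result afterwards so callers cannot rely on any ordering beyond what nth_element
// guarantees. Without that mode the __debug_randomize_range calls are no-ops.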
template <class _AlgPolicy, class _RandomAccessIterator, class _Compare>
inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20
void __nth_element_impl(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last,
                        _Compare& __comp) {
  if (__nth == __last)
    return;

  std::__debug_randomize_range<_AlgPolicy>(__first, __last);

  std::__nth_element<_AlgPolicy, __comp_ref_type<_Compare> >(__first, __nth, __last, __comp);

  std::__debug_randomize_range<_AlgPolicy>(__first, __nth);
  if (__nth != __last) {
    std::__debug_randomize_range<_AlgPolicy>(++__nth, __last);
  }
}

template <class _RandomAccessIterator, class _Compare>
inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20
void nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last,
                 _Compare __comp) {
  std::__nth_element_impl<_ClassicAlgPolicy>(std::move(__first), std::move(__nth), std::move(__last), __comp);
}

template <class _RandomAccessIterator>
inline _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR_SINCE_CXX20
void nth_element(_RandomAccessIterator __first, _RandomAccessIterator __nth, _RandomAccessIterator __last) {
  std::nth_element(std::move(__first), std::move(__nth), std::move(__last), __less<typename
      iterator_traits<_RandomAccessIterator>::value_type>());
}

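// Example usage (illustrative only; the snippet assumes <vector> is included):
//
//   std::vector<int> v = {9, 1, 8, 2, 7, 3};
//   std::nth_element(v.begin(), v.begin() + 2, v.end());
//   // v[2] is now 3, the value that would be at index 2 if v were fully sorted;
//   // no element before it is greater than it and no element after it is less than it.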

_LIBCPP_END_NAMESPACE_STD

#endif // _LIBCPP___ALGORITHM_NTH_ELEMENT_H