// <generator> -*- C++ -*-

// Copyright (C) 2023-2025 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/generator
 * This is a Standard C++ Library header.
 */

#ifndef _GLIBCXX_GENERATOR
#define _GLIBCXX_GENERATOR

#include <ranges>
#ifdef _GLIBCXX_SYSHDR
#pragma GCC system_header
#endif

#include <bits/c++config.h>

#define __glibcxx_want_generator
#include <bits/version.h>

#ifdef __cpp_lib_generator // C++ >= 23 && __glibcxx_coroutine
#include <new>
#include <bits/move.h>
#include <bits/ranges_util.h>
#include <bits/elements_of.h>
#include <bits/uses_allocator.h>
#include <bits/exception_ptr.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <coroutine>

#include <type_traits>
#include <variant>
#include <concepts>

#if _GLIBCXX_HOSTED
# include <bits/memory_resource.h>
#endif // HOSTED

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup generator_coros Range generator coroutines
   * @addtogroup ranges
   * @since C++23
   * @{
   */

  /** @brief A range specified using a yielding coroutine.
   *
   * `std::generator` is a utility class for defining ranges using coroutines
   * that yield elements as a range. Generator coroutines are synchronous.
   *
   * @headerfile generator
   * @since C++23
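   *
   * Example (illustrative only, not part of the header):
   * @code
   * // A coroutine returning std::generator<int> lazily yields
   * // 0, 1, ..., n-1; each co_yield suspends the coroutine until
   * // the consumer asks for the next element.
   * std::generator<int> iota(int n)
   * {
   *   for (int i = 0; i < n; ++i)
   *     co_yield i;
   * }
   *
   * // Consumed like any other input range:
   * for (int v : iota(3))
   *   use(v); // v takes the values 0, 1, 2 (use() is a placeholder)
   * @endcode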
   */
  template<typename _Ref, typename _Val = void, typename _Alloc = void>
    class generator;

  /// @cond undocumented
  namespace __gen
  {
    /// _Reference type for a generator whose reference (first argument) and
    /// value (second argument) types are _Ref and _Val.
    template<typename _Ref, typename _Val>
      using _Reference_t = __conditional_t<is_void_v<_Val>,
                                           _Ref&&, _Ref>;

    /// Type yielded by a generator whose _Reference type is _Reference.
    template<typename _Reference>
      using _Yield_t = __conditional_t<is_reference_v<_Reference>,
                                       _Reference,
                                       const _Reference&>;

    /// The composition _Yield_t(_Reference_t(_Ref, _Val)).
    template<typename _Ref, typename _Val>
      using _Yield2_t = _Yield_t<_Reference_t<_Ref, _Val>>;
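    // Illustrative examples of the above mappings (not part of the header):
    //   _Reference_t<int, void>  -> int&&       (value type defaulted)
    //   _Reference_t<int&, int>  -> int&        (explicit value type)
    //   _Yield2_t<int, int>      -> const int&  (non-reference reference type)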

    template<typename> constexpr bool __is_generator = false;
    template<typename _Val, typename _Ref, typename _Alloc>
      constexpr bool __is_generator<std::generator<_Val, _Ref, _Alloc>> = true;

    /// Allocator- and value-type-erased generator promise type.
    /// \tparam _Yielded The corresponding generator's yielded type.
    template<typename _Yielded>
      class _Promise_erased
      {
        static_assert(is_reference_v<_Yielded>);
        using _Yielded_deref = remove_reference_t<_Yielded>;
        using _Yielded_decvref = remove_cvref_t<_Yielded>;
        using _ValuePtr = add_pointer_t<_Yielded>;
        using _Coro_handle = std::coroutine_handle<_Promise_erased>;

        template<typename, typename, typename>
          friend class std::generator;

        template<typename _Gen>
          struct _Recursive_awaiter;
        template<typename>
          friend struct _Recursive_awaiter;
        struct _Copy_awaiter;
        struct _Subyield_state;
        struct _Final_awaiter;
      public:
        suspend_always
        initial_suspend() const noexcept
        { return {}; }

        suspend_always
        yield_value(_Yielded __val) noexcept
        {
          _M_bottom_value() = ::std::addressof(__val);
          return {};
        }

        auto
        yield_value(const _Yielded_deref& __val)
          noexcept(is_nothrow_constructible_v<_Yielded_decvref,
                                              const _Yielded_deref&>)
          requires (is_rvalue_reference_v<_Yielded>
                    && constructible_from<_Yielded_decvref,
                                          const _Yielded_deref&>)
        { return _Copy_awaiter(_Yielded_decvref(__val), _M_bottom_value()); }

        template<typename _R2, typename _V2, typename _A2, typename _U2>
          requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
          auto
          yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&&, _U2> __r)
            noexcept
          { return _Recursive_awaiter { std::move(__r.range) }; }

        // _GLIBCXX_RESOLVE_LIB_DEFECTS
        // 3899. co_yielding elements of an lvalue generator is
        // unnecessarily inefficient
        template<typename _R2, typename _V2, typename _A2, typename _U2>
          requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
          auto
          yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&, _U2> __r)
            noexcept
          { return _Recursive_awaiter { std::move(__r.range) }; }

        template<ranges::input_range _R, typename _Alloc>
          requires convertible_to<ranges::range_reference_t<_R>, _Yielded>
          auto
          yield_value(ranges::elements_of<_R, _Alloc> __r)
          {
            auto __n = [] (allocator_arg_t, _Alloc,
                           ranges::iterator_t<_R> __i,
                           ranges::sentinel_t<_R> __s)
              -> generator<_Yielded, ranges::range_value_t<_R>, _Alloc> {
              for (; __i != __s; ++__i)
                co_yield static_cast<_Yielded>(*__i);
            };
            return yield_value(ranges::elements_of(__n(allocator_arg,
                                                       __r.allocator,
                                                       ranges::begin(__r.range),
                                                       ranges::end(__r.range))));
          }
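        // Illustrative only (not part of the header): inside a generator
        // body these overloads accept both nested generators and arbitrary
        // input ranges whose references convert to the yielded type.
        // E.g., within a std::generator<const int&> coroutine:
        //   co_yield ranges::elements_of(inner());                // nested generator
        //   co_yield ranges::elements_of(std::vector<int>{1, 2}); // input range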

        _Final_awaiter
        final_suspend() noexcept
        { return {}; }

        void
        unhandled_exception()
        {
          // To get here, this coroutine must have been active, so it must be
          // the top of the stack. The current coroutine is the sole entry of
          // the stack iff it is both the top and the bottom; since it is
          // implicitly the top in this context, it is the sole entry iff it
          // is also the bottom.
          if (_M_nest._M_is_bottom())
            throw;
          else
            this->_M_except = std::current_exception();
        }

        void await_transform() = delete;
        void return_void() const noexcept {}

      private:
        _ValuePtr&
        _M_bottom_value() noexcept
        { return _M_nest._M_bottom_value(*this); }

        _ValuePtr&
        _M_value() noexcept
        { return _M_nest._M_value(*this); }

        _Subyield_state _M_nest;
        std::exception_ptr _M_except;
      };

    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Subyield_state
      {
        struct _Frame
        {
          _Coro_handle _M_bottom;
          _Coro_handle _M_parent;
        };

        struct _Bottom_frame
        {
          _Coro_handle _M_top;
          _ValuePtr _M_value = nullptr;
        };

        std::variant<
          _Bottom_frame,
          _Frame
        > _M_stack;

        bool
        _M_is_bottom() const noexcept
        { return !std::holds_alternative<_Frame>(this->_M_stack); }

        _Coro_handle&
        _M_top() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            return __f->_M_bottom.promise()._M_nest._M_top();

          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_top;
        }

        void
        _M_push(_Coro_handle __current, _Coro_handle __subyield) noexcept
        {
          __glibcxx_assert(&__current.promise()._M_nest == this);
          __glibcxx_assert(this->_M_top() == __current);

          __subyield.promise()._M_nest._M_jump_in(__current, __subyield);
        }

        std::coroutine_handle<>
        _M_pop() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            {
              // We aren't a bottom coroutine. Restore the parent to the top
              // and resume.
              auto __p = this->_M_top() = __f->_M_parent;
              return __p;
            }
          else
            // Otherwise, there's nothing to resume.
            return std::noop_coroutine();
        }

        void
        _M_jump_in(_Coro_handle __rest, _Coro_handle __new) noexcept
        {
          __glibcxx_assert(&__new.promise()._M_nest == this);
          __glibcxx_assert(this->_M_is_bottom());
          // We're bottom. We're also top if top is unset (note that this is
          // not true if something was added to the coro stack and then popped,
          // but in that case we can't possibly be yielded from, as it would
          // require rerunning begin()).
          __glibcxx_assert(!this->_M_top());

          auto& __rn = __rest.promise()._M_nest;
          __rn._M_top() = __new;

          // Presume we're the second frame...
          auto __bott = __rest;
          if (auto __f = std::get_if<_Frame>(&__rn._M_stack))
            // But, if we aren't, get the actual bottom. We're only the second
            // frame if our parent is the bottom frame, i.e. it doesn't have a
            // _Frame member.
            __bott = __f->_M_bottom;

          this->_M_stack = _Frame {
            ._M_bottom = __bott,
            ._M_parent = __rest
          };
        }

        _ValuePtr&
        _M_bottom_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          if (auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack))
            return __bf->_M_value;
          auto __f = std::get_if<_Frame>(&this->_M_stack);
          __glibcxx_assert(__f);
          auto& __p = __f->_M_bottom.promise();
          return __p._M_nest._M_value(__p);
        }

        _ValuePtr&
        _M_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_value;
        }
      };
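    // Illustrative picture (not part of the header): if gen1 yields
    // elements_of(gen2), which in turn yields elements_of(gen3), the
    // per-promise _Subyield_state values form a stack:
    //   gen1: _Bottom_frame { _M_top = gen3, _M_value = <current element> }
    //   gen2: _Frame { _M_bottom = gen1, _M_parent = gen1 }
    //   gen3: _Frame { _M_bottom = gen1, _M_parent = gen2 }
    // so resuming always targets the innermost coroutine via _M_top, and
    // values are published through the bottom frame's _M_value.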

    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Final_awaiter
      {
        bool await_ready() noexcept
        { return false; }

        template<typename _Promise>
          auto await_suspend(std::coroutine_handle<_Promise> __c) noexcept
          {
#ifdef __glibcxx_is_pointer_interconvertible
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);
#endif

            auto& __n = __c.promise()._M_nest;
            return __n._M_pop();
          }

        void await_resume() noexcept {}
      };

    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Copy_awaiter
      {
        _Yielded_decvref _M_value;
        _ValuePtr& _M_bottom_value;

        constexpr bool await_ready() noexcept
        { return false; }

        template<typename _Promise>
          void await_suspend(std::coroutine_handle<_Promise>) noexcept
          {
#ifdef __glibcxx_is_pointer_interconvertible
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);
#endif
            _M_bottom_value = ::std::addressof(_M_value);
          }

        constexpr void
        await_resume() const noexcept
        {}
      };

    template<typename _Yielded>
      template<typename _Gen>
        struct _Promise_erased<_Yielded>::_Recursive_awaiter
        {
          _Gen _M_gen;
          static_assert(__is_generator<_Gen>);
          static_assert(std::same_as<typename _Gen::yielded, _Yielded>);

          _Recursive_awaiter(_Gen __gen) noexcept
            : _M_gen(std::move(__gen))
          { this->_M_gen._M_mark_as_started(); }

          constexpr bool
          await_ready() const noexcept
          { return false; }

          template<typename _Promise>
            std::coroutine_handle<>
            await_suspend(std::coroutine_handle<_Promise> __p) noexcept
            {
#ifdef __glibcxx_is_pointer_interconvertible
              static_assert(is_pointer_interconvertible_base_of_v<
                              _Promise_erased, _Promise>);
#endif

              auto __c = _Coro_handle::from_address(__p.address());
              auto __t = _Coro_handle::from_address(this->_M_gen._M_coro.address());
              __p.promise()._M_nest._M_push(__c, __t);
              return __t;
            }

          void await_resume()
          {
            if (auto __e = _M_gen._M_coro.promise()._M_except)
              std::rethrow_exception(__e);
          }
        };

    struct _Alloc_block
    {
      alignas(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
        char _M_data[__STDCPP_DEFAULT_NEW_ALIGNMENT__];

      static auto
      _M_cnt(std::size_t __sz) noexcept
      {
        auto __blksz = sizeof(_Alloc_block);
        return (__sz + __blksz - 1) / __blksz;
      }
    };
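    // Illustrative only: with the common __STDCPP_DEFAULT_NEW_ALIGNMENT__ of
    // 16, sizeof(_Alloc_block) == 16, so _M_cnt rounds a byte count up to
    // whole blocks: _M_cnt(1) == 1, _M_cnt(16) == 1, _M_cnt(17) == 2.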

    template<typename _All>
      concept _Stateless_alloc = (allocator_traits<_All>::is_always_equal::value
                                  && default_initializable<_All>);

    template<typename _Allocator>
      class _Promise_alloc
      {
        using _Rebound = __alloc_rebind<_Allocator, _Alloc_block>;
        using _Rebound_ATr = allocator_traits<_Rebound>;
        static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                      "Must use allocators for true pointers with generators");

        static auto
        _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Rebound);
          return reinterpret_cast<_Rebound*>(((__an + __ba - 1) / __ba) * __ba);
        }

        static auto
        _M_alloc_size(std::size_t __csz) noexcept
        {
          auto __ba = alignof(_Rebound);
          // Our desired layout is placing the coroutine frame, then pad out to
          // align, then place the allocator. The total size of that is the
          // size of the coroutine frame, plus up to __ba bytes, plus the size
          // of the allocator.
          return __csz + __ba + sizeof(_Rebound);
        }
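        // Illustrative layout (not part of the header):
        //   [ coroutine frame (__csz bytes) | pad (< alignof(_Rebound)) | _Rebound ]
        // _M_alloc_address recovers the allocator's position from just the
        // frame address and size by rounding up to alignof(_Rebound).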

        static void*
        _M_allocate(_Rebound __b, std::size_t __csz)
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            // Only need room for the coroutine.
            return __b.allocate(_Alloc_block::_M_cnt(__csz));
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __f = __b.allocate(__nsz);
              auto __fn = reinterpret_cast<std::uintptr_t>(__f);
              auto __an = _M_alloc_address(__fn, __csz);
              ::new (__an) _Rebound(std::move(__b));
              return __f;
            }
        }

      public:
        void*
        operator new(std::size_t __sz)
          requires default_initializable<_Rebound> // _Allocator is non-void
        { return _M_allocate({}, __sz); }

        // _GLIBCXX_RESOLVE_LIB_DEFECTS
        // 3900. The allocator_arg_t overloads of promise_type::operator new
        // should not be constrained
        template<typename _Alloc, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Alloc& __a,
                       const _Args&...)
          {
            static_assert(convertible_to<const _Alloc&, _Allocator>,
                          "the allocator argument to the coroutine must be "
                          "convertible to the generator's allocator type");
            return _M_allocate(_Rebound(_Allocator(__a)), __sz);
          }

        template<typename _This, typename _Alloc, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Alloc& __a,
                       const _Args&...)
          {
            static_assert(convertible_to<const _Alloc&, _Allocator>,
                          "the allocator argument to the coroutine must be "
                          "convertible to the generator's allocator type");
            return _M_allocate(_Rebound(_Allocator(__a)), __sz);
          }

        void
        operator delete(void* __ptr, std::size_t __csz) noexcept
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            {
              _Rebound __b;
              return __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr),
                                    _Alloc_block::_M_cnt(__csz));
            }
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
              auto __an = _M_alloc_address(__fn, __csz);
              _Rebound __b(std::move(*__an));
              __an->~_Rebound();
              __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nsz);
            }
        }
      };

    template<>
      class _Promise_alloc<void>
      {
        using _Dealloc_fn = void (*)(void*, std::size_t);

        static auto
        _M_dealloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Dealloc_fn);
          auto __aligned = ((__an + __ba - 1) / __ba) * __ba;
          return reinterpret_cast<_Dealloc_fn*>(__aligned);
        }

        template<typename _Rebound>
          static auto
          _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
            requires (!_Stateless_alloc<_Rebound>)
          {
            auto __ba = alignof(_Rebound);
            auto __da = _M_dealloc_address(__fn, __fsz);
            auto __aan = reinterpret_cast<std::uintptr_t>(__da);
            __aan += sizeof(_Dealloc_fn);
            auto __aligned = ((__aan + __ba - 1) / __ba) * __ba;
            return reinterpret_cast<_Rebound*>(__aligned);
          }

        template<typename _Rebound>
          static auto
          _M_alloc_size(std::size_t __csz) noexcept
          {
            // This time, we want the coroutine frame, then the deallocator
            // pointer, then the allocator itself, if any.
            std::size_t __aa = 0;
            std::size_t __as = 0;
            if constexpr (!std::same_as<_Rebound, void>)
              {
                __aa = alignof(_Rebound);
                __as = sizeof(_Rebound);
              }
            auto __ba = __aa + alignof(_Dealloc_fn);
            return __csz + __ba + __as + sizeof(_Dealloc_fn);
          }
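        // Illustrative layout for the type-erased case (not part of the
        // header):
        //   [ coroutine frame | pad | _Dealloc_fn | pad | _Rebound (if stateful) ]
        // The deallocating operator delete only knows the frame address and
        // size, so the stored _Dealloc_fn recovers the right deallocation
        // logic for whichever allocator was actually used.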

        template<typename _Rebound>
          static void
          _M_deallocator(void* __ptr, std::size_t __csz) noexcept
          {
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);

            if constexpr (_Stateless_alloc<_Rebound>)
              {
                _Rebound __b;
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
            else
              {
                auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
                auto __an = _M_alloc_address<_Rebound>(__fn, __csz);
                _Rebound __b(std::move(*__an));
                __an->~_Rebound();
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
          }

        template<typename _Alloc>
          static void*
          _M_allocate(const _Alloc& __a, std::size_t __csz)
          {
            using _Rebound = __alloc_rebind<_Alloc, _Alloc_block>;
            using _Rebound_ATr = allocator_traits<_Rebound>;

            static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                          "Must use allocators for true pointers with generators");

            _Dealloc_fn __d = &_M_deallocator<_Rebound>;
            auto __b = static_cast<_Rebound>(__a);
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);
            void* __p = __b.allocate(__nblk);
            auto __pn = reinterpret_cast<std::uintptr_t>(__p);
            *_M_dealloc_address(__pn, __csz) = __d;
            if constexpr (!_Stateless_alloc<_Rebound>)
              {
                auto __an = _M_alloc_address<_Rebound>(__pn, __csz);
                ::new (__an) _Rebound(std::move(__b));
              }
            return __p;
          }
      public:
        void*
        operator new(std::size_t __sz)
        {
          auto __nsz = _M_alloc_size<void>(__sz);
          _Dealloc_fn __d = [] (void* __ptr, std::size_t __sz)
          {
            ::operator delete(__ptr, _M_alloc_size<void>(__sz));
          };
          auto __p = ::operator new(__nsz);
          auto __pn = reinterpret_cast<uintptr_t>(__p);
          *_M_dealloc_address(__pn, __sz) = __d;
          return __p;
        }

        template<typename _Alloc, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Alloc& __a,
                       const _Args&...)
          { return _M_allocate(__a, __sz); }

        template<typename _This, typename _Alloc, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Alloc& __a,
                       const _Args&...)
          { return _M_allocate(__a, __sz); }

        void
        operator delete(void* __ptr, std::size_t __sz) noexcept
        {
          _Dealloc_fn __d;
          auto __pn = reinterpret_cast<uintptr_t>(__ptr);
          __d = *_M_dealloc_address(__pn, __sz);
          __d(__ptr, __sz);
        }
      };

    template<typename _Tp>
      concept _Cv_unqualified_object = is_object_v<_Tp>
        && same_as<_Tp, remove_cv_t<_Tp>>;
  } // namespace __gen
  /// @endcond

  template<typename _Ref, typename _Val, typename _Alloc>
    class generator
    : public ranges::view_interface<generator<_Ref, _Val, _Alloc>>
    {
      using _Value = __conditional_t<is_void_v<_Val>,
                                     remove_cvref_t<_Ref>,
                                     _Val>;
      static_assert(__gen::_Cv_unqualified_object<_Value>,
                    "Generator value must be a cv-unqualified object type");
      using _Reference = __gen::_Reference_t<_Ref, _Val>;
      static_assert(is_reference_v<_Reference>
                    || (__gen::_Cv_unqualified_object<_Reference>
                        && copy_constructible<_Reference>),
677 "Generator reference type must be either a cv-unqualified "
678 "object type that is trivially constructible or a "
679 "reference type");

      using _RRef = __conditional_t<
        is_reference_v<_Reference>,
        remove_reference_t<_Reference>&&,
        _Reference>;

      /* Required to model indirectly_readable and input_iterator. */
      static_assert(common_reference_with<_Reference&&, _Value&&>);
      static_assert(common_reference_with<_Reference&&, _RRef&&>);
      static_assert(common_reference_with<_RRef&&, const _Value&>);

      using _Yielded = __gen::_Yield_t<_Reference>;
      using _Erased_promise = __gen::_Promise_erased<_Yielded>;

      struct _Iterator;

      friend _Erased_promise;
      friend struct _Erased_promise::_Subyield_state;
    public:
      using yielded = _Yielded;

      struct promise_type : _Erased_promise, __gen::_Promise_alloc<_Alloc>
      {
        generator get_return_object() noexcept
        { return { coroutine_handle<promise_type>::from_promise(*this) }; }
      };

#ifdef __glibcxx_is_pointer_interconvertible
      static_assert(is_pointer_interconvertible_base_of_v<_Erased_promise,
                                                          promise_type>);
#endif

      generator(const generator&) = delete;

      generator(generator&& __other) noexcept
      : _M_coro(std::__exchange(__other._M_coro, nullptr)),
        _M_began(std::__exchange(__other._M_began, false))
      {}

      ~generator()
      {
        if (auto& __c = this->_M_coro)
          __c.destroy();
      }

      generator&
      operator=(generator __other) noexcept
      {
        swap(__other._M_coro, this->_M_coro);
        swap(__other._M_began, this->_M_began);
        return *this;
      }

      _Iterator
      begin()
      {
        this->_M_mark_as_started();
        auto __h = _Coro_handle::from_promise(_M_coro.promise());
        __h.promise()._M_nest._M_top() = __h;
        return { __h };
      }

      default_sentinel_t
      end() const noexcept
      { return default_sentinel; }

    private:
      using _Coro_handle = std::coroutine_handle<_Erased_promise>;

      generator(coroutine_handle<promise_type> __coro) noexcept
      : _M_coro { move(__coro) }
      {}

      void
      _M_mark_as_started() noexcept
      {
        __glibcxx_assert(!this->_M_began);
        this->_M_began = true;
      }

      coroutine_handle<promise_type> _M_coro;
      bool _M_began = false;
    };

  template<class _Ref, class _Val, class _Alloc>
    struct generator<_Ref, _Val, _Alloc>::_Iterator
    {
      using value_type = _Value;
      using difference_type = ptrdiff_t;

      friend bool
      operator==(const _Iterator& __i, default_sentinel_t) noexcept
      { return __i._M_coro.done(); }

      friend class generator;

      _Iterator(_Iterator&& __o) noexcept
      : _M_coro(std::__exchange(__o._M_coro, {}))
      {}

      _Iterator&
      operator=(_Iterator&& __o) noexcept
      {
        this->_M_coro = std::__exchange(__o._M_coro, {});
        return *this;
      }

      _Iterator&
      operator++()
      {
        _M_next();
        return *this;
      }

      void
      operator++(int)
      { this->operator++(); }

      _Reference
      operator*()
        const noexcept(is_nothrow_move_constructible_v<_Reference>)
      {
        auto& __p = this->_M_coro.promise();
        return static_cast<_Reference>(*__p._M_value());
      }

    private:
      friend class generator;

      _Iterator(_Coro_handle __g)
      : _M_coro { __g }
      { this->_M_next(); }

      void _M_next()
      {
        auto& __t = this->_M_coro.promise()._M_nest._M_top();
        __t.resume();
      }

      _Coro_handle _M_coro;
    };

  /// @}

#if _GLIBCXX_HOSTED
  namespace pmr {
    template<typename _Ref, typename _Val = void>
      using generator = std::generator<_Ref, _Val, polymorphic_allocator<std::byte>>;
  }
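  // Illustrative only (not part of the header): pmr::generator allocates the
  // coroutine frame from a memory_resource supplied through the
  // allocator_arg protocol, e.g.
  //   std::pmr::generator<int>
  //   answer(std::allocator_arg_t, std::pmr::polymorphic_allocator<std::byte>)
  //   { co_yield 42; }
  // called as
  //   answer(std::allocator_arg,
  //          std::pmr::polymorphic_allocator<std::byte>{&mr});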
#endif // HOSTED

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
#endif // __cpp_lib_generator

#endif // _GLIBCXX_GENERATOR