1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
1001
1002
1003
1004
1005
1006
1007
1008
1009
1010
1011
1012
1013
1014
1015
1016
1017
1018
1019
1020
1021
1022
1023
1024
1025
1026
1027
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
1052
1053
1054
1055
1056
1057
1058
1059
1060
1061
1062
1063
1064
1065
1066
1067
1068
1069
1070
1071
1072
1073
1074
1075
1076
1077
1078
1079
1080
1081
1082
1083
1084
1085
1086
1087
1088
1089
1090
1091
1092
1093
1094
1095
1096
1097
1098
1099
1100
1101
1102
1103
1104
1105
1106
1107
1108
1109
1110
1111
1112
1113
1114
1115
1116
1117
1118
1119
1120
1121
1122
1123
1124
1125
1126
1127
1128
1129
1130
1131
1132
1133
1134
1135
1136
1137
1138
1139
1140
1141
1142
1143
1144
1145
1146
1147
1148
1149
1150
1151
1152
1153
1154
1155
1156
1157
1158
1159
1160
1161
1162
1163
1164
1165
1166
1167
1168
1169
1170
1171
1172
1173
1174
1175
1176
1177
1178
1179
1180
1181
1182
1183
1184
1185
1186
1187
1188
1189
1190
1191
1192
1193
1194
1195
1196
1197
1198
1199
1200
1201
1202
1203
1204
1205
1206
1207
1208
1209
1210
1211
1212
1213
1214
1215
1216
1217
1218
1219
1220
1221
1222
1223
1224
1225
1226
1227
1228
1229
1230
1231
1232
1233
1234
1235
1236
1237
1238
1239
1240
1241
1242
1243
1244
1245
1246
1247
1248
1249
1250
1251
1252
1253
1254
1255
1256
1257
1258
1259
1260
1261
1262
1263
1264
1265
1266
1267
1268
1269
1270
1271
1272
1273
1274
1275
1276
1277
1278
1279
1280
1281
// base/containers/circular_deque.h
// Copyright 2017 The Chromium Authors
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_CONTAINERS_CIRCULAR_DEQUE_H_
#define BASE_CONTAINERS_CIRCULAR_DEQUE_H_
#include <algorithm>
#include <compare>
#include <cstddef>
#include <cstdint>
#include <iterator>
#include <utility>
#include "base/check.h"
#include "base/containers/span.h"
#include "base/containers/vector_buffer.h"
#include "base/dcheck_is_on.h"
#include "base/memory/raw_ptr_exclusion.h"
#include "base/numerics/checked_math.h"
#include "base/numerics/safe_conversions.h"
#include "base/ranges/algorithm.h"
#include "base/ranges/from_range.h"
#if DCHECK_IS_ON()
#include <ostream>
#endif
// base::circular_deque is similar to std::deque. Unlike std::deque, the
// storage is provided in a flat circular buffer conceptually similar to a
// vector. The beginning and end will wrap around as necessary so that
// pushes and pops will be constant time as long as a capacity expansion is
// not required.
//
// The API should be identical to std::deque with the following differences:
//
// - ITERATORS ARE NOT STABLE. Mutating the container will invalidate all
// iterators.
//
// - Insertions may resize the vector and so are not constant time (std::deque
// guarantees constant time for insertions at the ends).
//
// - Container-wide comparisons are not implemented. If you want to compare
// two containers, use an algorithm so the expensive iteration is explicit.
//
// If you want a similar container with only a queue API, use base::queue in
// base/containers/queue.h.
//
// Constructors:
// circular_deque();
// circular_deque(size_t count);
// circular_deque(size_t count, const T& value);
// circular_deque(InputIterator first, InputIterator last);
// circular_deque(base::from_range_t, Range range);
// circular_deque(const circular_deque&);
// circular_deque(circular_deque&&);
// circular_deque(std::initializer_list<value_type>);
//
// Assignment functions:
// circular_deque& operator=(const circular_deque&);
// circular_deque& operator=(circular_deque&&);
// circular_deque& operator=(std::initializer_list<T>);
// void assign(size_t count, const T& value);
// void assign(InputIterator first, InputIterator last);
// void assign(std::initializer_list<T> value);
// void assign_range(Range range);
//
// Random accessors:
// T& at(size_t);
// const T& at(size_t) const;
// T& operator[](size_t);
// const T& operator[](size_t) const;
//
// End accessors:
// T& front();
// const T& front() const;
// T& back();
// const T& back() const;
//
// Iterator functions:
// iterator begin();
// const_iterator begin() const;
// const_iterator cbegin() const;
// iterator end();
// const_iterator end() const;
// const_iterator cend() const;
// reverse_iterator rbegin();
// const_reverse_iterator rbegin() const;
// const_reverse_iterator crbegin() const;
// reverse_iterator rend();
// const_reverse_iterator rend() const;
// const_reverse_iterator crend() const;
//
// Memory management:
// void reserve(size_t); // SEE IMPLEMENTATION FOR SOME GOTCHAS.
// size_t capacity() const;
// void shrink_to_fit();
//
// Size management:
// void clear();
// bool empty() const;
// size_t size() const;
// void resize(size_t);
// void resize(size_t count, const T& value);
//
// Positional insert and erase:
// void insert(const_iterator pos, size_type count, const T& value);
// void insert(const_iterator pos,
// InputIterator first, InputIterator last);
// iterator insert(const_iterator pos, const T& value);
// iterator insert(const_iterator pos, T&& value);
// iterator emplace(const_iterator pos, Args&&... args);
// iterator erase(const_iterator pos);
// iterator erase(const_iterator first, const_iterator last);
//
// End insert and erase:
// void push_front(const T&);
// void push_front(T&&);
// void push_back(const T&);
// void push_back(T&&);
// T& emplace_front(Args&&...);
// T& emplace_back(Args&&...);
// void pop_front();
// void pop_back();
//
// General:
// void swap(circular_deque&);
namespace base {
template <class T>
class circular_deque;
namespace internal {
// Start allocating nonempty buffers with this many entries. This is the
// external capacity so the internal buffer will be one larger (= 4) which is
// more even for the allocator. See the descriptions of internal vs. external
// capacity on the comment above the buffer_ variable below.
constexpr size_t kCircularBufferInitialCapacity = 3;
// A random-access const iterator over a circular_deque<T>.
//
// The iterator snapshots the deque's buffer pointer, capacity, and
// begin/end indices at creation time. When DCHECKs are enabled it also
// remembers the parent deque and the deque's generation counter so that any
// use after a container mutation can be diagnosed.
template <typename T>
class circular_deque_const_iterator {
 public:
  using difference_type = ptrdiff_t;
  using value_type = T;
  using pointer = const T*;
  using reference = const T&;
  using iterator_category = std::random_access_iterator_tag;

  circular_deque_const_iterator() = default;

  // Dereferencing.
  const T& operator*() const {
    CHECK_NE(index_, end_);
    CheckUnstableUsage();
    CheckValidIndex(index_);
    // SAFETY: Increment() and Decrement() and Add() operations ensure that
    // `index_` stays inside [begin_, end_] (while supporting wrap around for
    // the structure. This maintains that `index_` always points at a
    // valid position for the `buffer_`. We also CHECK above that `index_` is
    // not `end_` making it a valid pointer to dereference.
    return UNSAFE_BUFFERS(buffer_[index_]);
  }
  const T* operator->() const {
    CHECK_NE(index_, end_);
    CheckUnstableUsage();
    CheckValidIndex(index_);
    // SAFETY: Increment() and Decrement() and Add() operations ensure that
    // `index_` stays inside [begin_, end_] while supporting wrap around for
    // the structure. This maintains that `index_` always points at a
    // valid position for the `buffer_`. We also CHECK above that `index_` is
    // not `end_` making it a valid pointer to dereference.
    return &UNSAFE_BUFFERS(buffer_[index_]);
  }
  const value_type& operator[](difference_type i) const { return *(*this + i); }

  // Increment and decrement.
  circular_deque_const_iterator& operator++() {
    Increment();
    return *this;
  }
  circular_deque_const_iterator operator++(int) {
    circular_deque_const_iterator ret = *this;
    Increment();
    return ret;
  }
  circular_deque_const_iterator& operator--() {
    Decrement();
    return *this;
  }
  circular_deque_const_iterator operator--(int) {
    circular_deque_const_iterator ret = *this;
    Decrement();
    return ret;
  }

  // Random access mutation.
  friend circular_deque_const_iterator operator+(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(offset);
    return ret;
  }
  circular_deque_const_iterator& operator+=(difference_type offset) {
    Add(offset);
    return *this;
  }
  friend circular_deque_const_iterator operator-(
      const circular_deque_const_iterator& iter,
      difference_type offset) {
    circular_deque_const_iterator ret = iter;
    ret.Add(-offset);
    return ret;
  }
  circular_deque_const_iterator& operator-=(difference_type offset) {
    Add(-offset);
    return *this;
  }
  friend std::ptrdiff_t operator-(const circular_deque_const_iterator& lhs,
                                  const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return static_cast<std::ptrdiff_t>(lhs.OffsetFromBegin() -
                                       rhs.OffsetFromBegin());
  }

  // Comparisons.
  friend bool operator==(const circular_deque_const_iterator& lhs,
                         const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    return lhs.index_ == rhs.index_;
  }
  friend std::strong_ordering operator<=>(
      const circular_deque_const_iterator& lhs,
      const circular_deque_const_iterator& rhs) {
    lhs.CheckComparable(rhs);
    // The order is based on the position of the element in the circular_deque
    // rather than `index_` at which the element is stored in the ring buffer.
    return lhs.OffsetFromBegin() <=> rhs.OffsetFromBegin();
  }

 protected:
  friend class circular_deque<T>;

  // Creates an iterator referring to slot `index` of `parent`'s buffer. The
  // CHECKs below verify that `index` lies inside the live range
  // [begin_, end_] (end_ inclusive, representing the one-past-the-last
  // position), accounting for the range possibly wrapping around the buffer.
  circular_deque_const_iterator(const circular_deque<T>* parent, size_t index)
      : buffer_(parent->buffer_.data()),
        cap_(parent->buffer_.capacity()),
        begin_(parent->begin_),
        end_(parent->end_),
        index_(index) {
    if (begin_ <= end_) {
      CHECK_GE(index_, begin_);
      CHECK_LE(index_, end_);
    } else if (index_ >= begin_) {
      CHECK(index_ < cap_);
    } else {
      CHECK(index_ <= end_);
    }
#if DCHECK_IS_ON()
    parent_deque_ = parent;
    created_generation_ = parent->generation_;
#endif  // DCHECK_IS_ON()
  }

  // Returns the offset from the beginning index of the buffer to the current
  // item.
  size_t OffsetFromBegin() const {
    if (index_ >= begin_) {
      return index_ - begin_;  // On the same side as begin.
    }
    return cap_ - begin_ + index_;
  }

  // The size of the deque, ie. the number of elements in it.
  size_t Size() const {
    if (begin_ <= end_) {
      return end_ - begin_;
    }
    return cap_ - begin_ + end_;
  }

  // Most uses will be ++ and -- so use a simplified implementation.
  void Increment() {
    CheckUnstableUsage();
    CheckValidIndex(index_);
    CHECK_NE(index_, end_);
    index_++;
    if (index_ == cap_) {
      index_ = 0u;
    }
  }
  void Decrement() {
    CheckUnstableUsage();
    CheckValidIndexOrEnd(index_);
    CHECK_NE(index_, begin_);
    if (index_ == 0u) {
      index_ = cap_ - 1u;
    } else {
      index_--;
    }
  }
  // Moves the iterator by `delta` positions (may be negative), CHECKing that
  // the result stays within [begin(), end()] of the parent deque.
  void Add(difference_type delta) {
    CheckUnstableUsage();
#if DCHECK_IS_ON()
    if (delta <= 0) {
      CheckValidIndexOrEnd(index_);
    } else {
      CheckValidIndex(index_);
    }
#endif
    // It should be valid to add 0 to any iterator, even if the container is
    // empty and the iterator points to end(). The modulo below will divide
    // by 0 if the buffer capacity is empty, so it's important to check for
    // this case explicitly.
    if (delta == 0) {
      return;
    }
    const auto offset_from_begin =
        // The max allocation size is PTRDIFF_MAX, so this value can't be larger
        // than fits in ptrdiff_t.
        static_cast<difference_type>(OffsetFromBegin());
    const auto deque_size =
        // The max allocation size is PTRDIFF_MAX, so this value can't be larger
        // than fits in ptrdiff_t.
        static_cast<difference_type>(Size());
    if (delta >= 0) {
      // Check `offset_from_begin + delta <= deque_size` without overflowing.
      CHECK_LE(delta, deque_size - offset_from_begin);
    } else {
      // Check `offset_from_begin + delta >= 0` without overflowing. We avoid
      // negating a negative `delta` which can overflow. Instead negate the
      // positive number which can not.
      CHECK_GE(delta, -offset_from_begin) << offset_from_begin;
    }
    const auto new_offset =
        // The above checks verify that `offset_from_begin + delta` is in the
        // range [0, deque_size] and does not overflow, so it also fits in
        // `size_t`.
        static_cast<size_t>(offset_from_begin + delta);
    index_ = (new_offset + begin_) % cap_;
  }

#if DCHECK_IS_ON()
  // Validates `index` against the parent deque. Previously these helpers
  // ignored their parameter and always validated the member `index_`; every
  // current caller passes `index_`, so honoring the parameter is
  // behavior-preserving while making the API truthful.
  void CheckValidIndexOrEnd(size_t index) const {
    parent_deque_->CheckValidIndexOrEnd(index);
  }
  void CheckValidIndex(size_t index) const {
    parent_deque_->CheckValidIndex(index);
  }
  void CheckUnstableUsage() const {
    DCHECK(parent_deque_);
    // Since circular_deque doesn't guarantee stability, any attempt to
    // dereference this iterator after a mutation (i.e. the generation doesn't
    // match the original) in the container is illegal.
    DCHECK_EQ(created_generation_, parent_deque_->generation_)
        << "circular_deque iterator dereferenced after mutation.";
  }
  void CheckComparable(const circular_deque_const_iterator& other) const {
    DCHECK_EQ(parent_deque_, other.parent_deque_);
    // Since circular_deque doesn't guarantee stability, two iterators that
    // are compared must have been generated without mutating the container.
    // If this fires, the container was mutated between generating the two
    // iterators being compared.
    DCHECK_EQ(created_generation_, other.created_generation_);
  }
#else
  // No-op stands-ins when DCHECKs are compiled out. Parameters are unnamed
  // to avoid unused-parameter warnings.
  inline void CheckUnstableUsage() const {}
  inline void CheckComparable(const circular_deque_const_iterator&) const {}
  void CheckValidIndexOrEnd(size_t) const {}
  void CheckValidIndex(size_t) const {}
#endif  // DCHECK_IS_ON()

  // `buffer_` is not a raw_ptr<...> for performance reasons: Usually
  // on-stack pointer, pointing back to the collection being iterated, owned by
  // object that iterates over it. Additionally this is supported by the
  // analysis of sampling profiler data and tab_search:top100:2020.
  RAW_PTR_EXCLUSION const T* buffer_ = nullptr;
  size_t cap_ = 0u;    // Capacity of `buffer_`.
  size_t begin_ = 0u;  // Buffer index of the deque's first element.
  size_t end_ = 0u;    // Buffer index one past the deque's last element.
  size_t index_ = 0u;  // Buffer index this iterator currently refers to.
#if DCHECK_IS_ON()
  RAW_PTR_EXCLUSION const circular_deque<T>* parent_deque_ = nullptr;
  // The generation of the parent deque when this iterator was created. The
  // container will update the generation for every modification so we can
  // test if the container was modified by comparing them.
  uint64_t created_generation_ = 0u;
#endif  // DCHECK_IS_ON()
};
// The mutable iterator: identical navigation and checking to the const
// iterator, differing only in that dereferencing yields non-const references.
template <typename T>
class circular_deque_iterator : public circular_deque_const_iterator<T> {
  using base = circular_deque_const_iterator<T>;

 public:
  friend class circular_deque<T>;

  using difference_type = std::ptrdiff_t;
  using value_type = T;
  using pointer = T*;
  using reference = T&;
  using iterator_category = std::random_access_iterator_tag;

  // A default-constructed iterator is not attached to any deque.
  circular_deque_iterator() = default;

  // Dereferencing. All validity checking lives in the const base class;
  // these wrappers only strip constness from its result.
  T& operator*() const { return const_cast<T&>(base::operator*()); }
  T* operator->() const { return const_cast<T*>(base::operator->()); }
  T& operator[](difference_type i) {
    return const_cast<T&>(base::operator[](i));
  }

  // Increment and decrement.
  circular_deque_iterator& operator++() {
    base::Increment();
    return *this;
  }
  circular_deque_iterator operator++(int) {
    circular_deque_iterator snapshot = *this;
    base::Increment();
    return snapshot;
  }
  circular_deque_iterator& operator--() {
    base::Decrement();
    return *this;
  }
  circular_deque_iterator operator--(int) {
    circular_deque_iterator snapshot = *this;
    base::Decrement();
    return snapshot;
  }

  // Random access mutation.
  friend circular_deque_iterator operator+(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator moved = iter;
    moved.Add(offset);
    return moved;
  }
  circular_deque_iterator& operator+=(difference_type offset) {
    base::Add(offset);
    return *this;
  }
  friend circular_deque_iterator operator-(const circular_deque_iterator& iter,
                                           difference_type offset) {
    circular_deque_iterator moved = iter;
    moved.Add(-offset);
    return moved;
  }
  circular_deque_iterator& operator-=(difference_type offset) {
    base::Add(-offset);
    return *this;
  }

 private:
  circular_deque_iterator(const circular_deque<T>* parent, size_t index)
      : circular_deque_const_iterator<T>(parent, index) {}
};
} // namespace internal
template <typename T>
class circular_deque {
private:
using VectorBuffer = internal::VectorBuffer<T>;
public:
using value_type = T;
using size_type = size_t;
using difference_type = std::ptrdiff_t;
using reference = value_type&;
using const_reference = const value_type&;
using pointer = value_type*;
using const_pointer = const value_type*;
using iterator = internal::circular_deque_iterator<T>;
using const_iterator = internal::circular_deque_const_iterator<T>;
using reverse_iterator = std::reverse_iterator<iterator>;
using const_reverse_iterator = std::reverse_iterator<const_iterator>;
// ---------------------------------------------------------------------------
// Constructors.

// Constructs an empty deque with no backing allocation.
constexpr circular_deque() = default;

// Constructs with `count` copies of a default-constructed T.
explicit circular_deque(size_type count) { resize(count); }

// Constructs with `count` copies of `value`.
circular_deque(size_type count, const T& value) { resize(count, value); }

// Construct a deque by constructing its elements from each element in
// `[first, last)`.
//
// Prefer using the `from_range_t` constructor, which builds a deque from a
// range, instead of from problematic iterator pairs.
//
// # Safety
// The `first` and `last` iterators must be from the same container, with
// `first <= last`.
template <class InputIterator>
  requires(std::input_iterator<InputIterator>)
UNSAFE_BUFFER_USAGE circular_deque(InputIterator first, InputIterator last)
    : circular_deque() {
  // SAFETY: The caller is responsible for giving iterators from the same
  // container.
  UNSAFE_BUFFERS(assign(first, last));
}

// Constructs a deque from the elements in a range (a container or span),
// typically by copy-constructing if the range also holds objects of type
// `T`.
//
// Example:
// ```
// int values[] = {1, 3};
// circular_deque<int> deq(base::from_range, values);
// ```
template <typename Range>
  requires(std::ranges::input_range<Range>)
circular_deque(base::from_range_t, Range&& value) : circular_deque() {
  assign_range(std::forward<Range>(value));
}

// Copy: the buffer is sized up front to hold `other`'s elements plus the one
// slot reserved for end() (see capacity()), then the elements are copied.
circular_deque(const circular_deque& other) : buffer_(other.size() + 1) {
  assign_range(other);
}
// Move: steals `other`'s buffer and indices, leaving `other` empty.
circular_deque(circular_deque&& other) noexcept
    : buffer_(std::move(other.buffer_)),
      begin_(std::exchange(other.begin_, 0u)),
      end_(std::exchange(other.end_, 0u)) {}

circular_deque(std::initializer_list<value_type> init) { assign(init); }

// Destroys the live elements in [begin_, end_); the buffer releases its own
// storage.
~circular_deque() { DestructRange(begin_, end_); }
// ---------------------------------------------------------------------------
// Assignments.
//
// All of these may invalidate iterators and references.

// Copy-assignment. Self-assignment is a no-op.
circular_deque& operator=(const circular_deque& other) {
  if (&other == this) {
    return *this;
  }
  reserve(other.size());
  assign_range(other);
  return *this;
}
// Move-assignment. Destroys the current contents, then takes over `other`'s
// buffer, leaving `other` empty. Self-assignment is a no-op.
circular_deque& operator=(circular_deque&& other) noexcept {
  if (&other == this) {
    return *this;
  }
  // We're about to overwrite the buffer, so don't free it in clear to
  // avoid doing it twice.
  ClearRetainCapacity();
  buffer_ = std::move(other.buffer_);
  begin_ = std::exchange(other.begin_, 0u);
  end_ = std::exchange(other.end_, 0u);
  IncrementGeneration();
  return *this;
}
circular_deque& operator=(std::initializer_list<value_type> ilist) {
  reserve(ilist.size());
  assign_range(ilist);
  return *this;
}

// Replaces the contents with `count` copies of `value`.
void assign(size_type count, const value_type& value) {
  ClearRetainCapacity();
  reserve(count);
  for (size_t i = 0; i < count; i++) {
    emplace_back(value);
  }
  IncrementGeneration();
}
// Replaces the contents with elements constructed from each element in
// `[first, last)`, typically by copy-constructing if the iterators are also
// over objects of type `T`.
//
// # Safety
// Requires that `first` and `last` are valid iterators into a container, with
// `first <= last`.
template <typename InputIterator>
  requires(std::forward_iterator<InputIterator>)
UNSAFE_BUFFER_USAGE void assign(InputIterator first, InputIterator last) {
  // Possible future enhancement, dispatch on iterator tag type. For forward
  // iterators we can use std::difference to preallocate the space required
  // and only do one copy.
  ClearRetainCapacity();
  // SAFETY: Pointers are iterators, so `first` may be a pointer. We require
  // the caller to provide valid pointers such that `last` is for the same
  // allocation and `first <= last`, and we've checked in the loop condition
  // that `first != last` so incrementing will stay a valid pointer for the
  // allocation.
  for (; first != last; UNSAFE_BUFFERS(++first)) {
    emplace_back(*first);
  }
  IncrementGeneration();
}
// Replaces the contents with copies of the elements in the initializer list.
void assign(std::initializer_list<value_type> value) { assign_range(value); }
// Replaces the contents with elements constructed from each element in a
// range (a container or span), typically by copy-constructing if the range
// also holds objects of type `T`.
template <typename Range>
  requires(std::ranges::input_range<Range>)
void assign_range(Range&& range) {
  reserve(std::ranges::distance(range));
  // SAFETY: begin() and end() produce iterators from the same container with
  // begin <= end.
  UNSAFE_BUFFERS(assign(std::ranges::begin(range), std::ranges::end(range)));
}
// ---------------------------------------------------------------------------
// Accessors.
//
// Exceptions are not used in this class, so at() and operator[] behave
// identically (both CHECK on out-of-range access).

// Returns the i-th element counted from front(). CHECKs that `i` is in
// range.
const value_type& at(size_type i) const {
  CHECK_LT(i, size());
  // Number of slots from begin_ up to the physical end of the buffer.
  const size_t tail_len = buffer_.capacity() - begin_;
  // The logical index wraps to the front of the buffer only when the live
  // range itself wraps and `i` reaches past the tail segment.
  const bool wraps = begin_ > end_ && i >= tail_len;
  return wraps ? buffer_[i - tail_len] : buffer_[begin_ + i];
}
value_type& at(size_type i) {
  return const_cast<value_type&>(std::as_const(*this).at(i));
}
const value_type& operator[](size_type i) const { return at(i); }
value_type& operator[](size_type i) { return at(i); }

// First element. CHECKs that the deque is non-empty.
value_type& front() {
  CHECK(!empty());
  return buffer_[begin_];
}
const value_type& front() const {
  CHECK(!empty());
  return buffer_[begin_];
}

// Last element. CHECKs that the deque is non-empty.
value_type& back() {
  CHECK(!empty());
  return *(end() - 1);
}
const value_type& back() const {
  CHECK(!empty());
  return *(end() - 1);
}
// ---------------------------------------------------------------------------
// Iterators.
//
// NOTE: iterators are not stable; any mutation of the container invalidates
// them (enforced via the generation counter when DCHECKs are on).

iterator begin() { return iterator(this, begin_); }
const_iterator begin() const { return const_iterator(this, begin_); }
const_iterator cbegin() const { return const_iterator(this, begin_); }

iterator end() { return iterator(this, end_); }
const_iterator end() const { return const_iterator(this, end_); }
const_iterator cend() const { return const_iterator(this, end_); }

reverse_iterator rbegin() { return reverse_iterator(end()); }
const_reverse_iterator rbegin() const {
  return const_reverse_iterator(end());
}
const_reverse_iterator crbegin() const { return rbegin(); }

reverse_iterator rend() { return reverse_iterator(begin()); }
const_reverse_iterator rend() const {
  return const_reverse_iterator(begin());
}
const_reverse_iterator crend() const { return rend(); }
// ---------------------------------------------------------------------------
// Memory management.

// IMPORTANT NOTE ON reserve(...): This class implements auto-shrinking of
// the buffer when elements are deleted and there is "too much" wasted space.
// So if you call reserve() with a large size in anticipation of pushing many
// elements, but pop an element before the queue is full, the capacity you
// reserved may be lost.
//
// As a result, it's only worthwhile to call reserve() when you're adding
// many things at once with no intermediate operations.
void reserve(size_type new_capacity) {
  // Only ever grows; a smaller request is ignored.
  if (new_capacity > capacity()) {
    SetCapacityTo(new_capacity);
  }
}

// Number of elements the deque can hold without reallocating. One slot of
// the internal buffer is reserved to distinguish a full buffer from an
// empty one.
size_type capacity() const {
  // One item is wasted to indicate end().
  return buffer_.capacity() == 0 ? 0 : buffer_.capacity() - 1;
}

void shrink_to_fit() {
  if (empty()) {
    // Optimize empty case to really delete everything if there was
    // something.
    if (buffer_.capacity()) {
      buffer_ = VectorBuffer();
    }
  } else {
    SetCapacityTo(size());
  }
}
// ---------------------------------------------------------------------------
// Size management.

// Removes every element and releases the backing allocation, so capacity()
// drops to 0 as well.
void clear() {
  // Implemented directly rather than as resize(0): resize() requires T to be
  // default-constructible just to compile, which not every element type is.
  ClearRetainCapacity();
  buffer_ = VectorBuffer();
}

// True when the deque holds no elements.
bool empty() const { return begin_ == end_; }

// Number of elements currently stored, accounting for the live range
// possibly wrapping around the end of the buffer.
size_type size() const {
  return begin_ <= end_ ? end_ - begin_
                        : buffer_.capacity() - begin_ + end_;
}
// When reducing size, the elements are deleted from the end. When expanding
// size, elements are added to the end with |value| or the default
// constructed version. Even when using resize(count) to shrink, a default
// constructor is required for the code to compile, even though it will not
// be called.
//
// There are two versions rather than using a default value to avoid
// creating a temporary when shrinking (when it's not needed). Plus if
// the default constructor is desired when expanding usually just calling it
// for each element is faster than making a default-constructed temporary and
// copying it.
void resize(size_type count) {
  // SEE BELOW VERSION if you change this. The code is mostly the same.
  if (count > size()) {
    // This could be slightly more efficient but expanding a queue with
    // identical elements is unusual and the extra computations of emplacing
    // one-by-one will typically be small relative to calling the constructor
    // for every item.
    ExpandCapacityIfNecessary(count - size());
    while (size() < count) {
      emplace_back();
    }
  } else if (count < size()) {
    // Shrink: destroy the trailing elements in place, then pull `end_` back.
    // The new end is computed modulo capacity since the live range may wrap.
    size_t new_end = (begin_ + count) % buffer_.capacity();
    DestructRange(new_end, end_);
    end_ = new_end;
    ShrinkCapacityIfNecessary();
  }
  IncrementGeneration();
}
void resize(size_type count, const value_type& value) {
  // SEE ABOVE VERSION if you change this. The code is mostly the same.
  if (count > size()) {
    ExpandCapacityIfNecessary(count - size());
    while (size() < count) {
      emplace_back(value);
    }
  } else if (count < size()) {
    size_t new_end = (begin_ + count) % buffer_.capacity();
    DestructRange(new_end, end_);
    end_ = new_end;
    ShrinkCapacityIfNecessary();
  }
  IncrementGeneration();
}
// ---------------------------------------------------------------------------
// Insert and erase.
//
// Insertion and deletion in the middle is O(n) and invalidates all existing
// iterators.
//
// The implementation of insert isn't optimized as much as it could be. If
// the insertion requires that the buffer be grown, it will first be grown
// and everything moved, and then the items will be inserted, potentially
// moving some items twice. This simplifies the implemntation substantially
// and means less generated templatized code. Since this is an uncommon
// operation for deques, and already relatively slow, it doesn't seem worth
// the benefit to optimize this.
void insert(const_iterator pos, size_type count, const T& value) {
ValidateIterator(pos);
// Optimize insert at the beginning.
if (pos == begin()) {
ExpandCapacityIfNecessary(count);
for (size_t i = 0; i < count; i++) {
push_front(value);
}
return;
}
CHECK_LT(pos.index_, buffer_.capacity());
iterator insert_cur(this, pos.index_);
iterator insert_end;
MakeRoomFor(count, &insert_cur, &insert_end);
while (insert_cur < insert_end) {
std::construct_at(buffer_.get_at(insert_cur.index_), value);
++insert_cur;
}
IncrementGeneration();
}
// Inserts copies of [first, last) before `pos`. All existing iterators are
// invalidated. A forward iterator is required so the range can be measured
// with std::distance() before any elements are shifted.
template <class InputIterator>
requires(std::forward_iterator<InputIterator>)
void insert(const_iterator pos, InputIterator first, InputIterator last) {
ValidateIterator(pos);
const size_t inserted_items =
checked_cast<size_t>(std::distance(first, last));
if (inserted_items == 0u) {
return; // Can divide by 0 when doing modulo below, so return early.
}
// Make a hole to copy the items into.
iterator insert_cur;
iterator insert_end;
if (pos == begin()) {
// Optimize insert at the beginning, nothing needs to be shifted and the
// hole is the |inserted_items| block immediately before |begin_|.
ExpandCapacityIfNecessary(inserted_items);
const size_t old_begin = begin_;
begin_ = (old_begin + buffer_.capacity() - inserted_items) %
buffer_.capacity();
insert_cur = begin();
insert_end = iterator(this, old_begin);
} else {
CHECK_LT(pos.index_, buffer_.capacity());
insert_cur = iterator(this, pos.index_);
MakeRoomFor(inserted_items, &insert_cur, &insert_end);
}
// Copy the items into the uninitialized hole.
while (insert_cur < insert_end) {
std::construct_at(buffer_.get_at(insert_cur.index_), *first);
++insert_cur;
// SAFETY: The input iterator may be a pointer, in which case we will
// produce UB if `first` is incremented past `last`. We use checked_cast
// of std::distance to an unsigned value above, which ensures that
// `last >= first`. Then we need `insert_end - insert_cur <= last - first`:
// - If inserting at the start, pos == begin() and `insert_cur` is
// positioned at `begin_ - (last - first)`, and `insert_end` is
// positioned at `begin_` so we have
// `insert_end - insert_cur == last - first`.
// - If inserting elsewhere, `MakeRoomFor(last - first, ...)` returns an
// iterator pair with distance of `last - first`, so we have
// `insert_end - insert_cur == last - first`.
UNSAFE_BUFFERS(++first);
}
IncrementGeneration();
}
// These all return an iterator to the inserted item. Existing iterators will
// be invalidated.
iterator insert(const_iterator pos, const T& value) {
// Copy-insert: delegates to emplace(), which copy-constructs in place.
return emplace(pos, value);
}
iterator insert(const_iterator pos, T&& value) {
// Move-insert: delegates to emplace(), which move-constructs in place.
return emplace(pos, std::move(value));
}
// Constructs an element in place before `pos` and returns an iterator to the
// new element. Existing iterators are invalidated.
template <class... Args>
iterator emplace(const_iterator pos, Args&&... args) {
ValidateIterator(pos);
// Optimize insert at beginning which doesn't require shifting.
if (pos == cbegin()) {
emplace_front(std::forward<Args>(args)...);
return begin();
}
// Do this before we make the new iterators we return, so the returned
// iterator carries the post-mutation generation and is not itself flagged
// as invalidated.
IncrementGeneration();
CHECK_LT(pos.index_, buffer_.capacity());
iterator insert_begin(this, pos.index_);
iterator insert_end;
// Opens a one-element uninitialized gap at `pos` (may reallocate).
MakeRoomFor(1, &insert_begin, &insert_end);
std::construct_at(buffer_.get_at(insert_begin.index_),
std::forward<Args>(args)...);
return insert_begin;
}
// Calling erase() won't automatically resize the buffer smaller like resize
// or the pop functions. Erase is slow and relatively uncommon, and for
// normal deque usage a pop will normally be done on a regular basis that
// will prevent excessive buffer usage over long periods of time. It's not
// worth having the extra code for every template instantiation of erase()
// to resize capacity downward to a new buffer.
//
// Single-element erase, implemented as a one-element range erase.
iterator erase(const_iterator pos) { return erase(pos, pos + 1); }
// Erases [pos_begin, pos_end) and returns an iterator to the element that
// followed the erased range. Existing iterators are invalidated.
iterator erase(const_iterator pos_begin, const_iterator pos_end) {
ValidateIterator(pos_begin);
ValidateIterator(pos_end);
IncrementGeneration();
if (pos_begin.index_ == pos_end.index_) {
// Nothing deleted. Need to return early to avoid falling through to
// moving items on top of themselves.
return iterator(this, pos_begin.index_);
}
// First, call the destructor on the deleted items.
DestructRange(pos_begin.index_, pos_end.index_);
if (pos_begin.index_ == begin_) {
// This deletion is from the beginning. Nothing needs to be copied, only
// begin_ needs to be updated.
begin_ = pos_end.index_;
return iterator(this, pos_end.index_);
}
// In an erase operation, the shifted items all move logically to the left,
// so move them from left-to-right.
//
// The elements are being moved to memory where the T objects were
// previously destroyed.
//
// TODO(danakj): We could skip destruction and do MoveAssignRange here, for
// the elements that are being replaced.
size_t move_src = pos_end.index_;
const size_t move_src_end = end_;
size_t move_dest = pos_begin.index_;
const size_t cap = buffer_.capacity();
// One element at a time so the indices can wrap around the buffer end.
while (move_src != move_src_end) {
VectorBuffer::MoveConstructRange(buffer_.subspan(move_src, 1u),
buffer_.subspan(move_dest, 1u));
move_src = (move_src + 1u) % cap;
move_dest = (move_dest + 1u) % cap;
}
end_ = move_dest;
// Since we did not reallocate and only changed things after the erase
// element(s), the input iterator's index points to the thing following the
// deletion.
return iterator(this, pos_begin.index_);
}
// ---------------------------------------------------------------------------
// Begin/end operations.
// The push variants forward to the emplace variants, which handle capacity
// growth, index wrap-around, and generation bumping.
void push_front(const T& value) { emplace_front(value); }
void push_front(T&& value) { emplace_front(std::move(value)); }
void push_back(const T& value) { emplace_back(value); }
void push_back(T&& value) { emplace_back(std::move(value)); }
// Constructs a new element in place at the front and returns a reference to
// it. Invalidates existing iterators.
template <class... Args>
reference emplace_front(Args&&... args) {
  ExpandCapacityIfNecessary(1);
  // Step `begin_` one slot to the left, wrapping around the circular buffer.
  begin_ = (begin_ == 0) ? (buffer_.capacity() - 1) : (begin_ - 1);
  IncrementGeneration();
  std::construct_at(buffer_.get_at(begin_), std::forward<Args>(args)...);
  return front();
}
// Constructs a new element in place at the back and returns a reference to
// it. Invalidates existing iterators.
template <class... Args>
reference emplace_back(Args&&... args) {
  ExpandCapacityIfNecessary(1);
  std::construct_at(buffer_.get_at(end_), std::forward<Args>(args)...);
  // Advance `end_` one slot to the right, wrapping around the circular
  // buffer. `end_` is always < capacity, so the modulo form is equivalent to
  // the explicit wrap check.
  end_ = (end_ + 1u) % buffer_.capacity();
  IncrementGeneration();
  return back();
}
// Removes the first element. The deque must be non-empty. May shrink the
// underlying buffer when most of the capacity is unused.
void pop_front() {
CHECK(!empty());
DestructRange(begin_, begin_ + 1u);
begin_++;
if (begin_ == buffer_.capacity()) {
begin_ = 0;
}
ShrinkCapacityIfNecessary();
// Technically popping will not invalidate any iterators since the
// underlying buffer will be stable. But in the future we may want to add a
// feature that resizes the buffer smaller if there is too much wasted
// space. This ensures we can make such a change safely.
IncrementGeneration();
}
// Removes the last element. The deque must be non-empty. May shrink the
// underlying buffer when most of the capacity is unused.
void pop_back() {
CHECK(!empty());
// Step `end_` back one slot (with wrap) before destroying, since `end_` is
// one past the last element.
if (end_ == 0) {
end_ = buffer_.capacity() - 1;
} else {
end_--;
}
DestructRange(end_, end_ + 1u);
ShrinkCapacityIfNecessary();
// See pop_front comment about why this is here.
IncrementGeneration();
}
// ---------------------------------------------------------------------------
// General operations.
// Swaps contents with `other` in O(1). Iterators into both deques are
// invalidated: after the swap they refer to the other container's storage.
void swap(circular_deque& other) {
  std::swap(buffer_, other.buffer_);
  std::swap(begin_, other.begin_);
  std::swap(end_, other.end_);
  // Bump the generation of *both* deques. Previously only this deque's
  // generation was incremented, so in DCHECK builds stale iterators into
  // `other` would not be caught by CheckUnstableUsage(). IncrementGeneration()
  // is a no-op in release builds, so release behavior is unchanged.
  IncrementGeneration();
  other.IncrementGeneration();
}
// ADL-found free swap, forwarding to the member implementation.
friend void swap(circular_deque& lhs, circular_deque& rhs) { lhs.swap(rhs); }
private:
friend internal::circular_deque_iterator<T>;
friend internal::circular_deque_const_iterator<T>;
// Moves the items in the given circular buffer to the current one. The source
// is moved from so will become invalid. The destination buffer must have
// already been allocated with enough size. The moved items always land at the
// start of `to_buf`, so `*to_begin` is set to 0 and `*to_end` to the number
// of items moved.
//
// # Safety
// `from_begin` must be less than and `from_end` less-than-or-equal-to the
// capacity of `from_buf`, or Undefined Behaviour may result. The range may
// wrap: `from_begin > from_end` denotes a discontiguous (wrapped) range and
// is handled below; `from_begin == from_end` denotes an empty buffer.
UNSAFE_BUFFER_USAGE static void MoveBuffer(VectorBuffer& from_buf,
size_t from_begin,
size_t from_end,
VectorBuffer& to_buf,
size_t* to_begin,
size_t* to_end) {
*to_begin = 0;
if (from_begin < from_end) {
// Contiguous.
VectorBuffer::MoveConstructRange(
from_buf.subspan(from_begin, from_end - from_begin),
to_buf.subspan(0u, from_end - from_begin));
*to_end = from_end - from_begin;
} else if (from_begin > from_end) {
// Discontiguous, copy the right side to the beginning of the new buffer.
span<T> right_side = from_buf.subspan(from_begin);
VectorBuffer::MoveConstructRange(right_side,
to_buf.subspan(0u, right_side.size()));
// Append the left side.
span<T> left_side = from_buf.subspan(0u, from_end);
VectorBuffer::MoveConstructRange(
left_side, to_buf.subspan(right_side.size(), left_side.size()));
*to_end = left_side.size() + right_side.size();
} else {
// No items.
*to_end = 0;
}
}
// Reallocates the buffer to hold `new_capacity` elements (external capacity;
// one extra internal slot is added below). This assumes the size is larger
// than the number of elements in the vector (it won't call delete on
// anything). All elements are moved to the new buffer, landing at index 0,
// so `begin_` and `end_` are rewritten by MoveBuffer().
void SetCapacityTo(size_t new_capacity) {
// Use the capacity + 1 as the internal buffer size to differentiate
// empty and full (see definition of buffer_ below).
VectorBuffer new_buffer(new_capacity + 1u);
// SAFETY: This class maintains an invariant that `begin_` and `end_` are
// less than `buffer_`'s capacity.
UNSAFE_BUFFERS(
MoveBuffer(buffer_, begin_, end_, new_buffer, &begin_, &end_));
buffer_ = std::move(new_buffer);
}
// Grows the buffer, if needed, so it can hold `additional_elts` more elements
// beyond the current size. Growth is at least 25% of the current capacity to
// amortize reallocation cost.
void ExpandCapacityIfNecessary(size_t additional_elts) {
const size_t cur_size = size();
const size_t cur_capacity = capacity();
// Protect against overflow when adding `additional_elts`, and exceeding the
// max allocation size.
CHECK_LE(additional_elts, PTRDIFF_MAX - cur_size);
size_t min_new_capacity = cur_size + additional_elts;
if (cur_capacity >= min_new_capacity) {
return; // Already enough room.
}
min_new_capacity =
std::max(min_new_capacity, internal::kCircularBufferInitialCapacity);
// std::vector always grows by at least 50%. WTF::Deque grows by at least
// 25%. We expect queue workloads to generally stay at a similar size and
// grow less than a vector might, so use 25%.
SetCapacityTo(std::max(min_new_capacity, cur_capacity + cur_capacity / 4u));
}
// Shrinks the buffer when at least half the capacity is wasted, keeping ~25%
// of the size as headroom and never going below the initial capacity.
void ShrinkCapacityIfNecessary() {
// Don't auto-shrink below this size.
if (capacity() <= internal::kCircularBufferInitialCapacity) {
return;
}
// Shrink when 100% of the size() is wasted.
size_t sz = size();
size_t empty_spaces = capacity() - sz;
if (empty_spaces < sz) {
return;
}
// Leave 1/4 the size as free capacity, not going below the initial
// capacity.
size_t new_capacity =
std::max(internal::kCircularBufferInitialCapacity, sz + sz / 4);
if (new_capacity < capacity()) {
// SetCapacityTo() itself adds the extra internal slot used to tell empty
// from full, so the external capacity is passed here.
SetCapacityTo(new_capacity);
}
}
// Backend for clear() but does not resize the internal buffer.
void ClearRetainCapacity() {
// This can't resize(0) because that requires a default constructor to
// compile, which not all contained classes may implement.
// DestructRange() handles a possibly-wrapped [begin_, end_) range. This
// class maintains the invariant that `begin_` and `end_` are less than
// `buffer_`'s capacity, so the indices are in range.
DestructRange(begin_, end_);
begin_ = 0;
end_ = 0;
IncrementGeneration();
}
// Runs destructors for the buffer slots in [begin, end). The range may wrap
// around the end of the circular buffer. The buffer is not resized, and the
// begin_/end_ members are not modified.
void DestructRange(size_t begin, size_t end) {
  if (begin < end) {
    // Contiguous range.
    VectorBuffer::DestructRange(buffer_.subspan(begin, end - begin));
  } else if (begin > end) {
    // Wrapped range: destroy the tail segment, then the head segment.
    VectorBuffer::DestructRange(buffer_.subspan(begin));
    VectorBuffer::DestructRange(buffer_.subspan(0u, end));
  }
  // begin == end: empty range, nothing to destroy.
}
// Makes room for |count| items starting at |*insert_begin|. Since iterators
// are not stable across buffer resizes, |*insert_begin| will be updated to
// point to the beginning of the newly opened position in the new array (it's
// in/out), and the end of the newly opened position (it's out-only). The
// opened slots contain uninitialized memory which the caller must construct
// into.
void MakeRoomFor(size_t count, iterator* insert_begin, iterator* insert_end) {
if (count == 0) {
*insert_end = *insert_begin;
return;
}
// The offset from the beginning will be stable across reallocations.
size_t begin_offset = insert_begin->OffsetFromBegin();
ExpandCapacityIfNecessary(count);
// Update the new end and prepare the iterators for copying. The newly
// used space contains uninitialized memory.
const size_t cap = buffer_.capacity();
size_t src = end_;
end_ = (end_ + count) % cap;
size_t dest = end_;
*insert_begin = iterator(this, (begin_ + begin_offset) % cap);
*insert_end = iterator(this, (insert_begin->index_ + count) % cap);
// Move the elements. This will always involve shifting logically to the
// right, so move in a right-to-left order. Each element is moved into an
// uninitialized slot, one at a time so indices can wrap around the buffer.
while (true) {
if (src == insert_begin->index_) {
break;
}
src = (src + cap - 1u) % cap;
dest = (dest + cap - 1u) % cap;
VectorBuffer::MoveConstructRange(buffer_.subspan(src, 1u),
buffer_.subspan(dest, 1u));
}
}
#if DCHECK_IS_ON()
// Asserts the given index is dereferencable. The index is an index into the
// buffer, not an index used by operator[] or at() which will be offsets from
// begin.
void CheckValidIndex(size_t i) const {
if (begin_ <= end_) {
DCHECK(i >= begin_ && i < end_);
} else {
// The live range wraps around the end of the buffer, so valid indices
// are split into two segments.
DCHECK((i >= begin_ && i < buffer_.capacity()) || i < end_);
}
}
// Asserts the given index is either dereferencable or points to end().
void CheckValidIndexOrEnd(size_t i) const {
if (i != end_) {
CheckValidIndex(i);
}
}
// Asserts `i` belongs to this deque and has not been invalidated by a
// generation change (see generation_ below).
void ValidateIterator(const const_iterator& i) const {
DCHECK(i.parent_deque_ == this);
i.CheckUnstableUsage();
}
// See generation_ below.
void IncrementGeneration() { generation_++; }
#else
// No-op versions of these functions for release builds.
void CheckValidIndex(size_t) const {}
void CheckValidIndexOrEnd(size_t) const {}
void ValidateIterator(const const_iterator& i) const {}
void IncrementGeneration() {}
#endif
// Danger, the buffer_.capacity() is the "internal capacity" which is
// capacity() + 1 since there is an extra item to indicate the end. Otherwise
// being completely empty and completely full are indistinguishable (begin ==
// end). We could add a separate flag to avoid it, but that adds significant
// extra complexity since every computation will have to check for it. Always
// keeping one extra unused element in the buffer makes iterator computations
// much simpler.
//
// Container internal code will want to use buffer_.capacity() for offset
// computations rather than capacity().
VectorBuffer buffer_;
// Buffer index of the first element (inclusive).
size_type begin_ = 0;
// Buffer index one past the last element (exclusive). May be less than
// begin_ when the live range wraps around the end of the buffer.
size_type end_ = 0;
#if DCHECK_IS_ON()
// Incremented every time a modification is made that could affect iterator
// invalidations.
uint64_t generation_ = 0;
#endif
};
// Implementations of base::Erase[If] (see base/stl_util.h).
//
// Removes every element equal to `value` from `container` and returns the
// number of elements removed.
template <class T, class Value>
size_t Erase(circular_deque<T>& container, const Value& value) {
  const auto new_end = ranges::remove(container, value);
  const size_t num_removed =
      static_cast<size_t>(std::distance(new_end, container.end()));
  container.erase(new_end, container.end());
  return num_removed;
}
// Removes every element of `container` satisfying `pred` and returns the
// number of elements removed.
template <class T, class Predicate>
size_t EraseIf(circular_deque<T>& container, Predicate pred) {
  const auto new_end = ranges::remove_if(container, pred);
  const size_t num_removed =
      static_cast<size_t>(std::distance(new_end, container.end()));
  container.erase(new_end, container.end());
  return num_removed;
}
} // namespace base
#endif // BASE_CONTAINERS_CIRCULAR_DEQUE_H_