use alloc::alloc::{alloc, dealloc};
use core::{alloc::Layout, ptr::NonNull, slice};

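/// A ring buffer of bytes backed by a single heap allocation.
///
/// `head` is the index of the oldest byte and `tail` is the index one past the
/// newest byte. One slot is always kept unused so that `head == tail` can
/// unambiguously mean "empty".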
pub struct RingBuffer {
    buf: NonNull<u8>,
    cap: usize,
    head: usize,
    tail: usize,
}

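// SAFETY: `RingBuffer` exclusively owns its allocation, so moving it to
// another thread cannot introduce aliasing of the raw pointer.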
unsafe impl Send for RingBuffer {}

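// SAFETY: shared references only allow reads; any mutation goes through
// `&mut self`, so concurrent `&RingBuffer` access is safe.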
unsafe impl Sync for RingBuffer {}

impl RingBuffer {
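    /// Creates a new, empty ring buffer without allocating.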
    pub fn new() -> Self {
        RingBuffer {
            buf: NonNull::dangling(),
            cap: 0,
            head: 0,
            tail: 0,
        }
    }

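    /// Returns the number of bytes currently stored in the buffer.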
    pub fn len(&self) -> usize {
        let (x, y) = self.data_slice_lengths();
        x + y
    }

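    /// Returns how many bytes can be added before the buffer has to grow.
    /// One slot is reserved as the empty/full sentinel, hence the `- 1`.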
    pub fn free(&self) -> usize {
        let (x, y) = self.free_slice_lengths();
        (x + y).saturating_sub(1)
    }

    pub fn clear(&mut self) {
        self.head = 0;
        self.tail = 0;
    }

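    /// Returns true if the buffer contains no data.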
    pub fn is_empty(&self) -> bool {
        self.head == self.tail
    }

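    /// Ensures there is space for at least `amount` additional bytes.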
    pub fn reserve(&mut self, amount: usize) {
        let free = self.free();
        if free >= amount {
            return;
        }

        self.reserve_amortized(amount - free);
    }

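    /// Cold path of [`Self::reserve`]: grows to the next power of two (plus one
    /// sentinel slot), copies the existing data to the front of the new
    /// allocation and frees the old one.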
    #[inline(never)]
    #[cold]
    fn reserve_amortized(&mut self, amount: usize) {
        let current_layout = unsafe { Layout::array::<u8>(self.cap).unwrap_unchecked() };

        let new_cap = usize::max(
            self.cap.next_power_of_two(),
            (self.cap + amount).next_power_of_two(),
        ) + 1;

        #[allow(clippy::assertions_on_constants)]
        {
            debug_assert!(usize::BITS >= 64 || new_cap < isize::MAX as usize);
        }

        let new_layout = Layout::array::<u8>(new_cap)
            .unwrap_or_else(|_| panic!("Could not create layout for u8 array of size {}", new_cap));

        let new_buf = unsafe {
            let new_buf = alloc(new_layout);

            NonNull::new(new_buf).expect("Allocating new space for the ringbuffer failed")
        };

        if self.cap > 0 {
            let ((s1_ptr, s1_len), (s2_ptr, s2_len)) = self.data_slice_parts();

            unsafe {
                new_buf.as_ptr().copy_from_nonoverlapping(s1_ptr, s1_len);
                new_buf
                    .as_ptr()
                    .add(s1_len)
                    .copy_from_nonoverlapping(s2_ptr, s2_len);
                dealloc(self.buf.as_ptr(), current_layout);
            }

            self.tail = s1_len + s2_len;
            self.head = 0;
        }
        self.buf = new_buf;
        self.cap = new_cap;
    }

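    /// Appends a single byte, growing the buffer if necessary.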
    #[allow(dead_code)]
    pub fn push_back(&mut self, byte: u8) {
        self.reserve(1);

        unsafe { self.buf.as_ptr().add(self.tail).write(byte) };
        self.tail = (self.tail + 1) % self.cap;
    }

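    /// Returns the byte at logical index `idx` (0 is the oldest byte), or
    /// `None` if `idx` is out of bounds.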
    #[allow(dead_code)]
    pub fn get(&self, idx: usize) -> Option<u8> {
        if idx < self.len() {
            let idx = (self.head + idx) % self.cap;
            Some(unsafe { self.buf.as_ptr().add(idx).read() })
        } else {
            None
        }
    }
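
    /// Appends all bytes from `data`, growing the buffer if necessary.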
    pub fn extend(&mut self, data: &[u8]) {
        let len = data.len();
        let ptr = data.as_ptr();
        if len == 0 {
            return;
        }

        self.reserve(len);

        debug_assert!(self.len() + len <= self.cap - 1);
        debug_assert!(self.free() >= len, "free: {} len: {}", self.free(), len);

        let ((f1_ptr, f1_len), (f2_ptr, f2_len)) = self.free_slice_parts();
        debug_assert!(f1_len + f2_len >= len, "{} + {} < {}", f1_len, f2_len, len);

        let in_f1 = usize::min(len, f1_len);

        let in_f2 = len - in_f1;

        debug_assert!(in_f1 + in_f2 == len);

        unsafe {
            if in_f1 > 0 {
                f1_ptr.copy_from_nonoverlapping(ptr, in_f1);
            }
            if in_f2 > 0 {
                f2_ptr.copy_from_nonoverlapping(ptr.add(in_f1), in_f2);
            }
        }
        self.tail = (self.tail + len) % self.cap;
    }

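    /// Discards the `amount` oldest bytes by advancing `head`.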
    pub fn drop_first_n(&mut self, amount: usize) {
        debug_assert!(amount <= self.len());
        let amount = usize::min(amount, self.len());
        self.head = (self.head + amount) % self.cap;
    }

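    /// Returns the lengths of the (up to) two contiguous regions that hold
    /// data: the part starting at `head` and the part that wrapped around to
    /// the start of the buffer.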
    fn data_slice_lengths(&self) -> (usize, usize) {
        let len_after_head;
        let len_to_tail;

        if self.tail >= self.head {
            len_after_head = self.tail - self.head;
            len_to_tail = 0;
        } else {
            len_after_head = self.cap - self.head;
            len_to_tail = self.tail;
        }
        (len_after_head, len_to_tail)
    }

    fn data_slice_parts(&self) -> ((*const u8, usize), (*const u8, usize)) {
        let (len_after_head, len_to_tail) = self.data_slice_lengths();

        (
            (unsafe { self.buf.as_ptr().add(self.head) }, len_after_head),
            (self.buf.as_ptr(), len_to_tail),
        )
    }

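    /// Returns the stored bytes as (at most) two contiguous slices, in order.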
    pub fn as_slices(&self) -> (&[u8], &[u8]) {
        let (s1, s2) = self.data_slice_parts();
        unsafe {
            let s1 = slice::from_raw_parts(s1.0, s1.1);
            let s2 = slice::from_raw_parts(s2.0, s2.1);
            (s1, s2)
        }
    }

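    /// Returns the lengths of the (up to) two contiguous free regions: the
    /// part before `head` and the part after `tail`.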
    fn free_slice_lengths(&self) -> (usize, usize) {
        let len_to_head;
        let len_after_tail;

        if self.tail < self.head {
            len_after_tail = self.head - self.tail;
            len_to_head = 0;
        } else {
            len_after_tail = self.cap - self.tail;
            len_to_head = self.head;
        }
        (len_to_head, len_after_tail)
    }

    fn free_slice_parts(&self) -> ((*mut u8, usize), (*mut u8, usize)) {
        let (len_to_head, len_after_tail) = self.free_slice_lengths();

        (
            (unsafe { self.buf.as_ptr().add(self.tail) }, len_after_tail),
            (self.buf.as_ptr(), len_to_head),
        )
    }

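    /// Copies `len` bytes starting at logical offset `start` from the data
    /// already in the buffer and appends them at the end.
    ///
    /// Panics if `start + len > self.len()`.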
    #[allow(dead_code)]
    pub fn extend_from_within(&mut self, start: usize, len: usize) {
        if start + len > self.len() {
            panic!(
                "Calls to this function must respect start ({}) + len ({}) <= self.len() ({})!",
                start,
                len,
                self.len()
            );
        }

        self.reserve(len);

        unsafe { self.extend_from_within_unchecked(start, len) }
    }

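    /// Same as [`Self::extend_from_within`] but without the bounds check and
    /// without reserving space.
    ///
    /// # Safety
    /// The caller must guarantee `start + len <= self.len()` and
    /// `self.free() >= len`.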
    #[warn(unsafe_op_in_unsafe_fn)]
    pub unsafe fn extend_from_within_unchecked(&mut self, start: usize, len: usize) {
        debug_assert!(start + len <= self.len());
        debug_assert!(self.free() >= len);

        if self.head < self.tail {
            // Data is contiguous, but the free space may wrap around the end of the buffer.
            let after_tail = usize::min(len, self.cap - self.tail);

            let src = (
                unsafe { self.buf.as_ptr().add(self.head + start) }.cast_const(),
                self.tail - self.head - start,
            );

            let dst = (
                unsafe { self.buf.as_ptr().add(self.tail) },
                self.cap - self.tail,
            );

            unsafe { copy_bytes_overshooting(src, dst, after_tail) }

            if after_tail < len {
                // The write wrapped; continue at the start of the buffer.
                let src = (unsafe { src.0.add(after_tail) }, src.1 - after_tail);
                let dst = (self.buf.as_ptr(), self.head);

                unsafe { copy_bytes_overshooting(src, dst, len - after_tail) }
            }
        } else {
            if self.head + start > self.cap {
                // The source range starts in the wrapped-around part of the data.
                let start = (self.head + start) % self.cap;

                let src = (
                    unsafe { self.buf.as_ptr().add(start) }.cast_const(),
                    self.tail - start,
                );

                let dst = (
                    unsafe { self.buf.as_ptr().add(self.tail) },
                    self.head - self.tail,
                );

                unsafe { copy_bytes_overshooting(src, dst, len) }
            } else {
                // The source range starts before the wrap point and may cross it.
                let after_start = usize::min(len, self.cap - self.head - start);

                let src = (
                    unsafe { self.buf.as_ptr().add(self.head + start) }.cast_const(),
                    self.cap - self.head - start,
                );

                let dst = (
                    unsafe { self.buf.as_ptr().add(self.tail) },
                    self.head - self.tail,
                );

                unsafe { copy_bytes_overshooting(src, dst, after_start) }

                if after_start < len {
                    // Copy the remainder from the start of the buffer.
                    let src = (self.buf.as_ptr().cast_const(), self.tail);
                    let dst = (unsafe { dst.0.add(after_start) }, dst.1 - after_start);

                    unsafe { copy_bytes_overshooting(src, dst, len - after_start) }
                }
            }
        }

        self.tail = (self.tail + len) % self.cap;
    }

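    /// Alternative to [`Self::extend_from_within_unchecked`] that computes all
    /// source/destination splits up front and delegates the copies to a helper,
    /// avoiding the branch-heavy wrap-around handling.
    ///
    /// # Safety
    /// Same requirements as [`Self::extend_from_within_unchecked`].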
    #[allow(dead_code)]
    pub unsafe fn extend_from_within_unchecked_branchless(&mut self, start: usize, len: usize) {
        let ((s1_ptr, s1_len), (s2_ptr, s2_len)) = self.data_slice_parts();

        debug_assert!(len <= s1_len + s2_len, "{} > {} + {}", len, s1_len, s2_len);

        let start_in_s1 = usize::min(s1_len, start);
        let end_in_s1 = usize::min(s1_len, start + len);
        let m1_ptr = s1_ptr.add(start_in_s1);
        let m1_len = end_in_s1 - start_in_s1;

        debug_assert!(end_in_s1 <= s1_len);
        debug_assert!(start_in_s1 <= s1_len);

        let start_in_s2 = start.saturating_sub(s1_len);
        let end_in_s2 = start_in_s2 + (len - m1_len);
        let m2_ptr = s2_ptr.add(start_in_s2);
        let m2_len = end_in_s2 - start_in_s2;

        debug_assert!(start_in_s2 <= s2_len);
        debug_assert!(end_in_s2 <= s2_len);

        debug_assert_eq!(len, m1_len + m2_len);

        let ((f1_ptr, f1_len), (f2_ptr, f2_len)) = self.free_slice_parts();

        debug_assert!(f1_len + f2_len >= m1_len + m2_len);

        let m1_in_f1 = usize::min(m1_len, f1_len);
        let m1_in_f2 = m1_len - m1_in_f1;
        let m2_in_f1 = usize::min(f1_len - m1_in_f1, m2_len);
        let m2_in_f2 = m2_len - m2_in_f1;

        debug_assert_eq!(m1_len, m1_in_f1 + m1_in_f2);
        debug_assert_eq!(m2_len, m2_in_f1 + m2_in_f2);
        debug_assert!(f1_len >= m1_in_f1 + m2_in_f1);
        debug_assert!(f2_len >= m1_in_f2 + m2_in_f2);
        debug_assert_eq!(len, m1_in_f1 + m2_in_f1 + m1_in_f2 + m2_in_f2);

        debug_assert!(self.buf.as_ptr().add(self.cap) > f1_ptr.add(m1_in_f1 + m2_in_f1));
        debug_assert!(self.buf.as_ptr().add(self.cap) > f2_ptr.add(m1_in_f2 + m2_in_f2));

        debug_assert!((m1_in_f2 > 0) ^ (m2_in_f1 > 0) || (m1_in_f2 == 0 && m2_in_f1 == 0));

        copy_with_checks(
            m1_ptr, m2_ptr, f1_ptr, f2_ptr, m1_in_f1, m2_in_f1, m1_in_f2, m2_in_f2,
        );
        self.tail = (self.tail + len) % self.cap;
    }
}

impl Drop for RingBuffer {
    fn drop(&mut self) {
        if self.cap == 0 {
            return;
        }

        let current_layout = unsafe { Layout::array::<u8>(self.cap).unwrap_unchecked() };

        unsafe {
            dealloc(self.buf.as_ptr(), current_layout);
        }
    }
}

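/// Copies `copy_at_least` bytes from `src.0` to `dst.0` in block-sized chunks,
/// deliberately overshooting (copying a little more than requested) when both
/// regions are large enough, which allows wide unaligned loads and stores.
/// `src.1`/`dst.1` give how many bytes are valid to read/write behind each
/// pointer; the two regions must not overlap.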
#[inline(always)]
unsafe fn copy_bytes_overshooting(
    src: (*const u8, usize),
    dst: (*mut u8, usize),
    copy_at_least: usize,
) {
    // Use a wide copy unit when SIMD registers are available, otherwise usize.
    #[cfg(all(not(target_feature = "sse2"), not(target_feature = "neon")))]
    type CopyType = usize;

    #[cfg(target_feature = "neon")]
    type CopyType = u128;
    #[cfg(target_feature = "sse2")]
    type CopyType = u128;

    const COPY_AT_ONCE_SIZE: usize = core::mem::size_of::<CopyType>();
    let min_buffer_size = usize::min(src.1, dst.1);

    if min_buffer_size >= COPY_AT_ONCE_SIZE && copy_at_least <= COPY_AT_ONCE_SIZE {
        // A single (possibly overshooting) block copy is enough.
        dst.0
            .cast::<CopyType>()
            .write_unaligned(src.0.cast::<CopyType>().read_unaligned())
    } else {
        let copy_multiple = copy_at_least.next_multiple_of(COPY_AT_ONCE_SIZE);
        if min_buffer_size >= copy_multiple {
            // Copy whole blocks, overshooting `copy_at_least` by less than one block.
            let mut src_ptr = src.0.cast::<CopyType>();
            let src_ptr_end = src.0.add(copy_multiple).cast::<CopyType>();
            let mut dst_ptr = dst.0.cast::<CopyType>();

            while src_ptr < src_ptr_end {
                dst_ptr.write_unaligned(src_ptr.read_unaligned());
                src_ptr = src_ptr.add(1);
                dst_ptr = dst_ptr.add(1);
            }
        } else {
            // Not enough room to overshoot; fall back to an exact byte copy.
            dst.0.copy_from_nonoverlapping(src.0, copy_at_least);
        }
    }

    debug_assert_eq!(
        slice::from_raw_parts(src.0, copy_at_least),
        slice::from_raw_parts(dst.0, copy_at_least)
    );
}

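/// Copies the two source regions `m1`/`m2` into the two free regions `f1`/`f2`
/// according to the precomputed split sizes, without skipping empty copies.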
#[allow(dead_code)]
#[inline(always)]
#[allow(clippy::too_many_arguments)]
unsafe fn copy_without_checks(
    m1_ptr: *const u8,
    m2_ptr: *const u8,
    f1_ptr: *mut u8,
    f2_ptr: *mut u8,
    m1_in_f1: usize,
    m2_in_f1: usize,
    m1_in_f2: usize,
    m2_in_f2: usize,
) {
    f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
    f1_ptr
        .add(m1_in_f1)
        .copy_from_nonoverlapping(m2_ptr, m2_in_f1);

    f2_ptr.copy_from_nonoverlapping(m1_ptr.add(m1_in_f1), m1_in_f2);
    f2_ptr
        .add(m1_in_f2)
        .copy_from_nonoverlapping(m2_ptr.add(m2_in_f1), m2_in_f2);
}

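/// Like `copy_without_checks`, but skips copies whose length is zero.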
#[allow(dead_code)]
#[inline(always)]
#[allow(clippy::too_many_arguments)]
unsafe fn copy_with_checks(
    m1_ptr: *const u8,
    m2_ptr: *const u8,
    f1_ptr: *mut u8,
    f2_ptr: *mut u8,
    m1_in_f1: usize,
    m2_in_f1: usize,
    m1_in_f2: usize,
    m2_in_f2: usize,
) {
    if m1_in_f1 != 0 {
        f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
    }
    if m2_in_f1 != 0 {
        f1_ptr
            .add(m1_in_f1)
            .copy_from_nonoverlapping(m2_ptr, m2_in_f1);
    }

    if m1_in_f2 != 0 {
        f2_ptr.copy_from_nonoverlapping(m1_ptr.add(m1_in_f1), m1_in_f2);
    }
    if m2_in_f2 != 0 {
        f2_ptr
            .add(m1_in_f2)
            .copy_from_nonoverlapping(m2_ptr.add(m2_in_f1), m2_in_f2);
    }
}

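/// Variant that dispatches on a bitmask of which of the four copies are
/// non-empty, so each match arm performs only the copies it actually needs.
/// Combinations ruled out by the caller's invariants are marked unreachable.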
#[allow(dead_code)]
#[inline(always)]
#[allow(clippy::too_many_arguments)]
unsafe fn copy_with_nobranch_check(
    m1_ptr: *const u8,
    m2_ptr: *const u8,
    f1_ptr: *mut u8,
    f2_ptr: *mut u8,
    m1_in_f1: usize,
    m2_in_f1: usize,
    m1_in_f2: usize,
    m2_in_f2: usize,
) {
    let case = (m1_in_f1 > 0) as usize
        | (((m2_in_f1 > 0) as usize) << 1)
        | (((m1_in_f2 > 0) as usize) << 2)
        | (((m2_in_f2 > 0) as usize) << 3);

    match case {
        0 => {}

        1 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
        }
        2 => {
            f1_ptr.copy_from_nonoverlapping(m2_ptr, m2_in_f1);
        }
        4 => {
            f2_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f2);
        }
        8 => {
            f2_ptr.copy_from_nonoverlapping(m2_ptr, m2_in_f2);
        }

        3 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
            f1_ptr
                .add(m1_in_f1)
                .copy_from_nonoverlapping(m2_ptr, m2_in_f1);
        }
        5 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
            f2_ptr.copy_from_nonoverlapping(m1_ptr.add(m1_in_f1), m1_in_f2);
        }
        6 => core::hint::unreachable_unchecked(),
        7 => core::hint::unreachable_unchecked(),
        9 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
            f2_ptr.copy_from_nonoverlapping(m2_ptr, m2_in_f2);
        }
        10 => {
            f1_ptr.copy_from_nonoverlapping(m2_ptr, m2_in_f1);
            f2_ptr.copy_from_nonoverlapping(m2_ptr.add(m2_in_f1), m2_in_f2);
        }
        12 => {
            f2_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f2);
            f2_ptr
                .add(m1_in_f2)
                .copy_from_nonoverlapping(m2_ptr, m2_in_f2);
        }

        11 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
            f1_ptr
                .add(m1_in_f1)
                .copy_from_nonoverlapping(m2_ptr, m2_in_f1);
            f2_ptr.copy_from_nonoverlapping(m2_ptr.add(m2_in_f1), m2_in_f2);
        }
        13 => {
            f1_ptr.copy_from_nonoverlapping(m1_ptr, m1_in_f1);
            f2_ptr.copy_from_nonoverlapping(m1_ptr.add(m1_in_f1), m1_in_f2);
            f2_ptr
                .add(m1_in_f2)
                .copy_from_nonoverlapping(m2_ptr, m2_in_f2);
        }
        14 => core::hint::unreachable_unchecked(),
        15 => core::hint::unreachable_unchecked(),
        _ => core::hint::unreachable_unchecked(),
    }
}

#[cfg(test)]
mod tests {
    use super::RingBuffer;

    #[test]
    fn smoke() {
        let mut rb = RingBuffer::new();

        rb.reserve(15);
        assert_eq!(17, rb.cap);

        rb.extend(b"0123456789");
        assert_eq!(rb.len(), 10);
        assert_eq!(rb.as_slices().0, b"0123456789");
        assert_eq!(rb.as_slices().1, b"");

        rb.drop_first_n(5);
        assert_eq!(rb.len(), 5);
        assert_eq!(rb.as_slices().0, b"56789");
        assert_eq!(rb.as_slices().1, b"");

        rb.extend_from_within(2, 3);
        assert_eq!(rb.len(), 8);
        assert_eq!(rb.as_slices().0, b"56789789");
        assert_eq!(rb.as_slices().1, b"");

        rb.extend_from_within(0, 3);
        assert_eq!(rb.len(), 11);
        assert_eq!(rb.as_slices().0, b"56789789567");
        assert_eq!(rb.as_slices().1, b"");

        rb.extend_from_within(0, 2);
        assert_eq!(rb.len(), 13);
        assert_eq!(rb.as_slices().0, b"567897895675");
        assert_eq!(rb.as_slices().1, b"6");

        rb.drop_first_n(11);
        assert_eq!(rb.len(), 2);
        assert_eq!(rb.as_slices().0, b"5");
        assert_eq!(rb.as_slices().1, b"6");

        rb.extend(b"0123456789");
        assert_eq!(rb.len(), 12);
        assert_eq!(rb.as_slices().0, b"5");
        assert_eq!(rb.as_slices().1, b"60123456789");

        rb.drop_first_n(11);
        assert_eq!(rb.len(), 1);
        assert_eq!(rb.as_slices().0, b"9");
        assert_eq!(rb.as_slices().1, b"");

        rb.extend(b"0123456789");
        assert_eq!(rb.len(), 11);
        assert_eq!(rb.as_slices().0, b"9012345");
        assert_eq!(rb.as_slices().1, b"6789");
    }

    #[test]
    fn edge_cases() {
        let mut rb = RingBuffer::new();
        rb.reserve(16);
        assert_eq!(17, rb.cap);
        rb.extend(b"0123456789012345");
        assert_eq!(17, rb.cap);
        assert_eq!(16, rb.len());
        assert_eq!(0, rb.free());
        rb.drop_first_n(16);
        assert_eq!(0, rb.len());
        assert_eq!(16, rb.free());
        rb.extend(b"0123456789012345");
        assert_eq!(16, rb.len());
        assert_eq!(0, rb.free());
        assert_eq!(17, rb.cap);
        assert_eq!(1, rb.as_slices().0.len());
        assert_eq!(15, rb.as_slices().1.len());

        rb.clear();

        rb.extend(b"0123456789012345");
        rb.drop_first_n(8);
        rb.extend(b"67890123");
        assert_eq!(16, rb.len());
        assert_eq!(0, rb.free());
        assert_eq!(17, rb.cap);
        assert_eq!(9, rb.as_slices().0.len());
        assert_eq!(7, rb.as_slices().1.len());
        rb.reserve(1);
        assert_eq!(16, rb.len());
        assert_eq!(16, rb.free());
        assert_eq!(33, rb.cap);
        assert_eq!(16, rb.as_slices().0.len());
        assert_eq!(0, rb.as_slices().1.len());

        rb.clear();

        rb.extend(b"0123456789012345");
        rb.extend_from_within(0, 16);
        assert_eq!(32, rb.len());
        assert_eq!(0, rb.free());
        assert_eq!(33, rb.cap);
        assert_eq!(32, rb.as_slices().0.len());
        assert_eq!(0, rb.as_slices().1.len());

        let mut rb = RingBuffer::new();
        rb.reserve(8);
        rb.extend(b"01234567");
        rb.drop_first_n(5);
        rb.extend_from_within(0, 3);
        assert_eq!(4, rb.as_slices().0.len());
        assert_eq!(2, rb.as_slices().1.len());

        rb.drop_first_n(2);
        assert_eq!(2, rb.as_slices().0.len());
        assert_eq!(2, rb.as_slices().1.len());
        rb.extend_from_within(0, 4);
        assert_eq!(2, rb.as_slices().0.len());
        assert_eq!(6, rb.as_slices().1.len());

        rb.drop_first_n(2);
        assert_eq!(6, rb.as_slices().0.len());
        assert_eq!(0, rb.as_slices().1.len());
        rb.drop_first_n(2);
        assert_eq!(4, rb.as_slices().0.len());
        assert_eq!(0, rb.as_slices().1.len());
        rb.extend_from_within(0, 4);
        assert_eq!(7, rb.as_slices().0.len());
        assert_eq!(1, rb.as_slices().1.len());

        let mut rb = RingBuffer::new();
        rb.reserve(8);
        rb.extend(b"11111111");
        rb.drop_first_n(7);
        rb.extend(b"111");
        assert_eq!(2, rb.as_slices().0.len());
        assert_eq!(2, rb.as_slices().1.len());
        rb.extend_from_within(0, 4);
        assert_eq!(b"11", rb.as_slices().0);
        assert_eq!(b"111111", rb.as_slices().1);
    }
}