
use core::iter::Rev;

use crate::arch::generic::memchr as generic;

/// Search for the first occurrence of a byte in a slice.
///
/// This returns the index corresponding to the first occurrence of `needle` in
/// `haystack`, or `None` if one is not found. If an index is returned, it is
/// guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().position(|&b| b == needle)`, this routine will attempt to
/// use highly optimized vector operations that can be an order of magnitude
/// faster (or more).
///
/// # Example
///
/// This shows how to find the first position of a byte in a byte string.
///
/// ```
/// use memchr::memchr;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memchr(b'k', haystack), Some(8));
/// ```
#[inline]
pub fn memchr(needle: u8, haystack: &[u8]) -> Option<usize> {
    // SAFETY: memchr_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memchr_raw(needle, start, end)
        })
    }
}

/// Search for the last occurrence of a byte in a slice.
///
/// This returns the index corresponding to the last occurrence of `needle` in
/// `haystack`, or `None` if one is not found. If an index is returned, it is
/// guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().rposition(|&b| b == needle)`, this routine will attempt to
/// use highly optimized vector operations that can be an order of magnitude
/// faster (or more).
///
/// # Example
///
/// This shows how to find the last position of a byte in a byte string.
///
/// ```
/// use memchr::memrchr;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memrchr(b'o', haystack), Some(17));
/// ```
#[inline]
pub fn memrchr(needle: u8, haystack: &[u8]) -> Option<usize> {
    // SAFETY: memrchr_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memrchr_raw(needle, start, end)
        })
    }
}

/// Search for the first occurrence of two possible bytes in a haystack.
///
/// This returns the index corresponding to the first occurrence of one of the
/// needle bytes in `haystack`, or `None` if one is not found. If an index is
/// returned, it is guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().position(|&b| b == needle1 || b == needle2)`, this routine
/// will attempt to use highly optimized vector operations that can be an order
/// of magnitude faster (or more).
///
/// # Example
///
/// This shows how to find the first position of one of two possible bytes in a
/// haystack.
///
/// ```
/// use memchr::memchr2;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memchr2(b'k', b'q', haystack), Some(4));
/// ```
#[inline]
pub fn memchr2(needle1: u8, needle2: u8, haystack: &[u8]) -> Option<usize> {
    // SAFETY: memchr2_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memchr2_raw(needle1, needle2, start, end)
        })
    }
}

/// Search for the last occurrence of two possible bytes in a haystack.
///
/// This returns the index corresponding to the last occurrence of one of the
/// needle bytes in `haystack`, or `None` if one is not found. If an index is
/// returned, it is guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().rposition(|&b| b == needle1 || b == needle2)`, this
/// routine will attempt to use highly optimized vector operations that can be
/// an order of magnitude faster (or more).
///
/// # Example
///
/// This shows how to find the last position of one of two possible bytes in a
/// haystack.
///
/// ```
/// use memchr::memrchr2;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memrchr2(b'k', b'o', haystack), Some(17));
/// ```
#[inline]
pub fn memrchr2(needle1: u8, needle2: u8, haystack: &[u8]) -> Option<usize> {
    // SAFETY: memrchr2_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memrchr2_raw(needle1, needle2, start, end)
        })
    }
}

/// Search for the first occurrence of three possible bytes in a haystack.
///
/// This returns the index corresponding to the first occurrence of one of the
/// needle bytes in `haystack`, or `None` if one is not found. If an index is
/// returned, it is guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().position(|&b| b == needle1 || b == needle2 || b == needle3)`,
/// this routine will attempt to use highly optimized vector operations that
/// can be an order of magnitude faster (or more).
///
/// # Example
///
/// This shows how to find the first position of one of three possible bytes in
/// a haystack.
///
/// ```
/// use memchr::memchr3;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memchr3(b'k', b'q', b'u', haystack), Some(4));
/// ```
#[inline]
pub fn memchr3(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    haystack: &[u8],
) -> Option<usize> {
    // SAFETY: memchr3_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memchr3_raw(needle1, needle2, needle3, start, end)
        })
    }
}

/// Search for the last occurrence of three possible bytes in a haystack.
///
/// This returns the index corresponding to the last occurrence of one of the
/// needle bytes in `haystack`, or `None` if one is not found. If an index is
/// returned, it is guaranteed to be less than `haystack.len()`.
///
/// While this is semantically the same as something like
/// `haystack.iter().rposition(|&b| b == needle1 || b == needle2 || b == needle3)`,
/// this routine will attempt to use highly optimized vector operations that
/// can be an order of magnitude faster (or more).
///
/// # Example
///
/// This shows how to find the last position of one of three possible bytes in
/// a haystack.
///
/// ```
/// use memchr::memrchr3;
///
/// let haystack = b"the quick brown fox";
/// assert_eq!(memrchr3(b'k', b'o', b'n', haystack), Some(17));
/// ```
#[inline]
pub fn memrchr3(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    haystack: &[u8],
) -> Option<usize> {
    // SAFETY: memrchr3_raw, when a match is found, always returns a valid
    // pointer between start and end.
    unsafe {
        generic::search_slice_with_raw(haystack, |start, end| {
            memrchr3_raw(needle1, needle2, needle3, start, end)
        })
    }
}

/// Returns an iterator over all occurrences of the needle in a haystack.
///
/// The iterator returned implements `DoubleEndedIterator`. This means it
/// can also be used to find occurrences in reverse order.
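///
/// # Example
///
/// An illustrative sketch: collect every position of the needle byte in the
/// haystack below.
///
/// ```
/// use memchr::memchr_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> = memchr_iter(b'o', haystack).collect();
/// assert_eq!(positions, vec![12, 17]);
/// ```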
#[inline]
pub fn memchr_iter<'h>(needle: u8, haystack: &'h [u8]) -> Memchr<'h> {
    Memchr::new(needle, haystack)
}

/// Returns an iterator over all occurrences of the needle in a haystack, in
/// reverse.
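///
/// # Example
///
/// An illustrative sketch: collect every position of the needle byte, from
/// the end of the haystack to the start.
///
/// ```
/// use memchr::memrchr_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> = memrchr_iter(b'o', haystack).collect();
/// assert_eq!(positions, vec![17, 12]);
/// ```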
#[inline]
pub fn memrchr_iter(needle: u8, haystack: &[u8]) -> Rev<Memchr<'_>> {
    Memchr::new(needle, haystack).rev()
}

/// Returns an iterator over all occurrences of the needles in a haystack.
///
/// The iterator returned implements `DoubleEndedIterator`. This means it
/// can also be used to find occurrences in reverse order.
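///
/// # Example
///
/// An illustrative sketch: collect every position at which either needle
/// byte occurs in the haystack below.
///
/// ```
/// use memchr::memchr2_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> = memchr2_iter(b'q', b'x', haystack).collect();
/// assert_eq!(positions, vec![4, 18]);
/// ```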
#[inline]
pub fn memchr2_iter<'h>(
    needle1: u8,
    needle2: u8,
    haystack: &'h [u8],
) -> Memchr2<'h> {
    Memchr2::new(needle1, needle2, haystack)
}

/// Returns an iterator over all occurrences of the needles in a haystack, in
/// reverse.
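///
/// # Example
///
/// An illustrative sketch: collect every position at which either needle
/// byte occurs, from the end of the haystack to the start.
///
/// ```
/// use memchr::memrchr2_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> = memrchr2_iter(b'q', b'x', haystack).collect();
/// assert_eq!(positions, vec![18, 4]);
/// ```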
#[inline]
pub fn memrchr2_iter(
    needle1: u8,
    needle2: u8,
    haystack: &[u8],
) -> Rev<Memchr2<'_>> {
    Memchr2::new(needle1, needle2, haystack).rev()
}

/// Returns an iterator over all occurrences of the needles in a haystack.
///
/// The iterator returned implements `DoubleEndedIterator`. This means it
/// can also be used to find occurrences in reverse order.
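///
/// # Example
///
/// An illustrative sketch: collect every position at which any of the three
/// needle bytes occurs in the haystack below.
///
/// ```
/// use memchr::memchr3_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> =
///     memchr3_iter(b'q', b'w', b'x', haystack).collect();
/// assert_eq!(positions, vec![4, 13, 18]);
/// ```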
#[inline]
pub fn memchr3_iter<'h>(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    haystack: &'h [u8],
) -> Memchr3<'h> {
    Memchr3::new(needle1, needle2, needle3, haystack)
}

/// Returns an iterator over all occurrences of the needles in a haystack, in
/// reverse.
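///
/// # Example
///
/// An illustrative sketch: collect every position at which any of the three
/// needle bytes occurs, from the end of the haystack to the start.
///
/// ```
/// use memchr::memrchr3_iter;
///
/// let haystack = b"the quick brown fox";
/// let positions: Vec<usize> =
///     memrchr3_iter(b'q', b'w', b'x', haystack).collect();
/// assert_eq!(positions, vec![18, 13, 4]);
/// ```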
#[inline]
pub fn memrchr3_iter(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    haystack: &[u8],
) -> Rev<Memchr3<'_>> {
    Memchr3::new(needle1, needle2, needle3, haystack).rev()
}

/// An iterator over all occurrences of a single byte in a haystack.
///
/// This iterator implements `DoubleEndedIterator`, which means it can also be
/// used to find occurrences in reverse order.
///
/// This iterator is created by the [`memchr_iter`] or [`memrchr_iter`]
/// functions. It can also be created with the [`Memchr::new`] method.
///
/// The lifetime parameter `'h` refers to the lifetime of the haystack being
/// searched.
#[derive(Clone, Debug)]
pub struct Memchr<'h> {
    needle1: u8,
    it: crate::arch::generic::memchr::Iter<'h>,
}

impl<'h> Memchr<'h> {
    /// Returns an iterator over all occurrences of the needle byte in the
    /// given haystack.
    ///
    /// The iterator returned implements `DoubleEndedIterator`. This means it
    /// can also be used to find occurrences in reverse order.
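    ///
    /// # Example
    ///
    /// An illustrative sketch of driving the iterator by hand:
    ///
    /// ```
    /// use memchr::Memchr;
    ///
    /// let mut it = Memchr::new(b'o', b"the quick brown fox");
    /// assert_eq!(it.next(), Some(12));
    /// assert_eq!(it.next(), Some(17));
    /// assert_eq!(it.next(), None);
    /// ```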
    #[inline]
    pub fn new(needle1: u8, haystack: &'h [u8]) -> Memchr<'h> {
        Memchr {
            needle1,
            it: crate::arch::generic::memchr::Iter::new(haystack),
        }
    }
}

impl<'h> Iterator for Memchr<'h> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next`.
        unsafe {
            // NOTE: I attempted to define an enum of previously created
            // searchers and then switch on those here instead of just
            // calling `memchr_raw` (or `One::new(..).find_raw(..)`). But
            // that turned out to have a fair bit of extra overhead when
            // searching very small haystacks.
            self.it.next(|s, e| memchr_raw(self.needle1, s, e))
        }
    }

    #[inline]
    fn count(self) -> usize {
        self.it.count(|s, e| {
            // SAFETY: We rely on our generic iterator to return valid start
            // and end pointers.
            unsafe { count_raw(self.needle1, s, e) }
        })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.it.size_hint()
    }
}

impl<'h> DoubleEndedIterator for Memchr<'h> {
    #[inline]
    fn next_back(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next_back`.
        unsafe { self.it.next_back(|s, e| memrchr_raw(self.needle1, s, e)) }
    }
}

impl<'h> core::iter::FusedIterator for Memchr<'h> {}

/// An iterator over all occurrences of two possible bytes in a haystack.
///
/// This iterator implements `DoubleEndedIterator`, which means it can also be
/// used to find occurrences in reverse order.
///
/// This iterator is created by the [`memchr2_iter`] or [`memrchr2_iter`]
/// functions. It can also be created with the [`Memchr2::new`] method.
///
/// The lifetime parameter `'h` refers to the lifetime of the haystack being
/// searched.
#[derive(Clone, Debug)]
pub struct Memchr2<'h> {
    needle1: u8,
    needle2: u8,
    it: crate::arch::generic::memchr::Iter<'h>,
}

impl<'h> Memchr2<'h> {
    /// Returns an iterator over all occurrences of the needle bytes in the
    /// given haystack.
    ///
    /// The iterator returned implements `DoubleEndedIterator`. This means it
    /// can also be used to find occurrences in reverse order.
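    ///
    /// # Example
    ///
    /// An illustrative sketch of driving the iterator by hand:
    ///
    /// ```
    /// use memchr::Memchr2;
    ///
    /// let mut it = Memchr2::new(b'q', b'x', b"the quick brown fox");
    /// assert_eq!(it.next(), Some(4));
    /// assert_eq!(it.next(), Some(18));
    /// assert_eq!(it.next(), None);
    /// ```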
    #[inline]
    pub fn new(needle1: u8, needle2: u8, haystack: &'h [u8]) -> Memchr2<'h> {
        Memchr2 {
            needle1,
            needle2,
            it: crate::arch::generic::memchr::Iter::new(haystack),
        }
    }
}

impl<'h> Iterator for Memchr2<'h> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next`.
        unsafe {
            self.it.next(|s, e| memchr2_raw(self.needle1, self.needle2, s, e))
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.it.size_hint()
    }
}

impl<'h> DoubleEndedIterator for Memchr2<'h> {
    #[inline]
    fn next_back(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next_back`.
        unsafe {
            self.it.next_back(|s, e| {
                memrchr2_raw(self.needle1, self.needle2, s, e)
            })
        }
    }
}

impl<'h> core::iter::FusedIterator for Memchr2<'h> {}

/// An iterator over all occurrences of three possible bytes in a haystack.
///
/// This iterator implements `DoubleEndedIterator`, which means it can also be
/// used to find occurrences in reverse order.
///
/// This iterator is created by the [`memchr3_iter`] or [`memrchr3_iter`]
/// functions. It can also be created with the [`Memchr3::new`] method.
///
/// The lifetime parameter `'h` refers to the lifetime of the haystack being
/// searched.
#[derive(Clone, Debug)]
pub struct Memchr3<'h> {
    needle1: u8,
    needle2: u8,
    needle3: u8,
    it: crate::arch::generic::memchr::Iter<'h>,
}

impl<'h> Memchr3<'h> {
    /// Returns an iterator over all occurrences of the needle bytes in the
    /// given haystack.
    ///
    /// The iterator returned implements `DoubleEndedIterator`. This means it
    /// can also be used to find occurrences in reverse order.
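    ///
    /// # Example
    ///
    /// An illustrative sketch of driving the iterator by hand:
    ///
    /// ```
    /// use memchr::Memchr3;
    ///
    /// let mut it = Memchr3::new(b'q', b'w', b'x', b"the quick brown fox");
    /// assert_eq!(it.next(), Some(4));
    /// assert_eq!(it.next(), Some(13));
    /// assert_eq!(it.next(), Some(18));
    /// assert_eq!(it.next(), None);
    /// ```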
    #[inline]
    pub fn new(
        needle1: u8,
        needle2: u8,
        needle3: u8,
        haystack: &'h [u8],
    ) -> Memchr3<'h> {
        Memchr3 {
            needle1,
            needle2,
            needle3,
            it: crate::arch::generic::memchr::Iter::new(haystack),
        }
    }
}

impl<'h> Iterator for Memchr3<'h> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next`.
        unsafe {
            self.it.next(|s, e| {
                memchr3_raw(self.needle1, self.needle2, self.needle3, s, e)
            })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.it.size_hint()
    }
}

impl<'h> DoubleEndedIterator for Memchr3<'h> {
    #[inline]
    fn next_back(&mut self) -> Option<usize> {
        // SAFETY: All of our implementations of memchr ensure that any
        // pointers returned will fall within the start and end bounds, and
        // this upholds the safety contract of `self.it.next_back`.
        unsafe {
            self.it.next_back(|s, e| {
                memrchr3_raw(self.needle1, self.needle2, self.needle3, s, e)
            })
        }
    }
}

impl<'h> core::iter::FusedIterator for Memchr3<'h> {}

/// memchr, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `One::find_raw`.
#[inline]
unsafe fn memchr_raw(
    needle: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        // x86_64 does CPU feature detection at runtime in order to use AVX2
        // instructions even when the `avx2` feature isn't enabled at compile
        // time. This function also handles using a fallback if neither AVX2
        // nor SSE2 is available (which is unusual).
        crate::arch::x86_64::memchr::memchr_raw(needle, start, end)
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memchr_raw(needle, start, end)
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memchr_raw(needle, start, end)
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::One::new(needle).find_raw(start, end)
    }
}

/// memrchr, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `One::rfind_raw`.
#[inline]
unsafe fn memrchr_raw(
    needle: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::memrchr_raw(needle, start, end)
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memrchr_raw(needle, start, end)
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memrchr_raw(needle, start, end)
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::One::new(needle).rfind_raw(start, end)
    }
}

/// memchr2, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `Two::find_raw`.
#[inline]
unsafe fn memchr2_raw(
    needle1: u8,
    needle2: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::memchr2_raw(needle1, needle2, start, end)
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memchr2_raw(needle1, needle2, start, end)
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memchr2_raw(needle1, needle2, start, end)
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::Two::new(needle1, needle2)
            .find_raw(start, end)
    }
}

/// memrchr2, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `Two::rfind_raw`.
#[inline]
unsafe fn memrchr2_raw(
    needle1: u8,
    needle2: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::memrchr2_raw(needle1, needle2, start, end)
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memrchr2_raw(needle1, needle2, start, end)
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memrchr2_raw(
            needle1, needle2, start, end,
        )
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::Two::new(needle1, needle2)
            .rfind_raw(start, end)
    }
}

/// memchr3, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `Three::find_raw`.
#[inline]
unsafe fn memchr3_raw(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::memchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::Three::new(needle1, needle2, needle3)
            .find_raw(start, end)
    }
}

/// memrchr3, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `Three::rfind_raw`.
#[inline]
unsafe fn memrchr3_raw(
    needle1: u8,
    needle2: u8,
    needle3: u8,
    start: *const u8,
    end: *const u8,
) -> Option<*const u8> {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::memrchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::memrchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::memrchr3_raw(
            needle1, needle2, needle3, start, end,
        )
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::Three::new(needle1, needle2, needle3)
            .rfind_raw(start, end)
    }
}

/// Count all matching bytes, but using raw pointers to represent the haystack.
///
/// # Safety
///
/// Pointers must be valid. See `One::count_raw`.
#[inline]
unsafe fn count_raw(needle: u8, start: *const u8, end: *const u8) -> usize {
    #[cfg(target_arch = "x86_64")]
    {
        crate::arch::x86_64::memchr::count_raw(needle, start, end)
    }
    #[cfg(all(target_arch = "wasm32", target_feature = "simd128"))]
    {
        crate::arch::wasm32::memchr::count_raw(needle, start, end)
    }
    #[cfg(target_arch = "aarch64")]
    {
        crate::arch::aarch64::memchr::count_raw(needle, start, end)
    }
    #[cfg(not(any(
        target_arch = "x86_64",
        all(target_arch = "wasm32", target_feature = "simd128"),
        target_arch = "aarch64"
    )))]
    {
        crate::arch::all::memchr::One::new(needle).count_raw(start, end)
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn forward1_iter() {
        crate::tests::memchr::Runner::new(1).forward_iter(
            |haystack, needles| {
                Some(memchr_iter(needles[0], haystack).collect())
            },
        )
    }

    #[test]
    fn forward1_oneshot() {
        crate::tests::memchr::Runner::new(1).forward_oneshot(
            |haystack, needles| Some(memchr(needles[0], haystack)),
        )
    }

    #[test]
    fn reverse1_iter() {
        crate::tests::memchr::Runner::new(1).reverse_iter(
            |haystack, needles| {
                Some(memrchr_iter(needles[0], haystack).collect())
            },
        )
    }

    #[test]
    fn reverse1_oneshot() {
        crate::tests::memchr::Runner::new(1).reverse_oneshot(
            |haystack, needles| Some(memrchr(needles[0], haystack)),
        )
    }

    #[test]
    fn count1_iter() {
        crate::tests::memchr::Runner::new(1).count_iter(|haystack, needles| {
            Some(memchr_iter(needles[0], haystack).count())
        })
    }

    #[test]
    fn forward2_iter() {
        crate::tests::memchr::Runner::new(2).forward_iter(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                Some(memchr2_iter(n1, n2, haystack).collect())
            },
        )
    }

    #[test]
    fn forward2_oneshot() {
        crate::tests::memchr::Runner::new(2).forward_oneshot(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                Some(memchr2(n1, n2, haystack))
            },
        )
    }

    #[test]
    fn reverse2_iter() {
        crate::tests::memchr::Runner::new(2).reverse_iter(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                Some(memrchr2_iter(n1, n2, haystack).collect())
            },
        )
    }

    #[test]
    fn reverse2_oneshot() {
        crate::tests::memchr::Runner::new(2).reverse_oneshot(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                Some(memrchr2(n1, n2, haystack))
            },
        )
    }

    #[test]
    fn forward3_iter() {
        crate::tests::memchr::Runner::new(3).forward_iter(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                let n3 = needles.get(2).copied()?;
                Some(memchr3_iter(n1, n2, n3, haystack).collect())
            },
        )
    }

    #[test]
    fn forward3_oneshot() {
        crate::tests::memchr::Runner::new(3).forward_oneshot(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                let n3 = needles.get(2).copied()?;
                Some(memchr3(n1, n2, n3, haystack))
            },
        )
    }

    #[test]
    fn reverse3_iter() {
        crate::tests::memchr::Runner::new(3).reverse_iter(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                let n3 = needles.get(2).copied()?;
                Some(memrchr3_iter(n1, n2, n3, haystack).collect())
            },
        )
    }

    #[test]
    fn reverse3_oneshot() {
        crate::tests::memchr::Runner::new(3).reverse_oneshot(
            |haystack, needles| {
                let n1 = needles.get(0).copied()?;
                let n2 = needles.get(1).copied()?;
                let n3 = needles.get(2).copied()?;
                Some(memrchr3(n1, n2, n3, haystack))
            },
        )
    }

    // Prior to memchr 2.6, the memchr iterators implemented both Send and
    // Sync. But in memchr 2.6, the iterators changed to use raw pointers
    // internally and I didn't add explicit Send/Sync impls. This ended up
    // regressing the API. This test ensures we don't do that again.
    //
    // See: https://github.com/BurntSushi/memchr/issues/133
    #[test]
    fn sync_regression() {
        use core::panic::{RefUnwindSafe, UnwindSafe};

        fn assert_send_sync<T: Send + Sync + UnwindSafe + RefUnwindSafe>() {}
        assert_send_sync::<Memchr>();
        assert_send_sync::<Memchr2>();
        assert_send_sync::<Memchr3>()
    }
}