1use crate::iter::Bytes;
2
3#[inline]
4#[target_feature(enable = "avx2")]
5pub unsafe fn match_uri_vectored(bytes: &mut Bytes) {
6 while bytes.as_ref().len() >= 32 {
7
8 let advance = match_url_char_32_avx(bytes.as_ref());
9
10 bytes.advance(advance);
11
12 if advance != 32 {
13 return;
14 }
15 }
16 super::swar::match_uri_vectored(bytes)
18}
19
#[inline(always)]
#[allow(non_snake_case, overflowing_literals)]
#[allow(unused)]
/// Scans the first 32 bytes of `buf` with AVX2 and returns how many leading
/// bytes are valid URI characters (0 to 32).
///
/// A byte is accepted when it is unsigned `>= 0x21` and not `0x7f` (DEL);
/// note the unsigned compare means bytes `0x80..=0xff` are also accepted.
/// The companion test checks this against `crate::URI_MAP`.
///
/// # Safety
/// Caller must ensure `buf.len() >= 32` (only debug-asserted) and that the
/// CPU supports AVX2.
unsafe fn match_url_char_32_avx(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 32);

    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

    // Broadcast constants: DEL = 0x7f, LOW = 0x21 ('!', the lowest allowed byte).
    let DEL: __m256i = _mm256_set1_epi8(0x7f);
    let LOW: __m256i = _mm256_set1_epi8(0x21);

    // Unaligned 32-byte load.
    let dat = _mm256_lddqu_si256(ptr as *const _);

    // low: lanes where max(dat, LOW) == dat, i.e. dat >= 0x21 (unsigned compare).
    let low = _mm256_cmpeq_epi8(_mm256_max_epu8(dat, LOW), dat);
    // del: lanes equal to 0x7f.
    let del = _mm256_cmpeq_epi8(dat, DEL);

    // bit = low & !del: lanes holding an allowed byte are all-ones.
    let bit = _mm256_andnot_si256(del, low);
    // Collapse each lane's MSB into one bit; bit i set => byte i allowed.
    let res = _mm256_movemask_epi8(bit) as u32;

    // Count of consecutive allowed bytes from the start; 32 when all match
    // (!0xffff_ffff == 0, whose trailing_zeros() is 32).
    (!res).trailing_zeros() as usize
}
82
83#[target_feature(enable = "avx2")]
84pub unsafe fn match_header_value_vectored(bytes: &mut Bytes) {
85 while bytes.as_ref().len() >= 32 {
86 let advance = match_header_value_char_32_avx(bytes.as_ref());
87 bytes.advance(advance);
88
89 if advance != 32 {
90 return;
91 }
92 }
93 super::swar::match_header_value_vectored(bytes)
95}
96
#[inline(always)]
#[allow(non_snake_case)]
#[allow(unused)]
/// Scans the first 32 bytes of `buf` with AVX2 and returns how many leading
/// bytes are valid header-value characters (0 to 32).
///
/// A byte is accepted when it is HTAB (0x09), or unsigned `>= 0x20` and not
/// `0x7f` (DEL); the unsigned compare also accepts `0x80..=0xff` (obs-text).
/// The companion test checks this against `crate::HEADER_VALUE_MAP`.
///
/// # Safety
/// Caller must ensure `buf.len() >= 32` (only debug-asserted) and that the
/// CPU supports AVX2.
unsafe fn match_header_value_char_32_avx(buf: &[u8]) -> usize {
    debug_assert!(buf.len() >= 32);

    #[cfg(target_arch = "x86")]
    use core::arch::x86::*;
    #[cfg(target_arch = "x86_64")]
    use core::arch::x86_64::*;

    let ptr = buf.as_ptr();

    // Broadcast constants: TAB = 0x09, DEL = 0x7f, LOW = 0x20 (space).
    let TAB: __m256i = _mm256_set1_epi8(0x09);
    let DEL: __m256i = _mm256_set1_epi8(0x7f);
    let LOW: __m256i = _mm256_set1_epi8(0x20);

    // Unaligned 32-byte load.
    let dat = _mm256_lddqu_si256(ptr as *const _);

    // low: lanes where max(dat, LOW) == dat, i.e. dat >= 0x20 (unsigned compare).
    let low = _mm256_cmpeq_epi8(_mm256_max_epu8(dat, LOW), dat);
    // tab: lanes equal to HTAB, allowed even though it is below 0x20.
    let tab = _mm256_cmpeq_epi8(dat, TAB);
    // del: lanes equal to 0x7f.
    let del = _mm256_cmpeq_epi8(dat, DEL);

    // bit = (low | tab) & !del: all-ones lanes mark allowed bytes.
    let bit = _mm256_andnot_si256(del, _mm256_or_si256(low, tab));
    // Collapse each lane's MSB into one bit; bit i set => byte i allowed.
    let res = _mm256_movemask_epi8(bit) as u32;

    // Length of the run of allowed bytes from the start; 32 when all match.
    (!res).trailing_zeros() as usize
}
146
#[test]
fn avx2_code_matches_uri_chars_table() {
    // Skip entirely on machines without AVX2.
    if !is_x86_feature_detected!("avx2") {
        return;
    }

    #[allow(clippy::undocumented_unsafe_blocks)]
    unsafe {
        // Sanity-check one known-allowed byte first.
        assert!(byte_is_allowed(b'_', match_uri_vectored));

        // The SIMD path must agree with the lookup table for every byte value.
        for (b, &allowed) in crate::URI_MAP.iter().enumerate() {
            let got = byte_is_allowed(b as u8, match_uri_vectored);
            assert_eq!(
                got, allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}
165
#[test]
fn avx2_code_matches_header_value_chars_table() {
    // Skip entirely on machines without AVX2.
    if !is_x86_feature_detected!("avx2") {
        return;
    }

    #[allow(clippy::undocumented_unsafe_blocks)]
    unsafe {
        // Sanity-check one known-allowed byte first.
        assert!(byte_is_allowed(b'_', match_header_value_vectored));

        // The SIMD path must agree with the lookup table for every byte value.
        for (b, &allowed) in crate::HEADER_VALUE_MAP.iter().enumerate() {
            let got = byte_is_allowed(b as u8, match_header_value_vectored);
            assert_eq!(
                got, allowed,
                "byte_is_allowed({:?}) should be {:?}", b, allowed,
            );
        }
    }
}
184
#[cfg(test)]
/// Test helper: runs matcher `f` over a 32-byte buffer of b'_' with `byte`
/// planted at index 26, and reports whether `byte` was accepted.
///
/// If the matcher consumes all 32 bytes, `byte` is allowed; if it stops at
/// position 26 (the probe), it is not. Any other stop position means the
/// matcher rejected an underscore, which would be a bug.
unsafe fn byte_is_allowed(byte: u8, f: unsafe fn(bytes: &mut Bytes<'_>)) -> bool {
    // b'_' is allowed by both matchers, so only the probe byte can stop the scan.
    let mut slice = [b'_'; 32];
    slice[26] = byte;

    let mut bytes = Bytes::new(&slice);
    f(&mut bytes);

    match bytes.pos() {
        32 => true,
        26 => false,
        _ => unreachable!(),
    }
}