Alioth Code Coverage

split.rs — 79.29% line coverage

// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use std::marker::PhantomData;
use std::mem::size_of;
use std::sync::atomic::{Ordering, fence};

use alioth_macros::Layout;
use zerocopy::{FromBytes, Immutable, IntoBytes};

use crate::bitflags;
use crate::mem::mapped::Ram;
use crate::virtio::queue::{DescChain, DescFlag, QueueReg, VirtQueue};
use crate::virtio::{Result, error};
26
/// One entry of the split-virtqueue descriptor table.
///
/// `#[repr(C, align(16))]` fixes the guest-visible layout; the zerocopy
/// derives allow reading a `Desc` straight out of guest memory (see the
/// indirect-descriptor path in `get_avail`).
#[repr(C, align(16))]
#[derive(Debug, Clone, Default, FromBytes, Immutable, IntoBytes)]
pub struct Desc {
    // Guest-physical address of the buffer.
    pub addr: u64,
    // Length of the buffer in bytes.
    pub len: u32,
    // Descriptor flags; interpreted via `DescFlag` (NEXT / WRITE / INDIRECT).
    pub flag: u16,
    // Index of the next descriptor in the chain; meaningful only when the
    // NEXT flag is set.
    pub next: u16,
}
35
bitflags! {
    /// Flags the driver stores in the available-ring header (`AvailHeader::flags`).
    pub struct AvailFlag(u16) {
        // Driver hint: the device may skip sending an interrupt after
        // consuming a buffer (checked via `flag_interrupt_enabled`).
        NO_INTERRUPT = 1 << 0;
    }
}
41
/// Header of the driver-owned available ring: flags followed by the
/// free-running available index. Read-only from the device side.
#[repr(C, align(2))]
#[derive(Debug, Clone, Layout, Immutable, FromBytes, IntoBytes)]
pub struct AvailHeader {
    // `AvailFlag` bits written by the driver.
    flags: u16,
    // Driver's next-to-fill ring index (wraps as a u16, not modulo queue size).
    idx: u16,
}
48
bitflags! {
    /// Flags the device stores in the used-ring header (`UsedHeader::flags`).
    pub struct UsedFlag(u16) {
        // Device hint: the driver may skip notifying the device
        // (written by `set_flag_notification`).
        NO_NOTIFY = 1 << 0;
    }
}
54
/// Header of the device-owned used ring: flags followed by the free-running
/// used index. Written by the device (this code), read by the driver — hence
/// no `FromBytes`/`IntoBytes` derives are needed here.
#[repr(C, align(4))]
#[derive(Debug, Clone, Layout)]
pub struct UsedHeader {
    // `UsedFlag` bits written by the device.
    flags: u16,
    // Device's next-to-fill ring index (wraps as a u16, not modulo queue size).
    idx: u16,
}
61
/// One entry of the used ring, reporting a completed descriptor chain.
#[repr(C)]
#[derive(Debug, Clone, Default)]
pub struct UsedElem {
    // Head descriptor id of the completed chain (u16 widened to u32 per layout).
    id: u32,
    // Total number of bytes the device wrote into the chain.
    len: u32,
}
68
/// A device-side view of one virtio split virtqueue resident in guest RAM.
///
/// All pointers are host-virtual addresses translated once in `new`;
/// `_phantom` ties them to the lifetime `'m` of the guest memory mapping so
/// they cannot outlive it.
#[derive(Debug)]
pub struct SplitQueue<'m> {
    // Number of descriptors; ring offsets are masked with `size - 1`, so a
    // power-of-two size is assumed (as the virtio spec requires).
    size: u16,
    // Header of the driver-owned available ring.
    avail_hdr: *mut AvailHeader,
    // First entry of the available ring (array of `size` descriptor ids).
    avail_ring: *mut u16,
    // Driver-written used-event slot trailing the available ring; `Some`
    // only when the event-idx feature was negotiated (see `new`).
    used_event: Option<*mut u16>,
    // Header of the device-owned used ring.
    used_hdr: *mut UsedHeader,
    // First entry of the used ring (array of `size` `UsedElem`s).
    used_ring: *mut UsedElem,
    // Device-written avail-event slot trailing the used ring; `Some` only
    // when the event-idx feature was negotiated.
    avail_event: Option<*mut u16>,
    // Base of the descriptor table.
    desc: *mut Desc,
    // Keeps the borrow of guest memory alive for the pointers above.
    _phantom: PhantomData<&'m ()>,
}
81
82impl SplitQueue<'_> {
83 pub fn avail_index(&self) -> u16 {360x
84 unsafe { &*self.avail_hdr }.idx360x
85 }360x
86
87 pub fn set_used_index(&self, val: u16) {63x
88 unsafe { &mut *self.used_hdr }.idx = val;63x
89 }63x
90
91 pub fn used_event(&self) -> Option<u16> {60x
92 self.used_event.map(|event| unsafe { *event })60x
93 }60x
94
95 pub fn set_avail_event(&self, op: impl FnOnce(&mut u16)) -> bool {135x
96 match self.avail_event {135x
97 Some(avail_event) => {3x
98 op(unsafe { &mut *avail_event });3x
99 true3x
100 }
101 None => false,132x
102 }
103 }135x
104
105 pub fn set_flag_notification(&self, enabled: bool) {132x
106 unsafe { &mut *self.used_hdr }.flags = (!enabled) as _;132x
107 }132x
108
109 pub fn flag_interrupt_enabled(&self) -> bool {57x
110 unsafe { &*self.avail_hdr }.flags == 057x
111 }57x
112
113 fn get_desc(&self, id: u16) -> Result<&Desc> {117x
114 if id < self.size {117x
115 Ok(unsafe { &*self.desc.offset(id as isize) })117x
116 } else {
117 error::InvalidDescriptor { id }.fail()
118 }
119 }117x
120}
121
impl<'m> SplitQueue<'m> {
    /// Builds a `SplitQueue` view over guest RAM from the queue registers.
    ///
    /// Returns `Ok(None)` when the queue is not enabled. When `event_idx`
    /// is true (the event-idx feature was negotiated), also resolves the
    /// avail-event slot (right after the used ring) and the used-event slot
    /// (right after the available ring).
    ///
    /// # Errors
    /// Fails when any ring address cannot be translated by `ram`.
    pub fn new(reg: &QueueReg, ram: &'m Ram, event_idx: bool) -> Result<Option<SplitQueue<'m>>> {
        if !reg.enabled.load(Ordering::Acquire) {
            return Ok(None);
        }
        let size = reg.size.load(Ordering::Acquire) as u64;
        let mut avail_event = None;
        let mut used_event = None;
        // Guest-physical bases: `device` is the used ring, `driver` the
        // available ring.
        let used = reg.device.load(Ordering::Acquire);
        let avail = reg.driver.load(Ordering::Acquire);
        if event_idx {
            // avail_event trails the used ring: header + size used elements.
            let avail_event_gpa =
                used + size_of::<UsedHeader>() as u64 + size * size_of::<UsedElem>() as u64;
            avail_event = Some(ram.get_ptr(avail_event_gpa)?);
            // used_event trails the available ring: header + size u16 entries.
            let used_event_gpa =
                avail + size_of::<AvailHeader>() as u64 + size * size_of::<u16>() as u64;
            used_event = Some(ram.get_ptr(used_event_gpa)?);
        }
        let used_hdr = ram.get_ptr::<UsedHeader>(used)?;
        // The ring arrays start immediately after their headers.
        let avail_ring_gpa = avail + size_of::<AvailHeader>() as u64;
        let used_ring_gpa = used + size_of::<UsedHeader>() as u64;
        let desc = reg.desc.load(Ordering::Acquire);
        Ok(Some(SplitQueue {
            size: size as u16,
            avail_hdr: ram.get_ptr(avail)?,
            avail_ring: ram.get_ptr(avail_ring_gpa)?,
            used_event,
            used_hdr,
            used_ring: ram.get_ptr(used_ring_gpa)?,
            avail_event,
            desc: ram.get_ptr(desc)?,
            _phantom: PhantomData,
        }))
    }
}
157
impl<'m> VirtQueue<'m> for SplitQueue<'m> {
    type Index = u16;

    const INIT_INDEX: u16 = 0;

    /// Returns true when the driver has made the chain at `index` available.
    ///
    /// Both counters are free-running u16s that wrap. `index` is behind the
    /// driver's index either when it is numerically smaller, or when the
    /// driver's counter has wrapped around u16 while `index` has not — the
    /// distance then is at least `!(size - 1)` (= 0x10000 - size).
    fn desc_avail(&self, index: u16) -> bool {
        let avail_index = self.avail_index();
        index < avail_index || index - avail_index >= !(self.size - 1)
    }

    /// Collects the descriptor chain made available at `index` and
    /// translates it into host iovecs.
    ///
    /// Handles direct chains (linked via `next`) and one level of indirect
    /// tables (a descriptor with INDIRECT set whose buffer is an array of
    /// `Desc`). Returns `Ok(None)` when no chain is available yet.
    fn get_avail(&self, index: Self::Index, ram: &'m Ram) -> Result<Option<DescChain<'m>>> {
        if !self.desc_avail(index) {
            return Ok(None);
        }
        let mut readable = Vec::new();
        let mut writable = Vec::new();
        // Ring slots are addressed modulo the queue size.
        let wrapped_index = index & (self.size - 1);
        let head_id = unsafe { *self.avail_ring.offset(wrapped_index as isize) };
        let mut id = head_id;
        loop {
            let desc = self.get_desc(id)?;
            let flag = DescFlag::from_bits_retain(desc.flag);
            if flag.contains(DescFlag::INDIRECT) {
                // Walk the indirect table at desc.addr; this `id` shadows the
                // outer chain id and indexes entries within the table.
                let mut id = 0;
                loop {
                    let addr = desc.addr + id as u64 * size_of::<Desc>() as u64;
                    let desc: Desc = ram.read_t(addr)?;
                    let flag = DescFlag::from_bits_retain(desc.flag);
                    // The virtio spec forbids nested indirection.
                    assert!(!flag.contains(DescFlag::INDIRECT));
                    if flag.contains(DescFlag::WRITE) {
                        writable.push((desc.addr, desc.len as u64));
                    } else {
                        readable.push((desc.addr, desc.len as u64));
                    }
                    if flag.contains(DescFlag::NEXT) {
                        id = desc.next;
                    } else {
                        break;
                    }
                }
            } else if flag.contains(DescFlag::WRITE) {
                writable.push((desc.addr, desc.len as u64));
            } else {
                readable.push((desc.addr, desc.len as u64));
            }
            if flag.contains(DescFlag::NEXT) {
                id = desc.next;
            } else {
                break;
            }
        }
        // Resolve guest-physical (addr, len) pairs into host iovecs.
        let readable = ram.translate_iov(&readable)?;
        let writable = ram.translate_iov_mut(&writable)?;
        Ok(Some(DescChain {
            id: head_id,
            delta: 1,
            readable,
            writable,
        }))
    }

    /// Publishes a completed chain to the used ring.
    ///
    /// The fence orders the element write before the index update so the
    /// driver never observes the new index paired with a stale element.
    fn set_used(&self, index: Self::Index, id: u16, len: u32) {
        let used_elem = UsedElem { id: id as u32, len };
        let wrapped_index = index & (self.size - 1);
        unsafe { *self.used_ring.offset(wrapped_index as isize) = used_elem };
        fence(Ordering::SeqCst);
        self.set_used_index(index.wrapping_add(1));
    }

    /// Enables or disables driver-to-device notifications.
    ///
    /// With event-idx negotiated, writes the avail-event slot: when
    /// enabling, it is set to the current available index, re-reading after
    /// a fence until stable to close the race with a concurrently advancing
    /// driver; when disabling, it is parked one behind the current index.
    /// Without event-idx, falls back to the NO_NOTIFY used-ring flag.
    fn enable_notification(&self, enabled: bool) {
        if !self.set_avail_event(|event| {
            let mut avail_index = self.avail_index();
            if enabled {
                loop {
                    *event = avail_index;
                    fence(Ordering::SeqCst);
                    let new_avail_index = self.avail_index();
                    if new_avail_index == avail_index {
                        break;
                    } else {
                        avail_index = new_avail_index;
                    }
                }
            } else {
                // Any value the driver's index has already passed keeps
                // notifications suppressed.
                *event = avail_index.wrapping_sub(1);
            }
        }) {
            self.set_flag_notification(enabled);
        }
    }

    /// Decides whether the driver wants an interrupt after the chain
    /// consumed at `index`: with event-idx, interrupt exactly when the
    /// driver's used_event equals the just-used slot; otherwise honor the
    /// available-ring NO_INTERRUPT flag.
    fn interrupt_enabled(&self, index: Self::Index, _: u16) -> bool {
        match self.used_event() {
            Some(used_event) => used_event == index.wrapping_sub(1),
            None => self.flag_interrupt_enabled(),
        }
    }

    /// A split-queue index always advances by one chain, ignoring the
    /// per-chain element count.
    fn index_add(&self, index: Self::Index, _: u16) -> Self::Index {
        index.wrapping_add(1)
    }
}
260
// Unit tests live in a sibling file, mapped in via `#[path]`.
#[cfg(test)]
#[path = "split_test.rs"]
mod tests;
264