Alioth Code Coverage

packed.rs — 90.76% coverage

1// Copyright 2025 Google LLC
2//
3// Licensed under the Apache License, Version 2.0 (the "License");
4// you may not use this file except in compliance with the License.
5// You may obtain a copy of the License at
6//
7// https://www.apache.org/licenses/LICENSE-2.0
8//
9// Unless required by applicable law or agreed to in writing, software
10// distributed under the License is distributed on an "AS IS" BASIS,
11// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12// See the License for the specific language governing permissions and
13// limitations under the License.
14
15use std::marker::PhantomData;
16use std::sync::atomic::Ordering;
17
18use bitfield::bitfield;
19use zerocopy::{FromBytes, Immutable, IntoBytes};
20
21use crate::consts;
22use crate::mem::mapped::Ram;
23use crate::virtio::Result;
24use crate::virtio::queue::{DescChain, DescFlag, QueueReg, VirtQueue};
25
/// One descriptor in a packed virtqueue ring.
///
/// `#[repr(C, align(16))]` fixes the in-guest-memory layout; the field
/// order (addr, len, id, flag) matches the virtio packed-ring descriptor
/// format, and the zerocopy derives allow reading it straight out of RAM.
#[repr(C, align(16))]
#[derive(Debug, Clone, Default, FromBytes, Immutable, IntoBytes)]
struct Desc {
    // Guest physical address of the buffer.
    pub addr: u64,
    // Buffer length in bytes.
    pub len: u32,
    // Buffer id, echoed back to the driver when the chain is used.
    pub id: u16,
    // Raw flag bits; interpreted through `DescFlag`.
    pub flag: u16,
}
34
// A 16-bit packed-ring index: bits 14:0 hold the ring offset, bit 15
// holds the wrap counter that flips each time the index wraps past the
// queue size.
bitfield! {
    #[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
    pub struct WrappedIndex(u16);
    impl Debug;
    // Ring offset within the descriptor table.
    pub u16, offset, set_offset : 14, 0;
    // NOTE(review): "warp" is a typo for "wrap"; the generated setter name
    // is kept as-is because renaming would change this type's API.
    pub wrap_counter, set_warp_counter: 15;
}
42
43impl WrappedIndex {
44 const INIT: WrappedIndex = WrappedIndex(1 << 15);
45
46 fn wrapping_add(&self, delta: u16, size: u16) -> WrappedIndex {39x
47 let mut offset = self.offset() + delta;39x
48 let mut wrap_counter = self.wrap_counter();39x
49 if offset >= size {39x
50 offset -= size;6x
51 wrap_counter = !wrap_counter;6x
52 }33x
53 let mut r = WrappedIndex(offset);39x
54 r.set_warp_counter(wrap_counter);39x
55 r39x
56 }39x
57
58 fn wrapping_sub(&self, delta: u16, size: u16) -> WrappedIndex {36x
59 let mut offset = self.offset();36x
60 let mut wrap_counter = self.wrap_counter();36x
61 if offset >= delta {36x
62 offset -= delta;18x
63 } else {18x
64 offset += size - delta;18x
65 wrap_counter = !wrap_counter;18x
66 }18x
67 let mut r = WrappedIndex(offset);36x
68 r.set_warp_counter(wrap_counter);36x
69 r36x
70 }36x
71}
72
// Event-suppression flag values shared with the driver through the
// packed-queue event structures (`DescEvent::flag`).
consts! {
    struct EventFlag(u16) {
        // Events (notifications/interrupts) are enabled.
        ENABLE = 0;
        // Events are disabled.
        DISABLE = 1;
        // Fire an event only for the descriptor index in
        // `DescEvent::index` (event-idx mode).
        DESC = 2;
    }
}
80
/// Event-suppression structure living in guest memory: a wrapped
/// descriptor index plus a flag selecting the suppression mode.
struct DescEvent {
    // Descriptor index an event is requested at (used with `EventFlag::DESC`).
    index: WrappedIndex,
    // One of `EventFlag::{ENABLE, DISABLE, DESC}`.
    flag: EventFlag,
}
85
/// A virtio packed virtqueue viewed through pointers into guest RAM.
#[derive(Debug)]
pub struct PackedQueue<'m> {
    // Number of descriptors in the ring.
    size: u16,
    // Base of the descriptor ring in guest memory.
    desc: *mut Desc,
    // Whether event-idx style suppression was negotiated.
    enable_event_idx: bool,
    // Device event-suppression area (loaded from `reg.device`); written by
    // the device to enable/disable driver notifications.
    notification: *mut DescEvent,
    // Driver event-suppression area (loaded from `reg.driver`); read by the
    // device to decide whether to send an interrupt.
    interrupt: *mut DescEvent,
    // Ties the raw pointers above to the lifetime of the RAM mapping.
    _phantom: PhantomData<&'m ()>,
}
95
96impl<'m> PackedQueue<'m> {
97 pub fn new(reg: &QueueReg, ram: &'m Ram, event_idx: bool) -> Result<Option<PackedQueue<'m>>> {51x
98 if !reg.enabled.load(Ordering::Acquire) {51x
99 return Ok(None);3x
100 }48x
101 let size = reg.size.load(Ordering::Acquire);48x
102 let desc = reg.desc.load(Ordering::Acquire);48x
103 let notification: *mut DescEvent = ram.get_ptr(reg.device.load(Ordering::Acquire))?;48x
104 Ok(Some(PackedQueue {
105 size,48x
106 desc: ram.get_ptr(desc)?,48x
107 enable_event_idx: event_idx,48x
108 notification,48x
109 interrupt: ram.get_ptr(reg.driver.load(Ordering::Acquire))?,48x
110 _phantom: PhantomData,48x
111 }))
112 }51x
113
114 fn flag_is_avail(&self, flag: DescFlag, wrap_counter: bool) -> bool {12x
115 flag.contains(DescFlag::AVAIL) == wrap_counter12x
116 && flag.contains(DescFlag::USED) != wrap_counter9x
117 }12x
118
119 fn set_flag_used(&self, flag: &mut DescFlag, wrap_counter: bool) {6x
120 if wrap_counter {6x
121 flag.insert(DescFlag::USED | DescFlag::AVAIL);6x
122 } else {6x
123 flag.remove(DescFlag::USED | DescFlag::AVAIL);
124 }
125 }6x
126}
127
impl<'m> VirtQueue<'m> for PackedQueue<'m> {
    type Index = WrappedIndex;

    const INIT_INDEX: WrappedIndex = WrappedIndex::INIT;

    // Reads the flag word of the descriptor at `index` and checks it
    // against the wrap counter to decide availability.
    fn desc_avail(&self, index: WrappedIndex) -> bool {
        self.flag_is_avail(
            DescFlag::from_bits_retain(unsafe { &*self.desc.offset(index.offset() as isize) }.flag),
            index.wrap_counter(),
        )
    }

    // Collects the descriptor chain starting at `index` into readable and
    // writable iovec lists. Returns `Ok(None)` when no descriptor is
    // available at `index`; `delta` in the returned chain counts the ring
    // slots consumed (an indirect descriptor counts as one slot).
    fn get_avail(&self, index: Self::Index, ram: &'m Ram) -> Result<Option<DescChain<'m>>> {
        if !self.desc_avail(index) {
            return Ok(None);
        }
        let mut readable = Vec::new();
        let mut writeable = Vec::new();
        let mut delta = 0;
        let mut offset = index.offset();
        // Walk the chain until a descriptor without NEXT; its `id`
        // identifies the whole chain.
        let id = loop {
            let desc = unsafe { &*self.desc.offset(offset as isize) };
            let flag = DescFlag::from_bits_retain(desc.flag);
            if flag.contains(DescFlag::INDIRECT) {
                // The buffer itself is a table of descriptors; read each
                // entry out of guest RAM and classify by its WRITE bit.
                for i in 0..(desc.len as usize / size_of::<Desc>()) {
                    let addr = desc.addr + (i * size_of::<Desc>()) as u64;
                    let desc: Desc = ram.read_t(addr)?;
                    let flag = DescFlag::from_bits_retain(desc.flag);
                    if flag.contains(DescFlag::WRITE) {
                        writeable.push((desc.addr, desc.len as u64));
                    } else {
                        readable.push((desc.addr, desc.len as u64));
                    }
                }
            } else if flag.contains(DescFlag::WRITE) {
                writeable.push((desc.addr, desc.len as u64));
            } else {
                readable.push((desc.addr, desc.len as u64));
            }
            delta += 1;
            if !flag.contains(DescFlag::NEXT) {
                break desc.id;
            }
            // Advance to the next ring slot, wrapping at the queue size.
            offset = (offset + 1) % self.size;
        };
        Ok(Some(DescChain {
            id,
            delta,
            readable: ram.translate_iov(&readable)?,
            writable: ram.translate_iov_mut(&writeable)?,
        }))
    }

    // Writes a used element back into the ring slot at `index`: the chain
    // id and written length, then the flag word marking it used.
    // NOTE(review): id/len are plain (non-atomic) stores with no explicit
    // release barrier before the flag store — confirm the surrounding code
    // provides the ordering the driver relies on.
    fn set_used(&self, index: Self::Index, id: u16, len: u32) {
        let first = unsafe { &mut *self.desc.offset(index.offset() as isize) };
        first.id = id;
        first.len = len;
        let mut flag = DescFlag::from_bits_retain(first.flag);
        self.set_flag_used(&mut flag, index.wrap_counter());
        first.flag = flag.bits();
    }

    // Publishes the device's event-suppression flag so the driver knows
    // whether to send notifications.
    fn enable_notification(&self, enabled: bool) {
        unsafe {
            (&mut *self.notification).flag = if enabled {
                EventFlag::ENABLE
            } else {
                EventFlag::DISABLE
            };
        }
    }

    // Decides whether to raise an interrupt after `delta` descriptors were
    // used, ending at `index`. In event-idx mode the driver's requested
    // index must fall inside the just-used window [base, end).
    fn interrupt_enabled(&self, index: Self::Index, delta: u16) -> bool {
        let interrupt = unsafe { &*self.interrupt };
        if self.enable_event_idx && interrupt.flag == EventFlag::DESC {
            // Index of the first descriptor used in this batch.
            let prev_used_index = index.wrapping_sub(delta, self.size);
            let base = prev_used_index.offset();
            let end = base + delta;
            let mut offset = interrupt.index.offset();
            if interrupt.index.wrap_counter() != prev_used_index.wrap_counter() {
                // Requested index is one lap ahead; unwrap it so the range
                // comparison below works without modular arithmetic.
                offset += self.size;
            }
            base <= offset && offset < end
        } else {
            interrupt.flag == EventFlag::ENABLE
        }
    }

    // Ring-aware index addition, delegating to `WrappedIndex`.
    fn index_add(&self, index: Self::Index, delta: u16) -> Self::Index {
        index.wrapping_add(delta, self.size)
    }
}
220
// Unit tests live in a sibling file to keep this module focused.
#[cfg(test)]
#[path = "packed_test.rs"]
mod tests;
224