// Copyright 2024 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
15
use std::marker::PhantomData;16
use std::mem::size_of;17
use std::sync::atomic::{Ordering, fence};18
19
use alioth_macros::Layout;20
use zerocopy::{FromBytes, Immutable, IntoBytes};21
22
use crate::bitflags;23
use crate::mem::mapped::Ram;24
use crate::virtio::queue::{DescChain, DescFlag, QueueReg, VirtQueue};25
use crate::virtio::{Result, error};26
27
/// One entry of the split-virtqueue descriptor table, with the exact
/// in-guest-memory layout (16-byte aligned, C field order).
#[repr(C, align(16))]
#[derive(Debug, Clone, Default, FromBytes, Immutable, IntoBytes)]
pub struct Desc {
    /// Guest-physical address of the buffer (or of an indirect
    /// descriptor table when `DescFlag::INDIRECT` is set).
    pub addr: u64,
    /// Buffer length in bytes.
    pub len: u32,
    /// `DescFlag` bits (NEXT / WRITE / INDIRECT).
    pub flag: u16,
    /// Index of the chained descriptor; meaningful only when
    /// `DescFlag::NEXT` is set in `flag`.
    pub next: u16,
}
36
// Flags stored in `AvailHeader::flags` by the driver.
// NO_INTERRUPT: the driver asks the device to suppress used-buffer
// interrupts (advisory; only honored when event-idx is off).
bitflags! {
    pub struct AvailFlag(u16) {
        NO_INTERRUPT = 1 << 0;
    }
}
42
/// Header of the driver-owned available ring: `flags` followed by the
/// driver's free-running ring index; the ring entries follow in memory.
#[repr(C, align(2))]
#[derive(Debug, Clone, Layout, Immutable, FromBytes, IntoBytes)]
pub struct AvailHeader {
    flags: u16,
    idx: u16,
}
49
// Flags stored in `UsedHeader::flags` by the device.
// NO_NOTIFY: the device asks the driver to suppress queue
// notifications (see `set_flag_notification`).
bitflags! {
    pub struct UsedFlag(u16) {
        NO_NOTIFY = 1 << 0;
    }
}
55
/// Header of the device-owned used ring: `flags` followed by the
/// device's free-running used index; `UsedElem` entries follow.
#[repr(C, align(4))]
#[derive(Debug, Clone, Layout)]
pub struct UsedHeader {
    flags: u16,
    idx: u16,
}
62
/// One used-ring entry written by the device when a chain is consumed.
#[repr(C)]
#[derive(Debug, Clone, Default)]
pub struct UsedElem {
    /// Head descriptor id of the completed chain.
    id: u32,
    /// Total bytes the device wrote into the chain's writable buffers.
    len: u32,
}
69
/// A split virtqueue with all its rings resolved to host pointers.
///
/// Every raw pointer is obtained via `Ram::get_ptr` in
/// [`SplitQueue::new`]; the `'m` lifetime ties them to the borrowed
/// guest RAM so they cannot outlive the mapping.
#[derive(Debug)]
pub struct SplitQueue<'m> {
    /// Number of descriptors in the queue.
    size: u16,
    /// Header (flags + idx) of the driver-owned available ring.
    avail_hdr: *mut AvailHeader,
    /// First slot of the available ring (head descriptor ids).
    avail_ring: *mut u16,
    /// `used_event` word at the tail of the available ring;
    /// `Some` only when `event_idx` was enabled in `new`.
    used_event: Option<*mut u16>,
    /// Header (flags + idx) of the device-owned used ring.
    used_hdr: *mut UsedHeader,
    /// First slot of the used ring.
    used_ring: *mut UsedElem,
    /// `avail_event` word at the tail of the used ring;
    /// `Some` only when `event_idx` was enabled in `new`.
    avail_event: Option<*mut u16>,
    /// Start of the descriptor table.
    desc: *mut Desc,
    /// Marks the borrow of guest RAM backing the raw pointers.
    _phantom: PhantomData<&'m ()>,
}
82
impl SplitQueue<'_> {83
pub fn avail_index(&self) -> u16 {360x84
unsafe { &*self.avail_hdr }.idx360x85
}360x86
87
pub fn set_used_index(&self, val: u16) {63x88
unsafe { &mut *self.used_hdr }.idx = val;63x89
}63x90
91
pub fn used_event(&self) -> Option<u16> {60x92
self.used_event.map(|event| unsafe { *event })60x93
}60x94
95
pub fn set_avail_event(&self, op: impl FnOnce(&mut u16)) -> bool {135x96
match self.avail_event {135x97
Some(avail_event) => {3x98
op(unsafe { &mut *avail_event });3x99
true3x100
}101
None => false,132x102
}103
}135x104
105
pub fn set_flag_notification(&self, enabled: bool) {132x106
unsafe { &mut *self.used_hdr }.flags = (!enabled) as _;132x107
}132x108
109
pub fn flag_interrupt_enabled(&self) -> bool {57x110
unsafe { &*self.avail_hdr }.flags == 057x111
}57x112
113
fn get_desc(&self, id: u16) -> Result<&Desc> {117x114
if id < self.size {117x115
Ok(unsafe { &*self.desc.offset(id as isize) })117x116
} else {117
error::InvalidDescriptor { id }.fail()118
}119
}117x120
}121
122
impl<'m> SplitQueue<'m> {
    /// Builds a [`SplitQueue`] from the register file `reg`, resolving
    /// every ring component to a host pointer inside `ram`.
    ///
    /// Returns `Ok(None)` when the queue is not enabled, and an error
    /// if any guest-physical address fails to translate.
    pub fn new(reg: &QueueReg, ram: &'m Ram, event_idx: bool) -> Result<Option<SplitQueue<'m>>> {
        if !reg.enabled.load(Ordering::Acquire) {
            return Ok(None);
        }
        // Widen to u64 up front so the GPA arithmetic below cannot
        // truncate.
        let size = reg.size.load(Ordering::Acquire) as u64;
        let mut avail_event = None;
        let mut used_event = None;
        // Per the split-ring layout: `device` points at the used ring,
        // `driver` at the available ring.
        let used = reg.device.load(Ordering::Acquire);
        let avail = reg.driver.load(Ordering::Acquire);
        if event_idx {
            // With event-idx negotiated, `avail_event` is the extra u16
            // directly after the used ring's elements...
            let avail_event_gpa =
                used + size_of::<UsedHeader>() as u64 + size * size_of::<UsedElem>() as u64;
            avail_event = Some(ram.get_ptr(avail_event_gpa)?);
            // ...and `used_event` is the extra u16 after the available
            // ring's entries.
            let used_event_gpa =
                avail + size_of::<AvailHeader>() as u64 + size * size_of::<u16>() as u64;
            used_event = Some(ram.get_ptr(used_event_gpa)?);
        }
        let used_hdr = ram.get_ptr::<UsedHeader>(used)?;
        // The ring entries start immediately after each header.
        let avail_ring_gpa = avail + size_of::<AvailHeader>() as u64;
        let used_ring_gpa = used + size_of::<UsedHeader>() as u64;
        let desc = reg.desc.load(Ordering::Acquire);
        Ok(Some(SplitQueue {
            size: size as u16,
            avail_hdr: ram.get_ptr(avail)?,
            avail_ring: ram.get_ptr(avail_ring_gpa)?,
            used_event,
            used_hdr,
            used_ring: ram.get_ptr(used_ring_gpa)?,
            avail_event,
            desc: ram.get_ptr(desc)?,
            _phantom: PhantomData,
        }))
    }
}
158
impl<'m> VirtQueue<'m> for SplitQueue<'m> {
    type Index = u16;

    const INIT_INDEX: u16 = 0;

    /// Returns `true` when the driver has published descriptors that
    /// the device (currently at `index`) has not consumed yet.
    fn desc_avail(&self, index: u16) -> bool {
        let avail_index = self.avail_index();
        // Both indices are free-running u16 counters. The second arm
        // covers the driver's index having wrapped past u16::MAX while
        // the device's has not: since at most `size` chains can be in
        // flight, a difference of at least `!(size - 1)` (that is,
        // 0x1_0000 - size, assuming a power-of-two queue size —
        // NOTE(review): confirm sizes are validated elsewhere) still
        // means descriptors are pending.
        index < avail_index || index - avail_index >= !(self.size - 1)
    }

    /// Collects the descriptor chain that the driver made available at
    /// `index`, translating guest addresses into host iovecs.
    ///
    /// Returns `Ok(None)` when no new chain is available at `index`.
    fn get_avail(&self, index: Self::Index, ram: &'m Ram) -> Result<Option<DescChain<'m>>> {
        if !self.desc_avail(index) {
            return Ok(None);
        }
        let mut readable = Vec::new();
        let mut writable = Vec::new();
        // Ring slots wrap at the queue size (power of two).
        let wrapped_index = index & (self.size - 1);
        let head_id = unsafe { *self.avail_ring.offset(wrapped_index as isize) };
        let mut id = head_id;
        // Walk the chain, partitioning buffers into device-readable
        // and device-writable lists.
        loop {
            let desc = self.get_desc(id)?;
            let flag = DescFlag::from_bits_retain(desc.flag);
            if flag.contains(DescFlag::INDIRECT) {
                // An indirect descriptor points at a separate table of
                // descriptors in guest memory; walk that table instead
                // of pushing the descriptor itself. The inner `id`
                // shadows the outer one on purpose — it indexes the
                // indirect table only.
                let mut id = 0;
                loop {
                    let addr = desc.addr + id as u64 * size_of::<Desc>() as u64;
                    let desc: Desc = ram.read_t(addr)?;
                    let flag = DescFlag::from_bits_retain(desc.flag);
                    // Nested indirection is not permitted.
                    assert!(!flag.contains(DescFlag::INDIRECT));
                    if flag.contains(DescFlag::WRITE) {
                        writable.push((desc.addr, desc.len as u64));
                    } else {
                        readable.push((desc.addr, desc.len as u64));
                    }
                    if flag.contains(DescFlag::NEXT) {
                        id = desc.next;
                    } else {
                        break;
                    }
                }
            } else if flag.contains(DescFlag::WRITE) {
                writable.push((desc.addr, desc.len as u64));
            } else {
                readable.push((desc.addr, desc.len as u64));
            }
            if flag.contains(DescFlag::NEXT) {
                id = desc.next;
            } else {
                break;
            }
        }
        // Translate the gathered (gpa, len) pairs to host iovecs.
        let readable = ram.translate_iov(&readable)?;
        let writable = ram.translate_iov_mut(&writable)?;
        Ok(Some(DescChain {
            id: head_id,
            delta: 1,
            readable,
            writable,
        }))
    }

    /// Records a completed chain (`id`, bytes written `len`) in the
    /// used ring at `index`, then advances the used index.
    fn set_used(&self, index: Self::Index, id: u16, len: u32) {
        let used_elem = UsedElem { id: id as u32, len };
        let wrapped_index = index & (self.size - 1);
        unsafe { *self.used_ring.offset(wrapped_index as isize) = used_elem };
        // The element must be visible before the index that publishes
        // it.
        fence(Ordering::SeqCst);
        self.set_used_index(index.wrapping_add(1));
    }

    /// Enables or disables driver→device notifications, via the
    /// `avail_event` word when event-idx is on, otherwise via the
    /// used-ring flag.
    fn enable_notification(&self, enabled: bool) {
        if !self.set_avail_event(|event| {
            let mut avail_index = self.avail_index();
            if enabled {
                // Re-read the avail index after publishing the event
                // until it is stable, so a driver racing ahead of us
                // cannot publish a descriptor we would never be
                // notified about.
                loop {
                    *event = avail_index;
                    fence(Ordering::SeqCst);
                    let new_avail_index = self.avail_index();
                    if new_avail_index == avail_index {
                        break;
                    } else {
                        avail_index = new_avail_index;
                    }
                }
            } else {
                // Park the event one behind the current index so it is
                // never matched.
                *event = avail_index.wrapping_sub(1);
            }
        }) {
            // No event-idx: fall back to the NO_NOTIFY flag.
            self.set_flag_notification(enabled);
        }
    }

    /// Decides whether writing used entry `index` should raise an
    /// interrupt: with event-idx, only when the driver's `used_event`
    /// matches the just-published index; otherwise per the driver's
    /// NO_INTERRUPT flag.
    fn interrupt_enabled(&self, index: Self::Index, _: u16) -> bool {
        match self.used_event() {
            Some(used_event) => used_event == index.wrapping_sub(1),
            None => self.flag_interrupt_enabled(),
        }
    }

    /// Split rings consume exactly one ring slot per chain, regardless
    /// of chain length.
    fn index_add(&self, index: Self::Index, _: u16) -> Self::Index {
        index.wrapping_add(1)
    }
}
261
// Unit tests live in the sibling file `split_test.rs`.
#[cfg(test)]
#[path = "split_test.rs"]
mod tests;