Rust soundness fixes #7518

Merged
merged 15 commits into from Sep 29, 2022
29 changes: 15 additions & 14 deletions rust/flatbuffers/src/array.rs
@@ -37,8 +37,15 @@ where
#[allow(clippy::len_without_is_empty)]
#[allow(clippy::from_over_into)] // TODO(caspern): Go from From to Into.
impl<'a, T: 'a, const N: usize> Array<'a, T, N> {
/// # SAFETY:
///
/// buf must be a contiguous array of `T`
///
/// # Panics
///
/// Panics if `buf.len()` is not `size_of::<T>() * N`
#[inline(always)]
pub fn new(buf: &'a [u8]) -> Self {
pub unsafe fn new(buf: &'a [u8]) -> Self {
assert!(size_of::<T>() * N == buf.len());

Array {
@@ -61,12 +68,12 @@ impl<'a, T: Follow<'a> + 'a, const N: usize> Array<'a, T, N> {
pub fn get(&self, idx: usize) -> T::Inner {
assert!(idx < N);
let sz = size_of::<T>();
T::follow(self.0, sz * idx)
unsafe { T::follow(self.0, sz * idx) }
}

#[inline(always)]
pub fn iter(&self) -> VectorIter<'a, T> {
VectorIter::from_slice(self.0, self.len())
unsafe { VectorIter::from_slice(self.0, self.len()) }
}
}
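With `Array::new` now `unsafe`, the layout invariant is the caller's obligation, while `get` and `iter` stay safe by wrapping the `unsafe` internals. A minimal sketch of a post-change call site, assuming `Array` is re-exported at the crate root and using plain `u32` elements:

```rust
use flatbuffers::Array;

fn main() {
    // Four contiguous little-endian u32 values.
    let buf: Vec<u8> = (0u32..4).flat_map(|v| v.to_le_bytes()).collect();

    // SAFETY: `buf` is a contiguous array of exactly 4 u32 values, so the
    // length assertion inside `Array::new` holds and every later `get`
    // stays within bounds.
    let arr: Array<'_, u32, 4> = unsafe { Array::new(&buf) };
    assert_eq!(arr.get(2), 2);
}
```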

@@ -83,27 +90,21 @@ impl<'a, T: Follow<'a> + Debug, const N: usize> Into<[T::Inner; N]> for Array<'a
impl<'a, T: Follow<'a> + 'a, const N: usize> Follow<'a> for Array<'a, T, N> {
type Inner = Array<'a, T, N>;
#[inline(always)]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
Array::new(&buf[loc..loc + N * size_of::<T>()])
}
}

pub fn emplace_scalar_array<T: EndianScalar, const N: usize>(
pub unsafe fn emplace_scalar_array<T: EndianScalar, const N: usize>(
buf: &mut [u8],
loc: usize,
src: &[T; N],
) {
let mut buf_ptr = buf[loc..].as_mut_ptr();
for item in src.iter() {
let item_le = item.to_little_endian();
unsafe {
core::ptr::copy_nonoverlapping(
&item_le as *const T as *const u8,
buf_ptr,
size_of::<T>(),
);
buf_ptr = buf_ptr.add(size_of::<T>());
}
core::ptr::copy_nonoverlapping(&item_le as *const T as *const u8, buf_ptr, size_of::<T>());
buf_ptr = buf_ptr.add(size_of::<T>());
}
}
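`emplace_scalar_array` keeps the same behaviour, but the bounds obligation now sits at the call site. A sketch of a safety-commented call, assuming the function remains publicly reachable from the crate root:

```rust
use flatbuffers::emplace_scalar_array;

fn main() {
    let mut buf = [0u8; 16];
    let values: [u32; 4] = [1, 2, 3, 4];

    // SAFETY: buf[0..] provides 16 bytes, which is exactly
    // size_of::<u32>() * 4, so every copy_nonoverlapping stays in bounds.
    unsafe { emplace_scalar_array(&mut buf, 0, &values) };

    // The first element was written as little-endian bytes.
    assert_eq!(&buf[..4], &1u32.to_le_bytes());
}
```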

@@ -134,7 +135,7 @@ where
}
}

#[cfg(feature="serialize")]
#[cfg(feature = "serialize")]
impl<'a, T: 'a, const N: usize> serde::ser::Serialize for Array<'a, T, N>
where
T: 'a + Follow<'a>,
80 changes: 35 additions & 45 deletions rust/flatbuffers/src/builder.rs
@@ -16,19 +16,18 @@

extern crate smallvec;

#[cfg(feature = "no_std")]
use alloc::{vec, vec::Vec};
use core::cmp::max;
use core::iter::{DoubleEndedIterator, ExactSizeIterator};
use core::marker::PhantomData;
use core::ptr::write_bytes;
use core::slice::from_raw_parts;
#[cfg(feature = "no_std")]
use alloc::{vec, vec::Vec};

use crate::endian_scalar::{emplace_scalar, read_scalar_at};
use crate::primitives::*;
use crate::push::{Push, PushAlignment};
use crate::table::Table;
use crate::vector::{SafeSliceAccess, Vector};
use crate::vector::Vector;
use crate::vtable::{field_index_to_field_offset, VTable};
use crate::vtable_writer::VTableWriter;

@@ -153,7 +152,9 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
self.make_space(sz);
{
let (dst, rest) = (&mut self.owned_buf[self.head..]).split_at_mut(sz);
x.push(dst, rest);
// SAFETY
// Called make_space above
unsafe { x.push(dst, rest.len()) };
}
WIPOffset::new(self.used_space() as UOffsetT)
}
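The builder now passes `Push::push` the number of bytes already written after the destination slot (`rest.len()`) instead of the tail slice itself, and wraps the call in `unsafe` because `make_space` has just reserved the room. From the outside, `FlatBufferBuilder::push` is unchanged; a minimal usage sketch:

```rust
use flatbuffers::FlatBufferBuilder;

fn main() {
    let mut fbb = FlatBufferBuilder::new();
    // Scalars go through the same Push machinery: space is reserved first,
    // then the value is emplaced little-endian into the reserved slot.
    let off = fbb.push(42u32);
    assert!(off.value() > 0);
}
```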
@@ -309,33 +310,6 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
WIPOffset::new(self.used_space() as UOffsetT)
}

/// Create a vector by memcpy'ing. This is much faster than calling
/// `create_vector`, but the underlying type must be represented as
/// little-endian on the host machine. This property is encoded in the
/// type system through the SafeSliceAccess trait. The following types are
/// always safe, on any platform: bool, u8, i8, and any
/// FlatBuffers-generated struct.
#[inline]
pub fn create_vector_direct<'a: 'b, 'b, T: SafeSliceAccess + Push + Sized + 'b>(
&'a mut self,
items: &'b [T],
) -> WIPOffset<Vector<'fbb, T>> {
self.assert_not_nested(
"create_vector_direct can not be called when a table or vector is under construction",
);
let elem_size = T::size();
self.align(items.len() * elem_size, T::alignment().max_of(SIZE_UOFFSET));

let bytes = {
let ptr = items.as_ptr() as *const T as *const u8;
unsafe { from_raw_parts(ptr, items.len() * elem_size) }
};
self.push_bytes_unprefixed(bytes);
self.push(items.len() as UOffsetT);

WIPOffset::new(self.used_space() as UOffsetT)
}
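With `SafeSliceAccess` and `create_vector_direct` gone, callers fall back to `create_vector`, which the rest of this PR rewrites to serialize elements directly into pre-reserved space. A hedged migration sketch for byte slices, the most common `create_vector_direct` use:

```rust
use flatbuffers::FlatBufferBuilder;

fn main() {
    let mut fbb = FlatBufferBuilder::new();
    let bytes: &[u8] = &[1, 2, 3, 4];

    // Previously: fbb.create_vector_direct(bytes);
    let v = fbb.create_vector(bytes);

    fbb.finish_minimal(v);
    assert!(!fbb.finished_data().is_empty());
}
```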

/// Create a vector of strings.
///
/// Speed-sensitive users may wish to reduce memory usage by creating the
@@ -367,15 +341,27 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
/// Speed-sensitive users may wish to reduce memory usage by creating the
/// vector manually: use `start_vector`, `push`, and `end_vector`.
#[inline]
pub fn create_vector<'a: 'b, 'b, T: Push + Copy + 'b>(
pub fn create_vector<'a: 'b, 'b, T: Push + 'b>(
&'a mut self,
items: &'b [T],
) -> WIPOffset<Vector<'fbb, T::Output>> {
let elem_size = T::size();
self.align(items.len() * elem_size, T::alignment().max_of(SIZE_UOFFSET));
for i in (0..items.len()).rev() {
self.push(items[i]);
let slice_size = items.len() * elem_size;
self.align(slice_size, T::alignment().max_of(SIZE_UOFFSET));
self.ensure_capacity(slice_size + UOffsetT::size());

self.head -= slice_size;
let mut written_len = self.owned_buf.len() - self.head;

let buf = &mut self.owned_buf[self.head..self.head + slice_size];
for (item, out) in items.iter().zip(buf.chunks_exact_mut(elem_size)) {
written_len -= elem_size;

// SAFETY
// Called ensure_capacity and aligned to T above
unsafe { item.push(out, written_len) };
}

WIPOffset::new(self.push::<UOffsetT>(items.len() as UOffsetT).value())
}
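The new loop keeps a `written_len` counter so each element's `Push::push` sees how many bytes already sit between the end of its slot and the end of the buffer (the buffer fills back to front). A standalone sketch with made-up numbers, just to show how the counter evolves:

```rust
fn main() {
    let owned_buf_len = 64;     // hypothetical owned_buf.len()
    let elem_size = 4;          // e.g. size_of::<u32>()
    let n = 3;
    let slice_size = n * elem_size;
    let head = 32 - slice_size; // head after `self.head -= slice_size`

    let mut written_len = owned_buf_len - head;
    for i in 0..n {
        written_len -= elem_size;
        // Element i is emplaced at head + i * elem_size, with `written_len`
        // bytes of already-written data following its slot.
        println!("element {i}: written_len = {written_len}");
    }
}
```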

@@ -384,17 +370,19 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
/// Speed-sensitive users may wish to reduce memory usage by creating the
/// vector manually: use `start_vector`, `push`, and `end_vector`.
#[inline]
pub fn create_vector_from_iter<T: Push + Copy>(
pub fn create_vector_from_iter<T: Push>(
&mut self,
items: impl ExactSizeIterator<Item = T> + DoubleEndedIterator,
) -> WIPOffset<Vector<'fbb, T::Output>> {
let elem_size = T::size();
let len = items.len();
self.align(len * elem_size, T::alignment().max_of(SIZE_UOFFSET));
let mut actual = 0;
for item in items.rev() {
self.push(item);
actual += 1;
}
WIPOffset::new(self.push::<UOffsetT>(len as UOffsetT).value())
WIPOffset::new(self.push::<UOffsetT>(actual).value())
}
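`create_vector_from_iter` drops the `Copy` bound and records the number of items actually consumed from the iterator rather than trusting `len()` up front. A small usage sketch:

```rust
use flatbuffers::FlatBufferBuilder;

fn main() {
    let mut fbb = FlatBufferBuilder::new();

    // Any DoubleEndedIterator + ExactSizeIterator of Push values works;
    // the vector length written is the count of items actually pushed.
    let v = fbb.create_vector_from_iter((0u32..5).map(|i| i * 10));
    fbb.finish_minimal(v);
}
```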

/// Set whether default values are stored.
@@ -443,7 +431,7 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
assert_msg_name: &'static str,
) {
let idx = self.used_space() - tab_revloc.value() as usize;
let tab = Table::new(&self.owned_buf[self.head..], idx);
let tab = unsafe { Table::new(&self.owned_buf[self.head..], idx) };
let o = tab.vtable().get(slot_byte_loc) as usize;
assert!(o != 0, "missing required field {}", assert_msg_name);
}
@@ -560,11 +548,13 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
}
}
let new_vt_bytes = &self.owned_buf[vt_start_pos..vt_end_pos];
let found = self.written_vtable_revpos.binary_search_by(|old_vtable_revpos: &UOffsetT| {
let old_vtable_pos = self.owned_buf.len() - *old_vtable_revpos as usize;
let old_vtable = VTable::init(&self.owned_buf, old_vtable_pos);
new_vt_bytes.cmp(old_vtable.as_bytes())
});
let found = self
.written_vtable_revpos
.binary_search_by(|old_vtable_revpos: &UOffsetT| {
let old_vtable_pos = self.owned_buf.len() - *old_vtable_revpos as usize;
let old_vtable = unsafe { VTable::init(&self.owned_buf, old_vtable_pos) };
new_vt_bytes.cmp(old_vtable.as_bytes())
});
let final_vtable_revpos = match found {
Ok(i) => {
// The new vtable is a duplicate so clear it.
@@ -586,7 +576,7 @@ impl<'fbb> FlatBufferBuilder<'fbb> {
unsafe {
emplace_scalar::<SOffsetT>(
&mut self.owned_buf[table_pos..table_pos + SIZE_SOFFSET],
final_vtable_revpos as SOffsetT - object_revloc_to_vtable.value() as SOffsetT
final_vtable_revpos as SOffsetT - object_revloc_to_vtable.value() as SOffsetT,
);
}

4 changes: 2 additions & 2 deletions rust/flatbuffers/src/endian_scalar.rs
@@ -150,7 +150,7 @@ pub fn byte_swap_f64(x: f64) -> f64 {
/// Place an EndianScalar into the provided mutable byte slice. Performs
/// endian conversion, if necessary.
/// # Safety
/// Caller must ensure `s.len() > size_of::<T>()`
/// Caller must ensure `s.len() >= size_of::<T>()`
/// and `x` does not overlap with `s`.
#[inline]
pub unsafe fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) {
@@ -165,7 +165,7 @@ pub unsafe fn emplace_scalar<T: EndianScalar>(s: &mut [u8], x: T) {
/// Read an EndianScalar from the provided byte slice at the specified location.
/// Performs endian conversion, if necessary.
/// # Safety
/// Caller must ensure `s.len() > loc + size_of::<T>()`.
/// Caller must ensure `s.len() >= loc + size_of::<T>()`.
#[inline]
pub unsafe fn read_scalar_at<T: EndianScalar>(s: &[u8], loc: usize) -> T {
read_scalar(&s[loc..])
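Both doc fixes turn a strict `>` into `>=`: a slice whose length is exactly `size_of::<T>()` (or exactly `loc + size_of::<T>()`) is sufficient. A tiny stand-in for that bound check, independent of the crate internals:

```rust
use core::mem::size_of;

/// Stand-in for the documented precondition: `s.len() >= size_of::<T>()`.
fn fits<T>(s: &[u8]) -> bool {
    s.len() >= size_of::<T>()
}

fn main() {
    let buf = [0u8; 4];
    assert!(fits::<u32>(&buf));  // an exact fit is allowed
    assert!(!fits::<u64>(&buf)); // anything smaller is not
}
```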
13 changes: 10 additions & 3 deletions rust/flatbuffers/src/follow.rs
@@ -29,7 +29,10 @@ use core::marker::PhantomData;
/// continue traversing the FlatBuffer.
pub trait Follow<'buf> {
type Inner;
fn follow(buf: &'buf [u8], loc: usize) -> Self::Inner;
/// SAFETY
///
/// `buf[loc..]` must contain a valid value of `Self`
unsafe fn follow(buf: &'buf [u8], loc: usize) -> Self::Inner;
}
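Making `Follow::follow` `unsafe` moves the validity requirement into the trait contract, so every implementation (including generated code) must only be called on a buffer known to hold a valid value. A hypothetical implementation sketch, assuming `Follow` is re-exported at the crate root; `RawU16` is a made-up type for illustration:

```rust
use flatbuffers::Follow;

/// Hypothetical reader for a little-endian u16 stored at `loc`.
struct RawU16;

impl<'buf> Follow<'buf> for RawU16 {
    type Inner = u16;

    // SAFETY contract (from the trait): callers guarantee that
    // buf[loc..loc + 2] holds a valid little-endian u16.
    unsafe fn follow(buf: &'buf [u8], loc: usize) -> Self::Inner {
        u16::from_le_bytes([buf[loc], buf[loc + 1]])
    }
}

fn main() {
    let buf = [0x34u8, 0x12];
    // SAFETY: buf[0..2] holds a valid little-endian u16.
    let v = unsafe { RawU16::follow(&buf, 0) };
    assert_eq!(v, 0x1234);
}
```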

/// FollowStart wraps a Follow impl in a struct type. This can make certain
Expand All @@ -41,15 +44,19 @@ impl<'a, T: Follow<'a> + 'a> FollowStart<T> {
pub fn new() -> Self {
Self { 0: PhantomData }
}

/// SAFETY
///
/// `buf[loc..]` must contain a valid value of `T`
#[inline]
pub fn self_follow(&'a self, buf: &'a [u8], loc: usize) -> T::Inner {
pub unsafe fn self_follow(&'a self, buf: &'a [u8], loc: usize) -> T::Inner {
T::follow(buf, loc)
}
}
impl<'a, T: Follow<'a>> Follow<'a> for FollowStart<T> {
type Inner = T::Inner;
#[inline]
fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
T::follow(buf, loc)
}
}