// automatically generated by the FlatBuffers compiler, do not modify
// @generated
extern crate alloc;
extern crate flatbuffers;
use alloc::boxed::Box;
use alloc::string::{String, ToString};
use alloc::vec::Vec;
use core::mem;
use core::cmp::Ordering;
use self::flatbuffers::{EndianScalar, Follow};
use super::*;
// struct Object, aligned to 4
#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub(crate) struct Object(pub [u8; 4]);
impl Default for Object {
  fn default() -> Self {
    Self([0; 4])
  }
}
impl core::fmt::Debug for Object {
  fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
    f.debug_struct("Object")
      .field("value", &self.value())
      .finish()
  }
}
impl flatbuffers::SimpleToVerifyInSlice for Object {}
impl flatbuffers::SafeSliceAccess for Object {}
impl<'a> flatbuffers::Follow<'a> for Object {
  type Inner = &'a Object;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    <&'a Object>::follow(buf, loc)
  }
}
impl<'a> flatbuffers::Follow<'a> for &'a Object {
  type Inner = &'a Object;
  #[inline]
  fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
    flatbuffers::follow_cast_ref::<Object>(buf, loc)
  }
}
impl<'b> flatbuffers::Push for Object {
  type Output = Object;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::core::slice::from_raw_parts(self as *const Object as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}
impl<'b> flatbuffers::Push for &'b Object {
  type Output = Object;
  #[inline]
  fn push(&self, dst: &mut [u8], _rest: &[u8]) {
    let src = unsafe {
      ::core::slice::from_raw_parts(*self as *const Object as *const u8, Self::size())
    };
    dst.copy_from_slice(src);
  }
}
impl<'a> flatbuffers::Verifiable for Object {
  #[inline]
  fn run_verifier(
    v: &mut flatbuffers::Verifier, pos: usize
  ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
    use self::flatbuffers::Verifiable;
    v.in_buffer::<Self>(pos)
  }
}
impl<'a> Object {
  #[allow(clippy::too_many_arguments)]
  pub fn new(
    value: i32,
  ) -> Self {
    let mut s = Self([0; 4]);
    s.set_value(value);
    s
  }

  pub const fn get_fully_qualified_name() -> &'static str {
    "Object"
  }

  pub fn value(&self) -> i32 {
    let mut mem = core::mem::MaybeUninit::<i32>::uninit();
    unsafe {
      core::ptr::copy_nonoverlapping(
        self.0[0..].as_ptr(),
        mem.as_mut_ptr() as *mut u8,
        core::mem::size_of::<i32>(),
      );
      mem.assume_init()
    }.from_little_endian()
  }

  pub fn set_value(&mut self, x: i32) {
    let x_le = x.to_little_endian();
    unsafe {
      core::ptr::copy_nonoverlapping(
        &x_le as *const i32 as *const u8,
        self.0[0..].as_mut_ptr(),
        core::mem::size_of::<i32>(),
      );
    }
  }

  pub fn unpack(&self) -> ObjectT {
    ObjectT {
      value: self.value(),
    }
  }
}

#[derive(Debug, Clone, PartialEq, Default)]
pub(crate) struct ObjectT {
  pub value: i32,
}
impl ObjectT {
  pub fn pack(&self) -> Object {
    Object::new(
      self.value,
    )
  }
}
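
// A minimal usage sketch (illustrative addition, not emitted by flatc): it
// round-trips the object-API type `ObjectT` through `pack()`/`unpack()` and
// exercises the little-endian accessors, using only items defined in this
// module.
#[cfg(test)]
mod object_usage_sketch {
  use super::*;

  #[test]
  fn pack_and_unpack_round_trip() {
    // Start from the owned object-API representation.
    let owned = ObjectT { value: 42 };

    // `pack()` writes the field into the fixed-size, little-endian struct.
    let packed: Object = owned.pack();
    assert_eq!(packed.value(), 42);

    // `unpack()` recovers an equal object-API value.
    assert_eq!(packed.unpack(), owned);

    // The setter writes through the same 4-byte little-endian storage.
    let mut modified = packed;
    modified.set_value(7);
    assert_eq!(modified.value(), 7);
  }
}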