#![allow(dead_code)]
#![allow(unused_imports)]

use crate::r#gen::Schema::*;
use crate::r#gen::SparseTensor::*;
use crate::r#gen::Tensor::*;
use flatbuffers::EndianScalar;
use std::{cmp::Ordering, mem};
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_COMPRESSION_TYPE: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_COMPRESSION_TYPE: i8 = 1;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_COMPRESSION_TYPE: [CompressionType; 2] =
    [CompressionType::LZ4_FRAME, CompressionType::ZSTD];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct CompressionType(pub i8);
#[allow(non_upper_case_globals)]
impl CompressionType {
    pub const LZ4_FRAME: Self = Self(0);
    pub const ZSTD: Self = Self(1);

    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 1;
    pub const ENUM_VALUES: &'static [Self] = &[Self::LZ4_FRAME, Self::ZSTD];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::LZ4_FRAME => Some("LZ4_FRAME"),
            Self::ZSTD => Some("ZSTD"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for CompressionType {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for CompressionType {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = unsafe { flatbuffers::read_scalar_at::<i8>(buf, loc) };
        Self(b)
    }
}

impl flatbuffers::Push for CompressionType {
    type Output = CompressionType;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        unsafe {
            flatbuffers::emplace_scalar::<i8>(dst, self.0);
        }
    }
}

impl flatbuffers::EndianScalar for CompressionType {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for CompressionType {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for CompressionType {}
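
// Hand-written illustrative sketch (not flatc output): CompressionType is an
// "open" enum wrapper around an i8, so values outside ENUM_VALUES are kept
// as-is and surface through Debug as `<UNKNOWN ..>` instead of failing.
#[cfg(test)]
mod compression_type_example {
    use super::*;

    #[test]
    fn open_enum_round_trip() {
        assert_eq!(CompressionType::ZSTD.variant_name(), Some("ZSTD"));
        // An out-of-range discriminant is preserved rather than rejected.
        let unknown = CompressionType(42);
        assert_eq!(unknown.variant_name(), None);
        assert_eq!(format!("{unknown:?}"), "<UNKNOWN 42>");
    }
}
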
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_BODY_COMPRESSION_METHOD: i8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_BODY_COMPRESSION_METHOD: [BodyCompressionMethod; 1] =
    [BodyCompressionMethod::BUFFER];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct BodyCompressionMethod(pub i8);
#[allow(non_upper_case_globals)]
impl BodyCompressionMethod {
    pub const BUFFER: Self = Self(0);

    pub const ENUM_MIN: i8 = 0;
    pub const ENUM_MAX: i8 = 0;
    pub const ENUM_VALUES: &'static [Self] = &[Self::BUFFER];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::BUFFER => Some("BUFFER"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for BodyCompressionMethod {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for BodyCompressionMethod {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = unsafe { flatbuffers::read_scalar_at::<i8>(buf, loc) };
        Self(b)
    }
}

impl flatbuffers::Push for BodyCompressionMethod {
    type Output = BodyCompressionMethod;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        unsafe {
            flatbuffers::emplace_scalar::<i8>(dst, self.0);
        }
    }
}

impl flatbuffers::EndianScalar for BodyCompressionMethod {
    type Scalar = i8;
    #[inline]
    fn to_little_endian(self) -> i8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: i8) -> Self {
        let b = i8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for BodyCompressionMethod {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        i8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for BodyCompressionMethod {}
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MIN_MESSAGE_HEADER: u8 = 0;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
pub const ENUM_MAX_MESSAGE_HEADER: u8 = 5;
#[deprecated(
    since = "2.0.0",
    note = "Use associated constants instead. This will no longer be generated in 2021."
)]
#[allow(non_camel_case_types)]
pub const ENUM_VALUES_MESSAGE_HEADER: [MessageHeader; 6] = [
    MessageHeader::NONE,
    MessageHeader::Schema,
    MessageHeader::DictionaryBatch,
    MessageHeader::RecordBatch,
    MessageHeader::Tensor,
    MessageHeader::SparseTensor,
];

#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)]
#[repr(transparent)]
pub struct MessageHeader(pub u8);
#[allow(non_upper_case_globals)]
impl MessageHeader {
    pub const NONE: Self = Self(0);
    pub const Schema: Self = Self(1);
    pub const DictionaryBatch: Self = Self(2);
    pub const RecordBatch: Self = Self(3);
    pub const Tensor: Self = Self(4);
    pub const SparseTensor: Self = Self(5);

    pub const ENUM_MIN: u8 = 0;
    pub const ENUM_MAX: u8 = 5;
    pub const ENUM_VALUES: &'static [Self] = &[
        Self::NONE,
        Self::Schema,
        Self::DictionaryBatch,
        Self::RecordBatch,
        Self::Tensor,
        Self::SparseTensor,
    ];
    pub fn variant_name(self) -> Option<&'static str> {
        match self {
            Self::NONE => Some("NONE"),
            Self::Schema => Some("Schema"),
            Self::DictionaryBatch => Some("DictionaryBatch"),
            Self::RecordBatch => Some("RecordBatch"),
            Self::Tensor => Some("Tensor"),
            Self::SparseTensor => Some("SparseTensor"),
            _ => None,
        }
    }
}
impl core::fmt::Debug for MessageHeader {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        if let Some(name) = self.variant_name() {
            f.write_str(name)
        } else {
            f.write_fmt(format_args!("<UNKNOWN {:?}>", self.0))
        }
    }
}
impl<'a> flatbuffers::Follow<'a> for MessageHeader {
    type Inner = Self;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        let b = unsafe { flatbuffers::read_scalar_at::<u8>(buf, loc) };
        Self(b)
    }
}

impl flatbuffers::Push for MessageHeader {
    type Output = MessageHeader;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        unsafe {
            flatbuffers::emplace_scalar::<u8>(dst, self.0);
        }
    }
}

impl flatbuffers::EndianScalar for MessageHeader {
    type Scalar = u8;
    #[inline]
    fn to_little_endian(self) -> u8 {
        self.0.to_le()
    }
    #[inline]
    #[allow(clippy::wrong_self_convention)]
    fn from_little_endian(v: u8) -> Self {
        let b = u8::from_le(v);
        Self(b)
    }
}

impl<'a> flatbuffers::Verifiable for MessageHeader {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        u8::run_verifier(v, pos)
    }
}

impl flatbuffers::SimpleToVerifyInSlice for MessageHeader {}
pub struct MessageHeaderUnionTableOffset {}

#[repr(transparent)]
#[derive(Clone, Copy, PartialEq)]
pub struct FieldNode(pub [u8; 16]);
impl Default for FieldNode {
    fn default() -> Self {
        Self([0; 16])
    }
}
impl core::fmt::Debug for FieldNode {
    fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
        f.debug_struct("FieldNode")
            .field("length", &self.length())
            .field("null_count", &self.null_count())
            .finish()
    }
}

impl flatbuffers::SimpleToVerifyInSlice for FieldNode {}
impl<'a> flatbuffers::Follow<'a> for FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        unsafe { <&'a FieldNode>::follow(buf, loc) }
    }
}
impl<'a> flatbuffers::Follow<'a> for &'a FieldNode {
    type Inner = &'a FieldNode;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        unsafe { flatbuffers::follow_cast_ref::<FieldNode>(buf, loc) }
    }
}
impl<'b> flatbuffers::Push for FieldNode {
    type Output = FieldNode;
    #[inline]
    unsafe fn push(&self, dst: &mut [u8], _written_len: usize) {
        let src = unsafe {
            ::core::slice::from_raw_parts(
                self as *const FieldNode as *const u8,
                <Self as flatbuffers::Push>::size(),
            )
        };
        dst.copy_from_slice(src);
    }
    #[inline]
    fn alignment() -> flatbuffers::PushAlignment {
        flatbuffers::PushAlignment::new(8)
    }
}

impl<'a> flatbuffers::Verifiable for FieldNode {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.in_buffer::<Self>(pos)
    }
}

impl<'a> FieldNode {
    #[allow(clippy::too_many_arguments)]
    pub fn new(length: i64, null_count: i64) -> Self {
        let mut s = Self([0; 16]);
        s.set_length(length);
        s.set_null_count(null_count);
        s
    }

    pub fn length(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[0..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }

    pub fn set_length(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[0..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }

    pub fn null_count(&self) -> i64 {
        let mut mem = core::mem::MaybeUninit::<<i64 as EndianScalar>::Scalar>::uninit();
        EndianScalar::from_little_endian(unsafe {
            core::ptr::copy_nonoverlapping(
                self.0[8..].as_ptr(),
                mem.as_mut_ptr() as *mut u8,
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
            mem.assume_init()
        })
    }

    pub fn set_null_count(&mut self, x: i64) {
        let x_le = x.to_little_endian();
        unsafe {
            core::ptr::copy_nonoverlapping(
                &x_le as *const _ as *const u8,
                self.0[8..].as_mut_ptr(),
                core::mem::size_of::<<i64 as EndianScalar>::Scalar>(),
            );
        }
    }
}
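
// Hand-written illustrative sketch (not flatc output): FieldNode is a fixed
// 16-byte struct stored inline in vectors; `length` occupies bytes 0..8 and
// `null_count` bytes 8..16, both encoded as little-endian i64.
#[cfg(test)]
mod field_node_example {
    use super::*;

    #[test]
    fn field_node_layout_round_trip() {
        let node = FieldNode::new(100, 3);
        assert_eq!(node.length(), 100);
        assert_eq!(node.null_count(), 3);
        // The backing bytes are little-endian regardless of host endianness.
        assert_eq!(node.0[0], 100);
        assert_eq!(node.0[8], 3);
    }
}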

pub enum BodyCompressionOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct BodyCompression<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for BodyCompression<'a> {
    type Inner = BodyCompression<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: unsafe { flatbuffers::Table::new(buf, loc) },
        }
    }
}

impl<'a> BodyCompression<'a> {
    pub const VT_CODEC: flatbuffers::VOffsetT = 4;
    pub const VT_METHOD: flatbuffers::VOffsetT = 6;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        BodyCompression { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args BodyCompressionArgs,
    ) -> flatbuffers::WIPOffset<BodyCompression<'bldr>> {
        let mut builder = BodyCompressionBuilder::new(_fbb);
        builder.add_method(args.method);
        builder.add_codec(args.codec);
        builder.finish()
    }

    #[inline]
    pub fn codec(&self) -> CompressionType {
        unsafe {
            self._tab
                .get::<CompressionType>(BodyCompression::VT_CODEC, Some(CompressionType::LZ4_FRAME))
                .unwrap()
        }
    }
    #[inline]
    pub fn method(&self) -> BodyCompressionMethod {
        unsafe {
            self._tab
                .get::<BodyCompressionMethod>(
                    BodyCompression::VT_METHOD,
                    Some(BodyCompressionMethod::BUFFER),
                )
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for BodyCompression<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<CompressionType>("codec", Self::VT_CODEC, false)?
            .visit_field::<BodyCompressionMethod>("method", Self::VT_METHOD, false)?
            .finish();
        Ok(())
    }
}
pub struct BodyCompressionArgs {
    pub codec: CompressionType,
    pub method: BodyCompressionMethod,
}
impl<'a> Default for BodyCompressionArgs {
    #[inline]
    fn default() -> Self {
        BodyCompressionArgs {
            codec: CompressionType::LZ4_FRAME,
            method: BodyCompressionMethod::BUFFER,
        }
    }
}

pub struct BodyCompressionBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> BodyCompressionBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_codec(&mut self, codec: CompressionType) {
        self.fbb_.push_slot::<CompressionType>(
            BodyCompression::VT_CODEC,
            codec,
            CompressionType::LZ4_FRAME,
        );
    }
    #[inline]
    pub fn add_method(&mut self, method: BodyCompressionMethod) {
        self.fbb_.push_slot::<BodyCompressionMethod>(
            BodyCompression::VT_METHOD,
            method,
            BodyCompressionMethod::BUFFER,
        );
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> BodyCompressionBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        BodyCompressionBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<BodyCompression<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for BodyCompression<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("BodyCompression");
        ds.field("codec", &self.codec());
        ds.field("method", &self.method());
        ds.finish()
    }
}
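
// Hand-written illustrative sketch (not flatc output): building a
// BodyCompression table with the generated builder API and reading it back
// through the verifying `flatbuffers::root` entry point.
#[cfg(test)]
mod body_compression_example {
    use super::*;

    #[test]
    fn body_compression_round_trip() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();
        let offset = BodyCompression::create(
            &mut fbb,
            &BodyCompressionArgs {
                codec: CompressionType::ZSTD,
                method: BodyCompressionMethod::BUFFER,
            },
        );
        fbb.finish(offset, None);

        let table = flatbuffers::root::<BodyCompression>(fbb.finished_data())
            .expect("buffer should verify");
        assert_eq!(table.codec(), CompressionType::ZSTD);
        assert_eq!(table.method(), BodyCompressionMethod::BUFFER);
    }
}
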
pub enum RecordBatchOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct RecordBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for RecordBatch<'a> {
    type Inner = RecordBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: unsafe { flatbuffers::Table::new(buf, loc) },
        }
    }
}

impl<'a> RecordBatch<'a> {
    pub const VT_LENGTH: flatbuffers::VOffsetT = 4;
    pub const VT_NODES: flatbuffers::VOffsetT = 6;
    pub const VT_BUFFERS: flatbuffers::VOffsetT = 8;
    pub const VT_COMPRESSION: flatbuffers::VOffsetT = 10;
    pub const VT_VARIADICBUFFERCOUNTS: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        RecordBatch { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args RecordBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<RecordBatch<'bldr>> {
        let mut builder = RecordBatchBuilder::new(_fbb);
        builder.add_length(args.length);
        if let Some(x) = args.variadicBufferCounts {
            builder.add_variadicBufferCounts(x);
        }
        if let Some(x) = args.compression {
            builder.add_compression(x);
        }
        if let Some(x) = args.buffers {
            builder.add_buffers(x);
        }
        if let Some(x) = args.nodes {
            builder.add_nodes(x);
        }
        builder.finish()
    }

    #[inline]
    pub fn length(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(RecordBatch::VT_LENGTH, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn nodes(&self) -> Option<flatbuffers::Vector<'a, FieldNode>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, FieldNode>>>(
                    RecordBatch::VT_NODES,
                    None,
                )
        }
    }
    #[inline]
    pub fn buffers(&self) -> Option<flatbuffers::Vector<'a, Buffer>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, Buffer>>>(
                    RecordBatch::VT_BUFFERS,
                    None,
                )
        }
    }
    #[inline]
    pub fn compression(&self) -> Option<BodyCompression<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                    RecordBatch::VT_COMPRESSION,
                    None,
                )
        }
    }
    #[inline]
    pub fn variadicBufferCounts(&self) -> Option<flatbuffers::Vector<'a, i64>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'a, i64>>>(
                    RecordBatch::VT_VARIADICBUFFERCOUNTS,
                    None,
                )
        }
    }
}

impl flatbuffers::Verifiable for RecordBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("length", Self::VT_LENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, FieldNode>>>(
                "nodes",
                Self::VT_NODES,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, Buffer>>>(
                "buffers",
                Self::VT_BUFFERS,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<BodyCompression>>(
                "compression",
                Self::VT_COMPRESSION,
                false,
            )?
            .visit_field::<flatbuffers::ForwardsUOffset<flatbuffers::Vector<'_, i64>>>(
                "variadicBufferCounts",
                Self::VT_VARIADICBUFFERCOUNTS,
                false,
            )?
            .finish();
        Ok(())
    }
}
pub struct RecordBatchArgs<'a> {
    pub length: i64,
    pub nodes: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, FieldNode>>>,
    pub buffers: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, Buffer>>>,
    pub compression: Option<flatbuffers::WIPOffset<BodyCompression<'a>>>,
    pub variadicBufferCounts: Option<flatbuffers::WIPOffset<flatbuffers::Vector<'a, i64>>>,
}
impl<'a> Default for RecordBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        RecordBatchArgs {
            length: 0,
            nodes: None,
            buffers: None,
            compression: None,
            variadicBufferCounts: None,
        }
    }
}

pub struct RecordBatchBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> RecordBatchBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_length(&mut self, length: i64) {
        self.fbb_
            .push_slot::<i64>(RecordBatch::VT_LENGTH, length, 0);
    }
    #[inline]
    pub fn add_nodes(&mut self, nodes: flatbuffers::WIPOffset<flatbuffers::Vector<'b, FieldNode>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_NODES, nodes);
    }
    #[inline]
    pub fn add_buffers(
        &mut self,
        buffers: flatbuffers::WIPOffset<flatbuffers::Vector<'b, Buffer>>,
    ) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(RecordBatch::VT_BUFFERS, buffers);
    }
    #[inline]
    pub fn add_compression(&mut self, compression: flatbuffers::WIPOffset<BodyCompression<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<BodyCompression>>(
                RecordBatch::VT_COMPRESSION,
                compression,
            );
    }
    #[inline]
    pub fn add_variadicBufferCounts(
        &mut self,
        variadicBufferCounts: flatbuffers::WIPOffset<flatbuffers::Vector<'b, i64>>,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            RecordBatch::VT_VARIADICBUFFERCOUNTS,
            variadicBufferCounts,
        );
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> RecordBatchBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        RecordBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<RecordBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for RecordBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("RecordBatch");
        ds.field("length", &self.length());
        ds.field("nodes", &self.nodes());
        ds.field("buffers", &self.buffers());
        ds.field("compression", &self.compression());
        ds.field("variadicBufferCounts", &self.variadicBufferCounts());
        ds.finish()
    }
}
pub enum DictionaryBatchOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct DictionaryBatch<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for DictionaryBatch<'a> {
    type Inner = DictionaryBatch<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: unsafe { flatbuffers::Table::new(buf, loc) },
        }
    }
}

impl<'a> DictionaryBatch<'a> {
    pub const VT_ID: flatbuffers::VOffsetT = 4;
    pub const VT_DATA: flatbuffers::VOffsetT = 6;
    pub const VT_ISDELTA: flatbuffers::VOffsetT = 8;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        DictionaryBatch { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args DictionaryBatchArgs<'args>,
    ) -> flatbuffers::WIPOffset<DictionaryBatch<'bldr>> {
        let mut builder = DictionaryBatchBuilder::new(_fbb);
        builder.add_id(args.id);
        if let Some(x) = args.data {
            builder.add_data(x);
        }
        builder.add_isDelta(args.isDelta);
        builder.finish()
    }

    #[inline]
    pub fn id(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(DictionaryBatch::VT_ID, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn data(&self) -> Option<RecordBatch<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<RecordBatch>>(DictionaryBatch::VT_DATA, None)
        }
    }
    #[inline]
    pub fn isDelta(&self) -> bool {
        unsafe {
            self._tab
                .get::<bool>(DictionaryBatch::VT_ISDELTA, Some(false))
                .unwrap()
        }
    }
}

impl flatbuffers::Verifiable for DictionaryBatch<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<i64>("id", Self::VT_ID, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<RecordBatch>>("data", Self::VT_DATA, false)?
            .visit_field::<bool>("isDelta", Self::VT_ISDELTA, false)?
            .finish();
        Ok(())
    }
}
pub struct DictionaryBatchArgs<'a> {
    pub id: i64,
    pub data: Option<flatbuffers::WIPOffset<RecordBatch<'a>>>,
    pub isDelta: bool,
}
impl<'a> Default for DictionaryBatchArgs<'a> {
    #[inline]
    fn default() -> Self {
        DictionaryBatchArgs {
            id: 0,
            data: None,
            isDelta: false,
        }
    }
}

pub struct DictionaryBatchBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> DictionaryBatchBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_id(&mut self, id: i64) {
        self.fbb_.push_slot::<i64>(DictionaryBatch::VT_ID, id, 0);
    }
    #[inline]
    pub fn add_data(&mut self, data: flatbuffers::WIPOffset<RecordBatch<'b>>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<RecordBatch>>(
                DictionaryBatch::VT_DATA,
                data,
            );
    }
    #[inline]
    pub fn add_isDelta(&mut self, isDelta: bool) {
        self.fbb_
            .push_slot::<bool>(DictionaryBatch::VT_ISDELTA, isDelta, false);
    }
    #[inline]
    pub fn new(
        _fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    ) -> DictionaryBatchBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        DictionaryBatchBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<DictionaryBatch<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for DictionaryBatch<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("DictionaryBatch");
        ds.field("id", &self.id());
        ds.field("data", &self.data());
        ds.field("isDelta", &self.isDelta());
        ds.finish()
    }
}
pub enum MessageOffset {}
#[derive(Copy, Clone, PartialEq)]

pub struct Message<'a> {
    pub _tab: flatbuffers::Table<'a>,
}

impl<'a> flatbuffers::Follow<'a> for Message<'a> {
    type Inner = Message<'a>;
    #[inline]
    unsafe fn follow(buf: &'a [u8], loc: usize) -> Self::Inner {
        Self {
            _tab: unsafe { flatbuffers::Table::new(buf, loc) },
        }
    }
}

impl<'a> Message<'a> {
    pub const VT_VERSION: flatbuffers::VOffsetT = 4;
    pub const VT_HEADER_TYPE: flatbuffers::VOffsetT = 6;
    pub const VT_HEADER: flatbuffers::VOffsetT = 8;
    pub const VT_BODYLENGTH: flatbuffers::VOffsetT = 10;
    pub const VT_CUSTOM_METADATA: flatbuffers::VOffsetT = 12;

    #[inline]
    pub unsafe fn init_from_table(table: flatbuffers::Table<'a>) -> Self {
        Message { _tab: table }
    }
    #[allow(unused_mut)]
    pub fn create<'bldr: 'args, 'args: 'mut_bldr, 'mut_bldr, A: flatbuffers::Allocator + 'bldr>(
        _fbb: &'mut_bldr mut flatbuffers::FlatBufferBuilder<'bldr, A>,
        args: &'args MessageArgs<'args>,
    ) -> flatbuffers::WIPOffset<Message<'bldr>> {
        let mut builder = MessageBuilder::new(_fbb);
        builder.add_bodyLength(args.bodyLength);
        if let Some(x) = args.custom_metadata {
            builder.add_custom_metadata(x);
        }
        if let Some(x) = args.header {
            builder.add_header(x);
        }
        builder.add_version(args.version);
        builder.add_header_type(args.header_type);
        builder.finish()
    }

    #[inline]
    pub fn version(&self) -> MetadataVersion {
        unsafe {
            self._tab
                .get::<MetadataVersion>(Message::VT_VERSION, Some(MetadataVersion::V1))
                .unwrap()
        }
    }
    #[inline]
    pub fn header_type(&self) -> MessageHeader {
        unsafe {
            self._tab
                .get::<MessageHeader>(Message::VT_HEADER_TYPE, Some(MessageHeader::NONE))
                .unwrap()
        }
    }
    #[inline]
    pub fn header(&self) -> Option<flatbuffers::Table<'a>> {
        unsafe {
            self._tab
                .get::<flatbuffers::ForwardsUOffset<flatbuffers::Table<'a>>>(
                    Message::VT_HEADER,
                    None,
                )
        }
    }
    #[inline]
    pub fn bodyLength(&self) -> i64 {
        unsafe {
            self._tab
                .get::<i64>(Message::VT_BODYLENGTH, Some(0))
                .unwrap()
        }
    }
    #[inline]
    pub fn custom_metadata(
        &self,
    ) -> Option<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>> {
        unsafe {
            self._tab.get::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>(Message::VT_CUSTOM_METADATA, None)
        }
    }
    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_schema(&self) -> Option<Schema<'a>> {
        if self.header_type() == MessageHeader::Schema {
            self.header().map(|t| {
                unsafe { Schema::init_from_table(t) }
            })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_dictionary_batch(&self) -> Option<DictionaryBatch<'a>> {
        if self.header_type() == MessageHeader::DictionaryBatch {
            self.header().map(|t| {
                unsafe { DictionaryBatch::init_from_table(t) }
            })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_record_batch(&self) -> Option<RecordBatch<'a>> {
        if self.header_type() == MessageHeader::RecordBatch {
            self.header().map(|t| {
                unsafe { RecordBatch::init_from_table(t) }
            })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_tensor(&self) -> Option<Tensor<'a>> {
        if self.header_type() == MessageHeader::Tensor {
            self.header().map(|t| {
                unsafe { Tensor::init_from_table(t) }
            })
        } else {
            None
        }
    }

    #[inline]
    #[allow(non_snake_case)]
    pub fn header_as_sparse_tensor(&self) -> Option<SparseTensor<'a>> {
        if self.header_type() == MessageHeader::SparseTensor {
            self.header().map(|t| {
                unsafe { SparseTensor::init_from_table(t) }
            })
        } else {
            None
        }
    }
}

impl flatbuffers::Verifiable for Message<'_> {
    #[inline]
    fn run_verifier(
        v: &mut flatbuffers::Verifier,
        pos: usize,
    ) -> Result<(), flatbuffers::InvalidFlatbuffer> {
        use flatbuffers::Verifiable;
        v.visit_table(pos)?
            .visit_field::<MetadataVersion>("version", Self::VT_VERSION, false)?
            .visit_union::<MessageHeader, _>(
                "header_type",
                Self::VT_HEADER_TYPE,
                "header",
                Self::VT_HEADER,
                false,
                |key, v, pos| match key {
                    MessageHeader::Schema => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Schema>>(
                            "MessageHeader::Schema",
                            pos,
                        ),
                    MessageHeader::DictionaryBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<DictionaryBatch>>(
                            "MessageHeader::DictionaryBatch",
                            pos,
                        ),
                    MessageHeader::RecordBatch => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<RecordBatch>>(
                            "MessageHeader::RecordBatch",
                            pos,
                        ),
                    MessageHeader::Tensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<Tensor>>(
                            "MessageHeader::Tensor",
                            pos,
                        ),
                    MessageHeader::SparseTensor => v
                        .verify_union_variant::<flatbuffers::ForwardsUOffset<SparseTensor>>(
                            "MessageHeader::SparseTensor",
                            pos,
                        ),
                    _ => Ok(()),
                },
            )?
            .visit_field::<i64>("bodyLength", Self::VT_BODYLENGTH, false)?
            .visit_field::<flatbuffers::ForwardsUOffset<
                flatbuffers::Vector<'_, flatbuffers::ForwardsUOffset<KeyValue>>,
            >>("custom_metadata", Self::VT_CUSTOM_METADATA, false)?
            .finish();
        Ok(())
    }
}
pub struct MessageArgs<'a> {
    pub version: MetadataVersion,
    pub header_type: MessageHeader,
    pub header: Option<flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>>,
    pub bodyLength: i64,
    pub custom_metadata: Option<
        flatbuffers::WIPOffset<flatbuffers::Vector<'a, flatbuffers::ForwardsUOffset<KeyValue<'a>>>>,
    >,
}
impl<'a> Default for MessageArgs<'a> {
    #[inline]
    fn default() -> Self {
        MessageArgs {
            version: MetadataVersion::V1,
            header_type: MessageHeader::NONE,
            header: None,
            bodyLength: 0,
            custom_metadata: None,
        }
    }
}

pub struct MessageBuilder<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> {
    fbb_: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    start_: flatbuffers::WIPOffset<flatbuffers::TableUnfinishedWIPOffset>,
}
impl<'a: 'b, 'b, A: flatbuffers::Allocator + 'a> MessageBuilder<'a, 'b, A> {
    #[inline]
    pub fn add_version(&mut self, version: MetadataVersion) {
        self.fbb_
            .push_slot::<MetadataVersion>(Message::VT_VERSION, version, MetadataVersion::V1);
    }
    #[inline]
    pub fn add_header_type(&mut self, header_type: MessageHeader) {
        self.fbb_.push_slot::<MessageHeader>(
            Message::VT_HEADER_TYPE,
            header_type,
            MessageHeader::NONE,
        );
    }
    #[inline]
    pub fn add_header(&mut self, header: flatbuffers::WIPOffset<flatbuffers::UnionWIPOffset>) {
        self.fbb_
            .push_slot_always::<flatbuffers::WIPOffset<_>>(Message::VT_HEADER, header);
    }
    #[inline]
    pub fn add_bodyLength(&mut self, bodyLength: i64) {
        self.fbb_
            .push_slot::<i64>(Message::VT_BODYLENGTH, bodyLength, 0);
    }
    #[inline]
    pub fn add_custom_metadata(
        &mut self,
        custom_metadata: flatbuffers::WIPOffset<
            flatbuffers::Vector<'b, flatbuffers::ForwardsUOffset<KeyValue<'b>>>,
        >,
    ) {
        self.fbb_.push_slot_always::<flatbuffers::WIPOffset<_>>(
            Message::VT_CUSTOM_METADATA,
            custom_metadata,
        );
    }
    #[inline]
    pub fn new(_fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>) -> MessageBuilder<'a, 'b, A> {
        let start = _fbb.start_table();
        MessageBuilder {
            fbb_: _fbb,
            start_: start,
        }
    }
    #[inline]
    pub fn finish(self) -> flatbuffers::WIPOffset<Message<'a>> {
        let o = self.fbb_.end_table(self.start_);
        flatbuffers::WIPOffset::new(o.value())
    }
}

impl core::fmt::Debug for Message<'_> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let mut ds = f.debug_struct("Message");
        ds.field("version", &self.version());
        ds.field("header_type", &self.header_type());
        match self.header_type() {
            MessageHeader::Schema => {
                if let Some(x) = self.header_as_schema() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::DictionaryBatch => {
                if let Some(x) = self.header_as_dictionary_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::RecordBatch => {
                if let Some(x) = self.header_as_record_batch() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::Tensor => {
                if let Some(x) = self.header_as_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            MessageHeader::SparseTensor => {
                if let Some(x) = self.header_as_sparse_tensor() {
                    ds.field("header", &x)
                } else {
                    ds.field(
                        "header",
                        &"InvalidFlatbuffer: Union discriminant does not match value.",
                    )
                }
            }
            _ => {
                let x: Option<()> = None;
                ds.field("header", &x)
            }
        };
        ds.field("bodyLength", &self.bodyLength());
        ds.field("custom_metadata", &self.custom_metadata());
        ds.finish()
    }
}
#[inline]
pub fn root_as_message(buf: &[u8]) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root::<Message>(buf)
}
#[inline]
pub fn size_prefixed_root_as_message(
    buf: &[u8],
) -> Result<Message, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root::<Message>(buf)
}
#[inline]
pub fn root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::root_with_opts::<Message<'b>>(opts, buf)
}
#[inline]
pub fn size_prefixed_root_as_message_with_opts<'b, 'o>(
    opts: &'o flatbuffers::VerifierOptions,
    buf: &'b [u8],
) -> Result<Message<'b>, flatbuffers::InvalidFlatbuffer> {
    flatbuffers::size_prefixed_root_with_opts::<Message<'b>>(opts, buf)
}
#[inline]
pub unsafe fn root_as_message_unchecked(buf: &[u8]) -> Message {
    unsafe { flatbuffers::root_unchecked::<Message>(buf) }
}
#[inline]
pub unsafe fn size_prefixed_root_as_message_unchecked(buf: &[u8]) -> Message {
    unsafe { flatbuffers::size_prefixed_root_unchecked::<Message>(buf) }
}
#[inline]
pub fn finish_message_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish(root, None);
}

#[inline]
pub fn finish_size_prefixed_message_buffer<'a, 'b, A: flatbuffers::Allocator + 'a>(
    fbb: &'b mut flatbuffers::FlatBufferBuilder<'a, A>,
    root: flatbuffers::WIPOffset<Message<'a>>,
) {
    fbb.finish_size_prefixed(root, None);
}
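
// Hand-written illustrative sketch (not flatc output): assembling a Message
// that carries a RecordBatch header union, finishing the buffer, and reading
// it back through the verifying `root_as_message` entry point. It assumes
// `MetadataVersion::V5` is available from the generated Schema module.
#[cfg(test)]
mod message_round_trip_example {
    use super::*;

    #[test]
    fn record_batch_message_round_trip() {
        let mut fbb = flatbuffers::FlatBufferBuilder::new();

        // An empty RecordBatch table is enough to exercise the union plumbing.
        let batch = RecordBatch::create(&mut fbb, &RecordBatchArgs::default());
        let message = Message::create(
            &mut fbb,
            &MessageArgs {
                version: MetadataVersion::V5,
                header_type: MessageHeader::RecordBatch,
                header: Some(batch.as_union_value()),
                bodyLength: 0,
                custom_metadata: None,
            },
        );
        finish_message_buffer(&mut fbb, message);

        let msg = root_as_message(fbb.finished_data()).expect("buffer should verify");
        assert_eq!(msg.header_type(), MessageHeader::RecordBatch);
        assert!(msg.header_as_record_batch().is_some());
        assert_eq!(msg.bodyLength(), 0);
    }
}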