Add VarInt discriminant type (#15)
parent 024786e618
commit 05cf9c6e6b
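
Context for the diff below: previously the derived Encoder/Decoder always wrote an enum discriminant as a single unsigned byte (write_u8 / read_u8); this change lets an enum opt into VarInt discriminants through the same data_type attribute that fields already use. A minimal usage sketch, assuming the derive macros are imported as in game.rs further down; ExampleAction is a made-up name for illustration, not part of this commit:

#[derive(Encoder, Decoder, Debug, PartialEq)]
#[data_type(with = "var_int")] // enum-level attribute handled by parse_discriminant_type below
pub enum ExampleAction {
    // Variant discriminants (0, 1, ... unless set explicitly) are written with
    // write_var_i32 and read back with read_var_i32 instead of write_u8 / read_u8.
    First,
    Second,
}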
@@ -7,14 +7,13 @@ pub(crate) enum DeriveInputParserError {
    UnsupportedData,
    /// Data fields must be named.
    UnnamedDataFields,
    FieldError {
        field_error: FieldError,
    },
    /// Possible errors while parsing attributes.
    AttributeError { attribute_error: AttributeError },
}

/// Possible errors while parsing field.
/// Possible errors while parsing attributes.
#[derive(Debug)]
pub(crate) enum FieldError {
pub(crate) enum AttributeError {
    /// Failed to parse field meta due incorrect syntax.
    BadAttributeSyntax { syn_error: SynError },
    /// Unsupported field attribute type.
@@ -24,22 +23,22 @@ pub(crate) enum FieldError {
    AttributeWrongValueType,
}

impl From<FieldError> for DeriveInputParserError {
    fn from(field_error: FieldError) -> Self {
        DeriveInputParserError::FieldError { field_error }
impl From<AttributeError> for DeriveInputParserError {
    fn from(attribute_error: AttributeError) -> Self {
        DeriveInputParserError::AttributeError { attribute_error }
    }
}

impl From<SynError> for DeriveInputParserError {
    fn from(syn_error: SynError) -> Self {
        DeriveInputParserError::FieldError {
            field_error: FieldError::BadAttributeSyntax { syn_error },
        DeriveInputParserError::AttributeError {
            attribute_error: AttributeError::BadAttributeSyntax { syn_error },
        }
    }
}

impl From<SynError> for FieldError {
impl From<SynError> for AttributeError {
    fn from(syn_error: SynError) -> Self {
        FieldError::BadAttributeSyntax { syn_error }
        AttributeError::BadAttributeSyntax { syn_error }
    }
}
@@ -18,7 +18,11 @@ pub fn derive_encoder(tokens: TokenStream) -> TokenStream {

    TokenStream::from(match derive_parse_result {
        DeriveInputParseResult::Struct { name, fields } => render_struct_encoder(name, &fields),
        DeriveInputParseResult::Enum { name, variants } => render_enum_encoder(name, &variants),
        DeriveInputParseResult::Enum {
            name,
            discriminant_type,
            variants,
        } => render_enum_encoder(name, &discriminant_type, &variants),
    })
}

@@ -29,6 +33,10 @@ pub fn derive_decoder(tokens: TokenStream) -> TokenStream {

    TokenStream::from(match derive_parse_result {
        DeriveInputParseResult::Struct { name, fields } => render_struct_decoder(name, &fields),
        DeriveInputParseResult::Enum { name, variants } => render_enum_decoder(name, &variants),
        DeriveInputParseResult::Enum {
            name,
            discriminant_type,
            variants,
        } => render_enum_decoder(name, &discriminant_type, &variants),
    })
}
@@ -1,8 +1,10 @@
use crate::error::{DeriveInputParserError, FieldError};
use crate::error::{AttributeError, DeriveInputParserError};
use proc_macro2::Ident;
use std::iter::FromIterator;
use syn::punctuated::Punctuated;
use syn::{Data, DeriveInput, ExprLit, Field, Fields, FieldsNamed, Lit, Meta, NestedMeta, Type};
use syn::{
    Attribute, Data, DeriveInput, ExprLit, Fields, FieldsNamed, Lit, Meta, NestedMeta, Type,
};
use syn::{Error as SynError, Variant};
use syn::{Expr, Token};

@@ -13,12 +15,13 @@ pub(crate) enum DeriveInputParseResult<'a> {
    },
    Enum {
        name: &'a Ident,
        discriminant_type: DiscriminantType,
        variants: Vec<VariantData<'a>>,
    },
}

pub(crate) struct VariantData<'a> {
    pub(crate) discriminant: u8,
    pub(crate) discriminant: usize,
    pub(crate) name: &'a Ident,
    pub(crate) fields: Vec<FieldData<'a>>,
}
@@ -26,16 +29,22 @@ pub(crate) struct VariantData<'a> {
pub(crate) struct FieldData<'a> {
    pub(crate) name: &'a Ident,
    pub(crate) ty: &'a Type,
    pub(crate) attribute: Attribute,
    pub(crate) attribute: AttributeData,
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum Attribute {
pub(crate) enum AttributeData {
    With { module: String },
    MaxLength { length: usize },
    Empty,
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum DiscriminantType {
    UnsignedByte,
    VarInt,
}

pub(crate) fn parse_derive_input(
    input: &DeriveInput,
) -> Result<DeriveInputParseResult, DeriveInputParserError> {
@@ -52,24 +61,41 @@ pub(crate) fn parse_derive_input(
        },
        Data::Enum(data_enum) => {
            let variants = parse_variants(&data_enum.variants)?;
            let discriminant_type = parse_discriminant_type(&input.attrs)?;

            Ok(DeriveInputParseResult::Enum { name, variants })
            Ok(DeriveInputParseResult::Enum {
                name,
                discriminant_type,
                variants,
            })
        }
        _ => Err(DeriveInputParserError::UnsupportedData),
    }
}

fn parse_discriminant_type(
    attributes: &Vec<Attribute>,
) -> Result<DiscriminantType, DeriveInputParserError> {
    let nested_metas = parse_attributes_nested_metas(attributes)?;
    let attribute = parse_attribute(nested_metas)?;

    match attribute {
        AttributeData::With { module } if module == "var_int" => Ok(DiscriminantType::VarInt),
        _ => Ok(DiscriminantType::UnsignedByte),
    }
}

fn parse_variants(
    variants: &Punctuated<Variant, Token![,]>,
) -> Result<Vec<VariantData>, DeriveInputParserError> {
    variants
        .iter()
        .enumerate()
        .map(|(idx, v)| parse_variant(idx as u8, v))
        .map(|(idx, v)| parse_variant(idx, v))
        .collect()
}

fn parse_variant(idx: u8, variant: &Variant) -> Result<VariantData, DeriveInputParserError> {
fn parse_variant(idx: usize, variant: &Variant) -> Result<VariantData, DeriveInputParserError> {
    let discriminant = parse_variant_discriminant(variant).unwrap_or(idx);
    let name = &variant.ident;
@@ -86,7 +112,7 @@ fn parse_variant(idx: u8, variant: &Variant) -> Result<VariantData, DeriveInputP
    })
}

fn parse_variant_discriminant(variant: &Variant) -> Option<u8> {
fn parse_variant_discriminant(variant: &Variant) -> Option<usize> {
    variant
        .discriminant
        .as_ref()
@@ -106,7 +132,7 @@ fn parse_fields(named_fields: &FieldsNamed) -> Result<Vec<FieldData>, DeriveInpu
        let name = field.ident.as_ref().unwrap();
        let ty = &field.ty;

        let nested_metas = parse_field_nested_metas(field)?;
        let nested_metas = parse_attributes_nested_metas(&field.attrs)?;
        let attribute = parse_attribute(nested_metas)?;

        fields_data.push(FieldData {
@@ -119,9 +145,10 @@ fn parse_fields(named_fields: &FieldsNamed) -> Result<Vec<FieldData>, DeriveInpu
    Ok(fields_data)
}

fn parse_field_nested_metas(field: &Field) -> Result<Vec<NestedMeta>, DeriveInputParserError> {
    let parsed_metas = field
        .attrs
fn parse_attributes_nested_metas(
    attributes: &Vec<Attribute>,
) -> Result<Vec<NestedMeta>, DeriveInputParserError> {
    let parsed_metas = attributes
        .iter()
        .filter(|a| a.path.is_ident("data_type"))
        .map(|a| a.parse_meta())
@@ -131,56 +158,56 @@ fn parse_field_nested_metas(field: &Field) -> Result<Vec<NestedMeta>, DeriveInpu
        .into_iter()
        .map(|m| match m {
            Meta::List(meta_list) => Ok(Vec::from_iter(meta_list.nested)),
            _ => Err(FieldError::UnsupportedAttribute),
            _ => Err(AttributeError::UnsupportedAttribute),
        })
        .collect::<Result<Vec<Vec<NestedMeta>>, FieldError>>()?;
        .collect::<Result<Vec<Vec<NestedMeta>>, AttributeError>>()?;

    Ok(nested_metas.into_iter().flatten().collect())
}

fn parse_attribute(nested_metas: Vec<NestedMeta>) -> Result<Attribute, DeriveInputParserError> {
    let attribute_parsers: Vec<fn(&NestedMeta) -> Result<Attribute, FieldError>> =
fn parse_attribute(nested_metas: Vec<NestedMeta>) -> Result<AttributeData, DeriveInputParserError> {
    let attribute_parsers: Vec<fn(&NestedMeta) -> Result<AttributeData, AttributeError>> =
        vec![get_module_attribute, get_max_length_attribute];

    for nested_meta in nested_metas.iter() {
        for attribute_parser in attribute_parsers.iter() {
            let attribute = attribute_parser(nested_meta)?;

            if attribute != Attribute::Empty {
            if attribute != AttributeData::Empty {
                return Ok(attribute);
            }
        }
    }

    Ok(Attribute::Empty)
    Ok(AttributeData::Empty)
}

fn get_module_attribute(nested_meta: &NestedMeta) -> Result<Attribute, FieldError> {
fn get_module_attribute(nested_meta: &NestedMeta) -> Result<AttributeData, AttributeError> {
    if let NestedMeta::Meta(Meta::NameValue(named_meta)) = nested_meta {
        if matches!(&named_meta.path, path if path.is_ident("with")) {
            return match &named_meta.lit {
                Lit::Str(lit_str) => Ok(Attribute::With {
                Lit::Str(lit_str) => Ok(AttributeData::With {
                    module: lit_str.value(),
                }),
                _ => Err(FieldError::AttributeWrongValueType),
                _ => Err(AttributeError::AttributeWrongValueType),
            };
        }
    }

    Ok(Attribute::Empty)
    Ok(AttributeData::Empty)
}

fn get_max_length_attribute(nested_meta: &NestedMeta) -> Result<Attribute, FieldError> {
fn get_max_length_attribute(nested_meta: &NestedMeta) -> Result<AttributeData, AttributeError> {
    if let NestedMeta::Meta(Meta::NameValue(named_meta)) = nested_meta {
        if matches!(&named_meta.path, path if path.is_ident("max_length")) {
            return match &named_meta.lit {
                Lit::Int(lit_int) => Ok(Attribute::MaxLength {
                    length: lit_int.base10_parse::<usize>()?,
                Lit::Int(lit_int) => Ok(AttributeData::MaxLength {
                    length: lit_int.base10_parse()?,
                }),
                _ => Err(FieldError::AttributeWrongValueType),
                _ => Err(AttributeError::AttributeWrongValueType),
            };
        }
    }

    Ok(Attribute::Empty)
    Ok(AttributeData::Empty)
}
@@ -1,4 +1,4 @@
use crate::parse::{Attribute, FieldData, VariantData};
use crate::parse::{AttributeData, DiscriminantType, FieldData, VariantData};
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::{Ident, Span};
use quote::quote;
@@ -24,8 +24,13 @@ pub(crate) fn render_struct_decoder(name: &Ident, fields: &Vec<FieldData>) -> To
    }
}

pub(crate) fn render_enum_decoder(name: &Ident, variants: &Vec<VariantData>) -> TokenStream2 {
    let render_variants = render_variants(variants);
pub(crate) fn render_enum_decoder(
    name: &Ident,
    discriminant_type: &DiscriminantType,
    variants: &Vec<VariantData>,
) -> TokenStream2 {
    let render_variants = render_variants(discriminant_type, variants);
    let render_discriminant_type = render_discriminant_type(discriminant_type);

    quote! {
        #[automatically_derived]
@@ -33,31 +38,40 @@ pub(crate) fn render_enum_decoder(name: &Ident, variants: &Vec<VariantData>) ->
            type Output = Self;

            fn decode<R: std::io::Read>(reader: &mut R) -> Result<Self::Output, crate::error::DecodeError> {
                let type_id = reader.read_u8()?;
                let type_id = #render_discriminant_type;

                match type_id {
                    #render_variants
                    _ => Err(DecodeError::UnknownEnumType { type_id }),
                    _ => Err(DecodeError::UnknownEnumType { type_id: type_id as usize, }),
                }
            }
        }
    }
}

fn render_variants(variants: &Vec<VariantData>) -> TokenStream2 {
    variants.iter().map(|v| render_variant(v)).collect()
fn render_variants(
    discriminant_type: &DiscriminantType,
    variants: &Vec<VariantData>,
) -> TokenStream2 {
    variants
        .iter()
        .map(|v| render_variant(discriminant_type, v))
        .collect()
}

fn render_variant(variant: &VariantData) -> TokenStream2 {
fn render_variant(discriminant_type: &DiscriminantType, variant: &VariantData) -> TokenStream2 {
    if variant.fields.is_empty() {
        render_unit_variant(variant)
        render_unit_variant(discriminant_type, variant)
    } else {
        render_struct_variant(variant)
        render_struct_variant(discriminant_type, variant)
    }
}

fn render_unit_variant(variant: &VariantData) -> TokenStream2 {
    let discriminant = variant.discriminant;
fn render_unit_variant(
    discriminant_type: &DiscriminantType,
    variant: &VariantData,
) -> TokenStream2 {
    let discriminant = render_discriminant(discriminant_type, variant.discriminant);
    let name = variant.name;

    quote! {
@@ -65,8 +79,11 @@ fn render_unit_variant(variant: &VariantData) -> TokenStream2 {
    }
}

fn render_struct_variant(variant: &VariantData) -> TokenStream2 {
    let discriminant = variant.discriminant;
fn render_struct_variant(
    discriminant_type: &DiscriminantType,
    variant: &VariantData,
) -> TokenStream2 {
    let discriminant = render_discriminant(discriminant_type, variant.discriminant);
    let name = variant.name;
    let fields = &variant.fields;
@@ -84,6 +101,30 @@ fn render_struct_variant(variant: &VariantData) -> TokenStream2 {
    }
}

fn render_discriminant_type(discriminant_type: &DiscriminantType) -> TokenStream2 {
    match discriminant_type {
        DiscriminantType::UnsignedByte => {
            quote!(reader.read_u8()?;)
        }
        DiscriminantType::VarInt => {
            quote!(reader.read_var_i32()?;)
        }
    }
}

fn render_discriminant(discriminant_type: &DiscriminantType, discriminant: usize) -> TokenStream2 {
    match discriminant_type {
        DiscriminantType::UnsignedByte => {
            let u8 = discriminant as u8;
            quote!(#u8)
        }
        DiscriminantType::VarInt => {
            let i32 = discriminant as i32;
            quote!(#i32)
        }
    }
}

fn render_field_names_joined_comma(fields: &Vec<FieldData>) -> TokenStream2 {
    fields.iter().map(|f| f.name).map(|n| quote!(#n,)).collect()
}
@@ -97,9 +138,9 @@ fn render_field(field: &FieldData) -> TokenStream2 {
    let ty = field.ty;

    match &field.attribute {
        Attribute::With { module } => render_with_field(name, module),
        Attribute::MaxLength { length } => render_max_length_field(name, *length as u16),
        Attribute::Empty => render_simple_field(name, ty),
        AttributeData::With { module } => render_with_field(name, module),
        AttributeData::MaxLength { length } => render_max_length_field(name, *length as u16),
        AttributeData::Empty => render_simple_field(name, ty),
    }
}
@@ -1,4 +1,4 @@
use crate::parse::{Attribute, FieldData, VariantData};
use crate::parse::{AttributeData, DiscriminantType, FieldData, VariantData};
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::{Ident, Span};
use quote::quote;
@@ -18,8 +18,12 @@ pub(crate) fn render_struct_encoder(name: &Ident, fields: &Vec<FieldData>) -> To
    }
}

pub(crate) fn render_enum_encoder(name: &Ident, variants: &Vec<VariantData>) -> TokenStream2 {
    let render_variants = render_variants(variants);
pub(crate) fn render_enum_encoder(
    name: &Ident,
    discriminant_type: &DiscriminantType,
    variants: &Vec<VariantData>,
) -> TokenStream2 {
    let render_variants = render_variants(discriminant_type, variants);

    quote! {
        #[automatically_derived]
@@ -35,34 +39,49 @@ pub(crate) fn render_enum_encoder(name: &Ident, variants: &Vec<VariantData>) ->
    }
}

fn render_variants(variants: &Vec<VariantData>) -> TokenStream2 {
    variants.iter().map(|v| render_variant(v)).collect()
fn render_variants(
    discriminant_type: &DiscriminantType,
    variants: &Vec<VariantData>,
) -> TokenStream2 {
    variants
        .iter()
        .map(|v| render_variant(discriminant_type, v))
        .collect()
}

fn render_variant(variant: &VariantData) -> TokenStream2 {
fn render_variant(discriminant_type: &DiscriminantType, variant: &VariantData) -> TokenStream2 {
    if variant.fields.is_empty() {
        render_unit_variant(variant)
        render_unit_variant(discriminant_type, variant)
    } else {
        render_struct_variant(variant)
        render_struct_variant(discriminant_type, variant)
    }
}

fn render_unit_variant(variant: &VariantData) -> TokenStream2 {
fn render_unit_variant(
    discriminant_type: &DiscriminantType,
    variant: &VariantData,
) -> TokenStream2 {
    let discriminant = variant.discriminant;
    let name = variant.name;

    let render_discriminant_type = render_discriminant_type(discriminant_type, discriminant);

    quote! {
        Self::#name => {
            writer.write_u8(#discriminant)?;
            #render_discriminant_type
        }
    }
}

fn render_struct_variant(variant: &VariantData) -> TokenStream2 {
fn render_struct_variant(
    discriminant_type: &DiscriminantType,
    variant: &VariantData,
) -> TokenStream2 {
    let discriminant = variant.discriminant;
    let name = variant.name;
    let fields = &variant.fields;

    let render_discriminant_type = render_discriminant_type(discriminant_type, discriminant);
    let field_names_joined_comma = render_field_names_joined_comma(fields);
    let render_fields = render_fields(fields, false);
@@ -70,13 +89,31 @@ fn render_struct_variant(variant: &VariantData) -> TokenStream2 {
        Self::#name {
            #field_names_joined_comma
        } => {
            writer.write_u8(#discriminant)?;
            #render_discriminant_type

            #render_fields
        }
    }
}

fn render_discriminant_type(
    discriminant_type: &DiscriminantType,
    discriminant: usize,
) -> TokenStream2 {
    match discriminant_type {
        DiscriminantType::UnsignedByte => {
            let u8 = discriminant as u8;

            quote!(writer.write_u8(#u8)?;)
        }
        DiscriminantType::VarInt => {
            let var_i32 = discriminant as i32;

            quote!(writer.write_var_i32(#var_i32)?;)
        }
    }
}

fn render_field_names_joined_comma(fields: &Vec<FieldData>) -> TokenStream2 {
    fields.iter().map(|f| f.name).map(|n| quote!(#n,)).collect()
}
@@ -89,9 +126,11 @@ fn render_field(field: &FieldData, with_self: bool) -> TokenStream2 {
    let name = field.name;

    match &field.attribute {
        Attribute::With { module } => render_with_field(name, module, with_self),
        Attribute::MaxLength { length } => render_max_length_field(name, *length as u16, with_self),
        Attribute::Empty => render_simple_field(name, with_self),
        AttributeData::With { module } => render_with_field(name, module, with_self),
        AttributeData::MaxLength { length } => {
            render_max_length_field(name, *length as u16, with_self)
        }
        AttributeData::Empty => render_simple_field(name, with_self),
    }
}
@@ -65,7 +65,7 @@ pub enum DecodeError {
    },
    /// Type id was not parsed as valid enum value.
    UnknownEnumType {
        type_id: u8,
        type_id: usize,
    },
    TagDecodeError {
        tag_decode_error: TagDecodeError,
@@ -1,5 +1,7 @@
use crate::data::chat::Message;
use crate::decoder::Decoder;
use crate::decoder::DecoderReadExt;
use crate::encoder::EncoderWriteExt;
use crate::error::DecodeError;
use byteorder::{ReadBytesExt, WriteBytesExt};
use minecraft_protocol_derive::{Decoder, Encoder};
@@ -19,6 +21,7 @@ pub enum GameClientBoundPacket {
    ChunkData(ChunkData),
    GameDisconnect(GameDisconnect),
    BossBar(BossBar),
    EntityAction(EntityAction),
}

impl GameServerBoundPacket {
@@ -55,6 +58,7 @@ impl GameClientBoundPacket {
            GameClientBoundPacket::ChunkData(_) => 0x21,
            GameClientBoundPacket::JoinGame(_) => 0x25,
            GameClientBoundPacket::BossBar(_) => 0x0D,
            GameClientBoundPacket::EntityAction(_) => 0x1B,
        }
    }
@@ -305,6 +309,29 @@ impl BossBar {
    }
}

#[derive(Encoder, Decoder, Debug, PartialEq)]
pub struct EntityAction {
    #[data_type(with = "var_int")]
    pub entity_id: i32,
    pub action_id: EntityActionId,
    #[data_type(with = "var_int")]
    pub jump_boost: i32,
}

#[derive(Encoder, Decoder, Debug, PartialEq)]
#[data_type(with = "var_int")]
pub enum EntityActionId {
    StartSneaking,
    StopSneaking,
    LeaveBad,
    StartSprinting,
    StopSprinting,
    StartJumpWithHorse,
    StopJumpWithHorse,
    OpenHorseInventory,
    StartFlyingWithElytra,
}

#[cfg(test)]
mod tests {
    use crate::data::chat::Payload;
@@ -626,4 +653,37 @@ mod tests {
            action: BossBarAction::Remove,
        }
    }

    #[test]
    fn test_entity_action_encode() {
        let entity_action = EntityAction {
            entity_id: 12345,
            action_id: EntityActionId::StartFlyingWithElytra,
            jump_boost: i32::MAX,
        };

        let mut vec = Vec::new();
        entity_action.encode(&mut vec).unwrap();

        assert_eq!(
            vec,
            include_bytes!("../../../test/packet/game/entity_action.dat").to_vec()
        );
    }

    #[test]
    fn test_entity_action_decode() {
        let mut cursor =
            Cursor::new(include_bytes!("../../../test/packet/game/entity_action.dat").to_vec());
        let entity_action = EntityAction::decode(&mut cursor).unwrap();

        assert_eq!(
            entity_action,
            EntityAction {
                entity_id: 12345,
                action_id: EntityActionId::StartFlyingWithElytra,
                jump_boost: i32::MAX,
            }
        );
    }
}
protocol/test/packet/game/entity_action.dat (new file)
@@ -0,0 +1 @@
(binary test data not shown)