diff --git a/Cargo.toml b/Cargo.toml index 5dfce4ae..70383d0a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,14 +1,31 @@ [workspace] members = [ - "parser", - "parser/visitable-derive", - "parser/generator", - "checker", - "checker/binary-serialize-derive", - # "lsp/server", - # "checker/specification" + "parser", + "parser/visitable-derive", + "parser/generator", + "checker", + "checker/binary-serialize-derive", + # "lsp/server", + # "checker/specification" ] + +[workspace.lints.clippy] +all = "deny" +pedantic = "deny" +cast_precision_loss = "warn" +cast_possible_truncation = "warn" +cast_sign_loss = "warn" +default_trait_access = "allow" +missing_errors_doc = "allow" +missing_panics_doc = "allow" +implicit_hasher = "allow" +module_name_repetitions = "allow" +too_many_lines = "allow" +new_without_default = "allow" +result_unit_err = "allow" + + [package] name = "ezno" description = "A JavaScript type checker and compiler. For use as a library or through the CLI" diff --git a/checker/Cargo.toml b/checker/Cargo.toml index 4f50b964..7abccccf 100644 --- a/checker/Cargo.toml +++ b/checker/Cargo.toml @@ -11,6 +11,9 @@ categories = ["compilers"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lints] +workspace = true + [features] default = [] ezno-parser = ["parser"] @@ -23,7 +26,7 @@ binary-serialize-derive = { path = "./binary-serialize-derive", version = "0.0.1 temporary-annex = "0.1.0" derive-enum-from-into = "0.1.0" derive-debug-extras = { version = "0.2.0", features = [ - "auto-debug-single-tuple-inline", + "auto-debug-single-tuple-inline", ] } enum-variants-strings = "0.2" iterator-endiate = "0.2" diff --git a/checker/examples/check.rs b/checker/examples/check.rs index 44734fce..857e9982 100644 --- a/checker/examples/check.rs +++ b/checker/examples/check.rs @@ -12,7 +12,7 @@ fn main() { let (diagnostics, post_check_data) = check_project::<_, synthesis::EznoParser>( path.to_path_buf(), - HashSet::from_iter(std::iter::once(ezno_checker::INTERNAL_DEFINITION_FILE_PATH.into())), + std::iter::once(ezno_checker::INTERNAL_DEFINITION_FILE_PATH.into()).collect(), |path: &std::path::Path| { if path == PathBuf::from(ezno_checker::INTERNAL_DEFINITION_FILE_PATH) { Some(ezno_checker::INTERNAL_DEFINITION_FILE.to_owned()) @@ -30,7 +30,7 @@ fn main() { if args.iter().any(|arg| arg == "--types") { eprintln!("Types:"); for item in post_check_data.types.into_vec_temp() { - eprintln!("\t{:?}", item); + eprintln!("\t{item:?}"); } } if args.iter().any(|arg| arg == "--events") { @@ -38,14 +38,14 @@ fn main() { let entry_module = post_check_data.modules.remove(&post_check_data.entry_source).unwrap(); for item in entry_module.facts.get_events() { - eprintln!("\t{:?}", item); + eprintln!("\t{item:?}"); } } } eprintln!("Diagnostics:"); - for diagnostic in diagnostics.into_iter() { - eprintln!("\t{}", diagnostic.reason()) + for diagnostic in diagnostics { + eprintln!("\t{}", diagnostic.reason()); } } diff --git a/checker/specification/build.rs b/checker/specification/build.rs index 37659ea8..ea26cf83 100644 --- a/checker/specification/build.rs +++ b/checker/specification/build.rs @@ -63,13 +63,14 @@ fn markdown_lines_append_test_to_rust( let blocks = { let mut blocks = Vec::new(); let mut current_filename = None; - while let Some((_, line)) = lines.next() { + for (_, line) in lines.by_ref() { if line == "```ts" { break; } } let mut code = String::new(); - while let Some((_, line)) = lines.next() { + + for (_, line) in lines.by_ref() { if let Some(path) = 
line.strip_prefix("// in ") { if !code.trim().is_empty() { blocks.push(( @@ -93,7 +94,7 @@ fn markdown_lines_append_test_to_rust( }; let errors = { let mut errors = Vec::new(); - while let Some((_, line)) = lines.next() { + for (_, line) in lines.by_ref() { if line.is_empty() || !line.starts_with('-') { if !errors.is_empty() { break; diff --git a/checker/specification/test.rs b/checker/specification/test.rs index cf7fcbc0..eba4b68e 100644 --- a/checker/specification/test.rs +++ b/checker/specification/test.rs @@ -59,15 +59,13 @@ fn check_errors( |path| { if path == std::path::Path::new(checker::INTERNAL_DEFINITION_FILE_PATH) { Some(checker::INTERNAL_DEFINITION_FILE.to_owned()) + } else if code.len() == 1 { + Some(code[0].1.to_owned()) } else { - if code.len() == 1 { - Some(code[0].1.to_owned()) - } else { - code.iter().find_map(|(code_path, content)| { - (std::path::Path::new(code_path) == path) - .then_some(content.to_owned().to_owned()) - }) - } + code.iter().find_map(|(code_path, content)| { + (std::path::Path::new(code_path) == path) + .then_some(content.to_owned().to_owned()) + }) } }, type_check_options, @@ -91,7 +89,7 @@ fn check_errors( }) .collect(); - if &diagnostics != expected_diagnostics { + if diagnostics != expected_diagnostics { panic!( "{}", pretty_assertions::Comparison::new(expected_diagnostics, &diagnostics).to_string() diff --git a/checker/src/behavior/assignments.rs b/checker/src/behavior/assignments.rs index f0dc711e..6fcdb12c 100644 --- a/checker/src/behavior/assignments.rs +++ b/checker/src/behavior/assignments.rs @@ -39,6 +39,7 @@ pub enum AssignmentReturnStatus { } impl Reference { + #[must_use] pub fn get_position(&self) -> SpanWithSource { match self { Reference::Variable(_, span) | Reference::Property { span, .. } => span.clone(), diff --git a/checker/src/behavior/constant_functions.rs b/checker/src/behavior/constant_functions.rs index a8bf3ca7..aea236cf 100644 --- a/checker/src/behavior/constant_functions.rs +++ b/checker/src/behavior/constant_functions.rs @@ -46,7 +46,7 @@ pub(crate) fn call_constant_function( .last() .ok_or(ConstantFunctionError::BadCall)? .to_type() - .map_err(|_| ConstantFunctionError::BadCall)?, + .map_err(|()| ConstantFunctionError::BadCall)?, ); let Type::Constant(Constant::Number(num)) = second_argument_type else { @@ -115,7 +115,7 @@ pub(crate) fn call_constant_function( .first() .ok_or(ConstantFunctionError::BadCall)? .to_type() - .map_err(|_| ConstantFunctionError::BadCall)?; + .map_err(|()| ConstantFunctionError::BadCall)?; let ty_as_string = print_type(ty, types, &environment.as_general_context(), debug); Ok(ConstantOutput::Diagnostic(format!("Type is: {ty_as_string}"))) } @@ -124,14 +124,14 @@ pub(crate) fn call_constant_function( .first() .ok_or(ConstantFunctionError::BadCall)? 
.to_type() - .map_err(|_| ConstantFunctionError::BadCall)?; + .map_err(|()| ConstantFunctionError::BadCall)?; if let Type::Function(func, _) | Type::FunctionReference(func, _) = types.get_type_by_id(ty) { let effects = &types.functions.get(func).ok_or(ConstantFunctionError::BadCall)?.effects; // TODO print using a different function - Ok(ConstantOutput::Diagnostic(format!("{:#?}", effects))) + Ok(ConstantOutput::Diagnostic(format!("{effects:#?}"))) } else { Ok(ConstantOutput::Diagnostic("not a function".to_owned())) } @@ -145,7 +145,7 @@ pub(crate) fn call_constant_function( Some(this_ty), ) = (on, first_argument) { - let type_id = this_ty.to_type().map_err(|_| ConstantFunctionError::BadCall)?; + let type_id = this_ty.to_type().map_err(|()| ConstantFunctionError::BadCall)?; let value = types.register_type(Type::Function(*func, ThisValue::Passed(type_id))); Ok(ConstantOutput::Value(value)) } else { @@ -171,7 +171,7 @@ pub(crate) fn call_constant_function( .facts .prototypes .get(&first.to_type().unwrap()) - .cloned() + .copied() .unwrap_or(TypeId::NULL_TYPE); Ok(ConstantOutput::Value(prototype)) } else { @@ -197,7 +197,7 @@ pub(crate) fn call_constant_function( .first() .ok_or(ConstantFunctionError::BadCall)? .to_type() - .map_err(|_| ConstantFunctionError::BadCall)?; + .map_err(|()| ConstantFunctionError::BadCall)?; // TODO temp!!! let arg = call_site_type_args .iter() @@ -235,7 +235,7 @@ pub(crate) fn call_constant_function( .first() .ok_or(ConstantFunctionError::BadCall)? .to_type() - .map_err(|_| ConstantFunctionError::BadCall)? + .map_err(|()| ConstantFunctionError::BadCall)? ) .is_dependent() ))), diff --git a/checker/src/behavior/functions.rs b/checker/src/behavior/functions.rs index a4bb5935..111668c2 100644 --- a/checker/src/behavior/functions.rs +++ b/checker/src/behavior/functions.rs @@ -28,20 +28,20 @@ pub enum ThisValue { impl ThisValue { pub(crate) fn get( - &self, + self, environment: &mut Environment, types: &TypeStore, - position: SpanWithSource, + position: &SpanWithSource, ) -> TypeId { match self { - ThisValue::Passed(value) => *value, + ThisValue::Passed(value) => value, ThisValue::UseParent => environment.get_value_of_this(types, position), } } - pub(crate) fn get_passed(&self) -> Option { + pub(crate) fn get_passed(self) -> Option { match self { - ThisValue::Passed(value) => Some(*value), + ThisValue::Passed(value) => Some(value), ThisValue::UseParent => None, } } @@ -115,7 +115,7 @@ pub fn synthesise_hoisted_statement_function PropertyValue { @@ -210,7 +210,7 @@ pub trait SynthesisableFunction { ); } -/// TODO might be generic if FunctionBehavior becomes generic +/// TODO might be generic if `FunctionBehavior` becomes generic pub enum FunctionRegisterBehavior<'a, M: crate::ASTImplementation> { ArrowFunction { expecting: TypeId, @@ -252,6 +252,7 @@ pub enum FunctionRegisterBehavior<'a, M: crate::ASTImplementation> { pub struct ClassPropertiesToRegister<'a, M: ASTImplementation>(pub Vec>); impl<'a, M: crate::ASTImplementation> FunctionRegisterBehavior<'a, M> { + #[must_use] pub fn is_async(&self) -> bool { match self { FunctionRegisterBehavior::ArrowFunction { is_async, .. } @@ -263,6 +264,7 @@ impl<'a, M: crate::ASTImplementation> FunctionRegisterBehavior<'a, M> { } } + #[must_use] pub fn is_generator(&self) -> bool { match self { FunctionRegisterBehavior::ExpressionFunction { is_generator, .. 
} diff --git a/checker/src/behavior/objects.rs b/checker/src/behavior/objects.rs index 59172b83..9e9eb00a 100644 --- a/checker/src/behavior/objects.rs +++ b/checker/src/behavior/objects.rs @@ -32,9 +32,10 @@ impl ObjectBuilder { value: PropertyValue, position: Option, ) { - environment.facts.register_property(self.object, publicity, under, value, true, position) + environment.facts.register_property(self.object, publicity, under, value, true, position); } + #[must_use] pub fn build_object(self) -> TypeId { self.object } diff --git a/checker/src/behavior/operations.rs b/checker/src/behavior/operations.rs index bbc79304..05a34cbb 100644 --- a/checker/src/behavior/operations.rs +++ b/checker/src/behavior/operations.rs @@ -63,7 +63,7 @@ pub fn evaluate_pure_binary_operation_handle_errors { evaluate_equality_inequality_operation( lhs, - operator, + &operator, rhs, &mut checking_data.types, checking_data.options.strict_casts, @@ -82,7 +82,7 @@ pub fn evaluate_mathematical_operation( ) -> Result { fn attempt_constant_math_operator( lhs: TypeId, - operator: MathematicalAndBitwise, + operator: &MathematicalAndBitwise, rhs: TypeId, types: &mut TypeStore, strict_casts: bool, @@ -153,7 +153,7 @@ pub fn evaluate_mathematical_operation( return Ok(types.register_type(crate::Type::Constructor(constructor))); } - attempt_constant_math_operator(lhs, operator, rhs, types, strict_casts) + attempt_constant_math_operator(lhs, &operator, rhs, types, strict_casts) } /// Not canonical / reducible @@ -178,7 +178,7 @@ pub enum CanonicalEqualityAndInequality { pub fn evaluate_equality_inequality_operation( lhs: TypeId, - operator: EqualityAndInequality, + operator: &EqualityAndInequality, rhs: TypeId, types: &mut TypeStore, strict_casts: bool, @@ -202,7 +202,7 @@ pub fn evaluate_equality_inequality_operation( match attempt_constant_equality(lhs, rhs, types) { Ok(ty) => Ok(ty), - Err(_) => { + Err(()) => { unreachable!("should have been caught by above") } } @@ -248,7 +248,7 @@ pub fn evaluate_equality_inequality_operation( EqualityAndInequality::StrictNotEqual => { let equality_result = evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::StrictEqual, + &EqualityAndInequality::StrictEqual, lhs, types, strict_casts, @@ -267,7 +267,7 @@ pub fn evaluate_equality_inequality_operation( EqualityAndInequality::NotEqual => { let equality_result = evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::Equal, + &EqualityAndInequality::Equal, lhs, types, strict_casts, @@ -281,7 +281,7 @@ pub fn evaluate_equality_inequality_operation( } EqualityAndInequality::GreaterThan => evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::LessThan, + &EqualityAndInequality::LessThan, lhs, types, strict_casts, @@ -289,7 +289,7 @@ pub fn evaluate_equality_inequality_operation( EqualityAndInequality::LessThanEqual => { let lhs = evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::StrictEqual, + &EqualityAndInequality::StrictEqual, lhs, types, strict_casts, @@ -300,7 +300,7 @@ pub fn evaluate_equality_inequality_operation( } else if lhs == TypeId::FALSE { evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::LessThan, + &EqualityAndInequality::LessThan, lhs, types, strict_casts, @@ -308,7 +308,7 @@ pub fn evaluate_equality_inequality_operation( } else { let rhs = evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::LessThan, + &EqualityAndInequality::LessThan, lhs, types, strict_casts, @@ -318,7 +318,7 @@ pub fn 
evaluate_equality_inequality_operation( } EqualityAndInequality::GreaterThanEqual => evaluate_equality_inequality_operation( rhs, - EqualityAndInequality::LessThanEqual, + &EqualityAndInequality::LessThanEqual, lhs, types, strict_casts, @@ -339,7 +339,7 @@ pub fn evaluate_logical_operation_with_expression< M: crate::ASTImplementation, >( lhs: TypeId, - operator: Logical, + operator: &Logical, rhs: &M::Expression, checking_data: &mut CheckingData, environment: &mut Environment, @@ -399,7 +399,7 @@ pub fn evaluate_logical_operation_with_expression< Logical::NullCoalescing => { let is_lhs_null = evaluate_equality_inequality_operation( lhs, - EqualityAndInequality::StrictEqual, + &EqualityAndInequality::StrictEqual, TypeId::NULL_TYPE, &mut checking_data.types, checking_data.options.strict_casts, diff --git a/checker/src/behavior/template_literal.rs b/checker/src/behavior/template_literal.rs index 4df02412..eaf32c78 100644 --- a/checker/src/behavior/template_literal.rs +++ b/checker/src/behavior/template_literal.rs @@ -4,7 +4,7 @@ use source_map::{Span, SpanWithSource}; use crate::{ behavior::objects::ObjectBuilder, - types::{cast_as_string, SynthesisedArgument}, + types::{calling::CallingInput, cast_as_string, SynthesisedArgument}, CheckingData, Constant, Environment, Instance, Type, TypeId, }; @@ -26,13 +26,13 @@ where M::Expression: 'a, { fn part_to_type( - first: TemplateLiteralPart, + first: &TemplateLiteralPart, environment: &mut Environment, checking_data: &mut CheckingData, ) -> crate::TypeId { match first { TemplateLiteralPart::Static(static_part) => { - checking_data.types.new_constant_type(Constant::String(static_part.to_owned())) + checking_data.types.new_constant_type(Constant::String((*static_part).to_owned())) } TemplateLiteralPart::Dynamic(expression) => { // TODO tidy @@ -67,7 +67,7 @@ where for part in parts_iter { match part { p @ TemplateLiteralPart::Static(_) => { - let value = part_to_type(p, environment, checking_data); + let value = part_to_type(&p, environment, checking_data); static_parts.append( environment, crate::context::facts::Publicity::Public, @@ -79,12 +79,12 @@ where static_part_count += 1; } p @ TemplateLiteralPart::Dynamic(_) => { - let ty = part_to_type(p, environment, checking_data); + let ty = part_to_type(&p, environment, checking_data); arguments.push(SynthesisedArgument::NonSpread { ty, // TODO position position: SpanWithSource::NULL_SPAN, - }) + }); } } } @@ -101,21 +101,23 @@ where let call_site = position.clone().with_source(environment.get_source()); crate::types::calling::call_type_handle_errors( tag, - crate::types::calling::CalledWithNew::None, - crate::behavior::functions::ThisValue::UseParent, - None, - arguments, - call_site, + CallingInput { + called_with_new: crate::types::calling::CalledWithNew::None, + this_value: crate::behavior::functions::ThisValue::UseParent, + call_site, + call_site_type_arguments: None, + }, environment, + arguments, checking_data, ) .0 } else { // Bit weird but makes Rust happy if let Some(first) = parts_iter.next() { - let mut acc = part_to_type(first, environment, checking_data); + let mut acc = part_to_type(&first, environment, checking_data); for rest in parts_iter { - let other = part_to_type(rest, environment, checking_data); + let other = part_to_type(&rest, environment, checking_data); let result = super::operations::evaluate_mathematical_operation( acc, crate::behavior::operations::MathematicalAndBitwise::Add, @@ -125,14 +127,14 @@ where ); match result { Ok(result) => acc = result, - Err(_) => { + Err(()) 
=> { crate::utils::notify!("Invalid template literal concatenation"); } } } acc } else { - checking_data.types.new_constant_type(Constant::String("".into())) + checking_data.types.new_constant_type(Constant::String(String::new())) } } } diff --git a/checker/src/context/bases.rs b/checker/src/context/bases.rs index ab38dabd..8bad43e2 100644 --- a/checker/src/context/bases.rs +++ b/checker/src/context/bases.rs @@ -22,7 +22,7 @@ impl Bases { pub(crate) fn merge(&mut self, mut bases: Bases, context_id: ContextId) { self.immutable_bases.extend(bases.immutable_bases); - for (ty, (ctx_ceil, base)) in bases.mutable_bases.into_iter() { + for (ty, (ctx_ceil, base)) in bases.mutable_bases { let existing = if ctx_ceil.0 == context_id { self.immutable_bases.insert(ty, base).is_some() } else { diff --git a/checker/src/context/environment.rs b/checker/src/context/environment.rs index 0d8e2eca..676ebb27 100644 --- a/checker/src/context/environment.rs +++ b/checker/src/context/environment.rs @@ -151,7 +151,7 @@ impl<'a> Environment<'a> { /// /// Will evaluate the expression with the right timing and conditions, including never if short circuit /// - /// TODO finish operator. Unify increment and decrement. The RHS span should be fine with Span::NULL ...? Maybe RHS type could be None to accommodate + /// TODO finish operator. Unify increment and decrement. The RHS span should be fine with `Span::NULL` ...? Maybe RHS type could be None to accommodate pub fn assign_to_assignable_handle_errors( &mut self, lhs: Assignable, @@ -185,7 +185,7 @@ impl<'a> Environment<'a> { ); match get_property_handle_errors { Ok(i) => i.get_value(), - Err(_) => TypeId::ERROR_TYPE, + Err(()) => TypeId::ERROR_TYPE, } } } @@ -209,7 +209,7 @@ impl<'a> Environment<'a> { .set_property( on, publicity, - with, + &with, new, &checking_data.types, Some(span), @@ -220,7 +220,7 @@ impl<'a> Environment<'a> { fn set_property_error_to_type_check_error( ctx: &GeneralContext, - error: SetPropertyError, + error: &SetPropertyError, assignment_span: SpanWithSource, types: &TypeStore, new: TypeId, @@ -232,7 +232,10 @@ impl<'a> Environment<'a> { SetPropertyError::DoesNotMeetConstraint(constraint, _) => { TypeCheckError::AssignmentError(AssignmentError::PropertyConstraint { property_type: TypeStringRepresentation::from_type_id( - constraint, ctx, types, false, + *constraint, + ctx, + types, + false, ), value_type: TypeStringRepresentation::from_type_id( new, ctx, types, false, @@ -257,7 +260,7 @@ impl<'a> Environment<'a> { Err(error) => { let error = set_property_error_to_type_check_error( &self.as_general_context(), - error, + &error, assignment_span, &checking_data.types, new, @@ -296,7 +299,7 @@ impl<'a> Environment<'a> { Err(error) => { let error = set_property_error_to_type_check_error( &self.as_general_context(), - error, + &error, assignment_span, &checking_data.types, new, @@ -342,7 +345,7 @@ impl<'a> Environment<'a> { Err(error) => { let error = set_property_error_to_type_check_error( &self.as_general_context(), - error, + &error, assignment_span, &checking_data.types, new, @@ -358,7 +361,7 @@ impl<'a> Environment<'a> { let expression = expression.unwrap(); let new = evaluate_logical_operation_with_expression( existing, - operator, + &operator, expression, checking_data, self, @@ -372,7 +375,7 @@ impl<'a> Environment<'a> { Err(error) => { let error = set_property_error_to_type_check_error( &self.as_general_context(), - error, + &error, assignment_span, &checking_data.types, new, @@ -503,6 +506,7 @@ impl<'a> Environment<'a> { } } + #[must_use] 
pub fn get_environment_id(&self) -> super::ContextId { self.context_id } @@ -512,11 +516,12 @@ impl<'a> Environment<'a> { } /// TODO decidable & private? - pub fn property_in(&self, on: TypeId, property: PropertyKey) -> bool { + #[must_use] + pub fn property_in(&self, on: TypeId, property: &PropertyKey) -> bool { self.facts_chain().any(|facts| match facts.current_properties.get(&on) { Some(v) => { v.iter().any( - |(_, p, v)| if let PropertyValue::Deleted = v { false } else { *p == property }, + |(_, p, v)| if let PropertyValue::Deleted = v { false } else { p == property }, ) } None => false, @@ -524,8 +529,8 @@ impl<'a> Environment<'a> { } /// TODO decidable & private? - pub fn delete_property(&mut self, on: TypeId, property: PropertyKey) -> bool { - let existing = self.property_in(on, property.clone()); + pub fn delete_property(&mut self, on: TypeId, property: &PropertyKey) -> bool { + let existing = self.property_in(on, property); let under = property.into_owned(); @@ -600,41 +605,36 @@ impl<'a> Environment<'a> { None, site.clone(), ); - match get_property { - Some((kind, result)) => Ok(match kind { + if let Some((kind, result)) = get_property { + Ok(match kind { PropertyKind::Getter => Instance::GValue(result), // TODO instance.property...? PropertyKind::Generic | PropertyKind::Direct => Instance::RValue(result), - }), - None => { - let types = &checking_data.types; - let ctx = &self.as_general_context(); - checking_data.diagnostics_container.add_error( - TypeCheckError::PropertyDoesNotExist { - // TODO printing temp - property: match key { - PropertyKey::String(s) => { - crate::diagnostics::PropertyRepresentation::StringKey(s.to_string()) - } - PropertyKey::Type(t) => { - crate::diagnostics::PropertyRepresentation::Type( - crate::types::printing::print_type( - t, - &checking_data.types, - &self.as_general_context(), - false, - ), - ) - } - }, - on: crate::diagnostics::TypeStringRepresentation::from_type_id( - on, ctx, types, false, + }) + } else { + let types = &checking_data.types; + let ctx = &self.as_general_context(); + checking_data.diagnostics_container.add_error(TypeCheckError::PropertyDoesNotExist { + // TODO printing temp + property: match key { + PropertyKey::String(s) => { + crate::diagnostics::PropertyRepresentation::StringKey(s.to_string()) + } + PropertyKey::Type(t) => crate::diagnostics::PropertyRepresentation::Type( + crate::types::printing::print_type( + t, + &checking_data.types, + &self.as_general_context(), + false, ), - site, - }, - ); - Err(()) - } + ), + }, + on: crate::diagnostics::TypeStringRepresentation::from_type_id( + on, ctx, types, false, + ), + site, + }); + Err(()) } } @@ -646,21 +646,18 @@ impl<'a> Environment<'a> { ) -> Result { let (in_root, crossed_boundary, og_var) = { let this = self.get_variable_unbound(name); - match this { - Some((in_root, crossed_boundary, og_var)) => { - (in_root, crossed_boundary, og_var.clone()) - } - None => { - checking_data.diagnostics_container.add_error( - TypeCheckError::CouldNotFindVariable { - variable: name, - // TODO - possibles: Default::default(), - position, - }, - ); - return Err(TypeId::ERROR_TYPE); - } + if let Some((in_root, crossed_boundary, og_var)) = this { + (in_root, crossed_boundary, og_var.clone()) + } else { + checking_data.diagnostics_container.add_error( + TypeCheckError::CouldNotFindVariable { + variable: name, + // TODO + possibles: Default::default(), + position, + }, + ); + return Err(TypeId::ERROR_TYPE); } }; @@ -719,28 +716,26 @@ impl<'a> Environment<'a> { "Open poly type treated as 
immutable free variable" ); return Ok(VariableWithValue(og_var.clone(), *ot)); - } else { - crate::utils::notify!("Free variable!"); } + + crate::utils::notify!("Free variable!"); } } // TODO is primitive, then can just use type - match constraint { - Some(constraint) => *constraint, - None => { - crate::utils::notify!("TODO record that parent variable is `any` here"); - TypeId::ANY_TYPE - } + if let Some(constraint) = constraint { + *constraint + } else { + crate::utils::notify!("TODO record that parent variable is `any` here"); + TypeId::ANY_TYPE } } VariableMutability::Mutable { reassignment_constraint } => { - match reassignment_constraint { - Some(constraint) => constraint, - None => { - crate::utils::notify!("TODO record that parent variable is `any` here"); - TypeId::ANY_TYPE - } + if let Some(constraint) = reassignment_constraint { + constraint + } else { + crate::utils::notify!("TODO record that parent variable is `any` here"); + TypeId::ANY_TYPE } } }; @@ -748,7 +743,7 @@ impl<'a> Environment<'a> { // TODO temp position let mut value = None; - for event in self.facts.events.iter() { + for event in &self.facts.events { // TODO explain why don't need to detect sets if let Event::ReadsReference { reference: other_reference, @@ -888,7 +883,7 @@ impl<'a> Environment<'a> { } pub fn return_value(&mut self, returned: TypeId, returned_position: SpanWithSource) { - self.facts.events.push(Event::Return { returned, returned_position }) + self.facts.events.push(Event::Return { returned, returned_position }); } /// Updates **a existing property** @@ -898,7 +893,7 @@ impl<'a> Environment<'a> { &mut self, on: TypeId, publicity: Publicity, - under: PropertyKey, + under: &PropertyKey, new: TypeId, types: &TypeStore, setter_position: Option, @@ -907,7 +902,7 @@ impl<'a> Environment<'a> { on, publicity, under, - PropertyValue::Value(new), + &PropertyValue::Value(new), self, &mut CheckThings, types, @@ -1094,7 +1089,7 @@ impl<'a> Environment<'a> { } ImportKind::Everything => { if let Ok(Ok(ref exports)) = exports { - for (name, (variable, mutability)) in exports.named.iter() { + for (name, (variable, mutability)) in &exports.named { // TODO are variables put into scope? if let Scope::Module { ref mut exported, .. 
} = self.context_type.scope { exported.named.push((name.clone(), (*variable, *mutability))); diff --git a/checker/src/context/facts.rs b/checker/src/context/facts.rs index 0852914f..bed4a586 100644 --- a/checker/src/context/facts.rs +++ b/checker/src/context/facts.rs @@ -72,6 +72,7 @@ impl Facts { self.events.push(Event::Throw(value, position)); } + #[must_use] pub fn get_events(&self) -> &[Event] { &self.events } @@ -105,6 +106,7 @@ impl Facts { ty } + #[must_use] pub fn get_properties_on_type_for_this_level( &self, ty: TypeId, @@ -131,15 +133,15 @@ impl Facts { pub(crate) fn extend_ref(&mut self, other: &Facts) { self.events.extend(other.events.iter().cloned()); self.queued_events.extend(other.queued_events.iter().cloned()); - self.variable_current_value.extend(other.variable_current_value.iter().to_owned()); - self.prototypes.extend(other.prototypes.iter().to_owned()); + self.variable_current_value.extend(other.variable_current_value.iter().clone()); + self.prototypes.extend(other.prototypes.iter().clone()); self.current_properties .extend(other.current_properties.iter().map(|(l, r)| (*l, r.clone()))); self.closure_current_values .extend(other.closure_current_values.iter().map(|(l, r)| (l.clone(), *r))); - self.configurable.extend(other.configurable.iter().to_owned()); - self.enumerable.extend(other.enumerable.iter().to_owned()); - self.writable.extend(other.writable.iter().to_owned()); - self.frozen.extend(other.frozen.iter().to_owned()); + self.configurable.extend(other.configurable.iter().clone()); + self.enumerable.extend(other.enumerable.iter().clone()); + self.writable.extend(other.writable.iter().clone()); + self.frozen.extend(other.frozen.iter().clone()); } } diff --git a/checker/src/context/mod.rs b/checker/src/context/mod.rs index a019e782..a288a809 100644 --- a/checker/src/context/mod.rs +++ b/checker/src/context/mod.rs @@ -305,7 +305,7 @@ impl Context { /// Declares a new variable in the environment and returns the new variable /// /// **THIS IS USED FOR HOISTING, DOES NOT SET THE VALUE** - /// TODO maybe name: VariableDeclarator to include destructuring ...? + /// TODO maybe name: `VariableDeclarator` to include destructuring ...? 
/// TODO hoisted vs declared pub fn register_variable<'b>( &mut self, @@ -396,10 +396,10 @@ impl Context { } }; - if !existing_variable { - Ok(ty) - } else { + if existing_variable { Err(CannotRedeclareVariable { name }) + } else { + Ok(ty) } } @@ -410,18 +410,18 @@ impl Context { behavior: VariableRegisterBehavior, checking_data: &mut CheckingData, ) -> TypeId { - match self.register_variable(name, declared_at.clone(), behavior, &mut checking_data.types) + if let Ok(ty) = + self.register_variable(name, declared_at.clone(), behavior, &mut checking_data.types) { - Ok(ty) => ty, - Err(_) => { - checking_data.diagnostics_container.add_error( - TypeCheckError::CannotRedeclareVariable { - name: name.to_owned(), - position: declared_at, - }, - ); - TypeId::ERROR_TYPE - } + ty + } else { + checking_data.diagnostics_container.add_error( + TypeCheckError::CannotRedeclareVariable { + name: name.to_owned(), + position: declared_at, + }, + ); + TypeId::ERROR_TYPE } } @@ -479,9 +479,9 @@ impl Context { // .unwrap(); if let GeneralContext::Syntax(syn) = ctx { if !syn.facts.events.is_empty() { - writeln!(buf, "{}> Events:", indent).unwrap(); - for event in syn.facts.events.iter() { - writeln!(buf, "{} {:?}", indent, event).unwrap(); + writeln!(buf, "{indent}> Events:").unwrap(); + for event in &syn.facts.events { + writeln!(buf, "{indent} {event:?}").unwrap(); } } } @@ -494,7 +494,7 @@ impl Context { match types.get_type_by_id(on) { Type::RootPolyType(nature) => { fn does_type_have_mutable_constraint( - context: Context, + context: &Context, on: TypeId, ) -> bool { context.parents_iter().any(|env| { @@ -712,11 +712,11 @@ impl Context { | Scope::Function(FunctionScope::Function { this_type: this_constraint, .. }) => { Some(*this_constraint) } - Scope::Function(_) => None, Scope::FunctionAnnotation {} => todo!(), - Scope::StaticBlock { .. } => None, Scope::Conditional { .. } | Scope::Looping { .. } + | Scope::StaticBlock { .. } + | Scope::Function(_) | Scope::TryBlock { .. 
} | Scope::TypeAlias | Scope::Block {} @@ -728,7 +728,7 @@ impl Context { } } - /// Similar to [Context::get_this_unbound] + /// Similar to [`Context::get_this_unbound`] fn get_variable_unbound( &self, variable_name: &str, @@ -780,7 +780,7 @@ impl Context { ) -> Vec<(Publicity, PropertyKey<'static>, TypeId)> { let reversed_flattened_properties = self .parents_iter() - .flat_map(|ctx| { + .filter_map(|ctx| { let id = get_on_ctx!(ctx.context_id); let properties = get_on_ctx!(ctx.facts.current_properties.get(&base)); properties.map(|v| v.iter().rev()) @@ -811,7 +811,7 @@ impl Context { types: &TypeStore, ) -> Option> { fn get_property( - env: GeneralContext, + env: &GeneralContext, types: &TypeStore, on: TypeId, under: (Publicity, &PropertyKey), @@ -860,38 +860,35 @@ impl Context { // TODO need actual method for these, aka lowest - match types.get_type_by_id(on) { - Type::SpecialObject(obj) => { - todo!() - } - _ => { - let under = match under { - PropertyKey::Type(t) => { - PropertyKey::Type(self.get_poly_base(t, types).unwrap_or(t)) - } - under => under, - }; - types.get_fact_about_type(self, on, &get_property, (publicity, &under)) - } + if let Type::SpecialObject(obj) = types.get_type_by_id(on) { + todo!() + } else { + let under = match under { + PropertyKey::Type(t) => { + PropertyKey::Type(self.get_poly_base(t, types).unwrap_or(t)) + } + under @ PropertyKey::String(_) => under, + }; + types.get_fact_about_type(self, on, &get_property, (publicity, &under)) } } /// Note: this also returns base generic types like `Array` pub fn get_type_from_name(&self, name: &str) -> Option { - self.parents_iter().find_map(|env| get_on_ctx!(env.named_types.get(name))).cloned() + self.parents_iter().find_map(|env| get_on_ctx!(env.named_types.get(name))).copied() } - pub(crate) fn get_variable_name(&self, id: &VariableId) -> &str { - self.parents_iter().find_map(|env| get_on_ctx!(env.variable_names.get(id))).unwrap() + pub(crate) fn get_variable_name(&self, id: VariableId) -> &str { + self.parents_iter().find_map(|env| get_on_ctx!(env.variable_names.get(&id))).unwrap() } pub fn as_general_context(&self) -> GeneralContext { T::as_general_context(self) } - /// TODO doesn't look at aliases using get_type_fact! + /// TODO doesn't look at aliases using `get_type_fact`! pub fn is_frozen(&self, value: TypeId) -> Option { - self.parents_iter().find_map(|ctx| get_on_ctx!(ctx.facts.frozen.get(&value))).cloned() + self.parents_iter().find_map(|ctx| get_on_ctx!(ctx.facts.frozen.get(&value))).copied() } // TODO temp declaration @@ -1082,7 +1079,7 @@ impl Context { if let FunctionBehavior::Method { ref mut free_this_id, .. } = behavior { *free_this_id = type_id; } - *free_this_type = type_id + *free_this_type = type_id; } FunctionScope::Function { ref mut this_type, .. } => { // TODO this could be done conditionally to create less objects, but also doesn't introduce any bad side effects so @@ -1183,6 +1180,8 @@ impl Context { let return_type_annotation = function.return_type_annotation(&mut function_environment, checking_data); + // TODO: + #[allow(clippy::no_effect_underscore_binding)] let _expected_return_type: Option = expected_return; function_environment.context_type.location = location; @@ -1243,7 +1242,7 @@ impl Context { self.variable_names.extend(function_environment.variable_names); // TODO temp ... 
- for (on, mut properties) in facts.current_properties.into_iter() { + for (on, mut properties) in facts.current_properties { match self.facts.current_properties.entry(on) { hash_map::Entry::Occupied(mut occupied) => {} hash_map::Entry::Vacant(vacant) => { @@ -1252,7 +1251,7 @@ impl Context { } } - for (on, mut properties) in facts.closure_current_values.into_iter() { + for (on, mut properties) in facts.closure_current_values { match self.facts.closure_current_values.entry(on) { hash_map::Entry::Occupied(mut occupied) => {} hash_map::Entry::Vacant(vacant) => { @@ -1434,7 +1433,7 @@ impl Context { self.can_reference_this = can_reference_this; - for (on, mut properties) in facts.current_properties.into_iter() { + for (on, mut properties) in facts.current_properties { match self.facts.current_properties.entry(on) { hash_map::Entry::Occupied(mut occupied) => { occupied.get_mut().append(&mut properties); @@ -1498,15 +1497,14 @@ impl Context { pos: SpanWithSource, checking_data: &mut CheckingData, ) -> TypeId { - match self.get_type_from_name(name) { - Some(val) => val, - None => { - checking_data - .diagnostics_container - .add_error(TypeCheckError::CouldNotFindType(name, pos)); - - TypeId::ERROR_TYPE - } + if let Some(val) = self.get_type_from_name(name) { + val + } else { + checking_data + .diagnostics_container + .add_error(TypeCheckError::CouldNotFindType(name, pos)); + + TypeId::ERROR_TYPE } } @@ -1517,7 +1515,7 @@ impl Context { nominal: bool, parameters: Option<&[M::TypeParameter]>, extends: Option<&[M::TypeAnnotation]>, - position: SpanWithSource, + position: &SpanWithSource, types: &mut TypeStore, ) -> TypeId { // TODO declare here @@ -1643,9 +1641,9 @@ impl Context { .flat_map(|env| { get_on_ctx!(env.object_constraints.get(&on)) .iter() - .cloned() + .copied() .flatten() - .cloned() + .copied() .collect::>() }) .collect() @@ -1658,22 +1656,25 @@ impl Context { pub(crate) fn get_value_of_this( &mut self, types: &TypeStore, - position: SpanWithSource, + position: &SpanWithSource, ) -> TypeId { self.parents_iter() - .find_map(|env| match env { - GeneralContext::Syntax(ctx) => match ctx.context_type.scope { - Scope::Function( - FunctionScope::ArrowFunction { free_this_type, .. } - | FunctionScope::MethodFunction { free_this_type, .. }, - ) => Some(free_this_type), - Scope::Function(FunctionScope::Constructor { this_object_type, .. }) => { - Some(this_object_type) + .find_map(|env| { + if let GeneralContext::Syntax(ctx) = env { + match ctx.context_type.scope { + Scope::Function( + FunctionScope::ArrowFunction { free_this_type, .. } + | FunctionScope::MethodFunction { free_this_type, .. }, + ) => Some(free_this_type), + Scope::Function(FunctionScope::Constructor { + this_object_type, .. + }) => Some(this_object_type), + Scope::Function(FunctionScope::Function { this_type, .. }) => { + Some(this_type) + } + _ => None, } - Scope::Function(FunctionScope::Function { this_type, .. 
}) => Some(this_type), - _ => None, - }, - _ => { + } else { crate::utils::notify!("TODO get root type"); Some(TypeId::ERROR_TYPE) } @@ -1751,6 +1752,7 @@ pub enum Logical { } impl<'a, T: Clone> Logical<&'a T> { + #[must_use] pub fn cloned(self) -> Logical { match self { Logical::Pure(t) => Logical::Pure(t.clone()), diff --git a/checker/src/context/root.rs b/checker/src/context/root.rs index 823aa5a3..c08be30d 100644 --- a/checker/src/context/root.rs +++ b/checker/src/context/root.rs @@ -37,7 +37,7 @@ impl ContextType for Root { const HEADER: &[u8] = b"EZNO\0CONTEXT\0FILE"; impl RootContext { - /// Merges two [RootEnvironments]. May be used for multiple `.d.ts` files + /// Merges two [`RootEnvironments`]. May be used for multiple `.d.ts` files pub(crate) fn union(&mut self, other: Self) { // TODO this is bad, some things need to merge, inserting over existing will be bad self.variables.extend(other.variables); @@ -45,6 +45,7 @@ impl RootContext { // self.tys.extend(other.tys.into_iter()); } + #[must_use] pub fn new_with_primitive_references() -> Self { // TODO number might not be a reference at some point let named_types = [ @@ -110,7 +111,7 @@ impl RootContext { todo!() } - pub(crate) fn deserialize(source: Vec, backing_source: SourceId) -> Result { + pub(crate) fn deserialize(source: &[u8], backing_source: SourceId) -> Result { todo!() // let mut ctx = Root::new_with_primitive_references(); diff --git a/checker/src/diagnostics.rs b/checker/src/diagnostics.rs index da722d42..6d1265ec 100644 --- a/checker/src/diagnostics.rs +++ b/checker/src/diagnostics.rs @@ -1,5 +1,6 @@ //! Contains type checking errors, warnings and related structures +use crate::diagnostics; use serde::Serialize; use source_map::{SourceId, Span, SpanWithSource}; use std::{ @@ -46,12 +47,13 @@ impl Diagnostic { Diagnostic::Position { position: span, .. } => Left(Right(iter::once(span.source))), Diagnostic::PositionWithAdditionLabels { position: pos, labels, .. } => { Right(iter::once(pos.source).chain( - labels.iter().flat_map(|(_, span)| span.as_ref().map(|span| span.source)), + labels.iter().filter_map(|(_, span)| span.as_ref().map(|span| span.source)), )) } } } + #[must_use] pub fn reason(&self) -> &str { match self { Diagnostic::Global { reason, .. } @@ -60,6 +62,7 @@ impl Diagnostic { } } + #[must_use] pub fn reason_and_position(self) -> (String, Option) { match self { Diagnostic::Global { reason, .. } => (reason, None), @@ -81,32 +84,35 @@ pub struct DiagnosticsContainer { // TODO the add methods are the same... 
impl DiagnosticsContainer { + #[must_use] pub fn new() -> Self { Self { diagnostics: Default::default(), has_error: false } } pub fn add_error>(&mut self, error: T) { self.has_error = true; - self.diagnostics.push(error.into()) + self.diagnostics.push(error.into()); } pub fn add_warning>(&mut self, warning: T) { - self.diagnostics.push(warning.into()) + self.diagnostics.push(warning.into()); } pub fn add_info>(&mut self, info: T) { - self.diagnostics.push(info.into()) + self.diagnostics.push(info.into()); } + #[must_use] pub fn has_error(&self) -> bool { self.has_error } pub fn sources(&self) -> impl Iterator + '_ { - self.diagnostics.iter().flat_map(|item| item.sources()) + self.diagnostics.iter().flat_map(diagnostics::Diagnostic::sources) } #[doc(hidden)] + #[must_use] pub fn get_diagnostics(self) -> Vec { self.diagnostics } @@ -148,6 +154,7 @@ pub enum PropertyRepresentation { } impl TypeStringRepresentation { + #[must_use] pub fn from_type_id( id: TypeId, ctx: &GeneralContext, @@ -294,8 +301,7 @@ mod defined_errors_and_warnings { TypeCheckError::CouldNotFindVariable { variable, possibles, position } => { Diagnostic::Position { reason: format!( - "Could not find variable {} in scope", - variable, + "Could not find variable {variable} in scope", // possibles Consider '{:?}' ), position, @@ -303,15 +309,15 @@ mod defined_errors_and_warnings { } } TypeCheckError::CouldNotFindType(reference, pos) => Diagnostic::Position { - reason: format!("Could not find type '{}'", reference), + reason: format!("Could not find type '{reference}'"), position: pos, kind, }, TypeCheckError::PropertyDoesNotExist { property, on, site } => { Diagnostic::Position { reason: match property { - PropertyRepresentation::Type(ty) => format!("No property of type {} on {}", ty, on), - PropertyRepresentation::StringKey(property) => format!("No property '{}' on {}", property, on), + PropertyRepresentation::Type(ty) => format!("No property of type {ty} on {on}"), + PropertyRepresentation::StringKey(property) => format!("No property '{property}' on {on}"), }, position: site, kind, @@ -328,14 +334,12 @@ mod defined_errors_and_warnings { if let Some((restriction_pos, restriction)) = restriction { Diagnostic::PositionWithAdditionLabels { reason: format!( - "Argument of type {} is not assignable to parameter of type {}", - argument_type, restriction + "Argument of type {argument_type} is not assignable to parameter of type {restriction}" ), position: argument_position, labels: vec![( format!( - "{} was specialised with type {}", - parameter_type, restriction + "{parameter_type} was specialised with type {restriction}" ), Some(restriction_pos), )], @@ -344,12 +348,11 @@ mod defined_errors_and_warnings { } else { Diagnostic::PositionWithAdditionLabels { reason: format!( - "Argument of type {} is not assignable to parameter of type {}", - argument_type, parameter_type + "Argument of type {argument_type} is not assignable to parameter of type {parameter_type}", ), position: argument_position, labels: vec![( - format!("Parameter has type {}", parameter_type), + format!("Parameter has type {parameter_type}"), Some(parameter_position), )], kind, @@ -506,7 +509,7 @@ mod defined_errors_and_warnings { } } TypeCheckError::Unsupported { thing, at } => Diagnostic::Position { - reason: format!("Unsupported: {}", thing), + reason: format!("Unsupported: {thing}"), position: at, kind, }, @@ -517,8 +520,7 @@ mod defined_errors_and_warnings { position, } => Diagnostic::Position { reason: format!( - "{} constraint on function does not match 
synthesised form {}", - function_constraint, function_type + "{function_constraint} constraint on function does not match synthesised form {function_type}", ), position, kind, @@ -630,7 +632,7 @@ mod defined_errors_and_warnings { }, TypeCheckWarning::DeadBranch { expression_span, expression_value } => { Diagnostic::Position { - reason: format!("Expression is always {:?}", expression_value), + reason: format!("Expression is always {expression_value:?}"), position: expression_span, kind, } @@ -641,7 +643,7 @@ mod defined_errors_and_warnings { kind, }, TypeCheckWarning::Unimplemented { thing, at } => Diagnostic::Position { - reason: format!("Unsupported: {}", thing), + reason: format!("Unsupported: {thing}"), position: at, kind, }, diff --git a/checker/src/events/application.rs b/checker/src/events/application.rs index 53b2337b..3387eb84 100644 --- a/checker/src/events/application.rs +++ b/checker/src/events/application.rs @@ -32,15 +32,14 @@ pub(crate) fn apply_event( id, Some(&*type_arguments), ); - match value { - Some(ty) => ty, - None => { - crate::utils::notify!("emit a tdz error"); - TypeId::ERROR_TYPE - } + if let Some(ty) = value { + ty + } else { + crate::utils::notify!("emit a tdz error"); + TypeId::ERROR_TYPE } } - RootReference::This => this_value.get(environment, types, position), + RootReference::This => this_value.get(environment, types, &position), }; type_arguments.set_id_from_reference(id, value, types); } @@ -71,7 +70,7 @@ pub(crate) fn apply_event( let ty = substitute(under, type_arguments, environment, types); crate::types::properties::PropertyKey::from_type(ty, types) } - under => under, + under @ crate::types::properties::PropertyKey::String(_) => under, }; let (_, value) = @@ -99,7 +98,7 @@ pub(crate) fn apply_event( let ty = substitute(under, type_arguments, environment, types); crate::types::properties::PropertyKey::from_type(ty, types) } - under => under, + under @ crate::types::properties::PropertyKey::String(_) => under, }; let new = match new { @@ -111,7 +110,7 @@ pub(crate) fn apply_event( PropertyValue::Setter(_) => todo!(), // TODO this might be a different thing at some point PropertyValue::Deleted => { - environment.delete_property(on, under); + environment.delete_property(on, &under); return None; } }; @@ -146,7 +145,7 @@ pub(crate) fn apply_event( .register_property(on, publicity, under, new, true, position); } else { let returned = - set_property(on, publicity, under, new, environment, target, types, position) + set_property(on, publicity, &under, &new, environment, target, types, position) .unwrap(); if let Some(id) = reflects_dependency { @@ -175,12 +174,14 @@ pub(crate) fn apply_event( CallingTiming::Synchronous => { let result = crate::types::calling::call_type( on, - called_with_new, - Default::default(), - None, + crate::types::calling::CallingInput { + called_with_new, + this_value: Default::default(), + call_site_type_arguments: None, + // TODO: + call_site: source_map::SpanWithSource::NULL_SPAN, + }, with, - // TODO - source_map::SpanWithSource::NULL_SPAN, environment, target, types, @@ -296,7 +297,7 @@ pub(crate) fn apply_event( condition, truth, else_facts.variable_current_value.remove(&var).unwrap_or(*existing), - ) + ); }); } @@ -312,9 +313,9 @@ pub(crate) fn apply_event( let substituted_returned = substitute(returned, type_arguments, environment, types); if substituted_returned != TypeId::ERROR_TYPE { return Some(substituted_returned); - } else { - crate::utils::notify!("event returned error so skipped"); } + + crate::utils::notify!("event 
returned error so skipped"); } // TODO Needs a position (or not?) Event::CreateObject { referenced_in_scope_as, prototype, position } => { diff --git a/checker/src/events/helpers.rs b/checker/src/events/helpers.rs index 23a8b83a..216bf298 100644 --- a/checker/src/events/helpers.rs +++ b/checker/src/events/helpers.rs @@ -187,12 +187,12 @@ pub(crate) fn get_return_from_events<'a, T: crate::ReadFromFS, M: crate::ASTImpl /// This actually removes the events as they are caught pub(crate) fn extract_throw_events(events: Vec, thrown: &mut Vec) -> Vec { let mut new_events = Vec::new(); - for event in events.into_iter() { + for event in events { if let Event::Throw(value, position) = event { thrown.push(value); } else { // TODO nested grouping - new_events.push(event) + new_events.push(event); } } new_events diff --git a/checker/src/events/mod.rs b/checker/src/events/mod.rs index c4017aff..d083f5ba 100644 --- a/checker/src/events/mod.rs +++ b/checker/src/events/mod.rs @@ -29,9 +29,10 @@ pub enum RootReference { } impl RootReference { + #[must_use] pub fn get_name<'a>(&self, ctx: &'a GeneralContext) -> &'a str { match self { - Self::Variable(id) => get_on_ctx!(ctx.get_variable_name(id)), + Self::Variable(id) => get_on_ctx!(ctx.get_variable_name(*id)), Self::This => "this", } } diff --git a/checker/src/lib.rs b/checker/src/lib.rs index fd0d4183..b1e69d75 100644 --- a/checker/src/lib.rs +++ b/checker/src/lib.rs @@ -6,7 +6,10 @@ unused_mut, dead_code, irrefutable_let_patterns, - deprecated + deprecated, + clippy::new_without_default, + clippy::too_many_lines, + clippy::result_unit_err )] pub mod behavior; @@ -66,7 +69,7 @@ pub use source_map::{self, SourceId, Span}; /// Contains all the modules and mappings for import statements /// -/// TODO could files and synthesised_modules be merged? (with a change to the source map crate) +/// TODO could files and `synthesised_modules` be merged? 
(with a change to the source map crate) pub struct ModuleData<'a, FileReader, ModuleAST: ASTImplementation> { pub(crate) file_reader: &'a FileReader, pub(crate) current_working_directory: PathBuf, @@ -96,12 +99,16 @@ pub trait ASTImplementation: Sized { type ClassMethod: SynthesisableFunction; + /// # Errors + /// TODO fn module_from_string( source_id: SourceId, string: String, options: &Self::ParseOptions, ) -> Result; + /// # Errors + /// TODO fn definition_module_from_string( source_id: SourceId, string: String, @@ -251,6 +258,47 @@ impl<'a, T: crate::ReadFromFS, M: ASTImplementation> CheckingData<'a, T, M> { importing_path: &str, environment: &mut Environment, ) -> Result, CouldNotOpenFile> { + fn get_module<'a, T: crate::ReadFromFS, M: ASTImplementation>( + full_importer: &PathBuf, + environment: &mut Environment, + checking_data: &'a mut CheckingData, + ) -> Option, M::ParseError>> { + let existing = checking_data.modules.files.get_source_at_path(full_importer); + if let Some(existing) = existing { + Some(Ok(checking_data + .modules + .synthesised_modules + .get(&existing) + .expect("existing file, but not synthesised"))) + } else { + let content = (checking_data.modules.file_reader)(full_importer.as_ref()); + if let Some(content) = content { + let source = checking_data + .modules + .files + .new_source_id(full_importer.clone(), content.clone()); + + match M::module_from_string( + source, + content, + &checking_data.modules.parsing_options, + ) { + Ok(module) => { + let new_module_context = environment.get_root().new_module_context( + source, + module, + checking_data, + ); + Some(Ok(new_module_context)) + } + Err(err) => Some(Err(err)), + } + } else { + None + } + } + } + if importing_path.starts_with('.') { let from_path = self.modules.files.get_file_path(from); let from = PathBuf::from(importing_path); @@ -259,55 +307,14 @@ impl<'a, T: crate::ReadFromFS, M: ASTImplementation> CheckingData<'a, T, M> { .unwrap() .to_path_buf(); - fn get_module<'a, T: crate::ReadFromFS, M: ASTImplementation>( - full_importer: PathBuf, - environment: &mut Environment, - checking_data: &'a mut CheckingData, - ) -> Option, M::ParseError>> { - let existing = checking_data.modules.files.get_source_at_path(&full_importer); - if let Some(existing) = existing { - Some(Ok(checking_data - .modules - .synthesised_modules - .get(&existing) - .expect("existing file, but not synthesised"))) - } else { - let content = (checking_data.modules.file_reader)(full_importer.as_ref()); - if let Some(content) = content { - let source = checking_data - .modules - .files - .new_source_id(full_importer.to_path_buf(), content.clone()); - - match M::module_from_string( - source, - content, - &checking_data.modules.parsing_options, - ) { - Ok(module) => { - let new_module_context = environment.get_root().new_module_context( - source, - module, - checking_data, - ); - Some(Ok(new_module_context)) - } - Err(err) => Some(Err(err)), - } - } else { - None - } - } - } - let result = if full_importer.extension().is_some() { - get_module(full_importer.clone(), environment, self) + get_module(&full_importer, environment, self) } else { let mut result = None; for ext in ["ts", "tsx", "js"] { full_importer.set_extension(ext); // TODO change parse options based on extension - result = get_module(full_importer.clone(), environment, self); + result = get_module(&full_importer, environment, self); if result.is_some() { break; } @@ -341,7 +348,7 @@ impl<'a, T: crate::ReadFromFS, M: ASTImplementation> CheckingData<'a, T, M> { 
self.diagnostics_container.add_error(TypeCheckWarning::DeadBranch { expression_span: span, expression_value: value, - }) + }); } /// TODO temp, needs better place @@ -468,15 +475,12 @@ pub(crate) fn add_definition_files_to_root, ) { for path in type_definition_files { - let (source_id, content) = match checking_data.modules.get_file(&path) { - Some(result) => result, - None => { - checking_data.diagnostics_container.add_error(Diagnostic::Global { - reason: format!("could not find {}", path.display()), - kind: crate::DiagnosticKind::Error, - }); - continue; - } + let Some((source_id, content)) = checking_data.modules.get_file(&path) else { + checking_data.diagnostics_container.add_error(Diagnostic::Global { + reason: format!("could not find {}", path.display()), + kind: crate::DiagnosticKind::Error, + }); + continue; }; // TODO U::new_tdm_from_string diff --git a/checker/src/options.rs b/checker/src/options.rs index 0d159370..55327071 100644 --- a/checker/src/options.rs +++ b/checker/src/options.rs @@ -3,6 +3,8 @@ use std::any::Any; /// Settings for type checking /// TODO reach compat with tsc #[derive(serde::Deserialize)] +// TODO: Can be refactored with bit to reduce memory +#[allow(clippy::struct_excessive_bools)] pub struct TypeCheckOptions { /// Parameters cannot be reassigned pub constant_parameters: bool, diff --git a/checker/src/range_map.rs b/checker/src/range_map.rs index 3bddad3c..af02a9c8 100644 --- a/checker/src/range_map.rs +++ b/checker/src/range_map.rs @@ -14,6 +14,7 @@ impl Default for RangeMap { } impl RangeMap { + #[must_use] pub fn new() -> Self { Self { entries: Default::default() } } @@ -28,9 +29,10 @@ impl RangeMap { } /// Get the top level entry at some point + #[must_use] pub fn get(&self, point: u32) -> Option<&T> { self.entries - .range(0..(point + 1)) + .range(0..=point) // very important to reverse .rev() .find_map(|(_, v)| v.iter().find_map(|(e, v)| (*e > point).then_some(v))) diff --git a/checker/src/serialization.rs b/checker/src/serialization.rs index d81a3e52..b0801f8c 100644 --- a/checker/src/serialization.rs +++ b/checker/src/serialization.rs @@ -25,7 +25,7 @@ impl BinarySerializable for String { fn deserialize>(iter: &mut I, source: SourceId) -> Self { let len = iter.next().unwrap(); - String::from_iter(iter.by_ref().take(len as usize).map(|v| v as char)) + iter.by_ref().take(len as usize).map(|v| v as char).collect::() } } @@ -58,7 +58,7 @@ impl BinarySerializable for Option { impl BinarySerializable for Vec { fn serialize(self, buf: &mut Vec) { buf.extend_from_slice(&u16::try_from(self.len()).unwrap().to_le_bytes()); - for item in self.into_iter() { + for item in self { item.serialize(buf); } } @@ -71,7 +71,7 @@ impl BinarySerializable for Vec { impl BinarySerializable for Box { fn serialize(self, buf: &mut Vec) { - BinarySerializable::serialize(*self, buf) + BinarySerializable::serialize(*self, buf); } fn deserialize>(iter: &mut I, source: SourceId) -> Self { @@ -116,7 +116,7 @@ where fn serialize(self, buf: &mut Vec) { buf.extend_from_slice(&u16::try_from(self.len()).unwrap().to_le_bytes()); - for (k, v) in self.into_iter() { + for (k, v) in self { k.serialize(buf); v.serialize(buf); } @@ -135,7 +135,7 @@ where fn serialize(self, buf: &mut Vec) { buf.extend_from_slice(&u16::try_from(self.len()).unwrap().to_le_bytes()); - for v in self.into_iter() { + for v in self { v.serialize(buf); } } @@ -153,7 +153,7 @@ where { fn serialize(self, buf: &mut Vec) { buf.extend_from_slice(&u16::try_from(self.len()).unwrap().to_le_bytes()); - for (k, v) in 
self.into_iter() { + for (k, v) in self { k.serialize(buf); v.serialize(buf); } @@ -172,7 +172,7 @@ where fn serialize(self, buf: &mut Vec) { buf.extend_from_slice(&u16::try_from(self.len()).unwrap().to_le_bytes()); - for v in self.into_iter() { + for v in self { v.serialize(buf); } } @@ -259,7 +259,7 @@ impl BinarySerializable for ordered_float::NotNan { impl BinarySerializable for bool { fn serialize(self, buf: &mut Vec) { - buf.push(if self { 1 } else { 0 }) + buf.push(u8::from(self)); } fn deserialize>(iter: &mut I, source: SourceId) -> Self { diff --git a/checker/src/synthesis/block.rs b/checker/src/synthesis/block.rs index c3d13b0e..71101630 100644 --- a/checker/src/synthesis/block.rs +++ b/checker/src/synthesis/block.rs @@ -36,7 +36,7 @@ pub(super) fn synthesise_block( } } StatementOrDeclaration::Declaration(declaration) => { - synthesise_declaration(declaration, environment, checking_data) + synthesise_declaration(declaration, environment, checking_data); } } } @@ -62,7 +62,7 @@ pub(crate) fn synthesise_declaration( ) { match declaration { Declaration::Variable(declaration) => { - synthesise_variable_declaration(declaration, environment, checking_data, false) + synthesise_variable_declaration(declaration, environment, checking_data, false); } Declaration::Class(class) => { let constructor = synthesise_class_declaration(&class.on, environment, checking_data); @@ -78,7 +78,7 @@ pub(crate) fn synthesise_declaration( checking_data.diagnostics_container.add_error(TypeCheckError::ReDeclaredVariable { name: class.on.name.as_str(), position, - }) + }); } } Declaration::DeclareVariable(_) @@ -87,16 +87,12 @@ pub(crate) fn synthesise_declaration( | Declaration::Function(_) | Declaration::Enum(_) | Declaration::Interface(_) - | Declaration::TypeAlias(_) => {} - // Imports are hoisted - Declaration::Import(_) => {} + | Declaration::TypeAlias(_) + | Declaration::Import(_) => {} Declaration::Export(exported) => match &exported.on { parser::declarations::ExportDeclaration::Variable { exported, position } => { match exported { // Skipped as this is done earlier - parser::declarations::export::Exportable::Function(_) - | parser::declarations::export::Exportable::Interface(_) - | parser::declarations::export::Exportable::TypeAlias(_) => {} parser::declarations::export::Exportable::Class(class) => { // TODO mark as exported synthesise_class_declaration(class, environment, checking_data); @@ -105,7 +101,7 @@ pub(crate) fn synthesise_declaration( synthesise_variable_declaration(variable, environment, checking_data, true); } parser::declarations::export::Exportable::Parts(parts) => { - for part in parts.iter() { + for part in parts { let pair = super::hoisting::export_part_to_name_pair(part); if let Some(pair) = pair { let position = pair.position.with_source(environment.get_source()); @@ -128,7 +124,10 @@ pub(crate) fn synthesise_declaration( } } parser::declarations::export::Exportable::ImportAll { .. } - | parser::declarations::export::Exportable::ImportParts { .. } => {} + | parser::declarations::export::Exportable::ImportParts { .. 
} + | parser::declarations::export::Exportable::Function(_) + | parser::declarations::export::Exportable::Interface(_) + | parser::declarations::export::Exportable::TypeAlias(_) => {} } } parser::declarations::ExportDeclaration::Default { expression, position } => { diff --git a/checker/src/synthesis/classes.rs b/checker/src/synthesis/classes.rs index 5a16f63a..b0447fbe 100644 --- a/checker/src/synthesis/classes.rs +++ b/checker/src/synthesis/classes.rs @@ -108,7 +108,7 @@ pub(super) fn synthesise_class_declaration< ); let property = - function_to_property(getter_setter, method_ty, &mut checking_data.types); + function_to_property(&getter_setter, method_ty, &mut checking_data.types); let position = Some(method.position.clone().with_source(environment.get_source())); environment.facts.register_property( @@ -140,7 +140,7 @@ pub(super) fn synthesise_class_declaration< environment, checking_data, ); - static_property_keys.push(value) + static_property_keys.push(value); } _ => {} } @@ -170,7 +170,7 @@ pub(super) fn synthesise_class_declaration< // TODO ... static_property_keys.reverse(); - for member in class.members.iter() { + for member in &class.members { match &member.on { ClassMember::Method(Some(_), method) => { let publicity_kind = match method.name.get_ast_ref() { @@ -188,7 +188,9 @@ pub(super) fn synthesise_class_declaration< let value = match method.header { MethodHeader::Get(_) => PropertyValue::Getter(Box::new(function)), MethodHeader::Set(_) => PropertyValue::Setter(Box::new(function)), - _ => PropertyValue::Value(checking_data.types.new_function_type(function)), + MethodHeader::Regular { .. } => { + PropertyValue::Value(checking_data.types.new_function_type(function)) + } }; // (publicity_kind, property_key, PropertyOnClass::Function { method, property }) @@ -230,7 +232,7 @@ pub(super) fn synthesise_class_declaration< Scope::StaticBlock {}, checking_data, |environment, checking_data| { - synthesise_block(&block.0, environment, checking_data) + synthesise_block(&block.0, environment, checking_data); }, ); } diff --git a/checker/src/synthesis/declarations.rs b/checker/src/synthesis/declarations.rs index 5b311a78..8f8c71dd 100644 --- a/checker/src/synthesis/declarations.rs +++ b/checker/src/synthesis/declarations.rs @@ -12,7 +12,7 @@ pub(super) fn synthesise_variable_declaration( ) { match declaration { VariableDeclaration::ConstDeclaration { declarations, .. } => { - for variable_declaration in declarations.iter() { + for variable_declaration in declarations { synthesise_variable_declaration_item( variable_declaration, environment, @@ -27,7 +27,7 @@ pub(super) fn synthesise_variable_declaration( keyword: parser::Keyword(_, position), .. } => { - for variable_declaration in declarations.iter() { + for variable_declaration in declarations { let exported = exported.then(|| { let restriction = checking_data .type_mappings diff --git a/checker/src/synthesis/definitions.rs b/checker/src/synthesis/definitions.rs index 02a52ad9..405dc0ef 100644 --- a/checker/src/synthesis/definitions.rs +++ b/checker/src/synthesis/definitions.rs @@ -8,7 +8,7 @@ use crate::{ const DEFINITION_VAR_IS_CONSTANT: bool = true; -/// Interprets a definition module (.d.ts) and produces a [Environment]. Consumes the [TypeDefinitionModule] +/// Interprets a definition module (.d.ts) and produces a [Environment]. 
Consumes the [`TypeDefinitionModule`] /// TODO remove unwraps here and add to the existing error handler pub(super) fn type_definition_file( mut definition: parser::TypeDefinitionModule, @@ -32,7 +32,7 @@ pub(super) fn type_definition_file( // Hoisting names of interfaces, namespaces and types // At some point with binaries could remove this pass - for statement in definition.declarations.iter() { + for statement in &definition.declarations { match statement { TypeDefinitionModuleDeclaration::Interface(interface) => { let ty = env.new_interface::( @@ -40,7 +40,7 @@ pub(super) fn type_definition_file( interface.on.nominal_keyword.is_some(), interface.on.type_parameters.as_deref(), interface.on.extends.as_deref(), - interface.on.position.clone().with_source(source).clone(), + &interface.on.position.clone().with_source(source), &mut checking_data.types, ); idx_to_types.insert(interface.on.position.start, ty); @@ -65,7 +65,7 @@ pub(super) fn type_definition_file( } } - for declaration in definition.declarations.into_iter() { + for declaration in definition.declarations { match declaration { TypeDefinitionModuleDeclaration::Function(func) => { // TODO abstract @@ -77,7 +77,7 @@ pub(super) fn type_definition_file( &mut env, checking_data, func.performs.as_ref().into(), - declared_at.clone(), + &declared_at, crate::behavior::functions::FunctionBehavior::ArrowFunction { is_async: false }, None, ); @@ -87,7 +87,7 @@ pub(super) fn type_definition_file( base.parameters, base.return_type, // TODO - declared_at, + &declared_at, base.effects, base.constant_function, ); @@ -111,7 +111,7 @@ pub(super) fn type_definition_file( position, decorators, }) => { - for declaration in declarations.iter() { + for declaration in &declarations { let constraint = declaration.type_annotation.as_ref().map(|annotation| { synthesise_type_annotation(annotation, &mut env, checking_data) }); @@ -216,7 +216,7 @@ pub(super) fn type_definition_file( } let Environment { named_types, facts, variable_names, variables, .. 
} = env; - (Names { named_types, variable_names, variables }, facts) + (Names { variables, named_types, variable_names }, facts) } pub(crate) fn decorators_to_context(decorators: &[parser::Decorator]) -> Option { diff --git a/checker/src/synthesis/expressions.rs b/checker/src/synthesis/expressions.rs index 0b62a09a..6fbe0960 100644 --- a/checker/src/synthesis/expressions.rs +++ b/checker/src/synthesis/expressions.rs @@ -19,7 +19,7 @@ use crate::{ variables::VariableWithValue, }, synthesis::parser_property_key_to_checker_property_key, - types::properties::PropertyKey, + types::{calling::CallingInput, properties::PropertyKey}, Decidable, }; @@ -91,7 +91,7 @@ pub(super) fn synthesise_expression( } Expression::ArrayLiteral(elements, _) => { fn synthesise_array_item( - idx: Decidable, + idx: &Decidable, element: &SpreadExpression, environment: &mut Environment, checking_data: &mut CheckingData, @@ -104,8 +104,8 @@ pub(super) fn synthesise_expression( synthesise_expression(element, environment, checking_data, expecting); ( PropertyKey::from_usize(match idx { - Decidable::Known(idx) => idx, - _ => todo!(), + Decidable::Known(idx) => *idx, + Decidable::Unknown(_) => todo!(), }), expression_type, ) @@ -120,8 +120,8 @@ pub(super) fn synthesise_expression( crate::utils::notify!("Skipping spread"); ( PropertyKey::from_usize(match idx { - Decidable::Known(idx) => idx, - _ => todo!(), + Decidable::Known(idx) => *idx, + Decidable::Unknown(_) => todo!(), }), TypeId::ERROR_TYPE, ) @@ -130,8 +130,8 @@ pub(super) fn synthesise_expression( crate::utils::notify!("Empty expression temp as empty. Should be "); ( PropertyKey::from_usize(match idx { - Decidable::Known(idx) => idx, - _ => todo!(), + Decidable::Known(idx) => *idx, + Decidable::Unknown(_) => todo!(), }), TypeId::UNDEFINED_TYPE, ) @@ -150,8 +150,12 @@ pub(super) fn synthesise_expression( let spread_expression_position = value.get_position().clone().with_source(environment.get_source()); - let (key, value) = - synthesise_array_item(Decidable::Known(idx), value, environment, checking_data); + let (key, value) = synthesise_array_item( + &Decidable::Known(idx), + value, + environment, + checking_data, + ); basis.append( environment, @@ -204,6 +208,8 @@ pub(super) fn synthesise_expression( )) } Expression::BinaryOperation { lhs, operator, rhs, .. 
} => { + use parser::operators::BinaryOperator; + let lhs_ty = synthesise_expression(lhs, environment, checking_data, TypeId::ANY_TYPE); if let BinaryOperator::LogicalAnd @@ -213,10 +219,10 @@ pub(super) fn synthesise_expression( return evaluate_logical_operation_with_expression( lhs_ty, match operator { - BinaryOperator::LogicalAnd => crate::behavior::operations::Logical::And, - BinaryOperator::LogicalOr => crate::behavior::operations::Logical::Or, + BinaryOperator::LogicalAnd => &crate::behavior::operations::Logical::And, + BinaryOperator::LogicalOr => &crate::behavior::operations::Logical::Or, BinaryOperator::NullCoalescing => { - crate::behavior::operations::Logical::NullCoalescing + &crate::behavior::operations::Logical::NullCoalescing } _ => unreachable!(), }, @@ -238,7 +244,6 @@ pub(super) fn synthesise_expression( ASTNode::get_position(&**lhs).clone().with_source(environment.get_source()); let rhs_pos = ASTNode::get_position(&**rhs).clone().with_source(environment.get_source()); - use parser::operators::BinaryOperator; let operator = match operator { BinaryOperator::Add => MathematicalAndBitwise::Add.into(), @@ -339,7 +344,7 @@ pub(super) fn synthesise_expression( } => { let result = environment.delete_property( on, - PropertyKey::String(Cow::Owned(property.clone())), + &PropertyKey::String(Cow::Owned(property.clone())), ); return if result { TypeId::TRUE } else { TypeId::FALSE }; } @@ -347,7 +352,7 @@ pub(super) fn synthesise_expression( } } Expression::Index { indexee, indexer, is_optional, position } => { - let indexee = synthesise_expression( + let being_indexed = synthesise_expression( indexee, environment, checking_data, @@ -361,7 +366,7 @@ pub(super) fn synthesise_expression( ); let property = PropertyKey::from_type(indexer, &checking_data.types); - let result = environment.delete_property(indexee, property); + let result = environment.delete_property(being_indexed, &property); return if result { TypeId::TRUE } else { TypeId::FALSE }; } _ => { @@ -394,19 +399,19 @@ pub(super) fn synthesise_expression( ); } Expression::BinaryAssignmentOperation { lhs, operator, rhs, position } => { + use crate::behavior::assignments::AssignmentKind; + use parser::operators::BinaryAssignmentOperator; + let lhs: Assignable = Assignable::Reference(synthesise_access_to_reference( lhs, environment, checking_data, )); - use crate::behavior::assignments::AssignmentKind; - use parser::operators::BinaryAssignmentOperator; - let assignment_span = position.clone().with_source(environment.get_source()); return environment.assign_to_assignable_handle_errors( lhs, - operator_to_assignment_kind(operator), + operator_to_assignment_kind(*operator), Some(&**rhs), assignment_span, checking_data, @@ -511,11 +516,11 @@ pub(super) fn synthesise_expression( match result { Ok(instance) => instance, - Err(_) => return TypeId::ERROR_TYPE, + Err(()) => return TypeId::ERROR_TYPE, } } Expression::Index { indexee, indexer, position, .. } => { - let indexee = + let being_indexed = synthesise_expression(indexee, environment, checking_data, TypeId::ANY_TYPE); let indexer = synthesise_multiple_expression( indexer, @@ -527,7 +532,7 @@ pub(super) fn synthesise_expression( let index_position = position.clone().with_source(environment.get_source()); // TODO handle differently? 
let result = environment.get_property_handle_errors( - indexee, + being_indexed, Publicity::Public, PropertyKey::from_type(indexer, &checking_data.types), checking_data, @@ -536,12 +541,12 @@ pub(super) fn synthesise_expression( match result { Ok(instance) => instance, - Err(_) => return TypeId::ERROR_TYPE, + Err(()) => return TypeId::ERROR_TYPE, } } Expression::ThisReference(pos) => { let position = pos.clone().with_source(environment.get_source()); - Instance::RValue(environment.get_value_of_this(&checking_data.types, position)) + Instance::RValue(environment.get_value_of_this(&checking_data.types, &position)) } Expression::SuperExpression(reference, position) => match reference { SuperReference::Call { arguments } => { @@ -721,8 +726,8 @@ pub(super) fn synthesise_expression( } }; let rhs = synthesise_expression(rhs, environment, checking_data, TypeId::ANY_TYPE); - let result = - environment.property_in(rhs, PropertyKey::from_type(lhs, &checking_data.types)); + let result = environment + .property_in(rhs, &PropertyKey::from_type(lhs, &checking_data.types)); Instance::RValue(if result { TypeId::TRUE } else { TypeId::FALSE }) } @@ -749,7 +754,7 @@ pub(super) fn synthesise_expression( } fn operator_to_assignment_kind( - operator: &parser::operators::BinaryAssignmentOperator, + operator: parser::operators::BinaryAssignmentOperator, ) -> crate::behavior::assignments::AssignmentKind { use crate::behavior::assignments::AssignmentKind; use parser::operators::BinaryAssignmentOperator; @@ -764,7 +769,8 @@ fn operator_to_assignment_kind( BinaryAssignmentOperator::LogicalNullishAssignment => { AssignmentKind::ConditionalUpdate(crate::behavior::operations::Logical::NullCoalescing) } - BinaryAssignmentOperator::AddAssign => { + BinaryAssignmentOperator::AddAssign + | BinaryAssignmentOperator::BitwiseShiftRightUnsigned => { AssignmentKind::PureUpdate(MathematicalAndBitwise::Add) } BinaryAssignmentOperator::SubtractAssign => { @@ -788,9 +794,6 @@ fn operator_to_assignment_kind( BinaryAssignmentOperator::BitwiseShiftRightAssign => { AssignmentKind::PureUpdate(MathematicalAndBitwise::BitwiseShiftRight) } - BinaryAssignmentOperator::BitwiseShiftRightUnsigned => { - AssignmentKind::PureUpdate(MathematicalAndBitwise::Add) - } BinaryAssignmentOperator::BitwiseAndAssign => { AssignmentKind::PureUpdate(MathematicalAndBitwise::BitwiseAnd) } @@ -805,7 +808,7 @@ fn operator_to_assignment_kind( /// Generic for functions + constructor calls /// -/// TODO error with function_type_id should be handled earlier +/// TODO error with `function_type_id` should be handled earlier fn call_function( function_type_id: TypeId, called_with_new: CalledWithNew, @@ -837,12 +840,14 @@ fn call_function( crate::types::calling::call_type_handle_errors( function_type_id, - called_with_new, - Default::default(), - generic_type_arguments, - synthesised_arguments, - call_site.clone().with_source(environment.get_source()), + CallingInput { + called_with_new, + this_value: Default::default(), + call_site: call_site.clone().with_source(environment.get_source()), + call_site_type_arguments: generic_type_arguments, + }, environment, + synthesised_arguments, checking_data, ) } @@ -855,7 +860,7 @@ fn synthesise_arguments( ) -> Vec { arguments .iter() - .flat_map(|argument| match argument { + .filter_map(|argument| match argument { SpreadExpression::Spread(expr, _) => { todo!() // Some(synthesisedFunctionArgument::Spread(synthesise_expression( @@ -898,7 +903,7 @@ pub(super) fn synthesise_object_literal( let mut object_builder = 
ObjectBuilder::new(None, &mut checking_data.types, &mut environment.facts); - for member in members.iter() { + for member in members { let member_position = member.get_position().clone().with_source(environment.get_source()); match member { ObjectLiteralMember::Spread(spread, pos) => { @@ -992,7 +997,7 @@ pub(super) fn synthesise_object_literal( let property = match &method.header { MethodHeader::Get(_) => crate::PropertyValue::Getter(Box::new(function)), MethodHeader::Set(_) => crate::PropertyValue::Setter(Box::new(function)), - _ => { + MethodHeader::Regular { .. } => { crate::PropertyValue::Value(checking_data.types.new_function_type(function)) } }; @@ -1003,7 +1008,7 @@ pub(super) fn synthesise_object_literal( key, property, Some(member_position), - ) + ); } } } diff --git a/checker/src/synthesis/extensions/is_expression.rs b/checker/src/synthesis/extensions/is_expression.rs index 57e8b8a4..9a7b141a 100644 --- a/checker/src/synthesis/extensions/is_expression.rs +++ b/checker/src/synthesis/extensions/is_expression.rs @@ -21,7 +21,7 @@ pub(crate) fn synthesise_is_expression( ); let mut returned = TypeId::UNDEFINED_TYPE; - for (condition, code) in is_expression.branches.iter() { + for (condition, code) in &is_expression.branches { let requirement = synthesise_type_annotation(condition, environment, checking_data); // TODO need to test subtyping and subtype here diff --git a/checker/src/synthesis/extensions/jsx.rs b/checker/src/synthesis/extensions/jsx.rs index 85a08c6b..69b6c5f0 100644 --- a/checker/src/synthesis/extensions/jsx.rs +++ b/checker/src/synthesis/extensions/jsx.rs @@ -14,6 +14,7 @@ use crate::{ diagnostics::{TypeCheckError, TypeStringRepresentation}, synthesis::expressions::synthesise_expression, types::{ + calling::CallingInput, properties::PropertyKey, subtyping::{type_is_subtype, BasicEquality, SubTypeResult}, SynthesisedArgument, @@ -37,6 +38,10 @@ pub(crate) fn synthesise_jsx_element( environment: &mut Environment, checking_data: &mut CheckingData, ) -> TypeId { + // TODO cache or something? + // TODO temp, to be worked out + const JSX_NAME: &str = "JSXH"; + let tag_name = element.tag_name.as_str(); let tag_name_as_cst_ty = @@ -45,7 +50,7 @@ pub(crate) fn synthesise_jsx_element( let mut attributes_object = ObjectBuilder::new(None, &mut checking_data.types, &mut environment.facts); - for attribute in element.attributes.iter() { + for attribute in &element.attributes { let (name, attribute_value) = synthesise_attribute(attribute, environment, checking_data); let attribute_position = attribute.get_position().clone().with_source(environment.get_source()); @@ -159,10 +164,6 @@ pub(crate) fn synthesise_jsx_element( None }; - // TODO cache or something? 
- // TODO temp, to be worked out - const JSX_NAME: &str = "JSXH"; - let position = element.get_position().clone().with_source(environment.get_source()); let jsx_function = match environment.get_variable_handle_error(JSX_NAME, position.clone(), checking_data) { @@ -186,17 +187,19 @@ pub(crate) fn synthesise_jsx_element( let mut args = vec![tag_name_argument, attributes_argument]; if let Some(child_nodes) = child_nodes { // TODO position here - args.push(SynthesisedArgument::NonSpread { ty: child_nodes, position: position.clone() }) + args.push(SynthesisedArgument::NonSpread { ty: child_nodes, position: position.clone() }); } call_type_handle_errors( jsx_function, - crate::types::calling::CalledWithNew::None, - environment.facts.value_of_this, - None, - args, - position.clone(), + CallingInput { + called_with_new: crate::types::calling::CalledWithNew::None, + this_value: environment.facts.value_of_this, + call_site: position.clone(), + call_site_type_arguments: None, + }, environment, + args, checking_data, ) .0 diff --git a/checker/src/synthesis/functions.rs b/checker/src/synthesis/functions.rs index 75a9ffd4..3f61c7a2 100644 --- a/checker/src/synthesis/functions.rs +++ b/checker/src/synthesis/functions.rs @@ -190,13 +190,13 @@ where environment: &mut Environment, checking_data: &mut CheckingData, ) { - self.body.synthesise_function_body(environment, checking_data) + self.body.synthesise_function_body(environment, checking_data); } } pub(super) trait SynthesisableFunctionBody { // Return type is the return type of the body, if it doesn't use - /// any returns it is equal to [Type::Undefined] + /// any returns it is equal to [`Type::Undefined`] fn synthesise_function_body( &self, environment: &mut Environment, @@ -230,7 +230,7 @@ impl SynthesisableFunctionBody for ExpressionOrBlock { environment.return_value(returned, position); } ExpressionOrBlock::Block(block) => { - block.synthesise_function_body(environment, checking_data) + block.synthesise_function_body(environment, checking_data); } } } @@ -311,8 +311,7 @@ pub(super) fn synthesise_type_annotation_function_parameters( { string_comment_to_type( possible_declaration, - position.clone().with_source(environment.get_source()), + &position.clone().with_source(environment.get_source()), environment, checking_data, ) @@ -410,7 +409,7 @@ fn synthesise_function_parameters( }) .collect(); - for parameter in ast_parameters.rest_parameter.iter() { + for parameter in &ast_parameters.rest_parameter { todo!() // super::variables::hoist_variable_identifier(¶meter.name, environment, is_constant); } @@ -423,7 +422,7 @@ fn param_name_to_string(param: &VariableField if let VariableIdentifier::Standard(name, ..) = name { name.clone() } else { - "".to_owned() + String::new() } } VariableField::Array(_, _) => todo!(), @@ -447,6 +446,7 @@ fn get_parameter_name( /// TODO should always take effect annotations (right?) 
/// /// TODO abstract +#[allow(clippy::too_many_arguments)] pub(super) fn synthesise_function_annotation( type_parameters: &Option>, parameters: &parser::type_annotations::TypeAnnotationFunctionParameters, @@ -455,7 +455,7 @@ pub(super) fn synthesise_function_annotation, checking_data: &mut CheckingData, performs: super::Performs, - position: source_map::SpanWithSource, + position: &source_map::SpanWithSource, mut behavior: FunctionBehavior, on_interface: Option, ) -> FunctionType { @@ -505,12 +505,10 @@ pub(super) fn synthesise_function_annotation( let mut idx_to_types = HashMap::new(); // First stage - for item in items.iter() { + for item in items { if let StatementOrDeclaration::Declaration(declaration) = item { match declaration { parser::Declaration::DeclareVariable(_) @@ -50,7 +50,7 @@ pub(crate) fn hoist_statements( interface.nominal_keyword.is_some(), interface.type_parameters.as_deref(), interface.extends.as_deref(), - interface.position.clone().with_source(environment.get_source()), + &interface.position.clone().with_source(environment.get_source()), &mut checking_data.types, ); idx_to_types.insert(interface.position.start, ty); @@ -61,7 +61,7 @@ pub(crate) fn hoist_statements( interface.on.nominal_keyword.is_some(), interface.on.type_parameters.as_deref(), interface.on.extends.as_deref(), - interface.on.position.clone().with_source(environment.get_source()), + &interface.on.position.clone().with_source(environment.get_source()), &mut checking_data.types, ); idx_to_types.insert(interface.on.position.start, ty); @@ -128,7 +128,7 @@ pub(crate) fn hoist_statements( kind, checking_data, true, - ) + ); } Exportable::ImportParts { parts, from, .. } => { let parts = parts.iter().filter_map(export_part_to_name_pair); @@ -179,7 +179,7 @@ pub(crate) fn hoist_statements( } StatementOrDeclaration::Declaration(dec) => match dec { parser::Declaration::Variable(declaration) => { - hoist_variable_declaration(declaration, environment, checking_data) + hoist_variable_declaration(declaration, environment, checking_data); } parser::Declaration::Function(func) => { // TODO unsynthesised function? ... @@ -204,7 +204,7 @@ pub(crate) fn hoist_statements( environment, checking_data, func.performs.as_ref().into(), - declared_at.clone(), + &declared_at, crate::behavior::functions::FunctionBehavior::ArrowFunction { is_async: false, }, @@ -215,7 +215,7 @@ pub(crate) fn hoist_statements( base.type_parameters, base.parameters, base.return_type, - declared_at, + &declared_at, base.effects, base.constant_function, ); @@ -229,9 +229,6 @@ pub(crate) fn hoist_statements( checking_data, ); } - parser::Declaration::Class(_) => { - // TODO hoist type... 
- } parser::Declaration::Enum(r#enum) => { checking_data.raise_unimplemented_error( "enum", @@ -248,14 +245,13 @@ pub(crate) fn hoist_statements( checking_data, ); } - parser::Declaration::TypeAlias(_) => {} parser::Declaration::DeclareVariable(DeclareVariableDeclaration { keyword: _, declarations, position, decorators, }) => { - for declaration in declarations.iter() { + for declaration in declarations { let constraint = get_annotation_from_declaration( declaration, environment, @@ -277,12 +273,10 @@ pub(crate) fn hoist_statements( ); } } - parser::Declaration::DeclareInterface(_) => {} - parser::Declaration::Import(_) => {} + parser::Declaration::Export(exported) => match &exported.on { parser::declarations::ExportDeclaration::Variable { exported, position } => { match exported { - Exportable::Class(_) => {} Exportable::Function(func) => { // TODO unsynthesised function? ... let mutability = @@ -304,7 +298,7 @@ pub(crate) fn hoist_statements( } Exportable::Variable(declaration) => { // TODO mark exported - hoist_variable_declaration(declaration, environment, checking_data) + hoist_variable_declaration(declaration, environment, checking_data); } Exportable::Interface(interface) => { let ty = idx_to_types.remove(&interface.position.start).unwrap(); @@ -319,11 +313,16 @@ pub(crate) fn hoist_statements( Exportable::TypeAlias(_) | Exportable::Parts(..) | Exportable::ImportAll { .. } - | Exportable::ImportParts { .. } => {} + | Exportable::ImportParts { .. } + | Exportable::Class(_) => {} } } parser::declarations::ExportDeclaration::Default { .. } => {} }, + parser::Declaration::Class(_) + | parser::Declaration::TypeAlias(_) + | parser::Declaration::DeclareInterface(_) + | parser::Declaration::Import(_) => {} }, } } @@ -408,7 +407,7 @@ fn import_part_to_name_pair(item: &parser::declarations::ImportPart) -> Option item, - _ => todo!(), + parser::declarations::ImportExportName::Cursor(_) => todo!(), }, r#as: name, position: position.clone(), @@ -440,7 +439,7 @@ pub(super) fn export_part_to_name_pair( r#as: match alias { parser::declarations::ImportExportName::Reference(item) | parser::declarations::ImportExportName::Quoted(item, _) => item, - _ => todo!(), + parser::declarations::ImportExportName::Cursor(_) => todo!(), }, position: position.clone(), }) @@ -465,7 +464,7 @@ fn hoist_variable_declaration( declarations, position, } => { - for declaration in declarations.iter() { + for declaration in declarations { let constraint = get_annotation_from_declaration(declaration, environment, checking_data); @@ -487,7 +486,7 @@ fn hoist_variable_declaration( declarations, position, } => { - for declaration in declarations.iter() { + for declaration in declarations { let constraint = get_annotation_from_declaration(declaration, environment, checking_data); @@ -527,7 +526,7 @@ fn get_annotation_from_declaration< { string_comment_to_type( possible_declaration, - position.clone().with_source(environment.get_source()), + &position.clone().with_source(environment.get_source()), environment, checking_data, ) @@ -548,12 +547,13 @@ fn get_annotation_from_declaration< pub(crate) fn string_comment_to_type( possible_declaration: &String, - position: source_map::SpanWithSource, + position: &source_map::SpanWithSource, environment: &mut crate::context::Context>, checking_data: &mut CheckingData, ) -> Option<(TypeId, source_map::SpanWithSource)> { - let source = environment.get_source(); use parser::ASTNode; + + let source = environment.get_source(); let offset = Some(position.end - 2 - possible_declaration.len() as 
u32); let annotation = parser::TypeAnnotation::from_string( possible_declaration.clone(), diff --git a/checker/src/synthesis/interfaces.rs b/checker/src/synthesis/interfaces.rs index 29d5ae9f..905a5abd 100644 --- a/checker/src/synthesis/interfaces.rs +++ b/checker/src/synthesis/interfaces.rs @@ -102,7 +102,7 @@ impl SynthesiseInterfaceBehavior for OnToType { }; // TODO: `None` position passed - environment.facts.register_property(self.0, publicity, under, ty, false, None) + environment.facts.register_property(self.0, publicity, under, ty, false, None); } fn interface_type(&self) -> Option { @@ -145,7 +145,7 @@ pub(super) fn synthesise_signatures GetterSetter::Getter, parser::MethodHeader::Set(_) => GetterSetter::Setter, - _ => GetterSetter::None, + parser::MethodHeader::Regular { .. } => GetterSetter::None, }; let function = synthesise_function_annotation( type_parameters, @@ -154,7 +154,7 @@ pub(super) fn synthesise_signatures Result { let options = Default::default(); - parser::TypeDefinitionModule::from_string(string, options, source_id) + parser::TypeDefinitionModule::from_string(&string, options, source_id) .map_err(|err| (err, source_id)) } @@ -129,7 +129,7 @@ impl crate::ASTImplementation for EznoParser { module_environment: &mut Environment, checking_data: &mut crate::CheckingData, ) { - synthesise_block(&module.items, module_environment, checking_data) + synthesise_block(&module.items, module_environment, checking_data); } fn synthesise_expression( @@ -255,6 +255,7 @@ pub mod interactive { } } + #[must_use] pub fn get_fs_ref(&self) -> &MapFileStore { &self.checking_data.modules.files } @@ -263,6 +264,7 @@ pub mod interactive { &mut self.checking_data.modules.files } + #[must_use] pub fn get_source_id(&self) -> SourceId { self.checking_data.modules.entry_point.unwrap() } diff --git a/checker/src/synthesis/statements.rs b/checker/src/synthesis/statements.rs index 23509786..78b4c37d 100644 --- a/checker/src/synthesis/statements.rs +++ b/checker/src/synthesis/statements.rs @@ -107,21 +107,21 @@ pub(super) fn synthesise_statement( environment.new_conditional_context( condition, |env: &mut Environment, data: &mut CheckingData| { - synthesise_block_or_single_statement(current.1, env, data) + synthesise_block_or_single_statement(current.1, env, data); }, if !others.is_empty() || last.is_some() { Some(|env: &mut Environment, data: &mut CheckingData| { if let [current, others @ ..] = &others { - run_condition(*current, others, last, env, data) + run_condition(*current, others, last, env, data); } else { - synthesise_block_or_single_statement(last.unwrap(), env, data) + synthesise_block_or_single_statement(last.unwrap(), env, data); } }) } else { None }, checking_data, - ) + ); } let others = if_statement @@ -138,7 +138,7 @@ pub(super) fn synthesise_statement( last, environment, checking_data, - ) + ); // environment.new_conditional_context( // condition, @@ -351,7 +351,6 @@ pub(super) fn synthesise_statement( // yay! } // TODO acknowledge '@ts-ignore' statements but error - Statement::Comment(..) | Statement::MultiLineComment(..) 
=> {} Statement::Cursor(cursor_id, _) => { todo!("Dump environment data somewhere") } @@ -369,7 +368,7 @@ pub(super) fn synthesise_statement( TypeId::ANY_TYPE, ); let thrown_position = stmt.2.clone().with_source(environment.get_source()); - environment.throw_value(thrown_value, thrown_position) + environment.throw_value(thrown_value, thrown_position); } Statement::Labelled { position, name, statement } => { checking_data.raise_unimplemented_error( @@ -414,7 +413,7 @@ pub(super) fn synthesise_statement( ); } } - Statement::Empty(_) => {} + Statement::Empty(_) | Statement::Comment(..) | Statement::MultiLineComment(..) => {} } } @@ -425,10 +424,10 @@ fn synthesise_block_or_single_statement( ) { match block_or_single_statement { BlockOrSingleStatement::Braced(block) => { - synthesise_block(&block.0, environment, checking_data) + synthesise_block(&block.0, environment, checking_data); } BlockOrSingleStatement::SingleStatement(statement) => { - synthesise_statement(statement, environment, checking_data) + synthesise_statement(statement, environment, checking_data); } } // environment.new_lexical_environment_fold_into_parent( diff --git a/checker/src/synthesis/type_annotations.rs b/checker/src/synthesis/type_annotations.rs index a0b5ec30..78372f4c 100644 --- a/checker/src/synthesis/type_annotations.rs +++ b/checker/src/synthesis/type_annotations.rs @@ -1,14 +1,14 @@ -//! Logic for getting [TypeId] from [parser::TypeAnnotation]s +//! Logic for getting [`TypeId`] from [`parser::TypeAnnotation`]s //! //! ### There are several behaviors for type references depending on their position: //! #### Sources: -//! - Type reference of any source variable declarations is a [crate::TypeConstraint] -//! - Type references in parameters are [crate::TypeConstraint]s -//! - Type references in returns types are also [crate::TypeConstraint]s, because ezno uses the body to get the return +//! - Type reference of any source variable declarations is a [`crate::TypeConstraint`] +//! - Type references in parameters are [`crate::TypeConstraint`]s +//! - Type references in returns types are also [`crate::TypeConstraint`]s, because ezno uses the body to get the return //! type //! //! #### Declarations -//! - Type reference in any declaration or return type is a internal type [crate::Type::InternalObjectReference] +//! - Type reference in any declaration or return type is a internal type [`crate::Type::InternalObjectReference`] //! - Return types need to know whether they return a unique object (todo don't know any examples) //! or a new object. e.g. `Array.from` //! - Parameters shouldn't do generic resolving @@ -21,7 +21,12 @@ use std::{convert::TryInto, iter::FromIterator}; use indexmap::IndexSet; -use parser::{type_annotations::*, ASTNode}; +use parser::{ + type_annotations::{ + AnnotationWithBinder, CommonTypes, SpreadKind, TypeCondition, TypeConditionResult, + }, + ASTNode, TypeAnnotation, +}; use source_map::{SourceId, SpanWithSource}; use crate::{ @@ -41,11 +46,11 @@ use crate::{ use crate::context::{Context, ContextType}; -/// Turns a [parser::TypeAnnotation] into [TypeId] +/// Turns a [`parser::TypeAnnotation`] into [`TypeId`] /// -/// [CheckingData] contains [Memory] and [crate::ErrorAndWarningHandler] +/// [`CheckingData`] contains [Memory] and [`crate::ErrorAndWarningHandler`] /// -/// Returns a Type if it is found else a [Result::Err]. +/// Returns a Type if it is found else a [`Result::Err`]. 
/// Errors other than non existent type are instead appended to the warning handler and a "default" is returned: /// Example errors: /// - Reference to generic without generic types @@ -77,33 +82,28 @@ pub(super) fn synthesise_type_annotation( "this" => todo!(), // environment.get_value_of_this(&mut checking_data.types), "self" => TypeId::ANY_INFERRED_FREE_THIS, name => { - match environment.get_type_from_name(name) { - Some(ty) => { - // Warn if it requires parameters. e.g. Array - if let Type::AliasTo { parameters: Some(_), .. } - | Type::NamedRooted { parameters: Some(_), .. } = checking_data.types.get_type_by_id(ty) - { - // TODO check defaults... - checking_data.diagnostics_container.add_error( - TypeCheckError::TypeNeedsTypeArguments( - name, - pos.clone().with_source(environment.get_source()), - ), - ); - TypeId::ANY_TYPE - } else { - ty - } - } - None => { + if let Some(ty) = environment.get_type_from_name(name) { + // Warn if it requires parameters. e.g. Array + if let Type::AliasTo { parameters: Some(_), .. } + | Type::NamedRooted { parameters: Some(_), .. } = checking_data.types.get_type_by_id(ty) + { + // TODO check defaults... checking_data.diagnostics_container.add_error( - TypeCheckError::CannotFindType( + TypeCheckError::TypeNeedsTypeArguments( name, pos.clone().with_source(environment.get_source()), ), ); - TypeId::ERROR_TYPE + TypeId::ANY_TYPE + } else { + ty } + } else { + checking_data.diagnostics_container.add_error(TypeCheckError::CannotFindType( + name, + pos.clone().with_source(environment.get_source()), + )); + TypeId::ERROR_TYPE } } }, @@ -253,7 +253,7 @@ pub(super) fn synthesise_type_annotation( environment, checking_data, super::Performs::None, - position.clone(), + &position, // TODO async crate::behavior::functions::FunctionBehavior::ArrowFunction { is_async: false }, None, @@ -263,7 +263,7 @@ pub(super) fn synthesise_type_annotation( function_type.type_parameters, function_type.parameters, function_type.return_type, - position.clone(), + &position, function_type.effects, None, ) @@ -382,11 +382,12 @@ pub(super) fn synthesise_type_annotation( TypeAnnotation::ParenthesizedReference(ref reference, _) => { synthesise_type_annotation(reference, environment, checking_data) } - TypeAnnotation::Index(indexee, indexer, _) => { - let indexee = synthesise_type_annotation(indexee, environment, checking_data); + TypeAnnotation::Index(being_indexed, indexer, _) => { + let being_indexed = + synthesise_type_annotation(being_indexed, environment, checking_data); let indexer = synthesise_type_annotation(indexer, environment, checking_data); if let Some(prop) = environment.get_property_unbound( - indexee, + being_indexed, Publicity::Public, crate::types::properties::PropertyKey::Type(indexer), &checking_data.types, @@ -407,8 +408,6 @@ pub(super) fn synthesise_type_annotation( } TypeAnnotation::KeyOf(_, _) => unimplemented!(), TypeAnnotation::Conditional { condition, resolve_true, resolve_false, position } => { - let condition = synthesise_type_condition(condition, environment, checking_data); - fn synthesise_condition(result: &TypeConditionResult) -> &TypeAnnotation { match result { TypeConditionResult::Reference(reference) => reference, @@ -416,6 +415,8 @@ pub(super) fn synthesise_type_annotation( } } + let condition = synthesise_type_condition(condition, environment, checking_data); + let truthy_result = synthesise_type_annotation( synthesise_condition(resolve_true), environment, diff --git a/checker/src/synthesis/variables.rs b/checker/src/synthesis/variables.rs index 
03ec1d57..01aa0264 100644 --- a/checker/src/synthesis/variables.rs +++ b/checker/src/synthesis/variables.rs @@ -88,29 +88,41 @@ pub(crate) fn register_variable value.prop_to_type(), - None => { - checking_data.diagnostics_container.add_error( - TypeCheckError::PropertyDoesNotExist { - property: match under { - PropertyKey::String(s) => crate::diagnostics::PropertyRepresentation::StringKey(s.to_string()), - PropertyKey::Type(t) => crate::diagnostics::PropertyRepresentation::Type(print_type(t, &checking_data.types, &environment.as_general_context(), false)) - }, - on: TypeStringRepresentation::from_type_id( - constraint, - &environment.as_general_context(), - &checking_data.types, - false, - ), - site: name - .get_position() - .clone() - .with_source(environment.get_source()), - }, - ); - TypeId::ERROR_TYPE - } + if let Some(value) = property_constraint { + value.prop_to_type() + } else { + checking_data + .diagnostics_container + .add_error(TypeCheckError::PropertyDoesNotExist { + property: match under { + PropertyKey::String(s) => { + crate::diagnostics::PropertyRepresentation::StringKey( + s.to_string(), + ) + } + PropertyKey::Type(t) => { + crate::diagnostics::PropertyRepresentation::Type( + print_type( + t, + &checking_data.types, + &environment.as_general_context(), + false, + ), + ) + } + }, + on: TypeStringRepresentation::from_type_id( + constraint, + &environment.as_general_context(), + &checking_data.types, + false, + ), + site: name + .get_position() + .clone() + .with_source(environment.get_source()), + }); + TypeId::ERROR_TYPE } }); register_variable( @@ -127,27 +139,10 @@ pub(crate) fn register_variable { - for field in items.iter() { + for field in items { match field.get_ast_ref() { - ObjectDestructuringField::Spread(variable, _) => { - let ty = register_variable_identifier( - variable, - environment, - checking_data, - behavior.clone(), - // TODO - constraint, - ); - if let Some(constraint) = constraint { - // TODO - // checking_data - // .type_mappings - // .variables_to_constraints - // .0 - // .insert(crate::VariableId(pos.source, pos.start), constraint); - } - } - ObjectDestructuringField::Name(variable, ..) => { + ObjectDestructuringField::Name(variable, ..) 
+ | ObjectDestructuringField::Spread(variable, _) => { let ty = register_variable_identifier( variable, environment, @@ -178,29 +173,41 @@ pub(crate) fn register_variable value.prop_to_type(), - None => { - checking_data.diagnostics_container.add_error( - TypeCheckError::PropertyDoesNotExist { - property: match under { - PropertyKey::String(s) => crate::diagnostics::PropertyRepresentation::StringKey(s.to_string()), - PropertyKey::Type(t) => crate::diagnostics::PropertyRepresentation::Type(print_type(t, &checking_data.types, &environment.as_general_context(), false)) - }, - on: TypeStringRepresentation::from_type_id( - constraint, - &environment.as_general_context(), - &checking_data.types, - false, - ), - site: name - .get_position() - .clone() - .with_source(environment.get_source()), - }, - ); - TypeId::ERROR_TYPE - } + if let Some(value) = property_constraint { + value.prop_to_type() + } else { + checking_data + .diagnostics_container + .add_error(TypeCheckError::PropertyDoesNotExist { + property: match under { + PropertyKey::String(s) => { + crate::diagnostics::PropertyRepresentation::StringKey( + s.to_string(), + ) + } + PropertyKey::Type(t) => { + crate::diagnostics::PropertyRepresentation::Type( + print_type( + t, + &checking_data.types, + &environment.as_general_context(), + false, + ), + ) + } + }, + on: TypeStringRepresentation::from_type_id( + constraint, + &environment.as_general_context(), + &checking_data.types, + false, + ), + site: name + .get_position() + .clone() + .with_source(environment.get_source()), + }); + TypeId::ERROR_TYPE } }); register_variable( @@ -220,7 +227,7 @@ pub(crate) fn register_variable( checking_data, value, exported, - ) + ); } // TODO @@ -373,7 +380,7 @@ fn assign_to_fields( } }; - environment.register_initial_variable_declaration_value(id, value) + environment.register_initial_variable_declaration_value(id, value); } ObjectDestructuringField::Map { from, name, default_value, position } => { let key_ty = super::parser_property_key_to_checker_property_key( @@ -418,7 +425,7 @@ fn assign_to_fields( checking_data, value, exported, - ) + ); } } } diff --git a/checker/src/type_mappings.rs b/checker/src/type_mappings.rs index c3f8fc0c..90e4d34d 100644 --- a/checker/src/type_mappings.rs +++ b/checker/src/type_mappings.rs @@ -12,10 +12,10 @@ use crate::{ types::{TypeId, TypeStore}, FunctionId, GeneralContext, VariableId, }; -/// [TypeMappings] is used to retaining information between passes, including the synthesise and checking passes +/// [`TypeMappings`] is used to retaining information between passes, including the synthesise and checking passes /// This for use in the both use in the compiler and compiler plugins -/// Checking things are held on [crate::Memory], function things are held on [crate::HoistedFunctionContainer] -/// and module things on [crate::ModuleData] +/// Checking things are held on [`crate::Memory`], function things are held on [`crate::HoistedFunctionContainer`] +/// and module things on [`crate::ModuleData`] #[derive(Default, Debug)] pub struct TypeMappings { /// Figures out the types of the expressions in the AST @@ -47,17 +47,19 @@ pub struct VariablesToTypes(pub(crate) HashMap); // TODO these are temp impl TypeMappings { + #[must_use] pub fn print_called_functions(&self, source: &str) -> String { let mut buf = "Called functions:\n".to_owned(); - for func_id in self.called_functions.iter() { + for func_id in &self.called_functions { buf.push_str( source.get((func_id.1 as usize)..(func_id.1 as usize + 10)).unwrap_or_default(), ); - 
buf.push('\n') + buf.push('\n'); } buf } + #[must_use] pub fn print_type_mappings( &self, source: &str, @@ -78,8 +80,8 @@ impl TypeMappings { } } -/// See https://www.internalpointers.com/post/understanding-meaning-lexpressions-and-rexpressions-c for a understanding -/// of LValue vs RValue +/// See for a understanding +/// of `LValue` vs `RValue` #[derive(Clone, Debug)] pub enum Instance { LValue(VariableWithValue), @@ -89,6 +91,7 @@ pub enum Instance { } impl Instance { + #[must_use] pub fn get_variable_id(&self) -> Option { match self { Self::LValue(variable) => Some(variable.0.get_id()), @@ -96,6 +99,7 @@ impl Instance { } } + #[must_use] pub fn get_value(&self) -> TypeId { match self { Instance::LValue(l) => l.1, diff --git a/checker/src/types/calling.rs b/checker/src/types/calling.rs index 374007b6..352523ea 100644 --- a/checker/src/types/calling.rs +++ b/checker/src/types/calling.rs @@ -27,24 +27,35 @@ use super::{ Constructor, PolyNature, StructureGenerics, TypeStore, }; +pub struct CallingInput { + pub called_with_new: CalledWithNew, + pub this_value: ThisValue, + pub call_site_type_arguments: Option>, + pub call_site: SpanWithSource, +} + +pub struct CallingInputWithoutThis { + pub called_with_new: CalledWithNew, + pub call_site_type_arguments: Option>, + pub call_site: SpanWithSource, +} + pub fn call_type_handle_errors( ty: TypeId, - // Overwritten by .call, else look at binding - called_with_new: CalledWithNew, - this_value: ThisValue, - call_site_type_arguments: Option>, - arguments: Vec, - call_site: SpanWithSource, + CallingInput { called_with_new, this_value, call_site_type_arguments, call_site }: CallingInput, environment: &mut Environment, + arguments: Vec, checking_data: &mut crate::CheckingData, ) -> (TypeId, Option) { let result = call_type( ty, - called_with_new, - this_value, - call_site_type_arguments, + CallingInput { + called_with_new, + this_value, + call_site_type_arguments, + call_site: call_site.clone(), + }, arguments, - call_site.clone(), environment, &mut CheckThings, &mut checking_data.types, @@ -58,7 +69,7 @@ pub fn call_type_handle_errors( on: TypeId, - called_with_new: CalledWithNew, - // Overwritten by .call, else look at binding - this_value: ThisValue, - call_site_type_arguments: Option>, + CallingInput { called_with_new, this_value, call_site_type_arguments, call_site }: CallingInput, arguments: Vec, - call_site: SpanWithSource, environment: &mut Environment, behavior: &mut E, types: &mut TypeStore, @@ -105,11 +112,8 @@ pub(crate) fn call_type( if let Some(constraint) = environment.get_poly_base(on, types) { create_generic_function_call( constraint, - called_with_new, - this_value, - call_site_type_arguments, + CallingInput { called_with_new, this_value, call_site_type_arguments, call_site }, arguments, - call_site, on, environment, behavior, @@ -127,11 +131,9 @@ pub(crate) fn call_type( call_logical( logical, types, - called_with_new, - call_site_type_arguments, + CallingInputWithoutThis { called_with_new, call_site_type_arguments, call_site }, structure_generics, arguments, - &call_site, environment, behavior, ) @@ -152,11 +154,9 @@ pub(crate) fn call_type( fn call_logical( logical: Logical<(FunctionId, ThisValue)>, types: &mut TypeStore, - called_with_new: CalledWithNew, - call_site_type_arguments: Option)>>, + CallingInputWithoutThis { called_with_new, call_site_type_arguments, call_site }: CallingInputWithoutThis, structure_generics: Option, arguments: Vec, - call_site: &source_map::BaseSpan, environment: &mut Environment, behavior: &mut E, ) 
-> Result> { @@ -164,12 +164,14 @@ fn call_logical( Logical::Pure((func, this_value)) => { if let Some(function_type) = types.functions.get(&func) { function_type.clone().call( - called_with_new, - this_value, - call_site_type_arguments, + CallingInput { + called_with_new, + this_value, + call_site_type_arguments, + call_site, + }, structure_generics, &arguments, - call_site.clone(), environment, behavior, types, @@ -183,11 +185,9 @@ fn call_logical( Logical::Implies { on, antecedent } => call_logical( *on, types, - called_with_new, - call_site_type_arguments, + CallingInputWithoutThis { called_with_new, call_site_type_arguments, call_site }, Some(antecedent), arguments, - call_site, environment, behavior, ), @@ -232,11 +232,8 @@ fn get_logical_callable_from_type( fn create_generic_function_call( constraint: TypeId, - called_with_new: CalledWithNew, - this_value: ThisValue, - call_site_type_arguments: Option>, + CallingInput { called_with_new, this_value, call_site_type_arguments, call_site }: CallingInput, arguments: Vec, - call_site: SpanWithSource, on: TypeId, environment: &mut Environment, behavior: &mut E, @@ -245,12 +242,14 @@ fn create_generic_function_call( // TODO don't like how it is mixed let result = call_type( constraint, - called_with_new, - this_value, - call_site_type_arguments, + CallingInput { + called_with_new, + this_value, + call_site_type_arguments, + call_site: call_site.clone(), + }, // TODO clone arguments.clone(), - call_site.clone(), environment, behavior, types, @@ -260,7 +259,10 @@ fn create_generic_function_call( // TODO work this out let is_open_poly = false; - let reflects_dependency = if !is_open_poly { + + let reflects_dependency = if is_open_poly { + None + } else { // Skip constant types if matches!(result.returned_type, TypeId::UNDEFINED_TYPE | TypeId::NULL_TYPE) || matches!( @@ -274,8 +276,7 @@ fn create_generic_function_call( on, with, timing: crate::events::CallingTiming::Synchronous, - called_with_new, - // Don't care about output. + called_with_new, // Don't care about output. reflects_dependency: None, position: call_site.clone(), }); @@ -294,8 +295,6 @@ fn create_generic_function_call( let constructor_return = types.register_type(Type::Constructor(constructor)); Some(constructor_return) - } else { - None }; // TODO nearest fact @@ -376,14 +375,13 @@ impl FunctionType { /// Calls the function /// /// Returns warnings and errors + // Move references in a wrapping struct can be hard due to lifetimes + #[allow(clippy::too_many_arguments)] pub(crate) fn call( &self, - called_with_new: CalledWithNew, - mut this_value: ThisValue, - call_site_type_arguments: Option>, + CallingInput { called_with_new, mut this_value, call_site_type_arguments, call_site }: CallingInput, parent_type_arguments: Option, arguments: &[SynthesisedArgument], - call_site: SpanWithSource, environment: &mut Environment, behavior: &mut E, types: &mut crate::TypeStore, @@ -470,12 +468,14 @@ impl FunctionType { // TODO with cloned!! 
let result = self .call( - called_with_new, - this_value, - call_site_type_arguments, + CallingInput { + called_with_new, + this_value, + call_site_type_arguments, + call_site: call_site.clone(), + }, parent_type_arguments, arguments, - call_site.clone(), environment, behavior, types, @@ -562,7 +562,7 @@ impl FunctionType { CalledWithNew::SpecialSuperCall { this_type } => todo!(), CalledWithNew::None => { // TODO - let value_of_this = this_value.get(environment, types, call_site.clone()); + let value_of_this = this_value.get(environment, types, &call_site); seeding_context.type_arguments.insert( free_this_id, @@ -617,13 +617,13 @@ impl FunctionType { environment, types, &mut errors, - call_site, + &call_site, ); // take the found and inject back into what it resolved let mut result_type_arguments = map_vec::Map::new(); - for (item, values) in found.into_iter() { + found.into_iter().for_each(|(item, values)| { let mut into_iter = values.into_iter(); let (mut value, argument_position, param) = into_iter.next().expect("no type argument ...?"); @@ -668,14 +668,14 @@ impl FunctionType { parameter_type, parameter_position: synthesised_parameter.position.clone(), restriction, - }) + }); } } } // TODO position is just the first result_type_arguments.insert(item, (value, argument_position)); - } + }); // for (item, restrictions) in type_restrictions.iter() { // for (restriction, pos) in restrictions { // // TODO @@ -698,16 +698,16 @@ impl FunctionType { // Evaluate effects directly into environment let mut early_return = behavior.new_function_target(self.id, |target| { - type_arguments.closure_id = if !self.closed_over_variables.is_empty() { + type_arguments.closure_id = if self.closed_over_variables.is_empty() { + None + } else { let closure_id = types.new_closure_id(); Some(closure_id) - } else { - None }; let mut return_result = None; - for event in self.effects.clone().into_iter() { + for event in self.effects.clone() { let result = apply_event(event, this_value, &mut type_arguments, environment, target, types); @@ -719,7 +719,7 @@ impl FunctionType { if let Some(closure_id) = type_arguments.closure_id { // Set closed over values - for (reference, value) in self.closed_over_variables.iter() { + self.closed_over_variables.iter().for_each(|(reference, value)| { let value = substitute(*value, &mut type_arguments, environment, types); environment .facts @@ -727,7 +727,7 @@ impl FunctionType { .insert((closure_id, reference.clone()), value); crate::utils::notify!("in {:?} set {:?} to {:?}", closure_id, reference, value); - } + }); } return_result @@ -792,7 +792,7 @@ impl FunctionType { environment: &mut Environment, types: &TypeStore, errors: &mut Vec, - call_site: source_map::BaseSpan, + call_site: &source_map::BaseSpan, ) -> SeedingContext { for (parameter_idx, parameter) in self.parameters.parameters.iter().enumerate() { // TODO temp @@ -852,7 +852,7 @@ impl FunctionType { parameter_position: parameter.position.clone(), argument_position: argument_position.clone(), restriction: None, - }) + }); } } else { // Already checked so can set. 
TODO destructuring etc @@ -868,7 +868,7 @@ impl FunctionType { parameter.ty, (value, SpanWithSource::NULL_SPAN, parameter_idx), false, - ) + ); } else { // TODO group errors.push(FunctionCallingError::MissingArgument { @@ -900,12 +900,13 @@ impl FunctionType { for (idx, argument) in arguments.iter().enumerate().skip(self.parameters.parameters.len()) { - let (argument_type, argument_pos) = - if let SynthesisedArgument::NonSpread { ty, position: pos } = argument { - (ty, pos) - } else { - todo!() - }; + let SynthesisedArgument::NonSpread { + ty: argument_type, + position: argument_pos, + } = argument + else { + todo!() + }; let item_type = if let Type::Constructor(Constructor::StructureGenerics( StructureGenerics { on, arguments }, @@ -947,7 +948,7 @@ impl FunctionType { argument_position: argument_pos.clone(), parameter_position: rest_parameter.position.clone(), restriction: None, - }) + }); } } else { todo!("substitute") diff --git a/checker/src/types/casts.rs b/checker/src/types/casts.rs index 8314d0a8..5aae1ea9 100644 --- a/checker/src/types/casts.rs +++ b/checker/src/types/casts.rs @@ -2,7 +2,7 @@ use super::Constant; /// TODO needs environment for to primitive /// -/// https://tc39.es/ecma262/multipage/abstract-operations.html#sec-tonumber +/// pub(crate) fn cast_as_number(cst: &Constant, strict_casts: bool) -> Result { if strict_casts && !matches!(cst, Constant::Number(_)) { return Err(()); @@ -30,9 +30,10 @@ pub(crate) fn cast_as_string(cst: &Constant, strict_casts: bool) -> Result /// -/// TODO this ridiculous clause: https://tc39.es/ecma262/multipage/additional-ecmascript-features-for-web-browsers.html#sec-IsHTMLDDA-internal-slot-to-boolean +/// TODO this ridiculous clause: +#[allow(clippy::unnecessary_wraps)] pub(crate) fn cast_as_boolean(cst: &Constant, strict_casts: bool) -> Result { if strict_casts { crate::utils::notify!("TODO assert boolean type here, maybe levels. Need to also return where can do collapsation"); diff --git a/checker/src/types/classes.rs b/checker/src/types/classes.rs index 6c443250..dbf934bc 100644 --- a/checker/src/types/classes.rs +++ b/checker/src/types/classes.rs @@ -39,7 +39,7 @@ pub struct SynthesisedClassValue { /// TODO i really hate this setup. Can it be simpler & faster? /// -/// What about storing it as just set_events...? +/// What about storing it as just `set_events`...? pub struct RegisterClassPropertiesEvent { pub properties: Vec, pub class_prototype: TypeId, @@ -58,7 +58,7 @@ fn register_properties_into_store Result { match self { SynthesisedArgument::NonSpread { ty, position: _ } => Ok(*ty), diff --git a/checker/src/types/mod.rs b/checker/src/types/mod.rs index 67a9088e..e2aaf22a 100644 --- a/checker/src/types/mod.rs +++ b/checker/src/types/mod.rs @@ -46,7 +46,7 @@ pub struct TypeId(pub(crate) u16); // TODO ids as macro as to not do in [crate::RootEnvironment] impl TypeId { - /// Not to be confused with [TypeId::NEVER_TYPE] + /// Not to be confused with [`TypeId::NEVER_TYPE`] pub const ERROR_TYPE: Self = Self(0); pub const UNIMPLEMENTED_ERROR_TYPE: TypeId = TypeId::ERROR_TYPE; @@ -81,7 +81,7 @@ impl TypeId { /// TODO remove. 
Shortcut for inferred this pub const ANY_INFERRED_FREE_THIS: Self = Self(19); - /// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/new.target + /// pub const NEW_TARGET_ARG: Self = Self(20); pub const SYMBOL_TO_PRIMITIVE: Self = Self(21); @@ -149,6 +149,7 @@ pub enum PolyNature { } // TODO +#[must_use] pub fn is_primitive(ty: TypeId, types: &TypeStore) -> bool { if matches!(ty, TypeId::BOOLEAN_TYPE | TypeId::NUMBER_TYPE | TypeId::STRING_TYPE) { return true; @@ -175,6 +176,7 @@ impl Type { /// TODO return is poly pub(crate) fn is_dependent(&self) -> bool { + #[allow(clippy::match_same_arms)] match self { // TODO Type::Constructor(Constructor::StructureGenerics(..)) => false, @@ -279,6 +281,7 @@ pub(crate) fn new_logical_or_type(lhs: TypeId, rhs: TypeId, types: &mut TypeStor types.new_conditional_type(lhs, lhs, rhs) } +#[must_use] pub fn is_type_truthy_falsy(id: TypeId, types: &TypeStore) -> Decidable { // These first two branches are just shortcuts. if id == TypeId::TRUE || id == TypeId::FALSE { @@ -309,7 +312,7 @@ pub fn is_type_truthy_falsy(id: TypeId, types: &TypeStore) -> Decidable { } } -/// TODO add_property_restrictions via const generics +/// TODO `add_property_restrictions` via const generics pub struct BasicEquality { pub add_property_restrictions: bool, pub position: SpanWithSource, diff --git a/checker/src/types/others.rs b/checker/src/types/others.rs index 6361193c..716e0f70 100644 --- a/checker/src/types/others.rs +++ b/checker/src/types/others.rs @@ -16,7 +16,7 @@ pub(crate) fn create_object_for_type( let mut obj = ObjectBuilder::new(None, types, &mut environment.facts); // env.facts.new_object(None, types, false); match types.get_type_by_id(ty) { Type::AliasTo { to, name, parameters } => todo!(), - ty @ Type::And(left, right) | ty @ Type::Or(left, right) => { + ty @ (Type::And(left, right) | Type::Or(left, right)) => { let kind = if matches!(ty, Type::And(..)) { "and" } else { "or" }; let (left, right) = (*left, *right); diff --git a/checker/src/types/poly_types/generics/generic_type_arguments.rs b/checker/src/types/poly_types/generics/generic_type_arguments.rs index 45a9997e..1fd485c2 100644 --- a/checker/src/types/poly_types/generics/generic_type_arguments.rs +++ b/checker/src/types/poly_types/generics/generic_type_arguments.rs @@ -101,7 +101,7 @@ impl ClosureChain for FunctionTypeArguments { } } if let Some(ref parent) = self.structure_arguments { - for closure_id in parent.closures.iter() { + for closure_id in &parent.closures { let res = cb(*closure_id); if res.is_some() { return res; @@ -135,7 +135,7 @@ impl TypeArgumentStore for FunctionTypeArguments { StructureGenericArguments { type_arguments: merged, - closures: parent.closures.iter().cloned().chain(self.closure_id).collect(), + closures: parent.closures.iter().copied().chain(self.closure_id).collect(), } } None => StructureGenericArguments { diff --git a/checker/src/types/poly_types/generics/generic_type_parameters.rs b/checker/src/types/poly_types/generics/generic_type_parameters.rs index fc525261..feae4d4a 100644 --- a/checker/src/types/poly_types/generics/generic_type_parameters.rs +++ b/checker/src/types/poly_types/generics/generic_type_parameters.rs @@ -10,6 +10,7 @@ use super::generic_type_arguments::TypeArgumentStore; pub struct GenericTypeParameters(pub Vec); impl GenericTypeParameters { + #[must_use] pub fn as_option(&self) -> Option<()> { todo!() // let borrow = self.0.borrow(); diff --git a/checker/src/types/poly_types/substitution.rs 
b/checker/src/types/poly_types/substitution.rs index e5141980..9a4c36ca 100644 --- a/checker/src/types/poly_types/substitution.rs +++ b/checker/src/types/poly_types/substitution.rs @@ -25,7 +25,6 @@ pub(crate) fn substitute( let ty = types.get_type_by_id(id); match ty { - Type::Constant(_) => id, Type::Object(..) => { // TODO only sometimes curry_arguments(arguments, types, id) @@ -42,7 +41,11 @@ pub(crate) fn substitute( curry_arguments(arguments, types, id) } Type::FunctionReference(f, t) => curry_arguments(arguments, types, id), - Type::AliasTo { .. } | Type::And(_, _) | Type::Or(_, _) | Type::NamedRooted { .. } => id, + Type::Constant(_) + | Type::AliasTo { .. } + | Type::And(_, _) + | Type::Or(_, _) + | Type::NamedRooted { .. } => id, Type::RootPolyType(nature) => { if let PolyNature::Open(_) = nature { id @@ -207,7 +210,7 @@ pub(crate) fn substitute( let lhs = substitute(lhs, arguments, environment, types); let rhs = substitute(rhs, arguments, environment, types); - evaluate_equality_inequality_operation(lhs, operator, rhs, types, false) + evaluate_equality_inequality_operation(lhs, &operator, rhs, types, false) .expect("restriction about binary operator failed") } Constructor::TypeOperator(..) => todo!(), @@ -258,7 +261,9 @@ pub(crate) fn curry_arguments( types: &mut TypeStore, id: TypeId, ) -> TypeId { - if !arguments.is_empty() { + if arguments.is_empty() { + id + } else { crate::utils::notify!("Storing arguments onto object"); // TODO only carry arguments that are used let arguments = arguments.to_structural_generic_arguments(); @@ -266,7 +271,5 @@ pub(crate) fn curry_arguments( types.register_type(Type::Constructor(Constructor::StructureGenerics( crate::types::StructureGenerics { on: id, arguments }, ))) - } else { - id } } diff --git a/checker/src/types/printing.rs b/checker/src/types/printing.rs index b0c105ae..14b7862a 100644 --- a/checker/src/types/printing.rs +++ b/checker/src/types/printing.rs @@ -9,6 +9,8 @@ use crate::{ }; /// TODO temp, needs recursion safe, reuse buffer + +#[must_use] pub fn print_type(id: TypeId, types: &TypeStore, ctx: &GeneralContext, debug: bool) -> String { let mut buf = String::new(); print_type_into_buf(id, &mut buf, &mut HashSet::new(), types, ctx, debug); @@ -78,7 +80,7 @@ fn print_type_into_buf( } print_type_into_buf(*to, buf, cycles, types, ctx, debug); } - nature => { + PolyNature::RecursiveFunction(_, _) => { todo!() // let modified_base = match env { // GeneralContext::Syntax(syn) => { @@ -97,7 +99,7 @@ fn print_type_into_buf( result_union, } => { if debug { - write!(buf, "[? {:? }", id).unwrap(); + write!(buf, "[? {id:? 
}").unwrap(); print_type_into_buf(*condition, buf, cycles, types, ctx, debug); buf.push(']'); } @@ -126,7 +128,7 @@ fn print_type_into_buf( for (not_at_end, (arg, _)) in arguments.type_arguments.values().nendiate() { print_type_into_buf(*arg, buf, cycles, types, ctx, debug); if not_at_end { - buf.push_str(", ") + buf.push_str(", "); } } buf.push('>'); @@ -170,7 +172,7 @@ fn print_type_into_buf( }, constructor => { let base = get_on_ctx!(ctx.get_poly_base(id, types)).unwrap(); - print_type_into_buf(base, buf, cycles, types, ctx, debug) + print_type_into_buf(base, buf, cycles, types, ctx, debug); } }, Type::NamedRooted { name, parameters, nominal } => { @@ -191,7 +193,7 @@ fn print_type_into_buf( // } // buf.push_str(" }") } else { - buf.push_str(name) + buf.push_str(name); } if let (true, Some(parameters)) = (debug, parameters) { buf.push('<'); @@ -199,7 +201,7 @@ fn print_type_into_buf( print_type_into_buf(*param, buf, cycles, types, ctx, debug); buf.push_str(", "); } - buf.push('>') + buf.push('>'); } } Type::Constant(cst) => { @@ -237,7 +239,7 @@ fn print_type_into_buf( todo!() } if not_at_end { - buf.push_str(", ") + buf.push_str(", "); } } buf.push('>'); @@ -248,7 +250,7 @@ fn print_type_into_buf( buf.push_str(": "); print_type_into_buf(param.ty, buf, cycles, types, ctx, debug); if not_at_end { - buf.push_str(", ") + buf.push_str(", "); } } buf.push_str(") => "); @@ -316,6 +318,7 @@ fn print_type_into_buf( cycles.remove(&id); } +#[must_use] pub fn print_property_key( key: &PropertyKey, types: &TypeStore, diff --git a/checker/src/types/properties.rs b/checker/src/types/properties.rs index 01a25dd4..736d5513 100644 --- a/checker/src/types/properties.rs +++ b/checker/src/types/properties.rs @@ -5,7 +5,9 @@ use crate::{ context::{facts::Publicity, CallCheckingBehavior, Logical, SetPropertyError}, events::Event, subtyping::{type_is_subtype, SubTypeResult}, - types::{printing::print_type, substitute, FunctionType, StructureGenerics}, + types::{ + calling::CallingInput, printing::print_type, substitute, FunctionType, StructureGenerics, + }, Constant, Environment, TypeId, }; @@ -31,6 +33,7 @@ pub enum PropertyKey<'a> { } impl<'a> PropertyKey<'a> { + #[must_use] pub fn into_owned(&self) -> PropertyKey<'static> { match self { PropertyKey::String(s) => PropertyKey::String(Cow::Owned(s.to_string())), @@ -73,9 +76,10 @@ static NUMBERS: &str = "0123456789"; impl<'a> PropertyKey<'a> { /// For array indexes + #[must_use] pub fn from_usize(a: usize) -> Self { if a < 10 { - Self::String(Cow::Borrowed(&NUMBERS[a..(a + 1)])) + Self::String(Cow::Borrowed(&NUMBERS[a..=a])) } else { Self::String(Cow::Owned(a.to_string())) } @@ -95,6 +99,7 @@ pub enum PropertyValue { impl PropertyValue { /// TODO wip + #[must_use] pub fn as_get_type(&self) -> TypeId { match self { PropertyValue::Value(value) => *value, @@ -105,6 +110,7 @@ impl PropertyValue { } } + #[must_use] pub fn as_set_type(&self) -> TypeId { match self { PropertyValue::Value(value) => *value, @@ -120,6 +126,8 @@ impl PropertyValue { /// /// *be aware this creates a new type every time, bc of this binding. 
could cache this bound /// types at some point* +// https://github.com/kaleidawave/ezno/pull/88 +#[allow(clippy::too_many_arguments)] pub(crate) fn get_property( on: TypeId, publicity: Publicity, @@ -130,17 +138,17 @@ pub(crate) fn get_property( types: &mut TypeStore, position: SpanWithSource, ) -> Option<(PropertyKind, TypeId)> { - // || under == TypeId::ERROR_TYPE - if on == TypeId::ERROR_TYPE { - return Some((PropertyKind::Direct, TypeId::ERROR_TYPE)); - } - enum GetResult { AccessIntroducesDependence(TypeId), /// These always return the same value FromAObject(TypeId), } + // || under == TypeId::ERROR_TYPE + if on == TypeId::ERROR_TYPE { + return Some((PropertyKind::Direct, TypeId::ERROR_TYPE)); + } + let value: GetResult = if let Some(constraint) = environment.get_poly_base(on, types) { GetResult::AccessIntroducesDependence(evaluate_get_on_poly( constraint, @@ -195,10 +203,6 @@ fn get_from_an_object( behavior: &mut E, types: &mut TypeStore, ) -> Option<(PropertyKind, TypeId)> { - let result = environment.get_property_unbound(on, publicity, under, types)?; - - return resolve_property_on_logical(result, types, on, environment, behavior); - /// Generates closure arguments, values of this and more. Runs getters fn resolve_property_on_logical( logical: Logical, @@ -271,13 +275,15 @@ fn get_from_an_object( PropertyValue::Getter(getter) => { let state = ThisValue::Passed(on); let call = getter.call( - CalledWithNew::None, - state, - None, + CallingInput { + called_with_new: CalledWithNew::None, + this_value: state, + call_site_type_arguments: None, + call_site: SpanWithSource::NULL_SPAN, + }, // TODO None, &[], - SpanWithSource::NULL_SPAN, environment, behavior, types, @@ -303,8 +309,14 @@ fn get_from_an_object( } } } + + let result = environment.get_property_unbound(on, publicity, under, types)?; + + resolve_property_on_logical(result, types, on, environment, behavior) } +// https://github.com/kaleidawave/ezno/pull/88 +#[allow(clippy::too_many_arguments)] fn evaluate_get_on_poly( constraint: TypeId, on: TypeId, @@ -323,8 +335,6 @@ fn evaluate_get_on_poly( // is_open_poly // ); - let fact = environment.get_property_unbound(constraint, publicity, under.clone(), types)?; - fn get_property_from_logical( fact: Logical, types: &mut TypeStore, @@ -403,17 +413,22 @@ fn evaluate_get_on_poly( } } + let fact = environment.get_property_unbound(constraint, publicity, under.clone(), types)?; + get_property_from_logical(fact, types, on, under) } /// Aka a assignment to a property, **INCLUDING initialization of a new one** /// /// Evaluates setters + +// https://github.com/kaleidawave/ezno/pull/88 +#[allow(clippy::too_many_arguments)] pub(crate) fn set_property( on: TypeId, publicity: Publicity, - under: PropertyKey, - new: PropertyValue, + under: &PropertyKey, + new: &PropertyValue, environment: &mut Environment, behavior: &mut E, types: &TypeStore, diff --git a/checker/src/types/store.rs b/checker/src/types/store.rs index b55aee37..f06f7e44 100644 --- a/checker/src/types/store.rs +++ b/checker/src/types/store.rs @@ -86,7 +86,7 @@ impl Default for TypeStore { assert_eq!(types.len(), TypeId::INTERNAL_TYPE_COUNT); Self { - types: types.to_vec(), + types: types.clone(), functions: HashMap::new(), dependent_dependencies: Default::default(), specialisations: Default::default(), @@ -125,6 +125,7 @@ impl TypeStore { id } + #[must_use] pub fn get_type_by_id(&self, id: TypeId) -> &Type { &self.types[id.0 as usize] } @@ -141,6 +142,7 @@ impl TypeStore { } /// TODO temp + #[must_use] pub fn into_vec_temp(self) -> 
Vec<(TypeId, Type)> { self.types.into_iter().enumerate().map(|(idx, ty)| (TypeId(idx as u16), ty)).collect() } @@ -151,7 +153,7 @@ impl TypeStore { type_parameters: Option, parameters: crate::types::functions::SynthesisedParameters, return_type: TypeId, - declared_at: source_map::SpanWithSource, + declared_at: &source_map::SpanWithSource, effects: Vec, constant_function: Option, ) -> TypeId { @@ -230,14 +232,14 @@ impl TypeStore { &self, ctx: &'a Context, on: TypeId, - resolver: &impl Fn(GeneralContext<'a>, &TypeStore, TypeId, TData) -> Option, + resolver: &impl Fn(&GeneralContext<'a>, &TypeStore, TypeId, TData) -> Option, data: TData, ) -> Option> { match self.get_type_by_id(on) { Type::Function(..) => { let on_function = ctx .parents_iter() - .find_map(|env| resolver(env, self, on, data)) + .find_map(|env| resolver(&env, self, on, data)) .map(Logical::Pure); // TODO undecided on this @@ -248,7 +250,7 @@ impl TypeStore { Type::AliasTo { to, .. } => { let property_on_self = ctx .parents_iter() - .find_map(|env| resolver(env, self, on, data)) + .find_map(|env| resolver(&env, self, on, data)) .map(Logical::Pure); property_on_self.or_else(|| self.get_fact_about_type(ctx, *to, resolver, data)) @@ -294,7 +296,7 @@ impl TypeStore { } Type::Object(..) | Type::NamedRooted { .. } => ctx .parents_iter() - .find_map(|env| resolver(env, self, on, data)) + .find_map(|env| resolver(&env, self, on, data)) .map(Logical::Pure) .or_else(|| { if let Some(prototype) = ctx @@ -308,7 +310,7 @@ impl TypeStore { }), Type::Constant(cst) => ctx .parents_iter() - .find_map(|env| resolver(env, self, on, data)) + .find_map(|env| resolver(&env, self, on, data)) .map(Logical::Pure) .or_else(|| { self.get_fact_about_type(ctx, cst.get_backing_type_id(), resolver, data) @@ -323,6 +325,7 @@ impl TypeStore { ClosureId(self.closure_counter) } + #[must_use] pub fn get_function_from_id(&self, id: FunctionId) -> &FunctionType { self.functions.get(&id).unwrap() } diff --git a/checker/src/types/subtyping.rs b/checker/src/types/subtyping.rs index f75c6a14..7e41566e 100644 --- a/checker/src/types/subtyping.rs +++ b/checker/src/types/subtyping.rs @@ -23,7 +23,7 @@ type TypeArguments = map_vec::Map; /// `base_type :>= ty` (`ty <=: base_type`) /// -/// TODO TypeArguments as a chain? +/// TODO `TypeArguments` as a chain? pub fn type_is_subtype( base_type: TypeId, ty: TypeId, @@ -51,7 +51,7 @@ fn set_object_restriction(environment: &mut Environment, object: TypeId, restric } } -/// TODO integrate set_restriction, but it can't create a type ? maybe object restriction should be logically. +/// TODO integrate `set_restriction`, but it can't create a type ? maybe object restriction should be logically. 
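// Editor's note — illustrative sketch, not part of the patch. Several getters above
// (e.g. `get_type_by_id`, `into_vec_temp`, `get_function_from_id`) gain `#[must_use]`;
// this minimal example shows what the attribute buys. `lookup` is an invented name.
#[must_use]
fn lookup(id: u16) -> Option<&'static str> {
    (id == 0).then_some("error type")
}

fn main() {
    // Calling `lookup(0);` and discarding the result would now trigger the
    // `unused_must_use` warning introduced by the attribute.
    if let Some(name) = lookup(0) {
        println!("{name}");
    }
}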
/// maybe sub function pub fn type_is_subtype_of_property( property: Logical, @@ -104,6 +104,7 @@ pub fn type_is_subtype_of_property( } } +#[allow(clippy::too_many_arguments)] fn type_is_subtype2( base_type: TypeId, ty: TypeId, @@ -365,75 +366,71 @@ fn type_is_subtype2( arguments, })) => { // Overwrites for special types with proofs - match *on { - // TODO is nominal - TypeId::ARRAY_TYPE => { - let backing_type = - arguments.get_argument(TypeId::T_TYPE).expect("array T argument not set ?"); - - // TODO temp fix for general parameters - if let Type::Object(_) = right_ty { - for (publicity, property, value) in environment.get_properties_on_type(ty) { - // Assume every property on itself is either number or 'length' - match property { - PropertyKey::String(a) if a == "length" => { - continue; - } - PropertyKey::String(a) => { - crate::utils::notify!("looking at prototype {}", a); - } - _ => (), + if let TypeId::ARRAY_TYPE = *on { + let backing_type = + arguments.get_argument(TypeId::T_TYPE).expect("array T argument not set ?"); + + // TODO temp fix for general parameters + if let Type::Object(_) = right_ty { + for (publicity, property, value) in environment.get_properties_on_type(ty) { + // Assume every property on itself is either number or 'length' + match property { + PropertyKey::String(a) if a == "length" => { + continue; } - let result = type_is_subtype2( - backing_type, - value, - Some(&arguments.type_arguments), - right_type_arguments, - behavior, - environment, - types, - restriction_mode, - ); - // TODO collect - if !matches!(result, SubTypeResult::IsSubType) { - return result; + PropertyKey::String(a) => { + crate::utils::notify!("looking at prototype {}", a); } - - // TODO cheaper subtype checker - // if let SubTypeResult::IsSubType = type_is_subtype2( - // *base_property, - // property, - // Some(&arguments.type_arguments), - // right_type_arguments, - // behavior, - // environment, - // types, - // false, - // ) { - // } + PropertyKey::Type(_) => (), + } + let result = type_is_subtype2( + backing_type, + value, + Some(&arguments.type_arguments), + right_type_arguments, + behavior, + environment, + types, + restriction_mode, + ); + // TODO collect + if !matches!(result, SubTypeResult::IsSubType) { + return result; } - SubTypeResult::IsSubType - } else { - crate::utils::notify!("Else here :?"); - todo!("get right type structure generics match parameters"); + // TODO cheaper subtype checker + // if let SubTypeResult::IsSubType = type_is_subtype2( + // *base_property, + // property, + // Some(&arguments.type_arguments), + // right_type_arguments, + // behavior, + // environment, + // types, + // false, + // ) { + // } } + + SubTypeResult::IsSubType + } else { + crate::utils::notify!("Else here :?"); + todo!("get right type structure generics match parameters"); } - _ => { - if base_type_arguments.is_some() { - todo!("need chain to do nesting") - } - type_is_subtype2( - *on, - ty, - Some(&arguments.type_arguments), - right_type_arguments, - behavior, - environment, - types, - restriction_mode, - ) + } else { + if base_type_arguments.is_some() { + todo!("need chain to do nesting") } + type_is_subtype2( + *on, + ty, + Some(&arguments.type_arguments), + right_type_arguments, + behavior, + environment, + types, + restriction_mode, + ) } } Type::Constructor(cst) => match cst { @@ -603,6 +600,8 @@ fn type_is_subtype2( } /// TODO temp + +#[allow(clippy::too_many_arguments)] fn check_properties( base_type: TypeId, ty: TypeId, @@ -644,7 +643,7 @@ fn check_properties( match result { 
SubTypeResult::IsSubType => { if behavior.add_property_restrictions() { - set_object_restriction(environment, rhs_type, property) + set_object_restriction(environment, rhs_type, property); } } SubTypeResult::IsNotSubType(mismatch) => { @@ -669,12 +668,12 @@ fn check_properties( } } } - if !property_errors.is_empty() { + if property_errors.is_empty() { + SubTypeResult::IsSubType + } else { SubTypeResult::IsNotSubType(NonEqualityReason::PropertiesInvalid { errors: property_errors, }) - } else { - SubTypeResult::IsSubType } } @@ -690,7 +689,7 @@ impl NonEqualityReason { | NonEqualityReason::MissingParameter | NonEqualityReason::Mismatch => Vec::new(), NonEqualityReason::PropertiesInvalid { errors } => { - errors.into_iter().map(|error| format!("{:?}", error)).collect() + errors.into_iter().map(|error| format!("{error:?}")).collect() } NonEqualityReason::TooStrict => todo!(), } diff --git a/checker/src/types/terms.rs b/checker/src/types/terms.rs index 4e781667..33485a0a 100644 --- a/checker/src/types/terms.rs +++ b/checker/src/types/terms.rs @@ -2,9 +2,9 @@ use super::TypeId; /// Terms /// TODO: -/// - IntoProof -/// - BigInt (https://github.com/rust-num/num-bigint) -/// - Separate NotNull term, and implement js subtyping +/// - `IntoProof` +/// - `BigInt` () +/// - Separate `NotNull` term, and implement js subtyping /// /// TODO not sure about some of these #[derive(Eq, PartialEq, Hash, Debug, Clone, binary_serialize_derive::BinarySerializable)] @@ -26,6 +26,7 @@ pub enum Constant { impl Constant { /// **AS OF THE JS IMPLEMENTATION** + #[must_use] pub fn as_js_string(&self) -> String { match self { Constant::Number(value) => value.to_string(), @@ -39,7 +40,7 @@ impl Constant { } } - /// Like [Constant::as_js_string] but adds quotes to strings + /// Like [`Constant::as_js_string`] but adds quotes to strings /// /// TODO take buffer pub(crate) fn as_type_name(&self) -> String { @@ -56,6 +57,7 @@ impl Constant { } } + #[must_use] pub fn get_backing_type_id(&self) -> TypeId { match self { Constant::Number(_) | Constant::NaN => TypeId::NUMBER_TYPE, diff --git a/checker/src/utils.rs b/checker/src/utils.rs index 0a412b80..377c651c 100644 --- a/checker/src/utils.rs +++ b/checker/src/utils.rs @@ -2,13 +2,13 @@ pub(crate) fn format_list(mut iterator: impl ExactSizeIterator "".into(), + 0 => String::new(), 1 => iterator.next().unwrap().to_string(), 2 => format!("{} and {}", iterator.next().unwrap(), iterator.next().unwrap()), val => { let mut buf = String::new(); for value in iterator.by_ref().take(val - 1) { - write!(&mut buf, "{}, ", value).unwrap(); + write!(&mut buf, "{value}, ").unwrap(); } write!(&mut buf, "and {}", iterator.next().unwrap()).unwrap(); buf diff --git a/parser/Cargo.toml b/parser/Cargo.toml index caafdb59..e2a3259e 100644 --- a/parser/Cargo.toml +++ b/parser/Cargo.toml @@ -9,8 +9,12 @@ repository = "https://github.com/kaleidawave/ezno" homepage = "https://kaleidawave.github.io/posts/introducing-ezno/" categories = ["parser-implementations"] + # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[lints] +workspace = true + [features] # TODO these shouldn't be default default = [ diff --git a/parser/examples/chain.rs b/parser/examples/chain.rs index ede4df16..946e427d 100644 --- a/parser/examples/chain.rs +++ b/parser/examples/chain.rs @@ -12,7 +12,7 @@ struct ShowChain; impl Visitor for ShowChain { fn visit(&mut self, item: &Expression, _data: &mut (), chain: &Chain) { if matches!(item, Expression::VariableReference(name, _) if name == 
"chain") { - eprintln!("{:#?}", chain); + eprintln!("{chain:#?}"); } } } diff --git a/parser/examples/expressions.rs b/parser/examples/expressions.rs index 89e0a761..3505d9a6 100644 --- a/parser/examples/expressions.rs +++ b/parser/examples/expressions.rs @@ -14,6 +14,6 @@ fn main() { SourceId::NULL, None, ); - println!("{:#?}", expression); + println!("{expression:#?}"); } } diff --git a/parser/examples/lex.rs b/parser/examples/lex.rs index 2b71de66..b9a3d0a5 100644 --- a/parser/examples/lex.rs +++ b/parser/examples/lex.rs @@ -33,9 +33,9 @@ fn lex_and_print_tokens(script: String, cursors: Option {} + Ok(()) => {} Err((lexer_err, _)) => { - eprintln!("lexer error: {:?}", lexer_err); + eprintln!("lexer error: {lexer_err:?}"); } } } diff --git a/parser/examples/self_tokenization.rs b/parser/examples/self_tokenization.rs index a2008118..3971870e 100644 --- a/parser/examples/self_tokenization.rs +++ b/parser/examples/self_tokenization.rs @@ -9,7 +9,7 @@ fn main() { let tokens = SelfRustTokenize::to_tokens(&expression); - println!("{}", tokens); + println!("{tokens}"); } #[cfg(not(feature = "self-rust-tokenize"))] diff --git a/parser/examples/statement.rs b/parser/examples/statement.rs index b8025ab9..706a9be2 100644 --- a/parser/examples/statement.rs +++ b/parser/examples/statement.rs @@ -3,5 +3,5 @@ use ezno_parser::{ASTNode, SourceId, Statement}; fn main() { let statement = Statement::from_string("const x = 4".to_owned(), Default::default(), SourceId::NULL, None); - println!("{:#?}", statement); + println!("{statement:#?}"); } diff --git a/parser/examples/type_references.rs b/parser/examples/type_references.rs index 9ffa8afe..2aa913eb 100644 --- a/parser/examples/type_references.rs +++ b/parser/examples/type_references.rs @@ -8,7 +8,7 @@ fn main() { None, ); - println!("{:#?}", reference); + println!("{reference:#?}"); let expression = Expression::from_string( "(x << 3, x >> 2, y>(2), x < 7, x< 7)".into(), @@ -17,5 +17,5 @@ fn main() { None, ); - println!("{:#?}", expression); + println!("{expression:#?}"); } diff --git a/parser/generator/generator.rs b/parser/generator/generator.rs index ac14cb68..806aa7dd 100644 --- a/parser/generator/generator.rs +++ b/parser/generator/generator.rs @@ -43,8 +43,14 @@ fn token_stream_to_ast_node(line_starts, options, string, source, None, cursors); + let parse_result = ezno_parser::lex_and_parse_script::( + line_starts, + options, + &string, + source, + None, + cursors, + ); let node = match parse_result { Ok(node) => node, diff --git a/parser/src/block.rs b/parser/src/block.rs index 95973ee4..978f16ef 100644 --- a/parser/src/block.rs +++ b/parser/src/block.rs @@ -71,10 +71,10 @@ impl ASTNode for StatementOrDeclaration { ) { match self { StatementOrDeclaration::Statement(item) => { - item.to_string_from_buffer(buf, options, depth) + item.to_string_from_buffer(buf, options, depth); } StatementOrDeclaration::Declaration(item) => { - item.to_string_from_buffer(buf, options, depth) + item.to_string_from_buffer(buf, options, depth); } } } @@ -256,7 +256,7 @@ impl ASTNode for BlockOrSingleStatement { ) { match self { BlockOrSingleStatement::Braced(block) => { - block.to_string_from_buffer(buf, options, depth) + block.to_string_from_buffer(buf, options, depth); } BlockOrSingleStatement::SingleStatement(stmt) => { if options.pretty { diff --git a/parser/src/comments.rs b/parser/src/comments.rs index cd533c27..e9315edb 100644 --- a/parser/src/comments.rs +++ b/parser/src/comments.rs @@ -27,7 +27,7 @@ where | WithComment::PrefixComment(_, item, _) | 
WithComment::PostfixComment(item, _, _) => { let inner = self_rust_tokenize::SelfRustTokenize::to_tokens(item); - token_stream.extend(self_rust_tokenize::quote!(WithComment::None(#inner))) + token_stream.extend(self_rust_tokenize::quote!(WithComment::None(#inner))); } } } @@ -52,7 +52,7 @@ impl Visitable for WithComment { options: &crate::VisitSettings, chain: &mut temporary_annex::Annex, ) { - self.get_ast_ref().visit(visitors, data, options, chain) + self.get_ast_ref().visit(visitors, data, options, chain); } fn visit_mut( @@ -62,7 +62,7 @@ impl Visitable for WithComment { options: &crate::VisitSettings, chain: &mut temporary_annex::Annex, ) { - self.get_ast_mut().visit_mut(visitors, data, options, chain) + self.get_ast_mut().visit_mut(visitors, data, options, chain); } } diff --git a/parser/src/cursor.rs b/parser/src/cursor.rs index 35d6585d..5f39fb11 100644 --- a/parser/src/cursor.rs +++ b/parser/src/cursor.rs @@ -3,6 +3,7 @@ use std::marker::PhantomData; pub type EmptyCursorId = CursorId<()>; impl EmptyCursorId { + #[must_use] pub fn new(id: u8) -> Self { Self(id, PhantomData) } @@ -35,6 +36,6 @@ impl self_rust_tokenize::SelfRustTokenize for CursorId { ) { use self_rust_tokenize::proc_macro2::{Ident, Span}; let token = Ident::new(&format!("_cursor_{}", self.0), Span::call_site()); - token_stream.extend(self_rust_tokenize::quote!(ezno_parser::IntoAST::into_ast(#token))) + token_stream.extend(self_rust_tokenize::quote!(ezno_parser::IntoAST::into_ast(#token))); } } diff --git a/parser/src/declarations/classes/class_member.rs b/parser/src/declarations/classes/class_member.rs index f03cf07f..67693ae0 100644 --- a/parser/src/declarations/classes/class_member.rs +++ b/parser/src/declarations/classes/class_member.rs @@ -10,7 +10,7 @@ use crate::{ ParseOptions, ParseResult, PropertyKey, TSXKeyword, TSXToken, TypeAnnotation, WithComment, }; -/// The variable id's of these is handled by their [PropertyKey] +/// The variable id's of these is handled by their [`PropertyKey`] #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -52,6 +52,7 @@ impl ASTNode for ClassMember { } } + #[allow(clippy::similar_names)] fn from_reader( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -82,6 +83,7 @@ impl ASTNode for ClassMember { .map(|token| Keyword::new(token.get_span())); let header = MethodHeader::from_reader(reader); + let key = WithComment::>::from_reader(reader, state, options)?; match reader.peek() { @@ -156,10 +158,10 @@ impl ASTNode for ClassMember { if is_static.is_some() { buf.push_str("static "); } - function.to_string_from_buffer(buf, options, depth + 1) + function.to_string_from_buffer(buf, options, depth + 1); } Self::Constructor(constructor) => { - constructor.to_string_from_buffer(buf, options, depth + 1) + constructor.to_string_from_buffer(buf, options, depth + 1); } Self::StaticBlock(block) => { buf.push_str("static "); @@ -169,7 +171,7 @@ impl ASTNode for ClassMember { if options.include_comments { buf.push_str("/*"); buf.push_str(c); - buf.push_str("*/") + buf.push_str("*/"); } } } @@ -199,6 +201,7 @@ impl FunctionBased for ClassFunctionBase { type Header = MethodHeader; type Name = WithComment>; + #[allow(clippy::similar_names)] fn header_and_name_from_reader( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -249,7 +252,7 @@ impl FunctionBased for ClassConstructorBase { _options: &crate::ToStringOptions, 
_depth: u8, ) { - buf.push_str("constructor") + buf.push_str("constructor"); } fn header_left(header: &Self::Header) -> Option { diff --git a/parser/src/declarations/classes/mod.rs b/parser/src/declarations/classes/mod.rs index 832d3e49..dfa5534c 100644 --- a/parser/src/declarations/classes/mod.rs +++ b/parser/src/declarations/classes/mod.rs @@ -45,7 +45,7 @@ impl &Span { @@ -116,10 +116,10 @@ impl ClassDeclaration { Ok(ClassDeclaration { class_keyword, name, + type_parameters, extends, implements, members, - type_parameters, position, }) } diff --git a/parser/src/declarations/export.rs b/parser/src/declarations/export.rs index f19afe54..3ae9610d 100644 --- a/parser/src/declarations/export.rs +++ b/parser/src/declarations/export.rs @@ -99,7 +99,7 @@ impl ASTNode for ExportDeclaration { let position = start.union(class_declaration.get_position()); Ok(Self::Variable { exported: Exportable::Class(class_declaration), position }) } - Token(TSXToken::Keyword(TSXKeyword::Const) | TSXToken::Keyword(TSXKeyword::Let), _) => { + Token(TSXToken::Keyword(TSXKeyword::Const | TSXKeyword::Let), _) => { let variable_declaration = VariableDeclaration::from_reader(reader, state, options)?; let position = start.union(variable_declaration.get_position()); @@ -118,36 +118,34 @@ impl ASTNode for ExportDeclaration { }) } Token(TSXToken::Keyword(TSXKeyword::Type), _) => { - match reader.peek_n(1).ok_or_else(parse_lexing_error)? { - Token(TSXToken::OpenBrace, _) => { - let type_keyword = reader.next().map(|tok| Keyword::new(tok.get_span())); + if let Token(TSXToken::OpenBrace, _) = + reader.peek_n(1).ok_or_else(parse_lexing_error)? + { + let type_keyword = reader.next().map(|tok| Keyword::new(tok.get_span())); - let Token(_, start) = reader.next().unwrap(); // OpenBrace + let Token(_, start) = reader.next().unwrap(); // OpenBrace - let (parts, _end) = crate::parse_bracketed::( - reader, - state, - options, - None, - TSXToken::CloseBrace, - )?; + let (parts, _end) = crate::parse_bracketed::( + reader, + state, + options, + None, + TSXToken::CloseBrace, + )?; - reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; + reader.expect_next(TSXToken::Keyword(TSXKeyword::From))?; - let (from, end) = ImportLocation::from_token( - reader.next().ok_or_else(parse_lexing_error)?, - )?; + let (from, end) = + ImportLocation::from_token(reader.next().ok_or_else(parse_lexing_error)?)?; - Ok(Self::Variable { - exported: Exportable::ImportParts { parts, from, type_keyword }, - position: start.union(end), - }) - } - _ => { - let type_alias = TypeAlias::from_reader(reader, state, options)?; - let position = start.union(type_alias.get_position()); - Ok(Self::Variable { exported: Exportable::TypeAlias(type_alias), position }) - } + Ok(Self::Variable { + exported: Exportable::ImportParts { parts, from, type_keyword }, + position: start.union(end), + }) + } else { + let type_alias = TypeAlias::from_reader(reader, state, options)?; + let position = start.union(type_alias.get_position()); + Ok(Self::Variable { exported: Exportable::TypeAlias(type_alias), position }) } } Token(TSXToken::OpenBrace, _) => { @@ -307,7 +305,7 @@ impl ASTNode for ExportDeclaration { /// /// -/// Similar to [ImportPart] but reversed +/// Similar to [`ImportPart`] but reversed #[derive(Debug, Clone, PartialEq, Eq, Visitable, GetFieldByType)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -358,7 +356,7 @@ impl ASTNode for ExportPart { unreachable!() }; 
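// Editor's note — illustrative sketch, not part of the patch. The export/import changes
// above collapse `TSXToken::Keyword(TSXKeyword::Const) | TSXToken::Keyword(TSXKeyword::Let)`
// into one nested or-pattern; `Tok` and `Kw` below are invented stand-ins for those types.
enum Kw { Const, Let }
enum Tok { Keyword(Kw), OpenBrace }

fn starts_variable_declaration(token: &Tok) -> bool {
    // One constructor with an or-pattern inside it, instead of repeating
    // `Tok::Keyword(..)` for every keyword.
    matches!(token, Tok::Keyword(Kw::Const | Kw::Let))
}

fn main() {
    assert!(starts_variable_declaration(&Tok::Keyword(Kw::Let)));
    assert!(starts_variable_declaration(&Tok::Keyword(Kw::Const)));
    assert!(!starts_variable_declaration(&Tok::OpenBrace));
}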
let pos = value.get_position().union(start.get_end_after(c.len() + 2)); - value = Self::PostfixComment(Box::new(value), c, pos) + value = Self::PostfixComment(Box::new(value), c, pos); } Ok(value) } diff --git a/parser/src/declarations/import.rs b/parser/src/declarations/import.rs index 261bc11f..9daa8be1 100644 --- a/parser/src/declarations/import.rs +++ b/parser/src/declarations/import.rs @@ -353,7 +353,7 @@ impl ASTNode for ImportPart { unreachable!() }; let pos = value.get_position().union(start.get_end_after(c.len() + 2)); - value = Self::PostfixComment(Box::new(value), c, pos) + value = Self::PostfixComment(Box::new(value), c, pos); } Ok(value) } diff --git a/parser/src/declarations/mod.rs b/parser/src/declarations/mod.rs index 0afc1a58..1be916c6 100644 --- a/parser/src/declarations/mod.rs +++ b/parser/src/declarations/mod.rs @@ -127,6 +127,7 @@ impl ImportLocation { } /// Can be None if self is a cursor point + #[must_use] pub fn get_path(&self) -> Option<&str> { if let Self::Quoted(name, _) = self { Some(name) @@ -274,7 +275,7 @@ impl crate::ASTNode for Declaration { Declaration::DeclareVariable(dvd) => dvd.to_string_from_buffer(buf, options, depth), Declaration::DeclareInterface(did) => { buf.push_str("declare "); - did.to_string_from_buffer(buf, options, depth) + did.to_string_from_buffer(buf, options, depth); } } } diff --git a/parser/src/declarations/variable.rs b/parser/src/declarations/variable.rs index 60ba8551..108362c5 100644 --- a/parser/src/declarations/variable.rs +++ b/parser/src/declarations/variable.rs @@ -56,12 +56,12 @@ impl DeclarationExpression for Option { ) { if let Some(expr) = self { buf.push_str(if options.pretty { " = " } else { "=" }); - expr.to_string_from_buffer(buf, options, depth) + expr.to_string_from_buffer(buf, options, depth); } } fn get_decl_position(&self) -> Option<&Span> { - self.as_ref().map(|expr| expr.get_position()) + self.as_ref().map(ASTNode::get_position) } fn as_option_expr_ref(&self) -> Option<&Expression> { @@ -90,7 +90,7 @@ impl DeclarationExpression for crate::Expression { depth: u8, ) { buf.push_str(if options.pretty { " = " } else { "=" }); - ASTNode::to_string_from_buffer(self, buf, options, depth) + ASTNode::to_string_from_buffer(self, buf, options, depth); } fn get_decl_position(&self) -> Option<&Span> { @@ -140,7 +140,7 @@ impl ASTNode for VariableDeclarationItem position: name.get_position().union( expression .get_decl_position() - .or(type_annotation.as_ref().map(|ta| ta.get_position())) + .or(type_annotation.as_ref().map(ASTNode::get_position)) // TODO lol .unwrap_or(name.get_position()), ), @@ -196,6 +196,7 @@ pub enum VariableDeclarationKeyword { } impl VariableDeclarationKeyword { + #[must_use] pub fn is_token_variable_keyword(token: &TSXToken) -> bool { matches!(token, TSXToken::Keyword(TSXKeyword::Const | TSXKeyword::Let)) } @@ -214,6 +215,7 @@ impl VariableDeclarationKeyword { } } + #[must_use] pub fn as_str(&self) -> &str { match self { VariableDeclarationKeyword::Const(_) => "const ", @@ -221,6 +223,7 @@ impl VariableDeclarationKeyword { } } + #[must_use] pub fn get_position(&self) -> &Span { match self { VariableDeclarationKeyword::Const(kw) => kw.get_position(), @@ -312,6 +315,7 @@ impl ASTNode for VariableDeclaration { } impl VariableDeclaration { + #[must_use] pub fn is_constant(&self) -> bool { matches!(self, VariableDeclaration::ConstDeclaration { .. 
}) } diff --git a/parser/src/errors.rs b/parser/src/errors.rs index 7ceef2c3..f3d86515 100644 --- a/parser/src/errors.rs +++ b/parser/src/errors.rs @@ -66,7 +66,7 @@ impl Display for LexingErrors { f.write_str("Expected closing angle at end of self closing JSX tag") } LexingErrors::InvalidCharacterInAttributeKey(chr) => { - write!(f, "Invalid character {:?} in JSX attribute name", chr) + write!(f, "Invalid character {chr:?} in JSX attribute name") } LexingErrors::EmptyAttributeName => f.write_str("Empty JSX attribute name"), LexingErrors::ExpectedJSXEndTag => f.write_str("Expected JSX end tag"), @@ -183,6 +183,7 @@ pub struct ParseError { } impl ParseError { + #[allow(clippy::needless_pass_by_value)] pub fn new(reason: impl ParserErrorReason, position: Span) -> Self { Self { reason: reason.to_string(), position } } diff --git a/parser/src/expressions/arrow_function.rs b/parser/src/expressions/arrow_function.rs index e129e3c7..53ce5178 100644 --- a/parser/src/expressions/arrow_function.rs +++ b/parser/src/expressions/arrow_function.rs @@ -40,7 +40,7 @@ impl FunctionBased for ArrowFunctionBase { _depth: u8, ) { if is_async.is_some() { - buf.push_str("async ") + buf.push_str("async "); } } @@ -174,7 +174,7 @@ impl ArrowFunction { } } -/// For [ArrowFunction] and [crate::MatchArm] bodies +/// For [`ArrowFunction`] and [`crate::MatchArm`] bodies #[derive(Debug, Clone, Eq, PartialEq, Visitable)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] diff --git a/parser/src/expressions/assignments.rs b/parser/src/expressions/assignments.rs index 486f042b..3482b5ce 100644 --- a/parser/src/expressions/assignments.rs +++ b/parser/src/expressions/assignments.rs @@ -143,6 +143,7 @@ impl From for Expression { } impl VariableOrPropertyAccess { + #[must_use] pub fn get_parent(&self) -> Option<&Expression> { match self { VariableOrPropertyAccess::Variable(..) => None, @@ -160,7 +161,7 @@ impl VariableOrPropertyAccess { } } -/// TODO should be different from VariableFieldInSourceCode here +/// TODO should be different from `VariableFieldInSourceCode` here /// TODO visitable is current skipped... 
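// Editor's note — illustrative sketch, not part of the patch. The `{chr:?}` rewrite in
// errors.rs above is clippy's `uninlined_format_args` fix: the variable is captured
// directly inside the format string instead of being passed as a trailing argument.
fn main() {
    let chr = '<';
    // Before: write!(f, "Invalid character {:?} in JSX attribute name", chr)
    println!("Invalid character {chr:?} in JSX attribute name");
}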
/// TODO cursor #[derive(PartialEqExtras, Debug, Clone, Visitable, derive_enum_from_into::EnumFrom)] @@ -180,6 +181,7 @@ pub enum LHSOfAssignment { } impl LHSOfAssignment { + #[must_use] pub fn get_position(&self) -> &Span { match self { LHSOfAssignment::ObjectDestructuring(_, pos) @@ -221,7 +223,7 @@ impl LHSOfAssignment { buf.push(']'); } LHSOfAssignment::VariableOrPropertyAccess(variable_or_property_access) => { - variable_or_property_access.to_string_from_buffer(buf, options, depth) + variable_or_property_access.to_string_from_buffer(buf, options, depth); } } } diff --git a/parser/src/expressions/mod.rs b/parser/src/expressions/mod.rs index e474096e..c5ca1dd7 100644 --- a/parser/src/expressions/mod.rs +++ b/parser/src/expressions/mod.rs @@ -53,7 +53,7 @@ use std::convert::{TryFrom, TryInto}; /// Expression structures /// -/// Comma is implemented as a [BinaryOperator] +/// Comma is implemented as a [`BinaryOperator`] #[derive(PartialEqExtras, Debug, Clone, Visitable, GetFieldByType)] #[get_field_by_type_target(Span)] #[partial_eq_ignore_types(Span)] @@ -209,7 +209,7 @@ impl ASTNode for Expression { options: &crate::ToStringOptions, depth: u8, ) { - self.to_string_using_precedence(buf, options, depth, COMMA_PRECEDENCE) + self.to_string_using_precedence(buf, options, depth, COMMA_PRECEDENCE); } fn get_position(&self) -> &Span { @@ -218,6 +218,7 @@ impl ASTNode for Expression { } impl Expression { + #[allow(clippy::similar_names)] pub(self) fn from_reader_with_precedence( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -520,7 +521,7 @@ impl Expression { let position = start.union(expression.get_position()); Expression::PrefixComment(comment, Box::new(expression), position) } - Token(tok @ TSXToken::JSXOpeningTagStart | tok @ TSXToken::JSXFragmentStart, span) => { + Token(tok @ (TSXToken::JSXOpeningTagStart | TSXToken::JSXFragmentStart), span) => { let var_name = matches!(tok, TSXToken::JSXFragmentStart); let root = JSXRoot::from_reader_sub_start(reader, state, options, var_name, span)?; Expression::JSXRoot(root) @@ -552,16 +553,16 @@ impl Expression { start, )?; return Ok(Expression::ArrowFunction(function)); - } else { - let (name, position) = token_as_identifier(token, "function parameter")?; - let function = ArrowFunction::from_reader_with_first_parameter( - reader, - state, - options, - (name, position), - )?; - return Ok(Expression::ArrowFunction(function)); } + + let (name, position) = token_as_identifier(token, "function parameter")?; + let function = ArrowFunction::from_reader_with_first_parameter( + reader, + state, + options, + (name, position), + )?; + return Ok(Expression::ArrowFunction(function)); } #[cfg(feature = "extras")] @@ -1066,10 +1067,9 @@ impl Expression { is_optional: false, }; continue; - } else { - // TODO - &reader.peek().unwrap().0 } + // TODO + &reader.peek().unwrap().0 } else { token }; @@ -1140,6 +1140,7 @@ impl Expression { } } + #[must_use] pub fn get_precedence(&self) -> u8 { match self { Self::NumberLiteral(..) @@ -1204,9 +1205,7 @@ impl Expression { ) { match self { Self::Cursor { .. 
} => { - if !options.expect_cursors { - panic!(); - } + assert!(options.expect_cursors,); } Self::NumberLiteral(num, _) => buf.push_str(&num.to_string()), Self::StringLiteral(string, quoted, _) => { @@ -1253,18 +1252,18 @@ impl Expression { buf.push_str(property); } InExpressionLHS::Expression(lhs) => { - lhs.to_string_using_precedence(buf, options, depth, IN_PRECEDENCE) + lhs.to_string_using_precedence(buf, options, depth, IN_PRECEDENCE); } } // TODO whitespace can be dropped depending on LHS and RHS buf.push_str(" in "); - rhs.to_string_using_precedence(buf, options, depth, IN_PRECEDENCE) + rhs.to_string_using_precedence(buf, options, depth, IN_PRECEDENCE); } SpecialOperators::InstanceOfExpression { lhs, rhs } => { lhs.to_string_using_precedence(buf, options, depth, INSTANCE_OF_PRECEDENCE); // TODO whitespace can be dropped depending on LHS and RHS buf.push_str(" instanceof "); - rhs.to_string_using_precedence(buf, options, depth, INSTANCE_OF_PRECEDENCE) + rhs.to_string_using_precedence(buf, options, depth, INSTANCE_OF_PRECEDENCE); } #[cfg(feature = "extras")] SpecialOperators::IsExpression { value, type_annotation, .. } => { @@ -1279,14 +1278,14 @@ impl Expression { Self::Assignment { lhs, rhs, .. } => { lhs.to_string_from_buffer(buf, options, depth); buf.push_str(if options.pretty { " = " } else { "=" }); - rhs.to_string_from_buffer(buf, options, depth) + rhs.to_string_from_buffer(buf, options, depth); } Self::BinaryAssignmentOperation { lhs, operator, rhs, .. } => { lhs.to_string_from_buffer(buf, options, depth); options.add_gap(buf); buf.push_str(operator.to_str()); options.add_gap(buf); - rhs.to_string_from_buffer(buf, options, depth) + rhs.to_string_from_buffer(buf, options, depth); } Self::UnaryPrefixAssignmentOperation { operand, operator, .. } => { buf.push_str(operator.to_str()); @@ -1330,7 +1329,7 @@ impl Expression { if let MultipleExpression::Single(inner @ Expression::VariableReference(..)) = &**expr { - inner.to_string_from_buffer(buf, options, depth) + inner.to_string_from_buffer(buf, options, depth); } else { buf.push('('); expr.to_string_from_buffer(buf, options, depth); @@ -1390,10 +1389,10 @@ impl Expression { } Self::JSXRoot(root) => root.to_string_from_buffer(buf, options, depth), Self::ObjectLiteral(object_literal) => { - object_literal.to_string_from_buffer(buf, options, depth) + object_literal.to_string_from_buffer(buf, options, depth); } Self::ArrowFunction(arrow_function) => { - arrow_function.to_string_from_buffer(buf, options, depth) + arrow_function.to_string_from_buffer(buf, options, depth); } Self::ExpressionFunction(function) => { function.to_string_from_buffer(buf, options, depth); @@ -1423,7 +1422,7 @@ impl Expression { } } Self::TemplateLiteral(template_literal) => { - template_literal.to_string_from_buffer(buf, options, depth) + template_literal.to_string_from_buffer(buf, options, depth); } Self::ConditionalTernaryExpression { condition, truthy_result, falsy_result, .. 
@@ -1484,6 +1483,7 @@ pub enum MultipleExpression { } impl MultipleExpression { + #[must_use] pub fn is_iife(&self) -> Option<&ExpressionOrBlock> { if let MultipleExpression::Single(inner) = self { inner.is_iife() @@ -1730,7 +1730,7 @@ impl SpreadExpression { SpreadExpression::Spread(expression, _) | SpreadExpression::NonSpread(expression) => { expression } - _ => panic!(), + SpreadExpression::Empty => panic!(), } } } @@ -1744,6 +1744,7 @@ impl From for SpreadExpression { // Utils for Expression impl Expression { /// IIFE = immediate invoked function execution + #[must_use] pub fn build_iife(block: Block) -> Self { let position = block.get_position().clone(); Expression::FunctionCall { @@ -1777,6 +1778,7 @@ impl Expression { } } + #[must_use] pub fn is_iife(&self) -> Option<&ExpressionOrBlock> { if let Expression::FunctionCall { arguments, function, .. } = self { if let (true, Expression::ParenthesizedExpression(expression, _)) = @@ -1793,6 +1795,7 @@ impl Expression { } /// Recurses to find first non parenthesized expression + #[must_use] pub fn get_non_parenthesized(&self) -> &Self { if let Expression::ParenthesizedExpression(inner_multiple_expr, _) = self { if let MultipleExpression::Single(expr) = &**inner_multiple_expr { @@ -1813,6 +1816,7 @@ impl Expression { /// For prettier printing /// /// TODO temp + #[must_use] pub fn is_small(&self) -> bool { match self { Self::NumberLiteral(..) | Self::BooleanLiteral(..) | Self::VariableReference(..) => { @@ -1883,7 +1887,7 @@ mod tests { Default::default(), ) .unwrap(); - assert!(expr.is_iife().is_some()) + assert!(expr.is_iife().is_some()); } #[test] diff --git a/parser/src/expressions/object_literal.rs b/parser/src/expressions/object_literal.rs index 482bb062..aa1b798b 100644 --- a/parser/src/expressions/object_literal.rs +++ b/parser/src/expressions/object_literal.rs @@ -39,9 +39,9 @@ impl crate::Visitable for ObjectLiteralMember { chain: &mut temporary_annex::Annex, ) { match self { - ObjectLiteralMember::Spread(_, _) => {} - ObjectLiteralMember::Shorthand(_, _) => {} - ObjectLiteralMember::Property(_, _, _) => {} + ObjectLiteralMember::Shorthand(_, _) + | ObjectLiteralMember::Property(_, _, _) + | ObjectLiteralMember::Spread(_, _) => {} ObjectLiteralMember::Method(method) => method.visit(visitors, data, options, chain), } } @@ -54,9 +54,9 @@ impl crate::Visitable for ObjectLiteralMember { chain: &mut temporary_annex::Annex, ) { match self { - ObjectLiteralMember::Spread(_, _) => {} - ObjectLiteralMember::Shorthand(_, _) => {} - ObjectLiteralMember::Property(_, _, _) => {} + ObjectLiteralMember::Property(_, _, _) + | ObjectLiteralMember::Spread(_, _) + | ObjectLiteralMember::Shorthand(_, _) => {} ObjectLiteralMember::Method(method) => method.visit_mut(visitors, data, options, chain), } } @@ -159,6 +159,7 @@ impl ObjectLiteral { } impl ASTNode for ObjectLiteralMember { + #[allow(clippy::similar_names)] fn from_reader( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -189,7 +190,7 @@ impl ASTNode for ObjectLiteralMember { let (name, position) = match mem::take(&mut header) { MethodHeader::Get(kw) => ("get", kw.1), MethodHeader::Set(kw) => ("set", kw.1), - _ => unreachable!(), + MethodHeader::Regular { .. 
} => unreachable!(), }; WithComment::None(PropertyKey::Ident(name.to_owned(), position, ())) } else { @@ -210,7 +211,7 @@ impl ASTNode for ObjectLiteralMember { return crate::throw_unexpected_token(reader, &[TSXToken::OpenParentheses]); } if let Some(Token(TSXToken::Comma | TSXToken::CloseBrace, _)) = reader.peek() { - if let PropertyKey::Ident(name, position, _) = key.get_ast() { + if let PropertyKey::Ident(name, position, ()) = key.get_ast() { Ok(Self::Shorthand(name, position)) } else { let token = reader.next().ok_or_else(parse_lexing_error)?; diff --git a/parser/src/expressions/template_literal.rs b/parser/src/expressions/template_literal.rs index 7ef6b665..764e605d 100644 --- a/parser/src/expressions/template_literal.rs +++ b/parser/src/expressions/template_literal.rs @@ -32,7 +32,7 @@ impl crate::Visitable for TemplateLiteralPart chain: &mut temporary_annex::Annex, ) { if let Self::Dynamic(dynamic) = self { - dynamic.visit(visitors, data, options, chain) + dynamic.visit(visitors, data, options, chain); } } @@ -44,7 +44,7 @@ impl crate::Visitable for TemplateLiteralPart chain: &mut temporary_annex::Annex, ) { if let Self::Dynamic(dynamic) = self { - dynamic.visit_mut(visitors, data, options, chain) + dynamic.visit_mut(visitors, data, options, chain); } } } @@ -73,10 +73,10 @@ impl ASTNode for TemplateLiteral { tag.to_string_from_buffer(buf, options, depth); } buf.push('`'); - for part in self.parts.iter() { + for part in &self.parts { match part { TemplateLiteralPart::Static(content) => { - buf.push_str_contains_new_line(content.as_str()) + buf.push_str_contains_new_line(content.as_str()); } TemplateLiteralPart::Dynamic(expression) => { buf.push_str("${"); diff --git a/parser/src/extensions/decorators.rs b/parser/src/extensions/decorators.rs index 9edddef9..1e3de429 100644 --- a/parser/src/extensions/decorators.rs +++ b/parser/src/extensions/decorators.rs @@ -152,7 +152,7 @@ impl Decorated { pub fn new(decorators: Vec, on: U) -> Self { let position = decorators.first().map_or(on.get_position(), |d| &d.position).union(on.get_position()); - Self { decorators, position, on } + Self { decorators, on, position } } pub(crate) fn to_string_from_buffer_just_decorators( @@ -162,7 +162,7 @@ impl Decorated { depth: u8, ) { if options.include_decorators { - for decorator in self.decorators.iter() { + for decorator in &self.decorators { decorator.to_string_from_buffer(buf, options, depth); if options.pretty { buf.push_new_line(); diff --git a/parser/src/extensions/jsx.rs b/parser/src/extensions/jsx.rs index d7205be3..6e5bdd35 100644 --- a/parser/src/extensions/jsx.rs +++ b/parser/src/extensions/jsx.rs @@ -60,7 +60,7 @@ impl ASTNode for JSXElement { ) { buf.push('<'); buf.push_str(&self.tag_name); - for attribute in self.attributes.iter() { + for attribute in &self.attributes { buf.push(' '); attribute.to_string_from_buffer(buf, options, depth); } @@ -191,7 +191,7 @@ fn jsx_children_to_string( ) { let indent = children.iter().any(|node| matches!(node, JSXNode::Element(..) 
| JSXNode::LineBreak)); - for node in children.iter() { + for node in children { if indent { options.add_indent(depth + 1, buf); } @@ -365,9 +365,7 @@ impl JSXElement { options: &ParseOptions, start: TokenStart, ) -> ParseResult { - let tag_name = if let Some(Token(TSXToken::JSXTagName(tag_name), _)) = reader.next() { - tag_name - } else { + let Some(Token(TSXToken::JSXTagName(tag_name), _)) = reader.next() else { return Err(parse_lexing_error()); }; let mut attributes = Vec::new(); @@ -458,11 +456,13 @@ impl JSXElement { } /// Used for lexing +#[must_use] pub fn html_tag_contains_literal_content(tag_name: &str) -> bool { matches!(tag_name, "script" | "style") } /// Used for lexing +#[must_use] pub fn html_tag_is_self_closing(tag_name: &str) -> bool { matches!( tag_name, diff --git a/parser/src/functions.rs b/parser/src/functions.rs index 36c6d714..b779906a 100644 --- a/parser/src/functions.rs +++ b/parser/src/functions.rs @@ -25,7 +25,7 @@ pub mod bases { }; } -/// Specialization information for [FunctionBase] +/// Specialization information for [`FunctionBase`] pub trait FunctionBased: Debug + Clone + PartialEq + Eq + Send + Sync { /// Includes a keyword and/or modifiers type Header: Debug + Clone + PartialEq + Eq + Send + Sync; @@ -50,12 +50,13 @@ pub trait FunctionBased: Debug + Clone + PartialEq + Eq + Send + Sync { depth: u8, ); - /// For [crate::ArrowFunction] + /// For [`crate::ArrowFunction`] + #[must_use] fn get_parameter_body_boundary_token() -> Option { None } - /// For [crate::ArrowFunction] + /// For [`crate::ArrowFunction`] fn parameters_from_reader( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -64,7 +65,7 @@ pub trait FunctionBased: Debug + Clone + PartialEq + Eq + Send + Sync { FunctionParameters::from_reader(reader, state, options) } - /// For [crate::ArrowFunction] + /// For [`crate::ArrowFunction`] fn parameters_to_string_from_buffer( buf: &mut T, parameters: &FunctionParameters, @@ -74,7 +75,7 @@ pub trait FunctionBased: Debug + Clone + PartialEq + Eq + Send + Sync { parameters.to_string_from_buffer(buf, options, depth); } - /// For [crate::ArrowFunction] + /// For [`crate::ArrowFunction`] fn parameter_body_boundary_token_to_string_from_buffer( buf: &mut T, options: &crate::ToStringOptions, @@ -85,7 +86,7 @@ pub trait FunctionBased: Debug + Clone + PartialEq + Eq + Send + Sync { /// Base for all function based structures with bodies (no interface, type reference etc) /// -/// Note: the [PartialEq] implementation is based on syntactical representation rather than [FunctionId] equality +/// Note: the [`PartialEq`] implementation is based on syntactical representation rather than [`FunctionId`] equality #[derive(Debug, Clone, PartialEqExtras, get_field_by_type::GetFieldByType)] #[get_field_by_type_target(Span)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] @@ -103,6 +104,7 @@ pub struct FunctionBase { impl Eq for FunctionBase {} impl ASTNode for FunctionBase { + #[allow(clippy::similar_names)] fn from_reader( reader: &mut impl TokenReader, state: &mut crate::ParsingState, @@ -136,6 +138,7 @@ impl ASTNode for FunctionBase { } } +#[allow(clippy::similar_names)] impl FunctionBase { pub(crate) fn from_reader_with_header_and_name( reader: &mut impl TokenReader, @@ -169,7 +172,7 @@ impl FunctionBase { } else { parameters.position.clone().union(body_pos) }; - Ok(Self { position, header, name, parameters, type_parameters, body, return_type }) + Ok(Self { header, name, type_parameters, parameters, return_type, body, 
position }) } } @@ -213,6 +216,7 @@ pub struct GeneralFunctionBase(PhantomData) pub type ExpressionFunction = FunctionBase>; +#[allow(clippy::similar_names)] impl FunctionBased for GeneralFunctionBase { type Body = Block; type Header = FunctionHeader; @@ -405,6 +409,7 @@ fn parse_regular_header( } impl FunctionHeader { + #[must_use] pub fn is_generator(&self) -> bool { match self { FunctionHeader::VirginFunctionHeader { @@ -417,6 +422,7 @@ impl FunctionHeader { } } + #[must_use] pub fn is_async(&self) -> bool { match self { FunctionHeader::VirginFunctionHeader { async_keyword, .. } => async_keyword.is_some(), @@ -426,6 +432,7 @@ impl FunctionHeader { } #[cfg(feature = "extras")] + #[must_use] pub fn get_location(&self) -> Option<&FunctionLocationModifier> { match self { FunctionHeader::VirginFunctionHeader { location, .. } diff --git a/parser/src/generator_helpers.rs b/parser/src/generator_helpers.rs index fd73dfe9..53c45a8a 100644 --- a/parser/src/generator_helpers.rs +++ b/parser/src/generator_helpers.rs @@ -2,7 +2,7 @@ use crate::{ASTNode, Expression, PropertyReference, Statement, VariableIdentifie use source_map::Span; -/// A trait which means that self can be pushed to a [TokenSender] +/// A trait which means that self can be pushed to a [`TokenSender`] pub trait IntoAST { fn into_ast(self) -> T; } diff --git a/parser/src/lexer.rs b/parser/src/lexer.rs index d9b884d2..f5c4b138 100644 --- a/parser/src/lexer.rs +++ b/parser/src/lexer.rs @@ -1,6 +1,6 @@ //! Contains lexing logic for all the whole of JS + TypeScript type annotations + JSX + other syntax //! -//! Uses [TSXToken]s for data, uses [Span] for location data. Uses [tokenizer_lib] for logic. +//! Uses [`TSXToken`]s for data, uses [Span] for location data. Uses [`tokenizer_lib`] for logic. 
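// Editor's note — illustrative sketch, not part of the patch. Doc comments above gain
// backticks around identifiers (e.g. [`TSXToken`], [`FunctionBase`]) to satisfy
// `clippy::doc_markdown`; backticked bracket references still resolve as intra-doc
// links in rustdoc. `Counter` below is an invented example type.
/// A tiny example type (invented for illustration).
pub struct Counter(pub u32);

impl Counter {
    /// Returns the wrapped value. Writing [`Counter`] rather than bare Counter keeps
    /// `clippy::doc_markdown` quiet and still links to the type in generated docs.
    #[must_use]
    pub fn get(&self) -> u32 {
        self.0
    }
}

fn main() {
    println!("{}", Counter(3).get());
}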
use super::{Span, TSXToken}; use crate::{ @@ -61,8 +61,6 @@ pub fn lex_script( offset: Option, mut cursors: Vec<(usize, EmptyCursorId)>, ) -> Result<(), (LexingErrors, Span)> { - cursors.reverse(); - #[derive(PartialEq, Debug)] enum JSXAttributeValueDelimiter { None, @@ -161,6 +159,8 @@ pub fn lex_script( }, } + cursors.reverse(); + let mut state: LexingState = LexingState::None; // Used to go back to previous state if was in template literal or JSX literal @@ -369,10 +369,10 @@ pub fn lex_script( push_token!(result); start = idx + chr.len_utf8(); continue; - } else { - push_token!(result); - start = idx; } + + push_token!(result); + start = idx; } GetNextResult::NewState(new_state) => { state = LexingState::Symbol(new_state); @@ -543,10 +543,10 @@ pub fn lex_script( if *tag_depth == 0 { set_state!(LexingState::None); continue; - } else { - start = idx + 1; - *jsx_state = JSXLexingState::Content; } + + start = idx + 1; + *jsx_state = JSXLexingState::Content; } // Fragment start '>' if !*lexed_start => { @@ -641,9 +641,8 @@ pub fn lex_script( *jsx_state = JSXLexingState::Content; } continue; - } else { - return_err!(LexingErrors::ExpectedClosingAngleAtEndOfSelfClosingTag); } + return_err!(LexingErrors::ExpectedClosingAngleAtEndOfSelfClosingTag); } JSXLexingState::AttributeKey => match chr { '=' => { @@ -890,10 +889,10 @@ pub fn lex_script( '"' => set_state!(LexingState::String { double_quoted: true, escaped: false }), '\'' => set_state!(LexingState::String { double_quoted: false, escaped: false }), '_' | '$' => { - set_state!(LexingState::Identifier) + set_state!(LexingState::Identifier); } chr if chr.is_alphabetic() => { - set_state!(LexingState::Identifier) + set_state!(LexingState::Identifier); } chr if chr.is_whitespace() => { continue; diff --git a/parser/src/lib.rs b/parser/src/lib.rs index 9d948227..4e2cc8f9 100644 --- a/parser/src/lib.rs +++ b/parser/src/lib.rs @@ -1,5 +1,5 @@ #![doc = include_str!("../README.md")] -#![allow(clippy::new_without_default)] +#![allow(clippy::new_without_default, clippy::too_many_lines)] mod block; mod comments; @@ -50,7 +50,10 @@ pub use types::{ type_declarations::{self, GenericTypeConstraint, TypeDeclaration}, }; pub use variable_fields::*; -pub(crate) use visiting::*; +pub(crate) use visiting::{ + Chain, ChainVariable, ImmutableVariableOrPropertyPart, MutableVariablePart, VisitSettings, + Visitable, VisitorMutReceiver, VisitorReceiver, +}; use tokenizer_lib::{sized_tokens::TokenEnd, Token, TokenReader}; @@ -79,6 +82,8 @@ impl Quoted { /// Settings to customize parsing #[allow(unused)] #[derive(Copy, Clone)] +// TODO: Can be refactored with bit to reduce memory +#[allow(clippy::struct_excessive_bools)] pub struct ParseOptions { /// Parsing of [JSX](https://facebook.github.io/jsx/) (includes some additions) pub jsx: bool, @@ -108,6 +113,7 @@ impl ParseOptions { } } + #[must_use] pub fn all_features() -> Self { Self { jsx: true, @@ -140,7 +146,9 @@ impl Default for ParseOptions { } } -/// Settings for serializing ASTNodes +/// Settings for serializing `ASTNodes` +// TODO: Can be refactored with bit to reduce memory +#[allow(clippy::struct_excessive_bools)] pub struct ToStringOptions { /// Does not include whitespace minification pub pretty: bool, @@ -173,16 +181,18 @@ impl Default for ToStringOptions { } impl ToStringOptions { + #[must_use] pub fn minified() -> Self { ToStringOptions { pretty: false, include_comments: false, - indent_with: "".to_owned(), + indent_with: String::new(), ..Default::default() } } /// With typescript type syntax + 
#[must_use] pub fn typescript() -> Self { ToStringOptions { include_types: true, ..Default::default() } } @@ -193,7 +203,7 @@ impl ToStringOptions { } pub(crate) fn add_indent(&self, indent: u8, buf: &mut T) { - (0..indent).for_each(|_| buf.push_str(&self.indent_with)) + (0..indent).for_each(|_| buf.push_str(&self.indent_with)); } pub(crate) fn add_gap(&self, buf: &mut T) { @@ -208,7 +218,7 @@ impl ToStringOptions { /// /// TODO remove partial eq pub trait ASTNode: Sized + Clone + PartialEq + std::fmt::Debug + Sync + Send + 'static { - /// From string, with default impl to call abstract method from_reader + /// From string, with default impl to call abstract method `from_reader` fn from_string( script: String, options: ParseOptions, @@ -220,10 +230,10 @@ pub trait ASTNode: Sized + Clone + PartialEq + std::fmt::Debug + Sync + Send + ' // TODO take from argument let line_starts = LineStarts::new(script.as_str()); - lex_and_parse_script(line_starts, options, script, source, offset, Default::default()) + lex_and_parse_script(line_starts, options, &script, source, offset, Default::default()) } - /// Returns position of node as span AS IT WAS PARSED. May be Span::NULL if AST was doesn't match anything in source + /// Returns position of node as span AS IT WAS PARSED. May be `Span::NULL` if AST was doesn't match anything in source fn get_position(&self) -> &Span; fn from_reader( @@ -252,7 +262,7 @@ pub trait ASTNode: Sized + Clone + PartialEq + std::fmt::Debug + Sync + Send + ' pub fn lex_and_parse_script( line_starts: source_map::LineStarts, options: ParseOptions, - script: String, + script: &str, source: SourceId, offset: Option, cursors: Vec<(usize, CursorId<()>)>, @@ -274,7 +284,7 @@ pub fn lex_and_parse_script( res }); - let lex_result = lexer::lex_script(&script, &mut sender, &lex_options, offset, cursors); + let lex_result = lexer::lex_script(script, &mut sender, &lex_options, offset, cursors); if let Err((reason, pos)) = lex_result { return Err(ParseError::new(reason, pos)); } @@ -287,14 +297,14 @@ pub fn lex_and_parse_script( pub fn lex_and_parse_script( line_starts: source_map::LineStarts, options: ParseOptions, - script: String, + script: &str, source: SourceId, offset: Option, cursors: Vec<(usize, CursorId<()>)>, ) -> Result { let mut queue = tokenizer_lib::BufferedTokenQueue::new(); let lex_result = - lexer::lex_script(&script, &mut queue, &options.get_lex_options(), offset, cursors); + lexer::lex_script(script, &mut queue, &options.get_lex_options(), offset, cursors); if let Err((reason, pos)) = lex_result { return Err(ParseError::new(reason, pos)); @@ -374,6 +384,7 @@ impl Keyword where tokens::TSXKeyword: std::convert::From, { + #[must_use] pub fn new(span: Span) -> Self { Keyword(T::default(), span) } @@ -562,17 +573,17 @@ impl FromStr for NumberRepresentation { } Some('X' | 'x') => { let mut number = 0u64; - for c in s[2..].as_bytes().iter() { + for c in s[2..].as_bytes() { number <<= 4; // 16=2^4 match c { b'0'..=b'9' => { - number += (c - b'0') as u64; + number += u64::from(c - b'0'); } b'a'..=b'f' => { - number += (c - b'a') as u64 + 10; + number += u64::from(c - b'a') + 10; } b'A'..=b'F' => { - number += (c - b'A') as u64 + 10; + number += u64::from(c - b'A') + 10; } _ => return Err(s.to_owned()), } @@ -581,11 +592,11 @@ impl FromStr for NumberRepresentation { } Some('B' | 'b') => { let mut number = 0u64; - for c in s[2..].as_bytes().iter() { + for c in s[2..].as_bytes() { number <<= 1; match c { b'0' | b'1' => { - number += (c - b'0') as u64; + number += u64::from(c - 
b'0'); } _ => return Err(s.to_owned()), } @@ -597,10 +608,10 @@ impl FromStr for NumberRepresentation { let uses_character = matches!(c, 'o' | 'O'); let start = if uses_character { 2 } else { 1 }; let mut number = 0u64; - for c in s[start..].as_bytes().iter() { + for c in s[start..].as_bytes() { number <<= 3; // 8=2^3 if matches!(c, b'0'..=b'7') { - number += (c - b'0') as u64; + number += u64::from(c - b'0'); } else { return Err(s.to_owned()); } @@ -656,9 +667,9 @@ impl PartialEq for NumberRepresentation { fn eq(&self, other: &Self) -> bool { match (self, other) { // TODO needs to do conversion - (Self::Hex(l0, l1), Self::Hex(r0, r1)) => l0 == r0 && l1 == r1, - (Self::Bin(l0, l1), Self::Bin(r0, r1)) => l0 == r0 && l1 == r1, - (Self::Octal(l0, l1, _), Self::Octal(r0, r1, _)) => l0 == r0 && l1 == r1, + (Self::Bin(l0, l1), Self::Bin(r0, r1)) + | (Self::Octal(l0, l1, _), Self::Octal(r0, r1, _)) + | (Self::Hex(l0, l1), Self::Hex(r0, r1)) => l0 == r0 && l1 == r1, (Self::Number { internal: l0, .. }, Self::Number { internal: r0, .. }) => l0 == r0, _ => core::mem::discriminant(self) == core::mem::discriminant(other), } @@ -668,10 +679,12 @@ impl PartialEq for NumberRepresentation { impl Eq for NumberRepresentation {} impl NumberRepresentation { + #[must_use] pub fn negate(&self) -> Self { f64::from(self.clone()).neg().into() } + #[must_use] pub fn as_js_string(self) -> String { match self { NumberRepresentation::Infinity => "Infinity".to_owned(), @@ -693,7 +706,7 @@ impl NumberRepresentation { start.remove(0); } if trailing_point { - start.push('.') + start.push('.'); } start } @@ -800,10 +813,12 @@ impl MethodHeader { } } + #[must_use] pub fn is_async(&self) -> bool { matches!(self, Self::Regular { r#async: Some(_), .. }) } + #[must_use] pub fn is_generator(&self) -> bool { matches!(self, Self::Regular { generator: Some(_), .. 
}) } @@ -984,19 +999,19 @@ fn receiver_to_tokens( let start = span.start; let section = (input.get(std::ops::Range::from(span.clone())).unwrap_or("?").to_owned(), true); - if last != start { + if last == start { + last = span.end; + Some(section) + } else { last_section = Some(section); let token = input.get((last as usize)..(start as usize)).unwrap_or("?").to_owned(); last = span.end; Some((token, false)) - } else { - last = span.end; - Some(section) } }) } -/// *to_strings* items surrounded in `{`, `[`, `(`, etc +/// *`to_strings`* items surrounded in `{`, `[`, `(`, etc /// /// TODO delimiter pub(crate) fn to_string_bracketed( diff --git a/parser/src/modules.rs b/parser/src/modules.rs index 42bdc941..feef2710 100644 --- a/parser/src/modules.rs +++ b/parser/src/modules.rs @@ -51,7 +51,7 @@ impl ASTNode for Module { options: &crate::ToStringOptions, depth: u8, ) { - statements_and_declarations_to_string(&self.items, buf, options, depth) + statements_and_declarations_to_string(&self.items, buf, options, depth); } fn get_position(&self) -> &Span { @@ -83,6 +83,7 @@ impl Module { buf.build(fs) } + #[must_use] pub fn length(&self, options: &crate::ToStringOptions) -> usize { let mut buf = source_map::Counter::new(); self.to_string_from_buffer(&mut buf, options, 0); @@ -190,7 +191,7 @@ pub struct TypeDefinitionModule { impl TypeDefinitionModule { pub fn from_string( - script: String, + script: &str, mut options: ParseOptions, source: SourceId, ) -> ParseResult { @@ -199,7 +200,7 @@ impl TypeDefinitionModule { // Important not to parse JSX as <> is used for casting options.jsx = false; - let line_starts = source_map::LineStarts::new(&script); + let line_starts = source_map::LineStarts::new(script); super::lex_and_parse_script(line_starts, options, script, source, None, Default::default()) } @@ -211,7 +212,7 @@ impl TypeDefinitionModule { ) -> Result { let script = fs::read_to_string(&path).map_err(FromFileError::FileError)?; let source = SourceId::new(fs, path.as_ref().to_path_buf(), script.clone()); - Self::from_string(script, options, source) + Self::from_string(&script, options, source) .map_err(|err| FromFileError::ParseError(err, source)) } } @@ -291,11 +292,11 @@ impl ASTNode for TypeDefinitionModuleDeclaration { )?, )) } - Token(TSXToken::Comment(_), _) | Token(TSXToken::MultiLineComment(_), _) => { - let comment = match reader.next().unwrap().0 { - // TODO loses multiline/single-line data - TSXToken::MultiLineComment(comment) | TSXToken::Comment(comment) => comment, - _ => unreachable!(), + Token(TSXToken::Comment(_) | TSXToken::MultiLineComment(_), _) => { + let (TSXToken::MultiLineComment(comment) | TSXToken::Comment(comment)) = + reader.next().unwrap().0 + else { + unreachable!() }; Ok(TypeDefinitionModuleDeclaration::Comment(comment)) } diff --git a/parser/src/operators.rs b/parser/src/operators.rs index ae934b8d..8f48564f 100644 --- a/parser/src/operators.rs +++ b/parser/src/operators.rs @@ -1,15 +1,15 @@ //! Contains the definitions for expressions -//! Operators marked in spec but implemented as a variant of [crate::Expression] rather than a *operator*: -//! OptionalChain, OptionalCall, OptionalIndex, Index, Group, Initialize, Call, +//! Operators marked in spec but implemented as a variant of [`crate::Expression`] rather than a *operator*: +//! 
`OptionalChain`, `OptionalCall`, `OptionalIndex`, Index, Group, Initialize, Call, use std::convert::TryFrom; use crate::{TSXKeyword, TSXToken}; -/// Comma operator is on [crate::MultipleExpression] +/// Comma operator is on [`crate::MultipleExpression`] /// -/// InstanceOf, In are special operators +/// `InstanceOf`, In are special operators #[rustfmt::skip] #[derive(Debug, PartialEq, Eq, Clone, Copy)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] @@ -124,7 +124,7 @@ impl Operator for BinaryOperator { BinaryOperator::LessThan => "<", BinaryOperator::GreaterThan => ">", BinaryOperator::LessThanEqual => "<=", - BinaryOperator::GreaterThanEqual => ">", + BinaryOperator::GreaterThanEqual => ">=", BinaryOperator::Equal => "==", BinaryOperator::StrictEqual => "===", BinaryOperator::NotEqual => "!=", @@ -147,19 +147,20 @@ impl Operator for BinaryOperator { fn precedence(&self) -> u8 { match self { + BinaryOperator::Pipe | BinaryOperator::Compose => 15, BinaryOperator::Exponent => 14, - BinaryOperator::Multiply => 13, - BinaryOperator::Divide => 13, - BinaryOperator::Modulo => 13, - BinaryOperator::Add => 12, - BinaryOperator::Subtract => 12, - BinaryOperator::BitwiseShiftLeft => 11, - BinaryOperator::BitwiseShiftRight => 11, - BinaryOperator::BitwiseShiftRightUnsigned => 11, - BinaryOperator::LessThan => 10, - BinaryOperator::LessThanEqual => 10, - BinaryOperator::GreaterThan => 10, - BinaryOperator::GreaterThanEqual => 10, + BinaryOperator::Multiply + | BinaryOperator::Divide + | BinaryOperator::Modulo + | BinaryOperator::Divides => 13, + BinaryOperator::Add | BinaryOperator::Subtract => 12, + BinaryOperator::BitwiseShiftLeft + | BinaryOperator::BitwiseShiftRightUnsigned + | BinaryOperator::BitwiseShiftRight => 11, + BinaryOperator::LessThan + | BinaryOperator::LessThanEqual + | BinaryOperator::GreaterThanEqual + | BinaryOperator::GreaterThan => 10, BinaryOperator::Equal | BinaryOperator::NotEqual | BinaryOperator::StrictEqual @@ -168,13 +169,7 @@ impl Operator for BinaryOperator { BinaryOperator::BitwiseXOr => 7, BinaryOperator::BitwiseOr => 6, BinaryOperator::LogicalAnd => 5, - BinaryOperator::NullCoalescing => 4, - BinaryOperator::LogicalOr => 4, - // Same as modulo - BinaryOperator::Divides => 13, - // No idea - BinaryOperator::Pipe => 15, - BinaryOperator::Compose => 15, + BinaryOperator::NullCoalescing | BinaryOperator::LogicalOr => 4, } } @@ -467,6 +462,7 @@ impl TryFrom<&TSXToken> for UnaryPrefixAssignmentOperator { impl BinaryOperator { /// Operators which return true may or may not evaluate RHS based on their own value /// TODO might be more + #[must_use] pub fn is_rhs_conditional_evaluation(&self) -> bool { matches!(self, BinaryOperator::LogicalAnd | BinaryOperator::LogicalOr) } diff --git a/parser/src/statements/for_statement.rs b/parser/src/statements/for_statement.rs index 06eee75e..6179b0ea 100644 --- a/parser/src/statements/for_statement.rs +++ b/parser/src/statements/for_statement.rs @@ -252,17 +252,17 @@ impl ASTNode for ForLoopCondition { } }); - let condition = if !matches!(reader.peek(), Some(Token(TSXToken::SemiColon, _))) { - Some(MultipleExpression::from_reader(reader, state, options)?) - } else { + let condition = if matches!(reader.peek(), Some(Token(TSXToken::SemiColon, _))) { None + } else { + Some(MultipleExpression::from_reader(reader, state, options)?) 
}; let semi_colon_two = reader.expect_next_get_end(TSXToken::SemiColon)?; let afterthought = - if !matches!(reader.peek(), Some(Token(TSXToken::CloseParentheses, _))) { - Some(MultipleExpression::from_reader(reader, state, options)?) - } else { + if matches!(reader.peek(), Some(Token(TSXToken::CloseParentheses, _))) { None + } else { + Some(MultipleExpression::from_reader(reader, state, options)?) }; let end = afterthought .as_ref() @@ -305,13 +305,13 @@ impl ASTNode for ForLoopCondition { if let Some(initializer) = initializer { match initializer { ForLoopStatementInitializer::VariableDeclaration(stmt) => { - stmt.to_string_from_buffer(buf, options, depth) + stmt.to_string_from_buffer(buf, options, depth); } ForLoopStatementInitializer::Expression(expr) => { expr.to_string_from_buffer(buf, options, depth); } ForLoopStatementInitializer::VarStatement(stmt) => { - stmt.to_string_from_buffer(buf, options, depth) + stmt.to_string_from_buffer(buf, options, depth); } } } @@ -346,6 +346,6 @@ mod tests { #[test] fn condition_without_variable_keyword() { - assert_matches_ast!("(k in x)", ForLoopCondition::ForIn { .. }) + assert_matches_ast!("(k in x)", ForLoopCondition::ForIn { .. }); } } diff --git a/parser/src/statements/if_statement.rs b/parser/src/statements/if_statement.rs index 3ae88b59..1ec66fa4 100644 --- a/parser/src/statements/if_statement.rs +++ b/parser/src/statements/if_statement.rs @@ -75,7 +75,7 @@ impl ASTNode for IfStatement { } } let position = start_span.union(inner.get_position()); - Ok(IfStatement { condition, inner, position, else_conditions, trailing_else }) + Ok(IfStatement { condition, inner, else_conditions, trailing_else, position }) } fn get_position(&self) -> &Span { diff --git a/parser/src/statements/mod.rs b/parser/src/statements/mod.rs index 7153451b..93f64b60 100644 --- a/parser/src/statements/mod.rs +++ b/parser/src/statements/mod.rs @@ -171,26 +171,26 @@ impl ASTNode for Statement { TSXToken::Keyword(TSXKeyword::Break) => { let break_token = reader.next().unwrap(); // TODO token is semi-colon - let label = if !matches!( + let label = if matches!( reader.peek(), Some(Token(TSXToken::SemiColon | TSXToken::CloseBrace, _)) ) { - Some(token_as_identifier(reader.next().unwrap(), "break label")?.0) - } else { None + } else { + Some(token_as_identifier(reader.next().unwrap(), "break label")?.0) }; Ok(Statement::Break(label, break_token.get_span())) } TSXToken::Keyword(TSXKeyword::Continue) => { let continue_token = reader.next().unwrap(); // TODO token is semi-colon - let label = if !matches!( + let label = if matches!( reader.peek(), Some(Token(TSXToken::SemiColon | TSXToken::CloseBrace, _)) ) { - Some(token_as_identifier(reader.next().unwrap(), "continue label")?.0) - } else { None + } else { + Some(token_as_identifier(reader.next().unwrap(), "continue label")?.0) }; Ok(Statement::Continue(label, continue_token.get_span())) } @@ -227,9 +227,7 @@ impl ASTNode for Statement { ) { match self { Statement::Cursor(..) => { - if !options.expect_cursors { - panic!("tried to to-string cursor") - } + assert!(options.expect_cursors, "tried to to-string cursor"); } Statement::Empty(..) => { buf.push(';'); @@ -296,7 +294,8 @@ impl ASTNode for Statement { } impl Statement { - /// Used for skipping in to_string + /// Used for skipping in `to_string` + #[must_use] pub fn is_comment(&self) -> bool { matches!(self, Statement::Comment(..) 
| Statement::MultiLineComment(..)) } diff --git a/parser/src/statements/switch_statement.rs b/parser/src/statements/switch_statement.rs index 0c98932a..bc5e87cd 100644 --- a/parser/src/statements/switch_statement.rs +++ b/parser/src/statements/switch_statement.rs @@ -73,8 +73,7 @@ impl ASTNode for SwitchStatement { let mut statements = Vec::new(); loop { if let Some(Token( - TSXToken::Keyword(TSXKeyword::Case) - | TSXToken::Keyword(TSXKeyword::Default) + TSXToken::Keyword(TSXKeyword::Case | TSXKeyword::Default) | TSXToken::CloseBrace, _, )) = reader.peek() @@ -87,9 +86,9 @@ impl ASTNode for SwitchStatement { } } if let Some(case) = case { - branches.push(SwitchBranch::Case(case, statements)) + branches.push(SwitchBranch::Case(case, statements)); } else { - branches.push(SwitchBranch::Default(statements)) + branches.push(SwitchBranch::Default(statements)); } } Ok(Self { case, branches, position: start.union(close_brace_pos) }) @@ -108,7 +107,7 @@ impl ASTNode for SwitchStatement { buf.push(')'); options.add_gap(buf); buf.push('{'); - for branch in self.branches.iter() { + for branch in &self.branches { if options.pretty { buf.push_new_line(); options.add_indent(depth + 1, buf); diff --git a/parser/src/statements/try_catch_statement.rs b/parser/src/statements/try_catch_statement.rs index 8ce05db8..e9794dba 100644 --- a/parser/src/statements/try_catch_statement.rs +++ b/parser/src/statements/try_catch_statement.rs @@ -82,7 +82,7 @@ impl ASTNode for TryCatchStatement { )); }; - Ok(Self { position, try_inner, exception_var, catch_inner, finally_inner }) + Ok(Self { try_inner, catch_inner, exception_var, finally_inner, position }) } fn to_string_from_buffer( diff --git a/parser/src/tokens.rs b/parser/src/tokens.rs index bd187099..fd460cd3 100644 --- a/parser/src/tokens.rs +++ b/parser/src/tokens.rs @@ -1,4 +1,4 @@ -//! Contains the declaration for [TSXToken] which are pieces of syntax. Also +//! Contains the declaration for [`TSXToken`] which are pieces of syntax. Also //! - How tokens are made from consecutive characters //! - Keywords @@ -376,6 +376,7 @@ impl std::fmt::Display for TSXToken { } impl TSXToken { + #[must_use] pub fn is_comment(&self) -> bool { matches!(self, TSXToken::Comment(_) | TSXToken::MultiLineComment(_)) } @@ -395,6 +396,7 @@ impl TSXToken { } /// Used for lexing regular expression and JSX literals differently + #[must_use] pub fn is_expression_prefix(&self) -> bool { matches!( self, @@ -412,6 +414,7 @@ impl TSXToken { } /// Returns a keyword token else an identifier literal + #[must_use] pub fn from_slice(slice: &str) -> Self { match TSXKeyword::from_str(slice) { Ok(keyword_token) => TSXToken::Keyword(keyword_token), @@ -421,7 +424,7 @@ impl TSXToken { } /// Some tokens can be used as names for variables, methods (eg 'get' in .get()). This function -/// takes a [Token] and returns its name as a [String] and the location as a [Span]. Will throw [ParseError] if +/// takes a [Token] and returns its name as a [String] and the location as a [Span]. 
Will throw [`ParseError`] if /// cannot convert token to string pub(crate) fn token_as_identifier( token: Token, diff --git a/parser/src/types/declares.rs b/parser/src/types/declares.rs index af407dd0..9d31e52f 100644 --- a/parser/src/types/declares.rs +++ b/parser/src/types/declares.rs @@ -158,7 +158,7 @@ impl ASTNode for DeclareFunctionDeclaration { self.parameters.to_string_from_buffer(buf, options, depth); if let Some(return_type) = &self.return_type { buf.push_str(": "); - return_type.to_string_from_buffer(buf, options, depth) + return_type.to_string_from_buffer(buf, options, depth); } } } @@ -199,17 +199,17 @@ impl DeclareFunctionDeclaration { }; let position = start_pos - .union(return_type.as_ref().map(ASTNode::get_position).unwrap_or(¶meters.position)); + .union(return_type.as_ref().map_or(¶meters.position, ASTNode::get_position)); Ok(Self { name, type_parameters, parameters, return_type, - decorators, - position, #[cfg(feature = "extras")] performs, + decorators, + position, }) } } diff --git a/parser/src/types/interface.rs b/parser/src/types/interface.rs index 89226086..a3359725 100644 --- a/parser/src/types/interface.rs +++ b/parser/src/types/interface.rs @@ -6,7 +6,9 @@ use crate::{ throw_unexpected_token_with_token, to_string_bracketed, tokens::token_as_identifier, tsx_keywords, - types::{type_annotations::TypeAnnotationFunctionParameters, type_declarations::*}, + types::{ + type_annotations::TypeAnnotationFunctionParameters, type_declarations::TypeDeclaration, + }, ASTNode, Expression, GenericTypeConstraint, Keyword, MethodHeader, NumberRepresentation, ParseOptions, ParseResult, PropertyKey, Span, TSXKeyword, TSXToken, TypeAnnotation, }; @@ -59,6 +61,7 @@ impl ASTNode for InterfaceDeclaration { #[cfg(feature = "extras")] let nominal_keyword = Keyword::optionally_from_reader(reader); + // if let Some(Token(TSXToken::Keyword(TSXKeyword::Nominal), _)) = reader.peek() { // Some((reader.next().unwrap().1)) // } else { @@ -101,12 +104,12 @@ impl ASTNode for InterfaceDeclaration { let position = start.union(reader.expect_next_get_end(TSXToken::CloseBrace)?); Ok(InterfaceDeclaration { name, - members, + #[cfg(feature = "extras")] + nominal_keyword, type_parameters, extends, + members, position, - #[cfg(feature = "extras")] - nominal_keyword, }) } @@ -136,7 +139,7 @@ impl ASTNode for InterfaceDeclaration { if options.pretty && !self.members.is_empty() { buf.push_new_line(); } - for member in self.members.iter() { + for member in &self.members { options.add_indent(depth + 1, buf); member.to_string_from_buffer(buf, options, depth + 1); if options.pretty { @@ -152,7 +155,7 @@ impl ASTNode for InterfaceDeclaration { } } -/// This is also used for [TypeAnnotation::ObjectLiteral] +/// This is also used for [`TypeAnnotation::ObjectLiteral`] #[derive(Debug, Clone, PartialEq, Eq, GetFieldByType)] #[get_field_by_type_target(Span)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] @@ -217,6 +220,7 @@ pub enum InterfaceMember { Comment(String, Span), } +#[allow(clippy::similar_names)] impl ASTNode for InterfaceMember { fn from_reader( reader: &mut impl TokenReader, @@ -246,10 +250,7 @@ impl ASTNode for InterfaceMember { .as_ref() .map_or(¶meters.position, |kw| kw.get_position()) .union( - return_type - .as_ref() - .map(ASTNode::get_position) - .unwrap_or(¶meters.position), + return_type.as_ref().map_or(¶meters.position, ASTNode::get_position), ); Ok(InterfaceMember::Caller { is_readonly, @@ -313,8 +314,7 @@ impl ASTNode for InterfaceMember { None }; - let 
end = - return_type.as_ref().map(ASTNode::get_position).unwrap_or(¶meters.position); + let end = return_type.as_ref().map_or(¶meters.position, ASTNode::get_position); let position = readonly_keyword.as_ref().map_or(&new_span, |kw| kw.get_position()).union(end); @@ -332,13 +332,12 @@ impl ASTNode for InterfaceMember { TSXToken::MultiLineComment(..) | TSXToken::Comment(..) => { let token = reader.next().unwrap(); let span = token.get_span(); - let comment = if let TSXToken::MultiLineComment(comment) - | TSXToken::Comment(comment) = token.0 - { - comment - } else { + + let (TSXToken::MultiLineComment(comment) | TSXToken::Comment(comment)) = token.0 + else { unreachable!() }; + Ok(InterfaceMember::Comment(comment, span)) } _ => { diff --git a/parser/src/types/type_annotations.rs b/parser/src/types/type_annotations.rs index dca1247d..633078e8 100644 --- a/parser/src/types/type_annotations.rs +++ b/parser/src/types/type_annotations.rs @@ -21,7 +21,7 @@ use crate::{ /// A reference to a type /// -/// TODO need to figure out what [TypeId] is used for here and where it might be useful for the checker +/// TODO need to figure out what [`TypeId`] is used for here and where it might be useful for the checker #[derive(Debug, Clone, PartialEqExtras, Eq, get_field_by_type::GetFieldByType)] #[get_field_by_type_target(Span)] #[partial_eq_ignore_types(Span)] @@ -146,7 +146,7 @@ pub enum SpreadKind { Spread, } -/// Condition in a [TypeAnnotation::Conditional] +/// Condition in a [`TypeAnnotation::Conditional`] #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -195,7 +195,7 @@ impl TypeCondition { } } -/// The result of a [TypeAnnotation::Condition] +/// The result of a [`TypeAnnotation::Condition`] #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -242,7 +242,7 @@ impl ASTNode for TypeConditionResult { inferred_type.to_string_from_buffer(buf, options, depth); } TypeConditionResult::Reference(reference) => { - reference.to_string_from_buffer(buf, options, depth) + reference.to_string_from_buffer(buf, options, depth); } } } @@ -265,9 +265,7 @@ impl ASTNode for TypeAnnotation { ) { match self { Self::Cursor(..) => { - if !options.expect_cursors { - panic!() - } + assert!(options.expect_cursors,); } Self::CommonName(name, _) => buf.push_str(match name { CommonTypes::String => "string", @@ -277,16 +275,16 @@ impl ASTNode for TypeAnnotation { Self::Decorated(decorator, on_type_annotation, _) => { decorator.to_string_from_buffer(buf, options, depth); buf.push(' '); - on_type_annotation.to_string_from_buffer(buf, options, depth) + on_type_annotation.to_string_from_buffer(buf, options, depth); } Self::Name(name, _) => buf.push_str(name), Self::NameWithGenericArguments(name, arguments, _) => { buf.push_str(name); - to_string_bracketed(arguments, ('<', '>'), buf, options, depth) + to_string_bracketed(arguments, ('<', '>'), buf, options, depth); } Self::FunctionLiteral { type_parameters, parameters, return_type, .. 
} => { if let Some(type_parameters) = type_parameters { - to_string_bracketed(type_parameters, ('<', '>'), buf, options, depth) + to_string_bracketed(type_parameters, ('<', '>'), buf, options, depth); } parameters.to_string_from_buffer(buf, options, depth); buf.push_str(" => "); @@ -592,8 +590,7 @@ impl TypeAnnotation { // Namespaced name if let Some(Token(TSXToken::Dot, _)) = reader.peek() { reader.next(); - let (name, start) = - if let Self::Name(name, start) = reference { (name, start) } else { panic!() }; + let Self::Name(name, start) = reference else { panic!() }; let (namespace_member, end) = token_as_identifier(reader.next().unwrap(), "namespace name")?; let position = start.union(end); @@ -602,9 +599,7 @@ impl TypeAnnotation { // Generics arguments: if let Some(Token(TSXToken::OpenChevron, _position)) = reader.peek() { // Assert its a Self::Name - let (name, start_span) = if let Self::Name(name, start_span) = reference { - (name, start_span) - } else { + let Self::Name(name, start_span) = reference else { let position = reader.next().unwrap().get_span(); return Err(ParseError::new( crate::ParseErrors::TypeArgumentsNotValidOnReference, @@ -753,7 +748,7 @@ impl TypeAnnotation { } } -/// Parses the arguments (vector of [TypeAnnotation]s) parsed to to a type reference or function call. +/// Parses the arguments (vector of [`TypeAnnotation`]s) parsed to to a type reference or function call. /// Returns arguments and the closing span. /// TODO could use parse bracketed but needs to have the more complex logic inside pub(crate) fn generic_arguments_from_reader_sub_open_angle( @@ -807,7 +802,7 @@ pub(crate) fn generic_arguments_from_reader_sub_open_angle( } } -/// Mirrors [crate::FunctionParameters] +/// Mirrors [`crate::FunctionParameters`] #[derive(Debug, Clone, PartialEq, Eq)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -837,7 +832,7 @@ impl ASTNode for TypeAnnotationFunctionParameters { options: &crate::ToStringOptions, depth: u8, ) { - for parameter in self.parameters.iter() { + for parameter in &self.parameters { if let Some(ref name) = parameter.name { name.to_string_from_buffer(buf, options, depth); } @@ -873,6 +868,7 @@ impl TypeAnnotationFunctionParameters { while let Some(Token(TSXToken::At, _)) = reader.peek() { decorators.push(Decorator::from_reader(reader, state, options)?); } + if let Some(Token(TSXToken::Spread, _)) = reader.peek() { let token = reader.next().unwrap(); let (name, _) = token_as_identifier( @@ -888,50 +884,50 @@ impl TypeAnnotationFunctionParameters { decorators, })); break; - } else { - let mut depth = 0; - let after_variable_field = reader.scan(|token, _| match token { - TSXToken::OpenBracket | TSXToken::OpenBrace | TSXToken::OpenParentheses => { - depth += 1; - false - } - TSXToken::CloseBracket | TSXToken::CloseBrace | TSXToken::CloseParentheses => { - depth -= 1; - depth == 0 - } - _ => depth == 0, - }); - let name: Option>> = - if let Some(Token(TSXToken::Colon | TSXToken::OptionalMember, _)) = - after_variable_field - { - Some(ASTNode::from_reader(reader, state, options)?) - } else { - None - }; - let is_optional = match reader.next().ok_or_else(parse_lexing_error)? 
{ - Token(TSXToken::Colon, _) => false, - Token(TSXToken::OptionalMember, _) => true, - token => { - return throw_unexpected_token_with_token( - token, - &[TSXToken::Colon, TSXToken::OptionalMember], - ) - } - }; - let type_annotation = TypeAnnotation::from_reader(reader, state, options)?; - parameters.push(TypeAnnotationFunctionParameter { - position: name - .as_ref() - .map_or(type_annotation.get_position(), |name| name.get_position()) - .union(type_annotation.get_position()), - decorators, - name, - type_annotation, - is_optional, - }); } + let mut depth = 0; + let after_variable_field = reader.scan(|token, _| match token { + TSXToken::OpenBracket | TSXToken::OpenBrace | TSXToken::OpenParentheses => { + depth += 1; + false + } + TSXToken::CloseBracket | TSXToken::CloseBrace | TSXToken::CloseParentheses => { + depth -= 1; + depth == 0 + } + _ => depth == 0, + }); + let name: Option>> = + if let Some(Token(TSXToken::Colon | TSXToken::OptionalMember, _)) = + after_variable_field + { + Some(ASTNode::from_reader(reader, state, options)?) + } else { + None + }; + let is_optional = match reader.next().ok_or_else(parse_lexing_error)? { + Token(TSXToken::Colon, _) => false, + Token(TSXToken::OptionalMember, _) => true, + token => { + return throw_unexpected_token_with_token( + token, + &[TSXToken::Colon, TSXToken::OptionalMember], + ) + } + }; + let type_annotation = TypeAnnotation::from_reader(reader, state, options)?; + parameters.push(TypeAnnotationFunctionParameter { + position: name + .as_ref() + .map_or(type_annotation.get_position(), |name| name.get_position()) + .union(type_annotation.get_position()), + decorators, + name, + type_annotation, + is_optional, + }); + if reader.conditional_next(|tok| matches!(tok, TSXToken::Comma)).is_none() { break; } @@ -975,7 +971,7 @@ mod tests { #[test] fn name() { assert_matches_ast!("something", TypeAnnotation::Name(Deref @ "something", span!(0, 9))); - assert_matches_ast!("string", TypeAnnotation::CommonName(CommonTypes::String, span!(0, 6))) + assert_matches_ast!("string", TypeAnnotation::CommonName(CommonTypes::String, span!(0, 6))); } #[test] @@ -1035,7 +1031,7 @@ mod tests { [TypeAnnotation::CommonName(CommonTypes::String, span!(0, 6)), TypeAnnotation::CommonName(CommonTypes::Number, span!(9, 15))], _, ) - ) + ); } #[test] @@ -1047,7 +1043,7 @@ mod tests { [TypeAnnotation::CommonName(CommonTypes::String, span!(0, 6)), TypeAnnotation::CommonName(CommonTypes::Number, span!(9, 15))], _, ) - ) + ); } #[test] diff --git a/parser/src/types/type_declarations.rs b/parser/src/types/type_declarations.rs index c40555d6..51221492 100644 --- a/parser/src/types/type_declarations.rs +++ b/parser/src/types/type_declarations.rs @@ -35,7 +35,7 @@ impl ASTNode for TypeDeclaration { .map(|(params, _)| params) }) .transpose()?; - Ok(Self { name, position, type_parameters }) + Ok(Self { name, type_parameters, position }) } fn to_string_from_buffer( @@ -46,7 +46,7 @@ impl ASTNode for TypeDeclaration { ) { buf.push_str(&self.name); if let Some(ref type_parameters) = self.type_parameters { - to_string_bracketed(type_parameters, ('<', '>'), buf, options, depth) + to_string_bracketed(type_parameters, ('<', '>'), buf, options, depth); } } @@ -70,6 +70,7 @@ pub enum GenericTypeConstraint { } impl GenericTypeConstraint { + #[must_use] pub fn name(&self) -> &str { match self { GenericTypeConstraint::Parameter { name, .. 
} diff --git a/parser/src/variable_fields.rs b/parser/src/variable_fields.rs index 31899044..9aee3de0 100644 --- a/parser/src/variable_fields.rs +++ b/parser/src/variable_fields.rs @@ -1,6 +1,6 @@ /// Contains: -/// - [VariableId] given to variable declaring items -/// - [VariableField] for destructuring things and its nested derivatives + visiting behavior + tests for self +/// - [`VariableId`] given to variable declaring items +/// - [`VariableField`] for destructuring things and its nested derivatives + visiting behavior + tests for self use std::fmt::Debug; use crate::{ @@ -57,6 +57,7 @@ impl ASTNode for VariableIdentifier { impl VariableIdentifier { /// TODO temp + #[must_use] pub fn as_str(&self) -> &str { match self { VariableIdentifier::Standard(name, _) => name.as_str(), @@ -154,9 +155,9 @@ impl Eq for VariableField {} /// Variable field can be used in type annotations but cannot have a value /// -/// TODO value assignment this is VariableOrFieldAccess thingy +/// TODO value assignment this is `VariableOrFieldAccess` thingy /// -/// TODO could have get_optional_expression_as_option(&Self::OptionalExpression) -> Option +/// TODO could have `get_optional_expression_as_option(&Self::OptionalExpression`) -> Option pub trait VariableFieldKind: PartialEq + Eq + Debug + Clone + 'static { type OptionalExpression: PartialEq + Eq + Debug + Clone + Sync + Send; @@ -206,14 +207,14 @@ impl VariableFieldKind for VariableFieldInSourceCode { ) { if let Some(optional_expression) = optional_expression { buf.push_str(if options.pretty { " = " } else { "=" }); - optional_expression.to_string_from_buffer(buf, options, depth) + optional_expression.to_string_from_buffer(buf, options, depth); } } fn optional_expression_get_position( optional_expression: &Self::OptionalExpression, ) -> Option<&Span> { - optional_expression.as_ref().map(|expr| expr.get_position()) + optional_expression.as_ref().map(ASTNode::get_position) } } @@ -340,41 +341,38 @@ impl ASTNode for ObjectDestructuringField { state: &mut crate::ParsingState, options: &ParseOptions, ) -> ParseResult { - match reader.peek().ok_or_else(parse_lexing_error)? 
{ - Token(TSXToken::Spread, _) => { - let token = reader.next().unwrap(); - let identifier = VariableIdentifier::from_reader(reader, state, options)?; - let position = token.get_span().union(identifier.get_position()); - Ok(Self::Spread(identifier, position)) - } - _ => { - let key = PropertyKey::from_reader(reader, state, options)?; - if matches!(reader.peek(), Some(Token(TSXToken::Colon, _))) { - reader.next(); - let variable_name = - WithComment::>::from_reader(reader, state, options)?; - let default_value = U::optional_expression_from_reader(reader, state, options)?; - let position = - if let Some(pos) = U::optional_expression_get_position(&default_value) { - key.get_position().union(pos) - } else { - key.get_position().clone() - }; - Ok(Self::Map { from: key, name: variable_name, default_value, position }) - } else if let PropertyKey::Ident(name, key_pos, _) = key { - let default_value = U::optional_expression_from_reader(reader, state, options)?; - let standard = VariableIdentifier::Standard(name, key_pos); - let position = - if let Some(pos) = U::optional_expression_get_position(&default_value) { - standard.get_position().union(pos) - } else { - standard.get_position().clone() - }; - Ok(Self::Name(standard, default_value, position)) - } else { - let token = reader.next().ok_or_else(parse_lexing_error)?; - throw_unexpected_token_with_token(token, &[TSXToken::Colon]) - } + if let Token(TSXToken::Spread, _) = reader.peek().ok_or_else(parse_lexing_error)? { + let token = reader.next().unwrap(); + let identifier = VariableIdentifier::from_reader(reader, state, options)?; + let position = token.get_span().union(identifier.get_position()); + Ok(Self::Spread(identifier, position)) + } else { + let key = PropertyKey::from_reader(reader, state, options)?; + if matches!(reader.peek(), Some(Token(TSXToken::Colon, _))) { + reader.next(); + let variable_name = + WithComment::>::from_reader(reader, state, options)?; + let default_value = U::optional_expression_from_reader(reader, state, options)?; + let position = + if let Some(pos) = U::optional_expression_get_position(&default_value) { + key.get_position().union(pos) + } else { + key.get_position().clone() + }; + Ok(Self::Map { from: key, name: variable_name, default_value, position }) + } else if let PropertyKey::Ident(name, key_pos, ()) = key { + let default_value = U::optional_expression_from_reader(reader, state, options)?; + let standard = VariableIdentifier::Standard(name, key_pos); + let position = + if let Some(pos) = U::optional_expression_get_position(&default_value) { + standard.get_position().union(pos) + } else { + standard.get_position().clone() + }; + Ok(Self::Name(standard, default_value, position)) + } else { + let token = reader.next().ok_or_else(parse_lexing_error)?; + throw_unexpected_token_with_token(token, &[TSXToken::Colon]) } } } @@ -392,13 +390,13 @@ impl ASTNode for ObjectDestructuringField { } Self::Name(name, default_value, _) => { buf.push_str(name.as_str()); - U::optional_expression_to_string_from_buffer(default_value, buf, options, depth) + U::optional_expression_to_string_from_buffer(default_value, buf, options, depth); } Self::Map { from, name: variable_name, default_value, .. 
} => { from.to_string_from_buffer(buf, options, depth); buf.push(':'); variable_name.to_string_from_buffer(buf, options, depth); - U::optional_expression_to_string_from_buffer(default_value, buf, options, depth) + U::optional_expression_to_string_from_buffer(default_value, buf, options, depth); } } } @@ -408,7 +406,7 @@ impl ASTNode for ObjectDestructuringField { } } -/// TODO not sure about the positions here, is potential duplication if T::OptionalExpression is none +/// TODO not sure about the positions here, is potential duplication if `T::OptionalExpression` is none #[derive(Debug, Clone)] #[cfg_attr(feature = "self-rust-tokenize", derive(self_rust_tokenize::SelfRustTokenize))] #[cfg_attr(feature = "serde-serialize", derive(serde::Serialize))] @@ -466,7 +464,7 @@ impl ASTNode for ArrayDestructuringField { } Self::Name(name, default_value) => { name.to_string_from_buffer(buf, options, depth); - U::optional_expression_to_string_from_buffer(default_value, buf, options, depth) + U::optional_expression_to_string_from_buffer(default_value, buf, options, depth); } Self::None => {} } @@ -495,7 +493,7 @@ impl Visitable for VariableField { } } VariableField::Array(array_destructuring_fields, _) => { - for field in array_destructuring_fields.iter() { + for field in array_destructuring_fields { visitors.visit_variable( &ImmutableVariableOrPropertyPart::ArrayDestructuringMember(field), data, @@ -512,7 +510,7 @@ impl Visitable for VariableField { } } VariableField::Object(object_destructuring_fields, _) => { - for field in object_destructuring_fields.iter() { + for field in object_destructuring_fields { visitors.visit_variable( &ImmutableVariableOrPropertyPart::ObjectDestructuringMember(field), data, diff --git a/parser/src/visiting.rs b/parser/src/visiting.rs index 77be98de..1305fcdf 100644 --- a/parser/src/visiting.rs +++ b/parser/src/visiting.rs @@ -17,7 +17,10 @@ mod ast { use crate::block::{BlockLike, BlockLikeMut}; - use super::*; + use super::{ + Chain, Expression, ImmutableVariableOrPropertyPart, JSXElement, MutableVariablePart, + StatementOrDeclarationMut, StatementOrDeclarationRef, VisitorMutReceiver, VisitorReceiver, + }; /// Options for behavior when visiting AST. 
/// Customizable behavior is important for analysis @@ -87,7 +90,7 @@ mod ast { s: &VisitSettings, c: &mut Annex, ) { - Visitable::visit(&**self, v, d, s, c) + Visitable::visit(&**self, v, d, s, c); } fn visit_mut( @@ -97,7 +100,7 @@ mod ast { s: &VisitSettings, c: &mut Annex, ) { - Visitable::visit_mut(&mut **self, v, d, s, c) + Visitable::visit_mut(&mut **self, v, d, s, c); } } @@ -252,7 +255,10 @@ mod structures { Declaration, Statement, VariableFieldInSourceCode, }; - use super::*; + use super::{ + ArrayDestructuringField, ObjectDestructuringField, PropertyKey, SourceId, + StatementOrDeclaration, WithComment, + }; use source_map::Span; use temporary_annex::{Annex, Annexable}; @@ -268,33 +274,39 @@ mod structures { pub struct Chain(Vec); impl Chain { + #[must_use] pub fn new() -> Self { Self(Vec::with_capacity(10)) } + #[must_use] pub fn new_with_initial(initial: ChainVariable) -> Self { let mut buf = Vec::with_capacity(10); buf.push(initial); Self(buf) } + #[must_use] pub fn len(&self) -> usize { self.0.len() } + #[must_use] pub fn is_empty(&self) -> bool { self.0.is_empty() } pub fn truncate(&mut self, to_size: usize) { - self.0.truncate(to_size) + self.0.truncate(to_size); } /// TODO remove + #[must_use] pub fn get_chain(&self) -> &[ChainVariable] { &self.0 } + #[must_use] pub fn get_module(&self) -> SourceId { if let ChainVariable::Module(source) = self.0.first().unwrap() { source.to_owned() @@ -354,6 +366,7 @@ mod structures { } impl<'a> ImmutableVariableOrPropertyPart<'a> { + #[must_use] pub fn get_name(&self) -> Option<&'a str> { match self { ImmutableVariableOrPropertyPart::FunctionName(name, _) @@ -363,7 +376,7 @@ mod structures { ImmutableVariableOrPropertyPart::ClassName(name, _) => *name, ImmutableVariableOrPropertyPart::ObjectPropertyKey(property) => { match property.get_ast_ref() { - PropertyKey::Ident(ident, _, _) + PropertyKey::Ident(ident, _, ()) | PropertyKey::StringLiteral(ident, _, _) => Some(ident.as_str()), PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => None, } @@ -378,6 +391,7 @@ mod structures { } } + #[must_use] pub fn get_position(&self) -> &Span { use crate::ASTNode; match self { @@ -462,11 +476,14 @@ mod structures { } mod visitors { - use super::*; + use super::{ + Chain, Expression, ImmutableVariableOrPropertyPart, JSXElement, SelfVisitable, + StatementOrDeclarationRef, + }; use crate::{block::BlockLike, TSXKeyword}; use source_map::Span; - /// A visitor over something which is hooked/is SelfVisitable with some Data + /// A visitor over something which is hooked/is `SelfVisitable` with some Data pub trait Visitor { fn visit(&mut self, item: &Item, data: &mut Data, chain: &Chain); } @@ -525,11 +542,11 @@ mod visitors { data: &mut T, chain: &Chain, ) { - self.variable_visitors.iter_mut().for_each(|vis| vis.visit(variable, data, chain)) + self.variable_visitors.iter_mut().for_each(|vis| vis.visit(variable, data, chain)); } fn visit_block(&mut self, block: &BlockLike, data: &mut T, chain: &Chain) { - self.block_visitors.iter_mut().for_each(|vis| vis.visit(block, data, chain)) + self.block_visitors.iter_mut().for_each(|vis| vis.visit(block, data, chain)); } fn visit_keyword(&mut self, _keyword: &(TSXKeyword, &Span), _data: &mut T, _chain: &Chain) { @@ -585,9 +602,12 @@ mod visitors { mod visitors_mut { use crate::block::BlockLikeMut; - use super::*; + use super::{ + Chain, Expression, JSXElement, MutableVariablePart, SelfVisitableMut, + StatementOrDeclarationMut, + }; - /// A visitor over something which is hooked/is SelfVisitable with some 
Data + /// A visitor over something which is hooked/is `SelfVisitable` with some Data pub trait VisitorMut { fn visit_mut(&mut self, item: &mut Item, data: &mut Data, chain: &Chain); } diff --git a/parser/tests/expressions.rs b/parser/tests/expressions.rs index ec5d7d8a..c0c6f52d 100644 --- a/parser/tests/expressions.rs +++ b/parser/tests/expressions.rs @@ -2,7 +2,7 @@ use ezno_parser::{ASTNode, Module, SourceId}; #[test] fn arrow_functions() { - let input = r#" + let input = r" () => expression; param => expression; (param) => expression; @@ -16,7 +16,7 @@ param => { (param1, paramN) => { statements } - "# + " .trim_start(); let _module = @@ -28,7 +28,7 @@ param => { #[test] fn function_calls() { - let input = r#" + let input = r" x(4, 5); y.t(2, 3); y.t<4, 2>(3); @@ -36,13 +36,13 @@ y.t<4, Array<5>>(3); a(y<2>(4)); a.a?.(y<2>(4)); a.a(...expr, y) - "# + " .trim(); let module = Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); - eprintln!("Module: {:#?}", module); + eprintln!("Module: {module:#?}"); let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); assert_eq!(output, input); @@ -50,19 +50,19 @@ a.a(...expr, y) #[test] fn objects() { - let input = r#" + let input = r" ({ a: 5 }); ({ ...b, a: 5, ...c, d: 4 }); ({ async e() { return 2 } }) - "# + " .trim(); let module = Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); - eprintln!("Module: {:#?}", module); + eprintln!("Module: {module:#?}"); let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); assert_eq!(output, input); diff --git a/parser/tests/statements.rs b/parser/tests/statements.rs index 44bd5d1a..b5e3cdfb 100644 --- a/parser/tests/statements.rs +++ b/parser/tests/statements.rs @@ -35,7 +35,7 @@ interface X { .replace(" ", "\t"); let module = - Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); + Module::from_string(input.clone(), Default::default(), SourceId::NULL, None).unwrap(); let output = module.to_string(&ToStringOptions::typescript()); assert_eq!(output, input); } @@ -99,7 +99,7 @@ try { .replace(" ", "\t"); let module = - Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); + Module::from_string(input.clone(), Default::default(), SourceId::NULL, None).unwrap(); let output = module.to_string(&Default::default()); assert_eq!(output, input); } @@ -218,7 +218,7 @@ from "module-name" import defaultExport, * as name; let module = Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); - eprintln!("Module: {:#?}", module); + eprintln!("Module: {module:#?}"); // let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); // assert_eq!(output, input); @@ -227,20 +227,20 @@ from "module-name" import defaultExport, * as name; #[cfg(feature = "extras")] #[test] fn function_custom_headers() { - let input = r#" + let input = r" function a() {} generator function a() {} generator server function a() {} generator server function a() {} async server function a() {} module function a() {} - "# + " .trim(); let module = Module::from_string(input.to_owned(), Default::default(), SourceId::NULL, None).unwrap(); - eprintln!("Module: {:#?}", module); + eprintln!("Module: {module:#?}"); // let output = module.to_string(&ezno_parser::ToStringOptions::typescript()); // assert_eq!(output, input); diff --git a/parser/tests/visiting.rs b/parser/tests/visiting.rs index 3bfd685e..a09cae8d 100644 --- 
a/parser/tests/visiting.rs +++ b/parser/tests/visiting.rs @@ -62,7 +62,7 @@ impl VisitorMut, ()> for AddElseClause { .into(); if_statement.trailing_else = - Some(UnconditionalElseStatement { inner, position: Span::NULL_SPAN }) + Some(UnconditionalElseStatement { inner, position: Span::NULL_SPAN }); } } } diff --git a/src/ast_explorer.rs b/src/ast_explorer.rs index 0e0792a3..65dde5bc 100644 --- a/src/ast_explorer.rs +++ b/src/ast_explorer.rs @@ -24,15 +24,16 @@ pub(crate) struct ExplorerArguments { } impl ExplorerArguments { + #[allow(clippy::needless_continue)] pub(crate) fn run( &mut self, - fs_resolver: T, + fs_resolver: &T, cli_input_resolver: U, ) { if let Some(ref file) = self.file { let content = fs_resolver.get_content_at_path(file); if let Some(content) = content { - self.nested.run(content, Some(file.to_owned())) + self.nested.run(content, Some(file.to_owned())); } else { eprintln!("Could not find file at {}", file.display()); } @@ -40,6 +41,7 @@ impl ExplorerArguments { print_to_cli(format_args!("ezno ast-explorer\nUse #exit to leave. Also #switch-mode *mode name* and #load-file *path*")); loop { let input = cli_input_resolver(self.nested.to_str()).unwrap_or_default(); + if input.is_empty() { continue; } else if input.trim() == "#exit" { @@ -48,10 +50,7 @@ impl ExplorerArguments { self.nested = match ExplorerSubCommand::from_str(new_mode.trim()) { Ok(mode) => mode, Err(expected) => { - print_to_cli(format_args!( - "Unexpected mode, options are {:?}", - expected - )); + print_to_cli(format_args!("Unexpected mode, options are {expected:?}")); continue; } }; @@ -59,7 +58,7 @@ impl ExplorerArguments { let input = match fs::read_to_string(path.trim()) { Ok(string) => string, Err(err) => { - print_to_cli(format_args!("{:?}", err)); + print_to_cli(format_args!("{err:?}")); continue; } }; @@ -138,7 +137,7 @@ impl ExplorerSubCommand { serde_json::to_string_pretty(&res).unwrap() )); } else { - print_to_cli(format_args!("{:#?}", res)); + print_to_cli(format_args!("{res:#?}")); } } // TODO temp @@ -163,7 +162,7 @@ impl ExplorerSubCommand { serde_json::to_string_pretty(&res).unwrap() )); } else { - print_to_cli(format_args!("{:#?}", res)); + print_to_cli(format_args!("{res:#?}")); } } // TODO temp @@ -190,9 +189,7 @@ impl ExplorerSubCommand { ExplorerSubCommand::Lexer(_) => { let mut color = console::Color::Red; for (section, with) in parser::script_to_tokens(input) { - if !with { - print_to_cli_without_newline(format_args!("{}", section)); - } else { + if with { let value = style(section).bg(color); // Cycle through colors color = match color { @@ -204,7 +201,9 @@ impl ExplorerSubCommand { console::Color::Cyan => console::Color::Red, _ => unreachable!(), }; - print_to_cli_without_newline(format_args!("{}", value)); + print_to_cli_without_newline(format_args!("{value}")); + } else { + print_to_cli_without_newline(format_args!("{section}")); } } print_to_cli(format_args!("")); diff --git a/src/cli.rs b/src/cli.rs index 08e9097b..6ddac9da 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -56,6 +56,8 @@ struct Info {} /// Build project #[derive(FromArgs, PartialEq, Debug)] #[argh(subcommand, name = "build")] +// TODO: Can be refactored with bit to reduce memory +#[allow(clippy::struct_excessive_bools)] pub(crate) struct BuildArguments { /// path to input file #[argh(positional)] @@ -142,7 +144,7 @@ fn file_system_resolver(path: &Path) -> Option { pub fn run_cli( cli_arguments: &[&str], - read_file: T, + read_file: &T, write_file: U, cli_input_resolver: V, ) { @@ -161,11 +163,11 @@ pub fn run_cli { let 
output_path = build_config.output.unwrap_or("ezno_output.js".into()); let output = crate::commands::build( - &read_file, + read_file, &build_config.input, build_config.definition_file.as_deref(), &output_path, - crate::commands::BuildConfig { strip_whitespace: build_config.minify }, + &crate::commands::BuildConfig { strip_whitespace: build_config.minify }, None, ); match output { @@ -173,12 +175,12 @@ pub fn run_cli { - for diagnostic in diagnostics.into_iter() { + for diagnostic in diagnostics { emit_ezno_diagnostic(diagnostic, &fs).unwrap(); } } @@ -187,14 +189,14 @@ pub fn run_cli repl.run(read_file, cli_input_resolver), CompilerSubCommand::Check(check_arguments) => { let CheckArguments { input, watch: _, definition_file } = check_arguments; - let (diagnostics, _others) = - crate::commands::check(&read_file, &input, definition_file.as_deref()); + let (diagnostics, others) = + crate::commands::check(read_file, &input, definition_file.as_deref()); - let fs = match _others { + let fs = match others { Ok(data) => data.module_contents, Err(data) => data, }; - for diagnostic in diagnostics.into_iter() { + for diagnostic in diagnostics { emit_ezno_diagnostic(diagnostic, &fs).unwrap(); } } diff --git a/src/commands.rs b/src/commands.rs index 44147392..d07e314a 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -4,10 +4,7 @@ use parser::{ ASTNode, ParseOptions, ToStringOptions, }; use serde::Deserialize; -use std::{ - collections::HashSet, - path::{Path, PathBuf}, -}; +use std::path::{Path, PathBuf}; pub type EznoCheckerData = PostCheckData; @@ -17,9 +14,9 @@ pub fn check( type_definition_module: Option<&Path>, ) -> (checker::DiagnosticsContainer, Result>) { let definitions = if let Some(tdm) = type_definition_module { - HashSet::from_iter(std::iter::once(tdm.into())) + std::iter::once(tdm.into()).collect() } else { - HashSet::from_iter(std::iter::once(checker::INTERNAL_DEFINITION_FILE_PATH.into())) + std::iter::once(checker::INTERNAL_DEFINITION_FILE_PATH.into()).collect() }; let read_from_fs = |path: &Path| { @@ -81,7 +78,7 @@ pub fn build( input_path: &Path, type_definition_module: Option<&Path>, output_path: &Path, - config: BuildConfig, + config: &BuildConfig, transformers: Option, ) -> Result { // TODO parse settings + non_standard_library & non_standard_syntax diff --git a/src/error_handling.rs b/src/error_handling.rs index d13729fa..13a7e8df 100644 --- a/src/error_handling.rs +++ b/src/error_handling.rs @@ -13,14 +13,14 @@ pub(crate) fn emit_ezno_diagnostic( ) -> Result<(), codespan_reporting::files::Error> { let diagnostic = match diagnostic { checker::Diagnostic::Global { reason, kind } => Diagnostic { - severity: ezno_diagnostic_to_severity(kind), + severity: ezno_diagnostic_to_severity(&kind), code: None, message: reason, labels: Vec::new(), notes: Vec::default(), }, checker::Diagnostic::Position { reason, position, kind } => Diagnostic { - severity: ezno_diagnostic_to_severity(kind), + severity: ezno_diagnostic_to_severity(&kind), code: None, message: Default::default(), labels: vec![Label::primary(position.source, position).with_message(reason)], @@ -31,7 +31,7 @@ pub(crate) fn emit_ezno_diagnostic( labels.into_iter().partition::, _>(|(_, value)| value.is_some()); Diagnostic { - severity: ezno_diagnostic_to_severity(kind), + severity: ezno_diagnostic_to_severity(&kind), code: None, message: Default::default(), labels: iter::once(Label::primary(position.source, position).with_message(reason)) @@ -48,7 +48,7 @@ pub(crate) fn emit_ezno_diagnostic( emit_diagnostic(&diagnostic, 
&fs.into_code_span_store()) } -fn ezno_diagnostic_to_severity(kind: checker::DiagnosticKind) -> Severity { +fn ezno_diagnostic_to_severity(kind: &checker::DiagnosticKind) -> Severity { match kind { checker::DiagnosticKind::Error => Severity::Error, checker::DiagnosticKind::Warning => Severity::Warning, diff --git a/src/main.rs b/src/main.rs index 72e06b82..598da795 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,3 +1,19 @@ +#![deny(clippy::all)] +#![deny(clippy::pedantic)] +#![allow( + clippy::new_without_default, + // TODO: Remove when fixed + clippy::result_unit_err, + clippy::default_trait_access, + clippy::missing_errors_doc, + clippy::missing_panics_doc, + clippy::implicit_hasher, + clippy::too_many_lines, + // More explicit sometimes to have the module name + clippy::module_name_repetitions +)] +#![warn(clippy::cast_precision_loss, clippy::cast_possible_truncation, clippy::cast_sign_loss)] + use ezno_lib::cli::run_cli; use std::io; @@ -12,8 +28,9 @@ pub(crate) fn cli_input_resolver(prompt: &str) -> Option { } #[cfg(target_family = "unix")] +#[allow(clippy::unnecessary_wraps)] pub(crate) fn cli_input_resolver(prompt: &str) -> Option { - print!("{}> ", prompt); + print!("{prompt}> "); io::Write::flush(&mut io::stdout()).unwrap(); let mut input = String::new(); let std_in = &mut io::stdin(); @@ -22,9 +39,6 @@ pub(crate) fn cli_input_resolver(prompt: &str) -> Option { } fn main() { - let arguments = std::env::args().skip(1).collect::>(); - let arguments = arguments.iter().map(String::as_str).collect::>(); - fn read_from_file(path: &std::path::Path) -> Option { std::fs::read_to_string(path).ok() } @@ -33,5 +47,8 @@ fn main() { std::fs::write(path, content).unwrap(); } - run_cli(&arguments, read_from_file, write_to_file, cli_input_resolver) + let arguments = std::env::args().skip(1).collect::>(); + let arguments = arguments.iter().map(String::as_str).collect::>(); + + run_cli(&arguments, &read_from_file, write_to_file, cli_input_resolver); } diff --git a/src/repl.rs b/src/repl.rs index e887644b..a1d32d91 100644 --- a/src/repl.rs +++ b/src/repl.rs @@ -1,4 +1,3 @@ -use std::collections::HashSet; use std::fs; use std::path::{Path, PathBuf}; @@ -45,21 +44,21 @@ pub(crate) fn run_deno_repl( cli_input_resolver: U, ReplArguments { type_output, const_as_let, type_definition_module }: ReplArguments, ) { - let mut items = if !type_output { + let mut items = if type_output { + None + } else { let mut deno = Command::new("deno"); let command = deno.arg("repl").arg("-q").stdout(Stdio::piped()).stdin(Stdio::piped()); let mut process = command.spawn().unwrap(); let stdin = process.stdin.take().unwrap(); let child_buf = BufReader::new(process.stdout.take().unwrap()); Some((process, stdin, child_buf)) - } else { - None }; let definitions = if let Some(tdm) = type_definition_module { - HashSet::from_iter(std::iter::once(tdm)) + std::iter::once(tdm).collect() } else { - HashSet::from_iter(std::iter::once(checker::INTERNAL_DEFINITION_FILE_PATH.into())) + std::iter::once(checker::INTERNAL_DEFINITION_FILE_PATH.into()).collect() }; let state = checker::synthesis::interactive::State::new(&file_system_resolver, definitions); @@ -67,7 +66,7 @@ pub(crate) fn run_deno_repl( let mut state = match state { Ok(state) => state, Err((diagnostics, fs)) => { - for diagnostic in diagnostics.into_iter() { + for diagnostic in diagnostics { emit_ezno_diagnostic(diagnostic, &fs).unwrap(); } return; @@ -87,9 +86,9 @@ pub(crate) fn run_deno_repl( stdin.flush().unwrap(); } break; - } else { - input } + + input } else { continue; }; 
@@ -131,7 +130,7 @@ pub(crate) fn run_deno_repl( match result { Ok((last_ty, diagnostics)) => { - for diagnostic in diagnostics.into_iter() { + for diagnostic in diagnostics { emit_ezno_diagnostic(diagnostic, state.get_fs_ref()).unwrap(); } if let Some((_process, stdin, child_buf)) = items.as_mut() { @@ -143,18 +142,15 @@ pub(crate) fn run_deno_repl( loop { let mut buf = String::new(); - match child_buf.read_line(&mut buf) { - Ok(_output) => { - if buf.contains("REPL_END") { - break; - } - // deno already emits new line so just print here - print!("{}", buf); - } - Err(_) => { - println!("Error"); + if let Ok(_output) = child_buf.read_line(&mut buf) { + if buf.contains("REPL_END") { break; } + // deno already emits new line so just print here + print!("{buf}"); + } else { + println!("Error"); + break; } } } else if let Some(last_ty) = last_ty { @@ -162,7 +158,7 @@ pub(crate) fn run_deno_repl( } } Err(diagnostics) => { - for diagnostic in diagnostics.into_iter() { + for diagnostic in diagnostics { emit_ezno_diagnostic(diagnostic, state.get_fs_ref()).unwrap(); } } diff --git a/src/wasm_bindings.rs b/src/wasm_bindings.rs index 08b6b42f..7b4d5dfd 100644 --- a/src/wasm_bindings.rs +++ b/src/wasm_bindings.rs @@ -21,7 +21,7 @@ pub fn build_wasm(entry_path: String, fs_resolver_js: &js_sys::Function, minify: Path::new(&entry_path), None, Path::new("out.js"), - crate::commands::BuildConfig { strip_whitespace: minify }, + &crate::commands::BuildConfig { strip_whitespace: minify }, None, ); serde_wasm_bindgen::to_value(&result).unwrap() @@ -76,7 +76,7 @@ pub fn run_cli_wasm( .and_then(JsValue::as_string) }; - crate::run_cli(&arguments, read_file, write_file, cli_input_resolver) + crate::run_cli(&arguments, &read_file, write_file, cli_input_resolver) } #[wasm_bindgen(js_name = parse_expression)]