Clean up some clippy lints
parent 88912bfa39
commit 223554d49d
@@ -110,7 +110,6 @@ impl<'lexer> Lexer<'lexer> {
     }
 
     /// Read the next token from the input.
-    #[must_use]
     pub(crate) fn read(&mut self) -> Result<Option<Token>, LexerError> {
         // Eat whitespace until we encounter a meaningful character, or simply return if
        // we have reached the end of input and no additional characters can be read:
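
Note: the `#[must_use]` removals in this hunk and the ones that follow all fit the same pattern: each function already returns a `Result`, which is `#[must_use]` on its own, so the attribute adds nothing. This is presumably clippy's `double_must_use` lint. A minimal, self-contained sketch of the pattern (the function below is illustrative, not from this crate):

    // clippy::double_must_use: `Result` is already #[must_use], so the
    // attribute on the function is redundant and gets removed.
    #[must_use]
    fn first_digit(input: &str) -> Result<Option<char>, String> {
        Ok(input.chars().next().filter(char::is_ascii_digit))
    }

    fn main() {
        // Ignoring the Result warns either way, with or without the attribute.
        let _ = first_digit("42");
    }
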
@@ -226,7 +225,6 @@ impl<'lexer> Lexer<'lexer> {
         TokenKind::BlockComment(comment.trim().into())
     }
 
-    #[must_use]
     fn float_literal(&self, word: String, span: Span) -> Result<TokenKind, LexerError> {
         let float = word.parse().map_err(|_| {
             LexerError::new(LexerErrorKind::InvalidNumber(word), span.join(&self.span()))
@@ -235,7 +233,6 @@ impl<'lexer> Lexer<'lexer> {
         Ok(TokenKind::Float(float))
     }
 
-    #[must_use]
     fn integer_literal(
         &self,
         word: String,
@@ -256,7 +253,6 @@ impl<'lexer> Lexer<'lexer> {
         Ok(TokenKind::Integer(integer))
     }
 
-    #[must_use]
     fn numeric_literal(&mut self, span: Span) -> Result<TokenKind, LexerError> {
         let word = self.read_word();
 
@@ -269,7 +265,6 @@ impl<'lexer> Lexer<'lexer> {
         Ok(kind)
     }
 
-    #[must_use]
     fn char_literal(&mut self, span: Span) -> Result<TokenKind, LexerError> {
         self.advance(); // '\''
 
@@ -314,7 +309,6 @@ impl<'lexer> Lexer<'lexer> {
         Ok(TokenKind::Char(c))
     }
 
-    #[must_use]
     fn string_literal(&mut self, span: Span) -> Result<TokenKind, LexerError> {
         self.advance(); // '"'
 

@@ -28,7 +28,7 @@ impl From<Vec<Node>> for Ast {
 impl std::fmt::Display for Ast {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         for node in &self.root {
-            write!(f, "{node}\n")?;
+            writeln!(f, "{node}")?;
         }
 
         Ok(())
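
Note: the only change in this hunk is the `write!` with a trailing `\n` becoming `writeln!`, most likely to satisfy clippy's `write_with_newline` lint; the output is identical. A self-contained sketch:

    use std::fmt::Write;

    fn main() -> std::fmt::Result {
        let mut out = String::new();
        // Before (flagged by clippy::write_with_newline):
        //     write!(out, "{}\n", "node")?;
        // After, producing the same output:
        writeln!(out, "{}", "node")?;
        print!("{out}");
        Ok(())
    }
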
@@ -63,14 +63,7 @@ impl Node {
             Expr::List(vec) | Expr::Map(vec) | Expr::Set(vec) | Expr::Vector(vec) => {
                 vec.push(child);
             }
-            _ => {
-                // FIXME
-                // return Err(ParserError::new(
-                //     ParserErrorKind::UnexpectedState,
-                //     child.span,
-                // ))
-                todo!()
-            }
+            _ => unimplemented!(),
         }
 
         Ok(())
@@ -203,14 +196,7 @@ impl TryFrom<Token> for Expr {
             TokenKind::String(s) => Atom::String(s),
             TokenKind::Symbol(s) => Atom::Symbol(s),
             TokenKind::Nil => Atom::Nil,
-            _ => {
-                // FIXME
-                // return Err(ParserError::new(
-                //     ParserErrorKind::UnexpectedState,
-                //     token.span,
-                // ))
-                todo!()
-            }
+            _ => unimplemented!(),
         };
 
         Ok(kind.into())
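
Note: in both hunks above the catch-all arm drops its commented-out error handling and trailing `todo!()` in favour of `_ => unimplemented!(),`; the arm still panics, the dead comments simply go away. The removed FIXME comments suggest these arms are eventually meant to return a `ParserError` instead of panicking. A hedged, self-contained sketch of that direction, with illustrative types rather than the crate's own:

    #[derive(Debug)]
    enum ParseErrorKind {
        UnexpectedState,
    }

    fn classify(kind: u8) -> Result<&'static str, ParseErrorKind> {
        match kind {
            0 => Ok("nil"),
            1 => Ok("string"),
            // As of this commit: `_ => unimplemented!(),` panics on anything else.
            // Direction the removed FIXME comments point towards:
            _ => Err(ParseErrorKind::UnexpectedState),
        }
    }

    fn main() {
        assert!(classify(7).is_err());
    }
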
@@ -1,6 +1,7 @@
+pub(crate) use self::{ast::Ast, error::ParserError};
 use self::{
-    ast::{Ast, Expr, Node},
-    error::{ParserError, ParserErrorKind},
+    ast::{Expr, Node},
+    error::ParserErrorKind,
 };
 use crate::{
     lexer::{Lexer, TokenKind},
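
Note: the new `pub(crate) use self::{ast::Ast, error::ParserError};` keeps `Ast` and `ParserError` in scope for this file while also exposing them as `parser::Ast` and `parser::ParserError` to the rest of the crate, which is why the plain `use self::...` lists shrink to what `mod.rs` still references directly. A self-contained sketch of the re-export mechanic, with illustrative module names:

    mod parser {
        mod ast {
            pub struct Ast;
        }
        mod error {
            pub struct ParserError;
        }

        // Same shape as the line added by this commit: siblings can now reach
        // the two types through `parser` without naming the submodules.
        pub(crate) use self::{ast::Ast, error::ParserError};
    }

    fn main() {
        let _ast = parser::Ast;
        let _err = parser::ParserError;
    }
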
@@ -36,7 +37,6 @@ impl<'parser> Parser<'parser> {
     }
 
     /// Produce an Abstract Syntax Tree (AST) from the source input.
-    #[must_use]
     pub(crate) fn parse(mut self) -> Result<Ast, ParserError> {
         // This parser is actually quite simple!Recursively parse expressions until we
         // run out of tokens, or an error occurs:
@@ -65,7 +65,6 @@ impl<'parser> Parser<'parser> {
         }
     }
 
-    #[must_use]
     fn expr(&mut self) -> Result<Option<Node>, ParserError> {
         if let Some(token) = self.lexer.read()? {
             match token.kind {
@@ -103,7 +102,6 @@ impl<'parser> Parser<'parser> {
         }
     }
 
-    #[must_use]
     fn begin_sequence(
         &mut self,
         init: impl FnOnce(Vec<Node>) -> Expr,
@@ -117,7 +115,6 @@ impl<'parser> Parser<'parser> {
         Ok(None)
     }
 
-    #[must_use]
     fn end_sequence(&mut self, kind: TokenKind, span: Span) -> Result<Option<Node>, ParserError> {
         // We will ultimately return the current expression, so clone it and update its
         // span first:

@@ -2,7 +2,7 @@ use std::{cmp::Ordering, iter, ops::Range, sync::Arc};
 
 /// A location within some source text.
 #[derive(Debug, Default, Clone, Copy, PartialEq, Eq, Hash)]
-pub(crate) struct Location {
+pub struct Location {
     line: usize,
     column: usize,
 }
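
Note: `pub(crate)` loosening to `pub` here (and on `Source` and `Span` below) matches clippy's `redundant_pub_crate` nursery lint, which points out that inside a module that is itself private, `pub(crate)` grants no more visibility than plain `pub`. Whether that exact lint was the trigger is an assumption; the shape of the change is the same either way. A self-contained sketch with illustrative names:

    mod span {
        // Flagged by clippy::redundant_pub_crate (nursery): the enclosing
        // module is private, so `pub(crate)` buys nothing over `pub`.
        pub(crate) struct Location;

        // The spelling this commit switches to.
        pub struct Source;
    }

    fn main() {
        let _location = span::Location;
        let _source = span::Source;
    }
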
@@ -26,7 +26,7 @@ impl PartialOrd for Location {
 
 /// Some (optionally named) source text.
 #[derive(Debug, Default, Clone, PartialEq, Eq, Hash)]
-pub(crate) struct Source {
+pub struct Source {
     name: Option<String>,
     contents: String,
     lines: Vec<usize>,
@@ -87,8 +87,8 @@ impl Source {
 }
 
 /// A contiguous sequence of bytes within some source.
-#[derive(Debug, Default, Clone, Eq, Hash)]
-pub(crate) struct Span {
+#[derive(Debug, Default, Clone, Eq)]
+pub struct Span {
     bytes: Range<usize>,
     source: Arc<Source>,
 }
@@ -144,6 +144,16 @@ impl PartialEq for Span {
     }
 }
 
+impl std::hash::Hash for Span {
+    fn hash<H>(&self, state: &mut H)
+    where
+        H: std::hash::Hasher,
+    {
+        self.bytes.hash(state);
+        self.source.hash(state);
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
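
Note: dropping `Hash` from `Span`'s derive list (previous hunk) and writing the impl by hand is the usual response to clippy's `derived_hash_with_manual_eq` lint (formerly `derive_hash_xor_eq`): when `PartialEq` is hand-written, a derived `Hash` can fall out of step with it and break the rule that equal values must hash identically. The hand-written impl hashes exactly the fields shown, `bytes` and `source`. A self-contained sketch of the idea, with a hypothetical extra field to make the hazard visible:

    use std::hash::{Hash, Hasher};
    use std::ops::Range;

    struct Span {
        bytes: Range<usize>,
        hits: u32, // hypothetical field that equality deliberately ignores
    }

    impl PartialEq for Span {
        fn eq(&self, other: &Self) -> bool {
            self.bytes == other.bytes
        }
    }
    impl Eq for Span {}

    // Hash only what `eq` compares; a derived Hash would also mix in `hits`
    // and could disagree for values that compare equal.
    impl Hash for Span {
        fn hash<H: Hasher>(&self, state: &mut H) {
            self.bytes.hash(state);
        }
    }

    fn main() {
        let a = Span { bytes: 0..4, hits: 1 };
        let b = Span { bytes: 0..4, hits: 9 };
        assert!(a == b);
        let _ = a.hits + b.hits; // keep the illustrative field "read"
    }
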