Include the slice in the token as well

This commit is contained in:
Jesse Braham 2025-02-05 21:43:41 +01:00
parent ece6645e50
commit e3a9dcd4fb
2 changed files with 26 additions and 26 deletions

View File

@@ -42,23 +42,9 @@ impl<'a> Lexer<'a> {
}
}
/// Return a byte slice containing the contents of the current [Token].
#[inline]
#[must_use]
pub fn slice(&self) -> &'a [u8] {
&self.bytes[self.token_start..self.cursor]
}
/// Return the span of the current [Token].
#[inline]
#[must_use]
pub fn span(&self) -> Span {
Span::new(self.token_start, self.cursor)
}
/// Return the next [Token] in the input stream.
#[inline]
pub fn next_token(&mut self) -> Result<Option<Token>, LexerError> {
pub fn next_token(&mut self) -> Result<Option<Token<'a>>, LexerError> {
self.token_start = self.cursor;
let Some(c) = self.advance() else {
@@ -100,7 +86,19 @@ impl<'a> Lexer<'a> {
}
};
Ok(Some(Token::new(kind, self.span())))
Ok(Some(Token::new(kind, self.span(), self.slice())))
}
#[inline]
#[must_use]
fn slice(&self) -> &'a [u8] {
&self.bytes[self.token_start..self.cursor]
}
#[inline]
#[must_use]
fn span(&self) -> Span {
Span::new(self.token_start, self.cursor)
}
#[inline]
@@ -142,7 +140,7 @@ impl<'a> Lexer<'a> {
fn read_char(&mut self) -> Result<TokenKind, LexerError> {
// NOTE: We have already consumed the initial '\' when this function is invoked
if self.peek().is_none() || self.peek().is_some_and(|c| is_separator(c)) {
if self.peek().is_none() || self.peek().is_some_and(is_separator) {
return Err(LexerError::new(LexerErrorKind::InvalidChar, self.span()));
}
@@ -353,8 +351,8 @@ impl<'a> Lexer<'a> {
}
}
impl Iterator for Lexer<'_> {
type Item = Result<Token, LexerError>;
impl<'a> Iterator for Lexer<'a> {
type Item = Result<Token<'a>, LexerError>;
fn next(&mut self) -> Option<Self::Item> {
self.next_token().transpose()
@@ -584,7 +582,7 @@ mod tests {
#[test]
fn $name(x in $input) {
let mut lexer = Lexer::new(&x);
assert_eq!(lexer.next(), Some(Ok(Token::new(TokenKind::$kind, lexer.span()))));
assert_eq!(lexer.next(), Some(Ok(Token::new(TokenKind::$kind, lexer.span(), lexer.slice()))));
assert_eq!(lexer.slice(), x.as_bytes());
assert_eq!(lexer.span(), Span::new(0, x.len()));
}

View File

@@ -55,17 +55,19 @@ impl TokenKind {
/// A valid token found in Onihime source code.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Token {
pub struct Token<'a> {
/// Kind of token which was found.
pub kind: TokenKind,
/// The token's span.
pub span: Span,
/// The token's slice.
pub slice: &'a [u8],
}
impl Token {
impl<'a> Token<'a> {
/// Construct a new instance of a token.
#[must_use]
pub const fn new(kind: TokenKind, span: Span) -> Self {
Self { kind, span }
pub const fn new(kind: TokenKind, span: Span, slice: &'a [u8]) -> Self {
Self { kind, span, slice }
}
}