Skip to content

Commit

Permalink
refactor(tokenizer): Remove stale function
Browse files Browse the repository at this point in the history
  • Loading branch information
Ed Page committed Mar 1, 2021
1 parent ddeee94 commit 1010d2f
Showing 1 changed file with 2 additions and 6 deletions.
8 changes: 2 additions & 6 deletions crates/typos/src/tokens.rs
Original file line number Diff line number Diff line change
Expand Up @@ -181,7 +181,7 @@ impl<'t> Identifier<'t> {

/// Split into individual Words.
/// Split this identifier into its constituent `Word`s (e.g. the parts of
/// a camelCase or snake_case token), preserving source offsets.
///
/// Delegates directly to [`SplitIdent::new`]; each yielded `Word` carries
/// an offset relative to `self.offset`.
pub fn split(&self) -> impl Iterator<Item = Word<'t>> {
    // Defect fixed: the span contained both the pre-refactor call
    // (`split_ident(self.token, self.offset)`) and this one — diff residue.
    // Keep only the committed form, which constructs the iterator directly.
    SplitIdent::new(self.token, self.offset)
}
}

Expand All @@ -195,7 +195,7 @@ pub struct Word<'t> {

impl<'t> Word<'t> {
pub fn new(token: &'t str, offset: usize) -> Result<Self, std::io::Error> {
let mut itr = split_ident(token, 0);
let mut itr = SplitIdent::new(token, 0);
let mut item = itr.next().ok_or_else(|| {
std::io::Error::new(
std::io::ErrorKind::InvalidInput,
Expand Down Expand Up @@ -239,10 +239,6 @@ impl<'t> Word<'t> {
}
}

/// Iterate over the `Word`s within `ident`, with each word's reported
/// position shifted by `offset` (the identifier's own offset in the
/// larger source).
///
/// NOTE(review): thin wrapper over [`SplitIdent::new`] with no added
/// behavior — per this commit ("Remove stale function") callers are
/// migrated to `SplitIdent::new` directly and this wrapper is deleted.
fn split_ident(ident: &str, offset: usize) -> impl Iterator<Item = Word<'_>> {
    SplitIdent::new(ident, offset)
}

struct SplitIdent<'s> {
ident: &'s str,
offset: usize,
Expand Down

0 comments on commit 1010d2f

Please sign in to comment.