Fix panic when parse-wiki-text-2 times out while scan_position is inside a
multibyte UTF-8 character. The parser advances scan_position byte-by-byte, but
Rust str slicing panics unless both indices fall on UTF-8 character boundaries,
so flushing at a mid-character position aborted instead of returning TimedOut.
The fix rounds scan_position down to the previous character boundary before the
final flush.

--- a/src/parse.rs
+++ b/src/parse.rs
@@ -245,7 +245,7 @@ pub fn parse<'a>(
 		if !max_duration.is_zero() && loop_counter == 10_000 {
 			loop_counter = 0;
 			if start_time.elapsed() > max_duration {
-				state.flush(state.scan_position);
+				state.flush(state.previous_char_boundary(state.scan_position));
 
 				return Err(ParseError::TimedOut {
 					execution_time: start_time.elapsed(),
--- a/src/state.rs
+++ b/src/state.rs
@@ -140,6 +140,10 @@ impl<'a> State<'a> {
 	pub fn skip_whitespace_forwards(&self, position: usize) -> usize {
 		skip_whitespace_forwards(self.wiki_text, position)
 	}
+
+	pub fn previous_char_boundary(&self, position: usize) -> usize {
+		previous_char_boundary(self.wiki_text, position)
+	}
 }
 
 pub fn flush<'a>(
@@ -180,6 +184,14 @@ pub fn skip_whitespace_forwards(wiki_text: &str, position: usize) -> usize {
 	position + non_whitespace_position
 }
 
+fn previous_char_boundary(wiki_text: &str, mut position: usize) -> usize {
+	position = position.min(wiki_text.len());
+	while !wiki_text.is_char_boundary(position) {
+		position -= 1;
+	}
+	position
+}
+
 #[cfg(test)]
 mod tests {
 	use super::*;
