103_tokenization.patch 393 B

--- exercises/103_tokenization.zig	2023-10-05 21:57:23.245974688 +0200
+++ answers/103_tokenization.zig	2023-10-05 22:06:08.319119156 +0200
@@ -136,7 +136,7 @@
     ;
     // now the tokenizer, but what do we need here?
-    var it = std.mem.tokenizeAny(u8, poem, ???);
+    var it = std.mem.tokenizeAny(u8, poem, " ,;!\n");
     // print all words and count them
     var cnt: usize = 0;