const std = @import("std");

// Two ways to index words inside a static string: materialized slices
// (option 1) vs. compact start offsets (option 2).

test "slices" { // option 1: slices
    const a = std.testing.allocator;
    var words_al = std.ArrayList([]const u8).init(a);
    defer words_al.deinit();
    var toks = std.mem.tokenizeAny(u8, wordlist, "\n ");
    while (toks.next()) |tok| {
        try words_al.append(tok);
    }
    // A slice is a pointer + length (so 2 x usize per word).
    const words: []const []const u8 = try words_al.toOwnedSlice();
    defer a.free(words);
    try std.testing.expectEqualStrings("bar", words[1]); // words accessed by index
}

test "offsets" { // option 2: offsets
    const a = std.testing.allocator;
    var offsets_al = std.ArrayList(u16).init(a);
    defer offsets_al.deinit();
    try offsets_al.append(0); // first word always starts at offset 0
    for (wordlist, 0..) |ch, ix| {
        // Record the start of the word following each newline.
        // Guard with `ix + 1 < wordlist.len` so a trailing '\n' cannot
        // record an out-of-bounds start offset. (The previous check
        // `ix < wordlist.len` was always true inside this loop.)
        if (ch == '\n' and ix + 1 < wordlist.len) {
            try offsets_al.append(@intCast(ix + 1)); // we know we have fewer than 2^16 words
        }
    }
    // Offsets are just u16 x length, so ~4x less memory than slices on 64-bit systems.
    const offsets = try offsets_al.toOwnedSlice();
    defer a.free(offsets);
    // Words accessed by slicing wordlist using offsets.
    // Be careful of edge cases, i.e. accessing the last word in the list; not handled here.
    const word = wordlist[offsets[1] .. offsets[2] - 1];
    try std.testing.expectEqualStrings("bar", word);
}

// Multiline string literal: lines joined by '\n', no trailing newline.
const wordlist: []const u8 =
    \\foo
    \\bar
    \\baz
    \\I
    \\like
    \\cheese
;