Skip to content

Commit 3b49d71

Browse files
Author: Steve van Loben Sels
Commit message: Revert #128, update docs and add tests to Tokenizer.Remaining() (#129)
Commit 3b49d71 (1 parent: b2d0aeb)

File tree

2 files changed: 10 additions (+), 18 deletions (−)

json/token.go

Lines changed: 8 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -43,17 +43,6 @@ type Tokenizer struct {
 	// null, true, false, numbers, or quoted strings.
 	Value RawValue
 
-	// Position is the Tokenizer's current index into the underlying byte slice.
-	// Since the Tokenizer has already been advanced by calling Next, this
-	// position will be the first index of the next token. The position of
-	// the current Value can be calculated by subtracting len(token.value).
-	// Accordingly, slicing the underlying bytes like:
-	//
-	//	b[token.Position-len(token.Value):token.Position]
-	//
-	// will yield the current Value.
-	Position int
-
 	// When the tokenizer has encountered invalid content this field is not nil.
 	Err error

@@ -102,7 +91,6 @@ func (t *Tokenizer) Reset(b []byte) {
 	// However, it does not compile down to an invocation of duff-copy.
 	t.Delim = 0
 	t.Value = nil
-	t.Position = 0
 	t.Err = nil
 	t.Depth = 0
 	t.Index = 0
@@ -139,16 +127,13 @@ skipLoop:
 
 	if i > 0 {
 		t.json = t.json[i:]
-		t.Position += i
 	}
 
 	if len(t.json) == 0 {
 		t.Reset(nil)
 		return false
 	}
 
-	lenBefore := len(t.json)
-
 	var kind Kind
 	switch t.json[0] {
 	case '"':
@@ -179,8 +164,6 @@ skipLoop:
 		t.Value, t.json, t.Err = t.json[:1], t.json[1:], syntaxError(t.json, "expected token but found '%c'", t.json[0])
 	}
 
-	t.Position += lenBefore - len(t.json)
-
 	t.Depth = t.depth()
 	t.Index = t.index()
 	t.flags = t.flags.withKind(kind)
@@ -319,6 +302,14 @@ func (t *Tokenizer) String() []byte {
 }
 
 // Remaining returns the number of bytes left to parse.
+//
+// The position of the tokenizer's current Value within the original byte slice
+// can be calculated like so:
+//
+//	end := len(b) - tok.Remaining()
+//	start := end - len(tok.Value)
+//
+// And slicing b[start:end] will yield the tokenizer's current Value.
 func (t *Tokenizer) Remaining() int {
 	return len(t.json)
 }

json/token_test.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,8 @@ func tokenize(t *testing.T, b []byte) (tokens []token) {
 	tok := NewTokenizer(b)
 
 	for tok.Next() {
-		start, end := tok.Position-len(tok.Value), tok.Position
+		end := len(b) - tok.Remaining()
+		start := end - len(tok.Value)
 		if end > len(b) {
 			t.Fatalf("token position too far [%d:%d], len(b) is %d", start, end, len(b))
 		}

0 commit comments

Comments (0)