Skip to content

Commit 7d59e05

Browse files
committed
Prefer next(stream) over stream.next()
1 parent 98acb27 commit 7d59e05

19 files changed

+68
-68
lines changed

liquid/builtin/expressions/arguments.py

+10-10
Original file line numberDiff line numberDiff line change
@@ -52,13 +52,13 @@ def parse(env: Environment, tokens: TokenStream) -> list[KeywordArgument]:
5252

5353
# Leading commas are OK
5454
if tokens.current.kind == TOKEN_COMMA:
55-
tokens.next()
55+
next(tokens)
5656

5757
while True:
58-
token = tokens.next()
58+
token = next(tokens)
5959

6060
if token.kind == TOKEN_COMMA:
61-
token = tokens.next()
61+
token = next(tokens)
6262

6363
if token.kind == TOKEN_EOF:
6464
break
@@ -109,7 +109,7 @@ def parse(env: Environment, tokens: TokenStream) -> list[PositionalArgument]:
109109

110110
while True:
111111
if tokens.current.kind == TOKEN_COMMA:
112-
tokens.next()
112+
next(tokens)
113113

114114
if tokens.current.kind == TOKEN_EOF:
115115
break
@@ -141,19 +141,19 @@ def parse(env: Environment, tokens: TokenStream) -> dict[str, Parameter]:
141141
)
142142

143143
while True:
144-
token = tokens.next()
144+
token = next(tokens)
145145

146146
if token.kind == TOKEN_COMMA:
147147
# Leading and/or trailing commas are OK.
148-
token = tokens.next()
148+
token = next(tokens)
149149

150150
if token.kind == TOKEN_EOF:
151151
break
152152

153153
if token.kind == TOKEN_WORD:
154154
if tokens.current.kind in argument_separators:
155155
# A parameter with a default value
156-
tokens.next() # Move past ":" or "="
156+
next(tokens) # Move past ":" or "="
157157
value = parse_primitive(env, tokens)
158158
params[token.value] = Parameter(token, token.value, value)
159159
else:
@@ -179,7 +179,7 @@ def parse_arguments(
179179

180180
# Leading commas are OK
181181
if tokens.current.kind == TOKEN_COMMA:
182-
tokens.next()
182+
next(tokens)
183183

184184
while True:
185185
token = tokens.current
@@ -189,8 +189,8 @@ def parse_arguments(
189189

190190
if token.kind == TOKEN_WORD:
191191
if tokens.peek.kind in argument_separators:
192-
name_token = tokens.next()
193-
tokens.next() # = or :
192+
name_token = next(tokens)
193+
next(tokens) # = or :
194194
value = parse_primitive(env, tokens)
195195
kwargs.append(KeywordArgument(name_token, token.value, value))
196196
else:

liquid/builtin/expressions/filtered.py

+5-5
Original file line numberDiff line numberDiff line change
@@ -220,7 +220,7 @@ def parse(
220220
tail_filters: list[Filter] | None = None
221221

222222
if tokens.current.kind == TOKEN_ELSE:
223-
tokens.next() # else
223+
next(tokens) # else
224224
alternative = parse_primitive(env, tokens)
225225

226226
if tokens.current.kind == TOKEN_PIPE:
@@ -331,7 +331,7 @@ def parse(
331331
)
332332

333333
while tokens.current.kind in delim:
334-
tokens.next()
334+
next(tokens)
335335
token = tokens.eat(TOKEN_WORD)
336336
args: list[Union[KeywordArgument, PositionalArgument]] = []
337337

@@ -345,8 +345,8 @@ def parse(
345345
tok = tokens.current
346346
if tok.kind == TOKEN_WORD:
347347
if tokens.peek.kind in argument_separators:
348-
tokens.next() # word
349-
tokens.next() # : or =
348+
next(tokens) # word
349+
next(tokens) # : or =
350350
args.append(
351351
KeywordArgument(
352352
tok, tok.value, parse_primitive(env, tokens)
@@ -378,7 +378,7 @@ def parse(
378378
f"found {tokens.peek.kind}",
379379
token=tokens.peek,
380380
)
381-
tokens.next()
381+
next(tokens)
382382
else:
383383
break
384384

liquid/builtin/expressions/logical.py

+10-10
Original file line numberDiff line numberDiff line change
@@ -437,30 +437,30 @@ def parse_boolean_primitive( # noqa: PLR0912
437437

438438
if kind == TOKEN_TRUE:
439439
left = TrueLiteral(token)
440-
tokens.next()
440+
next(tokens)
441441
elif kind == TOKEN_FALSE:
442442
left = FalseLiteral(token)
443-
tokens.next()
443+
next(tokens)
444444
elif kind in (TOKEN_NIL, TOKEN_NULL):
445445
left = Nil(token)
446-
tokens.next()
446+
next(tokens)
447447
elif kind == TOKEN_INTEGER:
448448
left = IntegerLiteral(token, to_int(token.value))
449-
tokens.next()
449+
next(tokens)
450450
elif kind == TOKEN_FLOAT:
451451
left = FloatLiteral(token, float(token.value))
452-
tokens.next()
452+
next(tokens)
453453
elif kind == TOKEN_STRING:
454454
left = StringLiteral(token, token.value)
455-
tokens.next()
455+
next(tokens)
456456
elif kind == TOKEN_RANGE_LITERAL:
457457
left = RangeLiteral.parse(env, tokens)
458458
elif kind == TOKEN_BLANK:
459459
left = Blank(token)
460-
tokens.next()
460+
next(tokens)
461461
elif kind == TOKEN_EMPTY:
462462
left = Empty(token)
463-
tokens.next()
463+
next(tokens)
464464
elif kind in (TOKEN_WORD, TOKEN_IDENTSTRING, TOKEN_LBRACKET):
465465
left = Path.parse(env, tokens)
466466
elif kind == TOKEN_LPAREN:
@@ -493,7 +493,7 @@ def parse_infix_expression( # noqa: PLR0911
493493
env: Environment, stream: TokenStream, left: Expression
494494
) -> Expression: # noqa: PLR0911
495495
"""Return a logical, comparison, or membership expression parsed from _stream_."""
496-
token = stream.next()
496+
token = next(stream)
497497
assert token is not None
498498
precedence = PRECEDENCES.get(token.kind, PRECEDENCE_LOWEST)
499499

@@ -553,7 +553,7 @@ def parse_grouped_expression(env: Environment, tokens: TokenStream) -> Expressio
553553

554554
tokens.eat(TOKEN_LPAREN)
555555
expr = parse_boolean_primitive(env, tokens)
556-
token = tokens.next()
556+
token = next(tokens)
557557

558558
while token.kind != TOKEN_RPAREN:
559559
if token is None:

liquid/builtin/expressions/loop.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -229,10 +229,10 @@ def parse(env: Environment, tokens: TokenStream) -> LoopExpression:
229229

230230
# Leading commas are OK
231231
if tokens.current.kind == TOKEN_COMMA:
232-
tokens.next()
232+
next(tokens)
233233

234234
while True:
235-
arg_token = tokens.next()
235+
arg_token = next(tokens)
236236
kind = arg_token.kind
237237

238238
if kind == TOKEN_LIMIT:
@@ -244,7 +244,7 @@ def parse(env: Environment, tokens: TokenStream) -> LoopExpression:
244244
tokens.eat_one_of(*argument_separators)
245245
offset_token = tokens.current
246246
if offset_token.kind == TOKEN_CONTINUE:
247-
tokens.next()
247+
next(tokens)
248248
offset = StringLiteral(token=offset_token, value="continue")
249249
else:
250250
offset = parse_primitive(env, tokens)

liquid/builtin/expressions/path.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ def parse(env: Environment, tokens: TokenStream) -> Path: # noqa: PLR0912
107107
segments.append(value)
108108
if tokens.peek.kind == TOKEN_WORD:
109109
# Two consecutive words indicate end of path.
110-
tokens.next()
110+
next(tokens)
111111
break
112112
elif kind == TOKEN_IDENTSTRING:
113113
segments.append(value)
@@ -124,7 +124,7 @@ def parse(env: Environment, tokens: TokenStream) -> Path: # noqa: PLR0912
124124
token=tokens.peek,
125125
)
126126
elif kind == TOKEN_LBRACKET:
127-
tokens.next()
127+
next(tokens)
128128
segments.append(Path.parse(env, tokens))
129129
tokens.expect(TOKEN_RBRACKET)
130130
if env.mode == Mode.STRICT and tokens.peek.kind == TOKEN_WORD:
@@ -149,7 +149,7 @@ def parse(env: Environment, tokens: TokenStream) -> Path: # noqa: PLR0912
149149
else:
150150
break
151151

152-
tokens.next()
152+
next(tokens)
153153

154154
if not segments:
155155
raise LiquidSyntaxError(

liquid/builtin/expressions/primitive.py

+8-8
Original file line numberDiff line numberDiff line change
@@ -281,23 +281,23 @@ def parse_primitive(env: Environment, tokens: TokenStream) -> Expression: # noq
281281
kind = token.kind
282282

283283
if kind == TOKEN_TRUE:
284-
return TrueLiteral(tokens.next())
284+
return TrueLiteral(next(tokens))
285285
if kind == TOKEN_FALSE:
286-
return FalseLiteral(tokens.next())
286+
return FalseLiteral(next(tokens))
287287
if kind in (TOKEN_NIL, TOKEN_NULL):
288-
return Nil(tokens.next())
288+
return Nil(next(tokens))
289289
if kind == TOKEN_INTEGER:
290-
return IntegerLiteral(tokens.next(), to_int(token.value))
290+
return IntegerLiteral(next(tokens), to_int(token.value))
291291
if kind == TOKEN_FLOAT:
292-
return FloatLiteral(tokens.next(), float(token.value))
292+
return FloatLiteral(next(tokens), float(token.value))
293293
if kind == TOKEN_STRING:
294-
return StringLiteral(tokens.next(), token.value)
294+
return StringLiteral(next(tokens), token.value)
295295
if kind == TOKEN_RANGE_LITERAL:
296296
return RangeLiteral.parse(env, tokens)
297297
if kind == TOKEN_EMPTY:
298-
return Empty(tokens.next())
298+
return Empty(next(tokens))
299299
if kind == TOKEN_BLANK:
300-
return Blank(tokens.next())
300+
return Blank(next(tokens))
301301
if kind in (TOKEN_WORD, TOKEN_IDENTSTRING, TOKEN_LBRACKET):
302302
return Path.parse(env, tokens)
303303

liquid/builtin/illegal.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ def parse(self, stream: TokenStream) -> IllegalNode:
2020
token = stream.expect(TOKEN_TAG)
2121

2222
if stream.peek.kind == TOKEN_EXPRESSION:
23-
stream.next()
23+
next(stream)
2424

2525
msg = (
2626
"missing tag name" if not token.value else f"unexpected tag '{token.value}'"

liquid/builtin/tags/case_tag.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -148,17 +148,17 @@ def parse(self, stream: TokenStream) -> Node:
148148
stream.current.kind != TOKEN_TAG
149149
and stream.current.value not in ENDWHENBLOCK
150150
):
151-
stream.next()
151+
next(stream)
152152

153153
parse_block = get_parser(self.env).parse_block
154154
blocks: list[Union[MultiExpressionBlockNode, BlockNode]] = []
155155

156156
while not stream.current.is_tag(TAG_ENDCASE):
157157
if stream.current.is_tag(TAG_ELSE):
158-
stream.next()
158+
next(stream)
159159
blocks.append(parse_block(stream, ENDWHENBLOCK))
160160
elif stream.current.is_tag(TAG_WHEN):
161-
alternative_token = stream.next()
161+
alternative_token = next(stream)
162162
expressions = self._parse_when_expression(
163163
stream.into_inner(tag=alternative_token)
164164
)
@@ -187,7 +187,7 @@ def parse(self, stream: TokenStream) -> Node:
187187
def _parse_when_expression(self, stream: TokenStream) -> list[Expression]:
188188
expressions: list[Expression] = [parse_primitive(self.env, stream)]
189189
while stream.current.kind in (TOKEN_COMMA, TOKEN_OR):
190-
stream.next()
190+
next(stream)
191191
try:
192192
expressions.append(parse_primitive(self.env, stream))
193193
except LiquidSyntaxError:

liquid/builtin/tags/comment_tag.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,6 @@ def parse(self, stream: TokenStream) -> CommentNode:
6767
):
6868
break
6969
text.append(stream.current.value)
70-
stream.next()
70+
next(stream)
7171

7272
return self.node_class(token, text="".join(text))

liquid/builtin/tags/doc_tag.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -63,6 +63,6 @@ def parse(self, stream: TokenStream) -> DocNode:
6363
if stream.current.kind == TOKEN_EOF:
6464
raise LiquidSyntaxError("doc tag was never closed", token=token)
6565
text.append(stream.current.value)
66-
stream.next()
66+
next(stream)
6767

6868
return self.node_class(token, text="".join(text))

liquid/builtin/tags/for_tag.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -209,7 +209,7 @@ def parse(self, stream: TokenStream) -> Node:
209209
default: Optional[BlockNode] = None
210210

211211
if stream.current.is_tag(TAG_ELSE):
212-
stream.next()
212+
next(stream)
213213
default = parse_block(stream, ENDFORELSEBLOCK)
214214

215215
stream.expect(TOKEN_TAG, value=TAG_ENDFOR)

liquid/builtin/tags/if_tag.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -159,7 +159,7 @@ def parse(self, stream: TokenStream) -> Node:
159159
# continue to parse more "elsif" expression, if any.
160160
try:
161161
expr = BooleanExpression.parse(
162-
self.env, stream.into_inner(tag=stream.next())
162+
self.env, stream.into_inner(tag=next(stream))
163163
)
164164
except LiquidSyntaxError as err:
165165
self.env.error(err)
@@ -176,11 +176,11 @@ def parse(self, stream: TokenStream) -> Node:
176176
default: Optional[BlockNode] = None
177177

178178
if stream.current.is_tag(TAG_ELSE):
179-
stream.next()
179+
next(stream)
180180
if stream.current.kind == TOKEN_EXPRESSION:
181181
if self.mode == Mode.LAX:
182182
# Superfluous expressions inside an `else` tag are ignored.
183-
stream.next()
183+
next(stream)
184184
else:
185185
raise LiquidSyntaxError(
186186
"found an 'else' tag expression, did you mean 'elsif'?",
@@ -196,7 +196,7 @@ def parse(self, stream: TokenStream) -> Node:
196196
and stream.current.value == TAG_ENDIF
197197
):
198198
break
199-
stream.next()
199+
next(stream)
200200

201201
stream.expect(TOKEN_TAG, value=TAG_ENDIF)
202202

liquid/builtin/tags/include_tag.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -235,14 +235,14 @@ def parse(self, stream: TokenStream) -> Node:
235235

236236
# Optionally bind a variable to the included template context
237237
if tokens.current.kind in BIND_TOKENS:
238-
tokens.next() # Eat 'with' or 'for'
238+
next(tokens) # Eat 'with' or 'for'
239239
tokens.expect(TOKEN_WORD)
240240
var = Path.parse(self.env, tokens)
241241

242242
# The bound variable will take the name of the template by default,
243243
# or an alias if an identifier follows the "as" keyword.
244244
if tokens.current.kind == TOKEN_AS:
245-
tokens.next() # Eat 'as'
245+
next(tokens) # Eat 'as'
246246
tokens.expect(TOKEN_WORD)
247247
alias = parse_identifier(self.env, tokens)
248248

liquid/builtin/tags/inline_comment_tag.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -38,7 +38,7 @@ def parse(self, stream: TokenStream) -> InlineCommentNode:
3838
token = stream.expect(TOKEN_TAG)
3939
# Empty comment tag?
4040
if stream.peek.kind == TOKEN_EXPRESSION:
41-
stream.next()
41+
next(stream)
4242
if RE_INVALID_INLINE_COMMENT.search(stream.current.value):
4343
raise LiquidSyntaxError(
4444
"every line of an inline comment must start with a '#' character",

liquid/builtin/tags/render_tag.py

+2-2
Original file line numberDiff line numberDiff line change
@@ -298,14 +298,14 @@ def parse(self, stream: TokenStream) -> Node:
298298
# Optionally bind a variable to the included template context
299299
if tokens.current.kind in BIND_TAGS:
300300
loop = tokens.current.kind == TOKEN_FOR
301-
tokens.next() # Eat 'with' or 'for'
301+
next(tokens) # Eat 'with' or 'for'
302302
tokens.expect(TOKEN_WORD)
303303
var = Path.parse(self.env, tokens)
304304

305305
# The bound variable will take the name of the template by default,
306306
# or an alias if an identifier follows the "as" keyword.
307307
if tokens.current.kind == TOKEN_AS:
308-
tokens.next() # Eat 'as'
308+
next(tokens) # Eat 'as'
309309
tokens.expect(TOKEN_WORD)
310310
alias = parse_identifier(self.env, tokens)
311311

0 commit comments

Comments (0)