@@ -3,7 +3,7 @@
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer, generate_tokens,
-                     NEWLINE, _generate_tokens_from_c_tokenizer)
+                     NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
 from io import BytesIO, StringIO
 import unittest
 from textwrap import dedent
@@ -2512,6 +2512,26 @@ def get_tokens(string):
         self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
         self.assertRaises(SyntaxError, get_tokens, "]")
 
+    def test_max_indent(self):
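+        # 100 mirrors MAXINDENT, the C tokenizer's cap on nested indentation levels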
+        MAXINDENT = 100
+
+        def generate_source(indents):
+            source = ''.join((' ' * x) + 'if True:\n' for x in range(indents))
+            source += ' ' * indents + 'pass\n'
+            return source
+
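+        # Just below the limit: tokenization runs to completion (the stream ends
+        # with the closing DEDENTs) and the source compiles.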
+        valid = generate_source(MAXINDENT - 1)
+        tokens = list(_generate_tokens_from_c_tokenizer(valid))
+        self.assertEqual(tokens[-1].type, DEDENT)
+        compile(valid, "<string>", "exec")
+
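+        # At the limit: the token stream stops at the final NEWLINE instead of
+        # emitting DEDENTs, and compile() raises IndentationError.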
+        invalid = generate_source(MAXINDENT)
+        tokens = list(_generate_tokens_from_c_tokenizer(invalid))
+        self.assertEqual(tokens[-1].type, NEWLINE)
+        self.assertRaises(
+            IndentationError, compile, invalid, "<string>", "exec"
+        )
+
     def test_continuation_lines_indentation(self):
         def get_tokens(string):
             return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]