[Python-checkins] cpython (merge 3.3 -> default): Untokenize, bad assert: Merge with 3.3

terry.reedy python-checkins at python.org
Mon Feb 17 22:50:23 CET 2014


http://hg.python.org/cpython/rev/38ac5d82507d
changeset:   89234:38ac5d82507d
parent:      89231:ea4c74cc4da5
parent:      89233:51e5a89afb3b
user:        Terry Jan Reedy <tjreedy at udel.edu>
date:        Mon Feb 17 16:46:43 2014 -0500
summary:
  Untokenize, bad assert: Merge with 3.3

files:
  Lib/test/test_tokenize.py |  16 +++++++++++++++-
  Lib/tokenize.py           |   4 +++-
  2 files changed, 18 insertions(+), 2 deletions(-)


diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -638,7 +638,7 @@
 from test import support
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
-                     open as tokenize_open)
+                     open as tokenize_open, Untokenizer)
 from io import BytesIO
 from unittest import TestCase
 import os, sys, glob
@@ -1153,6 +1153,19 @@
         # See http://bugs.python.org/issue16152
         self.assertExactTypeEqual('@          ', token.AT)
 
+class UntokenizeTest(TestCase):
+    
+    def test_bad_input_order(self):
+        u = Untokenizer()
+        u.prev_row = 2
+        u.prev_col = 2
+        with self.assertRaises(ValueError) as cm:
+            u.add_whitespace((1,3))
+        self.assertEqual(cm.exception.args[0], 
+                'start (1,3) precedes previous end (2,2)')
+        self.assertRaises(ValueError, u.add_whitespace, (2,1))
+
+
 __test__ = {"doctests" : doctests, 'decistmt': decistmt}
 
 def test_main():
@@ -1162,6 +1175,7 @@
     support.run_unittest(Test_Tokenize)
     support.run_unittest(TestDetectEncoding)
     support.run_unittest(TestTokenize)
+    support.run_unittest(UntokenizeTest)
 
 if __name__ == "__main__":
     test_main()
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -229,7 +229,9 @@
 
     def add_whitespace(self, start):
         row, col = start
-        assert row <= self.prev_row
+        if row < self.prev_row or row == self.prev_row and col < self.prev_col:
+            raise ValueError("start ({},{}) precedes previous end ({},{})"
+                             .format(row, col, self.prev_row, self.prev_col))
         col_offset = col - self.prev_col
         if col_offset:
             self.tokens.append(" " * col_offset)
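
For reference, a minimal sketch of the new behaviour (it mirrors the added test
case; assumes the patched tokenize module, and note that Untokenizer is an
internal, non-public class):

    from tokenize import Untokenizer

    u = Untokenizer()
    # Pretend output has already been emitted up to row 2, column 2.
    u.prev_row = 2
    u.prev_col = 2

    # A start position that precedes the previous end now raises ValueError
    # instead of slipping past the old, inverted "assert row <= self.prev_row".
    try:
        u.add_whitespace((1, 3))
    except ValueError as exc:
        print(exc)  # start (1,3) precedes previous end (2,2)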

-- 
Repository URL: http://hg.python.org/cpython

