[Python-checkins] cpython (3.3): Add more tests for hashlib and hash object attributes

christian.heimes python-checkins at python.org
Tue Jul 30 15:36:22 CEST 2013


http://hg.python.org/cpython/rev/10501d5f07b8
changeset:   84906:10501d5f07b8
branch:      3.3
user:        Christian Heimes <christian at cheimes.de>
date:        Tue Jul 30 15:33:30 2013 +0200
summary:
  Add more tests for hashlib and hash object attributes

files:
  Lib/test/test_hashlib.py |  75 +++++++++++++++++++++++----
  1 files changed, 62 insertions(+), 13 deletions(-)
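
The new check_blocksize_name() helper added below pins down the block_size,
digest_size and name attributes of the hash objects for every guaranteed
algorithm. A rough standalone sketch of the same checks, using only the
public hashlib API (the expected values here simply mirror the figures
asserted in test_blocksize_name):

    import hashlib

    # expected (block_size, digest_size) per algorithm
    expected = {
        'md5':    (64, 16),
        'sha1':   (64, 20),
        'sha224': (64, 28),
        'sha256': (64, 32),
        'sha384': (128, 48),
        'sha512': (128, 64),
    }

    for name, (block_size, digest_size) in expected.items():
        h = hashlib.new(name)
        assert h.block_size == block_size
        assert h.digest_size == digest_size
        assert len(h.digest()) == digest_size
        assert h.name.lower() == name   # some backends report upper case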


diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py
--- a/Lib/test/test_hashlib.py
+++ b/Lib/test/test_hashlib.py
@@ -96,10 +96,14 @@
 
         super(HashLibTestCase, self).__init__(*args, **kwargs)
 
+    @property
+    def hash_constructors(self):
+        constructors = self.constructors_to_test.values()
+        return itertools.chain.from_iterable(constructors)
+
     def test_hash_array(self):
         a = array.array("b", range(10))
-        constructors = self.constructors_to_test.values()
-        for cons in itertools.chain.from_iterable(constructors):
+        for cons in self.hash_constructors:
             c = cons(a)
             c.hexdigest()
 
@@ -136,39 +140,57 @@
         self.assertRaises(TypeError, get_builtin_constructor, 3)
 
     def test_hexdigest(self):
-        for name in self.supported_hash_names:
-            h = hashlib.new(name)
-            assert isinstance(h.digest(), bytes), name
+        for cons in self.hash_constructors:
+            h = cons()
+            self.assertIsInstance(h.digest(), bytes)
             self.assertEqual(hexstr(h.digest()), h.hexdigest())
 
-
     def test_large_update(self):
         aas = b'a' * 128
         bees = b'b' * 127
         cees = b'c' * 126
+        dees = b'd' * 2048  # HASHLIB_GIL_MINSIZE
 
-        for name in self.supported_hash_names:
-            m1 = hashlib.new(name)
+        for cons in self.hash_constructors:
+            m1 = cons()
             m1.update(aas)
             m1.update(bees)
             m1.update(cees)
+            m1.update(dees)
 
-            m2 = hashlib.new(name)
-            m2.update(aas + bees + cees)
+            m2 = cons()
+            m2.update(aas + bees + cees + dees)
             self.assertEqual(m1.digest(), m2.digest())
 
-    def check(self, name, data, digest):
+            m3 = cons(aas + bees + cees + dees)
+            self.assertEqual(m1.digest(), m3.digest())
+
+            # verify copy() doesn't touch original
+            m4 = cons(aas + bees + cees)
+            m4_digest = m4.digest()
+            m4_copy = m4.copy()
+            m4_copy.update(dees)
+            self.assertEqual(m1.digest(), m4_copy.digest())
+            self.assertEqual(m4.digest(), m4_digest)
+
+    def check(self, name, data, hexdigest):
+        hexdigest = hexdigest.lower()
         constructors = self.constructors_to_test[name]
         # 2 is for hashlib.name(...) and hashlib.new(name, ...)
         self.assertGreaterEqual(len(constructors), 2)
         for hash_object_constructor in constructors:
-            computed = hash_object_constructor(data).hexdigest()
+            m = hash_object_constructor(data)
+            computed = m.hexdigest()
             self.assertEqual(
-                    computed, digest,
+                    computed, hexdigest,
                     "Hash algorithm %s constructed using %s returned hexdigest"
                     " %r for %d byte input data that should have hashed to %r."
                     % (name, hash_object_constructor,
-                       computed, len(data), digest))
+                       computed, len(data), hexdigest))
+            computed = m.digest()
+            digest = bytes.fromhex(hexdigest)
+            self.assertEqual(computed, digest)
+            self.assertEqual(len(digest), m.digest_size)
 
     def check_no_unicode(self, algorithm_name):
         # Unicode objects are not allowed as input.
@@ -184,6 +206,24 @@
         self.check_no_unicode('sha384')
         self.check_no_unicode('sha512')
 
+    def check_blocksize_name(self, name, block_size=0, digest_size=0):
+        constructors = self.constructors_to_test[name]
+        for hash_object_constructor in constructors:
+            m = hash_object_constructor()
+            self.assertEqual(m.block_size, block_size)
+            self.assertEqual(m.digest_size, digest_size)
+            self.assertEqual(len(m.digest()), digest_size)
+            self.assertEqual(m.name.lower(), name.lower())
+            self.assertIn(name.split("_")[0], repr(m).lower())
+
+    def test_blocksize_name(self):
+        self.check_blocksize_name('md5', 64, 16)
+        self.check_blocksize_name('sha1', 64, 20)
+        self.check_blocksize_name('sha224', 64, 28)
+        self.check_blocksize_name('sha256', 64, 32)
+        self.check_blocksize_name('sha384', 128, 48)
+        self.check_blocksize_name('sha512', 128, 64)
+
     def test_case_md5_0(self):
         self.check('md5', b'', 'd41d8cd98f00b204e9800998ecf8427e')
 
@@ -323,6 +363,15 @@
         # for multithreaded operation (which is hardwired to 2048).
         gil_minsize = 2048
 
+        for cons in self.hash_constructors:
+            m = cons()
+            m.update(b'1')
+            m.update(b'#' * gil_minsize)
+            m.update(b'1')
+
+            m = cons(b'x' * gil_minsize)
+            m.update(b'1')
+
         m = hashlib.md5()
         m.update(b'1')
         m.update(b'#' * gil_minsize)
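
The new large-update and copy() assertions above reduce to an invariant that
holds for any hashlib object: feeding data incrementally, in a single
update() call, or via the constructor must yield the same digest, and
copy() must produce an independent object. A rough illustration outside the
test class (md5 chosen arbitrarily; any guaranteed algorithm would do):

    import hashlib

    data = b'a' * 128 + b'b' * 127 + b'c' * 126 + b'd' * 2048

    m1 = hashlib.md5()            # incremental updates
    for chunk in (data[:128], data[128:255], data[255:381], data[381:]):
        m1.update(chunk)

    m2 = hashlib.md5()            # single update() call
    m2.update(data)

    m3 = hashlib.md5(data)        # data passed to the constructor

    assert m1.digest() == m2.digest() == m3.digest()

    # updating a copy() must leave the original untouched
    m4 = hashlib.md5(data[:381])
    before = m4.digest()
    m4_copy = m4.copy()
    m4_copy.update(data[381:])
    assert m4_copy.digest() == m1.digest()
    assert m4.digest() == before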

-- 
Repository URL: http://hg.python.org/cpython

