\d{1,6})P(?P\d+)"
+ r"Q(?P\d+)\.(?P[^:/]+)")
+ previous_groups = None
+ for x in xrange(repetitions):
+ tmp_file = self._box._create_tmp()
+ head, tail = os.path.split(tmp_file.name)
+ self.assertEqual(head, os.path.abspath(os.path.join(self._path,
+ "tmp")),
+ "File in wrong location: '%s'" % head)
+ match = pattern.match(tail)
+ self.assertTrue(match is not None, "Invalid file name: '%s'" % tail)
+ groups = match.groups()
+ if previous_groups is not None:
+ self.assertTrue(int(groups[0] >= previous_groups[0]),
+ "Non-monotonic seconds: '%s' before '%s'" %
+ (previous_groups[0], groups[0]))
+ self.assertTrue(int(groups[1] >= previous_groups[1]) or
+ groups[0] != groups[1],
+ "Non-monotonic milliseconds: '%s' before '%s'" %
+ (previous_groups[1], groups[1]))
+ self.assertTrue(int(groups[2]) == pid,
+ "Process ID mismatch: '%s' should be '%s'" %
+ (groups[2], pid))
+ self.assertTrue(int(groups[3]) == int(previous_groups[3]) + 1,
+ "Non-sequential counter: '%s' before '%s'" %
+ (previous_groups[3], groups[3]))
+ self.assertTrue(groups[4] == hostname,
+ "Host name mismatch: '%s' should be '%s'" %
+ (groups[4], hostname))
+ previous_groups = groups
+ tmp_file.write(_sample_message)
+ tmp_file.seek(0)
+ self.assertTrue(tmp_file.read() == _sample_message)
+ tmp_file.close()
+ file_count = len(os.listdir(os.path.join(self._path, "tmp")))
+ self.assertTrue(file_count == repetitions,
+ "Wrong file count: '%s' should be '%s'" %
+ (file_count, repetitions))
+
+ def test_refresh(self):
+ # Update the table of contents
+ self.assertEqual(self._box._toc, {})
+ key0 = self._box.add(self._template % 0)
+ key1 = self._box.add(self._template % 1)
+ self.assertEqual(self._box._toc, {})
+ self._box._refresh()
+ self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
+ key1: os.path.join('new', key1)})
+ key2 = self._box.add(self._template % 2)
+ self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
+ key1: os.path.join('new', key1)})
+ self._box._refresh()
+ self.assertEqual(self._box._toc, {key0: os.path.join('new', key0),
+ key1: os.path.join('new', key1),
+ key2: os.path.join('new', key2)})
+
+ def test_lookup(self):
+ # Look up message subpaths in the TOC
+ self.assertRaises(KeyError, lambda: self._box._lookup('foo'))
+ key0 = self._box.add(self._template % 0)
+ self.assertEqual(self._box._lookup(key0), os.path.join('new', key0))
+ os.remove(os.path.join(self._path, 'new', key0))
+ self.assertEqual(self._box._toc, {key0: os.path.join('new', key0)})
+ # Be sure that the TOC is read back from disk (see issue #6896
+ # about bad mtime behaviour on some systems).
+ self._box.flush()
+ self.assertRaises(KeyError, lambda: self._box._lookup(key0))
+ self.assertEqual(self._box._toc, {})
+
+ def test_lock_unlock(self):
+ # Lock and unlock the mailbox. For Maildir, this does nothing.
+ self._box.lock()
+ self._box.unlock()
+
+ def test_folder (self):
+ # Test for bug #1569790: verify that folders returned by .get_folder()
+ # use the same factory function.
+ def dummy_factory (s):
+ return None
+ box = self._factory(self._path, factory=dummy_factory)
+ folder = box.add_folder('folder1')
+ self.assertIs(folder._factory, dummy_factory)
+
+ folder1_alias = box.get_folder('folder1')
+ self.assertIs(folder1_alias._factory, dummy_factory)
+
+ def test_directory_in_folder (self):
+ # Test that mailboxes still work if there's a stray extra directory
+ # in a folder.
+ for i in range(10):
+ self._box.add(mailbox.Message(_sample_message))
+
+ # Create a stray directory
+ os.mkdir(os.path.join(self._path, 'cur', 'stray-dir'))
+
+ # Check that looping still works with the directory present.
+ for msg in self._box:
+ pass
+
+ def test_file_permissions(self):
+ # Verify that message files are created without execute permissions
+ if not hasattr(os, "stat") or not hasattr(os, "umask"):
+ return
+ msg = mailbox.MaildirMessage(self._template % 0)
+ orig_umask = os.umask(0)
+ try:
+ key = self._box.add(msg)
+ finally:
+ os.umask(orig_umask)
+ path = os.path.join(self._path, self._box._lookup(key))
+ mode = os.stat(path).st_mode
+ self.assertEqual(mode & 0111, 0)
+
+ def test_folder_file_perms(self):
+ # From bug #3228, we want to verify that the file created inside a Maildir
+ # subfolder isn't marked as executable.
+ if not hasattr(os, "stat") or not hasattr(os, "umask"):
+ return
+
+ orig_umask = os.umask(0)
+ try:
+ subfolder = self._box.add_folder('subfolder')
+ finally:
+ os.umask(orig_umask)
+
+ path = os.path.join(subfolder._path, 'maildirfolder')
+ st = os.stat(path)
+ perms = st.st_mode
+ self.assertFalse((perms & 0111)) # Execute bits should all be off.
+
+ def test_reread(self):
+
+ # Put the last modified times more than two seconds into the past
+ # (because mtime may have only a two second granularity).
+ for subdir in ('cur', 'new'):
+ os.utime(os.path.join(self._box._path, subdir),
+ (time.time()-5,)*2)
+
+ # Because mtime has a two second granularity in worst case (FAT), a
+ # refresh is done unconditionally if called for within
+ # two-second-plus-a-bit of the last one, just in case the mbox has
+ # changed; so now we have to wait for that interval to expire.
+ time.sleep(2.01 + self._box._skewfactor)
+
+ # Re-reading causes the ._toc attribute to be assigned a new dictionary
+ # object, so we'll check that the ._toc attribute isn't a different
+ # object.
+ orig_toc = self._box._toc
+ def refreshed():
+ return self._box._toc is not orig_toc
+
+ self._box._refresh()
+ self.assertFalse(refreshed())
+
+ # Now, write something into cur and remove it. This changes
+ # the mtime and should cause a re-read.
+ filename = os.path.join(self._path, 'cur', 'stray-file')
+ f = open(filename, 'w')
+ f.close()
+ os.unlink(filename)
+ self._box._refresh()
+ self.assertTrue(refreshed())
+
class _TestMboxMMDF(TestMailbox):
    """Tests shared by the mbox and MMDF single-file mailbox formats."""

    def tearDown(self):
        # Close the box first so lock remnants (path + '.lock', etc.) can
        # be removed cleanly afterwards.
        self._box.close()
        self._delete_recursively(self._path)
        for lock_remnant in glob.glob(self._path + '.*'):
            test_support.unlink(lock_remnant)

    def test_add_from_string(self):
        # Add a string starting with 'From ' to the mailbox; the envelope
        # sender must be split off from the payload.
        key = self._box.add('From foo at bar blah\nFrom: foo\n\n0')
        self.assertEqual(self._box[key].get_from(), 'foo at bar blah')
        self.assertEqual(self._box[key].get_payload(), '0')

    def test_add_mbox_or_mmdf_message(self):
        # Adding an mboxMessage or MMDFMessage must not raise.
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg = class_('From foo at bar blah\nFrom: foo\n\n0')
            key = self._box.add(msg)

    def test_open_close_open(self):
        # Open and inspect a previously-created mailbox; read-only access
        # must leave the file's mtime unchanged.
        values = [self._template % i for i in xrange(3)]
        for value in values:
            self._box.add(value)
        self._box.close()
        mtime = os.path.getmtime(self._path)
        self._box = self._factory(self._path)
        self.assertEqual(len(self._box), 3)
        for key in self._box.iterkeys():
            self.assertIn(self._box.get_string(key), values)
        self._box.close()
        self.assertEqual(mtime, os.path.getmtime(self._path))

    def test_add_and_close(self):
        # Closing the mailbox must not change the items already added.
        self._box.add(_sample_message)
        for i in xrange(3):
            self._box.add(self._template % i)
        self._box.add(_sample_message)
        self._box._file.flush()
        self._box._file.seek(0)
        contents = self._box._file.read()
        self._box.close()
        with open(self._path, 'rb') as f:
            self.assertEqual(contents, f.read())
        self._box = self._factory(self._path)

    def test_lock_conflict(self):
        # Fork off a subprocess that will lock the file for 2 seconds,
        # unlock it, and then exit.
        if not hasattr(os, 'fork'):
            return
        pid = os.fork()
        if pid == 0:
            # In the child, lock the mailbox.
            self._box.lock()
            time.sleep(2)
            self._box.unlock()
            os._exit(0)

        # In the parent, sleep a bit to give the child time to acquire
        # the lock.
        time.sleep(0.5)
        try:
            self.assertRaises(mailbox.ExternalClashError,
                              self._box.lock)
        finally:
            # Wait for the child to exit.  Locking should now succeed.
            exited_pid, status = os.waitpid(pid, 0)

        self._box.lock()
        self._box.unlock()

    def test_relock(self):
        # Test case for bug #1575506: the mailbox class was locking the
        # wrong file object in its flush() method.
        msg = "Subject: sub\n\nbody\n"
        key1 = self._box.add(msg)
        self._box.flush()
        self._box.close()

        self._box = self._factory(self._path)
        self._box.lock()
        key2 = self._box.add(msg)
        self._box.flush()
        # flush() while locked must leave the box locked.
        self.assertTrue(self._box._locked)
        self._box.close()
+
+
+class TestMbox(_TestMboxMMDF):
+
+ _factory = lambda self, path, factory=None: mailbox.mbox(path, factory)
+
+ def test_file_perms(self):
+ # From bug #3228, we want to verify that the mailbox file isn't executable,
+ # even if the umask is set to something that would leave executable bits set.
+ # We only run this test on platforms that support umask.
+ if hasattr(os, 'umask') and hasattr(os, 'stat'):
+ try:
+ old_umask = os.umask(0077)
+ self._box.close()
+ os.unlink(self._path)
+ self._box = mailbox.mbox(self._path, create=True)
+ self._box.add('')
+ self._box.close()
+ finally:
+ os.umask(old_umask)
+
+ st = os.stat(self._path)
+ perms = st.st_mode
+ self.assertFalse((perms & 0111)) # Execute bits should all be off.
+
class TestMMDF(_TestMboxMMDF):
    """MMDF-format tests; everything is inherited from _TestMboxMMDF."""

    _factory = lambda self, path, factory=None: mailbox.MMDF(path, factory)
+
+
class TestMH(TestMailbox):
    """Tests for the MH mailbox format (one file per message, folders,
    and the .mh_sequences file)."""

    _factory = lambda self, path, factory=None: mailbox.MH(path, factory)

    def test_list_folders(self):
        # List folders.
        self._box.add_folder('one')
        self._box.add_folder('two')
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 3)
        self.assertEqual(set(self._box.list_folders()),
                         set(('one', 'two', 'three')))

    def test_get_folder(self):
        # Open folders.
        def dummy_factory(s):
            return None
        self._box = self._factory(self._path, dummy_factory)

        new_folder = self._box.add_folder('foo.bar')
        folder0 = self._box.get_folder('foo.bar')
        folder0.add(self._template % 'bar')
        self.assertTrue(os.path.isdir(os.path.join(self._path, 'foo.bar')))
        folder1 = self._box.get_folder('foo.bar')
        self.assertEqual(folder1.get_string(folder1.keys()[0]),
                         self._template % 'bar')

        # Test for bug #1569790: verify that folders returned by
        # .get_folder() use the same factory function.
        self.assertIs(new_folder._factory, self._box._factory)
        self.assertIs(folder0._factory, self._box._factory)

    def test_add_and_remove_folders(self):
        # Delete folders.
        self._box.add_folder('one')
        self._box.add_folder('two')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('one', 'two')))
        self._box.remove_folder('one')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two', )))
        self._box.add_folder('three')
        self.assertEqual(len(self._box.list_folders()), 2)
        self.assertEqual(set(self._box.list_folders()), set(('two', 'three')))
        self._box.remove_folder('three')
        self.assertEqual(len(self._box.list_folders()), 1)
        self.assertEqual(set(self._box.list_folders()), set(('two', )))
        self._box.remove_folder('two')
        self.assertEqual(len(self._box.list_folders()), 0)
        self.assertEqual(self._box.list_folders(), [])

    def test_sequences(self):
        # Get and set sequences.
        self.assertEqual(self._box.get_sequences(), {})
        msg0 = mailbox.MHMessage(self._template % 0)
        msg0.add_sequence('foo')
        key0 = self._box.add(msg0)
        self.assertEqual(self._box.get_sequences(), {'foo': [key0]})
        msg1 = mailbox.MHMessage(self._template % 1)
        msg1.set_sequences(['bar', 'replied', 'foo'])
        key1 = self._box.add(msg1)
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [key0, key1], 'bar': [key1],
                          'replied': [key1]})
        # Replacing a message replaces its sequence memberships too.
        msg0.set_sequences(['flagged'])
        self._box[key0] = msg0
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [key1], 'bar': [key1], 'replied': [key1],
                          'flagged': [key0]})
        self._box.remove(key1)
        self.assertEqual(self._box.get_sequences(), {'flagged': [key0]})

    def test_issue2625(self):
        # Regression test: get_message() on a message with sequences
        # must not raise.
        msg0 = mailbox.MHMessage(self._template % 0)
        msg0.add_sequence('foo')
        key0 = self._box.add(msg0)
        refmsg0 = self._box.get_message(key0)

    def test_issue7627(self):
        # Regression test: remove() while the mailbox is locked
        # must not raise.
        msg0 = mailbox.MHMessage(self._template % 0)
        key0 = self._box.add(msg0)
        self._box.lock()
        self._box.remove(key0)
        self._box.unlock()

    def test_pack(self):
        # Pack the contents of the mailbox: keys are renumbered 1..n and
        # the sequences are remapped accordingly.
        msg0 = mailbox.MHMessage(self._template % 0)
        msg1 = mailbox.MHMessage(self._template % 1)
        msg2 = mailbox.MHMessage(self._template % 2)
        msg3 = mailbox.MHMessage(self._template % 3)
        msg0.set_sequences(['foo', 'unseen'])
        msg1.set_sequences(['foo'])
        msg2.set_sequences(['foo', 'flagged'])
        msg3.set_sequences(['foo', 'bar', 'replied'])
        key0 = self._box.add(msg0)
        key1 = self._box.add(msg1)
        key2 = self._box.add(msg2)
        key3 = self._box.add(msg3)
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [key0, key1, key2, key3], 'unseen': [key0],
                          'flagged': [key2], 'bar': [key3],
                          'replied': [key3]})
        self._box.remove(key2)
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [key0, key1, key3], 'unseen': [key0],
                          'bar': [key3], 'replied': [key3]})
        self._box.pack()
        self.assertEqual(self._box.keys(), [1, 2, 3])
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [1, 2, 3], 'unseen': [1], 'bar': [3],
                          'replied': [3]})

        # Test case for packing while holding the mailbox locked.
        key0 = self._box.add(msg1)
        key1 = self._box.add(msg1)
        key2 = self._box.add(msg1)
        key3 = self._box.add(msg1)

        self._box.remove(key0)
        self._box.remove(key2)
        self._box.lock()
        self._box.pack()
        self._box.unlock()
        self.assertEqual(self._box.get_sequences(),
                         {'foo': [1, 2, 3, 4, 5],
                          'unseen': [1], 'bar': [3], 'replied': [3]})

    def _get_lock_path(self):
        # MH locks the sequences file, not the mailbox directory.
        return os.path.join(self._path, '.mh_sequences.lock')
+
+
class TestBabyl(TestMailbox):
    """Tests for the Babyl (Rmail) mailbox format."""

    _factory = lambda self, path, factory=None: mailbox.Babyl(path, factory)

    def tearDown(self):
        # Close the box first so lock remnants can be removed cleanly.
        self._box.close()
        self._delete_recursively(self._path)
        for lock_remnant in glob.glob(self._path + '.*'):
            test_support.unlink(lock_remnant)

    def test_labels(self):
        # Get labels from the mailbox; the label set follows the union of
        # the labels of the messages it contains.
        self.assertEqual(self._box.get_labels(), [])
        msg0 = mailbox.BabylMessage(self._template % 0)
        msg0.add_label('foo')
        key0 = self._box.add(msg0)
        self.assertEqual(self._box.get_labels(), ['foo'])
        msg1 = mailbox.BabylMessage(self._template % 1)
        msg1.set_labels(['bar', 'answered', 'foo'])
        key1 = self._box.add(msg1)
        # Note: 'answered' is a standard Babyl attribute, not a user label,
        # so it does not appear in get_labels().
        self.assertEqual(set(self._box.get_labels()), set(['foo', 'bar']))
        msg0.set_labels(['blah', 'filed'])
        self._box[key0] = msg0
        self.assertEqual(set(self._box.get_labels()),
                         set(['foo', 'bar', 'blah']))
        self._box.remove(key1)
        self.assertEqual(set(self._box.get_labels()), set(['blah']))
+
+
class TestMessage(TestBase):
    """Tests for mailbox.Message construction and format conversion.

    Subclasses set _factory to a format-specific message class to reuse
    these tests, and override _post_initialize_hook to check extra state.
    """

    _factory = mailbox.Message       # Overridden by subclasses to reuse tests

    def setUp(self):
        self._path = test_support.TESTFN

    def tearDown(self):
        self._delete_recursively(self._path)

    def test_initialize_with_eMM(self):
        # Initialize based on an email.message.Message instance.
        eMM = email.message_from_string(_sample_message)
        msg = self._factory(eMM)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_string(self):
        # Initialize based on a string.
        msg = self._factory(_sample_message)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_file(self):
        # Initialize based on the contents of a file.
        with open(self._path, 'w+') as f:
            f.write(_sample_message)
            f.seek(0)
            msg = self._factory(f)
        self._post_initialize_hook(msg)
        self._check_sample(msg)

    def test_initialize_with_nothing(self):
        # Initialize without arguments: an empty message of the right type.
        msg = self._factory()
        self._post_initialize_hook(msg)
        self.assertIsInstance(msg, email.message.Message)
        self.assertIsInstance(msg, mailbox.Message)
        self.assertIsInstance(msg, self._factory)
        self.assertEqual(msg.keys(), [])
        self.assertFalse(msg.is_multipart())
        self.assertEqual(msg.get_payload(), None)

    def test_initialize_incorrectly(self):
        # Initializing with an invalid argument raises TypeError.
        self.assertRaises(TypeError, lambda: self._factory(object()))

    def test_become_message(self):
        # Take on the state of another message.
        eMM = email.message_from_string(_sample_message)
        msg = self._factory()
        msg._become_message(eMM)
        self._check_sample(msg)

    def test_explain_to(self):
        # Copy self's format-specific data to other message formats.
        # This test is superficial; better ones are in
        # TestMessageConversion.
        msg = self._factory()
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            other_msg = class_()
            msg._explain_to(other_msg)
        # A plain email Message is not a valid conversion target.
        other_msg = email.message.Message()
        self.assertRaises(TypeError, lambda: msg._explain_to(other_msg))

    def _post_initialize_hook(self, msg):
        # Overridden by subclasses to check extra things after
        # initialization.
        pass
+
+
class TestMaildirMessage(TestMessage):
    """Tests for MaildirMessage: subdir, flags, date, and info handling."""

    _factory = mailbox.MaildirMessage

    def _post_initialize_hook(self, msg):
        # New MaildirMessages start in 'new' with an empty info string.
        self.assertEqual(msg._subdir, 'new')
        self.assertEqual(msg._info, '')

    def test_subdir(self):
        # Use get_subdir() and set_subdir(); only 'new' and 'cur' are valid.
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_subdir('cur')
        self.assertEqual(msg.get_subdir(), 'cur')
        msg.set_subdir('new')
        self.assertEqual(msg.get_subdir(), 'new')
        # 'tmp' is rejected and must leave the subdir unchanged.
        self.assertRaises(ValueError, lambda: msg.set_subdir('tmp'))
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_subdir('new')
        self.assertEqual(msg.get_subdir(), 'new')
        self._check_sample(msg)

    def test_flags(self):
        # Use get_flags(), set_flags(), add_flag(), remove_flag(); flags
        # are always reported in sorted order.
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_subdir(), 'new')
        msg.set_flags('F')
        self.assertEqual(msg.get_subdir(), 'new')
        self.assertEqual(msg.get_flags(), 'F')
        msg.set_flags('SDTP')
        self.assertEqual(msg.get_flags(), 'DPST')
        msg.add_flag('FT')
        self.assertEqual(msg.get_flags(), 'DFPST')
        msg.remove_flag('TDRP')
        self.assertEqual(msg.get_flags(), 'FS')
        self.assertEqual(msg.get_subdir(), 'new')
        self._check_sample(msg)

    def test_date(self):
        # Use get_date() and set_date(); the default is "now".
        msg = mailbox.MaildirMessage(_sample_message)
        diff = msg.get_date() - time.time()
        self.assertTrue(abs(diff) < 60, diff)
        msg.set_date(0.0)
        self.assertEqual(msg.get_date(), 0.0)

    def test_info(self):
        # Use get_info() and set_info(); only strings are accepted.
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_info(), '')
        msg.set_info('1,foo=bar')
        self.assertEqual(msg.get_info(), '1,foo=bar')
        self.assertRaises(TypeError, lambda: msg.set_info(None))
        self._check_sample(msg)

    def test_info_and_flags(self):
        # Test the interaction of the info and flag methods: flags live in
        # the '2,...' experimental-info section of the info string.
        msg = mailbox.MaildirMessage(_sample_message)
        self.assertEqual(msg.get_info(), '')
        msg.set_flags('SF')
        self.assertEqual(msg.get_flags(), 'FS')
        self.assertEqual(msg.get_info(), '2,FS')
        msg.set_info('1,')
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_info(), '1,')
        msg.remove_flag('RPT')
        self.assertEqual(msg.get_flags(), '')
        self.assertEqual(msg.get_info(), '1,')
        msg.add_flag('D')
        self.assertEqual(msg.get_flags(), 'D')
        self.assertEqual(msg.get_info(), '2,D')
        self._check_sample(msg)
+
+
class _TestMboxMMDFMessage(TestMessage):
    """Tests shared by mboxMessage and MMDFMessage ("From " line, flags)."""

    _factory = mailbox._mboxMMDFMessage

    def _post_initialize_hook(self, msg):
        # A fresh message gets a default MAILER-DAEMON "From " line.
        self._check_from(msg)

    def test_initialize_with_unixfrom(self):
        # Initialize with a message that already has a _unixfrom
        # attribute; the envelope sender must be carried over.
        msg = mailbox.Message(_sample_message)
        msg.set_unixfrom('From foo at bar blah')
        msg = mailbox.mboxMessage(msg)
        self.assertEqual(msg.get_from(), 'foo at bar blah')

    def test_from(self):
        # Get and set the "From " line, with and without a timestamp.
        msg = mailbox.mboxMessage(_sample_message)
        self._check_from(msg)
        msg.set_from('foo bar')
        self.assertEqual(msg.get_from(), 'foo bar')
        msg.set_from('foo at bar', True)     # True -> use the current time
        self._check_from(msg, 'foo at bar')
        msg.set_from('blah at temp', time.localtime())
        self._check_from(msg, 'blah at temp')

    def test_flags(self):
        # Use get_flags(), set_flags(), add_flag(), remove_flag(); flags
        # are reported in the canonical 'RODFA' order.
        msg = mailbox.mboxMessage(_sample_message)
        self.assertEqual(msg.get_flags(), '')
        msg.set_flags('F')
        self.assertEqual(msg.get_flags(), 'F')
        msg.set_flags('XODR')
        self.assertEqual(msg.get_flags(), 'RODX')
        msg.add_flag('FA')
        self.assertEqual(msg.get_flags(), 'RODFAX')
        msg.remove_flag('FDXA')
        self.assertEqual(msg.get_flags(), 'RO')
        self._check_sample(msg)

    def _check_from(self, msg, sender=None):
        # Check the contents of the "From " line: sender followed by an
        # asctime-style timestamp.
        if sender is None:
            sender = "MAILER-DAEMON"
        self.assertTrue(re.match(sender + r" \w{3} \w{3} [\d ]\d [\d ]\d:\d{2}:"
                                 r"\d{2} \d{4}", msg.get_from()))
+
+
class TestMboxMessage(_TestMboxMMDFMessage):
    """mboxMessage tests; everything is inherited from _TestMboxMMDFMessage."""

    _factory = mailbox.mboxMessage
+
+
class TestMHMessage(TestMessage):
    """Tests for MHMessage sequence handling."""

    _factory = mailbox.MHMessage

    def _post_initialize_hook(self, msg):
        # A fresh MHMessage belongs to no sequences.
        self.assertEqual(msg._sequences, [])

    def test_sequences(self):
        # Get, set, join, and leave sequences; order of addition is
        # preserved, duplicates and absent removals are ignored.
        msg = mailbox.MHMessage(_sample_message)
        self.assertEqual(msg.get_sequences(), [])
        msg.set_sequences(['foobar'])
        self.assertEqual(msg.get_sequences(), ['foobar'])
        msg.set_sequences([])
        self.assertEqual(msg.get_sequences(), [])
        msg.add_sequence('unseen')
        self.assertEqual(msg.get_sequences(), ['unseen'])
        msg.add_sequence('flagged')
        self.assertEqual(msg.get_sequences(), ['unseen', 'flagged'])
        msg.add_sequence('flagged')          # duplicate: no effect
        self.assertEqual(msg.get_sequences(), ['unseen', 'flagged'])
        msg.remove_sequence('unseen')
        self.assertEqual(msg.get_sequences(), ['flagged'])
        msg.add_sequence('foobar')
        self.assertEqual(msg.get_sequences(), ['flagged', 'foobar'])
        msg.remove_sequence('replied')       # not present: no effect
        self.assertEqual(msg.get_sequences(), ['flagged', 'foobar'])
        msg.set_sequences(['foobar', 'replied'])
        self.assertEqual(msg.get_sequences(), ['foobar', 'replied'])
+
+
class TestBabylMessage(TestMessage):
    """Tests for BabylMessage label and visible-header handling."""

    _factory = mailbox.BabylMessage

    def _post_initialize_hook(self, msg):
        # A fresh BabylMessage has no labels.
        self.assertEqual(msg._labels, [])

    def test_labels(self):
        # Get, set, join, and leave labels; order of addition is
        # preserved, duplicates and absent removals are ignored.
        msg = mailbox.BabylMessage(_sample_message)
        self.assertEqual(msg.get_labels(), [])
        msg.set_labels(['foobar'])
        self.assertEqual(msg.get_labels(), ['foobar'])
        msg.set_labels([])
        self.assertEqual(msg.get_labels(), [])
        msg.add_label('filed')
        self.assertEqual(msg.get_labels(), ['filed'])
        msg.add_label('resent')
        self.assertEqual(msg.get_labels(), ['filed', 'resent'])
        msg.add_label('resent')              # duplicate: no effect
        self.assertEqual(msg.get_labels(), ['filed', 'resent'])
        msg.remove_label('filed')
        self.assertEqual(msg.get_labels(), ['resent'])
        msg.add_label('foobar')
        self.assertEqual(msg.get_labels(), ['resent', 'foobar'])
        msg.remove_label('unseen')           # not present: no effect
        self.assertEqual(msg.get_labels(), ['resent', 'foobar'])
        msg.set_labels(['foobar', 'answered'])
        self.assertEqual(msg.get_labels(), ['foobar', 'answered'])

    def test_visible(self):
        # Get, set, and update the visible headers.
        msg = mailbox.BabylMessage(_sample_message)
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), [])
        self.assertIs(visible.get_payload(), None)
        visible['User-Agent'] = 'FooBar 1.0'
        visible['X-Whatever'] = 'Blah'
        # get_visible() returns a copy: mutating it does not affect msg.
        self.assertEqual(msg.get_visible().keys(), [])
        msg.set_visible(visible)
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'X-Whatever'])
        self.assertEqual(visible['User-Agent'], 'FooBar 1.0')
        self.assertEqual(visible['X-Whatever'], 'Blah')
        self.assertIs(visible.get_payload(), None)
        # update_visible() refreshes the stored copy from the real
        # headers; the already-fetched copy is unchanged.
        msg.update_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'X-Whatever'])
        self.assertIs(visible.get_payload(), None)
        visible = msg.get_visible()
        self.assertEqual(visible.keys(), ['User-Agent', 'Date', 'From', 'To',
                                          'Subject'])
        for header in ('User-Agent', 'Date', 'From', 'To', 'Subject'):
            self.assertEqual(visible[header], msg[header])
+
+
class TestMMDFMessage(_TestMboxMMDFMessage):
    """MMDFMessage tests; everything is inherited from _TestMboxMMDFMessage."""

    _factory = mailbox.MMDFMessage
+
+
class TestMessageConversion(TestBase):
    """Tests for converting messages between the mailbox formats,
    checking how flags, sequences, and labels map onto each other."""

    def test_plain_to_x(self):
        # Convert Message to all formats.
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            msg_plain = mailbox.Message(_sample_message)
            msg = class_(msg_plain)
            self._check_sample(msg)

    def test_x_to_plain(self):
        # Convert all formats to Message.
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            msg = class_(_sample_message)
            msg_plain = mailbox.Message(msg)
            self._check_sample(msg_plain)

    def test_x_to_invalid(self):
        # Converting from an invalid source raises TypeError.
        for class_ in (mailbox.Message, mailbox.MaildirMessage,
                       mailbox.mboxMessage, mailbox.MHMessage,
                       mailbox.BabylMessage, mailbox.MMDFMessage):
            self.assertRaises(TypeError, lambda: class_(False))

    def test_maildir_to_maildir(self):
        # Convert MaildirMessage to MaildirMessage: flags, subdir, and
        # date are all preserved.
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        msg_maildir.set_flags('DFPRST')
        msg_maildir.set_subdir('cur')
        date = msg_maildir.get_date()
        msg = mailbox.MaildirMessage(msg_maildir)
        self._check_sample(msg)
        self.assertEqual(msg.get_flags(), 'DFPRST')
        self.assertEqual(msg.get_subdir(), 'cur')
        self.assertEqual(msg.get_date(), date)

    def test_maildir_to_mboxmmdf(self):
        # Convert MaildirMessage to mboxMessage and MMDFMessage; each pair
        # is (maildir flags, expected mbox/MMDF flags).
        pairs = (('D', ''), ('F', 'F'), ('P', ''), ('R', 'A'), ('S', 'R'),
                 ('T', 'D'), ('DFPRST', 'RDFA'))
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_maildir = mailbox.MaildirMessage(_sample_message)
            msg_maildir.set_date(0.0)
            for setting, result in pairs:
                msg_maildir.set_flags(setting)
                msg = class_(msg_maildir)
                self.assertEqual(msg.get_flags(), result)
                self.assertEqual(msg.get_from(), 'MAILER-DAEMON %s' %
                                 time.asctime(time.gmtime(0.0)))
            # A message in 'cur' additionally gains the O ("old") flag.
            msg_maildir.set_subdir('cur')
            self.assertEqual(class_(msg_maildir).get_flags(), 'RODFA')

    def test_maildir_to_mh(self):
        # Convert MaildirMessage to MHMessage; each pair is
        # (maildir flags, expected MH sequences).
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        pairs = (('D', ['unseen']), ('F', ['unseen', 'flagged']),
                 ('P', ['unseen']), ('R', ['unseen', 'replied']), ('S', []),
                 ('T', ['unseen']), ('DFPRST', ['replied', 'flagged']))
        for setting, result in pairs:
            msg_maildir.set_flags(setting)
            self.assertEqual(mailbox.MHMessage(msg_maildir).get_sequences(),
                             result)

    def test_maildir_to_babyl(self):
        # Convert MaildirMessage to BabylMessage; each pair is
        # (maildir flags, expected Babyl labels).
        msg_maildir = mailbox.MaildirMessage(_sample_message)
        pairs = (('D', ['unseen']), ('F', ['unseen']),
                 ('P', ['unseen', 'forwarded']),
                 ('R', ['unseen', 'answered']),
                 ('S', []), ('T', ['unseen', 'deleted']),
                 ('DFPRST', ['deleted', 'answered', 'forwarded']))
        for setting, result in pairs:
            msg_maildir.set_flags(setting)
            self.assertEqual(mailbox.BabylMessage(msg_maildir).get_labels(),
                             result)

    def test_mboxmmdf_to_maildir(self):
        # Convert mboxMessage and MMDFMessage to MaildirMessage; the
        # "From " line timestamp becomes the Maildir date.
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            msg_mboxMMDF.set_from('foo at bar', time.gmtime(0.0))
            pairs = (('R', 'S'), ('O', ''), ('D', 'T'), ('F', 'F'),
                     ('A', 'R'), ('RODFA', 'FRST'))
            for setting, result in pairs:
                msg_mboxMMDF.set_flags(setting)
                msg = mailbox.MaildirMessage(msg_mboxMMDF)
                self.assertEqual(msg.get_flags(), result)
                self.assertEqual(msg.get_date(), 0.0)
            # The O ("old") flag maps to the 'cur' subdirectory.
            msg_mboxMMDF.set_flags('O')
            self.assertEqual(mailbox.MaildirMessage(msg_mboxMMDF).get_subdir(),
                             'cur')

    def test_mboxmmdf_to_mboxmmdf(self):
        # Convert mboxMessage and MMDFMessage to mboxMessage and
        # MMDFMessage: flags and "From " line are preserved both ways.
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            msg_mboxMMDF.set_flags('RODFA')
            msg_mboxMMDF.set_from('foo at bar')
            for class2_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                msg2 = class2_(msg_mboxMMDF)
                self.assertEqual(msg2.get_flags(), 'RODFA')
                self.assertEqual(msg2.get_from(), 'foo at bar')

    def test_mboxmmdf_to_mh(self):
        # Convert mboxMessage and MMDFMessage to MHMessage; each pair is
        # (mbox/MMDF flags, expected MH sequences).
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg_mboxMMDF = class_(_sample_message)
            pairs = (('R', []), ('O', ['unseen']), ('D', ['unseen']),
                     ('F', ['unseen', 'flagged']),
                     ('A', ['unseen', 'replied']),
                     ('RODFA', ['replied', 'flagged']))
            for setting, result in pairs:
                msg_mboxMMDF.set_flags(setting)
                self.assertEqual(mailbox.MHMessage(msg_mboxMMDF).get_sequences(),
                                 result)

    def test_mboxmmdf_to_babyl(self):
        # Convert mboxMessage and MMDFMessage to BabylMessage; each pair
        # is (mbox/MMDF flags, expected Babyl labels).
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            msg = class_(_sample_message)
            pairs = (('R', []), ('O', ['unseen']),
                     ('D', ['unseen', 'deleted']), ('F', ['unseen']),
                     ('A', ['unseen', 'answered']),
                     ('RODFA', ['deleted', 'answered']))
            for setting, result in pairs:
                msg.set_flags(setting)
                self.assertEqual(mailbox.BabylMessage(msg).get_labels(),
                                 result)

    def test_mh_to_maildir(self):
        # Convert MHMessage to MaildirMessage; each pair is
        # (MH sequence, expected maildir flags).
        pairs = (('unseen', ''), ('replied', 'RS'), ('flagged', 'FS'))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), result)
            self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), 'FR')
        self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')

    def test_mh_to_mboxmmdf(self):
        # Convert MHMessage to mboxMessage and MMDFMessage; each pair is
        # (MH sequence, expected mbox/MMDF flags).
        pairs = (('unseen', 'O'), ('replied', 'ROA'), ('flagged', 'ROF'))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                self.assertEqual(class_(msg).get_flags(), result)
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            self.assertEqual(class_(msg).get_flags(), 'OFA')

    def test_mh_to_mh(self):
        # Convert MHMessage to MHMessage: sequences are preserved.
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        self.assertEqual(mailbox.MHMessage(msg).get_sequences(),
                         ['unseen', 'replied', 'flagged'])

    def test_mh_to_babyl(self):
        # Convert MHMessage to BabylMessage; each pair is
        # (MH sequence, expected Babyl labels).
        pairs = (('unseen', ['unseen']), ('replied', ['answered']),
                 ('flagged', []))
        for setting, result in pairs:
            msg = mailbox.MHMessage(_sample_message)
            msg.add_sequence(setting)
            self.assertEqual(mailbox.BabylMessage(msg).get_labels(), result)
        msg = mailbox.MHMessage(_sample_message)
        msg.add_sequence('unseen')
        msg.add_sequence('replied')
        msg.add_sequence('flagged')
        self.assertEqual(mailbox.BabylMessage(msg).get_labels(),
                         ['unseen', 'answered'])

    def test_babyl_to_maildir(self):
        # Convert BabylMessage to MaildirMessage; each pair is
        # (Babyl label, expected maildir flags).
        pairs = (('unseen', ''), ('deleted', 'ST'), ('filed', 'S'),
                 ('answered', 'RS'), ('forwarded', 'PS'), ('edited', 'S'),
                 ('resent', 'PS'))
        for setting, result in pairs:
            msg = mailbox.BabylMessage(_sample_message)
            msg.add_label(setting)
            self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), result)
            self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        self.assertEqual(mailbox.MaildirMessage(msg).get_flags(), 'PRT')
        self.assertEqual(mailbox.MaildirMessage(msg).get_subdir(), 'cur')

    def test_babyl_to_mboxmmdf(self):
        # Convert BabylMessage to mboxMessage and MMDFMessage; each pair
        # is (Babyl label, expected mbox/MMDF flags).
        pairs = (('unseen', 'O'), ('deleted', 'ROD'), ('filed', 'RO'),
                 ('answered', 'ROA'), ('forwarded', 'RO'), ('edited', 'RO'),
                 ('resent', 'RO'))
        for setting, result in pairs:
            for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
                msg = mailbox.BabylMessage(_sample_message)
                msg.add_label(setting)
                self.assertEqual(class_(msg).get_flags(), result)
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        for class_ in (mailbox.mboxMessage, mailbox.MMDFMessage):
            self.assertEqual(class_(msg).get_flags(), 'ODA')

    def test_babyl_to_mh(self):
        # Convert BabylMessage to MHMessage; each pair is
        # (Babyl label, expected MH sequences).
        pairs = (('unseen', ['unseen']), ('deleted', []), ('filed', []),
                 ('answered', ['replied']), ('forwarded', []),
                 ('edited', []), ('resent', []))
        for setting, result in pairs:
            msg = mailbox.BabylMessage(_sample_message)
            msg.add_label(setting)
            self.assertEqual(mailbox.MHMessage(msg).get_sequences(), result)
        msg = mailbox.BabylMessage(_sample_message)
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        self.assertEqual(mailbox.MHMessage(msg).get_sequences(),
                         ['unseen', 'replied'])

    def test_babyl_to_babyl(self):
        # Convert BabylMessage to BabylMessage: labels and visible
        # headers are preserved.
        msg = mailbox.BabylMessage(_sample_message)
        msg.update_visible()
        for label in ('unseen', 'deleted', 'filed', 'answered', 'forwarded',
                      'edited', 'resent'):
            msg.add_label(label)
        msg2 = mailbox.BabylMessage(msg)
        self.assertEqual(msg2.get_labels(), ['unseen', 'deleted', 'filed',
                                             'answered', 'forwarded',
                                             'edited', 'resent'])
        self.assertEqual(msg.get_visible().keys(), msg2.get_visible().keys())
+ for key in msg.get_visible().keys():
+ self.assertEqual(msg.get_visible()[key], msg2.get_visible()[key])
+
+
+class TestProxyFileBase(TestBase):
+
+ def _test_read(self, proxy):
+ # Read by byte
+ proxy.seek(0)
+ self.assertEqual(proxy.read(), 'bar')
+ proxy.seek(1)
+ self.assertEqual(proxy.read(), 'ar')
+ proxy.seek(0)
+ self.assertEqual(proxy.read(2), 'ba')
+ proxy.seek(1)
+ self.assertEqual(proxy.read(-1), 'ar')
+ proxy.seek(2)
+ self.assertEqual(proxy.read(1000), 'r')
+
+ def _test_readline(self, proxy):
+ # Read by line
+ proxy.seek(0)
+ self.assertEqual(proxy.readline(), 'foo' + os.linesep)
+ self.assertEqual(proxy.readline(), 'bar' + os.linesep)
+ self.assertEqual(proxy.readline(), 'fred' + os.linesep)
+ self.assertEqual(proxy.readline(), 'bob')
+ proxy.seek(2)
+ self.assertEqual(proxy.readline(), 'o' + os.linesep)
+ proxy.seek(6 + 2 * len(os.linesep))
+ self.assertEqual(proxy.readline(), 'fred' + os.linesep)
+ proxy.seek(6 + 2 * len(os.linesep))
+ self.assertEqual(proxy.readline(2), 'fr')
+ self.assertEqual(proxy.readline(-10), 'ed' + os.linesep)
+
+ def _test_readlines(self, proxy):
+ # Read multiple lines
+ proxy.seek(0)
+ self.assertEqual(proxy.readlines(), ['foo' + os.linesep,
+ 'bar' + os.linesep,
+ 'fred' + os.linesep, 'bob'])
+ proxy.seek(0)
+ self.assertEqual(proxy.readlines(2), ['foo' + os.linesep])
+ proxy.seek(3 + len(os.linesep))
+ self.assertEqual(proxy.readlines(4 + len(os.linesep)),
+ ['bar' + os.linesep, 'fred' + os.linesep])
+ proxy.seek(3)
+ self.assertEqual(proxy.readlines(1000), [os.linesep, 'bar' + os.linesep,
+ 'fred' + os.linesep, 'bob'])
+
+ def _test_iteration(self, proxy):
+ # Iterate by line
+ proxy.seek(0)
+ iterator = iter(proxy)
+ self.assertEqual(list(iterator),
+ ['foo' + os.linesep, 'bar' + os.linesep, 'fred' + os.linesep, 'bob'])
+
+ def _test_seek_and_tell(self, proxy):
+ # Seek and use tell to check position
+ proxy.seek(3)
+ self.assertEqual(proxy.tell(), 3)
+ self.assertEqual(proxy.read(len(os.linesep)), os.linesep)
+ proxy.seek(2, 1)
+ self.assertEqual(proxy.read(1 + len(os.linesep)), 'r' + os.linesep)
+ proxy.seek(-3 - len(os.linesep), 2)
+ self.assertEqual(proxy.read(3), 'bar')
+ proxy.seek(2, 0)
+ self.assertEqual(proxy.read(), 'o' + os.linesep + 'bar' + os.linesep)
+ proxy.seek(100)
+ self.assertEqual(proxy.read(), '')
+
+ def _test_close(self, proxy):
+ # Close a file
+ proxy.close()
+ self.assertRaises(AttributeError, lambda: proxy.close())
+
+
+class TestProxyFile(TestProxyFileBase):
+
+ def setUp(self):
+ self._path = test_support.TESTFN
+ self._file = open(self._path, 'wb+')
+
+ def tearDown(self):
+ self._file.close()
+ self._delete_recursively(self._path)
+
+ def test_initialize(self):
+ # Initialize and check position
+ self._file.write('foo')
+ pos = self._file.tell()
+ proxy0 = mailbox._ProxyFile(self._file)
+ self.assertEqual(proxy0.tell(), pos)
+ self.assertEqual(self._file.tell(), pos)
+ proxy1 = mailbox._ProxyFile(self._file, 0)
+ self.assertEqual(proxy1.tell(), 0)
+ self.assertEqual(self._file.tell(), pos)
+
+ def test_read(self):
+ self._file.write('bar')
+ self._test_read(mailbox._ProxyFile(self._file))
+
+ def test_readline(self):
+ self._file.write('foo%sbar%sfred%sbob' % (os.linesep, os.linesep,
+ os.linesep))
+ self._test_readline(mailbox._ProxyFile(self._file))
+
+ def test_readlines(self):
+ self._file.write('foo%sbar%sfred%sbob' % (os.linesep, os.linesep,
+ os.linesep))
+ self._test_readlines(mailbox._ProxyFile(self._file))
+
+ def test_iteration(self):
+ self._file.write('foo%sbar%sfred%sbob' % (os.linesep, os.linesep,
+ os.linesep))
+ self._test_iteration(mailbox._ProxyFile(self._file))
+
+ def test_seek_and_tell(self):
+ self._file.write('foo%sbar%s' % (os.linesep, os.linesep))
+ self._test_seek_and_tell(mailbox._ProxyFile(self._file))
+
+ def test_close(self):
+ self._file.write('foo%sbar%s' % (os.linesep, os.linesep))
+ self._test_close(mailbox._ProxyFile(self._file))
+
+
+class TestPartialFile(TestProxyFileBase):
+
+ def setUp(self):
+ self._path = test_support.TESTFN
+ self._file = open(self._path, 'wb+')
+
+ def tearDown(self):
+ self._file.close()
+ self._delete_recursively(self._path)
+
+ def test_initialize(self):
+ # Initialize and check position
+ self._file.write('foo' + os.linesep + 'bar')
+ pos = self._file.tell()
+ proxy = mailbox._PartialFile(self._file, 2, 5)
+ self.assertEqual(proxy.tell(), 0)
+ self.assertEqual(self._file.tell(), pos)
+
+ def test_read(self):
+ self._file.write('***bar***')
+ self._test_read(mailbox._PartialFile(self._file, 3, 6))
+
+ def test_readline(self):
+ self._file.write('!!!!!foo%sbar%sfred%sbob!!!!!' %
+ (os.linesep, os.linesep, os.linesep))
+ self._test_readline(mailbox._PartialFile(self._file, 5,
+ 18 + 3 * len(os.linesep)))
+
+ def test_readlines(self):
+ self._file.write('foo%sbar%sfred%sbob?????' %
+ (os.linesep, os.linesep, os.linesep))
+ self._test_readlines(mailbox._PartialFile(self._file, 0,
+ 13 + 3 * len(os.linesep)))
+
+ def test_iteration(self):
+ self._file.write('____foo%sbar%sfred%sbob####' %
+ (os.linesep, os.linesep, os.linesep))
+ self._test_iteration(mailbox._PartialFile(self._file, 4,
+ 17 + 3 * len(os.linesep)))
+
+ def test_seek_and_tell(self):
+ self._file.write('(((foo%sbar%s$$$' % (os.linesep, os.linesep))
+ self._test_seek_and_tell(mailbox._PartialFile(self._file, 3,
+ 9 + 2 * len(os.linesep)))
+
+ def test_close(self):
+ self._file.write('&foo%sbar%s^' % (os.linesep, os.linesep))
+ self._test_close(mailbox._PartialFile(self._file, 1,
+ 6 + 3 * len(os.linesep)))
+
+
+## Start: tests from the original module (for backward compatibility).
+
+FROM_ = "From some.body@dummy.domain Sat Jul 24 13:43:35 2004\n"
+DUMMY_MESSAGE = """\
+From: some.body@dummy.domain
+To: me@my.domain
+Subject: Simple Test
+
+This is a dummy message.
+"""
+
+class MaildirTestCase(unittest.TestCase):
+
+ def setUp(self):
+ # create a new maildir mailbox to work with:
+ self._dir = test_support.TESTFN
+ os.mkdir(self._dir)
+ os.mkdir(os.path.join(self._dir, "cur"))
+ os.mkdir(os.path.join(self._dir, "tmp"))
+ os.mkdir(os.path.join(self._dir, "new"))
+ self._counter = 1
+ self._msgfiles = []
+
+ def tearDown(self):
+ map(os.unlink, self._msgfiles)
+ os.rmdir(os.path.join(self._dir, "cur"))
+ os.rmdir(os.path.join(self._dir, "tmp"))
+ os.rmdir(os.path.join(self._dir, "new"))
+ os.rmdir(self._dir)
+
+ def createMessage(self, dir, mbox=False):
+ t = int(time.time() % 1000000)
+ pid = self._counter
+ self._counter += 1
+ filename = os.extsep.join((str(t), str(pid), "myhostname", "mydomain"))
+ tmpname = os.path.join(self._dir, "tmp", filename)
+ newname = os.path.join(self._dir, dir, filename)
+ with open(tmpname, "w") as fp:
+ self._msgfiles.append(tmpname)
+ if mbox:
+ fp.write(FROM_)
+ fp.write(DUMMY_MESSAGE)
+ if hasattr(os, "link"):
+ os.link(tmpname, newname)
+ else:
+ with open(newname, "w") as fp:
+ fp.write(DUMMY_MESSAGE)
+ self._msgfiles.append(newname)
+ return tmpname
+
+ def test_empty_maildir(self):
+ """Test an empty maildir mailbox"""
+ # Test for regression on bug #117490:
+ # Make sure the boxes attribute actually gets set.
+ self.mbox = mailbox.Maildir(test_support.TESTFN)
+ #self.assertTrue(hasattr(self.mbox, "boxes"))
+ #self.assertTrue(len(self.mbox.boxes) == 0)
+ self.assertIs(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+
+ def test_nonempty_maildir_cur(self):
+ self.createMessage("cur")
+ self.mbox = mailbox.Maildir(test_support.TESTFN)
+ #self.assertTrue(len(self.mbox.boxes) == 1)
+ self.assertIsNot(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+
+ def test_nonempty_maildir_new(self):
+ self.createMessage("new")
+ self.mbox = mailbox.Maildir(test_support.TESTFN)
+ #self.assertTrue(len(self.mbox.boxes) == 1)
+ self.assertIsNot(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+
+ def test_nonempty_maildir_both(self):
+ self.createMessage("cur")
+ self.createMessage("new")
+ self.mbox = mailbox.Maildir(test_support.TESTFN)
+ #self.assertTrue(len(self.mbox.boxes) == 2)
+ self.assertIsNot(self.mbox.next(), None)
+ self.assertIsNot(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+ self.assertIs(self.mbox.next(), None)
+
+ def test_unix_mbox(self):
+ ### should be better!
+ import email.parser
+ fname = self.createMessage("cur", True)
+ n = 0
+ for msg in mailbox.PortableUnixMailbox(open(fname),
+ email.parser.Parser().parse):
+ n += 1
+ self.assertEqual(msg["subject"], "Simple Test")
+ self.assertEqual(len(str(msg)), len(FROM_)+len(DUMMY_MESSAGE))
+ self.assertEqual(n, 1)
+
+## End: classes from the original module (for backward compatibility).
+
+
+_sample_message = """\
+Return-Path: <gkj@gregorykjohnson.com>
+X-Original-To: gkj+person@localhost
+Delivered-To: gkj+person@localhost
+Received: from localhost (localhost [127.0.0.1])
+	by andy.gregorykjohnson.com (Postfix) with ESMTP id 356ED9DD17
+	for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)
+Delivered-To: gkj@sundance.gregorykjohnson.com
+Received: from localhost [127.0.0.1]
+	by localhost with POP3 (fetchmail-6.2.5)
+	for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)
+Received: from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228])
+	by sundance.gregorykjohnson.com (Postfix) with ESMTP id 5B056316746
+	for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)
+Received: by andy.gregorykjohnson.com (Postfix, from userid 1000)
+	id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)
+Date: Wed, 13 Jul 2005 17:23:11 -0400
+From: "Gregory K. Johnson" <gkj@gregorykjohnson.com>
+To: gkj@gregorykjohnson.com
+Subject: Sample message
+Message-ID: <20050713212311.GC4701 at andy.gregorykjohnson.com>
+Mime-Version: 1.0
+Content-Type: multipart/mixed; boundary="NMuMz9nt05w80d4+"
+Content-Disposition: inline
+User-Agent: Mutt/1.5.9i
+
+
+--NMuMz9nt05w80d4+
+Content-Type: text/plain; charset=us-ascii
+Content-Disposition: inline
+
+This is a sample message.
+
+--
+Gregory K. Johnson
+
+--NMuMz9nt05w80d4+
+Content-Type: application/octet-stream
+Content-Disposition: attachment; filename="text.gz"
+Content-Transfer-Encoding: base64
+
+H4sICM2D1UIAA3RleHQAC8nILFYAokSFktSKEoW0zJxUPa7wzJIMhZLyfIWczLzUYj0uAHTs
+3FYlAAAA
+
+--NMuMz9nt05w80d4+--
+"""
+
+_sample_headers = {
+    "Return-Path":"<gkj@gregorykjohnson.com>",
+    "X-Original-To":"gkj+person@localhost",
+    "Delivered-To":"gkj+person@localhost",
+    "Received":"""from localhost (localhost [127.0.0.1])
+	by andy.gregorykjohnson.com (Postfix) with ESMTP id 356ED9DD17
+	for <gkj+person@localhost>; Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""",
+    "Delivered-To":"gkj@sundance.gregorykjohnson.com",
+    "Received":"""from localhost [127.0.0.1]
+	by localhost with POP3 (fetchmail-6.2.5)
+	for gkj+person@localhost (single-drop); Wed, 13 Jul 2005 17:23:16 -0400 (EDT)""",
+    "Received":"""from andy.gregorykjohnson.com (andy.gregorykjohnson.com [64.32.235.228])
+	by sundance.gregorykjohnson.com (Postfix) with ESMTP id 5B056316746
+	for <gkj@gregorykjohnson.com>; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""",
+    "Received":"""by andy.gregorykjohnson.com (Postfix, from userid 1000)
+	id 490CD9DD17; Wed, 13 Jul 2005 17:23:11 -0400 (EDT)""",
+    "Date":"Wed, 13 Jul 2005 17:23:11 -0400",
+    "From":"\"Gregory K. Johnson\" <gkj@gregorykjohnson.com>",
+    "To":"gkj@gregorykjohnson.com",
+ "Subject":"Sample message",
+ "Mime-Version":"1.0",
+ "Content-Type":"""multipart/mixed; boundary="NMuMz9nt05w80d4+\"""",
+ "Content-Disposition":"inline",
+ "User-Agent": "Mutt/1.5.9i" }
+
+_sample_payloads = ("""This is a sample message.
+
+--
+Gregory K. Johnson
+""",
+"""H4sICM2D1UIAA3RleHQAC8nILFYAokSFktSKEoW0zJxUPa7wzJIMhZLyfIWczLzUYj0uAHTs
+3FYlAAAA
+""")
+
+
+def test_main():
+ tests = (TestMailboxSuperclass, TestMaildir, TestMbox, TestMMDF, TestMH,
+ TestBabyl, TestMessage, TestMaildirMessage, TestMboxMessage,
+ TestMHMessage, TestBabylMessage, TestMMDFMessage,
+ TestMessageConversion, TestProxyFile, TestPartialFile,
+ MaildirTestCase)
+ test_support.run_unittest(*tests)
+ test_support.reap_children()
+
+
+if __name__ == '__main__':
+ test_main()
diff --git a/lib-python/2.7/test/test_old_mailbox.py b/lib-python/2.7/test/test_old_mailbox.py
new file mode 100644
--- /dev/null
+++ b/lib-python/2.7/test/test_old_mailbox.py
@@ -0,0 +1,152 @@
+# This set of tests exercises the backward-compatibility class
+# in mailbox.py (the ones without write support).
+
+import mailbox
+import os
+import time
+import unittest
+from test import test_support
+
+# cleanup earlier tests
+try:
+    os.unlink(test_support.TESTFN)
+except os.error:
+    pass
+
+# NOTE: addresses below use a literal "@" — the mailbox from-line regexes
+# require a real addr-spec in "From <addr> Www Mmm dd hh:mm:ss yyyy" lines.
+FROM_ = "From some.body@dummy.domain Sat Jul 24 13:43:35 2004\n"
+DUMMY_MESSAGE = """\
+From: some.body@dummy.domain
+To: me@my.domain
+Subject: Simple Test
+
+This is a dummy message.
+"""
+
+class MaildirTestCase(unittest.TestCase):
+
+    def setUp(self):
+        # create a new maildir mailbox to work with:
+        self._dir = test_support.TESTFN
+        os.mkdir(self._dir)
+        os.mkdir(os.path.join(self._dir, "cur"))
+        os.mkdir(os.path.join(self._dir, "tmp"))
+        os.mkdir(os.path.join(self._dir, "new"))
+        self._counter = 1
+        self._msgfiles = []
+
+    def tearDown(self):
+        map(os.unlink, self._msgfiles)
+        os.rmdir(os.path.join(self._dir, "cur"))
+        os.rmdir(os.path.join(self._dir, "tmp"))
+        os.rmdir(os.path.join(self._dir, "new"))
+        os.rmdir(self._dir)
+
+    def createMessage(self, dir, mbox=False):
+        t = int(time.time() % 1000000)
+        pid = self._counter
+        self._counter += 1
+        filename = os.extsep.join((str(t), str(pid), "myhostname", "mydomain"))
+        tmpname = os.path.join(self._dir, "tmp", filename)
+        newname = os.path.join(self._dir, dir, filename)
+        with open(tmpname, "w") as fp:
+            self._msgfiles.append(tmpname)
+            if mbox:
+                fp.write(FROM_)
+            fp.write(DUMMY_MESSAGE)
+        if hasattr(os, "link"):
+            os.link(tmpname, newname)
+        else:
+            with open(newname, "w") as fp:
+                fp.write(DUMMY_MESSAGE)
+        self._msgfiles.append(newname)
+        return tmpname
+
+    def test_empty_maildir(self):
+        """Test an empty maildir mailbox"""
+        # Test for regression on bug #117490:
+        self.mbox = mailbox.Maildir(test_support.TESTFN)
+        self.assertTrue(len(self.mbox) == 0)
+        self.assertTrue(self.mbox.next() is None)
+        self.assertTrue(self.mbox.next() is None)
+
+    def test_nonempty_maildir_cur(self):
+        self.createMessage("cur")
+        self.mbox = mailbox.Maildir(test_support.TESTFN)
+        self.assertTrue(len(self.mbox) == 1)
+        self.assertTrue(self.mbox.next() is not None)
+        self.assertTrue(self.mbox.next() is None)
+        self.assertTrue(self.mbox.next() is None)
+
+    def test_nonempty_maildir_new(self):
+        self.createMessage("new")
+        self.mbox = mailbox.Maildir(test_support.TESTFN)
+        self.assertTrue(len(self.mbox) == 1)
+        self.assertTrue(self.mbox.next() is not None)
+        self.assertTrue(self.mbox.next() is None)
+        self.assertTrue(self.mbox.next() is None)
+
+    def test_nonempty_maildir_both(self):
+        self.createMessage("cur")
+        self.createMessage("new")
+        self.mbox = mailbox.Maildir(test_support.TESTFN)
+        self.assertTrue(len(self.mbox) == 2)
+        self.assertTrue(self.mbox.next() is not None)
+        self.assertTrue(self.mbox.next() is not None)
+        self.assertTrue(self.mbox.next() is None)
+        self.assertTrue(self.mbox.next() is None)
+
+    def test_unix_mbox(self):
+        ### should be better!
+        import email.parser
+        fname = self.createMessage("cur", True)
+        n = 0
+        with open(fname) as f:
+            for msg in mailbox.PortableUnixMailbox(f,
+                                                   email.parser.Parser().parse):
+                n += 1
+                self.assertEqual(msg["subject"], "Simple Test")
+                self.assertEqual(len(str(msg)), len(FROM_)+len(DUMMY_MESSAGE))
+        self.assertEqual(n, 1)
+
+class MboxTestCase(unittest.TestCase):
+    def setUp(self):
+        # create a new maildir mailbox to work with:
+        self._path = test_support.TESTFN
+
+    def tearDown(self):
+        os.unlink(self._path)
+
+    def test_from_regex (self):
+        # Testing new regex from bug #1633678
+        with open(self._path, 'w') as f:
+            f.write("""From fred@example.com Mon May 31 13:24:50 2004 +0200
+Subject: message 1
+
+body1
+From fred@example.com Mon May 31 13:24:50 2004 -0200
+Subject: message 2
+
+body2
+From fred@example.com Mon May 31 13:24:50 2004
+Subject: message 3
+
+body3
+From fred@example.com Mon May 31 13:24:50 2004
+Subject: message 4
+
+body4
+""")
+        with open(self._path, 'r') as f:
+            box = mailbox.UnixMailbox(f)
+            self.assertTrue(len(list(iter(box))) == 4)
+
+
+    # XXX We still need more tests!
+
+
+def test_main():
+    test_support.run_unittest(MaildirTestCase, MboxTestCase)
+
+
+if __name__ == "__main__":
+    test_main()
diff --git a/lib-python/2.7/test/test_os.py b/lib-python/2.7/test/test_os.py
new file mode 100644
--- /dev/null
+++ b/lib-python/2.7/test/test_os.py
@@ -0,0 +1,824 @@
+# As a test suite for the os module, this is woefully inadequate, but this
+# does add tests for a few functions which have been determined to be more
+# portable than they had been thought to be.
+
+import os
+import errno
+import unittest
+import warnings
+import sys
+import signal
+import subprocess
+import time
+from test import test_support
+import mmap
+import uuid
+
+warnings.filterwarnings("ignore", "tempnam", RuntimeWarning, __name__)
+warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning, __name__)
+
+# Tests creating TESTFN
+class FileTests(unittest.TestCase):
+ def setUp(self):
+ if os.path.exists(test_support.TESTFN):
+ os.unlink(test_support.TESTFN)
+ tearDown = setUp
+
+ def test_access(self):
+ f = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR)
+ os.close(f)
+ self.assertTrue(os.access(test_support.TESTFN, os.W_OK))
+
+ def test_closerange(self):
+ first = os.open(test_support.TESTFN, os.O_CREAT|os.O_RDWR)
+ # We must allocate two consecutive file descriptors, otherwise
+ # it will mess up other file descriptors (perhaps even the three
+ # standard ones).
+ second = os.dup(first)
+ try:
+ retries = 0
+ while second != first + 1:
+ os.close(first)
+ retries += 1
+ if retries > 10:
+ # XXX test skipped
+ self.skipTest("couldn't allocate two consecutive fds")
+ first, second = second, os.dup(second)
+ finally:
+ os.close(second)
+ # close a fd that is open, and one that isn't
+ os.closerange(first, first + 2)
+ self.assertRaises(OSError, os.write, first, "a")
+
+ @test_support.cpython_only
+ def test_rename(self):
+ path = unicode(test_support.TESTFN)
+ old = sys.getrefcount(path)
+ self.assertRaises(TypeError, os.rename, path, 0)
+ new = sys.getrefcount(path)
+ self.assertEqual(old, new)
+
+
+class TemporaryFileTests(unittest.TestCase):
+ def setUp(self):
+ self.files = []
+ os.mkdir(test_support.TESTFN)
+
+ def tearDown(self):
+ for name in self.files:
+ os.unlink(name)
+ os.rmdir(test_support.TESTFN)
+
+ def check_tempfile(self, name):
+ # make sure it doesn't already exist:
+ self.assertFalse(os.path.exists(name),
+ "file already exists for temporary file")
+ # make sure we can create the file
+ open(name, "w")
+ self.files.append(name)
+
+ def test_tempnam(self):
+ if not hasattr(os, "tempnam"):
+ return
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", "tempnam", RuntimeWarning,
+ r"test_os$")
+ warnings.filterwarnings("ignore", "tempnam", DeprecationWarning)
+ self.check_tempfile(os.tempnam())
+
+ name = os.tempnam(test_support.TESTFN)
+ self.check_tempfile(name)
+
+ name = os.tempnam(test_support.TESTFN, "pfx")
+ self.assertTrue(os.path.basename(name)[:3] == "pfx")
+ self.check_tempfile(name)
+
+ def test_tmpfile(self):
+ if not hasattr(os, "tmpfile"):
+ return
+ # As with test_tmpnam() below, the Windows implementation of tmpfile()
+ # attempts to create a file in the root directory of the current drive.
+ # On Vista and Server 2008, this test will always fail for normal users
+ # as writing to the root directory requires elevated privileges. With
+ # XP and below, the semantics of tmpfile() are the same, but the user
+ # running the test is more likely to have administrative privileges on
+ # their account already. If that's the case, then os.tmpfile() should
+ # work. In order to make this test as useful as possible, rather than
+ # trying to detect Windows versions or whether or not the user has the
+ # right permissions, just try and create a file in the root directory
+ # and see if it raises a 'Permission denied' OSError. If it does, then
+ # test that a subsequent call to os.tmpfile() raises the same error. If
+ # it doesn't, assume we're on XP or below and the user running the test
+ # has administrative privileges, and proceed with the test as normal.
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", "tmpfile", DeprecationWarning)
+
+ if sys.platform == 'win32':
+ name = '\\python_test_os_test_tmpfile.txt'
+ if os.path.exists(name):
+ os.remove(name)
+ try:
+ fp = open(name, 'w')
+ except IOError, first:
+ # open() failed, assert tmpfile() fails in the same way.
+ # Although open() raises an IOError and os.tmpfile() raises an
+ # OSError(), 'args' will be (13, 'Permission denied') in both
+ # cases.
+ try:
+ fp = os.tmpfile()
+ except OSError, second:
+ self.assertEqual(first.args, second.args)
+ else:
+ self.fail("expected os.tmpfile() to raise OSError")
+ return
+ else:
+ # open() worked, therefore, tmpfile() should work. Close our
+ # dummy file and proceed with the test as normal.
+ fp.close()
+ os.remove(name)
+
+ fp = os.tmpfile()
+ fp.write("foobar")
+ fp.seek(0,0)
+ s = fp.read()
+ fp.close()
+ self.assertTrue(s == "foobar")
+
+ def test_tmpnam(self):
+ if not hasattr(os, "tmpnam"):
+ return
+ with warnings.catch_warnings():
+ warnings.filterwarnings("ignore", "tmpnam", RuntimeWarning,
+ r"test_os$")
+ warnings.filterwarnings("ignore", "tmpnam", DeprecationWarning)
+
+ name = os.tmpnam()
+ if sys.platform in ("win32",):
+ # The Windows tmpnam() seems useless. From the MS docs:
+ #
+ # The character string that tmpnam creates consists of
+ # the path prefix, defined by the entry P_tmpdir in the
+ # file STDIO.H, followed by a sequence consisting of the
+ # digit characters '0' through '9'; the numerical value
+ # of this string is in the range 1 - 65,535. Changing the
+ # definitions of L_tmpnam or P_tmpdir in STDIO.H does not
+ # change the operation of tmpnam.
+ #
+ # The really bizarre part is that, at least under MSVC6,
+ # P_tmpdir is "\\". That is, the path returned refers to
+ # the root of the current drive. That's a terrible place to
+ # put temp files, and, depending on privileges, the user
+ # may not even be able to open a file in the root directory.
+ self.assertFalse(os.path.exists(name),
+ "file already exists for temporary file")
+ else:
+ self.check_tempfile(name)
+
+# Test attributes on return values from os.*stat* family.
+class StatAttributeTests(unittest.TestCase):
+ def setUp(self):
+ os.mkdir(test_support.TESTFN)
+ self.fname = os.path.join(test_support.TESTFN, "f1")
+ f = open(self.fname, 'wb')
+ f.write("ABC")
+ f.close()
+
+ def tearDown(self):
+ os.unlink(self.fname)
+ os.rmdir(test_support.TESTFN)
+
+ def test_stat_attributes(self):
+ if not hasattr(os, "stat"):
+ return
+
+ import stat
+ result = os.stat(self.fname)
+
+ # Make sure direct access works
+ self.assertEqual(result[stat.ST_SIZE], 3)
+ self.assertEqual(result.st_size, 3)
+
+ # Make sure all the attributes are there
+ members = dir(result)
+ for name in dir(stat):
+ if name[:3] == 'ST_':
+ attr = name.lower()
+ if name.endswith("TIME"):
+ def trunc(x): return int(x)
+ else:
+ def trunc(x): return x
+ self.assertEqual(trunc(getattr(result, attr)),
+ result[getattr(stat, name)])
+ self.assertIn(attr, members)
+
+ try:
+ result[200]
+ self.fail("No exception thrown")
+ except IndexError:
+ pass
+
+ # Make sure that assignment fails
+ try:
+ result.st_mode = 1
+ self.fail("No exception thrown")
+ except (AttributeError, TypeError):
+ pass
+
+ try:
+ result.st_rdev = 1
+ self.fail("No exception thrown")
+ except (AttributeError, TypeError):
+ pass
+
+ try:
+ result.parrot = 1
+ self.fail("No exception thrown")
+ except AttributeError:
+ pass
+
+ # Use the stat_result constructor with a too-short tuple.
+ try:
+ result2 = os.stat_result((10,))
+ self.fail("No exception thrown")
+ except TypeError:
+ pass
+
+ # Use the constructor with a too-long tuple.
+ try:
+ result2 = os.stat_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
+ except TypeError:
+ pass
+
+
+ def test_statvfs_attributes(self):
+ if not hasattr(os, "statvfs"):
+ return
+
+ try:
+ result = os.statvfs(self.fname)
+ except OSError, e:
+ # On AtheOS, glibc always returns ENOSYS
+ if e.errno == errno.ENOSYS:
+ return
+
+ # Make sure direct access works
+ self.assertEqual(result.f_bfree, result[3])
+
+ # Make sure all the attributes are there.
+ members = ('bsize', 'frsize', 'blocks', 'bfree', 'bavail', 'files',
+ 'ffree', 'favail', 'flag', 'namemax')
+ for value, member in enumerate(members):
+ self.assertEqual(getattr(result, 'f_' + member), result[value])
+
+ # Make sure that assignment really fails
+ try:
+ result.f_bfree = 1
+ self.fail("No exception thrown")
+ except TypeError:
+ pass
+
+ try:
+ result.parrot = 1
+ self.fail("No exception thrown")
+ except AttributeError:
+ pass
+
+ # Use the constructor with a too-short tuple.
+ try:
+ result2 = os.statvfs_result((10,))
+ self.fail("No exception thrown")
+ except TypeError:
+ pass
+
+ # Use the constructor with a too-long tuple.
+ try:
+ result2 = os.statvfs_result((0,1,2,3,4,5,6,7,8,9,10,11,12,13,14))
+ except TypeError:
+ pass
+
+ def test_utime_dir(self):
+ delta = 1000000
+ st = os.stat(test_support.TESTFN)
+ # round to int, because some systems may support sub-second
+ # time stamps in stat, but not in utime.
+ os.utime(test_support.TESTFN, (st.st_atime, int(st.st_mtime-delta)))
+ st2 = os.stat(test_support.TESTFN)
+ self.assertEqual(st2.st_mtime, int(st.st_mtime-delta))
+
+ # Restrict test to Win32, since there is no guarantee other
+ # systems support centiseconds
+ if sys.platform == 'win32':
+ def get_file_system(path):
+ root = os.path.splitdrive(os.path.abspath(path))[0] + '\\'
+ import ctypes
+ kernel32 = ctypes.windll.kernel32
+ buf = ctypes.create_string_buffer("", 100)
+ if kernel32.GetVolumeInformationA(root, None, 0, None, None, None, buf, len(buf)):
+ return buf.value
+
+ if get_file_system(test_support.TESTFN) == "NTFS":
+ def test_1565150(self):
+ t1 = 1159195039.25
+ os.utime(self.fname, (t1, t1))
+ self.assertEqual(os.stat(self.fname).st_mtime, t1)
+
+ def test_large_time(self):
+ t1 = 5000000000 # some day in 2128
+ os.utime(self.fname, (t1, t1))
+ self.assertEqual(os.stat(self.fname).st_mtime, t1)
+
+ def test_1686475(self):
+ # Verify that an open file can be stat'ed
+ try:
+ os.stat(r"c:\pagefile.sys")
+ except WindowsError, e:
+ if e.errno == 2: # file does not exist; cannot run test
+ return
+ self.fail("Could not stat pagefile.sys")
+
+from test import mapping_tests
+
+class EnvironTests(mapping_tests.BasicTestMappingProtocol):
+ """check that os.environ object conform to mapping protocol"""
+ type2test = None
+ def _reference(self):
+ return {"KEY1":"VALUE1", "KEY2":"VALUE2", "KEY3":"VALUE3"}
+ def _empty_mapping(self):
+ os.environ.clear()
+ return os.environ
+ def setUp(self):
+ self.__save = dict(os.environ)
+ os.environ.clear()
+ def tearDown(self):
+ os.environ.clear()
+ os.environ.update(self.__save)
+
+ # Bug 1110478
+ def test_update2(self):
+ if os.path.exists("/bin/sh"):
+ os.environ.update(HELLO="World")
+ with os.popen("/bin/sh -c 'echo $HELLO'") as popen:
+ value = popen.read().strip()
+ self.assertEqual(value, "World")
+
+class WalkTests(unittest.TestCase):
+ """Tests for os.walk()."""
+
+ def test_traversal(self):
+ import os
+ from os.path import join
+
+ # Build:
+ # TESTFN/
+ # TEST1/ a file kid and two directory kids
+ # tmp1
+ # SUB1/ a file kid and a directory kid
+ # tmp2
+ # SUB11/ no kids
+ # SUB2/ a file kid and a dirsymlink kid
+ # tmp3
+ # link/ a symlink to TESTFN.2
+ # TEST2/
+ # tmp4 a lone file
+ walk_path = join(test_support.TESTFN, "TEST1")
+ sub1_path = join(walk_path, "SUB1")
+ sub11_path = join(sub1_path, "SUB11")
+ sub2_path = join(walk_path, "SUB2")
+ tmp1_path = join(walk_path, "tmp1")
+ tmp2_path = join(sub1_path, "tmp2")
+ tmp3_path = join(sub2_path, "tmp3")
+ link_path = join(sub2_path, "link")
+ t2_path = join(test_support.TESTFN, "TEST2")
+ tmp4_path = join(test_support.TESTFN, "TEST2", "tmp4")
+
+ # Create stuff.
+ os.makedirs(sub11_path)
+ os.makedirs(sub2_path)
+ os.makedirs(t2_path)
+ for path in tmp1_path, tmp2_path, tmp3_path, tmp4_path:
+ f = file(path, "w")
+ f.write("I'm " + path + " and proud of it. Blame test_os.\n")
+ f.close()
+ if hasattr(os, "symlink"):
+ os.symlink(os.path.abspath(t2_path), link_path)
+ sub2_tree = (sub2_path, ["link"], ["tmp3"])
+ else:
+ sub2_tree = (sub2_path, [], ["tmp3"])
+
+ # Walk top-down.
+ all = list(os.walk(walk_path))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: TESTFN, SUB1, SUB11, SUB2
+ # flipped: TESTFN, SUB2, SUB1, SUB11
+ flipped = all[0][1][0] != "SUB1"
+ all[0][1].sort()
+ self.assertEqual(all[0], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[1 + flipped], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 + flipped], (sub11_path, [], []))
+ self.assertEqual(all[3 - 2 * flipped], sub2_tree)
+
+ # Prune the search.
+ all = []
+ for root, dirs, files in os.walk(walk_path):
+ all.append((root, dirs, files))
+ # Don't descend into SUB1.
+ if 'SUB1' in dirs:
+ # Note that this also mutates the dirs we appended to all!
+ dirs.remove('SUB1')
+ self.assertEqual(len(all), 2)
+ self.assertEqual(all[0], (walk_path, ["SUB2"], ["tmp1"]))
+ self.assertEqual(all[1], sub2_tree)
+
+ # Walk bottom-up.
+ all = list(os.walk(walk_path, topdown=False))
+ self.assertEqual(len(all), 4)
+ # We can't know which order SUB1 and SUB2 will appear in.
+ # Not flipped: SUB11, SUB1, SUB2, TESTFN
+ # flipped: SUB2, SUB11, SUB1, TESTFN
+ flipped = all[3][1][0] != "SUB1"
+ all[3][1].sort()
+ self.assertEqual(all[3], (walk_path, ["SUB1", "SUB2"], ["tmp1"]))
+ self.assertEqual(all[flipped], (sub11_path, [], []))
+ self.assertEqual(all[flipped + 1], (sub1_path, ["SUB11"], ["tmp2"]))
+ self.assertEqual(all[2 - 2 * flipped], sub2_tree)
+
+ if hasattr(os, "symlink"):
+ # Walk, following symlinks.
+ for root, dirs, files in os.walk(walk_path, followlinks=True):
+ if root == link_path:
+ self.assertEqual(dirs, [])
+ self.assertEqual(files, ["tmp4"])
+ break
+ else:
+ self.fail("Didn't follow symlink with followlinks=True")
+
+ def tearDown(self):
+ # Tear everything down. This is a decent use for bottom-up on
+ # Windows, which doesn't have a recursive delete command. The
+ # (not so) subtlety is that rmdir will fail unless the dir's
+ # kids are removed first, so bottom up is essential.
+ for root, dirs, files in os.walk(test_support.TESTFN, topdown=False):
+ for name in files:
+ os.remove(os.path.join(root, name))
+ for name in dirs:
+ dirname = os.path.join(root, name)
+ if not os.path.islink(dirname):
+ os.rmdir(dirname)
+ else:
+ os.remove(dirname)
+ os.rmdir(test_support.TESTFN)
+
+class MakedirTests (unittest.TestCase):
+ def setUp(self):
+ os.mkdir(test_support.TESTFN)
+
+ def test_makedir(self):
+ base = test_support.TESTFN
+ path = os.path.join(base, 'dir1', 'dir2', 'dir3')
+ os.makedirs(path) # Should work
+ path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4')
+ os.makedirs(path)
+
+ # Try paths with a '.' in them
+ self.assertRaises(OSError, os.makedirs, os.curdir)
+ path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', os.curdir)
+ os.makedirs(path)
+ path = os.path.join(base, 'dir1', os.curdir, 'dir2', 'dir3', 'dir4',
+ 'dir5', 'dir6')
+ os.makedirs(path)
+
+
+
+
+ def tearDown(self):
+ path = os.path.join(test_support.TESTFN, 'dir1', 'dir2', 'dir3',
+ 'dir4', 'dir5', 'dir6')
+ # If the tests failed, the bottom-most directory ('../dir6')
+ # may not have been created, so we look for the outermost directory
+ # that exists.
+ while not os.path.exists(path) and path != test_support.TESTFN:
+ path = os.path.dirname(path)
+
+ os.removedirs(path)
+
+class DevNullTests (unittest.TestCase):
+ def test_devnull(self):
+ f = file(os.devnull, 'w')
+ f.write('hello')
+ f.close()
+ f = file(os.devnull, 'r')
+ self.assertEqual(f.read(), '')
+ f.close()
+
+class URandomTests (unittest.TestCase):
+ def test_urandom(self):
+ try:
+ self.assertEqual(len(os.urandom(1)), 1)
+ self.assertEqual(len(os.urandom(10)), 10)
+ self.assertEqual(len(os.urandom(100)), 100)
+ self.assertEqual(len(os.urandom(1000)), 1000)
+ # see http://bugs.python.org/issue3708
+ self.assertRaises(TypeError, os.urandom, 0.9)
+ self.assertRaises(TypeError, os.urandom, 1.1)
+ self.assertRaises(TypeError, os.urandom, 2.0)
+ except NotImplementedError:
+ pass
+
+ def test_execvpe_with_bad_arglist(self):
+ self.assertRaises(ValueError, os.execvpe, 'notepad', [], None)
+
+class Win32ErrorTests(unittest.TestCase):
+ def test_rename(self):
+ self.assertRaises(WindowsError, os.rename, test_support.TESTFN, test_support.TESTFN+".bak")
+
+ def test_remove(self):
+ self.assertRaises(WindowsError, os.remove, test_support.TESTFN)
+
+ def test_chdir(self):
+ self.assertRaises(WindowsError, os.chdir, test_support.TESTFN)
+
+ def test_mkdir(self):
+ f = open(test_support.TESTFN, "w")
+ try:
+ self.assertRaises(WindowsError, os.mkdir, test_support.TESTFN)
+ finally:
+ f.close()
+ os.unlink(test_support.TESTFN)
+
+ def test_utime(self):
+ self.assertRaises(WindowsError, os.utime, test_support.TESTFN, None)
+
+ def test_chmod(self):
+ self.assertRaises(WindowsError, os.chmod, test_support.TESTFN, 0)
+
+class TestInvalidFD(unittest.TestCase):
+ singles = ["fchdir", "fdopen", "dup", "fdatasync", "fstat",
+ "fstatvfs", "fsync", "tcgetpgrp", "ttyname"]
+ #singles.append("close")
+ #We omit close because it doesn't raise an exception on some platforms
+ def get_single(f):
+ def helper(self):
+ if hasattr(os, f):
+ self.check(getattr(os, f))
+ return helper
+ for f in singles:
+ locals()["test_"+f] = get_single(f)
+
+ def check(self, f, *args):
+ try:
+ f(test_support.make_bad_fd(), *args)
+ except OSError as e:
+ self.assertEqual(e.errno, errno.EBADF)
+ else:
+ self.fail("%r didn't raise a OSError with a bad file descriptor"
+ % f)
+
+ def test_isatty(self):
+ if hasattr(os, "isatty"):
+ self.assertEqual(os.isatty(test_support.make_bad_fd()), False)
+
+ def test_closerange(self):
+ if hasattr(os, "closerange"):
+ fd = test_support.make_bad_fd()
+ # Make sure none of the descriptors we are about to close are
+ # currently valid (issue 6542).
+ for i in range(10):
+ try: os.fstat(fd+i)
+ except OSError:
+ pass
+ else:
+ break
+ if i < 2:
+ raise unittest.SkipTest(
+ "Unable to acquire a range of invalid file descriptors")
+ self.assertEqual(os.closerange(fd, fd + i-1), None)
+
+ def test_dup2(self):
+ if hasattr(os, "dup2"):
+ self.check(os.dup2, 20)
+
+ def test_fchmod(self):
+ if hasattr(os, "fchmod"):
+ self.check(os.fchmod, 0)
+
+ def test_fchown(self):
+ if hasattr(os, "fchown"):
+ self.check(os.fchown, -1, -1)
+
+ def test_fpathconf(self):
+ if hasattr(os, "fpathconf"):
+ self.check(os.fpathconf, "PC_NAME_MAX")
+
+ def test_ftruncate(self):
+ if hasattr(os, "ftruncate"):
+ self.check(os.ftruncate, 0)
+
+ def test_lseek(self):
+ if hasattr(os, "lseek"):
+ self.check(os.lseek, 0, 0)
+
+ def test_read(self):
+ if hasattr(os, "read"):
+ self.check(os.read, 1)
+
+ def test_tcsetpgrpt(self):
+ if hasattr(os, "tcsetpgrp"):
+ self.check(os.tcsetpgrp, 0)
+
+ def test_write(self):
+ if hasattr(os, "write"):
+ self.check(os.write, " ")
+
+if sys.platform != 'win32':
+ class Win32ErrorTests(unittest.TestCase):
+ pass
+
+ class PosixUidGidTests(unittest.TestCase):
+ if hasattr(os, 'setuid'):
+ def test_setuid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.setuid, 0)
+ self.assertRaises(OverflowError, os.setuid, 1<<32)
+
+ if hasattr(os, 'setgid'):
+ def test_setgid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.setgid, 0)
+ self.assertRaises(OverflowError, os.setgid, 1<<32)
+
+ if hasattr(os, 'seteuid'):
+ def test_seteuid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.seteuid, 0)
+ self.assertRaises(OverflowError, os.seteuid, 1<<32)
+
+ if hasattr(os, 'setegid'):
+ def test_setegid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.setegid, 0)
+ self.assertRaises(OverflowError, os.setegid, 1<<32)
+
+ if hasattr(os, 'setreuid'):
+ def test_setreuid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.setreuid, 0, 0)
+ self.assertRaises(OverflowError, os.setreuid, 1<<32, 0)
+ self.assertRaises(OverflowError, os.setreuid, 0, 1<<32)
+
+ def test_setreuid_neg1(self):
+ # Needs to accept -1. We run this in a subprocess to avoid
+ # altering the test runner's process state (issue8045).
+ subprocess.check_call([
+ sys.executable, '-c',
+ 'import os,sys;os.setreuid(-1,-1);sys.exit(0)'])
+
+ if hasattr(os, 'setregid'):
+ def test_setregid(self):
+ if os.getuid() != 0:
+ self.assertRaises(os.error, os.setregid, 0, 0)
+ self.assertRaises(OverflowError, os.setregid, 1<<32, 0)
+ self.assertRaises(OverflowError, os.setregid, 0, 1<<32)
+
+ def test_setregid_neg1(self):
+ # Needs to accept -1. We run this in a subprocess to avoid
+ # altering the test runner's process state (issue8045).
+ subprocess.check_call([
+ sys.executable, '-c',
+ 'import os,sys;os.setregid(-1,-1);sys.exit(0)'])
+else:
+ class PosixUidGidTests(unittest.TestCase):
+ pass
+
+@unittest.skipUnless(sys.platform == "win32", "Win32 specific tests")
+class Win32KillTests(unittest.TestCase):
+ def _kill(self, sig):
+ # Start sys.executable as a subprocess and communicate from the
+ # subprocess to the parent that the interpreter is ready. When it
+ # becomes ready, send *sig* via os.kill to the subprocess and check
+ # that the return code is equal to *sig*.
+ import ctypes
+ from ctypes import wintypes
+ import msvcrt
+
+ # Since we can't access the contents of the process' stdout until the
+ # process has exited, use PeekNamedPipe to see what's inside stdout
+ # without waiting. This is done so we can tell that the interpreter
+ # is started and running at a point where it could handle a signal.
+ PeekNamedPipe = ctypes.windll.kernel32.PeekNamedPipe
+ PeekNamedPipe.restype = wintypes.BOOL
+ PeekNamedPipe.argtypes = (wintypes.HANDLE, # Pipe handle
+ ctypes.POINTER(ctypes.c_char), # stdout buf
+ wintypes.DWORD, # Buffer size
+ ctypes.POINTER(wintypes.DWORD), # bytes read
+ ctypes.POINTER(wintypes.DWORD), # bytes avail
+ ctypes.POINTER(wintypes.DWORD)) # bytes left
+ msg = "running"
+ proc = subprocess.Popen([sys.executable, "-c",
+ "import sys;"
+ "sys.stdout.write('{}');"
+ "sys.stdout.flush();"
+ "input()".format(msg)],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ stdin=subprocess.PIPE)
+ self.addCleanup(proc.stdout.close)
+ self.addCleanup(proc.stderr.close)
+ self.addCleanup(proc.stdin.close)
+
+ count, max = 0, 100
+ while count < max and proc.poll() is None:
+ # Create a string buffer to store the result of stdout from the pipe
+ buf = ctypes.create_string_buffer(len(msg))
+ # Obtain the text currently in proc.stdout
+ # Bytes read/avail/left are left as NULL and unused
+ rslt = PeekNamedPipe(msvcrt.get_osfhandle(proc.stdout.fileno()),
+ buf, ctypes.sizeof(buf), None, None, None)
+ self.assertNotEqual(rslt, 0, "PeekNamedPipe failed")
+ if buf.value:
+ self.assertEqual(msg, buf.value)
+ break
+ time.sleep(0.1)
+ count += 1
+ else:
+ self.fail("Did not receive communication from the subprocess")
+
+ os.kill(proc.pid, sig)
+ self.assertEqual(proc.wait(), sig)
+
+ def test_kill_sigterm(self):
+ # SIGTERM doesn't mean anything special, but make sure it works
+ self._kill(signal.SIGTERM)
+
+ def test_kill_int(self):
+ # os.kill on Windows can take an int which gets set as the exit code
+ self._kill(100)
+
+ def _kill_with_event(self, event, name):
+ tagname = "test_os_%s" % uuid.uuid1()
+ m = mmap.mmap(-1, 1, tagname)
+ m[0] = '0'
+ # Run a script which has console control handling enabled.
+ proc = subprocess.Popen([sys.executable,
+ os.path.join(os.path.dirname(__file__),
+ "win_console_handler.py"), tagname],
+ creationflags=subprocess.CREATE_NEW_PROCESS_GROUP)
+ # Let the interpreter startup before we send signals. See #3137.
+ count, max = 0, 20
+ while count < max and proc.poll() is None:
+ if m[0] == '1':
+ break
+ time.sleep(0.5)
+ count += 1
+ else:
+ self.fail("Subprocess didn't finish initialization")
+ os.kill(proc.pid, event)
+ # proc.send_signal(event) could also be done here.
+ # Allow time for the signal to be passed and the process to exit.
+ time.sleep(0.5)
+ if not proc.poll():
+ # Forcefully kill the process if we weren't able to signal it.
+ os.kill(proc.pid, signal.SIGINT)
+ self.fail("subprocess did not stop on {}".format(name))
+
+ @unittest.skip("subprocesses aren't inheriting CTRL+C property")
+ def test_CTRL_C_EVENT(self):
+ from ctypes import wintypes
+ import ctypes
+
+ # Make a NULL value by creating a pointer with no argument.
+ NULL = ctypes.POINTER(ctypes.c_int)()
+ SetConsoleCtrlHandler = ctypes.windll.kernel32.SetConsoleCtrlHandler
+ SetConsoleCtrlHandler.argtypes = (ctypes.POINTER(ctypes.c_int),
+ wintypes.BOOL)
+ SetConsoleCtrlHandler.restype = wintypes.BOOL
+
+ # Calling this with NULL and FALSE causes the calling process to
+ # handle CTRL+C, rather than ignore it. This property is inherited
+ # by subprocesses.
+ SetConsoleCtrlHandler(NULL, 0)
+
+ self._kill_with_event(signal.CTRL_C_EVENT, "CTRL_C_EVENT")
+
+ def test_CTRL_BREAK_EVENT(self):
+ self._kill_with_event(signal.CTRL_BREAK_EVENT, "CTRL_BREAK_EVENT")
+
+
+def test_main():
+ test_support.run_unittest(
+ FileTests,
+ TemporaryFileTests,
+ StatAttributeTests,
+ EnvironTests,
+ WalkTests,
+ MakedirTests,
+ DevNullTests,
+ URandomTests,
+ Win32ErrorTests,
+ TestInvalidFD,
+ PosixUidGidTests,
+ Win32KillTests
+ )
+
+if __name__ == "__main__":
+ test_main()
diff --git a/pypy/rlib/clibffi.py b/pypy/rlib/clibffi.py
--- a/pypy/rlib/clibffi.py
+++ b/pypy/rlib/clibffi.py
@@ -655,6 +655,13 @@
return FuncPtr(name, argtypes, restype, dlsym(self.lib, name),
flags=flags, keepalive=self)
+ def getpointer_byordinal(self, name, argtypes, restype,
+ flags=FUNCFLAG_CDECL):
+ # these arguments are already casted to proper ffi
+ # structures!
+ return FuncPtr(name, argtypes, restype, dlsym_byordinal(self.lib, name),
+ flags=flags, keepalive=self)
+
def getrawpointer(self, name, argtypes, restype, flags=FUNCFLAG_CDECL):
# these arguments are already casted to proper ffi
# structures!
From noreply at buildbot.pypy.org Wed Apr 11 21:15:25 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Wed, 11 Apr 2012 21:15:25 +0200 (CEST)
Subject: [pypy-commit] pypy win32-stdlib: help find import library,
silence compiler warnings
Message-ID: <20120411191525.9DDA582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-stdlib
Changeset: r54293:056d06128b67
Date: 2012-04-11 22:14 +0300
http://bitbucket.org/pypy/pypy/changeset/056d06128b67/
Log: help find import library, silence compiler warnings
diff --git a/lib_pypy/_ctypes_test.py b/lib_pypy/_ctypes_test.py
--- a/lib_pypy/_ctypes_test.py
+++ b/lib_pypy/_ctypes_test.py
@@ -21,7 +21,7 @@
# Compile .c file
include_dir = os.path.join(thisdir, '..', 'include')
if sys.platform == 'win32':
- ccflags = []
+ ccflags = ['-D_CRT_SECURE_NO_WARNINGS']
else:
ccflags = ['-fPIC']
res = compiler.compile([os.path.join(thisdir, '_ctypes_test.c')],
@@ -34,6 +34,13 @@
if sys.platform == 'win32':
# XXX libpypy-c.lib is currently not installed automatically
library = os.path.join(thisdir, '..', 'include', 'libpypy-c')
+ if not os.path.exists(library + '.lib'):
+ #For a nightly build
+ library = os.path.join(thisdir, '..', 'include', 'python27')
+ if not os.path.exists(library + '.lib'):
+ # For a local translation
+ library = os.path.join(thisdir, '..', 'pypy', 'translator',
+ 'goal', 'libpypy-c')
libraries = [library, 'oleaut32']
extra_ldargs = ['/MANIFEST'] # needed for VC10
else:
diff --git a/lib_pypy/_testcapi.py b/lib_pypy/_testcapi.py
--- a/lib_pypy/_testcapi.py
+++ b/lib_pypy/_testcapi.py
@@ -16,7 +16,7 @@
# Compile .c file
include_dir = os.path.join(thisdir, '..', 'include')
if sys.platform == 'win32':
- ccflags = []
+ ccflags = ['-D_CRT_SECURE_NO_WARNINGS']
else:
ccflags = ['-fPIC', '-Wimplicit-function-declaration']
res = compiler.compile([os.path.join(thisdir, '_testcapimodule.c')],
From noreply at buildbot.pypy.org Wed Apr 11 21:19:18 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Wed, 11 Apr 2012 21:19:18 +0200 (CEST)
Subject: [pypy-commit] pypy win32-stdlib: remove prematurely added function
Message-ID: <20120411191918.4163182F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-stdlib
Changeset: r54294:b4573bb1e0b4
Date: 2012-04-11 22:18 +0300
http://bitbucket.org/pypy/pypy/changeset/b4573bb1e0b4/
Log: remove prematurely added function
diff --git a/pypy/rlib/clibffi.py b/pypy/rlib/clibffi.py
--- a/pypy/rlib/clibffi.py
+++ b/pypy/rlib/clibffi.py
@@ -655,13 +655,6 @@
return FuncPtr(name, argtypes, restype, dlsym(self.lib, name),
flags=flags, keepalive=self)
- def getpointer_byordinal(self, name, argtypes, restype,
- flags=FUNCFLAG_CDECL):
- # these arguments are already casted to proper ffi
- # structures!
- return FuncPtr(name, argtypes, restype, dlsym_byordinal(self.lib, name),
- flags=flags, keepalive=self)
-
def getrawpointer(self, name, argtypes, restype, flags=FUNCFLAG_CDECL):
# these arguments are already casted to proper ffi
# structures!
From noreply at buildbot.pypy.org Wed Apr 11 23:42:29 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Wed, 11 Apr 2012 23:42:29 +0200 (CEST)
Subject: [pypy-commit] pypy win32-cleanup2: make tests applicable to
windows, fix check_signum for valid values
Message-ID: <20120411214229.C9FE782F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-cleanup2
Changeset: r54295:a2db34cd9d65
Date: 2012-04-12 00:17 +0300
http://bitbucket.org/pypy/pypy/changeset/a2db34cd9d65/
Log: make tests applicable to windows, fix check_signum for valid
values
diff --git a/pypy/module/signal/interp_signal.py b/pypy/module/signal/interp_signal.py
--- a/pypy/module/signal/interp_signal.py
+++ b/pypy/module/signal/interp_signal.py
@@ -15,7 +15,8 @@
def setup():
for key, value in cpy_signal.__dict__.items():
- if key.startswith('SIG') and is_valid_int(value):
+ if (key.startswith('SIG') or key.startswith('CTRL_')) and \
+ is_valid_int(value):
globals()[key] = value
yield key
@@ -242,9 +243,12 @@
return space.w_None
def check_signum(space, signum):
- if signum < 1 or signum >= NSIG:
- raise OperationError(space.w_ValueError,
- space.wrap("signal number out of range"))
+ for sig in signal_names:
+ if signum == globals()[sig]:
+ return
+ raise OperationError(space.w_ValueError,
+ space.wrap("invalid signal value"))
+
@jit.dont_look_inside
@unwrap_spec(signum=int)
diff --git a/pypy/module/signal/test/test_signal.py b/pypy/module/signal/test/test_signal.py
--- a/pypy/module/signal/test/test_signal.py
+++ b/pypy/module/signal/test/test_signal.py
@@ -8,6 +8,8 @@
def setup_class(cls):
if not hasattr(os, 'kill') or not hasattr(os, 'getpid'):
py.test.skip("requires os.kill() and os.getpid()")
+ if not hasattr(cpy_signal, 'SIGUSR1'):
+ py.test.skip("requires SIGUSR1 in signal")
cls.space = gettestobjspace(usemodules=['signal'])
def test_checksignals(self):
@@ -45,64 +47,72 @@
def test_exported_names(self):
self.signal.__dict__ # crashes if the interpleveldefs are invalid
- def test_usr1(self):
- import types, posix
+ def test_basics(self):
+ import types, os
+ if not hasattr(os, 'kill') or not hasattr(os, 'getpid'):
+ skip("requires os.kill() and os.getpid()")
signal = self.signal # the signal module to test
+ try:
+ signum = signal.USR1
+ except:
+ signum = signal.CTRL_BREAK_EVENT
received = []
def myhandler(signum, frame):
assert isinstance(frame, types.FrameType)
received.append(signum)
- signal.signal(signal.SIGUSR1, myhandler)
+ signal.signal(signum, myhandler)
- posix.kill(posix.getpid(), signal.SIGUSR1)
+ print dir(os)
+
+ os.kill(os.getpid(), signum)
# the signal should be delivered to the handler immediately
- assert received == [signal.SIGUSR1]
+ assert received == [signum]
del received[:]
- posix.kill(posix.getpid(), signal.SIGUSR1)
+ os.kill(os.getpid(), signum)
# the signal should be delivered to the handler immediately
- assert received == [signal.SIGUSR1]
+ assert received == [signum]
del received[:]
- signal.signal(signal.SIGUSR1, signal.SIG_IGN)
+ signal.signal(signum, signal.SIG_IGN)
- posix.kill(posix.getpid(), signal.SIGUSR1)
+ os.kill(os.getpid(), signum)
for i in range(10000):
# wait a bit - signal should not arrive
if received:
break
assert received == []
- signal.signal(signal.SIGUSR1, signal.SIG_DFL)
+ signal.signal(signum, signal.SIG_DFL)
def test_default_return(self):
"""
Test that signal.signal returns SIG_DFL if that is the current handler.
"""
- from signal import signal, SIGUSR1, SIG_DFL, SIG_IGN
+ from signal import signal, SIGINT, SIG_DFL, SIG_IGN
try:
for handler in SIG_DFL, SIG_IGN, lambda *a: None:
- signal(SIGUSR1, SIG_DFL)
- assert signal(SIGUSR1, handler) == SIG_DFL
+ signal(SIGINT, SIG_DFL)
+ assert signal(SIGINT, handler) == SIG_DFL
finally:
- signal(SIGUSR1, SIG_DFL)
+ signal(SIGINT, SIG_DFL)
def test_ignore_return(self):
"""
Test that signal.signal returns SIG_IGN if that is the current handler.
"""
- from signal import signal, SIGUSR1, SIG_DFL, SIG_IGN
+ from signal import signal, SIGINT, SIG_DFL, SIG_IGN
try:
for handler in SIG_DFL, SIG_IGN, lambda *a: None:
- signal(SIGUSR1, SIG_IGN)
- assert signal(SIGUSR1, handler) == SIG_IGN
+ signal(SIGINT, SIG_IGN)
+ assert signal(SIGINT, handler) == SIG_IGN
finally:
- signal(SIGUSR1, SIG_DFL)
+ signal(SIGINT, SIG_DFL)
def test_obj_return(self):
@@ -110,43 +120,47 @@
Test that signal.signal returns a Python object if one is the current
handler.
"""
- from signal import signal, SIGUSR1, SIG_DFL, SIG_IGN
+ from signal import signal, SIGINT, SIG_DFL, SIG_IGN
def installed(*a):
pass
try:
for handler in SIG_DFL, SIG_IGN, lambda *a: None:
- signal(SIGUSR1, installed)
- assert signal(SIGUSR1, handler) is installed
+ signal(SIGINT, installed)
+ assert signal(SIGINT, handler) is installed
finally:
- signal(SIGUSR1, SIG_DFL)
+ signal(SIGINT, SIG_DFL)
def test_getsignal(self):
"""
Test that signal.getsignal returns the currently installed handler.
"""
- from signal import getsignal, signal, SIGUSR1, SIG_DFL, SIG_IGN
+ from signal import getsignal, signal, SIGINT, SIG_DFL, SIG_IGN
def handler(*a):
pass
try:
- assert getsignal(SIGUSR1) == SIG_DFL
- signal(SIGUSR1, SIG_DFL)
- assert getsignal(SIGUSR1) == SIG_DFL
- signal(SIGUSR1, SIG_IGN)
- assert getsignal(SIGUSR1) == SIG_IGN
- signal(SIGUSR1, handler)
- assert getsignal(SIGUSR1) is handler
+ assert getsignal(SIGINT) == SIG_DFL
+ signal(SIGINT, SIG_DFL)
+ assert getsignal(SIGINT) == SIG_DFL
+ signal(SIGINT, SIG_IGN)
+ assert getsignal(SIGINT) == SIG_IGN
+ signal(SIGINT, handler)
+ assert getsignal(SIGINT) is handler
finally:
- signal(SIGUSR1, SIG_DFL)
+ signal(SIGINT, SIG_DFL)
raises(ValueError, getsignal, 4444)
raises(ValueError, signal, 4444, lambda *args: None)
+ raises(ValueError, signal, 7, lambda *args: None)
def test_alarm(self):
- from signal import alarm, signal, SIG_DFL, SIGALRM
+ try:
+ from signal import alarm, signal, SIG_DFL, SIGALRM
+ except:
+ skip('no alarm on this platform')
import time
l = []
def handler(*a):
@@ -163,10 +177,13 @@
signal(SIGALRM, SIG_DFL)
def test_set_wakeup_fd(self):
- import signal, posix, fcntl
+ try:
+ import signal, posix, fcntl
+ except ImportError:
+ skip('cannot import posix or fcntl')
def myhandler(signum, frame):
pass
- signal.signal(signal.SIGUSR1, myhandler)
+ signal.signal(signal.SIGINT, myhandler)
#
def cannot_read():
try:
@@ -187,17 +204,19 @@
old_wakeup = signal.set_wakeup_fd(fd_write)
try:
cannot_read()
- posix.kill(posix.getpid(), signal.SIGUSR1)
+ posix.kill(posix.getpid(), signal.SIGINT)
res = posix.read(fd_read, 1)
assert res == '\x00'
cannot_read()
finally:
old_wakeup = signal.set_wakeup_fd(old_wakeup)
#
- signal.signal(signal.SIGUSR1, signal.SIG_DFL)
+ signal.signal(signal.SIGINT, signal.SIG_DFL)
def test_siginterrupt(self):
import signal, os, time
+ if not hasattr(signal, 'siginterrupt'):
+ skip('non siginterrupt in signal')
signum = signal.SIGUSR1
def readpipe_is_not_interrupted():
# from CPython's test_signal.readpipe_interrupted()
From noreply at buildbot.pypy.org Wed Apr 11 23:42:31 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Wed, 11 Apr 2012 23:42:31 +0200 (CEST)
Subject: [pypy-commit] pypy win32-cleanup2: add tests,
fix implementation of strftime('%f')
Message-ID: <20120411214231.1C10382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-cleanup2
Changeset: r54296:30144e45f849
Date: 2012-04-12 00:41 +0300
http://bitbucket.org/pypy/pypy/changeset/30144e45f849/
Log: add tests, fix implementation of strftime('%f')
diff --git a/pypy/module/rctime/interp_time.py b/pypy/module/rctime/interp_time.py
--- a/pypy/module/rctime/interp_time.py
+++ b/pypy/module/rctime/interp_time.py
@@ -572,7 +572,7 @@
if i < length and format[i] == '#':
# not documented by python
i += 1
- if i >= length or format[i] not in "aAbBcdfHIjmMpSUwWxXyYzZ%":
+ if i >= length or format[i] not in "aAbBcdHIjmMpSUwWxXyYzZ%":
raise OperationError(space.w_ValueError,
space.wrap("invalid format string"))
i += 1
diff --git a/pypy/module/rctime/test/test_rctime.py b/pypy/module/rctime/test/test_rctime.py
--- a/pypy/module/rctime/test/test_rctime.py
+++ b/pypy/module/rctime/test/test_rctime.py
@@ -211,7 +211,7 @@
def test_strftime(self):
import time as rctime
-
+ import os
t = rctime.time()
tt = rctime.gmtime(t)
for directive in ('a', 'A', 'b', 'B', 'c', 'd', 'H', 'I',
@@ -226,6 +226,14 @@
exp = '2000 01 01 00 00 00 1 001'
assert rctime.strftime("%Y %m %d %H %M %S %w %j", (0,)*9) == exp
+ # Guard against invalid/non-supported format string
+ # so that Python don't crash (Windows crashes when the format string
+ # input to [w]strftime is not kosher.
+ if os.name =='nt':
+ raises(ValueError, rctime.strftime, '%f')
+ else:
+ assert rctime.strftime('%f') == '%f'
+
def test_strftime_ext(self):
import time as rctime
From noreply at buildbot.pypy.org Thu Apr 12 00:13:45 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Thu, 12 Apr 2012 00:13:45 +0200 (CEST)
Subject: [pypy-commit] pypy win32-cleanup2: fix for translation
Message-ID: <20120411221345.D1BC382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-cleanup2
Changeset: r54297:b0a1bc6e4750
Date: 2012-04-12 01:13 +0300
http://bitbucket.org/pypy/pypy/changeset/b0a1bc6e4750/
Log: fix for translation
diff --git a/pypy/module/signal/interp_signal.py b/pypy/module/signal/interp_signal.py
--- a/pypy/module/signal/interp_signal.py
+++ b/pypy/module/signal/interp_signal.py
@@ -24,6 +24,7 @@
SIG_DFL = cpy_signal.SIG_DFL
SIG_IGN = cpy_signal.SIG_IGN
signal_names = list(setup())
+signal_values = [globals()[key] for key in signal_names]
includes = ['stdlib.h', 'src/signals.h']
if sys.platform != 'win32':
@@ -243,8 +244,9 @@
return space.w_None
def check_signum(space, signum):
- for sig in signal_names:
- if signum == globals()[sig]:
+ xxx
+ for sig in signal_values:
+ if signum ==sig:
return
raise OperationError(space.w_ValueError,
space.wrap("invalid signal value"))
diff --git a/pypy/module/signal/test/test_interp_signal.py b/pypy/module/signal/test/test_interp_signal.py
--- a/pypy/module/signal/test/test_interp_signal.py
+++ b/pypy/module/signal/test/test_interp_signal.py
@@ -6,6 +6,8 @@
def setup_module(mod):
if not hasattr(os, 'kill') or not hasattr(os, 'getpid'):
py.test.skip("requires os.kill() and os.getpid()")
+ if not hasattr(interp_signal, 'SIGUSR1'):
+ py.test.skip("requires SIGUSR1 in signal")
def check(expected):
From noreply at buildbot.pypy.org Thu Apr 12 07:24:32 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Thu, 12 Apr 2012 07:24:32 +0200 (CEST)
Subject: [pypy-commit] pypy win32-cleanup2: remove debug cruft
Message-ID: <20120412052432.27D8582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-cleanup2
Changeset: r54298:eb926aa445bc
Date: 2012-04-12 08:24 +0300
http://bitbucket.org/pypy/pypy/changeset/eb926aa445bc/
Log: remove debug cruft
diff --git a/pypy/module/signal/interp_signal.py b/pypy/module/signal/interp_signal.py
--- a/pypy/module/signal/interp_signal.py
+++ b/pypy/module/signal/interp_signal.py
@@ -244,7 +244,6 @@
return space.w_None
def check_signum(space, signum):
- xxx
for sig in signal_values:
if signum ==sig:
return
From noreply at buildbot.pypy.org Thu Apr 12 07:27:33 2012
From: noreply at buildbot.pypy.org (taavi_burns)
Date: Thu, 12 Apr 2012 07:27:33 +0200 (CEST)
Subject: [pypy-commit] pypy numpy-ufuncs3: Replaced logaddexp(2) functions
with the numpy implementations. Tests pass better now!
Message-ID: <20120412052733.A85CF82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Taavi Burns
Branch: numpy-ufuncs3
Changeset: r54299:c644bd3dfefe
Date: 2012-04-12 01:27 -0400
http://bitbucket.org/pypy/pypy/changeset/c644bd3dfefe/
Log: Replaced logaddexp(2) functions with the numpy implementations.
Tests pass better now!
diff --git a/pypy/module/micronumpy/test/test_ufuncs.py b/pypy/module/micronumpy/test/test_ufuncs.py
--- a/pypy/module/micronumpy/test/test_ufuncs.py
+++ b/pypy/module/micronumpy/test/test_ufuncs.py
@@ -764,6 +764,7 @@
def test_logaddexp(self):
import math
import sys
+ float_max, float_min = sys.float_info.max, sys.float_info.min
from _numpypy import logaddexp
# From the numpy documentation
@@ -774,8 +775,8 @@
assert logaddexp(0, 0) == math.log(2)
assert logaddexp(float('-inf'), 0) == 0
- assert logaddexp(sys.float_info.max, sys.float_info.max) == sys.float_info.max
- assert logaddexp(sys.float_info.min, sys.float_info.min) == math.log(2)
+ assert logaddexp(float_max, float_max) == float_max
+ assert logaddexp(float_min, float_min) == math.log(2)
assert math.isnan(logaddexp(float('nan'), 1))
assert math.isnan(logaddexp(1, float('nan')))
@@ -789,6 +790,7 @@
def test_logaddexp2(self):
import math
import sys
+ float_max, float_min = sys.float_info.max, sys.float_info.min
from _numpypy import logaddexp2
log2 = math.log(2)
@@ -800,8 +802,8 @@
assert logaddexp2(0, 0) == 1
assert logaddexp2(float('-inf'), 0) == 0
- assert logaddexp2(sys.float_info.max, sys.float_info.max) == sys.float_info.max
- assert logaddexp2(sys.float_info.min, sys.float_info.min) == 1.0
+ assert logaddexp2(float_max, float_max) == float_max
+ assert logaddexp2(float_min, float_min) == 1.0
assert math.isnan(logaddexp2(float('nan'), 1))
assert math.isnan(logaddexp2(1, float('nan')))
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -17,6 +17,7 @@
'render_as_void': True})
degToRad = math.pi / 180.0
log2 = math.log(2)
+log2e = 1./log2
def simple_unary_op(func):
specialize.argtype(1)(func)
@@ -841,45 +842,26 @@
@simple_binary_op
def logaddexp(self, v1, v2):
- try:
- v1e = math.exp(v1)
- except OverflowError:
- v1e = rfloat.INFINITY
- try:
- v2e = math.exp(v2)
- except OverflowError:
- v2e = rfloat.INFINITY
+ tmp = v1 - v2
+ if tmp > 0:
+ return v1 + rfloat.log1p(math.exp(-tmp))
+ elif tmp <= 0:
+ return v2 + rfloat.log1p(math.exp(tmp))
+ else:
+ return v1 + v2
- v12e = v1e + v2e
- try:
- return math.log(v12e)
- except ValueError:
- if v12e == 0.0:
- # CPython raises ValueError here, so we have to check
- # the value to find the correct numpy return value
- return -rfloat.INFINITY
- return rfloat.NAN
+ def npy_log2_1p(self, v):
+ return log2e * rfloat.log1p(v)
@simple_binary_op
def logaddexp2(self, v1, v2):
- try:
- v1e = math.pow(2, v1)
- except OverflowError:
- v1e = rfloat.INFINITY
- try:
- v2e = math.pow(2, v2)
- except OverflowError:
- v2e = rfloat.INFINITY
-
- v12e = v1e + v2e
- try:
- return math.log(v12e) / log2
- except ValueError:
- if v12e == 0.0:
- # CPython raises ValueError here, so we have to check
- # the value to find the correct numpy return value
- return -rfloat.INFINITY
- return rfloat.NAN
+ tmp = v1 - v2
+ if tmp > 0:
+ return v1 + self.npy_log2_1p(math.pow(2, -tmp))
+ if tmp <= 0:
+ return v2 + self.npy_log2_1p(math.pow(2, tmp))
+ else:
+ return v1 + v2
class NonNativeFloat(NonNativePrimitive, Float):
_mixin_ = True
From noreply at buildbot.pypy.org Thu Apr 12 10:20:52 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 12 Apr 2012 10:20:52 +0200 (CEST)
Subject: [pypy-commit] pypy default: issue1126: document
Message-ID: <20120412082052.B680682F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r54300:2a7fffa8e63e
Date: 2012-04-12 10:08 +0200
http://bitbucket.org/pypy/pypy/changeset/2a7fffa8e63e/
Log: issue1126: document
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -158,6 +158,12 @@
.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html
.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html
+Note that this difference might show up indirectly in some cases. For
+example, a generator left pending in the middle is --- again ---
+garbage-collected later in PyPy than in CPython. You can see the
+difference if the ``yield`` keyword it is suspended at is itself
+enclosed in a ``try:`` or a ``with:`` block.
+
Using the default GC called ``minimark``, the built-in function ``id()``
works like it does in CPython. With other GCs it returns numbers that
are not real addresses (because an object can move around several times)
From noreply at buildbot.pypy.org Thu Apr 12 11:07:11 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Thu, 12 Apr 2012 11:07:11 +0200 (CEST)
Subject: [pypy-commit] pypy vendor/stdlib: create branch for tracking the
unchanged stdlib
Message-ID: <20120412090711.9A89582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: vendor/stdlib
Changeset: r54301:7406215587c6
Date: 2012-04-12 11:00 +0200
http://bitbucket.org/pypy/pypy/changeset/7406215587c6/
Log: create branch for tracking the unchanged stdlib
From noreply at buildbot.pypy.org Thu Apr 12 11:07:23 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Thu, 12 Apr 2012 11:07:23 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: copy all pypy mods into the
stdlib folder
Message-ID: <20120412090723.5D83882F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54302:cb0772450c59
Date: 2012-04-12 11:05 +0200
http://bitbucket.org/pypy/pypy/changeset/cb0772450c59/
Log: copy all pypy mods into the stdlib folder
diff too long, truncating to 10000 out of 186682 lines
diff --git a/lib-python/2.7/UserDict.py b/lib-python/2.7/UserDict.py
--- a/lib-python/2.7/UserDict.py
+++ b/lib-python/2.7/UserDict.py
@@ -1,5 +1,10 @@
"""A more or less complete user-defined wrapper around dictionary objects."""
+# XXX This is a bit of a hack (as usual :-))
+# the actual content of the file is not changed, but we put it here to make
+# virtualenv happy (because its internal logic expects at least one of the
+# REQUIRED_MODULES to be in modified-*)
+
class UserDict:
def __init__(self, dict=None, **kwargs):
self.data = {}
@@ -80,8 +85,12 @@
def __iter__(self):
return iter(self.data)
-import _abcoll
-_abcoll.MutableMapping.register(IterableUserDict)
+try:
+ import _abcoll
+except ImportError:
+ pass # e.g. no '_weakref' module on this pypy
+else:
+ _abcoll.MutableMapping.register(IterableUserDict)
class DictMixin:
diff --git a/lib-python/2.7/_threading_local.py b/lib-python/2.7/_threading_local.py
--- a/lib-python/2.7/_threading_local.py
+++ b/lib-python/2.7/_threading_local.py
@@ -155,7 +155,7 @@
object.__setattr__(self, '_local__args', (args, kw))
object.__setattr__(self, '_local__lock', RLock())
- if (args or kw) and (cls.__init__ is object.__init__):
+ if (args or kw) and (cls.__init__ == object.__init__):
raise TypeError("Initialization arguments are not supported")
# We need to create the thread dict in anticipation of
diff --git a/lib-python/2.7/ctypes/__init__.py b/lib-python/2.7/ctypes/__init__.py
--- a/lib-python/2.7/ctypes/__init__.py
+++ b/lib-python/2.7/ctypes/__init__.py
@@ -7,6 +7,7 @@
__version__ = "1.1.0"
+import _ffi
from _ctypes import Union, Structure, Array
from _ctypes import _Pointer
from _ctypes import CFuncPtr as _CFuncPtr
@@ -350,16 +351,17 @@
self._FuncPtr = _FuncPtr
if handle is None:
- self._handle = _dlopen(self._name, mode)
+ self._handle = _ffi.CDLL(name, mode)
else:
self._handle = handle
def __repr__(self):
- return "<%s '%s', handle %x at %x>" % \
+ return "<%s '%s', handle %r at %x>" % \
(self.__class__.__name__, self._name,
- (self._handle & (_sys.maxint*2 + 1)),
+ (self._handle),
id(self) & (_sys.maxint*2 + 1))
+
def __getattr__(self, name):
if name.startswith('__') and name.endswith('__'):
raise AttributeError(name)
@@ -487,9 +489,12 @@
_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
return CFunctionType
-_cast = PYFUNCTYPE(py_object, c_void_p, py_object, py_object)(_cast_addr)
def cast(obj, typ):
- return _cast(obj, obj, typ)
+ try:
+ c_void_p.from_param(obj)
+ except TypeError, e:
+ raise ArgumentError(str(e))
+ return _cast_addr(obj, obj, typ)
_string_at = PYFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
def string_at(ptr, size=-1):
diff --git a/lib-python/2.7/ctypes/test/__init__.py b/lib-python/2.7/ctypes/test/__init__.py
--- a/lib-python/2.7/ctypes/test/__init__.py
+++ b/lib-python/2.7/ctypes/test/__init__.py
@@ -206,3 +206,16 @@
result = unittest.TestResult()
test(result)
return result
+
+def xfail(method):
+ """
+    Poor man's xfail: remove it when all the failures have been fixed
+ """
+ def new_method(self, *args, **kwds):
+ try:
+ method(self, *args, **kwds)
+ except:
+ pass
+ else:
+ self.assertTrue(False, "DID NOT RAISE")
+ return new_method
diff --git a/lib-python/2.7/ctypes/test/test_arrays.py b/lib-python/2.7/ctypes/test/test_arrays.py
--- a/lib-python/2.7/ctypes/test/test_arrays.py
+++ b/lib-python/2.7/ctypes/test/test_arrays.py
@@ -1,12 +1,23 @@
import unittest
from ctypes import *
+from test.test_support import impl_detail
formats = "bBhHiIlLqQfd"
+# c_longdouble commented out for PyPy, look at the comment in test_longdouble
formats = c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, \
- c_long, c_ulonglong, c_float, c_double, c_longdouble
+ c_long, c_ulonglong, c_float, c_double #, c_longdouble
class ArrayTestCase(unittest.TestCase):
+
+ @impl_detail('long double not supported by PyPy', pypy=False)
+ def test_longdouble(self):
+ """
+ This test is empty. It's just here to remind that we commented out
+        c_longdouble in "formats". If pypy ever supports c_longdouble, we
+ should kill this test and uncomment c_longdouble inside formats.
+ """
+
def test_simple(self):
# create classes holding simple numeric types, and check
# various properties.
diff --git a/lib-python/2.7/ctypes/test/test_bitfields.py b/lib-python/2.7/ctypes/test/test_bitfields.py
--- a/lib-python/2.7/ctypes/test/test_bitfields.py
+++ b/lib-python/2.7/ctypes/test/test_bitfields.py
@@ -115,17 +115,21 @@
def test_nonint_types(self):
# bit fields are not allowed on non-integer types.
result = self.fail_fields(("a", c_char_p, 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_char_p'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
result = self.fail_fields(("a", c_void_p, 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_void_p'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
if c_int != c_long:
result = self.fail_fields(("a", POINTER(c_int), 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type LP_c_int'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
result = self.fail_fields(("a", c_char, 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_char'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
try:
c_wchar
@@ -133,13 +137,15 @@
pass
else:
result = self.fail_fields(("a", c_wchar, 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type c_wchar'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
class Dummy(Structure):
_fields_ = []
result = self.fail_fields(("a", Dummy, 1))
- self.assertEqual(result, (TypeError, 'bit fields not allowed for type Dummy'))
+ self.assertEqual(result[0], TypeError)
+ self.assertIn('bit fields not allowed for type', result[1])
def test_single_bitfield_size(self):
for c_typ in int_types:
diff --git a/lib-python/2.7/ctypes/test/test_byteswap.py b/lib-python/2.7/ctypes/test/test_byteswap.py
--- a/lib-python/2.7/ctypes/test/test_byteswap.py
+++ b/lib-python/2.7/ctypes/test/test_byteswap.py
@@ -2,6 +2,7 @@
from binascii import hexlify
from ctypes import *
+from ctypes.test import xfail
def bin(s):
return hexlify(memoryview(s)).upper()
@@ -21,6 +22,7 @@
setattr(bits, "i%s" % i, 1)
dump(bits)
+ @xfail
def test_endian_short(self):
if sys.byteorder == "little":
self.assertTrue(c_short.__ctype_le__ is c_short)
@@ -48,6 +50,7 @@
self.assertEqual(bin(s), "3412")
self.assertEqual(s.value, 0x1234)
+ @xfail
def test_endian_int(self):
if sys.byteorder == "little":
self.assertTrue(c_int.__ctype_le__ is c_int)
@@ -76,6 +79,7 @@
self.assertEqual(bin(s), "78563412")
self.assertEqual(s.value, 0x12345678)
+ @xfail
def test_endian_longlong(self):
if sys.byteorder == "little":
self.assertTrue(c_longlong.__ctype_le__ is c_longlong)
@@ -104,6 +108,7 @@
self.assertEqual(bin(s), "EFCDAB9078563412")
self.assertEqual(s.value, 0x1234567890ABCDEF)
+ @xfail
def test_endian_float(self):
if sys.byteorder == "little":
self.assertTrue(c_float.__ctype_le__ is c_float)
@@ -122,6 +127,7 @@
self.assertAlmostEqual(s.value, math.pi, 6)
self.assertEqual(bin(struct.pack(">f", math.pi)), bin(s))
+ @xfail
def test_endian_double(self):
if sys.byteorder == "little":
self.assertTrue(c_double.__ctype_le__ is c_double)
@@ -149,6 +155,7 @@
self.assertTrue(c_char.__ctype_le__ is c_char)
self.assertTrue(c_char.__ctype_be__ is c_char)
+ @xfail
def test_struct_fields_1(self):
if sys.byteorder == "little":
base = BigEndianStructure
@@ -198,6 +205,7 @@
pass
self.assertRaises(TypeError, setattr, S, "_fields_", [("s", T)])
+ @xfail
def test_struct_fields_2(self):
# standard packing in struct uses no alignment.
# So, we have to align using pad bytes.
@@ -221,6 +229,7 @@
s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
self.assertEqual(bin(s1), bin(s2))
+ @xfail
def test_unaligned_nonnative_struct_fields(self):
if sys.byteorder == "little":
base = BigEndianStructure
diff --git a/lib-python/2.7/ctypes/test/test_callbacks.py b/lib-python/2.7/ctypes/test/test_callbacks.py
--- a/lib-python/2.7/ctypes/test/test_callbacks.py
+++ b/lib-python/2.7/ctypes/test/test_callbacks.py
@@ -1,5 +1,6 @@
import unittest
from ctypes import *
+from ctypes.test import xfail
import _ctypes_test
class Callbacks(unittest.TestCase):
@@ -98,6 +99,7 @@
## self.check_type(c_char_p, "abc")
## self.check_type(c_char_p, "def")
+ @xfail
def test_pyobject(self):
o = ()
from sys import getrefcount as grc
diff --git a/lib-python/2.7/ctypes/test/test_cfuncs.py b/lib-python/2.7/ctypes/test/test_cfuncs.py
--- a/lib-python/2.7/ctypes/test/test_cfuncs.py
+++ b/lib-python/2.7/ctypes/test/test_cfuncs.py
@@ -3,8 +3,8 @@
import unittest
from ctypes import *
-
import _ctypes_test
+from test.test_support import impl_detail
class CFunctions(unittest.TestCase):
_dll = CDLL(_ctypes_test.__file__)
@@ -158,12 +158,14 @@
self.assertEqual(self._dll.tf_bd(0, 42.), 14.)
self.assertEqual(self.S(), 42)
+ @impl_detail('long double not supported by PyPy', pypy=False)
def test_longdouble(self):
self._dll.tf_D.restype = c_longdouble
self._dll.tf_D.argtypes = (c_longdouble,)
self.assertEqual(self._dll.tf_D(42.), 14.)
self.assertEqual(self.S(), 42)
-
+
+ @impl_detail('long double not supported by PyPy', pypy=False)
def test_longdouble_plus(self):
self._dll.tf_bD.restype = c_longdouble
self._dll.tf_bD.argtypes = (c_byte, c_longdouble)
diff --git a/lib-python/2.7/ctypes/test/test_delattr.py b/lib-python/2.7/ctypes/test/test_delattr.py
--- a/lib-python/2.7/ctypes/test/test_delattr.py
+++ b/lib-python/2.7/ctypes/test/test_delattr.py
@@ -6,15 +6,15 @@
class TestCase(unittest.TestCase):
def test_simple(self):
- self.assertRaises(TypeError,
+ self.assertRaises((TypeError, AttributeError),
delattr, c_int(42), "value")
def test_chararray(self):
- self.assertRaises(TypeError,
+ self.assertRaises((TypeError, AttributeError),
delattr, (c_char * 5)(), "value")
def test_struct(self):
- self.assertRaises(TypeError,
+ self.assertRaises((TypeError, AttributeError),
delattr, X(), "foo")
if __name__ == "__main__":
diff --git a/lib-python/2.7/ctypes/test/test_frombuffer.py b/lib-python/2.7/ctypes/test/test_frombuffer.py
--- a/lib-python/2.7/ctypes/test/test_frombuffer.py
+++ b/lib-python/2.7/ctypes/test/test_frombuffer.py
@@ -2,6 +2,7 @@
import array
import gc
import unittest
+from ctypes.test import xfail
class X(Structure):
_fields_ = [("c_int", c_int)]
@@ -10,6 +11,7 @@
self._init_called = True
class Test(unittest.TestCase):
+ @xfail
def test_fom_buffer(self):
a = array.array("i", range(16))
x = (c_int * 16).from_buffer(a)
@@ -35,6 +37,7 @@
self.assertRaises(TypeError,
(c_char * 16).from_buffer, "a" * 16)
+ @xfail
def test_fom_buffer_with_offset(self):
a = array.array("i", range(16))
x = (c_int * 15).from_buffer(a, sizeof(c_int))
@@ -43,6 +46,7 @@
self.assertRaises(ValueError, lambda: (c_int * 16).from_buffer(a, sizeof(c_int)))
self.assertRaises(ValueError, lambda: (c_int * 1).from_buffer(a, 16 * sizeof(c_int)))
+ @xfail
def test_from_buffer_copy(self):
a = array.array("i", range(16))
x = (c_int * 16).from_buffer_copy(a)
@@ -67,6 +71,7 @@
x = (c_char * 16).from_buffer_copy("a" * 16)
self.assertEqual(x[:], "a" * 16)
+ @xfail
def test_fom_buffer_copy_with_offset(self):
a = array.array("i", range(16))
x = (c_int * 15).from_buffer_copy(a, sizeof(c_int))
diff --git a/lib-python/2.7/ctypes/test/test_functions.py b/lib-python/2.7/ctypes/test/test_functions.py
--- a/lib-python/2.7/ctypes/test/test_functions.py
+++ b/lib-python/2.7/ctypes/test/test_functions.py
@@ -7,6 +7,8 @@
from ctypes import *
import sys, unittest
+from ctypes.test import xfail
+from test.test_support import impl_detail
try:
WINFUNCTYPE
@@ -143,6 +145,7 @@
self.assertEqual(result, -21)
self.assertEqual(type(result), float)
+ @impl_detail('long double not supported by PyPy', pypy=False)
def test_longdoubleresult(self):
f = dll._testfunc_D_bhilfD
f.argtypes = [c_byte, c_short, c_int, c_long, c_float, c_longdouble]
@@ -393,6 +396,7 @@
self.assertEqual((s8i.a, s8i.b, s8i.c, s8i.d, s8i.e, s8i.f, s8i.g, s8i.h),
(9*2, 8*3, 7*4, 6*5, 5*6, 4*7, 3*8, 2*9))
+ @xfail
def test_sf1651235(self):
# see http://www.python.org/sf/1651235
diff --git a/lib-python/2.7/ctypes/test/test_internals.py b/lib-python/2.7/ctypes/test/test_internals.py
--- a/lib-python/2.7/ctypes/test/test_internals.py
+++ b/lib-python/2.7/ctypes/test/test_internals.py
@@ -33,7 +33,13 @@
refcnt = grc(s)
cs = c_char_p(s)
self.assertEqual(refcnt + 1, grc(s))
- self.assertSame(cs._objects, s)
+ try:
+ # Moving gcs need to allocate a nonmoving buffer
+ cs._objects._obj
+ except AttributeError:
+ self.assertSame(cs._objects, s)
+ else:
+ self.assertSame(cs._objects._obj, s)
def test_simple_struct(self):
class X(Structure):
diff --git a/lib-python/2.7/ctypes/test/test_libc.py b/lib-python/2.7/ctypes/test/test_libc.py
--- a/lib-python/2.7/ctypes/test/test_libc.py
+++ b/lib-python/2.7/ctypes/test/test_libc.py
@@ -25,5 +25,14 @@
lib.my_qsort(chars, len(chars)-1, sizeof(c_char), comparefunc(sort))
self.assertEqual(chars.raw, " ,,aaaadmmmnpppsss\x00")
+ def SKIPPED_test_no_more_xfail(self):
+ # We decided to not explicitly support the whole ctypes-2.7
+ # and instead go for a case-by-case, demand-driven approach.
+ # So this test is skipped instead of failing.
+ import socket
+ import ctypes.test
+ self.assertTrue(not hasattr(ctypes.test, 'xfail'),
+ "You should incrementally grep for '@xfail' and remove them, they are real failures")
+
if __name__ == "__main__":
unittest.main()
diff --git a/lib-python/2.7/ctypes/test/test_loading.py b/lib-python/2.7/ctypes/test/test_loading.py
--- a/lib-python/2.7/ctypes/test/test_loading.py
+++ b/lib-python/2.7/ctypes/test/test_loading.py
@@ -2,7 +2,7 @@
import sys, unittest
import os
from ctypes.util import find_library
-from ctypes.test import is_resource_enabled
+from ctypes.test import is_resource_enabled, xfail
libc_name = None
if os.name == "nt":
@@ -75,6 +75,7 @@
self.assertRaises(AttributeError, dll.__getitem__, 1234)
if os.name == "nt":
+ @xfail
def test_1703286_A(self):
from _ctypes import LoadLibrary, FreeLibrary
# On winXP 64-bit, advapi32 loads at an address that does
@@ -85,6 +86,7 @@
handle = LoadLibrary("advapi32")
FreeLibrary(handle)
+ @xfail
def test_1703286_B(self):
# Since on winXP 64-bit advapi32 loads like described
# above, the (arbitrarily selected) CloseEventLog function
diff --git a/lib-python/2.7/ctypes/test/test_macholib.py b/lib-python/2.7/ctypes/test/test_macholib.py
--- a/lib-python/2.7/ctypes/test/test_macholib.py
+++ b/lib-python/2.7/ctypes/test/test_macholib.py
@@ -52,7 +52,6 @@
'/usr/lib/libSystem.B.dylib')
result = find_lib('z')
- self.assertTrue(result.startswith('/usr/lib/libz.1'))
self.assertTrue(result.endswith('.dylib'))
self.assertEqual(find_lib('IOKit'),
diff --git a/lib-python/2.7/ctypes/test/test_numbers.py b/lib-python/2.7/ctypes/test/test_numbers.py
--- a/lib-python/2.7/ctypes/test/test_numbers.py
+++ b/lib-python/2.7/ctypes/test/test_numbers.py
@@ -1,6 +1,7 @@
from ctypes import *
import unittest
import struct
+from ctypes.test import xfail
def valid_ranges(*types):
# given a sequence of numeric types, collect their _type_
@@ -89,12 +90,14 @@
## self.assertRaises(ValueError, t, l-1)
## self.assertRaises(ValueError, t, h+1)
+ @xfail
def test_from_param(self):
# the from_param class method attribute always
# returns PyCArgObject instances
for t in signed_types + unsigned_types + float_types:
self.assertEqual(ArgType, type(t.from_param(0)))
+ @xfail
def test_byref(self):
# calling byref returns also a PyCArgObject instance
for t in signed_types + unsigned_types + float_types + bool_types:
@@ -102,6 +105,7 @@
self.assertEqual(ArgType, type(parm))
+ @xfail
def test_floats(self):
# c_float and c_double can be created from
# Python int, long and float
@@ -115,6 +119,7 @@
self.assertEqual(t(2L).value, 2.0)
self.assertEqual(t(f).value, 2.0)
+ @xfail
def test_integers(self):
class FloatLike(object):
def __float__(self):
diff --git a/lib-python/2.7/ctypes/test/test_objects.py b/lib-python/2.7/ctypes/test/test_objects.py
--- a/lib-python/2.7/ctypes/test/test_objects.py
+++ b/lib-python/2.7/ctypes/test/test_objects.py
@@ -22,7 +22,7 @@
>>> array[4] = 'foo bar'
>>> array._objects
-{'4': 'foo bar'}
+{'4': }
>>> array[4]
'foo bar'
>>>
@@ -47,9 +47,9 @@
>>> x.array[0] = 'spam spam spam'
>>> x._objects
-{'0:2': 'spam spam spam'}
+{'0:2': }
>>> x.array._b_base_._objects
-{'0:2': 'spam spam spam'}
+{'0:2': }
>>>
'''
diff --git a/lib-python/2.7/ctypes/test/test_parameters.py b/lib-python/2.7/ctypes/test/test_parameters.py
--- a/lib-python/2.7/ctypes/test/test_parameters.py
+++ b/lib-python/2.7/ctypes/test/test_parameters.py
@@ -1,5 +1,7 @@
import unittest, sys
+from ctypes.test import xfail
+
class SimpleTypesTestCase(unittest.TestCase):
def setUp(self):
@@ -49,6 +51,7 @@
self.assertEqual(CWCHARP.from_param("abc"), "abcabcabc")
# XXX Replace by c_char_p tests
+ @xfail
def test_cstrings(self):
from ctypes import c_char_p, byref
@@ -86,7 +89,10 @@
pa = c_wchar_p.from_param(c_wchar_p(u"123"))
self.assertEqual(type(pa), c_wchar_p)
+ if sys.platform == "win32":
+ test_cw_strings = xfail(test_cw_strings)
+ @xfail
def test_int_pointers(self):
from ctypes import c_short, c_uint, c_int, c_long, POINTER, pointer
LPINT = POINTER(c_int)
diff --git a/lib-python/2.7/ctypes/test/test_pep3118.py b/lib-python/2.7/ctypes/test/test_pep3118.py
--- a/lib-python/2.7/ctypes/test/test_pep3118.py
+++ b/lib-python/2.7/ctypes/test/test_pep3118.py
@@ -1,6 +1,7 @@
import unittest
from ctypes import *
import re, sys
+from ctypes.test import xfail
if sys.byteorder == "little":
THIS_ENDIAN = "<"
@@ -19,6 +20,7 @@
class Test(unittest.TestCase):
+ @xfail
def test_native_types(self):
for tp, fmt, shape, itemtp in native_types:
ob = tp()
@@ -46,6 +48,7 @@
print(tp)
raise
+ @xfail
def test_endian_types(self):
for tp, fmt, shape, itemtp in endian_types:
ob = tp()
diff --git a/lib-python/2.7/ctypes/test/test_pickling.py b/lib-python/2.7/ctypes/test/test_pickling.py
--- a/lib-python/2.7/ctypes/test/test_pickling.py
+++ b/lib-python/2.7/ctypes/test/test_pickling.py
@@ -3,6 +3,7 @@
from ctypes import *
import _ctypes_test
dll = CDLL(_ctypes_test.__file__)
+from ctypes.test import xfail
class X(Structure):
_fields_ = [("a", c_int), ("b", c_double)]
@@ -21,6 +22,7 @@
def loads(self, item):
return pickle.loads(item)
+ @xfail
def test_simple(self):
for src in [
c_int(42),
@@ -31,6 +33,7 @@
self.assertEqual(memoryview(src).tobytes(),
memoryview(dst).tobytes())
+ @xfail
def test_struct(self):
X.init_called = 0
@@ -49,6 +52,7 @@
self.assertEqual(memoryview(y).tobytes(),
memoryview(x).tobytes())
+ @xfail
def test_unpickable(self):
# ctypes objects that are pointers or contain pointers are
# unpickable.
@@ -66,6 +70,7 @@
]:
self.assertRaises(ValueError, lambda: self.dumps(item))
+ @xfail
def test_wchar(self):
pickle.dumps(c_char("x"))
# Issue 5049
diff --git a/lib-python/2.7/ctypes/test/test_python_api.py b/lib-python/2.7/ctypes/test/test_python_api.py
--- a/lib-python/2.7/ctypes/test/test_python_api.py
+++ b/lib-python/2.7/ctypes/test/test_python_api.py
@@ -1,6 +1,6 @@
from ctypes import *
import unittest, sys
-from ctypes.test import is_resource_enabled
+from ctypes.test import is_resource_enabled, xfail
################################################################
# This section should be moved into ctypes\__init__.py, when it's ready.
@@ -17,6 +17,7 @@
class PythonAPITestCase(unittest.TestCase):
+ @xfail
def test_PyString_FromStringAndSize(self):
PyString_FromStringAndSize = pythonapi.PyString_FromStringAndSize
@@ -25,6 +26,7 @@
self.assertEqual(PyString_FromStringAndSize("abcdefghi", 3), "abc")
+ @xfail
def test_PyString_FromString(self):
pythonapi.PyString_FromString.restype = py_object
pythonapi.PyString_FromString.argtypes = (c_char_p,)
@@ -56,6 +58,7 @@
del res
self.assertEqual(grc(42), ref42)
+ @xfail
def test_PyObj_FromPtr(self):
s = "abc def ghi jkl"
ref = grc(s)
@@ -81,6 +84,7 @@
# not enough arguments
self.assertRaises(TypeError, PyOS_snprintf, buf)
+ @xfail
def test_pyobject_repr(self):
self.assertEqual(repr(py_object()), "py_object()")
self.assertEqual(repr(py_object(42)), "py_object(42)")
diff --git a/lib-python/2.7/ctypes/test/test_refcounts.py b/lib-python/2.7/ctypes/test/test_refcounts.py
--- a/lib-python/2.7/ctypes/test/test_refcounts.py
+++ b/lib-python/2.7/ctypes/test/test_refcounts.py
@@ -90,6 +90,7 @@
return a * b * 2
f = proto(func)
+ gc.collect()
a = sys.getrefcount(ctypes.c_int)
f(1, 2)
self.assertEqual(sys.getrefcount(ctypes.c_int), a)
diff --git a/lib-python/2.7/ctypes/test/test_stringptr.py b/lib-python/2.7/ctypes/test/test_stringptr.py
--- a/lib-python/2.7/ctypes/test/test_stringptr.py
+++ b/lib-python/2.7/ctypes/test/test_stringptr.py
@@ -2,11 +2,13 @@
from ctypes import *
import _ctypes_test
+from ctypes.test import xfail
lib = CDLL(_ctypes_test.__file__)
class StringPtrTestCase(unittest.TestCase):
+ @xfail
def test__POINTER_c_char(self):
class X(Structure):
_fields_ = [("str", POINTER(c_char))]
@@ -27,6 +29,7 @@
self.assertRaises(TypeError, setattr, x, "str", "Hello, World")
+ @xfail
def test__c_char_p(self):
class X(Structure):
_fields_ = [("str", c_char_p)]
diff --git a/lib-python/2.7/ctypes/test/test_strings.py b/lib-python/2.7/ctypes/test/test_strings.py
--- a/lib-python/2.7/ctypes/test/test_strings.py
+++ b/lib-python/2.7/ctypes/test/test_strings.py
@@ -31,8 +31,9 @@
buf.value = "Hello, World"
self.assertEqual(buf.value, "Hello, World")
- self.assertRaises(TypeError, setattr, buf, "value", memoryview("Hello, World"))
- self.assertRaises(TypeError, setattr, buf, "value", memoryview("abc"))
+ if test_support.check_impl_detail():
+ self.assertRaises(TypeError, setattr, buf, "value", memoryview("Hello, World"))
+ self.assertRaises(TypeError, setattr, buf, "value", memoryview("abc"))
self.assertRaises(ValueError, setattr, buf, "raw", memoryview("x" * 100))
def test_c_buffer_raw(self, memoryview=memoryview):
@@ -40,7 +41,8 @@
buf.raw = memoryview("Hello, World")
self.assertEqual(buf.value, "Hello, World")
- self.assertRaises(TypeError, setattr, buf, "value", memoryview("abc"))
+ if test_support.check_impl_detail():
+ self.assertRaises(TypeError, setattr, buf, "value", memoryview("abc"))
self.assertRaises(ValueError, setattr, buf, "raw", memoryview("x" * 100))
def test_c_buffer_deprecated(self):
diff --git a/lib-python/2.7/ctypes/test/test_structures.py b/lib-python/2.7/ctypes/test/test_structures.py
--- a/lib-python/2.7/ctypes/test/test_structures.py
+++ b/lib-python/2.7/ctypes/test/test_structures.py
@@ -194,8 +194,8 @@
self.assertEqual(X.b.offset, min(8, longlong_align))
- d = {"_fields_": [("a", "b"),
- ("b", "q")],
+ d = {"_fields_": [("a", c_byte),
+ ("b", c_longlong)],
"_pack_": -1}
self.assertRaises(ValueError, type(Structure), "X", (Structure,), d)
diff --git a/lib-python/2.7/ctypes/test/test_varsize_struct.py b/lib-python/2.7/ctypes/test/test_varsize_struct.py
--- a/lib-python/2.7/ctypes/test/test_varsize_struct.py
+++ b/lib-python/2.7/ctypes/test/test_varsize_struct.py
@@ -1,7 +1,9 @@
from ctypes import *
import unittest
+from ctypes.test import xfail
class VarSizeTest(unittest.TestCase):
+ @xfail
def test_resize(self):
class X(Structure):
_fields_ = [("item", c_int),
diff --git a/lib-python/2.7/ctypes/util.py b/lib-python/2.7/ctypes/util.py
--- a/lib-python/2.7/ctypes/util.py
+++ b/lib-python/2.7/ctypes/util.py
@@ -72,8 +72,8 @@
return name
if os.name == "posix" and sys.platform == "darwin":
- from ctypes.macholib.dyld import dyld_find as _dyld_find
def find_library(name):
+ from ctypes.macholib.dyld import dyld_find as _dyld_find
possible = ['lib%s.dylib' % name,
'%s.dylib' % name,
'%s.framework/%s' % (name, name)]
diff --git a/lib-python/2.7/distutils/command/bdist_wininst.py b/lib-python/2.7/distutils/command/bdist_wininst.py
--- a/lib-python/2.7/distutils/command/bdist_wininst.py
+++ b/lib-python/2.7/distutils/command/bdist_wininst.py
@@ -298,7 +298,8 @@
bitmaplen, # number of bytes in bitmap
)
file.write(header)
- file.write(open(arcname, "rb").read())
+ with open(arcname, "rb") as arcfile:
+ file.write(arcfile.read())
# create_exe()
diff --git a/lib-python/2.7/distutils/command/build_ext.py b/lib-python/2.7/distutils/command/build_ext.py
--- a/lib-python/2.7/distutils/command/build_ext.py
+++ b/lib-python/2.7/distutils/command/build_ext.py
@@ -184,7 +184,7 @@
# the 'libs' directory is for binary installs - we assume that
# must be the *native* platform. But we don't really support
# cross-compiling via a binary install anyway, so we let it go.
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+ self.library_dirs.append(os.path.join(sys.exec_prefix, 'include'))
if self.debug:
self.build_temp = os.path.join(self.build_temp, "Debug")
else:
@@ -192,8 +192,13 @@
# Append the source distribution include and library directories,
# this allows distutils on windows to work in the source tree
- self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
- if MSVC_VERSION == 9:
+ if 0:
+ # pypy has no PC directory
+ self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+ if 1:
+ # pypy has no PCBuild directory
+ pass
+ elif MSVC_VERSION == 9:
# Use the .lib files for the correct architecture
if self.plat_name == 'win32':
suffix = ''
@@ -695,24 +700,14 @@
shared extension. On most platforms, this is just 'ext.libraries';
on Windows and OS/2, we add the Python library (eg. python20.dll).
"""
- # The python library is always needed on Windows. For MSVC, this
- # is redundant, since the library is mentioned in a pragma in
- # pyconfig.h that MSVC groks. The other Windows compilers all seem
- # to need it mentioned explicitly, though, so that's what we do.
- # Append '_d' to the python import library on debug builds.
+ # The python library is always needed on Windows.
if sys.platform == "win32":
- from distutils.msvccompiler import MSVCCompiler
- if not isinstance(self.compiler, MSVCCompiler):
- template = "python%d%d"
- if self.debug:
- template = template + '_d'
- pythonlib = (template %
- (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
- # don't extend ext.libraries, it may be shared with other
- # extensions, it is a reference to the original list
- return ext.libraries + [pythonlib]
- else:
- return ext.libraries
+ template = "python%d%d"
+ pythonlib = (template %
+ (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+ # don't extend ext.libraries, it may be shared with other
+ # extensions, it is a reference to the original list
+ return ext.libraries + [pythonlib]
elif sys.platform == "os2emx":
# EMX/GCC requires the python library explicitly, and I
# believe VACPP does as well (though not confirmed) - AIM Apr01
diff --git a/lib-python/2.7/distutils/command/install.py b/lib-python/2.7/distutils/command/install.py
--- a/lib-python/2.7/distutils/command/install.py
+++ b/lib-python/2.7/distutils/command/install.py
@@ -83,6 +83,13 @@
'scripts': '$userbase/bin',
'data' : '$userbase',
},
+ 'pypy': {
+ 'purelib': '$base/site-packages',
+ 'platlib': '$base/site-packages',
+ 'headers': '$base/include',
+ 'scripts': '$base/bin',
+ 'data' : '$base',
+ },
}
# The keys to an installation scheme; if any new types of files are to be
@@ -467,6 +474,8 @@
def select_scheme (self, name):
# it's the caller's problem if they supply a bad name!
+ if hasattr(sys, 'pypy_version_info'):
+ name = 'pypy'
scheme = INSTALL_SCHEMES[name]
for key in SCHEME_KEYS:
attrname = 'install_' + key
diff --git a/lib-python/2.7/distutils/cygwinccompiler.py b/lib-python/2.7/distutils/cygwinccompiler.py
--- a/lib-python/2.7/distutils/cygwinccompiler.py
+++ b/lib-python/2.7/distutils/cygwinccompiler.py
@@ -75,6 +75,9 @@
elif msc_ver == '1500':
# VS2008 / MSVC 9.0
return ['msvcr90']
+ elif msc_ver == '1600':
+ # VS2010 / MSVC 10.0
+ return ['msvcr100']
else:
raise ValueError("Unknown MS Compiler version %s " % msc_ver)
diff --git a/lib-python/2.7/distutils/msvc9compiler.py b/lib-python/2.7/distutils/msvc9compiler.py
--- a/lib-python/2.7/distutils/msvc9compiler.py
+++ b/lib-python/2.7/distutils/msvc9compiler.py
@@ -648,6 +648,7 @@
temp_manifest = os.path.join(
build_temp,
os.path.basename(output_filename) + ".manifest")
+ ld_args.append('/MANIFEST')
ld_args.append('/MANIFESTFILE:' + temp_manifest)
if extra_preargs:
diff --git a/lib-python/2.7/distutils/spawn.py b/lib-python/2.7/distutils/spawn.py
--- a/lib-python/2.7/distutils/spawn.py
+++ b/lib-python/2.7/distutils/spawn.py
@@ -58,7 +58,6 @@
def _spawn_nt(cmd, search_path=1, verbose=0, dry_run=0):
executable = cmd[0]
- cmd = _nt_quote_args(cmd)
if search_path:
# either we find one or it stays the same
executable = find_executable(executable) or executable
@@ -66,7 +65,8 @@
if not dry_run:
# spawn for NT requires a full path to the .exe
try:
- rc = os.spawnv(os.P_WAIT, executable, cmd)
+ import subprocess
+ rc = subprocess.call(cmd)
except OSError, exc:
# this seems to happen when the command isn't found
raise DistutilsExecError, \
diff --git a/lib-python/2.7/distutils/sysconfig.py b/lib-python/2.7/distutils/sysconfig.py
--- a/lib-python/2.7/distutils/sysconfig.py
+++ b/lib-python/2.7/distutils/sysconfig.py
@@ -9,563 +9,21 @@
Email:
"""
-__revision__ = "$Id$"
+__revision__ = "$Id: sysconfig.py 85358 2010-10-10 09:54:59Z antoine.pitrou $"
-import os
-import re
-import string
import sys
-from distutils.errors import DistutilsPlatformError
-# These are needed in a couple of spots, so just compute them once.
-PREFIX = os.path.normpath(sys.prefix)
-EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
+# The content of this file is redirected from
+# sysconfig_cpython or sysconfig_pypy.
-# Path to the base directory of the project. On Windows the binary may
-# live in project/PCBuild9. If we're dealing with an x64 Windows build,
-# it'll live in project/PCbuild/amd64.
-project_base = os.path.dirname(os.path.abspath(sys.executable))
-if os.name == "nt" and "pcbuild" in project_base[-8:].lower():
- project_base = os.path.abspath(os.path.join(project_base, os.path.pardir))
-# PC/VS7.1
-if os.name == "nt" and "\\pc\\v" in project_base[-10:].lower():
- project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
- os.path.pardir))
-# PC/AMD64
-if os.name == "nt" and "\\pcbuild\\amd64" in project_base[-14:].lower():
- project_base = os.path.abspath(os.path.join(project_base, os.path.pardir,
- os.path.pardir))
+if '__pypy__' in sys.builtin_module_names:
+ from distutils.sysconfig_pypy import *
+ from distutils.sysconfig_pypy import _config_vars # needed by setuptools
+ from distutils.sysconfig_pypy import _variable_rx # read_setup_file()
+else:
+ from distutils.sysconfig_cpython import *
+ from distutils.sysconfig_cpython import _config_vars # needed by setuptools
+ from distutils.sysconfig_cpython import _variable_rx # read_setup_file()
-# python_build: (Boolean) if true, we're either building Python or
-# building an extension with an un-installed Python, so we use
-# different (hard-wired) directories.
-# Setup.local is available for Makefile builds including VPATH builds,
-# Setup.dist is available on Windows
-def _python_build():
- for fn in ("Setup.dist", "Setup.local"):
- if os.path.isfile(os.path.join(project_base, "Modules", fn)):
- return True
- return False
-python_build = _python_build()
-
-def get_python_version():
- """Return a string containing the major and minor Python version,
- leaving off the patchlevel. Sample return values could be '1.5'
- or '2.2'.
- """
- return sys.version[:3]
-
-
-def get_python_inc(plat_specific=0, prefix=None):
- """Return the directory containing installed Python header files.
-
- If 'plat_specific' is false (the default), this is the path to the
- non-platform-specific header files, i.e. Python.h and so on;
- otherwise, this is the path to platform-specific header files
- (namely pyconfig.h).
-
- If 'prefix' is supplied, use it instead of sys.prefix or
- sys.exec_prefix -- i.e., ignore 'plat_specific'.
- """
- if prefix is None:
- prefix = plat_specific and EXEC_PREFIX or PREFIX
-
- if os.name == "posix":
- if python_build:
- buildir = os.path.dirname(sys.executable)
- if plat_specific:
- # python.h is located in the buildir
- inc_dir = buildir
- else:
- # the source dir is relative to the buildir
- srcdir = os.path.abspath(os.path.join(buildir,
- get_config_var('srcdir')))
- # Include is located in the srcdir
- inc_dir = os.path.join(srcdir, "Include")
- return inc_dir
- return os.path.join(prefix, "include", "python" + get_python_version())
- elif os.name == "nt":
- return os.path.join(prefix, "include")
- elif os.name == "os2":
- return os.path.join(prefix, "Include")
- else:
- raise DistutilsPlatformError(
- "I don't know where Python installs its C header files "
- "on platform '%s'" % os.name)
-
-
-def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
- """Return the directory containing the Python library (standard or
- site additions).
-
- If 'plat_specific' is true, return the directory containing
- platform-specific modules, i.e. any module from a non-pure-Python
- module distribution; otherwise, return the platform-shared library
- directory. If 'standard_lib' is true, return the directory
- containing standard Python library modules; otherwise, return the
- directory for site-specific modules.
-
- If 'prefix' is supplied, use it instead of sys.prefix or
- sys.exec_prefix -- i.e., ignore 'plat_specific'.
- """
- if prefix is None:
- prefix = plat_specific and EXEC_PREFIX or PREFIX
-
- if os.name == "posix":
- libpython = os.path.join(prefix,
- "lib", "python" + get_python_version())
- if standard_lib:
- return libpython
- else:
- return os.path.join(libpython, "site-packages")
-
- elif os.name == "nt":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- if get_python_version() < "2.2":
- return prefix
- else:
- return os.path.join(prefix, "Lib", "site-packages")
-
- elif os.name == "os2":
- if standard_lib:
- return os.path.join(prefix, "Lib")
- else:
- return os.path.join(prefix, "Lib", "site-packages")
-
- else:
- raise DistutilsPlatformError(
- "I don't know where Python installs its library "
- "on platform '%s'" % os.name)
-
-
-def customize_compiler(compiler):
- """Do any platform-specific customization of a CCompiler instance.
-
- Mainly needed on Unix, so we can plug in the information that
- varies across Unices and is stored in Python's Makefile.
- """
- if compiler.compiler_type == "unix":
- (cc, cxx, opt, cflags, ccshared, ldshared, so_ext) = \
- get_config_vars('CC', 'CXX', 'OPT', 'CFLAGS',
- 'CCSHARED', 'LDSHARED', 'SO')
-
- if 'CC' in os.environ:
- cc = os.environ['CC']
- if 'CXX' in os.environ:
- cxx = os.environ['CXX']
- if 'LDSHARED' in os.environ:
- ldshared = os.environ['LDSHARED']
- if 'CPP' in os.environ:
- cpp = os.environ['CPP']
- else:
- cpp = cc + " -E" # not always
- if 'LDFLAGS' in os.environ:
- ldshared = ldshared + ' ' + os.environ['LDFLAGS']
- if 'CFLAGS' in os.environ:
- cflags = opt + ' ' + os.environ['CFLAGS']
- ldshared = ldshared + ' ' + os.environ['CFLAGS']
- if 'CPPFLAGS' in os.environ:
- cpp = cpp + ' ' + os.environ['CPPFLAGS']
- cflags = cflags + ' ' + os.environ['CPPFLAGS']
- ldshared = ldshared + ' ' + os.environ['CPPFLAGS']
-
- cc_cmd = cc + ' ' + cflags
- compiler.set_executables(
- preprocessor=cpp,
- compiler=cc_cmd,
- compiler_so=cc_cmd + ' ' + ccshared,
- compiler_cxx=cxx,
- linker_so=ldshared,
- linker_exe=cc)
-
- compiler.shared_lib_extension = so_ext
-
-
-def get_config_h_filename():
- """Return full pathname of installed pyconfig.h file."""
- if python_build:
- if os.name == "nt":
- inc_dir = os.path.join(project_base, "PC")
- else:
- inc_dir = project_base
- else:
- inc_dir = get_python_inc(plat_specific=1)
- if get_python_version() < '2.2':
- config_h = 'config.h'
- else:
- # The name of the config.h file changed in 2.2
- config_h = 'pyconfig.h'
- return os.path.join(inc_dir, config_h)
-
-
-def get_makefile_filename():
- """Return full pathname of installed Makefile from the Python build."""
- if python_build:
- return os.path.join(os.path.dirname(sys.executable), "Makefile")
- lib_dir = get_python_lib(plat_specific=1, standard_lib=1)
- return os.path.join(lib_dir, "config", "Makefile")
-
-
-def parse_config_h(fp, g=None):
- """Parse a config.h-style file.
-
- A dictionary containing name/value pairs is returned. If an
- optional dictionary is passed in as the second argument, it is
- used instead of a new dictionary.
- """
- if g is None:
- g = {}
- define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
- undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
- #
- while 1:
- line = fp.readline()
- if not line:
- break
- m = define_rx.match(line)
- if m:
- n, v = m.group(1, 2)
- try: v = int(v)
- except ValueError: pass
- g[n] = v
- else:
- m = undef_rx.match(line)
- if m:
- g[m.group(1)] = 0
- return g
-
-
-# Regexes needed for parsing Makefile (and similar syntaxes,
-# like old-style Setup files).
-_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
-_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
-_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
-
-def parse_makefile(fn, g=None):
- """Parse a Makefile-style file.
-
- A dictionary containing name/value pairs is returned. If an
- optional dictionary is passed in as the second argument, it is
- used instead of a new dictionary.
- """
- from distutils.text_file import TextFile
- fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)
-
- if g is None:
- g = {}
- done = {}
- notdone = {}
-
- while 1:
- line = fp.readline()
- if line is None: # eof
- break
- m = _variable_rx.match(line)
- if m:
- n, v = m.group(1, 2)
- v = v.strip()
- # `$$' is a literal `$' in make
- tmpv = v.replace('$$', '')
-
- if "$" in tmpv:
- notdone[n] = v
- else:
- try:
- v = int(v)
- except ValueError:
- # insert literal `$'
- done[n] = v.replace('$$', '$')
- else:
- done[n] = v
-
- # do variable interpolation here
- while notdone:
- for name in notdone.keys():
- value = notdone[name]
- m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
- if m:
- n = m.group(1)
- found = True
- if n in done:
- item = str(done[n])
- elif n in notdone:
- # get it on a subsequent round
- found = False
- elif n in os.environ:
- # do it like make: fall back to environment
- item = os.environ[n]
- else:
- done[n] = item = ""
- if found:
- after = value[m.end():]
- value = value[:m.start()] + item + after
- if "$" in after:
- notdone[name] = value
- else:
- try: value = int(value)
- except ValueError:
- done[name] = value.strip()
- else:
- done[name] = value
- del notdone[name]
- else:
- # bogus variable reference; just drop it since we can't deal
- del notdone[name]
-
- fp.close()
-
- # strip spurious spaces
- for k, v in done.items():
- if isinstance(v, str):
- done[k] = v.strip()
-
- # save the results in the global dictionary
- g.update(done)
- return g
-
-
-def expand_makefile_vars(s, vars):
- """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
- 'string' according to 'vars' (a dictionary mapping variable names to
- values). Variables not present in 'vars' are silently expanded to the
- empty string. The variable values in 'vars' should not contain further
- variable expansions; if 'vars' is the output of 'parse_makefile()',
- you're fine. Returns a variable-expanded version of 's'.
- """
-
- # This algorithm does multiple expansion, so if vars['foo'] contains
- # "${bar}", it will expand ${foo} to ${bar}, and then expand
- # ${bar}... and so forth. This is fine as long as 'vars' comes from
- # 'parse_makefile()', which takes care of such expansions eagerly,
- # according to make's variable expansion semantics.
-
- while 1:
- m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
- if m:
- (beg, end) = m.span()
- s = s[0:beg] + vars.get(m.group(1)) + s[end:]
- else:
- break
- return s
-
-
-_config_vars = None
-
-def _init_posix():
- """Initialize the module as appropriate for POSIX systems."""
- g = {}
- # load the installed Makefile:
- try:
- filename = get_makefile_filename()
- parse_makefile(filename, g)
- except IOError, msg:
- my_msg = "invalid Python installation: unable to open %s" % filename
- if hasattr(msg, "strerror"):
- my_msg = my_msg + " (%s)" % msg.strerror
-
- raise DistutilsPlatformError(my_msg)
-
- # load the installed pyconfig.h:
- try:
- filename = get_config_h_filename()
- parse_config_h(file(filename), g)
- except IOError, msg:
- my_msg = "invalid Python installation: unable to open %s" % filename
- if hasattr(msg, "strerror"):
- my_msg = my_msg + " (%s)" % msg.strerror
-
- raise DistutilsPlatformError(my_msg)
-
- # On MacOSX we need to check the setting of the environment variable
- # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
- # it needs to be compatible.
- # If it isn't set we set it to the configure-time value
- if sys.platform == 'darwin' and 'MACOSX_DEPLOYMENT_TARGET' in g:
- cfg_target = g['MACOSX_DEPLOYMENT_TARGET']
- cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
- if cur_target == '':
- cur_target = cfg_target
- os.environ['MACOSX_DEPLOYMENT_TARGET'] = cfg_target
- elif map(int, cfg_target.split('.')) > map(int, cur_target.split('.')):
- my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
- % (cur_target, cfg_target))
- raise DistutilsPlatformError(my_msg)
-
- # On AIX, there are wrong paths to the linker scripts in the Makefile
- # -- these paths are relative to the Python source, but when installed
- # the scripts are in another directory.
- if python_build:
- g['LDSHARED'] = g['BLDSHARED']
-
- elif get_python_version() < '2.1':
- # The following two branches are for 1.5.2 compatibility.
- if sys.platform == 'aix4': # what about AIX 3.x ?
- # Linker script is in the config directory, not in Modules as the
- # Makefile says.
- python_lib = get_python_lib(standard_lib=1)
- ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
- python_exp = os.path.join(python_lib, 'config', 'python.exp')
-
- g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)
-
- elif sys.platform == 'beos':
- # Linker script is in the config directory. In the Makefile it is
- # relative to the srcdir, which after installation no longer makes
- # sense.
- python_lib = get_python_lib(standard_lib=1)
- linkerscript_path = string.split(g['LDSHARED'])[0]
- linkerscript_name = os.path.basename(linkerscript_path)
- linkerscript = os.path.join(python_lib, 'config',
- linkerscript_name)
-
- # XXX this isn't the right place to do this: adding the Python
- # library to the link, if needed, should be in the "build_ext"
- # command. (It's also needed for non-MS compilers on Windows, and
- # it's taken care of for them by the 'build_ext.get_libraries()'
- # method.)
- g['LDSHARED'] = ("%s -L%s/lib -lpython%s" %
- (linkerscript, PREFIX, get_python_version()))
-
- global _config_vars
- _config_vars = g
-
-
-def _init_nt():
- """Initialize the module as appropriate for NT"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXE'] = ".exe"
- g['VERSION'] = get_python_version().replace(".", "")
- g['BINDIR'] = os.path.dirname(os.path.abspath(sys.executable))
-
- global _config_vars
- _config_vars = g
-
-
-def _init_os2():
- """Initialize the module as appropriate for OS/2"""
- g = {}
- # set basic install directories
- g['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
- g['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
-
- # XXX hmmm.. a normal install puts include files here
- g['INCLUDEPY'] = get_python_inc(plat_specific=0)
-
- g['SO'] = '.pyd'
- g['EXE'] = ".exe"
-
- global _config_vars
- _config_vars = g
-
-
-def get_config_vars(*args):
- """With no arguments, return a dictionary of all configuration
- variables relevant for the current platform. Generally this includes
- everything needed to build extensions and install both pure modules and
- extensions. On Unix, this means every variable defined in Python's
- installed Makefile; on Windows and Mac OS it's a much smaller set.
-
- With arguments, return a list of values that result from looking up
- each argument in the configuration variable dictionary.
- """
- global _config_vars
- if _config_vars is None:
- func = globals().get("_init_" + os.name)
- if func:
- func()
- else:
- _config_vars = {}
-
- # Normalized versions of prefix and exec_prefix are handy to have;
- # in fact, these are the standard versions used most places in the
- # Distutils.
- _config_vars['prefix'] = PREFIX
- _config_vars['exec_prefix'] = EXEC_PREFIX
-
- if sys.platform == 'darwin':
- kernel_version = os.uname()[2] # Kernel version (8.4.3)
- major_version = int(kernel_version.split('.')[0])
-
- if major_version < 8:
- # On Mac OS X before 10.4, check if -arch and -isysroot
- # are in CFLAGS or LDFLAGS and remove them if they are.
- # This is needed when building extensions on a 10.3 system
- # using a universal build of python.
- for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
- # a number of derived variables. These need to be
- # patched up as well.
- 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
- flags = _config_vars[key]
- flags = re.sub('-arch\s+\w+\s', ' ', flags)
- flags = re.sub('-isysroot [^ \t]*', ' ', flags)
- _config_vars[key] = flags
-
- else:
-
- # Allow the user to override the architecture flags using
- # an environment variable.
- # NOTE: This name was introduced by Apple in OSX 10.5 and
- # is used by several scripting languages distributed with
- # that OS release.
-
- if 'ARCHFLAGS' in os.environ:
- arch = os.environ['ARCHFLAGS']
- for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
- # a number of derived variables. These need to be
- # patched up as well.
- 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
- flags = _config_vars[key]
- flags = re.sub('-arch\s+\w+\s', ' ', flags)
- flags = flags + ' ' + arch
- _config_vars[key] = flags
-
- # If we're on OSX 10.5 or later and the user tries to
- # compiles an extension using an SDK that is not present
- # on the current machine it is better to not use an SDK
- # than to fail.
- #
- # The major usecase for this is users using a Python.org
- # binary installer on OSX 10.6: that installer uses
- # the 10.4u SDK, but that SDK is not installed by default
- # when you install Xcode.
- #
- m = re.search('-isysroot\s+(\S+)', _config_vars['CFLAGS'])
- if m is not None:
- sdk = m.group(1)
- if not os.path.exists(sdk):
- for key in ('LDFLAGS', 'BASECFLAGS', 'LDSHARED',
- # a number of derived variables. These need to be
- # patched up as well.
- 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
-
- flags = _config_vars[key]
- flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags)
- _config_vars[key] = flags
-
- if args:
- vals = []
- for name in args:
- vals.append(_config_vars.get(name))
- return vals
- else:
- return _config_vars
-
-def get_config_var(name):
- """Return the value of a single variable using the dictionary
- returned by 'get_config_vars()'. Equivalent to
- get_config_vars().get(name)
- """
- return get_config_vars().get(name)
diff --git a/lib-python/modified-2.7/distutils/sysconfig_cpython.py b/lib-python/2.7/distutils/sysconfig_cpython.py
rename from lib-python/modified-2.7/distutils/sysconfig_cpython.py
rename to lib-python/2.7/distutils/sysconfig_cpython.py
diff --git a/lib-python/modified-2.7/distutils/sysconfig_pypy.py b/lib-python/2.7/distutils/sysconfig_pypy.py
rename from lib-python/modified-2.7/distutils/sysconfig_pypy.py
rename to lib-python/2.7/distutils/sysconfig_pypy.py
diff --git a/lib-python/2.7/distutils/tests/test_build_ext.py b/lib-python/2.7/distutils/tests/test_build_ext.py
--- a/lib-python/2.7/distutils/tests/test_build_ext.py
+++ b/lib-python/2.7/distutils/tests/test_build_ext.py
@@ -293,7 +293,7 @@
finally:
os.chdir(old_wd)
self.assertTrue(os.path.exists(so_file))
- self.assertEqual(os.path.splitext(so_file)[-1],
+ self.assertEqual(so_file[so_file.index(os.path.extsep):],
sysconfig.get_config_var('SO'))
so_dir = os.path.dirname(so_file)
self.assertEqual(so_dir, other_tmp_dir)
@@ -302,7 +302,7 @@
cmd.run()
so_file = cmd.get_outputs()[0]
self.assertTrue(os.path.exists(so_file))
- self.assertEqual(os.path.splitext(so_file)[-1],
+ self.assertEqual(so_file[so_file.index(os.path.extsep):],
sysconfig.get_config_var('SO'))
so_dir = os.path.dirname(so_file)
self.assertEqual(so_dir, cmd.build_lib)
diff --git a/lib-python/2.7/distutils/tests/test_install.py b/lib-python/2.7/distutils/tests/test_install.py
--- a/lib-python/2.7/distutils/tests/test_install.py
+++ b/lib-python/2.7/distutils/tests/test_install.py
@@ -2,6 +2,7 @@
import os
import unittest
+from test import test_support
from test.test_support import run_unittest
@@ -40,14 +41,15 @@
expected = os.path.normpath(expected)
self.assertEqual(got, expected)
- libdir = os.path.join(destination, "lib", "python")
- check_path(cmd.install_lib, libdir)
- check_path(cmd.install_platlib, libdir)
- check_path(cmd.install_purelib, libdir)
- check_path(cmd.install_headers,
- os.path.join(destination, "include", "python", "foopkg"))
- check_path(cmd.install_scripts, os.path.join(destination, "bin"))
- check_path(cmd.install_data, destination)
+ if test_support.check_impl_detail():
+ libdir = os.path.join(destination, "lib", "python")
+ check_path(cmd.install_lib, libdir)
+ check_path(cmd.install_platlib, libdir)
+ check_path(cmd.install_purelib, libdir)
+ check_path(cmd.install_headers,
+ os.path.join(destination, "include", "python", "foopkg"))
+ check_path(cmd.install_scripts, os.path.join(destination, "bin"))
+ check_path(cmd.install_data, destination)
def test_suite():
diff --git a/lib-python/2.7/distutils/unixccompiler.py b/lib-python/2.7/distutils/unixccompiler.py
--- a/lib-python/2.7/distutils/unixccompiler.py
+++ b/lib-python/2.7/distutils/unixccompiler.py
@@ -125,7 +125,22 @@
}
if sys.platform[:6] == "darwin":
+ import platform
+ if platform.machine() == 'i386':
+ if platform.architecture()[0] == '32bit':
+ arch = 'i386'
+ else:
+ arch = 'x86_64'
+ else:
+ # just a guess
+ arch = platform.machine()
executables['ranlib'] = ["ranlib"]
+ executables['linker_so'] += ['-undefined', 'dynamic_lookup']
+
+ for k, v in executables.iteritems():
+ if v and v[0] == 'cc':
+ v += ['-arch', arch]
+
# Needed for the filename generation methods provided by the base
# class, CCompiler. NB. whoever instantiates/uses a particular
@@ -309,7 +324,7 @@
# On OSX users can specify an alternate SDK using
# '-isysroot', calculate the SDK root if it is specified
# (and use it further on)
- cflags = sysconfig.get_config_var('CFLAGS')
+ cflags = sysconfig.get_config_var('CFLAGS') or ''
m = re.search(r'-isysroot\s+(\S+)', cflags)
if m is None:
sysroot = '/'
diff --git a/lib-python/2.7/heapq.py b/lib-python/2.7/heapq.py
--- a/lib-python/2.7/heapq.py
+++ b/lib-python/2.7/heapq.py
@@ -193,6 +193,8 @@
Equivalent to: sorted(iterable, reverse=True)[:n]
"""
+ if n < 0: # for consistency with the c impl
+ return []
it = iter(iterable)
result = list(islice(it, n))
if not result:
@@ -209,6 +211,8 @@
Equivalent to: sorted(iterable)[:n]
"""
+ if n < 0: # for consistency with the c impl
+ return []
if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
# For smaller values of n, the bisect method is faster than a minheap.
# It is also memory efficient, consuming only n elements of space.
diff --git a/lib-python/2.7/httplib.py b/lib-python/2.7/httplib.py
--- a/lib-python/2.7/httplib.py
+++ b/lib-python/2.7/httplib.py
@@ -1024,7 +1024,11 @@
kwds["buffering"] = True;
response = self.response_class(*args, **kwds)
- response.begin()
+ try:
+ response.begin()
+ except:
+ response.close()
+ raise
assert response.will_close != _UNKNOWN
self.__state = _CS_IDLE
diff --git a/lib-python/2.7/idlelib/Delegator.py b/lib-python/2.7/idlelib/Delegator.py
--- a/lib-python/2.7/idlelib/Delegator.py
+++ b/lib-python/2.7/idlelib/Delegator.py
@@ -12,6 +12,14 @@
self.__cache[name] = attr
return attr
+ def __nonzero__(self):
+ # this is needed for PyPy: else, if self.delegate is None, the
+ # __getattr__ above picks NoneType.__nonzero__, which returns
+ # False. Thus, bool(Delegator()) is False as well, but it's not what
+ # we want. On CPython, bool(Delegator()) is True because NoneType
+ # does not have __nonzero__
+ return True
+
def resetcache(self):
for key in self.__cache.keys():
try:
diff --git a/lib-python/2.7/inspect.py b/lib-python/2.7/inspect.py
--- a/lib-python/2.7/inspect.py
+++ b/lib-python/2.7/inspect.py
@@ -746,8 +746,15 @@
'varargs' and 'varkw' are the names of the * and ** arguments or None."""
if not iscode(co):
- raise TypeError('{!r} is not a code object'.format(co))
+ if hasattr(len, 'func_code') and type(co) is type(len.func_code):
+ # PyPy extension: built-in function objects have a func_code too.
+ # There is no co_code on it, but co_argcount and co_varnames and
+ # co_flags are present.
+ pass
+ else:
+ raise TypeError('{!r} is not a code object'.format(co))
+ code = getattr(co, 'co_code', '')
nargs = co.co_argcount
names = co.co_varnames
args = list(names[:nargs])
@@ -757,12 +764,12 @@
for i in range(nargs):
if args[i][:1] in ('', '.'):
stack, remain, count = [], [], []
- while step < len(co.co_code):
- op = ord(co.co_code[step])
+ while step < len(code):
+ op = ord(code[step])
step = step + 1
if op >= dis.HAVE_ARGUMENT:
opname = dis.opname[op]
- value = ord(co.co_code[step]) + ord(co.co_code[step+1])*256
+ value = ord(code[step]) + ord(code[step+1])*256
step = step + 2
if opname in ('UNPACK_TUPLE', 'UNPACK_SEQUENCE'):
remain.append(value)
@@ -809,7 +816,9 @@
if ismethod(func):
func = func.im_func
- if not isfunction(func):
+ if not (isfunction(func) or
+ isbuiltin(func) and hasattr(func, 'func_code')):
+ # PyPy extension: this works for built-in functions too
raise TypeError('{!r} is not a Python function'.format(func))
args, varargs, varkw = getargs(func.func_code)
return ArgSpec(args, varargs, varkw, func.func_defaults)
@@ -949,7 +958,7 @@
raise TypeError('%s() takes exactly 0 arguments '
'(%d given)' % (f_name, num_total))
else:
- raise TypeError('%s() takes no arguments (%d given)' %
+ raise TypeError('%s() takes no argument (%d given)' %
(f_name, num_total))
for arg in args:
if isinstance(arg, str) and arg in named:
diff --git a/lib-python/2.7/json/encoder.py b/lib-python/2.7/json/encoder.py
--- a/lib-python/2.7/json/encoder.py
+++ b/lib-python/2.7/json/encoder.py
@@ -2,14 +2,7 @@
"""
import re
-try:
- from _json import encode_basestring_ascii as c_encode_basestring_ascii
-except ImportError:
- c_encode_basestring_ascii = None
-try:
- from _json import make_encoder as c_make_encoder
-except ImportError:
- c_make_encoder = None
+from __pypy__.builders import StringBuilder, UnicodeBuilder
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
@@ -24,23 +17,22 @@
'\t': '\\t',
}
for i in range(0x20):
- ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
- #ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
+ ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
-def encode_basestring(s):
+def raw_encode_basestring(s):
"""Return a JSON representation of a Python string
"""
def replace(match):
return ESCAPE_DCT[match.group(0)]
- return '"' + ESCAPE.sub(replace, s) + '"'
+ return ESCAPE.sub(replace, s)
+encode_basestring = lambda s: '"' + raw_encode_basestring(s) + '"'
-
-def py_encode_basestring_ascii(s):
+def raw_encode_basestring_ascii(s):
"""Return an ASCII-only JSON representation of a Python string
"""
@@ -53,21 +45,19 @@
except KeyError:
n = ord(s)
if n < 0x10000:
- return '\\u{0:04x}'.format(n)
- #return '\\u%04x' % (n,)
+ return '\\u%04x' % (n,)
else:
# surrogate pair
n -= 0x10000
s1 = 0xd800 | ((n >> 10) & 0x3ff)
s2 = 0xdc00 | (n & 0x3ff)
- return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
- #return '\\u%04x\\u%04x' % (s1, s2)
- return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+ return '\\u%04x\\u%04x' % (s1, s2)
+ if ESCAPE_ASCII.search(s):
+ return str(ESCAPE_ASCII.sub(replace, s))
+ return s
+encode_basestring_ascii = lambda s: '"' + raw_encode_basestring_ascii(s) + '"'
-encode_basestring_ascii = (
- c_encode_basestring_ascii or py_encode_basestring_ascii)
-
class JSONEncoder(object):
"""Extensible JSON encoder for Python data structures.
@@ -147,6 +137,17 @@
self.skipkeys = skipkeys
self.ensure_ascii = ensure_ascii
+ if ensure_ascii:
+ self.encoder = raw_encode_basestring_ascii
+ else:
+ self.encoder = raw_encode_basestring
+ if encoding != 'utf-8':
+ orig_encoder = self.encoder
+ def encoder(o):
+ if isinstance(o, str):
+ o = o.decode(encoding)
+ return orig_encoder(o)
+ self.encoder = encoder
self.check_circular = check_circular
self.allow_nan = allow_nan
self.sort_keys = sort_keys
@@ -184,24 +185,126 @@
'{"foo": ["bar", "baz"]}'
"""
- # This is for extremely simple cases and benchmarks.
+ if self.check_circular:
+ markers = {}
+ else:
+ markers = None
+ if self.ensure_ascii:
+ builder = StringBuilder()
+ else:
+ builder = UnicodeBuilder()
+ self._encode(o, markers, builder, 0)
+ return builder.build()
+
+ def _emit_indent(self, builder, _current_indent_level):
+ if self.indent is not None:
+ _current_indent_level += 1
+ newline_indent = '\n' + (' ' * (self.indent *
+ _current_indent_level))
+ separator = self.item_separator + newline_indent
+ builder.append(newline_indent)
+ else:
+ separator = self.item_separator
+ return separator, _current_indent_level
+
+ def _emit_unindent(self, builder, _current_indent_level):
+ if self.indent is not None:
+ builder.append('\n')
+ builder.append(' ' * (self.indent * (_current_indent_level - 1)))
+
+ def _encode(self, o, markers, builder, _current_indent_level):
if isinstance(o, basestring):
- if isinstance(o, str):
- _encoding = self.encoding
- if (_encoding is not None
- and not (_encoding == 'utf-8')):
- o = o.decode(_encoding)
- if self.ensure_ascii:
- return encode_basestring_ascii(o)
+ builder.append('"')
+ builder.append(self.encoder(o))
+ builder.append('"')
+ elif o is None:
+ builder.append('null')
+ elif o is True:
+ builder.append('true')
+ elif o is False:
+ builder.append('false')
+ elif isinstance(o, (int, long)):
+ builder.append(str(o))
+ elif isinstance(o, float):
+ builder.append(self._floatstr(o))
+ elif isinstance(o, (list, tuple)):
+ if not o:
+ builder.append('[]')
+ return
+ self._encode_list(o, markers, builder, _current_indent_level)
+ elif isinstance(o, dict):
+ if not o:
+ builder.append('{}')
+ return
+ self._encode_dict(o, markers, builder, _current_indent_level)
+ else:
+ self._mark_markers(markers, o)
+ res = self.default(o)
+ self._encode(res, markers, builder, _current_indent_level)
+ self._remove_markers(markers, o)
+ return res
+
+ def _encode_list(self, l, markers, builder, _current_indent_level):
+ self._mark_markers(markers, l)
+ builder.append('[')
+ first = True
+ separator, _current_indent_level = self._emit_indent(builder,
+ _current_indent_level)
+ for elem in l:
+ if first:
+ first = False
else:
- return encode_basestring(o)
- # This doesn't pass the iterator directly to ''.join() because the
- # exceptions aren't as detailed. The list call should be roughly
- # equivalent to the PySequence_Fast that ''.join() would do.
- chunks = self.iterencode(o, _one_shot=True)
- if not isinstance(chunks, (list, tuple)):
- chunks = list(chunks)
- return ''.join(chunks)
+ builder.append(separator)
+ self._encode(elem, markers, builder, _current_indent_level)
+ del elem # XXX grumble
+ self._emit_unindent(builder, _current_indent_level)
+ builder.append(']')
+ self._remove_markers(markers, l)
+
+ def _encode_dict(self, d, markers, builder, _current_indent_level):
+ self._mark_markers(markers, d)
+ first = True
+ builder.append('{')
+ separator, _current_indent_level = self._emit_indent(builder,
+ _current_indent_level)
+ if self.sort_keys:
+ items = sorted(d.items(), key=lambda kv: kv[0])
+ else:
+ items = d.iteritems()
+
+ for key, v in items:
+ if first:
+ first = False
+ else:
+ builder.append(separator)
+ if isinstance(key, basestring):
+ pass
+ # JavaScript is weakly typed for these, so it makes sense to
+ # also allow them. Many encoders seem to do something like this.
+ elif isinstance(key, float):
+ key = self._floatstr(key)
+ elif key is True:
+ key = 'true'
+ elif key is False:
+ key = 'false'
+ elif key is None:
+ key = 'null'
+ elif isinstance(key, (int, long)):
+ key = str(key)
+ elif self.skipkeys:
+ continue
+ else:
+ raise TypeError("key " + repr(key) + " is not a string")
+ builder.append('"')
+ builder.append(self.encoder(key))
+ builder.append('"')
+ builder.append(self.key_separator)
+ self._encode(v, markers, builder, _current_indent_level)
+ del key
+ del v # XXX grumble
+ self._emit_unindent(builder, _current_indent_level)
+ builder.append('}')
+ self._remove_markers(markers, d)
def iterencode(self, o, _one_shot=False):
"""Encode the given object and yield each string
@@ -217,86 +320,54 @@
markers = {}
else:
markers = None
- if self.ensure_ascii:
- _encoder = encode_basestring_ascii
+ return self._iterencode(o, markers, 0)
+
+ def _floatstr(self, o):
+ # Check for specials. Note that this type of test is processor
+ # and/or platform-specific, so do tests which don't depend on the
+ # internals.
+
+ if o != o:
+ text = 'NaN'
+ elif o == INFINITY:
+ text = 'Infinity'
+ elif o == -INFINITY:
+ text = '-Infinity'
else:
- _encoder = encode_basestring
- if self.encoding != 'utf-8':
- def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
- if isinstance(o, str):
- o = o.decode(_encoding)
- return _orig_encoder(o)
+ return FLOAT_REPR(o)
- def floatstr(o, allow_nan=self.allow_nan,
- _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
- # Check for specials. Note that this type of test is processor
- # and/or platform-specific, so do tests which don't depend on the
- # internals.
+ if not self.allow_nan:
+ raise ValueError(
+ "Out of range float values are not JSON compliant: " +
+ repr(o))
- if o != o:
- text = 'NaN'
- elif o == _inf:
- text = 'Infinity'
- elif o == _neginf:
- text = '-Infinity'
- else:
- return _repr(o)
+ return text
- if not allow_nan:
- raise ValueError(
- "Out of range float values are not JSON compliant: " +
- repr(o))
+ def _mark_markers(self, markers, o):
+ if markers is not None:
+ if id(o) in markers:
+ raise ValueError("Circular reference detected")
+ markers[id(o)] = None
- return text
+ def _remove_markers(self, markers, o):
+ if markers is not None:
+ del markers[id(o)]
-
- if (_one_shot and c_make_encoder is not None
- and self.indent is None and not self.sort_keys):
- _iterencode = c_make_encoder(
- markers, self.default, _encoder, self.indent,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, self.allow_nan)
- else:
- _iterencode = _make_iterencode(
- markers, self.default, _encoder, self.indent, floatstr,
- self.key_separator, self.item_separator, self.sort_keys,
- self.skipkeys, _one_shot)
- return _iterencode(o, 0)
-
-def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
- _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
- ## HACK: hand-optimized bytecode; turn globals into locals
- ValueError=ValueError,
- basestring=basestring,
- dict=dict,
- float=float,
- id=id,
- int=int,
- isinstance=isinstance,
- list=list,
- long=long,
- str=str,
- tuple=tuple,
- ):
-
- def _iterencode_list(lst, _current_indent_level):
+ def _iterencode_list(self, lst, markers, _current_indent_level):
if not lst:
yield '[]'
return
- if markers is not None:
- markerid = id(lst)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = lst
+ self._mark_markers(markers, lst)
buf = '['
- if _indent is not None:
+ if self.indent is not None:
_current_indent_level += 1
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
- separator = _item_separator + newline_indent
+ newline_indent = '\n' + (' ' * (self.indent *
+ _current_indent_level))
+ separator = self.item_separator + newline_indent
buf += newline_indent
else:
newline_indent = None
- separator = _item_separator
+ separator = self.item_separator
first = True
for value in lst:
if first:
@@ -304,7 +375,7 @@
else:
buf = separator
if isinstance(value, basestring):
- yield buf + _encoder(value)
+ yield buf + '"' + self.encoder(value) + '"'
elif value is None:
yield buf + 'null'
elif value is True:
@@ -314,44 +385,43 @@
elif isinstance(value, (int, long)):
yield buf + str(value)
elif isinstance(value, float):
- yield buf + _floatstr(value)
+ yield buf + self._floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
+ chunks = self._iterencode_list(value, markers,
+ _current_indent_level)
elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
+ chunks = self._iterencode_dict(value, markers,
+ _current_indent_level)
else:
- chunks = _iterencode(value, _current_indent_level)
+ chunks = self._iterencode(value, markers,
+ _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
- yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '\n' + (' ' * (self.indent * _current_indent_level))
yield ']'
- if markers is not None:
- del markers[markerid]
+ self._remove_markers(markers, lst)
- def _iterencode_dict(dct, _current_indent_level):
+ def _iterencode_dict(self, dct, markers, _current_indent_level):
if not dct:
yield '{}'
return
- if markers is not None:
- markerid = id(dct)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = dct
+ self._mark_markers(markers, dct)
yield '{'
- if _indent is not None:
+ if self.indent is not None:
_current_indent_level += 1
- newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
- item_separator = _item_separator + newline_indent
+ newline_indent = '\n' + (' ' * (self.indent *
+ _current_indent_level))
+ item_separator = self.item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
- item_separator = _item_separator
+ item_separator = self.item_separator
first = True
- if _sort_keys:
+ if self.sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.iteritems()
@@ -361,7 +431,7 @@
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
- key = _floatstr(key)
+ key = self._floatstr(key)
elif key is True:
key = 'true'
elif key is False:
@@ -370,7 +440,7 @@
key = 'null'
elif isinstance(key, (int, long)):
key = str(key)
- elif _skipkeys:
+ elif self.skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
@@ -378,10 +448,10 @@
first = False
else:
yield item_separator
- yield _encoder(key)
- yield _key_separator
+ yield '"' + self.encoder(key) + '"'
+ yield self.key_separator
if isinstance(value, basestring):
- yield _encoder(value)
+ yield '"' + self.encoder(value) + '"'
elif value is None:
yield 'null'
elif value is True:
@@ -391,26 +461,28 @@
elif isinstance(value, (int, long)):
yield str(value)
elif isinstance(value, float):
- yield _floatstr(value)
+ yield self._floatstr(value)
else:
if isinstance(value, (list, tuple)):
- chunks = _iterencode_list(value, _current_indent_level)
+ chunks = self._iterencode_list(value, markers,
+ _current_indent_level)
elif isinstance(value, dict):
- chunks = _iterencode_dict(value, _current_indent_level)
+ chunks = self._iterencode_dict(value, markers,
+ _current_indent_level)
else:
- chunks = _iterencode(value, _current_indent_level)
+ chunks = self._iterencode(value, markers,
+ _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
- yield '\n' + (' ' * (_indent * _current_indent_level))
+ yield '\n' + (' ' * (self.indent * _current_indent_level))
yield '}'
- if markers is not None:
- del markers[markerid]
+ self._remove_markers(markers, dct)
- def _iterencode(o, _current_indent_level):
+ def _iterencode(self, o, markers, _current_indent_level):
if isinstance(o, basestring):
- yield _encoder(o)
+ yield '"' + self.encoder(o) + '"'
elif o is None:
yield 'null'
elif o is True:
@@ -420,23 +492,19 @@
elif isinstance(o, (int, long)):
yield str(o)
elif isinstance(o, float):
- yield _floatstr(o)
+ yield self._floatstr(o)
elif isinstance(o, (list, tuple)):
- for chunk in _iterencode_list(o, _current_indent_level):
+ for chunk in self._iterencode_list(o, markers,
+ _current_indent_level):
yield chunk
elif isinstance(o, dict):
- for chunk in _iterencode_dict(o, _current_indent_level):
+ for chunk in self._iterencode_dict(o, markers,
+ _current_indent_level):
yield chunk
else:
- if markers is not None:
- markerid = id(o)
- if markerid in markers:
- raise ValueError("Circular reference detected")
- markers[markerid] = o
- o = _default(o)
- for chunk in _iterencode(o, _current_indent_level):
+ self._mark_markers(markers, o)
+ obj = self.default(o)
+ for chunk in self._iterencode(obj, markers,
+ _current_indent_level):
yield chunk
- if markers is not None:
- del markers[markerid]
-
- return _iterencode
+ self._remove_markers(markers, o)
diff --git a/lib-python/2.7/json/tests/test_unicode.py b/lib-python/2.7/json/tests/test_unicode.py
--- a/lib-python/2.7/json/tests/test_unicode.py
+++ b/lib-python/2.7/json/tests/test_unicode.py
@@ -80,6 +80,12 @@
# Issue 10038.
self.assertEqual(type(self.loads('"foo"')), unicode)
+ def test_encode_not_utf_8(self):
+ self.assertEqual(self.dumps('\xb1\xe6', encoding='iso8859-2'),
+ '"\\u0105\\u0107"')
+ self.assertEqual(self.dumps(['\xb1\xe6'], encoding='iso8859-2'),
+ '["\\u0105\\u0107"]')
+
class TestPyUnicode(TestUnicode, PyTest): pass
class TestCUnicode(TestUnicode, CTest): pass
diff --git a/lib-python/2.7/multiprocessing/forking.py b/lib-python/2.7/multiprocessing/forking.py
--- a/lib-python/2.7/multiprocessing/forking.py
+++ b/lib-python/2.7/multiprocessing/forking.py
@@ -73,15 +73,12 @@
return getattr, (m.im_self, m.im_func.func_name)
ForkingPickler.register(type(ForkingPickler.save), _reduce_method)
-def _reduce_method_descriptor(m):
- return getattr, (m.__objclass__, m.__name__)
-ForkingPickler.register(type(list.append), _reduce_method_descriptor)
-ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
-
-#def _reduce_builtin_function_or_method(m):
-# return getattr, (m.__self__, m.__name__)
-#ForkingPickler.register(type(list().append), _reduce_builtin_function_or_method)
-#ForkingPickler.register(type(int().__add__), _reduce_builtin_function_or_method)
+if type(list.append) is not type(ForkingPickler.save):
+ # Some python implementations have unbound methods even for builtin types
+ def _reduce_method_descriptor(m):
+ return getattr, (m.__objclass__, m.__name__)
+ ForkingPickler.register(type(list.append), _reduce_method_descriptor)
+ ForkingPickler.register(type(int.__add__), _reduce_method_descriptor)
try:
from functools import partial
diff --git a/lib-python/2.7/opcode.py b/lib-python/2.7/opcode.py
--- a/lib-python/2.7/opcode.py
+++ b/lib-python/2.7/opcode.py
@@ -1,4 +1,3 @@
-
"""
opcode module - potentially shared between dis and other modules which
operate on bytecodes (e.g. peephole optimizers).
@@ -189,4 +188,10 @@
def_op('SET_ADD', 146)
def_op('MAP_ADD', 147)
+# pypy modification, experimental bytecode
+def_op('LOOKUP_METHOD', 201) # Index in name list
+hasname.append(201)
+def_op('CALL_METHOD', 202) # #args not including 'self'
+def_op('BUILD_LIST_FROM_ARG', 203)
+
del def_op, name_op, jrel_op, jabs_op
diff --git a/lib-python/2.7/pickle.py b/lib-python/2.7/pickle.py
--- a/lib-python/2.7/pickle.py
+++ b/lib-python/2.7/pickle.py
@@ -168,7 +168,7 @@
# Pickling machinery
-class Pickler:
+class Pickler(object):
def __init__(self, file, protocol=None):
"""This takes a file-like object for writing a pickle data stream.
@@ -638,6 +638,10 @@
# else tmp is empty, and we're done
def save_dict(self, obj):
+ modict_saver = self._pickle_moduledict(obj)
+ if modict_saver is not None:
+ return self.save_reduce(*modict_saver)
+
write = self.write
if self.bin:
@@ -687,6 +691,29 @@
write(SETITEM)
# else tmp is empty, and we're done
+ def _pickle_moduledict(self, obj):
+ # save module dictionary as "getattr(module, '__dict__')"
+
+ # build index of module dictionaries
+ try:
+ modict = self.module_dict_ids
+ except AttributeError:
+ modict = {}
+ from sys import modules
+ for mod in modules.values():
+ if isinstance(mod, ModuleType):
+ modict[id(mod.__dict__)] = mod
+ self.module_dict_ids = modict
+
+ thisid = id(obj)
+ try:
+ themodule = modict[thisid]
+ except KeyError:
+ return None
+ from __builtin__ import getattr
+ return getattr, (themodule, '__dict__')
+
+
def save_inst(self, obj):
cls = obj.__class__
@@ -727,6 +754,29 @@
dispatch[InstanceType] = save_inst
+ def save_function(self, obj):
+ try:
+ return self.save_global(obj)
+ except PicklingError, e:
+ pass
+ # Check copy_reg.dispatch_table
+ reduce = dispatch_table.get(type(obj))
+ if reduce:
+ rv = reduce(obj)
+ else:
+ # Check for a __reduce_ex__ method, fall back to __reduce__
+ reduce = getattr(obj, "__reduce_ex__", None)
+ if reduce:
+ rv = reduce(self.proto)
+ else:
+ reduce = getattr(obj, "__reduce__", None)
+ if reduce:
+ rv = reduce()
+ else:
+ raise e
+ return self.save_reduce(obj=obj, *rv)
+ dispatch[FunctionType] = save_function
+
def save_global(self, obj, name=None, pack=struct.pack):
write = self.write
memo = self.memo
@@ -768,7 +818,6 @@
self.memoize(obj)
dispatch[ClassType] = save_global
- dispatch[FunctionType] = save_global
dispatch[BuiltinFunctionType] = save_global
dispatch[TypeType] = save_global
@@ -824,7 +873,7 @@
# Unpickling machinery
-class Unpickler:
+class Unpickler(object):
def __init__(self, file):
"""This takes a file-like object for reading a pickle data stream.
diff --git a/lib-python/2.7/pkgutil.py b/lib-python/2.7/pkgutil.py
--- a/lib-python/2.7/pkgutil.py
+++ b/lib-python/2.7/pkgutil.py
@@ -244,7 +244,8 @@
return mod
def get_data(self, pathname):
- return open(pathname, "rb").read()
+ with open(pathname, "rb") as f:
+ return f.read()
def _reopen(self):
if self.file and self.file.closed:
diff --git a/lib-python/2.7/pprint.py b/lib-python/2.7/pprint.py
--- a/lib-python/2.7/pprint.py
+++ b/lib-python/2.7/pprint.py
@@ -144,7 +144,7 @@
return
r = getattr(typ, "__repr__", None)
- if issubclass(typ, dict) and r is dict.__repr__:
+ if issubclass(typ, dict) and r == dict.__repr__:
write('{')
if self._indent_per_level > 1:
write((self._indent_per_level - 1) * ' ')
@@ -173,10 +173,10 @@
write('}')
return
- if ((issubclass(typ, list) and r is list.__repr__) or
- (issubclass(typ, tuple) and r is tuple.__repr__) or
- (issubclass(typ, set) and r is set.__repr__) or
- (issubclass(typ, frozenset) and r is frozenset.__repr__)
+ if ((issubclass(typ, list) and r == list.__repr__) or
+ (issubclass(typ, tuple) and r == tuple.__repr__) or
+ (issubclass(typ, set) and r == set.__repr__) or
+ (issubclass(typ, frozenset) and r == frozenset.__repr__)
):
length = _len(object)
if issubclass(typ, list):
@@ -266,7 +266,7 @@
return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False
r = getattr(typ, "__repr__", None)
- if issubclass(typ, dict) and r is dict.__repr__:
+ if issubclass(typ, dict) and r == dict.__repr__:
if not object:
return "{}", True, False
objid = _id(object)
@@ -291,8 +291,8 @@
del context[objid]
return "{%s}" % _commajoin(components), readable, recursive
- if (issubclass(typ, list) and r is list.__repr__) or \
- (issubclass(typ, tuple) and r is tuple.__repr__):
+ if (issubclass(typ, list) and r == list.__repr__) or \
+ (issubclass(typ, tuple) and r == tuple.__repr__):
if issubclass(typ, list):
if not object:
return "[]", True, False
diff --git a/lib-python/2.7/pydoc.py b/lib-python/2.7/pydoc.py
--- a/lib-python/2.7/pydoc.py
+++ b/lib-python/2.7/pydoc.py
@@ -623,7 +623,9 @@
head, '#ffffff', '#7799ee',
'index
' + filelink + docloc)
- modules = inspect.getmembers(object, inspect.ismodule)
+ def isnonbuiltinmodule(obj):
+ return inspect.ismodule(obj) and obj is not __builtin__
+ modules = inspect.getmembers(object, isnonbuiltinmodule)
classes, cdict = [], {}
for key, value in inspect.getmembers(object, inspect.isclass):
diff --git a/lib-python/2.7/random.py b/lib-python/2.7/random.py
--- a/lib-python/2.7/random.py
+++ b/lib-python/2.7/random.py
@@ -41,7 +41,6 @@
from __future__ import division
from warnings import warn as _warn
-from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil
from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
from os import urandom as _urandom
@@ -240,8 +239,7 @@
return self.randrange(a, b+1)
- def _randbelow(self, n, _log=_log, int=int, _maxwidth=1L< n-1 > 2**(k-2)
r = getrandbits(k)
while r >= n:
diff --git a/lib-python/2.7/site.py b/lib-python/2.7/site.py
--- a/lib-python/2.7/site.py
+++ b/lib-python/2.7/site.py
@@ -75,7 +75,6 @@
USER_SITE = None
USER_BASE = None
-
def makepath(*paths):
dir = os.path.join(*paths)
try:
@@ -91,7 +90,10 @@
if hasattr(m, '__loader__'):
continue # don't mess with a PEP 302-supplied __file__
try:
- m.__file__ = os.path.abspath(m.__file__)
+ prev = m.__file__
+ new = os.path.abspath(m.__file__)
+ if prev != new:
+ m.__file__ = new
except (AttributeError, OSError):
pass
@@ -289,6 +291,7 @@
will find its `site-packages` subdirectory depending on the system
environment, and will return a list of full paths.
"""
+ is_pypy = '__pypy__' in sys.builtin_module_names
sitepackages = []
seen = set()
@@ -299,6 +302,10 @@
if sys.platform in ('os2emx', 'riscos'):
sitepackages.append(os.path.join(prefix, "Lib", "site-packages"))
+ elif is_pypy:
+ from distutils.sysconfig import get_python_lib
+ sitedir = get_python_lib(standard_lib=False, prefix=prefix)
+ sitepackages.append(sitedir)
elif os.sep == '/':
sitepackages.append(os.path.join(prefix, "lib",
"python" + sys.version[:3],
@@ -435,22 +442,33 @@
if key == 'q':
break
+##def setcopyright():
+## """Set 'copyright' and 'credits' in __builtin__"""
+## __builtin__.copyright = _Printer("copyright", sys.copyright)
+## if sys.platform[:4] == 'java':
+## __builtin__.credits = _Printer(
+## "credits",
+## "Jython is maintained by the Jython developers (www.jython.org).")
+## else:
+## __builtin__.credits = _Printer("credits", """\
+## Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+## for supporting Python development. See www.python.org for more information.""")
+## here = os.path.dirname(os.__file__)
+## __builtin__.license = _Printer(
+## "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+## ["LICENSE.txt", "LICENSE"],
+## [os.path.join(here, os.pardir), here, os.curdir])
+
def setcopyright():
- """Set 'copyright' and 'credits' in __builtin__"""
+ # XXX this is the PyPy-specific version. Should be unified with the above.
__builtin__.copyright = _Printer("copyright", sys.copyright)
- if sys.platform[:4] == 'java':
- __builtin__.credits = _Printer(
- "credits",
- "Jython is maintained by the Jython developers (www.jython.org).")
- else:
- __builtin__.credits = _Printer("credits", """\
- Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
- for supporting Python development. See www.python.org for more information.""")
- here = os.path.dirname(os.__file__)
+ __builtin__.credits = _Printer(
+ "credits",
+ "PyPy is maintained by the PyPy developers: http://pypy.org/")
__builtin__.license = _Printer(
- "license", "See http://www.python.org/%.3s/license.html" % sys.version,
- ["LICENSE.txt", "LICENSE"],
- [os.path.join(here, os.pardir), here, os.curdir])
+ "license",
+ "See https://bitbucket.org/pypy/pypy/src/default/LICENSE")
+
class _Helper(object):
@@ -476,7 +494,7 @@
if sys.platform == 'win32':
import locale, codecs
enc = locale.getdefaultlocale()[1]
- if enc.startswith('cp'): # "cp***" ?
+ if enc is not None and enc.startswith('cp'): # "cp***" ?
try:
codecs.lookup(enc)
except LookupError:
@@ -532,9 +550,18 @@
"'import usercustomize' failed; use -v for traceback"
+def import_builtin_stuff():
+ """PyPy specific: pre-import a few built-in modules, because
+ some programs actually rely on them to be in sys.modules :-("""
+ import exceptions
+ if 'zipimport' in sys.builtin_module_names:
+ import zipimport
+
+
def main():
global ENABLE_USER_SITE
+ import_builtin_stuff()
abs__file__()
known_paths = removeduppaths()
if (os.name == "posix" and sys.path and
diff --git a/lib-python/2.7/socket.py b/lib-python/2.7/socket.py
--- a/lib-python/2.7/socket.py
+++ b/lib-python/2.7/socket.py
@@ -46,8 +46,6 @@
import _socket
from _socket import *
-from functools import partial
-from types import MethodType
try:
import _ssl
@@ -159,11 +157,6 @@
if sys.platform == "riscos":
_socketmethods = _socketmethods + ('sleeptaskw',)
-# All the method names that must be delegated to either the real socket
-# object or the _closedsocket object.
-_delegate_methods = ("recv", "recvfrom", "recv_into", "recvfrom_into",
- "send", "sendto")
-
class _closedsocket(object):
__slots__ = []
def _dummy(*args):
@@ -180,22 +173,43 @@
__doc__ = _realsocket.__doc__
- __slots__ = ["_sock", "__weakref__"] + list(_delegate_methods)
-
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
if _sock is None:
_sock = _realsocket(family, type, proto)
self._sock = _sock
- for method in _delegate_methods:
- setattr(self, method, getattr(_sock, method))
+ self._io_refs = 0
+ self._closed = False
- def close(self, _closedsocket=_closedsocket,
- _delegate_methods=_delegate_methods, setattr=setattr):
+ def send(self, data, flags=0):
+ return self._sock.send(data, flags=flags)
+ send.__doc__ = _realsocket.send.__doc__
+
+ def recv(self, buffersize, flags=0):
+ return self._sock.recv(buffersize, flags=flags)
+ recv.__doc__ = _realsocket.recv.__doc__
+
+ def recv_into(self, buffer, nbytes=0, flags=0):
+ return self._sock.recv_into(buffer, nbytes=nbytes, flags=flags)
+ recv_into.__doc__ = _realsocket.recv_into.__doc__
+
+ def recvfrom(self, buffersize, flags=0):
+ return self._sock.recvfrom(buffersize, flags=flags)
+ recvfrom.__doc__ = _realsocket.recvfrom.__doc__
+
+ def recvfrom_into(self, buffer, nbytes=0, flags=0):
+ return self._sock.recvfrom_into(buffer, nbytes=nbytes, flags=flags)
+ recvfrom_into.__doc__ = _realsocket.recvfrom_into.__doc__
+
+ def sendto(self, data, param2, param3=None):
+ if param3 is None:
+ return self._sock.sendto(data, param2)
+ else:
+ return self._sock.sendto(data, param2, param3)
+ sendto.__doc__ = _realsocket.sendto.__doc__
+
+ def close(self):
# This function should not reference any globals. See issue #808164.
self._sock = _closedsocket()
- dummy = self._sock._dummy
- for method in _delegate_methods:
- setattr(self, method, dummy)
close.__doc__ = _realsocket.close.__doc__
def accept(self):
@@ -214,21 +228,49 @@
Return a regular file object corresponding to the socket. The mode
and bufsize arguments are as for the built-in open() function."""
- return _fileobject(self._sock, mode, bufsize)
+ self._io_refs += 1
+ return _fileobject(self, mode, bufsize)
+
+ def _decref_socketios(self):
+ if self._io_refs > 0:
+ self._io_refs -= 1
+ if self._closed:
+ self.close()
+
+ def _real_close(self):
+ # This function should not reference any globals. See issue #808164.
+ self._sock.close()
+
+ def close(self):
+ # This function should not reference any globals. See issue #808164.
+ self._closed = True
+ if self._io_refs <= 0:
+ self._real_close()
family = property(lambda self: self._sock.family, doc="the socket family")
type = property(lambda self: self._sock.type, doc="the socket type")
proto = property(lambda self: self._sock.proto, doc="the socket protocol")
-def meth(name,self,*args):
- return getattr(self._sock,name)(*args)
+ # Delegate many calls to the raw socket object.
+ _s = ("def %(name)s(self, %(args)s): return self._sock.%(name)s(%(args)s)\n\n"
+ "%(name)s.__doc__ = _realsocket.%(name)s.__doc__\n")
+ for _m in _socketmethods:
+ # yupi! we're on pypy, all code objects have this interface
+ argcount = getattr(_realsocket, _m).im_func.func_code.co_argcount - 1
+ exec _s % {'name': _m, 'args': ', '.join('arg%d' % i for i in range(argcount))}
+ del _m, _s, argcount
-for _m in _socketmethods:
- p = partial(meth,_m)
- p.__name__ = _m
- p.__doc__ = getattr(_realsocket,_m).__doc__
- m = MethodType(p,None,_socketobject)
- setattr(_socketobject,_m,m)
+ # Delegation methods with default arguments, that the code above
+ # cannot handle correctly
+ def sendall(self, data, flags=0):
+ self._sock.sendall(data, flags)
+ sendall.__doc__ = _realsocket.sendall.__doc__
+
+ def getsockopt(self, level, optname, buflen=None):
+ if buflen is None:
+ return self._sock.getsockopt(level, optname)
+ return self._sock.getsockopt(level, optname, buflen)
+ getsockopt.__doc__ = _realsocket.getsockopt.__doc__
socket = SocketType = _socketobject
@@ -278,8 +320,11 @@
if self._sock:
self.flush()
finally:
- if self._close:
- self._sock.close()
+ if self._sock:
+ if self._close:
+ self._sock.close()
+ else:
+ self._sock._decref_socketios()
self._sock = None
def __del__(self):
diff --git a/lib-python/2.7/sqlite3/test/dbapi.py b/lib-python/2.7/sqlite3/test/dbapi.py
--- a/lib-python/2.7/sqlite3/test/dbapi.py
+++ b/lib-python/2.7/sqlite3/test/dbapi.py
@@ -1,4 +1,4 @@
-#-*- coding: ISO-8859-1 -*-
+#-*- coding: iso-8859-1 -*-
# pysqlite2/test/dbapi.py: tests for DB-API compliance
#
# Copyright (C) 2004-2010 Gerhard H�ring
@@ -332,6 +332,9 @@
def __init__(self):
self.value = 5
+ def __iter__(self):
+ return self
+
def next(self):
if self.value == 10:
raise StopIteration
@@ -826,7 +829,7 @@
con = sqlite.connect(":memory:")
con.close()
try:
- con()
+ con("select 1")
self.fail("Should have raised a ProgrammingError")
except sqlite.ProgrammingError:
pass
diff --git a/lib-python/2.7/sqlite3/test/regression.py b/lib-python/2.7/sqlite3/test/regression.py
--- a/lib-python/2.7/sqlite3/test/regression.py
+++ b/lib-python/2.7/sqlite3/test/regression.py
@@ -264,6 +264,28 @@
"""
self.assertRaises(sqlite.Warning, self.con, 1)
+ def CheckUpdateDescriptionNone(self):
+ """
+ Call Cursor.update with an UPDATE query and check that it sets the
+ cursor's description to be None.
+ """
+ cur = self.con.cursor()
+ cur.execute("CREATE TABLE foo (id INTEGER)")
+ cur.execute("UPDATE foo SET id = 3 WHERE id = 1")
+ self.assertEqual(cur.description, None)
+
+ def CheckStatementCache(self):
+ cur = self.con.cursor()
+ cur.execute("CREATE TABLE foo (id INTEGER)")
+ values = [(i,) for i in xrange(5)]
+ cur.executemany("INSERT INTO foo (id) VALUES (?)", values)
+
+ cur.execute("SELECT id FROM foo")
+ self.assertEqual(list(cur), values)
+ self.con.commit()
+ cur.execute("SELECT id FROM foo")
+ self.assertEqual(list(cur), values)
+
def suite():
regression_suite = unittest.makeSuite(RegressionTests, "Check")
return unittest.TestSuite((regression_suite,))
diff --git a/lib-python/2.7/sqlite3/test/userfunctions.py b/lib-python/2.7/sqlite3/test/userfunctions.py
--- a/lib-python/2.7/sqlite3/test/userfunctions.py
+++ b/lib-python/2.7/sqlite3/test/userfunctions.py
@@ -275,12 +275,14 @@
pass
def CheckAggrNoStep(self):
+ # XXX it's better to raise OperationalError in order to stop
+ # the query earlier.
cur = self.con.cursor()
try:
cur.execute("select nostep(t) from test")
- self.fail("should have raised an AttributeError")
- except AttributeError, e:
- self.assertEqual(e.args[0], "AggrNoStep instance has no attribute 'step'")
+ self.fail("should have raised an OperationalError")
+ except sqlite.OperationalError, e:
+ self.assertEqual(e.args[0], "user-defined aggregate's 'step' method raised error")
def CheckAggrNoFinalize(self):
cur = self.con.cursor()
diff --git a/lib-python/2.7/ssl.py b/lib-python/2.7/ssl.py
--- a/lib-python/2.7/ssl.py
+++ b/lib-python/2.7/ssl.py
@@ -86,7 +86,7 @@
else:
_PROTOCOL_NAMES[PROTOCOL_SSLv2] = "SSLv2"
-from socket import socket, _fileobject, _delegate_methods, error as socket_error
+from socket import socket, _fileobject, error as socket_error
from socket import getnameinfo as _getnameinfo
import base64 # for DER-to-PEM translation
import errno
@@ -103,14 +103,6 @@
do_handshake_on_connect=True,
suppress_ragged_eofs=True, ciphers=None):
socket.__init__(self, _sock=sock._sock)
- # The initializer for socket overrides the methods send(), recv(), etc.
- # in the instancce, which we don't need -- but we want to provide the
- # methods defined in SSLSocket.
- for attr in _delegate_methods:
- try:
- delattr(self, attr)
- except AttributeError:
- pass
if certfile and not keyfile:
keyfile = certfile
diff --git a/lib-python/2.7/subprocess.py b/lib-python/2.7/subprocess.py
--- a/lib-python/2.7/subprocess.py
+++ b/lib-python/2.7/subprocess.py
@@ -803,7 +803,7 @@
elif stderr == PIPE:
errread, errwrite = _subprocess.CreatePipe(None, 0)
elif stderr == STDOUT:
- errwrite = c2pwrite
+ errwrite = c2pwrite.handle # pass id to not close it
elif isinstance(stderr, int):
errwrite = msvcrt.get_osfhandle(stderr)
else:
@@ -818,9 +818,13 @@
def _make_inheritable(self, handle):
"""Return a duplicate of handle, which is inheritable"""
- return _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
+ dupl = _subprocess.DuplicateHandle(_subprocess.GetCurrentProcess(),
handle, _subprocess.GetCurrentProcess(), 0, 1,
_subprocess.DUPLICATE_SAME_ACCESS)
+ # If the initial handle was obtained with CreatePipe, close it.
+ if not isinstance(handle, int):
+ handle.Close()
+ return dupl
def _find_w9xpopen(self):
diff --git a/lib-python/2.7/sysconfig.py b/lib-python/2.7/sysconfig.py
--- a/lib-python/2.7/sysconfig.py
+++ b/lib-python/2.7/sysconfig.py
@@ -26,6 +26,16 @@
'scripts': '{base}/bin',
'data' : '{base}',
},
+ 'pypy': {
+ 'stdlib': '{base}/lib-python',
+ 'platstdlib': '{base}/lib-python',
+ 'purelib': '{base}/lib-python',
+ 'platlib': '{base}/lib-python',
+ 'include': '{base}/include',
+ 'platinclude': '{base}/include',
+ 'scripts': '{base}/bin',
+ 'data' : '{base}',
+ },
'nt': {
'stdlib': '{base}/Lib',
'platstdlib': '{base}/Lib',
@@ -158,7 +168,9 @@
return res
def _get_default_scheme():
- if os.name == 'posix':
+ if '__pypy__' in sys.builtin_module_names:
+ return 'pypy'
+ elif os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
@@ -182,126 +194,9 @@
return env_base if env_base else joinuser("~", ".local")
-def _parse_makefile(filename, vars=None):
- """Parse a Makefile-style file.
-
- A dictionary containing name/value pairs is returned. If an
- optional dictionary is passed in as the second argument, it is
- used instead of a new dictionary.
- """
- import re
- # Regexes needed for parsing Makefile (and similar syntaxes,
- # like old-style Setup files).
- _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
- _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
- _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
-
- if vars is None:
- vars = {}
- done = {}
- notdone = {}
-
- with open(filename) as f:
- lines = f.readlines()
-
- for line in lines:
- if line.startswith('#') or line.strip() == '':
- continue
- m = _variable_rx.match(line)
- if m:
- n, v = m.group(1, 2)
- v = v.strip()
- # `$$' is a literal `$' in make
- tmpv = v.replace('$$', '')
-
- if "$" in tmpv:
- notdone[n] = v
- else:
- try:
- v = int(v)
- except ValueError:
- # insert literal `$'
- done[n] = v.replace('$$', '$')
- else:
- done[n] = v
-
- # do variable interpolation here
- while notdone:
- for name in notdone.keys():
- value = notdone[name]
- m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
- if m:
- n = m.group(1)
- found = True
- if n in done:
- item = str(done[n])
- elif n in notdone:
- # get it on a subsequent round
- found = False
- elif n in os.environ:
- # do it like make: fall back to environment
- item = os.environ[n]
- else:
- done[n] = item = ""
- if found:
- after = value[m.end():]
- value = value[:m.start()] + item + after
- if "$" in after:
- notdone[name] = value
- else:
- try: value = int(value)
- except ValueError:
- done[name] = value.strip()
- else:
- done[name] = value
- del notdone[name]
- else:
- # bogus variable reference; just drop it since we can't deal
- del notdone[name]
- # strip spurious spaces
- for k, v in done.items():
- if isinstance(v, str):
- done[k] = v.strip()
-
- # save the results in the global dictionary
- vars.update(done)
- return vars
-
-
-def _get_makefile_filename():
- if _PYTHON_BUILD:
- return os.path.join(_PROJECT_BASE, "Makefile")
- return os.path.join(get_path('platstdlib'), "config", "Makefile")
-
-
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
- # load the installed Makefile:
- makefile = _get_makefile_filename()
- try:
- _parse_makefile(makefile, vars)
- except IOError, e:
- msg = "invalid Python installation: unable to open %s" % makefile
- if hasattr(e, "strerror"):
- msg = msg + " (%s)" % e.strerror
- raise IOError(msg)
-
- # load the installed pyconfig.h:
- config_h = get_config_h_filename()
- try:
- with open(config_h) as f:
- parse_config_h(f, vars)
- except IOError, e:
- msg = "invalid Python installation: unable to open %s" % config_h
- if hasattr(e, "strerror"):
- msg = msg + " (%s)" % e.strerror
- raise IOError(msg)
-
- # On AIX, there are wrong paths to the linker scripts in the Makefile
- # -- these paths are relative to the Python source, but when installed
- # the scripts are in another directory.
- if _PYTHON_BUILD:
- vars['LDSHARED'] = vars['BLDSHARED']
+ return
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
@@ -474,10 +369,11 @@
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
- flags = _CONFIG_VARS[key]
- flags = re.sub('-arch\s+\w+\s', ' ', flags)
- flags = flags + ' ' + arch
- _CONFIG_VARS[key] = flags
+ if key in _CONFIG_VARS:
+ flags = _CONFIG_VARS[key]
+ flags = re.sub('-arch\s+\w+\s', ' ', flags)
+ flags = flags + ' ' + arch
+ _CONFIG_VARS[key] = flags
# If we're on OSX 10.5 or later and the user tries to
# compiles an extension using an SDK that is not present
diff --git a/lib-python/2.7/tarfile.py b/lib-python/2.7/tarfile.py
--- a/lib-python/2.7/tarfile.py
+++ b/lib-python/2.7/tarfile.py
@@ -1716,9 +1716,6 @@
except (ImportError, AttributeError):
raise CompressionError("gzip module is not available")
- if fileobj is None:
- fileobj = bltn_open(name, mode + "b")
-
try:
t = cls.taropen(name, mode,
gzip.GzipFile(name, mode, compresslevel, fileobj),
diff --git a/lib-python/2.7/test/__init__.py b/lib-python/2.7/test/__init__.py
--- a/lib-python/2.7/test/__init__.py
+++ b/lib-python/2.7/test/__init__.py
@@ -1,1 +1,11 @@
-# Dummy file to make this directory a package.
+"""
+This package only contains the tests that we have modified for PyPy.
+It uses the 'official' hack to include the rest of the standard
+'test' package from CPython.
+
+This assumes that sys.path is configured to contain
+'lib-python/modified-2.7.0' before 'lib-python/2.7.0'.
+"""
+
+from pkgutil import extend_path
+__path__ = extend_path(__path__, __name__)
diff --git a/lib-python/2.7/test/list_tests.py b/lib-python/2.7/test/list_tests.py
--- a/lib-python/2.7/test/list_tests.py
+++ b/lib-python/2.7/test/list_tests.py
@@ -45,8 +45,12 @@
self.assertEqual(str(a2), "[0, 1, 2, [...], 3]")
self.assertEqual(repr(a2), "[0, 1, 2, [...], 3]")
+ if test_support.check_impl_detail():
+ depth = sys.getrecursionlimit() + 100
+ else:
+ depth = 1000 * 1000 # should be enough to exhaust the stack
l0 = []
- for i in xrange(sys.getrecursionlimit() + 100):
+ for i in xrange(depth):
l0 = [l0]
self.assertRaises(RuntimeError, repr, l0)
@@ -472,7 +476,11 @@
u += "eggs"
self.assertEqual(u, self.type2test("spameggs"))
- self.assertRaises(TypeError, u.__iadd__, None)
+ def f_iadd(u, x):
+ u += x
+ return u
+
+ self.assertRaises(TypeError, f_iadd, u, None)
def test_imul(self):
u = self.type2test([0, 1])
diff --git a/lib-python/2.7/test/mapping_tests.py b/lib-python/2.7/test/mapping_tests.py
--- a/lib-python/2.7/test/mapping_tests.py
+++ b/lib-python/2.7/test/mapping_tests.py
@@ -531,7 +531,10 @@
self.assertEqual(va, int(ka))
kb, vb = tb = b.popitem()
self.assertEqual(vb, int(kb))
- self.assertTrue(not(copymode < 0 and ta != tb))
+ if copymode < 0 and test_support.check_impl_detail():
+ # popitem() is not guaranteed to be deterministic on
+ # all implementations
+ self.assertEqual(ta, tb)
self.assertTrue(not a)
self.assertTrue(not b)
diff --git a/lib-python/2.7/test/pickletester.py b/lib-python/2.7/test/pickletester.py
--- a/lib-python/2.7/test/pickletester.py
+++ b/lib-python/2.7/test/pickletester.py
@@ -6,7 +6,7 @@
import pickletools
import copy_reg
-from test.test_support import TestFailed, have_unicode, TESTFN
+from test.test_support import TestFailed, have_unicode, TESTFN, impl_detail
# Tests that try a number of pickle protocols should have a
# for proto in protocols:
@@ -949,6 +949,7 @@
"Failed protocol %d: %r != %r"
% (proto, obj, loaded))
+ @impl_detail("pypy does not store attribute names", pypy=False)
def test_attribute_name_interning(self):
# Test that attribute names of pickled objects are interned when
# unpickling.
@@ -1091,6 +1092,7 @@
s = StringIO.StringIO("X''.")
self.assertRaises(EOFError, self.module.load, s)
+ @impl_detail("no full restricted mode in pypy", pypy=False)
def test_restricted(self):
# issue7128: cPickle failed in restricted mode
builtins = {self.module.__name__: self.module,
diff --git a/lib-python/2.7/test/regrtest.py b/lib-python/2.7/test/regrtest.py
--- a/lib-python/2.7/test/regrtest.py
+++ b/lib-python/2.7/test/regrtest.py
@@ -680,8 +680,13 @@
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
"""Return a list of all applicable test modules."""
- testdir = findtestdir(testdir)
- names = os.listdir(testdir)
+ if testdir:
+ testdirs = [testdir]
+ else:
+ testdirs = findtestdirs()
+ names = {}
+ for testdir in testdirs:
+ names.update(dict.fromkeys(os.listdir(testdir)))
tests = []
others = set(stdtests) | nottests
for name in names:
@@ -1080,8 +1085,19 @@
# Collect cyclic trash.
gc.collect()
-def findtestdir(path=None):
- return path or os.path.dirname(__file__) or os.curdir
+def findtestdirs():
+ # XXX hacking: returns a list of both the '2.7.0/test' and the
+ # 'modified-2.7.0/test' directories, as full paths.
+ testdir = os.path.abspath(os.path.dirname(__file__) or os.curdir)
+ assert os.path.basename(testdir).lower() == 'test'
+ maindir = os.path.dirname(testdir)
+ libpythondir = os.path.dirname(maindir)
+ maindirname = os.path.basename(maindir).lower()
+ if maindirname.startswith('modified-'):
+ maindirname = maindirname[len('modified-'):]
+ testdir1 = os.path.join(libpythondir, maindirname, 'test')
+ testdir2 = os.path.join(libpythondir, 'modified-'+maindirname, 'test')
+ return [testdir1, testdir2]
def removepy(names):
if not names:
@@ -1388,7 +1404,26 @@
test_zipimport
test_zlib
""",
- 'openbsd3':
+ 'openbsd4':
+ """
+ test_ascii_formatd
+ test_bsddb
+ test_bsddb3
+ test_ctypes
+ test_dl
+ test_epoll
+ test_gdbm
+ test_locale
+ test_normalization
+ test_ossaudiodev
+ test_pep277
+ test_tcl
+ test_tk
+ test_ttk_guionly
+ test_ttk_textonly
+ test_multiprocessing
+ """,
+ 'openbsd5':
"""
test_ascii_formatd
test_bsddb
@@ -1503,13 +1538,7 @@
return self.expected
if __name__ == '__main__':
- # findtestdir() gets the dirname out of __file__, so we have to make it
- # absolute before changing the working directory.
- # For example __file__ may be relative when running trace or profile.
- # See issue #9323.
- __file__ = os.path.abspath(__file__)
-
- # sanity check
+ # Simplification for findtestdir().
assert __file__ == os.path.abspath(sys.argv[0])
# When tests are run from the Python build directory, it is best practice
diff --git a/lib-python/2.7/test/seq_tests.py b/lib-python/2.7/test/seq_tests.py
--- a/lib-python/2.7/test/seq_tests.py
+++ b/lib-python/2.7/test/seq_tests.py
@@ -307,12 +307,18 @@
def test_bigrepeat(self):
import sys
- if sys.maxint <= 2147483647:
- x = self.type2test([0])
- x *= 2**16
- self.assertRaises(MemoryError, x.__mul__, 2**16)
- if hasattr(x, '__imul__'):
- self.assertRaises(MemoryError, x.__imul__, 2**16)
+ # we chose an N such that 2**16 * N does not fit into a cpu word
+ if sys.maxint == 2147483647:
+ # 32 bit system
+ N = 2**16
+ else:
+ # 64 bit system
+ N = 2**48
+ x = self.type2test([0])
+ x *= 2**16
+ self.assertRaises(MemoryError, x.__mul__, N)
+ if hasattr(x, '__imul__'):
+ self.assertRaises(MemoryError, x.__imul__, N)
def test_subscript(self):
a = self.type2test([10, 11])
diff --git a/lib-python/2.7/test/string_tests.py b/lib-python/2.7/test/string_tests.py
--- a/lib-python/2.7/test/string_tests.py
+++ b/lib-python/2.7/test/string_tests.py
@@ -1024,7 +1024,10 @@
self.checkequal('abc', 'abc', '__mul__', 1)
self.checkequal('abcabcabc', 'abc', '__mul__', 3)
self.checkraises(TypeError, 'abc', '__mul__')
- self.checkraises(TypeError, 'abc', '__mul__', '')
+ class Mul(object):
+ def mul(self, a, b):
+ return a * b
+ self.checkraises(TypeError, Mul(), 'mul', 'abc', '')
# XXX: on a 64-bit system, this doesn't raise an overflow error,
# but either raises a MemoryError, or succeeds (if you have 54TiB)
#self.checkraises(OverflowError, 10000*'abc', '__mul__', 2000000000)
diff --git a/lib-python/2.7/test/test_abstract_numbers.py b/lib-python/2.7/test/test_abstract_numbers.py
--- a/lib-python/2.7/test/test_abstract_numbers.py
+++ b/lib-python/2.7/test/test_abstract_numbers.py
@@ -40,7 +40,8 @@
c1, c2 = complex(3, 2), complex(4,1)
# XXX: This is not ideal, but see the comment in math_trunc().
- self.assertRaises(AttributeError, math.trunc, c1)
+ # Modified to suit PyPy, which gives TypeError in all cases
+ self.assertRaises((AttributeError, TypeError), math.trunc, c1)
self.assertRaises(TypeError, float, c1)
self.assertRaises(TypeError, int, c1)
diff --git a/lib-python/2.7/test/test_aifc.py b/lib-python/2.7/test/test_aifc.py
--- a/lib-python/2.7/test/test_aifc.py
+++ b/lib-python/2.7/test/test_aifc.py
@@ -1,4 +1,4 @@
-from test.test_support import findfile, run_unittest, TESTFN
+from test.test_support import findfile, run_unittest, TESTFN, impl_detail
import unittest
import os
@@ -68,6 +68,7 @@
self.assertEqual(f.getparams(), fout.getparams())
self.assertEqual(f.readframes(5), fout.readframes(5))
+ @impl_detail("PyPy has no audioop module yet", pypy=False)
def test_compress(self):
f = self.f = aifc.open(self.sndfilepath)
fout = self.fout = aifc.open(TESTFN, 'wb')
diff --git a/lib-python/2.7/test/test_array.py b/lib-python/2.7/test/test_array.py
--- a/lib-python/2.7/test/test_array.py
+++ b/lib-python/2.7/test/test_array.py
@@ -295,9 +295,10 @@
)
b = array.array(self.badtypecode())
- self.assertRaises(TypeError, a.__add__, b)
-
- self.assertRaises(TypeError, a.__add__, "bad")
+ with self.assertRaises(TypeError):
+ a + b
+ with self.assertRaises(TypeError):
+ a + 'bad'
def test_iadd(self):
a = array.array(self.typecode, self.example[::-1])
@@ -316,9 +317,10 @@
)
b = array.array(self.badtypecode())
- self.assertRaises(TypeError, a.__add__, b)
-
- self.assertRaises(TypeError, a.__iadd__, "bad")
+ with self.assertRaises(TypeError):
+ a += b
+ with self.assertRaises(TypeError):
+ a += 'bad'
def test_mul(self):
a = 5*array.array(self.typecode, self.example)
@@ -345,7 +347,8 @@
array.array(self.typecode)
)
- self.assertRaises(TypeError, a.__mul__, "bad")
+ with self.assertRaises(TypeError):
+ a * 'bad'
def test_imul(self):
a = array.array(self.typecode, self.example)
@@ -374,7 +377,8 @@
a *= -1
self.assertEqual(a, array.array(self.typecode))
- self.assertRaises(TypeError, a.__imul__, "bad")
+ with self.assertRaises(TypeError):
+ a *= 'bad'
def test_getitem(self):
a = array.array(self.typecode, self.example)
@@ -769,6 +773,7 @@
p = proxy(s)
self.assertEqual(p.tostring(), s.tostring())
s = None
+ test_support.gc_collect()
self.assertRaises(ReferenceError, len, p)
def test_bug_782369(self):
diff --git a/lib-python/2.7/test/test_ascii_formatd.py b/lib-python/2.7/test/test_ascii_formatd.py
--- a/lib-python/2.7/test/test_ascii_formatd.py
+++ b/lib-python/2.7/test/test_ascii_formatd.py
@@ -4,6 +4,10 @@
import unittest
from test.test_support import check_warnings, run_unittest, import_module
+from test.test_support import check_impl_detail
+
+if not check_impl_detail(cpython=True):
+ raise unittest.SkipTest("this test is only for CPython")
# Skip tests if _ctypes module does not exist
import_module('_ctypes')
diff --git a/lib-python/2.7/test/test_ast.py b/lib-python/2.7/test/test_ast.py
--- a/lib-python/2.7/test/test_ast.py
+++ b/lib-python/2.7/test/test_ast.py
@@ -20,10 +20,24 @@
# These tests are compiled through "exec"
# There should be atleast one test per statement
exec_tests = [
+ # None
+ "None",
# FunctionDef
"def f(): pass",
+ # FunctionDef with arg
+ "def f(a): pass",
+ # FunctionDef with arg and default value
+ "def f(a=0): pass",
+ # FunctionDef with varargs
+ "def f(*args): pass",
+ # FunctionDef with kwargs
+ "def f(**kwargs): pass",
+ # FunctionDef with all kind of args
+ "def f(a, b=1, c=None, d=[], e={}, *args, **kwargs): pass",
# ClassDef
"class C:pass",
+ # ClassDef, new style class
+ "class C(object): pass",
# Return
"def f():return 1",
# Delete
@@ -68,6 +82,27 @@
"for a,b in c: pass",
"[(a,b) for a,b in c]",
"((a,b) for a,b in c)",
+ "((a,b) for (a,b) in c)",
+ # Multiline generator expression
+ """(
+ (
+ Aa
+ ,
+ Bb
+ )
+ for
+ Aa
+ ,
+ Bb in Cc
+ )""",
+ # dictcomp
+ "{a : b for w in x for m in p if g}",
+ # dictcomp with naked tuple
+ "{a : b for v,w in x}",
+ # setcomp
+ "{r for l in x if g}",
+ # setcomp with naked tuple
+ "{r for l,m in x}",
]
# These are compiled through "single"
@@ -80,6 +115,8 @@
# These are compiled through "eval"
# It should test all expressions
eval_tests = [
+ # None
+ "None",
# BoolOp
"a and b",
# BinOp
@@ -90,6 +127,16 @@
"lambda:None",
# Dict
"{ 1:2 }",
+ # Empty dict
+ "{}",
+ # Set
+ "{None,}",
+ # Multiline dict
+ """{
+ 1
+ :
+ 2
+ }""",
# ListComp
"[a for b in c if d]",
# GeneratorExp
@@ -114,8 +161,14 @@
"v",
# List
"[1,2,3]",
+ # Empty list
+ "[]",
# Tuple
"1,2,3",
+ # Tuple
+ "(1,2,3)",
+ # Empty tuple
+ "()",
# Combination
"a.b.c.d(a.b[1:2])",
@@ -141,6 +194,35 @@
elif value is not None:
self._assertTrueorder(value, parent_pos)
+ def test_AST_objects(self):
+ if test_support.check_impl_detail():
+ # PyPy also provides a __dict__ to the ast.AST base class.
+
+ x = ast.AST()
+ try:
+ x.foobar = 21
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'_ast.AST' object has no attribute 'foobar'")
+ else:
+ self.assert_(False)
+
+ try:
+ ast.AST(lineno=2)
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'_ast.AST' object has no attribute 'lineno'")
+ else:
+ self.assert_(False)
+
+ try:
+ ast.AST(2)
+ except TypeError, e:
+ self.assertEquals(e.args[0],
+ "_ast.AST constructor takes 0 positional arguments")
+ else:
+ self.assert_(False)
+
def test_snippets(self):
for input, output, kind in ((exec_tests, exec_results, "exec"),
(single_tests, single_results, "single"),
@@ -169,6 +251,114 @@
self.assertTrue(issubclass(ast.comprehension, ast.AST))
self.assertTrue(issubclass(ast.Gt, ast.AST))
+ def test_field_attr_existence(self):
+ for name, item in ast.__dict__.iteritems():
+ if isinstance(item, type) and name != 'AST' and name[0].isupper(): # XXX: pypy does not allow abstract ast class instanciation
+ x = item()
+ if isinstance(x, ast.AST):
+ self.assertEquals(type(x._fields), tuple)
+
+ def test_arguments(self):
+ x = ast.arguments()
+ self.assertEquals(x._fields, ('args', 'vararg', 'kwarg', 'defaults'))
+ try:
+ x.vararg
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'arguments' object has no attribute 'vararg'")
+ else:
+ self.assert_(False)
+ x = ast.arguments(1, 2, 3, 4)
+ self.assertEquals(x.vararg, 2)
+
+ def test_field_attr_writable(self):
+ x = ast.Num()
+ # We can assign to _fields
+ x._fields = 666
+ self.assertEquals(x._fields, 666)
+
+ def test_classattrs(self):
+ x = ast.Num()
+ self.assertEquals(x._fields, ('n',))
+ try:
+ x.n
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'Num' object has no attribute 'n'")
+ else:
+ self.assert_(False)
+
+ x = ast.Num(42)
+ self.assertEquals(x.n, 42)
+ try:
+ x.lineno
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'Num' object has no attribute 'lineno'")
+ else:
+ self.assert_(False)
+
+ y = ast.Num()
+ x.lineno = y
+ self.assertEquals(x.lineno, y)
+
+ try:
+ x.foobar
+ except AttributeError, e:
+ self.assertEquals(e.args[0],
+ "'Num' object has no attribute 'foobar'")
+ else:
+ self.assert_(False)
+
+ x = ast.Num(lineno=2)
+ self.assertEquals(x.lineno, 2)
+
+ x = ast.Num(42, lineno=0)
+ self.assertEquals(x.lineno, 0)
+ self.assertEquals(x._fields, ('n',))
+ self.assertEquals(x.n, 42)
+
+ self.assertRaises(TypeError, ast.Num, 1, 2)
+ self.assertRaises(TypeError, ast.Num, 1, 2, lineno=0)
+
+ def test_module(self):
+ body = [ast.Num(42)]
+ x = ast.Module(body)
+ self.assertEquals(x.body, body)
+
+ def test_nodeclass(self):
+ x = ast.BinOp()
+ self.assertEquals(x._fields, ('left', 'op', 'right'))
+
+ # Zero-argument constructor explicitly allowed
+ x = ast.BinOp()
+ # Random attribute allowed too
+ x.foobarbaz = 5
+ self.assertEquals(x.foobarbaz, 5)
+
+ n1 = ast.Num(1)
+ n3 = ast.Num(3)
+ addop = ast.Add()
+ x = ast.BinOp(n1, addop, n3)
+ self.assertEquals(x.left, n1)
+ self.assertEquals(x.op, addop)
+ self.assertEquals(x.right, n3)
+
+ x = ast.BinOp(1, 2, 3)
+ self.assertEquals(x.left, 1)
+ self.assertEquals(x.op, 2)
+ self.assertEquals(x.right, 3)
+
+ x = ast.BinOp(1, 2, 3, lineno=0)
+ self.assertEquals(x.lineno, 0)
+
+ def test_nodeclasses(self):
+ x = ast.BinOp(1, 2, 3, lineno=0)
+ self.assertEquals(x.left, 1)
+ self.assertEquals(x.op, 2)
+ self.assertEquals(x.right, 3)
+ self.assertEquals(x.lineno, 0)
+
def test_nodeclasses(self):
x = ast.BinOp(1, 2, 3, lineno=0)
self.assertEqual(x.left, 1)
@@ -178,6 +368,12 @@
# node raises exception when not given enough arguments
self.assertRaises(TypeError, ast.BinOp, 1, 2)
+ # node raises exception when given too many arguments
+ self.assertRaises(TypeError, ast.BinOp, 1, 2, 3, 4)
+ # node raises exception when not given enough arguments
+ self.assertRaises(TypeError, ast.BinOp, 1, 2, lineno=0)
+ # node raises exception when given too many arguments
+ self.assertRaises(TypeError, ast.BinOp, 1, 2, 3, 4, lineno=0)
# can set attributes through kwargs too
x = ast.BinOp(left=1, op=2, right=3, lineno=0)
@@ -186,8 +382,14 @@
self.assertEqual(x.right, 3)
self.assertEqual(x.lineno, 0)
+ # Random kwargs also allowed
+ x = ast.BinOp(1, 2, 3, foobarbaz=42)
+ self.assertEquals(x.foobarbaz, 42)
+
+ def test_no_fields(self):
# this used to fail because Sub._fields was None
x = ast.Sub()
+ self.assertEquals(x._fields, ())
def test_pickling(self):
import pickle
@@ -330,8 +532,15 @@
#### EVERYTHING BELOW IS GENERATED #####
exec_results = [
+('Module', [('Expr', (1, 0), ('Name', (1, 0), 'None', ('Load',)))]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, []), [('Pass', (1, 9))], [])]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('Name', (1, 6), 'a', ('Param',))], None, None, []), [('Pass', (1, 10))], [])]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('Name', (1, 6), 'a', ('Param',))], None, None, [('Num', (1, 8), 0)]), [('Pass', (1, 12))], [])]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], 'args', None, []), [('Pass', (1, 14))], [])]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, 'kwargs', []), [('Pass', (1, 17))], [])]),
+('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [('Name', (1, 6), 'a', ('Param',)), ('Name', (1, 9), 'b', ('Param',)), ('Name', (1, 14), 'c', ('Param',)), ('Name', (1, 22), 'd', ('Param',)), ('Name', (1, 28), 'e', ('Param',))], 'args', 'kwargs', [('Num', (1, 11), 1), ('Name', (1, 16), 'None', ('Load',)), ('List', (1, 24), [], ('Load',)), ('Dict', (1, 30), [], [])]), [('Pass', (1, 52))], [])]),
('Module', [('ClassDef', (1, 0), 'C', [], [('Pass', (1, 8))], [])]),
+('Module', [('ClassDef', (1, 0), 'C', [('Name', (1, 8), 'object', ('Load',))], [('Pass', (1, 17))], [])]),
('Module', [('FunctionDef', (1, 0), 'f', ('arguments', [], None, None, []), [('Return', (1, 8), ('Num', (1, 15), 1))], [])]),
('Module', [('Delete', (1, 0), [('Name', (1, 4), 'v', ('Del',))])]),
('Module', [('Assign', (1, 0), [('Name', (1, 0), 'v', ('Store',))], ('Num', (1, 4), 1))]),
@@ -355,16 +564,26 @@
('Module', [('For', (1, 0), ('Tuple', (1, 4), [('Name', (1, 4), 'a', ('Store',)), ('Name', (1, 6), 'b', ('Store',))], ('Store',)), ('Name', (1, 11), 'c', ('Load',)), [('Pass', (1, 14))], [])]),
('Module', [('Expr', (1, 0), ('ListComp', (1, 1), ('Tuple', (1, 2), [('Name', (1, 2), 'a', ('Load',)), ('Name', (1, 4), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11), [('Name', (1, 11), 'a', ('Store',)), ('Name', (1, 13), 'b', ('Store',))], ('Store',)), ('Name', (1, 18), 'c', ('Load',)), [])]))]),
('Module', [('Expr', (1, 0), ('GeneratorExp', (1, 1), ('Tuple', (1, 2), [('Name', (1, 2), 'a', ('Load',)), ('Name', (1, 4), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 11), [('Name', (1, 11), 'a', ('Store',)), ('Name', (1, 13), 'b', ('Store',))], ('Store',)), ('Name', (1, 18), 'c', ('Load',)), [])]))]),
+('Module', [('Expr', (1, 0), ('GeneratorExp', (1, 1), ('Tuple', (1, 2), [('Name', (1, 2), 'a', ('Load',)), ('Name', (1, 4), 'b', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (1, 12), [('Name', (1, 12), 'a', ('Store',)), ('Name', (1, 14), 'b', ('Store',))], ('Store',)), ('Name', (1, 20), 'c', ('Load',)), [])]))]),
+('Module', [('Expr', (1, 0), ('GeneratorExp', (2, 4), ('Tuple', (3, 4), [('Name', (3, 4), 'Aa', ('Load',)), ('Name', (5, 7), 'Bb', ('Load',))], ('Load',)), [('comprehension', ('Tuple', (8, 4), [('Name', (8, 4), 'Aa', ('Store',)), ('Name', (10, 4), 'Bb', ('Store',))], ('Store',)), ('Name', (10, 10), 'Cc', ('Load',)), [])]))]),
+('Module', [('Expr', (1, 0), ('DictComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), ('Name', (1, 5), 'b', ('Load',)), [('comprehension', ('Name', (1, 11), 'w', ('Store',)), ('Name', (1, 16), 'x', ('Load',)), []), ('comprehension', ('Name', (1, 22), 'm', ('Store',)), ('Name', (1, 27), 'p', ('Load',)), [('Name', (1, 32), 'g', ('Load',))])]))]),
+('Module', [('Expr', (1, 0), ('DictComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), ('Name', (1, 5), 'b', ('Load',)), [('comprehension', ('Tuple', (1, 11), [('Name', (1, 11), 'v', ('Store',)), ('Name', (1, 13), 'w', ('Store',))], ('Store',)), ('Name', (1, 18), 'x', ('Load',)), [])]))]),
+('Module', [('Expr', (1, 0), ('SetComp', (1, 1), ('Name', (1, 1), 'r', ('Load',)), [('comprehension', ('Name', (1, 7), 'l', ('Store',)), ('Name', (1, 12), 'x', ('Load',)), [('Name', (1, 17), 'g', ('Load',))])]))]),
+('Module', [('Expr', (1, 0), ('SetComp', (1, 1), ('Name', (1, 1), 'r', ('Load',)), [('comprehension', ('Tuple', (1, 7), [('Name', (1, 7), 'l', ('Store',)), ('Name', (1, 9), 'm', ('Store',))], ('Store',)), ('Name', (1, 14), 'x', ('Load',)), [])]))]),
]
single_results = [
('Interactive', [('Expr', (1, 0), ('BinOp', (1, 0), ('Num', (1, 0), 1), ('Add',), ('Num', (1, 2), 2)))]),
]
eval_results = [
+('Expression', ('Name', (1, 0), 'None', ('Load',))),
('Expression', ('BoolOp', (1, 0), ('And',), [('Name', (1, 0), 'a', ('Load',)), ('Name', (1, 6), 'b', ('Load',))])),
('Expression', ('BinOp', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Add',), ('Name', (1, 4), 'b', ('Load',)))),
('Expression', ('UnaryOp', (1, 0), ('Not',), ('Name', (1, 4), 'v', ('Load',)))),
('Expression', ('Lambda', (1, 0), ('arguments', [], None, None, []), ('Name', (1, 7), 'None', ('Load',)))),
('Expression', ('Dict', (1, 0), [('Num', (1, 2), 1)], [('Num', (1, 4), 2)])),
+('Expression', ('Dict', (1, 0), [], [])),
+('Expression', ('Set', (1, 0), [('Name', (1, 1), 'None', ('Load',))])),
+('Expression', ('Dict', (1, 0), [('Num', (2, 6), 1)], [('Num', (4, 10), 2)])),
('Expression', ('ListComp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('GeneratorExp', (1, 1), ('Name', (1, 1), 'a', ('Load',)), [('comprehension', ('Name', (1, 7), 'b', ('Store',)), ('Name', (1, 12), 'c', ('Load',)), [('Name', (1, 17), 'd', ('Load',))])])),
('Expression', ('Compare', (1, 0), ('Num', (1, 0), 1), [('Lt',), ('Lt',)], [('Num', (1, 4), 2), ('Num', (1, 8), 3)])),
@@ -376,7 +595,10 @@
('Expression', ('Subscript', (1, 0), ('Name', (1, 0), 'a', ('Load',)), ('Slice', ('Name', (1, 2), 'b', ('Load',)), ('Name', (1, 4), 'c', ('Load',)), None), ('Load',))),
('Expression', ('Name', (1, 0), 'v', ('Load',))),
('Expression', ('List', (1, 0), [('Num', (1, 1), 1), ('Num', (1, 3), 2), ('Num', (1, 5), 3)], ('Load',))),
+('Expression', ('List', (1, 0), [], ('Load',))),
('Expression', ('Tuple', (1, 0), [('Num', (1, 0), 1), ('Num', (1, 2), 2), ('Num', (1, 4), 3)], ('Load',))),
+('Expression', ('Tuple', (1, 1), [('Num', (1, 1), 1), ('Num', (1, 3), 2), ('Num', (1, 5), 3)], ('Load',))),
+('Expression', ('Tuple', (1, 0), [], ('Load',))),
('Expression', ('Call', (1, 0), ('Attribute', (1, 0), ('Attribute', (1, 0), ('Attribute', (1, 0), ('Name', (1, 0), 'a', ('Load',)), 'b', ('Load',)), 'c', ('Load',)), 'd', ('Load',)), [('Subscript', (1, 8), ('Attribute', (1, 8), ('Name', (1, 8), 'a', ('Load',)), 'b', ('Load',)), ('Slice', ('Num', (1, 12), 1), ('Num', (1, 14), 2), None), ('Load',))], [], None, None)),
]
main()
diff --git a/lib-python/2.7/test/test_builtin.py b/lib-python/2.7/test/test_builtin.py
--- a/lib-python/2.7/test/test_builtin.py
+++ b/lib-python/2.7/test/test_builtin.py
@@ -3,7 +3,8 @@
import platform
import unittest
from test.test_support import fcmp, have_unicode, TESTFN, unlink, \
- run_unittest, check_py3k_warnings
+ run_unittest, check_py3k_warnings, \
+ check_impl_detail
import warnings
from operator import neg
@@ -247,12 +248,14 @@
self.assertRaises(TypeError, compile)
self.assertRaises(ValueError, compile, 'print 42\n', '', 'badmode')
self.assertRaises(ValueError, compile, 'print 42\n', '', 'single', 0xff)
- self.assertRaises(TypeError, compile, chr(0), 'f', 'exec')
+ if check_impl_detail(cpython=True):
+ self.assertRaises(TypeError, compile, chr(0), 'f', 'exec')
self.assertRaises(TypeError, compile, 'pass', '?', 'exec',
mode='eval', source='0', filename='tmp')
if have_unicode:
compile(unicode('print u"\xc3\xa5"\n', 'utf8'), '', 'exec')
- self.assertRaises(TypeError, compile, unichr(0), 'f', 'exec')
+ if check_impl_detail(cpython=True):
+ self.assertRaises(TypeError, compile, unichr(0), 'f', 'exec')
self.assertRaises(ValueError, compile, unicode('a = 1'), 'f', 'bad')
@@ -395,12 +398,16 @@
self.assertEqual(eval('dir()', g, m), list('xyz'))
self.assertEqual(eval('globals()', g, m), g)
self.assertEqual(eval('locals()', g, m), m)
- self.assertRaises(TypeError, eval, 'a', m)
+ # on top of CPython, the first dictionary (the globals) has to
+ # be a real dict. This is not the case on top of PyPy.
+ if check_impl_detail(pypy=False):
+ self.assertRaises(TypeError, eval, 'a', m)
+
class A:
"Non-mapping"
pass
m = A()
- self.assertRaises(TypeError, eval, 'a', g, m)
+ self.assertRaises((TypeError, AttributeError), eval, 'a', g, m)
# Verify that dict subclasses work as well
class D(dict):
@@ -491,9 +498,10 @@
execfile(TESTFN, globals, locals)
self.assertEqual(locals['z'], 2)
+ self.assertRaises(TypeError, execfile, TESTFN, {}, ())
unlink(TESTFN)
self.assertRaises(TypeError, execfile)
- self.assertRaises(TypeError, execfile, TESTFN, {}, ())
+ self.assertRaises((TypeError, IOError), execfile, TESTFN, {}, ())
import os
self.assertRaises(IOError, execfile, os.curdir)
self.assertRaises(IOError, execfile, "I_dont_exist")
@@ -1108,7 +1116,8 @@
def __cmp__(self, other):
raise RuntimeError
__hash__ = None # Invalid cmp makes this unhashable
- self.assertRaises(RuntimeError, range, a, a + 1, badzero(1))
+ if check_impl_detail(cpython=True):
+ self.assertRaises(RuntimeError, range, a, a + 1, badzero(1))
# Reject floats.
self.assertRaises(TypeError, range, 1., 1., 1.)
diff --git a/lib-python/2.7/test/test_bytes.py b/lib-python/2.7/test/test_bytes.py
--- a/lib-python/2.7/test/test_bytes.py
+++ b/lib-python/2.7/test/test_bytes.py
@@ -694,6 +694,7 @@
self.assertEqual(b, b1)
self.assertTrue(b is b1)
+ @test.test_support.impl_detail("undocumented bytes.__alloc__()")
def test_alloc(self):
b = bytearray()
alloc = b.__alloc__()
@@ -821,6 +822,8 @@
self.assertEqual(b, b"")
self.assertEqual(c, b"")
+ @test.test_support.impl_detail(
+ "resizing semantics of CPython rely on refcounting")
def test_resize_forbidden(self):
# #4509: can't resize a bytearray when there are buffer exports, even
# if it wouldn't reallocate the underlying buffer.
@@ -853,6 +856,26 @@
self.assertRaises(BufferError, delslice)
self.assertEqual(b, orig)
+ @test.test_support.impl_detail("resizing semantics", cpython=False)
+ def test_resize_forbidden_non_cpython(self):
+ # on non-CPython implementations, we cannot prevent changes to
+ # bytearrays just because there are buffers around. Instead,
+ # we get (on PyPy) a buffer that follows the changes and resizes.
+ b = bytearray(range(10))
+ for v in [memoryview(b), buffer(b)]:
+ b[5] = 99
+ self.assertIn(v[5], (99, chr(99)))
+ b[5] = 100
+ b += b
+ b += b
+ b += b
+ self.assertEquals(len(v), 80)
+ self.assertIn(v[5], (100, chr(100)))
+ self.assertIn(v[79], (9, chr(9)))
+ del b[10:]
+ self.assertRaises(IndexError, lambda: v[10])
+ self.assertEquals(len(v), 10)
+
def test_empty_bytearray(self):
# Issue #7561: operations on empty bytearrays could crash in many
# situations, due to a fragile implementation of the
diff --git a/lib-python/2.7/test/test_bz2.py b/lib-python/2.7/test/test_bz2.py
--- a/lib-python/2.7/test/test_bz2.py
+++ b/lib-python/2.7/test/test_bz2.py
@@ -50,6 +50,7 @@
self.filename = TESTFN
def tearDown(self):
+ test_support.gc_collect()
if os.path.isfile(self.filename):
os.unlink(self.filename)
@@ -246,6 +247,8 @@
for i in xrange(10000):
o = BZ2File(self.filename)
del o
+ if i % 100 == 0:
+ test_support.gc_collect()
def testOpenNonexistent(self):
# "Test opening a nonexistent file"
@@ -310,6 +313,7 @@
for t in threads:
t.join()
+ @test_support.impl_detail()
def testMixedIterationReads(self):
# Issue #8397: mixed iteration and reads should be forbidden.
with bz2.BZ2File(self.filename, 'wb') as f:
diff --git a/lib-python/2.7/test/test_cmd_line_script.py b/lib-python/2.7/test/test_cmd_line_script.py
--- a/lib-python/2.7/test/test_cmd_line_script.py
+++ b/lib-python/2.7/test/test_cmd_line_script.py
@@ -112,6 +112,8 @@
self._check_script(script_dir, script_name, script_dir, '')
def test_directory_compiled(self):
+ if test.test_support.check_impl_detail(pypy=True):
+ raise unittest.SkipTest("pypy won't load lone .pyc files")
with temp_dir() as script_dir:
script_name = _make_test_script(script_dir, '__main__')
compiled_name = compile_script(script_name)
@@ -173,6 +175,8 @@
script_name, 'test_pkg')
def test_package_compiled(self):
+ if test.test_support.check_impl_detail(pypy=True):
+ raise unittest.SkipTest("pypy won't load lone .pyc files")
with temp_dir() as script_dir:
pkg_dir = os.path.join(script_dir, 'test_pkg')
make_pkg(pkg_dir)
diff --git a/lib-python/2.7/test/test_code.py b/lib-python/2.7/test/test_code.py
--- a/lib-python/2.7/test/test_code.py
+++ b/lib-python/2.7/test/test_code.py
@@ -82,7 +82,7 @@
import unittest
import weakref
-import _testcapi
+from test import test_support
def consts(t):
@@ -104,7 +104,9 @@
class CodeTest(unittest.TestCase):
+ @test_support.impl_detail("test for PyCode_NewEmpty")
def test_newempty(self):
+ import _testcapi
co = _testcapi.code_newempty("filename", "funcname", 15)
self.assertEqual(co.co_filename, "filename")
self.assertEqual(co.co_name, "funcname")
@@ -132,6 +134,7 @@
coderef = weakref.ref(f.__code__, callback)
self.assertTrue(bool(coderef()))
del f
+ test_support.gc_collect()
self.assertFalse(bool(coderef()))
self.assertTrue(self.called)
diff --git a/lib-python/2.7/test/test_codeop.py b/lib-python/2.7/test/test_codeop.py
--- a/lib-python/2.7/test/test_codeop.py
+++ b/lib-python/2.7/test/test_codeop.py
@@ -3,7 +3,7 @@
Nick Mathewson
"""
import unittest
-from test.test_support import run_unittest, is_jython
+from test.test_support import run_unittest, is_jython, check_impl_detail
from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT
@@ -270,7 +270,9 @@
ai("a = 'a\\\n")
ai("a = 1","eval")
- ai("a = (","eval")
+ if check_impl_detail(): # on PyPy it asks for more data, which is not
+ ai("a = (","eval") # completely correct but hard to fix and
+ # really a detail (in my opinion)
ai("]","eval")
ai("())","eval")
ai("[}","eval")
diff --git a/lib-python/2.7/test/test_coercion.py b/lib-python/2.7/test/test_coercion.py
--- a/lib-python/2.7/test/test_coercion.py
+++ b/lib-python/2.7/test/test_coercion.py
@@ -1,6 +1,7 @@
import copy
import unittest
-from test.test_support import run_unittest, TestFailed, check_warnings
+from test.test_support import (
+ run_unittest, TestFailed, check_warnings, check_impl_detail)
# Fake a number that implements numeric methods through __coerce__
@@ -306,12 +307,18 @@
self.assertNotEqual(cmp(u'fish', evil_coercer), 0)
self.assertNotEqual(cmp(slice(1), evil_coercer), 0)
# ...but that this still works
- class WackyComparer(object):
- def __cmp__(slf, other):
- self.assertTrue(other == 42, 'expected evil_coercer, got %r' % other)
- return 0
- __hash__ = None # Invalid cmp makes this unhashable
- self.assertEqual(cmp(WackyComparer(), evil_coercer), 0)
+ if check_impl_detail():
+ # NB. I (arigo) would consider the following as implementation-
+ # specific. For example, in CPython, if we replace 42 with 42.0
+ # both below and in CoerceTo() above, then the test fails. This
+ # hints that the behavior is really dependent on some obscure
+ # internal details.
+ class WackyComparer(object):
+ def __cmp__(slf, other):
+ self.assertTrue(other == 42, 'expected evil_coercer, got %r' % other)
+ return 0
+ __hash__ = None # Invalid cmp makes this unhashable
+ self.assertEqual(cmp(WackyComparer(), evil_coercer), 0)
# ...and classic classes too, since that code path is a little different
class ClassicWackyComparer:
def __cmp__(slf, other):
diff --git a/lib-python/2.7/test/test_compile.py b/lib-python/2.7/test/test_compile.py
--- a/lib-python/2.7/test/test_compile.py
+++ b/lib-python/2.7/test/test_compile.py
@@ -3,6 +3,7 @@
import _ast
from test import test_support
import textwrap
+from test.test_support import check_impl_detail
class TestSpecifics(unittest.TestCase):
@@ -90,12 +91,13 @@
self.assertEqual(m.results, ('z', g))
exec 'z = locals()' in g, m
self.assertEqual(m.results, ('z', m))
- try:
- exec 'z = b' in m
- except TypeError:
- pass
- else:
- self.fail('Did not validate globals as a real dict')
+ if check_impl_detail():
+ try:
+ exec 'z = b' in m
+ except TypeError:
+ pass
+ else:
+ self.fail('Did not validate globals as a real dict')
class A:
"Non-mapping"
diff --git a/lib-python/2.7/test/test_copy.py b/lib-python/2.7/test/test_copy.py
--- a/lib-python/2.7/test/test_copy.py
+++ b/lib-python/2.7/test/test_copy.py
@@ -637,6 +637,7 @@
self.assertEqual(v[c], d)
self.assertEqual(len(v), 2)
del c, d
+ test_support.gc_collect()
self.assertEqual(len(v), 1)
x, y = C(), C()
# The underlying containers are decoupled
@@ -666,6 +667,7 @@
self.assertEqual(v[a].i, b.i)
self.assertEqual(v[c].i, d.i)
del c
+ test_support.gc_collect()
self.assertEqual(len(v), 1)
def test_deepcopy_weakvaluedict(self):
@@ -689,6 +691,7 @@
self.assertTrue(t is d)
del x, y, z, t
del d
+ test_support.gc_collect()
self.assertEqual(len(v), 1)
def test_deepcopy_bound_method(self):
diff --git a/lib-python/2.7/test/test_cpickle.py b/lib-python/2.7/test/test_cpickle.py
--- a/lib-python/2.7/test/test_cpickle.py
+++ b/lib-python/2.7/test/test_cpickle.py
@@ -61,27 +61,27 @@
error = cPickle.BadPickleGet
def test_recursive_list(self):
- self.assertRaises(ValueError,
+ self.assertRaises((ValueError, RuntimeError),
AbstractPickleTests.test_recursive_list,
self)
def test_recursive_tuple(self):
- self.assertRaises(ValueError,
+ self.assertRaises((ValueError, RuntimeError),
AbstractPickleTests.test_recursive_tuple,
self)
def test_recursive_inst(self):
- self.assertRaises(ValueError,
+ self.assertRaises((ValueError, RuntimeError),
AbstractPickleTests.test_recursive_inst,
self)
def test_recursive_dict(self):
- self.assertRaises(ValueError,
+ self.assertRaises((ValueError, RuntimeError),
AbstractPickleTests.test_recursive_dict,
self)
def test_recursive_multi(self):
- self.assertRaises(ValueError,
+ self.assertRaises((ValueError, RuntimeError),
AbstractPickleTests.test_recursive_multi,
self)
diff --git a/lib-python/2.7/test/test_csv.py b/lib-python/2.7/test/test_csv.py
--- a/lib-python/2.7/test/test_csv.py
+++ b/lib-python/2.7/test/test_csv.py
@@ -54,8 +54,10 @@
self.assertEqual(obj.dialect.skipinitialspace, False)
self.assertEqual(obj.dialect.strict, False)
# Try deleting or changing attributes (they are read-only)
- self.assertRaises(TypeError, delattr, obj.dialect, 'delimiter')
- self.assertRaises(TypeError, setattr, obj.dialect, 'delimiter', ':')
+ self.assertRaises((TypeError, AttributeError), delattr, obj.dialect,
+ 'delimiter')
+ self.assertRaises((TypeError, AttributeError), setattr, obj.dialect,
+ 'delimiter', ':')
self.assertRaises(AttributeError, delattr, obj.dialect, 'quoting')
self.assertRaises(AttributeError, setattr, obj.dialect,
'quoting', None)
diff --git a/lib-python/2.7/test/test_deque.py b/lib-python/2.7/test/test_deque.py
--- a/lib-python/2.7/test/test_deque.py
+++ b/lib-python/2.7/test/test_deque.py
@@ -109,7 +109,7 @@
self.assertEqual(deque('abc', maxlen=4).maxlen, 4)
self.assertEqual(deque('abc', maxlen=2).maxlen, 2)
self.assertEqual(deque('abc', maxlen=0).maxlen, 0)
- with self.assertRaises(AttributeError):
+ with self.assertRaises((AttributeError, TypeError)):
d = deque('abc')
d.maxlen = 10
@@ -352,7 +352,10 @@
for match in (True, False):
d = deque(['ab'])
d.extend([MutateCmp(d, match), 'c'])
- self.assertRaises(IndexError, d.remove, 'c')
+ # On CPython we get IndexError: deque mutated during remove().
+ # Why is it an IndexError during remove() only???
+ # On PyPy it is a RuntimeError, as in the other operations.
+ self.assertRaises((IndexError, RuntimeError), d.remove, 'c')
self.assertEqual(d, deque())
def test_repr(self):
@@ -514,7 +517,7 @@
container = reversed(deque([obj, 1]))
obj.x = iter(container)
del obj, container
- gc.collect()
+ test_support.gc_collect()
self.assertTrue(ref() is None, "Cycle was not collected")
class TestVariousIteratorArgs(unittest.TestCase):
@@ -630,6 +633,7 @@
p = weakref.proxy(d)
self.assertEqual(str(p), str(d))
d = None
+ test_support.gc_collect()
self.assertRaises(ReferenceError, str, p)
def test_strange_subclass(self):
diff --git a/lib-python/2.7/test/test_descr.py b/lib-python/2.7/test/test_descr.py
--- a/lib-python/2.7/test/test_descr.py
+++ b/lib-python/2.7/test/test_descr.py
@@ -2,6 +2,7 @@
import sys
import types
import unittest
+import popen2 # trigger early the warning from popen2.py
from copy import deepcopy
from test import test_support
@@ -1128,7 +1129,7 @@
# Test lookup leaks [SF bug 572567]
import gc
- if hasattr(gc, 'get_objects'):
+ if test_support.check_impl_detail():
class G(object):
def __cmp__(self, other):
return 0
@@ -1741,6 +1742,10 @@
raise MyException
for name, runner, meth_impl, ok, env in specials:
+ if name == '__length_hint__' or name == '__sizeof__':
+ if not test_support.check_impl_detail():
+ continue
+
class X(Checker):
pass
for attr, obj in env.iteritems():
@@ -1980,7 +1985,9 @@
except TypeError, msg:
self.assertTrue(str(msg).find("weak reference") >= 0)
else:
- self.fail("weakref.ref(no) should be illegal")
+ if test_support.check_impl_detail(pypy=False):
+ self.fail("weakref.ref(no) should be illegal")
+ #else: pypy supports taking weakrefs to some more objects
class Weak(object):
__slots__ = ['foo', '__weakref__']
yes = Weak()
@@ -3092,7 +3099,16 @@
class R(J):
__slots__ = ["__dict__", "__weakref__"]
- for cls, cls2 in ((G, H), (G, I), (I, H), (Q, R), (R, Q)):
+ if test_support.check_impl_detail(pypy=False):
+ lst = ((G, H), (G, I), (I, H), (Q, R), (R, Q))
+ else:
+ # Not supported in pypy: changing the __class__ of an object
+ # to another __class__ that just happens to have the same slots.
+ # If needed, we can add the feature, but what we'll likely do
+ # then is to allow mostly any __class__ assignment, even if the
+ # classes have different __slots__, because it's easier.
+ lst = ((Q, R), (R, Q))
+ for cls, cls2 in lst:
x = cls()
x.a = 1
x.__class__ = cls2
@@ -3175,7 +3191,8 @@
except TypeError:
pass
else:
- self.fail("%r's __dict__ can be modified" % cls)
+ if test_support.check_impl_detail(pypy=False):
+ self.fail("%r's __dict__ can be modified" % cls)
# Modules also disallow __dict__ assignment
class Module1(types.ModuleType, Base):
@@ -4383,13 +4400,10 @@
self.assertTrue(l.__add__ != [5].__add__)
self.assertTrue(l.__add__ != l.__mul__)
self.assertTrue(l.__add__.__name__ == '__add__')
- if hasattr(l.__add__, '__self__'):
- # CPython
- self.assertTrue(l.__add__.__self__ is l)
+ self.assertTrue(l.__add__.__self__ is l)
+ if hasattr(l.__add__, '__objclass__'): # CPython
self.assertTrue(l.__add__.__objclass__ is list)
- else:
- # Python implementations where [].__add__ is a normal bound method
- self.assertTrue(l.__add__.im_self is l)
+ else: # PyPy
self.assertTrue(l.__add__.im_class is list)
self.assertEqual(l.__add__.__doc__, list.__add__.__doc__)
try:
@@ -4578,8 +4592,12 @@
str.split(fake_str)
# call a slot wrapper descriptor
- with self.assertRaises(TypeError):
- str.__add__(fake_str, "abc")
+ try:
+ r = str.__add__(fake_str, "abc")
+ except TypeError:
+ pass
+ else:
+ self.assertEqual(r, NotImplemented)
class DictProxyTests(unittest.TestCase):
diff --git a/lib-python/2.7/test/test_descrtut.py b/lib-python/2.7/test/test_descrtut.py
--- a/lib-python/2.7/test/test_descrtut.py
+++ b/lib-python/2.7/test/test_descrtut.py
@@ -172,46 +172,12 @@
AttributeError: 'list' object has no attribute '__methods__'
>>>
-Instead, you can get the same information from the list type:
+Instead, you can get the same information from the list type
+(the following example filters out the numerous method names
+starting with '_'):
- >>> pprint.pprint(dir(list)) # like list.__dict__.keys(), but sorted
- ['__add__',
- '__class__',
- '__contains__',
- '__delattr__',
- '__delitem__',
- '__delslice__',
- '__doc__',
- '__eq__',
- '__format__',
- '__ge__',
- '__getattribute__',
- '__getitem__',
- '__getslice__',
- '__gt__',
- '__hash__',
- '__iadd__',
- '__imul__',
- '__init__',
- '__iter__',
- '__le__',
- '__len__',
- '__lt__',
- '__mul__',
- '__ne__',
- '__new__',
- '__reduce__',
- '__reduce_ex__',
- '__repr__',
- '__reversed__',
- '__rmul__',
- '__setattr__',
- '__setitem__',
- '__setslice__',
- '__sizeof__',
- '__str__',
- '__subclasshook__',
- 'append',
+ >>> pprint.pprint([name for name in dir(list) if not name.startswith('_')])
+ ['append',
'count',
'extend',
'index',
diff --git a/lib-python/2.7/test/test_dict.py b/lib-python/2.7/test/test_dict.py
--- a/lib-python/2.7/test/test_dict.py
+++ b/lib-python/2.7/test/test_dict.py
@@ -319,7 +319,8 @@
self.assertEqual(va, int(ka))
kb, vb = tb = b.popitem()
self.assertEqual(vb, int(kb))
- self.assertFalse(copymode < 0 and ta != tb)
+ if test_support.check_impl_detail():
+ self.assertFalse(copymode < 0 and ta != tb)
self.assertFalse(a)
self.assertFalse(b)
diff --git a/lib-python/2.7/test/test_dis.py b/lib-python/2.7/test/test_dis.py
--- a/lib-python/2.7/test/test_dis.py
+++ b/lib-python/2.7/test/test_dis.py
@@ -56,8 +56,8 @@
%-4d 0 LOAD_CONST 1 (0)
3 POP_JUMP_IF_TRUE 38
6 LOAD_GLOBAL 0 (AssertionError)
- 9 BUILD_LIST 0
- 12 LOAD_FAST 0 (x)
+ 9 LOAD_FAST 0 (x)
+ 12 BUILD_LIST_FROM_ARG 0
15 GET_ITER
>> 16 FOR_ITER 12 (to 31)
19 STORE_FAST 1 (s)
diff --git a/lib-python/2.7/test/test_doctest.py b/lib-python/2.7/test/test_doctest.py
--- a/lib-python/2.7/test/test_doctest.py
+++ b/lib-python/2.7/test/test_doctest.py
@@ -782,7 +782,7 @@
... >>> x = 12
... >>> print x//0
... Traceback (most recent call last):
- ... ZeroDivisionError: integer division or modulo by zero
+ ... ZeroDivisionError: integer division by zero
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
@@ -799,7 +799,7 @@
... >>> print 'pre-exception output', x//0
... pre-exception output
... Traceback (most recent call last):
- ... ZeroDivisionError: integer division or modulo by zero
+ ... ZeroDivisionError: integer division by zero
... '''
>>> test = doctest.DocTestFinder().find(f)[0]
>>> doctest.DocTestRunner(verbose=False).run(test)
@@ -810,7 +810,7 @@
print 'pre-exception output', x//0
Exception raised:
...
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: integer division by zero
TestResults(failed=1, attempted=2)
Exception messages may contain newlines:
@@ -978,7 +978,7 @@
Exception raised:
Traceback (most recent call last):
...
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: integer division by zero
TestResults(failed=1, attempted=1)
"""
def displayhook(): r"""
@@ -1924,7 +1924,7 @@
> (1)()
-> calls_set_trace()
(Pdb) print foo
- *** NameError: name 'foo' is not defined
+ *** NameError: global name 'foo' is not defined
(Pdb) continue
TestResults(failed=0, attempted=2)
"""
@@ -2229,7 +2229,7 @@
favorite_color
Exception raised:
...
- NameError: name 'favorite_color' is not defined
+ NameError: global name 'favorite_color' is not defined
@@ -2289,7 +2289,7 @@
favorite_color
Exception raised:
...
- NameError: name 'favorite_color' is not defined
+ NameError: global name 'favorite_color' is not defined
**********************************************************************
1 items had failures:
1 of 2 in test_doctest.txt
@@ -2382,7 +2382,7 @@
favorite_color
Exception raised:
...
- NameError: name 'favorite_color' is not defined
+ NameError: global name 'favorite_color' is not defined
TestResults(failed=1, attempted=2)
>>> doctest.master = None # Reset master.
diff --git a/lib-python/2.7/test/test_dumbdbm.py b/lib-python/2.7/test/test_dumbdbm.py
--- a/lib-python/2.7/test/test_dumbdbm.py
+++ b/lib-python/2.7/test/test_dumbdbm.py
@@ -107,9 +107,11 @@
f.close()
# Mangle the file by adding \r before each newline
- data = open(_fname + '.dir').read()
+ with open(_fname + '.dir') as f:
+ data = f.read()
data = data.replace('\n', '\r\n')
- open(_fname + '.dir', 'wb').write(data)
+ with open(_fname + '.dir', 'wb') as f:
+ f.write(data)
f = dumbdbm.open(_fname)
self.assertEqual(f['1'], 'hello')
diff --git a/lib-python/2.7/test/test_extcall.py b/lib-python/2.7/test/test_extcall.py
--- a/lib-python/2.7/test/test_extcall.py
+++ b/lib-python/2.7/test/test_extcall.py
@@ -90,19 +90,19 @@
>>> class Nothing: pass
...
- >>> g(*Nothing())
+ >>> g(*Nothing()) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: g() argument after * must be a sequence, not instance
+ TypeError: ...argument after * must be a sequence, not instance
>>> class Nothing:
... def __len__(self): return 5
...
- >>> g(*Nothing())
+ >>> g(*Nothing()) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: g() argument after * must be a sequence, not instance
+ TypeError: ...argument after * must be a sequence, not instance
>>> class Nothing():
... def __len__(self): return 5
@@ -154,52 +154,50 @@
...
TypeError: g() got multiple values for keyword argument 'x'
- >>> f(**{1:2})
+ >>> f(**{1:2}) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: f() keywords must be strings
+ TypeError: ...keywords must be strings
>>> h(**{'e': 2})
Traceback (most recent call last):
...
TypeError: h() got an unexpected keyword argument 'e'
- >>> h(*h)
+ >>> h(*h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: h() argument after * must be a sequence, not function
+ TypeError: ...argument after * must be a sequence, not function
- >>> dir(*h)
+ >>> dir(*h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: dir() argument after * must be a sequence, not function
+ TypeError: ...argument after * must be a sequence, not function
- >>> None(*h)
+ >>> None(*h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: NoneType object argument after * must be a sequence, \
-not function
+ TypeError: ...argument after * must be a sequence, not function
- >>> h(**h)
+ >>> h(**h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: h() argument after ** must be a mapping, not function
+ TypeError: ...argument after ** must be a mapping, not function
- >>> dir(**h)
+ >>> dir(**h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: dir() argument after ** must be a mapping, not function
+ TypeError: ...argument after ** must be a mapping, not function
- >>> None(**h)
+ >>> None(**h) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: NoneType object argument after ** must be a mapping, \
-not function
+ TypeError: ...argument after ** must be a mapping, not function
- >>> dir(b=1, **{'b': 1})
+ >>> dir(b=1, **{'b': 1}) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: dir() got multiple values for keyword argument 'b'
+ TypeError: ...got multiple values for keyword argument 'b'
Another helper function
@@ -247,10 +245,10 @@
... False
True
- >>> id(1, **{'foo': 1})
+ >>> id(1, **{'foo': 1}) #doctest: +ELLIPSIS
Traceback (most recent call last):
...
- TypeError: id() takes no keyword arguments
+ TypeError: id() ... keyword argument...
A corner case of keyword dictionary items being deleted during
the function call setup. See .
diff --git a/lib-python/2.7/test/test_fcntl.py b/lib-python/2.7/test/test_fcntl.py
--- a/lib-python/2.7/test/test_fcntl.py
+++ b/lib-python/2.7/test/test_fcntl.py
@@ -32,7 +32,7 @@
'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
'freebsd6', 'freebsd7', 'freebsd8',
'bsdos2', 'bsdos3', 'bsdos4',
- 'openbsd', 'openbsd2', 'openbsd3', 'openbsd4'):
+ 'openbsd', 'openbsd2', 'openbsd3', 'openbsd4', 'openbsd5'):
if struct.calcsize('l') == 8:
off_t = 'l'
pid_t = 'i'
diff --git a/lib-python/2.7/test/test_file.py b/lib-python/2.7/test/test_file.py
--- a/lib-python/2.7/test/test_file.py
+++ b/lib-python/2.7/test/test_file.py
@@ -12,7 +12,7 @@
import io
import _pyio as pyio
-from test.test_support import TESTFN, run_unittest
+from test.test_support import TESTFN, run_unittest, gc_collect
from UserList import UserList
class AutoFileTests(unittest.TestCase):
@@ -33,6 +33,7 @@
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
+ gc_collect()
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testAttributes(self):
@@ -157,7 +158,12 @@
def testStdin(self):
# This causes the interpreter to exit on OSF1 v5.1.
if sys.platform != 'osf1V5':
- self.assertRaises((IOError, ValueError), sys.stdin.seek, -1)
+ if sys.stdin.isatty():
+ self.assertRaises((IOError, ValueError), sys.stdin.seek, -1)
+ else:
+ print((
+ ' Skipping sys.stdin.seek(-1): stdin is not a tty.'
+ ' Test manually.'), file=sys.__stdout__)
else:
print((
' Skipping sys.stdin.seek(-1), it may crash the interpreter.'
diff --git a/lib-python/2.7/test/test_file2k.py b/lib-python/2.7/test/test_file2k.py
--- a/lib-python/2.7/test/test_file2k.py
+++ b/lib-python/2.7/test/test_file2k.py
@@ -11,7 +11,7 @@
threading = None
from test import test_support
-from test.test_support import TESTFN, run_unittest
+from test.test_support import TESTFN, run_unittest, gc_collect
from UserList import UserList
class AutoFileTests(unittest.TestCase):
@@ -32,6 +32,7 @@
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
+ gc_collect()
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testAttributes(self):
@@ -116,8 +117,12 @@
for methodname in methods:
method = getattr(self.f, methodname)
+ args = {'readinto': (bytearray(''),),
+ 'seek': (0,),
+ 'write': ('',),
+ }.get(methodname, ())
# should raise on closed file
- self.assertRaises(ValueError, method)
+ self.assertRaises(ValueError, method, *args)
with test_support.check_py3k_warnings():
for methodname in deprecated_methods:
method = getattr(self.f, methodname)
@@ -216,7 +221,12 @@
def testStdin(self):
# This causes the interpreter to exit on OSF1 v5.1.
if sys.platform != 'osf1V5':
- self.assertRaises(IOError, sys.stdin.seek, -1)
+ if sys.stdin.isatty():
+ self.assertRaises(IOError, sys.stdin.seek, -1)
+ else:
+ print >>sys.__stdout__, (
+ ' Skipping sys.stdin.seek(-1): stdin is not a tty.'
+ ' Test manually.')
else:
print >>sys.__stdout__, (
' Skipping sys.stdin.seek(-1), it may crash the interpreter.'
@@ -336,8 +346,9 @@
except ValueError:
pass
else:
- self.fail("%s%r after next() didn't raise ValueError" %
- (methodname, args))
+ if test_support.check_impl_detail():
+ self.fail("%s%r after next() didn't raise ValueError" %
+ (methodname, args))
f.close()
# Test to see if harmless (by accident) mixing of read* and
@@ -388,6 +399,7 @@
if lines != testlines:
self.fail("readlines() after next() with empty buffer "
"failed. Got %r, expected %r" % (line, testline))
+ f.close()
# Reading after iteration hit EOF shouldn't hurt either
f = open(TESTFN)
try:
@@ -438,6 +450,9 @@
self.close_count = 0
self.close_success_count = 0
self.use_buffering = False
+ # to prevent running out of file descriptors on PyPy,
+ # we only keep the 50 most recent files open
+ self.all_files = [None] * 50
def tearDown(self):
if self.f:
@@ -453,9 +468,14 @@
def _create_file(self):
if self.use_buffering:
- self.f = open(self.filename, "w+", buffering=1024*16)
+ f = open(self.filename, "w+", buffering=1024*16)
else:
- self.f = open(self.filename, "w+")
+ f = open(self.filename, "w+")
+ self.f = f
+ self.all_files.append(f)
+ oldf = self.all_files.pop(0)
+ if oldf is not None:
+ oldf.close()
def _close_file(self):
with self._count_lock:
@@ -496,7 +516,6 @@
def _test_close_open_io(self, io_func, nb_workers=5):
def worker():
- self._create_file()
funcs = itertools.cycle((
lambda: io_func(),
lambda: self._close_and_reopen_file(),
@@ -508,7 +527,11 @@
f()
except (IOError, ValueError):
pass
+ self._create_file()
self._run_workers(worker, nb_workers)
+ # make sure that all files can be closed now
+ del self.all_files
+ gc_collect()
if test_support.verbose:
# Useful verbose statistics when tuning this test to take
# less time to run but still ensuring that its still useful.
diff --git a/lib-python/2.7/test/test_fileio.py b/lib-python/2.7/test/test_fileio.py
--- a/lib-python/2.7/test/test_fileio.py
+++ b/lib-python/2.7/test/test_fileio.py
@@ -12,6 +12,7 @@
from test.test_support import TESTFN, check_warnings, run_unittest, make_bad_fd
from test.test_support import py3k_bytes as bytes
+from test.test_support import gc_collect
from test.script_helper import run_python
from _io import FileIO as _FileIO
@@ -34,6 +35,7 @@
self.assertEqual(self.f.tell(), p.tell())
self.f.close()
self.f = None
+ gc_collect()
self.assertRaises(ReferenceError, getattr, p, 'tell')
def testSeekTell(self):
@@ -104,8 +106,8 @@
self.assertTrue(f.closed)
def testMethods(self):
- methods = ['fileno', 'isatty', 'read', 'readinto',
- 'seek', 'tell', 'truncate', 'write', 'seekable',
+ methods = ['fileno', 'isatty', 'read',
+ 'tell', 'truncate', 'seekable',
'readable', 'writable']
if sys.platform.startswith('atheos'):
methods.remove('truncate')
@@ -117,6 +119,10 @@
method = getattr(self.f, methodname)
# should raise on closed file
self.assertRaises(ValueError, method)
+ # methods with one argument
+ self.assertRaises(ValueError, self.f.readinto, 0)
+ self.assertRaises(ValueError, self.f.write, 0)
+ self.assertRaises(ValueError, self.f.seek, 0)
def testOpendir(self):
# Issue 3703: opening a directory should fill the errno
diff --git a/lib-python/2.7/test/test_format.py b/lib-python/2.7/test/test_format.py
--- a/lib-python/2.7/test/test_format.py
+++ b/lib-python/2.7/test/test_format.py
@@ -242,7 +242,7 @@
try:
testformat(formatstr, args)
except exception, exc:
- if str(exc) == excmsg:
+ if str(exc) == excmsg or not test_support.check_impl_detail():
if verbose:
print "yes"
else:
@@ -272,13 +272,16 @@
test_exc(u'no format', u'1', TypeError,
"not all arguments converted during string formatting")
- class Foobar(long):
- def __oct__(self):
- # Returning a non-string should not blow up.
- return self + 1
-
- test_exc('%o', Foobar(), TypeError,
- "expected string or Unicode object, long found")
+ if test_support.check_impl_detail():
+ # __oct__() is called if Foobar inherits from 'long', but
+ # not, say, 'object' or 'int' or 'str'. This seems strange
+ # enough to consider it a complete implementation detail.
+ class Foobar(long):
+ def __oct__(self):
+ # Returning a non-string should not blow up.
+ return self + 1
+ test_exc('%o', Foobar(), TypeError,
+ "expected string or Unicode object, long found")
if maxsize == 2**31-1:
# crashes 2.2.1 and earlier:
diff --git a/lib-python/2.7/test/test_funcattrs.py b/lib-python/2.7/test/test_funcattrs.py
--- a/lib-python/2.7/test/test_funcattrs.py
+++ b/lib-python/2.7/test/test_funcattrs.py
@@ -14,6 +14,8 @@
self.b = b
def cannot_set_attr(self, obj, name, value, exceptions):
+ if not test_support.check_impl_detail():
+ exceptions = (TypeError, AttributeError)
# Helper method for other tests.
try:
setattr(obj, name, value)
@@ -286,13 +288,13 @@
def test_delete_func_dict(self):
try:
del self.b.__dict__
- except TypeError:
+ except (AttributeError, TypeError):
pass
else:
self.fail("deleting function dictionary should raise TypeError")
try:
del self.b.func_dict
- except TypeError:
+ except (AttributeError, TypeError):
pass
else:
self.fail("deleting function dictionary should raise TypeError")
diff --git a/lib-python/2.7/test/test_functools.py b/lib-python/2.7/test/test_functools.py
--- a/lib-python/2.7/test/test_functools.py
+++ b/lib-python/2.7/test/test_functools.py
@@ -45,6 +45,8 @@
# attributes should not be writable
if not isinstance(self.thetype, type):
return
+ if not test_support.check_impl_detail():
+ return
self.assertRaises(TypeError, setattr, p, 'func', map)
self.assertRaises(TypeError, setattr, p, 'args', (1, 2))
self.assertRaises(TypeError, setattr, p, 'keywords', dict(a=1, b=2))
@@ -136,6 +138,7 @@
p = proxy(f)
self.assertEqual(f.func, p.func)
f = None
+ test_support.gc_collect()
self.assertRaises(ReferenceError, getattr, p, 'func')
def test_with_bound_and_unbound_methods(self):
@@ -172,7 +175,7 @@
updated=functools.WRAPPER_UPDATES):
# Check attributes were assigned
for name in assigned:
- self.assertTrue(getattr(wrapper, name) is getattr(wrapped, name))
+ self.assertTrue(getattr(wrapper, name) == getattr(wrapped, name), name)
# Check attributes were updated
for name in updated:
wrapper_attr = getattr(wrapper, name)
diff --git a/lib-python/2.7/test/test_generators.py b/lib-python/2.7/test/test_generators.py
--- a/lib-python/2.7/test/test_generators.py
+++ b/lib-python/2.7/test/test_generators.py
@@ -190,7 +190,7 @@
File "", line 1, in ?
File "", line 2, in g
File "", line 2, in f
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: integer division by zero
>>> k.next() # and the generator cannot be resumed
Traceback (most recent call last):
File "", line 1, in ?
@@ -733,14 +733,16 @@
... yield 1
Traceback (most recent call last):
..
-SyntaxError: 'return' with argument inside generator (, line 3)
+ File "", line 3
+SyntaxError: 'return' with argument inside generator
>>> def f():
... yield 1
... return 22
Traceback (most recent call last):
..
-SyntaxError: 'return' with argument inside generator (, line 3)
+ File "", line 3
+SyntaxError: 'return' with argument inside generator
"return None" is not the same as "return" in a generator:
@@ -749,7 +751,8 @@
... return None
Traceback (most recent call last):
..
-SyntaxError: 'return' with argument inside generator (, line 3)
+ File "", line 3
+SyntaxError: 'return' with argument inside generator
These are fine:
@@ -878,7 +881,9 @@
... if 0:
... yield 2 # because it's a generator (line 10)
Traceback (most recent call last):
-SyntaxError: 'return' with argument inside generator (, line 10)
+ ...
+ File "", line 10
+SyntaxError: 'return' with argument inside generator
This one caused a crash (see SF bug 567538):
@@ -1496,6 +1501,10 @@
"""
coroutine_tests = """\
+A helper function to call gc.collect() without printing
+>>> import gc
+>>> def gc_collect(): gc.collect()
+
Sending a value into a started generator:
>>> def f():
@@ -1570,13 +1579,14 @@
>>> def f(): return lambda x=(yield): 1
Traceback (most recent call last):
...
-SyntaxError: 'return' with argument inside generator (, line 1)
+ File "", line 1
+SyntaxError: 'return' with argument inside generator
>>> def f(): x = yield = y
Traceback (most recent call last):
...
File "", line 1
-SyntaxError: assignment to yield expression not possible
+SyntaxError: can't assign to yield expression
>>> def f(): (yield bar) = y
Traceback (most recent call last):
@@ -1665,7 +1675,7 @@
>>> f().throw("abc") # throw on just-opened generator
Traceback (most recent call last):
...
-TypeError: exceptions must be classes, or instances, not str
+TypeError: exceptions must be old-style classes or derived from BaseException, not str
Now let's try closing a generator:
@@ -1697,7 +1707,7 @@
>>> g = f()
>>> g.next()
->>> del g
+>>> del g; gc_collect()
exiting
>>> class context(object):
@@ -1708,7 +1718,7 @@
... yield
>>> g = f()
>>> g.next()
->>> del g
+>>> del g; gc_collect()
exiting
@@ -1721,7 +1731,7 @@
>>> g = f()
>>> g.next()
->>> del g
+>>> del g; gc_collect()
finally
@@ -1747,6 +1757,7 @@
>>> g = f()
>>> g.next()
>>> del g
+>>> gc_collect()
>>> sys.stderr.getvalue().startswith(
... "Exception RuntimeError: 'generator ignored GeneratorExit' in "
... )
@@ -1812,6 +1823,9 @@
references. We add it to the standard suite so the routine refleak-tests
would trigger if it starts being uncleanable again.
+>>> import gc
+>>> def gc_collect(): gc.collect()
+
>>> import itertools
>>> def leak():
... class gen:
@@ -1863,9 +1877,10 @@
...
... l = Leaker()
... del l
+... gc_collect()
... err = sys.stderr.getvalue().strip()
... err.startswith(
-... "Exception RuntimeError: RuntimeError() in <"
+... "Exception RuntimeError: RuntimeError() in "
... )
... err.endswith("> ignored")
... len(err.splitlines())
diff --git a/lib-python/2.7/test/test_genexps.py b/lib-python/2.7/test/test_genexps.py
--- a/lib-python/2.7/test/test_genexps.py
+++ b/lib-python/2.7/test/test_genexps.py
@@ -128,8 +128,9 @@
Verify re-use of tuples (a side benefit of using genexps over listcomps)
+ >>> from test.test_support import check_impl_detail
>>> tupleids = map(id, ((i,i) for i in xrange(10)))
- >>> int(max(tupleids) - min(tupleids))
+ >>> int(max(tupleids) - min(tupleids)) if check_impl_detail() else 0
0
Verify that syntax error's are raised for genexps used as lvalues
@@ -198,13 +199,13 @@
>>> g = (10 // i for i in (5, 0, 2))
>>> g.next()
2
- >>> g.next()
+ >>> g.next() # doctest: +ELLIPSIS
Traceback (most recent call last):
File "", line 1, in -toplevel-
g.next()
File "", line 1, in
g = (10 // i for i in (5, 0, 2))
- ZeroDivisionError: integer division or modulo by zero
+ ZeroDivisionError: integer division...by zero
>>> g.next()
Traceback (most recent call last):
File "", line 1, in -toplevel-
diff --git a/lib-python/2.7/test/test_heapq.py b/lib-python/2.7/test/test_heapq.py
--- a/lib-python/2.7/test/test_heapq.py
+++ b/lib-python/2.7/test/test_heapq.py
@@ -215,6 +215,11 @@
class TestHeapPython(TestHeap):
module = py_heapq
+ def test_islice_protection(self):
+ m = self.module
+ self.assertFalse(m.nsmallest(-1, [1]))
+ self.assertFalse(m.nlargest(-1, [1]))
+
@skipUnless(c_heapq, 'requires _heapq')
class TestHeapC(TestHeap):
diff --git a/lib-python/2.7/test/test_import.py b/lib-python/2.7/test/test_import.py
--- a/lib-python/2.7/test/test_import.py
+++ b/lib-python/2.7/test/test_import.py
@@ -7,7 +7,8 @@
import sys
import unittest
from test.test_support import (unlink, TESTFN, unload, run_unittest, rmtree,
- is_jython, check_warnings, EnvironmentVarGuard)
+ is_jython, check_warnings, EnvironmentVarGuard,
+ impl_detail, check_impl_detail)
import textwrap
from test import script_helper
@@ -69,7 +70,8 @@
self.assertEqual(mod.b, b,
"module loaded (%s) but contents invalid" % mod)
finally:
- unlink(source)
+ if check_impl_detail(pypy=False):
+ unlink(source)
try:
imp.reload(mod)
@@ -149,13 +151,16 @@
# Compile & remove .py file, we only need .pyc (or .pyo).
with open(filename, 'r') as f:
py_compile.compile(filename)
- unlink(filename)
+ if check_impl_detail(pypy=False):
+ # pypy refuses to import a .pyc if the .py does not exist
+ unlink(filename)
# Need to be able to load from current dir.
sys.path.append('')
# This used to crash.
exec 'import ' + module
+ reload(longlist)
# Cleanup.
del sys.path[-1]
@@ -326,6 +331,7 @@
self.assertEqual(mod.code_filename, self.file_name)
self.assertEqual(mod.func_filename, self.file_name)
+ @impl_detail("pypy refuses to import without a .py source", pypy=False)
def test_module_without_source(self):
target = "another_module.py"
py_compile.compile(self.file_name, dfile=target)
diff --git a/lib-python/2.7/test/test_inspect.py b/lib-python/2.7/test/test_inspect.py
--- a/lib-python/2.7/test/test_inspect.py
+++ b/lib-python/2.7/test/test_inspect.py
@@ -4,11 +4,11 @@
import unittest
import inspect
import linecache
-import datetime
from UserList import UserList
from UserDict import UserDict
from test.test_support import run_unittest, check_py3k_warnings
+from test.test_support import check_impl_detail
with check_py3k_warnings(
("tuple parameter unpacking has been removed", SyntaxWarning),
@@ -74,7 +74,8 @@
def test_excluding_predicates(self):
self.istest(inspect.isbuiltin, 'sys.exit')
- self.istest(inspect.isbuiltin, '[].append')
+ if check_impl_detail():
+ self.istest(inspect.isbuiltin, '[].append')
self.istest(inspect.iscode, 'mod.spam.func_code')
self.istest(inspect.isframe, 'tb.tb_frame')
self.istest(inspect.isfunction, 'mod.spam')
@@ -92,9 +93,9 @@
else:
self.assertFalse(inspect.isgetsetdescriptor(type(tb.tb_frame).f_locals))
if hasattr(types, 'MemberDescriptorType'):
- self.istest(inspect.ismemberdescriptor, 'datetime.timedelta.days')
+ self.istest(inspect.ismemberdescriptor, 'type(lambda: None).func_globals')
else:
- self.assertFalse(inspect.ismemberdescriptor(datetime.timedelta.days))
+ self.assertFalse(inspect.ismemberdescriptor(type(lambda: None).func_globals))
def test_isroutine(self):
self.assertTrue(inspect.isroutine(mod.spam))
@@ -567,7 +568,8 @@
else:
self.fail('Exception not raised')
self.assertIs(type(ex1), type(ex2))
- self.assertEqual(str(ex1), str(ex2))
+ if check_impl_detail():
+ self.assertEqual(str(ex1), str(ex2))
def makeCallable(self, signature):
"""Create a function that returns its locals(), excluding the
diff --git a/lib-python/2.7/test/test_int.py b/lib-python/2.7/test/test_int.py
--- a/lib-python/2.7/test/test_int.py
+++ b/lib-python/2.7/test/test_int.py
@@ -1,7 +1,7 @@
import sys
import unittest
-from test.test_support import run_unittest, have_unicode
+from test.test_support import run_unittest, have_unicode, check_impl_detail
import math
L = [
@@ -392,9 +392,10 @@
try:
int(TruncReturnsNonIntegral())
except TypeError as e:
- self.assertEqual(str(e),
- "__trunc__ returned non-Integral"
- " (type NonIntegral)")
+ if check_impl_detail(cpython=True):
+ self.assertEqual(str(e),
+ "__trunc__ returned non-Integral"
+ " (type NonIntegral)")
else:
self.fail("Failed to raise TypeError with %s" %
((base, trunc_result_base),))
diff --git a/lib-python/2.7/test/test_io.py b/lib-python/2.7/test/test_io.py
--- a/lib-python/2.7/test/test_io.py
+++ b/lib-python/2.7/test/test_io.py
@@ -2561,6 +2561,31 @@
"""Check that a partial write, when it gets interrupted, properly
invokes the signal handler, and bubbles up the exception raised
in the latter."""
+
+ # XXX This test has three flaws that appear when objects are
+ # XXX not reference counted.
+
+ # - if wio.write() happens to trigger a garbage collection,
+ # the signal exception may be raised when some __del__
+ # method is running; it will not reach the assertRaises()
+ # call.
+
+ # - more subtle, if the wio object is not destroyed at once
+ # and survives this function, the next opened file is likely
+ # to have the same fileno (since the file descriptor was
+ # actively closed). When wio.__del__ is finally called, it
+ # will close the other's test file... To trigger this with
+ # CPython, try adding "global wio" in this function.
+
+ # - This happens only for streams created by the _pyio module,
+ # because a wio.close() that fails still considers that the
+ # file needs to be closed again. You can try adding an
+ # "assert wio.closed" at the end of the function.
+
+ # Fortunately, a little gc.collect() seems to be enough to
+ # work around all these issues.
+ support.gc_collect()
+
read_results = []
def _read():
s = os.read(r, 1)
diff --git a/lib-python/2.7/test/test_isinstance.py b/lib-python/2.7/test/test_isinstance.py
--- a/lib-python/2.7/test/test_isinstance.py
+++ b/lib-python/2.7/test/test_isinstance.py
@@ -260,7 +260,18 @@
# Make sure that calling isinstance with a deeply nested tuple for its
# argument will raise RuntimeError eventually.
tuple_arg = (compare_to,)
- for cnt in xrange(sys.getrecursionlimit()+5):
+
+
+ if test_support.check_impl_detail(cpython=True):
+ RECURSION_LIMIT = sys.getrecursionlimit()
+ else:
+ # on non-CPython implementations, the maximum
+ # actual recursion limit might be higher, but
+ # probably not higher than 99999
+ #
+ RECURSION_LIMIT = 99999
+
+ for cnt in xrange(RECURSION_LIMIT+5):
tuple_arg = (tuple_arg,)
fxn(arg, tuple_arg)
diff --git a/lib-python/2.7/test/test_itertools.py b/lib-python/2.7/test/test_itertools.py
--- a/lib-python/2.7/test/test_itertools.py
+++ b/lib-python/2.7/test/test_itertools.py
@@ -137,6 +137,8 @@
self.assertEqual(result, list(combinations2(values, r))) # matches second pure python version
self.assertEqual(result, list(combinations3(values, r))) # matches second pure python version
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_combinations_tuple_reuse(self):
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, combinations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(combinations('abcde', 3))))), 1)
@@ -207,7 +209,10 @@
self.assertEqual(result, list(cwr1(values, r))) # matches first pure python version
self.assertEqual(result, list(cwr2(values, r))) # matches second pure python version
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_combinations_with_replacement_tuple_reuse(self):
# Test implementation detail: tuple re-use
+ cwr = combinations_with_replacement
self.assertEqual(len(set(map(id, cwr('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(cwr('abcde', 3))))), 1)
@@ -271,6 +276,8 @@
self.assertEqual(result, list(permutations(values, None))) # test r as None
self.assertEqual(result, list(permutations(values))) # test default r
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_permutations_tuple_reuse(self):
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, permutations('abcde', 3)))), 1)
self.assertNotEqual(len(set(map(id, list(permutations('abcde', 3))))), 1)
@@ -526,6 +533,9 @@
self.assertEqual(list(izip()), zip())
self.assertRaises(TypeError, izip, 3)
self.assertRaises(TypeError, izip, range(3), 3)
+
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_izip_tuple_reuse(self):
# Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in izip('abc', 'def')],
zip('abc', 'def'))
@@ -575,6 +585,8 @@
else:
self.fail('Did not raise Type in: ' + stmt)
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_iziplongest_tuple_reuse(self):
# Check tuple re-use (implementation detail)
self.assertEqual([tuple(list(pair)) for pair in izip_longest('abc', 'def')],
zip('abc', 'def'))
@@ -683,6 +695,8 @@
args = map(iter, args)
self.assertEqual(len(list(product(*args))), expected_len)
+ @test_support.impl_detail("tuple reuse is specific to CPython")
+ def test_product_tuple_reuse(self):
# Test implementation detail: tuple re-use
self.assertEqual(len(set(map(id, product('abc', 'def')))), 1)
self.assertNotEqual(len(set(map(id, list(product('abc', 'def'))))), 1)
@@ -771,11 +785,11 @@
self.assertRaises(ValueError, islice, xrange(10), 1, -5, -1)
self.assertRaises(ValueError, islice, xrange(10), 1, 10, -1)
self.assertRaises(ValueError, islice, xrange(10), 1, 10, 0)
- self.assertRaises(ValueError, islice, xrange(10), 'a')
- self.assertRaises(ValueError, islice, xrange(10), 'a', 1)
- self.assertRaises(ValueError, islice, xrange(10), 1, 'a')
- self.assertRaises(ValueError, islice, xrange(10), 'a', 1, 1)
- self.assertRaises(ValueError, islice, xrange(10), 1, 'a', 1)
+ self.assertRaises((ValueError, TypeError), islice, xrange(10), 'a')
+ self.assertRaises((ValueError, TypeError), islice, xrange(10), 'a', 1)
+ self.assertRaises((ValueError, TypeError), islice, xrange(10), 1, 'a')
+ self.assertRaises((ValueError, TypeError), islice, xrange(10), 'a', 1, 1)
+ self.assertRaises((ValueError, TypeError), islice, xrange(10), 1, 'a', 1)
self.assertEqual(len(list(islice(count(), 1, 10, maxsize))), 1)
# Issue #10323: Less islice in a predictable state
@@ -855,9 +869,17 @@
self.assertRaises(TypeError, tee, [1,2], 3, 'x')
# tee object should be instantiable
- a, b = tee('abc')
- c = type(a)('def')
- self.assertEqual(list(c), list('def'))
+ if test_support.check_impl_detail():
+ # XXX I (arigo) would argue that 'type(a)(iterable)' has
+ # ill-defined semantics: it always returns a fresh tee object,
+ # but depending on whether 'iterable' is itself a tee object
+ # or not, it is ok or not to continue using 'iterable' after
+ # the call. I cannot imagine why 'type(a)(non_tee_object)'
+ # would be useful, as 'iter(non_tee_object)' is equivalent
+ # as far as I can see.
+ a, b = tee('abc')
+ c = type(a)('def')
+ self.assertEqual(list(c), list('def'))
# test long-lagged and multi-way split
a, b, c = tee(xrange(2000), 3)
@@ -895,6 +917,7 @@
p = proxy(a)
self.assertEqual(getattr(p, '__class__'), type(b))
del a
+ test_support.gc_collect()
self.assertRaises(ReferenceError, getattr, p, '__class__')
def test_StopIteration(self):
@@ -1317,6 +1340,7 @@
class LengthTransparency(unittest.TestCase):
+ @test_support.impl_detail("__length_hint__() API is undocumented")
def test_repeat(self):
from test.test_iterlen import len
self.assertEqual(len(repeat(None, 50)), 50)
diff --git a/lib-python/2.7/test/test_linecache.py b/lib-python/2.7/test/test_linecache.py
--- a/lib-python/2.7/test/test_linecache.py
+++ b/lib-python/2.7/test/test_linecache.py
@@ -54,13 +54,13 @@
# Check whether lines correspond to those from file iteration
for entry in TESTS:
- filename = os.path.join(TEST_PATH, entry) + '.py'
+ filename = support.findfile( entry + '.py')
for index, line in enumerate(open(filename)):
self.assertEqual(line, getline(filename, index + 1))
# Check module loading
for entry in MODULES:
- filename = os.path.join(MODULE_PATH, entry) + '.py'
+ filename = support.findfile( entry + '.py')
for index, line in enumerate(open(filename)):
self.assertEqual(line, getline(filename, index + 1))
@@ -78,7 +78,7 @@
def test_clearcache(self):
cached = []
for entry in TESTS:
- filename = os.path.join(TEST_PATH, entry) + '.py'
+ filename = support.findfile( entry + '.py')
cached.append(filename)
linecache.getline(filename, 1)
diff --git a/lib-python/2.7/test/test_list.py b/lib-python/2.7/test/test_list.py
--- a/lib-python/2.7/test/test_list.py
+++ b/lib-python/2.7/test/test_list.py
@@ -15,6 +15,10 @@
self.assertEqual(list(''), [])
self.assertEqual(list('spam'), ['s', 'p', 'a', 'm'])
+ # the following test also works with pypy, but eats all your address
+ # space's RAM before raising and takes too long.
+ @test_support.impl_detail("eats all your RAM before working", pypy=False)
+ def test_segfault_1(self):
if sys.maxsize == 0x7fffffff:
# This test can currently only work on 32-bit machines.
# XXX If/when PySequence_Length() returns a ssize_t, it should be
@@ -32,6 +36,7 @@
# http://sources.redhat.com/ml/newlib/2002/msg00369.html
self.assertRaises(MemoryError, list, xrange(sys.maxint // 2))
+ def test_segfault_2(self):
# This code used to segfault in Py2.4a3
x = []
x.extend(-y for y in x)
diff --git a/lib-python/2.7/test/test_long.py b/lib-python/2.7/test/test_long.py
--- a/lib-python/2.7/test/test_long.py
+++ b/lib-python/2.7/test/test_long.py
@@ -530,9 +530,10 @@
try:
long(TruncReturnsNonIntegral())
except TypeError as e:
- self.assertEqual(str(e),
- "__trunc__ returned non-Integral"
- " (type NonIntegral)")
+ if test_support.check_impl_detail(cpython=True):
+ self.assertEqual(str(e),
+ "__trunc__ returned non-Integral"
+ " (type NonIntegral)")
else:
self.fail("Failed to raise TypeError with %s" %
((base, trunc_result_base),))
diff --git a/lib-python/2.7/test/test_marshal.py b/lib-python/2.7/test/test_marshal.py
--- a/lib-python/2.7/test/test_marshal.py
+++ b/lib-python/2.7/test/test_marshal.py
@@ -7,20 +7,31 @@
import unittest
import os
-class IntTestCase(unittest.TestCase):
+class HelperMixin:
+ def helper(self, sample, *extra, **kwargs):
+ expected = kwargs.get('expected', sample)
+ new = marshal.loads(marshal.dumps(sample, *extra))
+ self.assertEqual(expected, new)
+ self.assertEqual(type(expected), type(new))
+ try:
+ with open(test_support.TESTFN, "wb") as f:
+ marshal.dump(sample, f, *extra)
+ with open(test_support.TESTFN, "rb") as f:
+ new = marshal.load(f)
+ self.assertEqual(expected, new)
+ self.assertEqual(type(expected), type(new))
+ finally:
+ test_support.unlink(test_support.TESTFN)
+
+
+class IntTestCase(unittest.TestCase, HelperMixin):
def test_ints(self):
# Test the full range of Python ints.
n = sys.maxint
while n:
for expected in (-n, n):
- s = marshal.dumps(expected)
- got = marshal.loads(s)
- self.assertEqual(expected, got)
- marshal.dump(expected, file(test_support.TESTFN, "wb"))
- got = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(expected, got)
+ self.helper(expected)
n = n >> 1
- os.unlink(test_support.TESTFN)
def test_int64(self):
# Simulate int marshaling on a 64-bit box. This is most interesting if
@@ -48,28 +59,16 @@
def test_bool(self):
for b in (True, False):
- new = marshal.loads(marshal.dumps(b))
- self.assertEqual(b, new)
- self.assertEqual(type(b), type(new))
- marshal.dump(b, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(b, new)
- self.assertEqual(type(b), type(new))
+ self.helper(b)
-class FloatTestCase(unittest.TestCase):
+class FloatTestCase(unittest.TestCase, HelperMixin):
def test_floats(self):
# Test a few floats
small = 1e-25
n = sys.maxint * 3.7e250
while n > small:
for expected in (-n, n):
- f = float(expected)
- s = marshal.dumps(f)
- got = marshal.loads(s)
- self.assertEqual(f, got)
- marshal.dump(f, file(test_support.TESTFN, "wb"))
- got = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(f, got)
+ self.helper(expected)
n /= 123.4567
f = 0.0
@@ -85,59 +84,25 @@
while n < small:
for expected in (-n, n):
f = float(expected)
+ self.helper(f)
+ self.helper(f, 1)
+ n *= 123.4567
- s = marshal.dumps(f)
- got = marshal.loads(s)
- self.assertEqual(f, got)
-
- s = marshal.dumps(f, 1)
- got = marshal.loads(s)
- self.assertEqual(f, got)
-
- marshal.dump(f, file(test_support.TESTFN, "wb"))
- got = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(f, got)
-
- marshal.dump(f, file(test_support.TESTFN, "wb"), 1)
- got = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(f, got)
- n *= 123.4567
- os.unlink(test_support.TESTFN)
-
-class StringTestCase(unittest.TestCase):
+class StringTestCase(unittest.TestCase, HelperMixin):
def test_unicode(self):
for s in [u"", u"Andr� Previn", u"abc", u" "*10000]:
- new = marshal.loads(marshal.dumps(s))
- self.assertEqual(s, new)
- self.assertEqual(type(s), type(new))
- marshal.dump(s, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(s, new)
- self.assertEqual(type(s), type(new))
- os.unlink(test_support.TESTFN)
+ self.helper(s)
def test_string(self):
for s in ["", "Andr� Previn", "abc", " "*10000]:
- new = marshal.loads(marshal.dumps(s))
- self.assertEqual(s, new)
- self.assertEqual(type(s), type(new))
- marshal.dump(s, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(s, new)
- self.assertEqual(type(s), type(new))
- os.unlink(test_support.TESTFN)
+ self.helper(s)
def test_buffer(self):
for s in ["", "Andr� Previn", "abc", " "*10000]:
with test_support.check_py3k_warnings(("buffer.. not supported",
DeprecationWarning)):
b = buffer(s)
- new = marshal.loads(marshal.dumps(b))
- self.assertEqual(s, new)
- marshal.dump(b, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(s, new)
- os.unlink(test_support.TESTFN)
+ self.helper(b, expected=s)
class ExceptionTestCase(unittest.TestCase):
def test_exceptions(self):
@@ -150,7 +115,7 @@
new = marshal.loads(marshal.dumps(co))
self.assertEqual(co, new)
-class ContainerTestCase(unittest.TestCase):
+class ContainerTestCase(unittest.TestCase, HelperMixin):
d = {'astring': 'foo at bar.baz.spam',
'afloat': 7283.43,
'anint': 2**20,
@@ -161,42 +126,20 @@
'aunicode': u"Andr� Previn"
}
def test_dict(self):
- new = marshal.loads(marshal.dumps(self.d))
- self.assertEqual(self.d, new)
- marshal.dump(self.d, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(self.d, new)
- os.unlink(test_support.TESTFN)
+ self.helper(self.d)
def test_list(self):
lst = self.d.items()
- new = marshal.loads(marshal.dumps(lst))
- self.assertEqual(lst, new)
- marshal.dump(lst, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(lst, new)
- os.unlink(test_support.TESTFN)
+ self.helper(lst)
def test_tuple(self):
t = tuple(self.d.keys())
- new = marshal.loads(marshal.dumps(t))
- self.assertEqual(t, new)
- marshal.dump(t, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(t, new)
- os.unlink(test_support.TESTFN)
+ self.helper(t)
def test_sets(self):
for constructor in (set, frozenset):
t = constructor(self.d.keys())
- new = marshal.loads(marshal.dumps(t))
- self.assertEqual(t, new)
- self.assertTrue(isinstance(new, constructor))
- self.assertNotEqual(id(t), id(new))
- marshal.dump(t, file(test_support.TESTFN, "wb"))
- new = marshal.load(file(test_support.TESTFN, "rb"))
- self.assertEqual(t, new)
- os.unlink(test_support.TESTFN)
+ self.helper(t)
class BugsTestCase(unittest.TestCase):
def test_bug_5888452(self):
@@ -226,6 +169,7 @@
s = 'c' + ('X' * 4*4) + '{' * 2**20
self.assertRaises(ValueError, marshal.loads, s)
+ @test_support.impl_detail('specific recursion check')
def test_recursion_limit(self):
# Create a deeply nested structure.
head = last = []
diff --git a/lib-python/2.7/test/test_memoryio.py b/lib-python/2.7/test/test_memoryio.py
--- a/lib-python/2.7/test/test_memoryio.py
+++ b/lib-python/2.7/test/test_memoryio.py
@@ -617,7 +617,7 @@
state = memio.__getstate__()
self.assertEqual(len(state), 3)
bytearray(state[0]) # Check if state[0] supports the buffer interface.
- self.assertIsInstance(state[1], int)
+ self.assertIsInstance(state[1], (int, long))
self.assertTrue(isinstance(state[2], dict) or state[2] is None)
memio.close()
self.assertRaises(ValueError, memio.__getstate__)
diff --git a/lib-python/2.7/test/test_memoryview.py b/lib-python/2.7/test/test_memoryview.py
--- a/lib-python/2.7/test/test_memoryview.py
+++ b/lib-python/2.7/test/test_memoryview.py
@@ -26,7 +26,8 @@
def check_getitem_with_type(self, tp):
item = self.getitem_type
b = tp(self._source)
- oldrefcount = sys.getrefcount(b)
+ if hasattr(sys, 'getrefcount'):
+ oldrefcount = sys.getrefcount(b)
m = self._view(b)
self.assertEqual(m[0], item(b"a"))
self.assertIsInstance(m[0], bytes)
@@ -43,7 +44,8 @@
self.assertRaises(TypeError, lambda: m[0.0])
self.assertRaises(TypeError, lambda: m["a"])
m = None
- self.assertEqual(sys.getrefcount(b), oldrefcount)
+ if hasattr(sys, 'getrefcount'):
+ self.assertEqual(sys.getrefcount(b), oldrefcount)
def test_getitem(self):
for tp in self._types:
@@ -65,7 +67,8 @@
if not self.ro_type:
return
b = self.ro_type(self._source)
- oldrefcount = sys.getrefcount(b)
+ if hasattr(sys, 'getrefcount'):
+ oldrefcount = sys.getrefcount(b)
m = self._view(b)
def setitem(value):
m[0] = value
@@ -73,14 +76,16 @@
self.assertRaises(TypeError, setitem, 65)
self.assertRaises(TypeError, setitem, memoryview(b"a"))
m = None
- self.assertEqual(sys.getrefcount(b), oldrefcount)
+ if hasattr(sys, 'getrefcount'):
+ self.assertEqual(sys.getrefcount(b), oldrefcount)
def test_setitem_writable(self):
if not self.rw_type:
return
tp = self.rw_type
b = self.rw_type(self._source)
- oldrefcount = sys.getrefcount(b)
+ if hasattr(sys, 'getrefcount'):
+ oldrefcount = sys.getrefcount(b)
m = self._view(b)
m[0] = tp(b"0")
self._check_contents(tp, b, b"0bcdef")
@@ -110,13 +115,14 @@
self.assertRaises(TypeError, setitem, (0,), b"a")
self.assertRaises(TypeError, setitem, "a", b"a")
# Trying to resize the memory object
- self.assertRaises(ValueError, setitem, 0, b"")
- self.assertRaises(ValueError, setitem, 0, b"ab")
+ self.assertRaises((ValueError, TypeError), setitem, 0, b"")
+ self.assertRaises((ValueError, TypeError), setitem, 0, b"ab")
self.assertRaises(ValueError, setitem, slice(1,1), b"a")
self.assertRaises(ValueError, setitem, slice(0,2), b"a")
m = None
- self.assertEqual(sys.getrefcount(b), oldrefcount)
+ if hasattr(sys, 'getrefcount'):
+ self.assertEqual(sys.getrefcount(b), oldrefcount)
def test_delitem(self):
for tp in self._types:
@@ -292,6 +298,7 @@
def _check_contents(self, tp, obj, contents):
self.assertEqual(obj[1:7], tp(contents))
+ @unittest.skipUnless(hasattr(sys, 'getrefcount'), "Reference counting")
def test_refs(self):
for tp in self._types:
m = memoryview(tp(self._source))
diff --git a/lib-python/2.7/test/test_mmap.py b/lib-python/2.7/test/test_mmap.py
--- a/lib-python/2.7/test/test_mmap.py
+++ b/lib-python/2.7/test/test_mmap.py
@@ -119,7 +119,8 @@
def test_access_parameter(self):
# Test for "access" keyword parameter
mapsize = 10
- open(TESTFN, "wb").write("a"*mapsize)
+ with open(TESTFN, "wb") as f:
+ f.write("a"*mapsize)
f = open(TESTFN, "rb")
m = mmap.mmap(f.fileno(), mapsize, access=mmap.ACCESS_READ)
self.assertEqual(m[:], 'a'*mapsize, "Readonly memory map data incorrect.")
@@ -168,9 +169,11 @@
else:
self.fail("Able to resize readonly memory map")
f.close()
+ m.close()
del m, f
- self.assertEqual(open(TESTFN, "rb").read(), 'a'*mapsize,
- "Readonly memory map data file was modified")
+ with open(TESTFN, "rb") as f:
+ self.assertEqual(f.read(), 'a'*mapsize,
+ "Readonly memory map data file was modified")
# Opening mmap with size too big
import sys
@@ -220,11 +223,13 @@
self.assertEqual(m[:], 'd' * mapsize,
"Copy-on-write memory map data not written correctly.")
m.flush()
- self.assertEqual(open(TESTFN, "rb").read(), 'c'*mapsize,
- "Copy-on-write test data file should not be modified.")
+ f.close()
+ with open(TESTFN, "rb") as f:
+ self.assertEqual(f.read(), 'c'*mapsize,
+ "Copy-on-write test data file should not be modified.")
# Ensuring copy-on-write maps cannot be resized
self.assertRaises(TypeError, m.resize, 2*mapsize)
- f.close()
+ m.close()
del m, f
# Ensuring invalid access parameter raises exception
@@ -287,6 +292,7 @@
self.assertEqual(m.find('one', 1), 8)
self.assertEqual(m.find('one', 1, -1), 8)
self.assertEqual(m.find('one', 1, -2), -1)
+ m.close()
def test_rfind(self):
@@ -305,6 +311,7 @@
self.assertEqual(m.rfind('one', 0, -2), 0)
self.assertEqual(m.rfind('one', 1, -1), 8)
self.assertEqual(m.rfind('one', 1, -2), -1)
+ m.close()
def test_double_close(self):
@@ -533,7 +540,8 @@
if not hasattr(mmap, 'PROT_READ'):
return
mapsize = 10
- open(TESTFN, "wb").write("a"*mapsize)
+ with open(TESTFN, "wb") as f:
+ f.write("a"*mapsize)
f = open(TESTFN, "rb")
m = mmap.mmap(f.fileno(), mapsize, prot=mmap.PROT_READ)
self.assertRaises(TypeError, m.write, "foo")
@@ -545,7 +553,8 @@
def test_io_methods(self):
data = "0123456789"
- open(TESTFN, "wb").write("x"*len(data))
+ with open(TESTFN, "wb") as f:
+ f.write("x"*len(data))
f = open(TESTFN, "r+b")
m = mmap.mmap(f.fileno(), len(data))
f.close()
@@ -574,6 +583,7 @@
self.assertEqual(m[:], "012bar6789")
m.seek(8)
self.assertRaises(ValueError, m.write, "bar")
+ m.close()
if os.name == 'nt':
def test_tagname(self):
@@ -611,7 +621,8 @@
m.close()
# Should not crash (Issue 5385)
- open(TESTFN, "wb").write("x"*10)
+ with open(TESTFN, "wb") as f:
+ f.write("x"*10)
f = open(TESTFN, "r+b")
m = mmap.mmap(f.fileno(), 0)
f.close()
diff --git a/lib-python/2.7/test/test_module.py b/lib-python/2.7/test/test_module.py
--- a/lib-python/2.7/test/test_module.py
+++ b/lib-python/2.7/test/test_module.py
@@ -1,6 +1,6 @@
# Test the module type
import unittest
-from test.test_support import run_unittest, gc_collect
+from test.test_support import run_unittest, gc_collect, check_impl_detail
import sys
ModuleType = type(sys)
@@ -10,8 +10,10 @@
# An uninitialized module has no __dict__ or __name__,
# and __doc__ is None
foo = ModuleType.__new__(ModuleType)
- self.assertTrue(foo.__dict__ is None)
- self.assertRaises(SystemError, dir, foo)
+ self.assertFalse(foo.__dict__)
+ if check_impl_detail():
+ self.assertTrue(foo.__dict__ is None)
+ self.assertRaises(SystemError, dir, foo)
try:
s = foo.__name__
self.fail("__name__ = %s" % repr(s))
diff --git a/lib-python/2.7/test/test_multibytecodec.py b/lib-python/2.7/test/test_multibytecodec.py
--- a/lib-python/2.7/test/test_multibytecodec.py
+++ b/lib-python/2.7/test/test_multibytecodec.py
@@ -42,7 +42,7 @@
dec = codecs.getdecoder('euc-kr')
myreplace = lambda exc: (u'', sys.maxint+1)
codecs.register_error('test.cjktest', myreplace)
- self.assertRaises(IndexError, dec,
+ self.assertRaises((IndexError, OverflowError), dec,
'apple\x92ham\x93spam', 'test.cjktest')
def test_codingspec(self):
@@ -148,7 +148,8 @@
class Test_StreamReader(unittest.TestCase):
def test_bug1728403(self):
try:
- open(TESTFN, 'w').write('\xa1')
+ with open(TESTFN, 'w') as f:
+ f.write('\xa1')
f = codecs.open(TESTFN, encoding='cp949')
self.assertRaises(UnicodeDecodeError, f.read, 2)
finally:
diff --git a/lib-python/2.7/test/test_multibytecodec_support.py b/lib-python/2.7/test/test_multibytecodec_support.py
--- a/lib-python/2.7/test/test_multibytecodec_support.py
+++ b/lib-python/2.7/test/test_multibytecodec_support.py
@@ -110,8 +110,8 @@
def myreplace(exc):
return (u'x', sys.maxint + 1)
codecs.register_error("test.cjktest", myreplace)
- self.assertRaises(IndexError, self.encode, self.unmappedunicode,
- 'test.cjktest')
+ self.assertRaises((IndexError, OverflowError), self.encode,
+ self.unmappedunicode, 'test.cjktest')
def test_callback_None_index(self):
def myreplace(exc):
@@ -330,7 +330,7 @@
repr(csetch), repr(unich), exc.reason))
def load_teststring(name):
- dir = os.path.join(os.path.dirname(__file__), 'cjkencodings')
+ dir = test_support.findfile('cjkencodings')
with open(os.path.join(dir, name + '.txt'), 'rb') as f:
encoded = f.read()
with open(os.path.join(dir, name + '-utf8.txt'), 'rb') as f:
diff --git a/lib-python/2.7/test/test_multiprocessing.py b/lib-python/2.7/test/test_multiprocessing.py
--- a/lib-python/2.7/test/test_multiprocessing.py
+++ b/lib-python/2.7/test/test_multiprocessing.py
@@ -1316,6 +1316,7 @@
queue = manager.get_queue()
self.assertEqual(queue.get(), 'hello world')
del queue
+ test_support.gc_collect()
manager.shutdown()
manager = QueueManager(
address=addr, authkey=authkey, serializer=SERIALIZER)
@@ -1605,6 +1606,10 @@
if len(blocks) > maxblocks:
i = random.randrange(maxblocks)
del blocks[i]
+ # XXX There should be a better way to release resources for a
+ # single block
+ if i % maxblocks == 0:
+ import gc; gc.collect()
# get the heap object
heap = multiprocessing.heap.BufferWrapper._heap
@@ -1704,6 +1709,7 @@
a = Foo()
util.Finalize(a, conn.send, args=('a',))
del a # triggers callback for a
+ test_support.gc_collect()
b = Foo()
close_b = util.Finalize(b, conn.send, args=('b',))
diff --git a/lib-python/2.7/test/test_mutants.py b/lib-python/2.7/test/test_mutants.py
--- a/lib-python/2.7/test/test_mutants.py
+++ b/lib-python/2.7/test/test_mutants.py
@@ -1,4 +1,4 @@
-from test.test_support import verbose, TESTFN
+from test.test_support import verbose, TESTFN, check_impl_detail
import random
import os
@@ -137,10 +137,16 @@
while dict1 and len(dict1) == len(dict2):
if verbose:
print ".",
- if random.random() < 0.5:
- c = cmp(dict1, dict2)
- else:
- c = dict1 == dict2
+ try:
+ if random.random() < 0.5:
+ c = cmp(dict1, dict2)
+ else:
+ c = dict1 == dict2
+ except RuntimeError:
+ # CPython never raises RuntimeError here, but other implementations
+ # might, and it's fine.
+ if check_impl_detail(cpython=True):
+ raise
if verbose:
print
diff --git a/lib-python/2.7/test/test_optparse.py b/lib-python/2.7/test/test_optparse.py
--- a/lib-python/2.7/test/test_optparse.py
+++ b/lib-python/2.7/test/test_optparse.py
@@ -383,6 +383,7 @@
self.assertRaises(self.parser.remove_option, ('foo',), None,
ValueError, "no such option 'foo'")
+ @test_support.impl_detail("sys.getrefcount")
def test_refleak(self):
# If an OptionParser is carrying around a reference to a large
# object, various cycles can prevent it from being GC'd in
diff --git a/lib-python/2.7/test/test_peepholer.py b/lib-python/2.7/test/test_peepholer.py
--- a/lib-python/2.7/test/test_peepholer.py
+++ b/lib-python/2.7/test/test_peepholer.py
@@ -41,7 +41,7 @@
def test_none_as_constant(self):
# LOAD_GLOBAL None --> LOAD_CONST None
def f(x):
- None
+ y = None
return x
asm = disassemble(f)
for elem in ('LOAD_GLOBAL',):
@@ -67,10 +67,13 @@
self.assertIn(elem, asm)
def test_pack_unpack(self):
+ # On PyPy, "a, b = ..." is even more optimized, by removing
+ # the ROT_TWO. But the ROT_TWO is not removed if assigning
+ # to more complex expressions, so check that.
for line, elem in (
('a, = a,', 'LOAD_CONST',),
- ('a, b = a, b', 'ROT_TWO',),
- ('a, b, c = a, b, c', 'ROT_THREE',),
+ ('a[1], b = a, b', 'ROT_TWO',),
+ ('a, b[2], c = a, b, c', 'ROT_THREE',),
):
asm = dis_single(line)
self.assertIn(elem, asm)
@@ -78,6 +81,8 @@
self.assertNotIn('UNPACK_TUPLE', asm)
def test_folding_of_tuples_of_constants(self):
+ # On CPython, "a,b,c=1,2,3" turns into "a,b,c="
+ # but on PyPy, it turns into "a=1;b=2;c=3".
for line, elem in (
('a = 1,2,3', '((1, 2, 3))'),
('("a","b","c")', "(('a', 'b', 'c'))"),
@@ -86,7 +91,8 @@
('((1, 2), 3, 4)', '(((1, 2), 3, 4))'),
):
asm = dis_single(line)
- self.assertIn(elem, asm)
+ self.assert_(elem in asm or (
+ line == 'a,b,c = 1,2,3' and 'UNPACK_TUPLE' not in asm))
self.assertNotIn('BUILD_TUPLE', asm)
# Bug 1053819: Tuple of constants misidentified when presented with:
diff --git a/lib-python/2.7/test/test_pprint.py b/lib-python/2.7/test/test_pprint.py
--- a/lib-python/2.7/test/test_pprint.py
+++ b/lib-python/2.7/test/test_pprint.py
@@ -233,7 +233,16 @@
frozenset([0, 2]),
frozenset([0, 1])])}"""
cube = test.test_set.cube(3)
- self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
+ # XXX issues of dictionary order, and for the case below,
+ # order of items in the frozenset([...]) representation.
+ # Whether we get precisely cube_repr_tgt or not is open
+ # to implementation-dependent choices (this test probably
+ # fails horribly in CPython if we tweak the dict order too).
+ got = pprint.pformat(cube)
+ if test.test_support.check_impl_detail(cpython=True):
+ self.assertEqual(got, cube_repr_tgt)
+ else:
+ self.assertEqual(eval(got), cube)
cubo_repr_tgt = """\
{frozenset([frozenset([0, 2]), frozenset([0])]): frozenset([frozenset([frozenset([0,
2]),
@@ -393,7 +402,11 @@
2])])])}"""
cubo = test.test_set.linegraph(cube)
- self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
+ got = pprint.pformat(cubo)
+ if test.test_support.check_impl_detail(cpython=True):
+ self.assertEqual(got, cubo_repr_tgt)
+ else:
+ self.assertEqual(eval(got), cubo)
def test_depth(self):
nested_tuple = (1, (2, (3, (4, (5, 6)))))
diff --git a/lib-python/2.7/test/test_pydoc.py b/lib-python/2.7/test/test_pydoc.py
--- a/lib-python/2.7/test/test_pydoc.py
+++ b/lib-python/2.7/test/test_pydoc.py
@@ -267,8 +267,8 @@
testpairs = (
('i_am_not_here', 'i_am_not_here'),
('test.i_am_not_here_either', 'i_am_not_here_either'),
- ('test.i_am_not_here.neither_am_i', 'i_am_not_here.neither_am_i'),
- ('i_am_not_here.{}'.format(modname), 'i_am_not_here.{}'.format(modname)),
+ ('test.i_am_not_here.neither_am_i', 'i_am_not_here'),
+ ('i_am_not_here.{}'.format(modname), 'i_am_not_here'),
('test.{}'.format(modname), modname),
)
@@ -292,8 +292,8 @@
result = run_pydoc(modname)
finally:
forget(modname)
- expected = badimport_pattern % (modname, expectedinmsg)
- self.assertEqual(expected, result)
+ expected = badimport_pattern % (modname, '(.+\\.)?' + expectedinmsg + '(\\..+)?$')
+ self.assertTrue(re.match(expected, result))
def test_input_strip(self):
missing_module = " test.i_am_not_here "
diff --git a/lib-python/2.7/test/test_pyexpat.py b/lib-python/2.7/test/test_pyexpat.py
--- a/lib-python/2.7/test/test_pyexpat.py
+++ b/lib-python/2.7/test/test_pyexpat.py
@@ -570,6 +570,9 @@
self.assertEqual(self.n, 4)
class MalformedInputText(unittest.TestCase):
+ # CPython seems to ship its own version of expat; they fixed it in this commit:
+ # http://svn.python.org/view?revision=74429&view=revision
+ @unittest.skipIf(sys.platform == "darwin", "Expat is broken on Mac OS X 10.6.6")
def test1(self):
xml = "\0\r\n"
parser = expat.ParserCreate()
@@ -579,6 +582,7 @@
except expat.ExpatError as e:
self.assertEqual(str(e), 'unclosed token: line 2, column 0')
+ @unittest.skipIf(sys.platform == "darwin", "Expat is broken on Mac OS X 10.6.6")
def test2(self):
xml = "\r\n"
parser = expat.ParserCreate()
diff --git a/lib-python/2.7/test/test_repr.py b/lib-python/2.7/test/test_repr.py
--- a/lib-python/2.7/test/test_repr.py
+++ b/lib-python/2.7/test/test_repr.py
@@ -9,6 +9,7 @@
import unittest
from test.test_support import run_unittest, check_py3k_warnings
+from test.test_support import check_impl_detail
from repr import repr as r # Don't shadow builtin repr
from repr import Repr
@@ -145,8 +146,11 @@
# Functions
eq(repr(hash), '')
# Methods
- self.assertTrue(repr(''.split).startswith(
- '")
def test_xrange(self):
eq = self.assertEqual
@@ -185,7 +189,10 @@
def test_descriptors(self):
eq = self.assertEqual
# method descriptors
- eq(repr(dict.items), "")
+ if check_impl_detail(cpython=True):
+ eq(repr(dict.items), "")
+ elif check_impl_detail(pypy=True):
+ eq(repr(dict.items), "")
# XXX member descriptors
# XXX attribute descriptors
# XXX slot descriptors
@@ -247,8 +254,14 @@
eq = self.assertEqual
touch(os.path.join(self.subpkgname, self.pkgname + os.extsep + 'py'))
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
- eq(repr(areallylongpackageandmodulenametotestreprtruncation),
- "" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
+ # On PyPy, we use %r to format the file name; on CPython it is done
+ # with '%s'. It seems to me that %r is safer.
+ if '__pypy__' in sys.builtin_module_names:
+ eq(repr(areallylongpackageandmodulenametotestreprtruncation),
+ "" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
+ else:
+ eq(repr(areallylongpackageandmodulenametotestreprtruncation),
+ "" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
eq(repr(sys), "")
def test_type(self):
diff --git a/lib-python/2.7/test/test_runpy.py b/lib-python/2.7/test/test_runpy.py
--- a/lib-python/2.7/test/test_runpy.py
+++ b/lib-python/2.7/test/test_runpy.py
@@ -5,10 +5,15 @@
import sys
import re
import tempfile
-from test.test_support import verbose, run_unittest, forget
+from test.test_support import verbose, run_unittest, forget, check_impl_detail
from test.script_helper import (temp_dir, make_script, compile_script,
make_pkg, make_zip_script, make_zip_pkg)
+if check_impl_detail(pypy=True):
+ no_lone_pyc_file = True
+else:
+ no_lone_pyc_file = False
+
from runpy import _run_code, _run_module_code, run_module, run_path
# Note: This module can't safely test _run_module_as_main as it
@@ -168,13 +173,14 @@
self.assertIn("x", d1)
self.assertTrue(d1["x"] == 1)
del d1 # Ensure __loader__ entry doesn't keep file open
- __import__(mod_name)
- os.remove(mod_fname)
- if verbose: print "Running from compiled:", mod_name
- d2 = run_module(mod_name) # Read from bytecode
- self.assertIn("x", d2)
- self.assertTrue(d2["x"] == 1)
- del d2 # Ensure __loader__ entry doesn't keep file open
+ if not no_lone_pyc_file:
+ __import__(mod_name)
+ os.remove(mod_fname)
+ if verbose: print "Running from compiled:", mod_name
+ d2 = run_module(mod_name) # Read from bytecode
+ self.assertIn("x", d2)
+ self.assertTrue(d2["x"] == 1)
+ del d2 # Ensure __loader__ entry doesn't keep file open
finally:
self._del_pkg(pkg_dir, depth, mod_name)
if verbose: print "Module executed successfully"
@@ -190,13 +196,14 @@
self.assertIn("x", d1)
self.assertTrue(d1["x"] == 1)
del d1 # Ensure __loader__ entry doesn't keep file open
- __import__(mod_name)
- os.remove(mod_fname)
- if verbose: print "Running from compiled:", pkg_name
- d2 = run_module(pkg_name) # Read from bytecode
- self.assertIn("x", d2)
- self.assertTrue(d2["x"] == 1)
- del d2 # Ensure __loader__ entry doesn't keep file open
+ if not no_lone_pyc_file:
+ __import__(mod_name)
+ os.remove(mod_fname)
+ if verbose: print "Running from compiled:", pkg_name
+ d2 = run_module(pkg_name) # Read from bytecode
+ self.assertIn("x", d2)
+ self.assertTrue(d2["x"] == 1)
+ del d2 # Ensure __loader__ entry doesn't keep file open
finally:
self._del_pkg(pkg_dir, depth, pkg_name)
if verbose: print "Package executed successfully"
@@ -244,15 +251,17 @@
self.assertIn("sibling", d1)
self.assertIn("nephew", d1)
del d1 # Ensure __loader__ entry doesn't keep file open
- __import__(mod_name)
- os.remove(mod_fname)
- if verbose: print "Running from compiled:", mod_name
- d2 = run_module(mod_name, run_name=run_name) # Read from bytecode
- self.assertIn("__package__", d2)
- self.assertTrue(d2["__package__"] == pkg_name)
- self.assertIn("sibling", d2)
- self.assertIn("nephew", d2)
- del d2 # Ensure __loader__ entry doesn't keep file open
+ if not no_lone_pyc_file:
+ __import__(mod_name)
+ os.remove(mod_fname)
+ if verbose: print "Running from compiled:", mod_name
+ # Read from bytecode
+ d2 = run_module(mod_name, run_name=run_name)
+ self.assertIn("__package__", d2)
+ self.assertTrue(d2["__package__"] == pkg_name)
+ self.assertIn("sibling", d2)
+ self.assertIn("nephew", d2)
+ del d2 # Ensure __loader__ entry doesn't keep file open
finally:
self._del_pkg(pkg_dir, depth, mod_name)
if verbose: print "Module executed successfully"
@@ -345,6 +354,8 @@
script_dir, '')
def test_directory_compiled(self):
+ if no_lone_pyc_file:
+ return
with temp_dir() as script_dir:
mod_name = '__main__'
script_name = self._make_test_script(script_dir, mod_name)
diff --git a/lib-python/2.7/test/test_scope.py b/lib-python/2.7/test/test_scope.py
--- a/lib-python/2.7/test/test_scope.py
+++ b/lib-python/2.7/test/test_scope.py
@@ -1,6 +1,6 @@
import unittest
from test.test_support import check_syntax_error, check_py3k_warnings, \
- check_warnings, run_unittest
+ check_warnings, run_unittest, gc_collect
class ScopeTests(unittest.TestCase):
@@ -432,6 +432,7 @@
for i in range(100):
f1()
+ gc_collect()
self.assertEqual(Foo.count, 0)
diff --git a/lib-python/2.7/test/test_set.py b/lib-python/2.7/test/test_set.py
--- a/lib-python/2.7/test/test_set.py
+++ b/lib-python/2.7/test/test_set.py
@@ -309,6 +309,7 @@
fo.close()
test_support.unlink(test_support.TESTFN)
+ @test_support.impl_detail(pypy=False)
def test_do_not_rehash_dict_keys(self):
n = 10
d = dict.fromkeys(map(HashCountingInt, xrange(n)))
@@ -559,6 +560,7 @@
p = weakref.proxy(s)
self.assertEqual(str(p), str(s))
s = None
+ test_support.gc_collect()
self.assertRaises(ReferenceError, str, p)
# C API test only available in a debug build
@@ -590,6 +592,7 @@
s.__init__(self.otherword)
self.assertEqual(s, set(self.word))
+ @test_support.impl_detail()
def test_singleton_empty_frozenset(self):
f = frozenset()
efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''),
@@ -770,9 +773,10 @@
for v in self.set:
self.assertIn(v, self.values)
setiter = iter(self.set)
- # note: __length_hint__ is an internal undocumented API,
- # don't rely on it in your own programs
- self.assertEqual(setiter.__length_hint__(), len(self.set))
+ if test_support.check_impl_detail():
+ # note: __length_hint__ is an internal undocumented API,
+ # don't rely on it in your own programs
+ self.assertEqual(setiter.__length_hint__(), len(self.set))
def test_pickling(self):
p = pickle.dumps(self.set)
@@ -1564,7 +1568,7 @@
for meth in (s.union, s.intersection, s.difference, s.symmetric_difference, s.isdisjoint):
for g in (G, I, Ig, L, R):
expected = meth(data)
- actual = meth(G(data))
+ actual = meth(g(data))
if isinstance(expected, bool):
self.assertEqual(actual, expected)
else:
diff --git a/lib-python/2.7/test/test_sets.py b/lib-python/2.7/test/test_sets.py
--- a/lib-python/2.7/test/test_sets.py
+++ b/lib-python/2.7/test/test_sets.py
@@ -686,7 +686,9 @@
set_list = sorted(self.set)
self.assertEqual(len(dup_list), len(set_list))
for i, el in enumerate(dup_list):
- self.assertIs(el, set_list[i])
+ # Object identity is not guaranteed for immutable objects, so we
+ # can't use assertIs here.
+ self.assertEqual(el, set_list[i])
def test_deep_copy(self):
dup = copy.deepcopy(self.set)
diff --git a/lib-python/2.7/test/test_site.py b/lib-python/2.7/test/test_site.py
--- a/lib-python/2.7/test/test_site.py
+++ b/lib-python/2.7/test/test_site.py
@@ -226,6 +226,10 @@
self.assertEqual(len(dirs), 1)
wanted = os.path.join('xoxo', 'Lib', 'site-packages')
self.assertEqual(dirs[0], wanted)
+ elif '__pypy__' in sys.builtin_module_names:
+ self.assertEquals(len(dirs), 1)
+ wanted = os.path.join('xoxo', 'site-packages')
+ self.assertEquals(dirs[0], wanted)
elif os.sep == '/':
self.assertEqual(len(dirs), 2)
wanted = os.path.join('xoxo', 'lib', 'python' + sys.version[:3],
diff --git a/lib-python/2.7/test/test_socket.py b/lib-python/2.7/test/test_socket.py
--- a/lib-python/2.7/test/test_socket.py
+++ b/lib-python/2.7/test/test_socket.py
@@ -252,6 +252,7 @@
self.assertEqual(p.fileno(), s.fileno())
s.close()
s = None
+ test_support.gc_collect()
try:
p.fileno()
except ReferenceError:
@@ -285,32 +286,34 @@
s.sendto(u'\u2620', sockname)
with self.assertRaises(TypeError) as cm:
s.sendto(5j, sockname)
- self.assertIn('not complex', str(cm.exception))
+ self.assertIn('complex', str(cm.exception))
with self.assertRaises(TypeError) as cm:
s.sendto('foo', None)
- self.assertIn('not NoneType', str(cm.exception))
+ self.assertIn('NoneType', str(cm.exception))
# 3 args
with self.assertRaises(UnicodeEncodeError):
s.sendto(u'\u2620', 0, sockname)
with self.assertRaises(TypeError) as cm:
s.sendto(5j, 0, sockname)
- self.assertIn('not complex', str(cm.exception))
+ self.assertIn('complex', str(cm.exception))
with self.assertRaises(TypeError) as cm:
s.sendto('foo', 0, None)
- self.assertIn('not NoneType', str(cm.exception))
+ if test_support.check_impl_detail():
+ self.assertIn('not NoneType', str(cm.exception))
with self.assertRaises(TypeError) as cm:
s.sendto('foo', 'bar', sockname)
- self.assertIn('an integer is required', str(cm.exception))
+ self.assertIn('integer', str(cm.exception))
with self.assertRaises(TypeError) as cm:
s.sendto('foo', None, None)
- self.assertIn('an integer is required', str(cm.exception))
+ if test_support.check_impl_detail():
+ self.assertIn('an integer is required', str(cm.exception))
# wrong number of args
with self.assertRaises(TypeError) as cm:
s.sendto('foo')
- self.assertIn('(1 given)', str(cm.exception))
+ self.assertIn(' given)', str(cm.exception))
with self.assertRaises(TypeError) as cm:
s.sendto('foo', 0, sockname, 4)
- self.assertIn('(4 given)', str(cm.exception))
+ self.assertIn(' given)', str(cm.exception))
def testCrucialConstants(self):
@@ -385,10 +388,10 @@
socket.htonl(k)
socket.htons(k)
for k in bad_values:
- self.assertRaises(OverflowError, socket.ntohl, k)
- self.assertRaises(OverflowError, socket.ntohs, k)
- self.assertRaises(OverflowError, socket.htonl, k)
- self.assertRaises(OverflowError, socket.htons, k)
+ self.assertRaises((OverflowError, ValueError), socket.ntohl, k)
+ self.assertRaises((OverflowError, ValueError), socket.ntohs, k)
+ self.assertRaises((OverflowError, ValueError), socket.htonl, k)
+ self.assertRaises((OverflowError, ValueError), socket.htons, k)
def testGetServBy(self):
eq = self.assertEqual
@@ -428,8 +431,8 @@
if udpport is not None:
eq(socket.getservbyport(udpport, 'udp'), service)
# Make sure getservbyport does not accept out of range ports.
- self.assertRaises(OverflowError, socket.getservbyport, -1)
- self.assertRaises(OverflowError, socket.getservbyport, 65536)
+ self.assertRaises((OverflowError, ValueError), socket.getservbyport, -1)
+ self.assertRaises((OverflowError, ValueError), socket.getservbyport, 65536)
def testDefaultTimeout(self):
# Testing default timeout
@@ -608,8 +611,8 @@
neg_port = port - 65536
sock = socket.socket()
try:
- self.assertRaises(OverflowError, sock.bind, (host, big_port))
- self.assertRaises(OverflowError, sock.bind, (host, neg_port))
+ self.assertRaises((OverflowError, ValueError), sock.bind, (host, big_port))
+ self.assertRaises((OverflowError, ValueError), sock.bind, (host, neg_port))
sock.bind((host, port))
finally:
sock.close()
@@ -1309,6 +1312,7 @@
closed = False
def flush(self): pass
def close(self): self.closed = True
+ def _decref_socketios(self): pass
# must not close unless we request it: the original use of _fileobject
# by module socket requires that the underlying socket not be closed until
diff --git a/lib-python/2.7/test/test_sort.py b/lib-python/2.7/test/test_sort.py
--- a/lib-python/2.7/test/test_sort.py
+++ b/lib-python/2.7/test/test_sort.py
@@ -140,7 +140,10 @@
return random.random() < 0.5
L = [C() for i in range(50)]
- self.assertRaises(ValueError, L.sort)
+ try:
+ L.sort()
+ except ValueError:
+ pass
def test_cmpNone(self):
# Testing None as a comparison function.
@@ -150,8 +153,10 @@
L.sort(None)
self.assertEqual(L, range(50))
+ @test_support.impl_detail(pypy=False)
def test_undetected_mutation(self):
# Python 2.4a1 did not always detect mutation
+ # Neither does PyPy.
memorywaster = []
for i in range(20):
def mutating_cmp(x, y):
@@ -226,7 +231,10 @@
def __del__(self):
del data[:]
data[:] = range(20)
- self.assertRaises(ValueError, data.sort, key=SortKiller)
+ try:
+ data.sort(key=SortKiller)
+ except ValueError:
+ pass
def test_key_with_mutating_del_and_exception(self):
data = range(10)
diff --git a/lib-python/2.7/test/test_ssl.py b/lib-python/2.7/test/test_ssl.py
--- a/lib-python/2.7/test/test_ssl.py
+++ b/lib-python/2.7/test/test_ssl.py
@@ -881,6 +881,8 @@
c = socket.socket()
c.connect((HOST, port))
listener_gone.wait()
+ # XXX why is it necessary?
+ test_support.gc_collect()
try:
ssl_sock = ssl.wrap_socket(c)
except IOError:
@@ -1330,10 +1332,8 @@
def test_main(verbose=False):
global CERTFILE, SVN_PYTHON_ORG_ROOT_CERT
- CERTFILE = os.path.join(os.path.dirname(__file__) or os.curdir,
- "keycert.pem")
- SVN_PYTHON_ORG_ROOT_CERT = os.path.join(
- os.path.dirname(__file__) or os.curdir,
+ CERTFILE = test_support.findfile("keycert.pem")
+ SVN_PYTHON_ORG_ROOT_CERT = test_support.findfile(
"https_svn_python_org_root.pem")
if (not os.path.exists(CERTFILE) or
diff --git a/lib-python/2.7/test/test_str.py b/lib-python/2.7/test/test_str.py
--- a/lib-python/2.7/test/test_str.py
+++ b/lib-python/2.7/test/test_str.py
@@ -422,10 +422,11 @@
for meth in ('foo'.startswith, 'foo'.endswith):
with self.assertRaises(TypeError) as cm:
meth(['f'])
- exc = str(cm.exception)
- self.assertIn('unicode', exc)
- self.assertIn('str', exc)
- self.assertIn('tuple', exc)
+ if test_support.check_impl_detail():
+ exc = str(cm.exception)
+ self.assertIn('unicode', exc)
+ self.assertIn('str', exc)
+ self.assertIn('tuple', exc)
def test_main():
test_support.run_unittest(StrTest)
diff --git a/lib-python/2.7/test/test_struct.py b/lib-python/2.7/test/test_struct.py
--- a/lib-python/2.7/test/test_struct.py
+++ b/lib-python/2.7/test/test_struct.py
@@ -535,7 +535,8 @@
@unittest.skipUnless(IS32BIT, "Specific to 32bit machines")
def test_crasher(self):
- self.assertRaises(MemoryError, struct.pack, "357913941c", "a")
+ self.assertRaises((MemoryError, struct.error), struct.pack,
+ "357913941c", "a")
def test_count_overflow(self):
hugecount = '{}b'.format(sys.maxsize+1)
diff --git a/lib-python/2.7/test/test_subprocess.py b/lib-python/2.7/test/test_subprocess.py
--- a/lib-python/2.7/test/test_subprocess.py
+++ b/lib-python/2.7/test/test_subprocess.py
@@ -16,11 +16,11 @@
# Depends on the following external programs: Python
#
-if mswindows:
- SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
- 'os.O_BINARY);')
-else:
- SETBINARY = ''
+#if mswindows:
+# SETBINARY = ('import msvcrt; msvcrt.setmode(sys.stdout.fileno(), '
+# 'os.O_BINARY);')
+#else:
+# SETBINARY = ''
try:
@@ -420,8 +420,9 @@
self.assertStderrEqual(stderr, "")
def test_universal_newlines(self):
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;' + SETBINARY +
+ # NB. replaced SETBINARY with the -u flag
+ p = subprocess.Popen([sys.executable, "-u", "-c",
+ 'import sys,os;' + #SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
@@ -448,8 +449,9 @@
def test_universal_newlines_communicate(self):
# universal newlines through communicate()
- p = subprocess.Popen([sys.executable, "-c",
- 'import sys,os;' + SETBINARY +
+ # NB. replaced SETBINARY with the -u flag
+ p = subprocess.Popen([sys.executable, "-u", "-c",
+ 'import sys,os;' + #SETBINARY +
'sys.stdout.write("line1\\n");'
'sys.stdout.flush();'
'sys.stdout.write("line2\\r");'
diff --git a/lib-python/2.7/test/test_support.py b/lib-python/2.7/test/test_support.py
--- a/lib-python/2.7/test/test_support.py
+++ b/lib-python/2.7/test/test_support.py
@@ -431,16 +431,20 @@
rmtree(name)
-def findfile(file, here=__file__, subdir=None):
+def findfile(file, here=None, subdir=None):
"""Try to find a file on sys.path and the working directory. If it is not
found the argument passed to the function is returned (this does not
necessarily signal failure; could still be the legitimate path)."""
+ import test
if os.path.isabs(file):
return file
if subdir is not None:
file = os.path.join(subdir, file)
path = sys.path
- path = [os.path.dirname(here)] + path
+ if here is None:
+ path = test.__path__ + path
+ else:
+ path = [os.path.dirname(here)] + path
for dn in path:
fn = os.path.join(dn, file)
if os.path.exists(fn): return fn
@@ -1050,15 +1054,33 @@
guards, default = _parse_guards(guards)
return guards.get(platform.python_implementation().lower(), default)
+# ----------------------------------
+# PyPy extension: you can run::
+# python ..../test_foo.py --pdb
+# to get a pdb prompt in case of exceptions
+ResultClass = unittest.TextTestRunner.resultclass
+
+class TestResultWithPdb(ResultClass):
+
+ def addError(self, testcase, exc_info):
+ ResultClass.addError(self, testcase, exc_info)
+ if '--pdb' in sys.argv:
+ import pdb, traceback
+ traceback.print_tb(exc_info[2])
+ pdb.post_mortem(exc_info[2])
+
+# ----------------------------------
def _run_suite(suite):
"""Run tests from a unittest.TestSuite-derived class."""
if verbose:
- runner = unittest.TextTestRunner(sys.stdout, verbosity=2)
+ runner = unittest.TextTestRunner(sys.stdout, verbosity=2,
+ resultclass=TestResultWithPdb)
else:
runner = BasicTestRunner()
+
result = runner.run(suite)
if not result.wasSuccessful():
if len(result.errors) == 1 and not result.failures:
@@ -1071,6 +1093,34 @@
err += "; run in verbose mode for details"
raise TestFailed(err)
+# ----------------------------------
+# PyPy extension: you can run::
+# python ..../test_foo.py --filter bar
+# to run only the test cases whose name contains bar
+
+def filter_maybe(suite):
+ try:
+ i = sys.argv.index('--filter')
+ filter = sys.argv[i+1]
+ except (ValueError, IndexError):
+ return suite
+ tests = []
+ for test in linearize_suite(suite):
+ if filter in test._testMethodName:
+ tests.append(test)
+ return unittest.TestSuite(tests)
+
+def linearize_suite(suite_or_test):
+ try:
+ it = iter(suite_or_test)
+ except TypeError:
+ yield suite_or_test
+ return
+ for subsuite in it:
+ for item in linearize_suite(subsuite):
+ yield item
+
+# ----------------------------------
def run_unittest(*classes):
"""Run tests from unittest.TestCase-derived classes."""
@@ -1086,6 +1136,7 @@
suite.addTest(cls)
else:
suite.addTest(unittest.makeSuite(cls))
+ suite = filter_maybe(suite)
_run_suite(suite)
diff --git a/lib-python/2.7/test/test_syntax.py b/lib-python/2.7/test/test_syntax.py
--- a/lib-python/2.7/test/test_syntax.py
+++ b/lib-python/2.7/test/test_syntax.py
@@ -5,7 +5,8 @@
>>> def f(x):
... global x
Traceback (most recent call last):
-SyntaxError: name 'x' is local and global (, line 1)
+ File "", line 1
+SyntaxError: name 'x' is local and global
The tests are all raise SyntaxErrors. They were created by checking
each C call that raises SyntaxError. There are several modules that
@@ -375,7 +376,7 @@
In 2.5 there was a missing exception and an assert was triggered in a debug
build. The number of blocks must be greater than CO_MAXBLOCKS. SF #1565514
- >>> while 1:
+ >>> while 1: # doctest:+SKIP
... while 2:
... while 3:
... while 4:
diff --git a/lib-python/2.7/test/test_sys.py b/lib-python/2.7/test/test_sys.py
--- a/lib-python/2.7/test/test_sys.py
+++ b/lib-python/2.7/test/test_sys.py
@@ -264,6 +264,7 @@
self.assertEqual(sys.getdlopenflags(), oldflags+1)
sys.setdlopenflags(oldflags)
+ @test.test_support.impl_detail("reference counting")
def test_refcount(self):
# n here must be a global in order for this test to pass while
# tracing with a python function. Tracing calls PyFrame_FastToLocals
@@ -287,7 +288,7 @@
is sys._getframe().f_code
)
- # sys._current_frames() is a CPython-only gimmick.
+ @test.test_support.impl_detail("current_frames")
def test_current_frames(self):
have_threads = True
try:
@@ -383,7 +384,10 @@
self.assertEqual(len(sys.float_info), 11)
self.assertEqual(sys.float_info.radix, 2)
self.assertEqual(len(sys.long_info), 2)
- self.assertTrue(sys.long_info.bits_per_digit % 5 == 0)
+ if test.test_support.check_impl_detail(cpython=True):
+ self.assertTrue(sys.long_info.bits_per_digit % 5 == 0)
+ else:
+ self.assertTrue(sys.long_info.bits_per_digit >= 1)
self.assertTrue(sys.long_info.sizeof_digit >= 1)
self.assertEqual(type(sys.long_info.bits_per_digit), int)
self.assertEqual(type(sys.long_info.sizeof_digit), int)
@@ -432,6 +436,7 @@
self.assertEqual(type(getattr(sys.flags, attr)), int, attr)
self.assertTrue(repr(sys.flags))
+ @test.test_support.impl_detail("sys._clear_type_cache")
def test_clear_type_cache(self):
sys._clear_type_cache()
@@ -473,6 +478,7 @@
p.wait()
self.assertIn(executable, ["''", repr(sys.executable)])
+@unittest.skipUnless(test.test_support.check_impl_detail(), "sys.getsizeof()")
class SizeofTest(unittest.TestCase):
TPFLAGS_HAVE_GC = 1<<14
diff --git a/lib-python/2.7/test/test_sys_settrace.py b/lib-python/2.7/test/test_sys_settrace.py
--- a/lib-python/2.7/test/test_sys_settrace.py
+++ b/lib-python/2.7/test/test_sys_settrace.py
@@ -213,12 +213,16 @@
"finally"
def generator_example():
# any() will leave the generator before its end
- x = any(generator_function())
+ x = any(generator_function()); gc.collect()
# the following lines were not traced
for x in range(10):
y = x
+# On CPython, when the generator is decref'ed to zero, we see the trace
+# for the "finally:" portion. On PyPy, we don't see it before the next
+# garbage collection. That's why we put gc.collect() on the same line above.
+
generator_example.events = ([(0, 'call'),
(2, 'line'),
(-6, 'call'),
@@ -282,11 +286,11 @@
self.compare_events(func.func_code.co_firstlineno,
tracer.events, func.events)
- def set_and_retrieve_none(self):
+ def test_set_and_retrieve_none(self):
sys.settrace(None)
assert sys.gettrace() is None
- def set_and_retrieve_func(self):
+ def test_set_and_retrieve_func(self):
def fn(*args):
pass
@@ -323,17 +327,24 @@
self.run_test(tighterloop_example)
def test_13_genexp(self):
- self.run_test(generator_example)
- # issue1265: if the trace function contains a generator,
- # and if the traced function contains another generator
- # that is not completely exhausted, the trace stopped.
- # Worse: the 'finally' clause was not invoked.
- tracer = Tracer()
- sys.settrace(tracer.traceWithGenexp)
- generator_example()
- sys.settrace(None)
- self.compare_events(generator_example.__code__.co_firstlineno,
- tracer.events, generator_example.events)
+ if self.using_gc:
+ test_support.gc_collect()
+ gc.enable()
+ try:
+ self.run_test(generator_example)
+ # issue1265: if the trace function contains a generator,
+ # and if the traced function contains another generator
+ # that is not completely exhausted, the trace stopped.
+ # Worse: the 'finally' clause was not invoked.
+ tracer = Tracer()
+ sys.settrace(tracer.traceWithGenexp)
+ generator_example()
+ sys.settrace(None)
+ self.compare_events(generator_example.__code__.co_firstlineno,
+ tracer.events, generator_example.events)
+ finally:
+ if self.using_gc:
+ gc.disable()
def test_14_onliner_if(self):
def onliners():
diff --git a/lib-python/2.7/test/test_sysconfig.py b/lib-python/2.7/test/test_sysconfig.py
--- a/lib-python/2.7/test/test_sysconfig.py
+++ b/lib-python/2.7/test/test_sysconfig.py
@@ -209,13 +209,22 @@
self.assertEqual(get_platform(), 'macosx-10.4-fat64')
- for arch in ('ppc', 'i386', 'x86_64', 'ppc64'):
+ for arch in ('ppc', 'i386', 'ppc64', 'x86_64'):
get_config_vars()['CFLAGS'] = ('-arch %s -isysroot '
'/Developer/SDKs/MacOSX10.4u.sdk '
'-fno-strict-aliasing -fno-common '
'-dynamic -DNDEBUG -g -O3'%(arch,))
self.assertEqual(get_platform(), 'macosx-10.4-%s'%(arch,))
+
+ # macosx with ARCHFLAGS set and empty _CONFIG_VARS
+ os.environ['ARCHFLAGS'] = '-arch i386'
+ sysconfig._CONFIG_VARS = None
+
+ # this will attempt to recreate the _CONFIG_VARS based on environment
+ # variables; used to check a problem with the PyPy's _init_posix
+ # implementation; see: issue 705
+ get_config_vars()
# linux debian sarge
os.name = 'posix'
@@ -235,7 +244,7 @@
def test_get_scheme_names(self):
wanted = ('nt', 'nt_user', 'os2', 'os2_home', 'osx_framework_user',
- 'posix_home', 'posix_prefix', 'posix_user')
+ 'posix_home', 'posix_prefix', 'posix_user', 'pypy')
self.assertEqual(get_scheme_names(), wanted)
def test_symlink(self):
diff --git a/lib-python/2.7/test/test_tarfile.py b/lib-python/2.7/test/test_tarfile.py
--- a/lib-python/2.7/test/test_tarfile.py
+++ b/lib-python/2.7/test/test_tarfile.py
@@ -169,6 +169,7 @@
except tarfile.ReadError:
self.fail("tarfile.open() failed on empty archive")
self.assertListEqual(tar.getmembers(), [])
+ tar.close()
def test_null_tarfile(self):
# Test for issue6123: Allow opening empty archives.
@@ -207,16 +208,21 @@
fobj = open(self.tarname, "rb")
tar = tarfile.open(fileobj=fobj, mode=self.mode)
self.assertEqual(tar.name, os.path.abspath(fobj.name))
+ tar.close()
def test_no_name_attribute(self):
- data = open(self.tarname, "rb").read()
+ f = open(self.tarname, "rb")
+ data = f.read()
+ f.close()
fobj = StringIO.StringIO(data)
self.assertRaises(AttributeError, getattr, fobj, "name")
tar = tarfile.open(fileobj=fobj, mode=self.mode)
self.assertEqual(tar.name, None)
def test_empty_name_attribute(self):
- data = open(self.tarname, "rb").read()
+ f = open(self.tarname, "rb")
+ data = f.read()
+ f.close()
fobj = StringIO.StringIO(data)
fobj.name = ""
tar = tarfile.open(fileobj=fobj, mode=self.mode)
@@ -515,6 +521,7 @@
self.tar = tarfile.open(self.tarname, mode=self.mode, encoding="iso8859-1")
tarinfo = self.tar.getmember("pax/umlauts-�������")
self._test_member(tarinfo, size=7011, chksum=md5_regtype)
+ self.tar.close()
class LongnameTest(ReadTest):
@@ -675,6 +682,7 @@
tar = tarfile.open(tmpname, self.mode)
tarinfo = tar.gettarinfo(path)
self.assertEqual(tarinfo.size, 0)
+ tar.close()
finally:
os.rmdir(path)
@@ -692,6 +700,7 @@
tar.gettarinfo(target)
tarinfo = tar.gettarinfo(link)
self.assertEqual(tarinfo.size, 0)
+ tar.close()
finally:
os.remove(target)
os.remove(link)
@@ -704,6 +713,7 @@
tar = tarfile.open(tmpname, self.mode)
tarinfo = tar.gettarinfo(path)
self.assertEqual(tarinfo.size, 0)
+ tar.close()
finally:
os.remove(path)
@@ -722,6 +732,7 @@
tar.add(dstname)
os.chdir(cwd)
self.assertTrue(tar.getnames() == [], "added the archive to itself")
+ tar.close()
def test_exclude(self):
tempdir = os.path.join(TEMPDIR, "exclude")
@@ -742,6 +753,7 @@
tar = tarfile.open(tmpname, "r")
self.assertEqual(len(tar.getmembers()), 1)
self.assertEqual(tar.getnames()[0], "empty_dir")
+ tar.close()
finally:
shutil.rmtree(tempdir)
@@ -947,7 +959,9 @@
fobj.close()
elif self.mode.endswith("bz2"):
dec = bz2.BZ2Decompressor()
- data = open(tmpname, "rb").read()
+ f = open(tmpname, "rb")
+ data = f.read()
+ f.close()
data = dec.decompress(data)
self.assertTrue(len(dec.unused_data) == 0,
"found trailing data")
@@ -1026,6 +1040,7 @@
"unable to read longname member")
self.assertEqual(tarinfo.linkname, member.linkname,
"unable to read longname member")
+ tar.close()
def test_longname_1023(self):
self._test(("longnam/" * 127) + "longnam")
@@ -1118,6 +1133,7 @@
else:
n = tar.getmembers()[0].name
self.assertTrue(name == n, "PAX longname creation failed")
+ tar.close()
def test_pax_global_header(self):
pax_headers = {
@@ -1146,6 +1162,7 @@
tarfile.PAX_NUMBER_FIELDS[key](val)
except (TypeError, ValueError):
self.fail("unable to convert pax header field")
+ tar.close()
def test_pax_extended_header(self):
# The fields from the pax header have priority over the
@@ -1165,6 +1182,7 @@
self.assertEqual(t.pax_headers, pax_headers)
self.assertEqual(t.name, "foo")
self.assertEqual(t.uid, 123)
+ tar.close()
class UstarUnicodeTest(unittest.TestCase):
@@ -1208,6 +1226,7 @@
tarinfo.name = "foo"
tarinfo.uname = u"���"
self.assertRaises(UnicodeError, tar.addfile, tarinfo)
+ tar.close()
def test_unicode_argument(self):
tar = tarfile.open(tarname, "r", encoding="iso8859-1", errors="strict")
@@ -1262,6 +1281,7 @@
tar = tarfile.open(tmpname, format=self.format, encoding="ascii",
errors=handler)
self.assertEqual(tar.getnames()[0], name)
+ tar.close()
self.assertRaises(UnicodeError, tarfile.open, tmpname,
encoding="ascii", errors="strict")
@@ -1274,6 +1294,7 @@
tar = tarfile.open(tmpname, format=self.format, encoding="iso8859-1",
errors="utf-8")
self.assertEqual(tar.getnames()[0], "���/" + u"�".encode("utf8"))
+ tar.close()
class AppendTest(unittest.TestCase):
@@ -1301,6 +1322,7 @@
def _test(self, names=["bar"], fileobj=None):
tar = tarfile.open(self.tarname, fileobj=fileobj)
self.assertEqual(tar.getnames(), names)
+ tar.close()
def test_non_existing(self):
self._add_testfile()
@@ -1319,7 +1341,9 @@
def test_fileobj(self):
self._create_testtar()
- data = open(self.tarname).read()
+ f = open(self.tarname)
+ data = f.read()
+ f.close()
fobj = StringIO.StringIO(data)
self._add_testfile(fobj)
fobj.seek(0)
@@ -1345,7 +1369,9 @@
# Append mode is supposed to fail if the tarfile to append to
# does not end with a zero block.
def _test_error(self, data):
- open(self.tarname, "wb").write(data)
+ f = open(self.tarname, "wb")
+ f.write(data)
+ f.close()
self.assertRaises(tarfile.ReadError, self._add_testfile)
def test_null(self):
diff --git a/lib-python/2.7/test/test_tempfile.py b/lib-python/2.7/test/test_tempfile.py
--- a/lib-python/2.7/test/test_tempfile.py
+++ b/lib-python/2.7/test/test_tempfile.py
@@ -23,8 +23,8 @@
# TEST_FILES may need to be tweaked for systems depending on the maximum
# number of files that can be opened at one time (see ulimit -n)
-if sys.platform in ('openbsd3', 'openbsd4'):
- TEST_FILES = 48
+if sys.platform.startswith("openbsd"):
+ TEST_FILES = 64 # ulimit -n defaults to 128 for normal users
else:
TEST_FILES = 100
@@ -244,6 +244,7 @@
dir = tempfile.mkdtemp()
try:
self.do_create(dir=dir).write("blat")
+ test_support.gc_collect()
finally:
os.rmdir(dir)
@@ -528,12 +529,15 @@
self.do_create(suf="b")
self.do_create(pre="a", suf="b")
self.do_create(pre="aa", suf=".txt")
+ test_support.gc_collect()
def test_many(self):
# mktemp can choose many usable file names (stochastic)
extant = range(TEST_FILES)
for i in extant:
extant[i] = self.do_create(pre="aa")
+ del extant
+ test_support.gc_collect()
## def test_warning(self):
## # mktemp issues a warning when used
diff --git a/lib-python/2.7/test/test_thread.py b/lib-python/2.7/test/test_thread.py
--- a/lib-python/2.7/test/test_thread.py
+++ b/lib-python/2.7/test/test_thread.py
@@ -128,6 +128,7 @@
del task
while not done:
time.sleep(0.01)
+ test_support.gc_collect()
self.assertEqual(thread._count(), orig)
diff --git a/lib-python/2.7/test/test_threading.py b/lib-python/2.7/test/test_threading.py
--- a/lib-python/2.7/test/test_threading.py
+++ b/lib-python/2.7/test/test_threading.py
@@ -161,6 +161,7 @@
# PyThreadState_SetAsyncExc() is a CPython-only gimmick, not (currently)
# exposed at the Python level. This test relies on ctypes to get at it.
+ @test.test_support.cpython_only
def test_PyThreadState_SetAsyncExc(self):
try:
import ctypes
@@ -266,6 +267,7 @@
finally:
threading._start_new_thread = _start_new_thread
+ @test.test_support.cpython_only
def test_finalize_runnning_thread(self):
# Issue 1402: the PyGILState_Ensure / _Release functions may be called
# very late on python exit: on deallocation of a running thread for
@@ -383,6 +385,7 @@
finally:
sys.setcheckinterval(old_interval)
+ @test.test_support.cpython_only
def test_no_refcycle_through_target(self):
class RunSelfFunction(object):
def __init__(self, should_raise):
@@ -425,6 +428,9 @@
def joiningfunc(mainthread):
mainthread.join()
print 'end of thread'
+ # stdout is fully buffered because not a tty, we have to flush
+ # before exit.
+ sys.stdout.flush()
\n""" + script
p = subprocess.Popen([sys.executable, "-c", script], stdout=subprocess.PIPE)
diff --git a/lib-python/2.7/test/test_threading_local.py b/lib-python/2.7/test/test_threading_local.py
--- a/lib-python/2.7/test/test_threading_local.py
+++ b/lib-python/2.7/test/test_threading_local.py
@@ -173,8 +173,9 @@
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
- with self.assertRaises(AttributeError):
- obj.__dict__ = {}
+ if test_support.check_impl_detail():
+ with self.assertRaises(AttributeError):
+ obj.__dict__ = {}
with self.assertRaises(AttributeError):
del obj.__dict__
diff --git a/lib-python/2.7/test/test_traceback.py b/lib-python/2.7/test/test_traceback.py
--- a/lib-python/2.7/test/test_traceback.py
+++ b/lib-python/2.7/test/test_traceback.py
@@ -5,7 +5,8 @@
import sys
import unittest
from imp import reload
-from test.test_support import run_unittest, is_jython, Error
+from test.test_support import run_unittest, Error
+from test.test_support import impl_detail, check_impl_detail
import traceback
@@ -49,10 +50,8 @@
self.assertTrue(err[2].count('\n') == 1) # and no additional newline
self.assertTrue(err[1].find("+") == err[2].find("^")) # in the right place
+ @impl_detail("other implementations may add a caret (why shouldn't they?)")
def test_nocaret(self):
- if is_jython:
- # jython adds a caret in this case (why shouldn't it?)
- return
err = self.get_exception_format(self.syntax_error_without_caret,
SyntaxError)
self.assertTrue(len(err) == 3)
@@ -63,8 +62,11 @@
IndentationError)
self.assertTrue(len(err) == 4)
self.assertTrue(err[1].strip() == "print 2")
- self.assertIn("^", err[2])
- self.assertTrue(err[1].find("2") == err[2].find("^"))
+ if check_impl_detail():
+ # on CPython, there is a "^" at the end of the line
+ # on PyPy, there is a "^" too, but at the start, more logically
+ self.assertIn("^", err[2])
+ self.assertTrue(err[1].find("2") == err[2].find("^"))
def test_bug737473(self):
import os, tempfile, time
@@ -74,7 +76,8 @@
try:
sys.path.insert(0, testdir)
testfile = os.path.join(testdir, 'test_bug737473.py')
- print >> open(testfile, 'w'), """
+ with open(testfile, 'w') as f:
+ print >> f, """
def test():
raise ValueError"""
@@ -96,7 +99,8 @@
# three seconds are needed for this test to pass reliably :-(
time.sleep(4)
- print >> open(testfile, 'w'), """
+ with open(testfile, 'w') as f:
+ print >> f, """
def test():
raise NotImplementedError"""
reload(test_bug737473)
diff --git a/lib-python/2.7/test/test_types.py b/lib-python/2.7/test/test_types.py
--- a/lib-python/2.7/test/test_types.py
+++ b/lib-python/2.7/test/test_types.py
@@ -1,7 +1,8 @@
# Python test set -- part 6, built-in types
from test.test_support import run_unittest, have_unicode, run_with_locale, \
- check_py3k_warnings
+ check_py3k_warnings, \
+ impl_detail, check_impl_detail
import unittest
import sys
import locale
@@ -289,9 +290,14 @@
# array.array() returns an object that does not implement a char buffer,
# something which int() uses for conversion.
import array
- try: int(buffer(array.array('c')))
+ try: int(buffer(array.array('c', '5')))
except TypeError: pass
- else: self.fail("char buffer (at C level) not working")
+ else:
+ if check_impl_detail():
+ self.fail("char buffer (at C level) not working")
+ #else:
+ # it works on PyPy, which does not have the distinction
+ # between char buffer and binary buffer. XXX fine enough?
def test_int__format__(self):
def test(i, format_spec, result):
@@ -741,6 +747,7 @@
for code in 'xXobns':
self.assertRaises(ValueError, format, 0, ',' + code)
+ @impl_detail("the types' internal size attributes are CPython-only")
def test_internal_sizes(self):
self.assertGreater(object.__basicsize__, 0)
self.assertGreater(tuple.__itemsize__, 0)
diff --git a/lib-python/2.7/test/test_unicode.py b/lib-python/2.7/test/test_unicode.py
--- a/lib-python/2.7/test/test_unicode.py
+++ b/lib-python/2.7/test/test_unicode.py
@@ -448,10 +448,11 @@
meth('\xff')
with self.assertRaises(TypeError) as cm:
meth(['f'])
- exc = str(cm.exception)
- self.assertIn('unicode', exc)
- self.assertIn('str', exc)
- self.assertIn('tuple', exc)
+ if test_support.check_impl_detail():
+ exc = str(cm.exception)
+ self.assertIn('unicode', exc)
+ self.assertIn('str', exc)
+ self.assertIn('tuple', exc)
@test_support.run_with_locale('LC_ALL', 'de_DE', 'fr_FR')
def test_format_float(self):
@@ -1062,7 +1063,8 @@
# to take a 64-bit long, this test should apply to all platforms.
if sys.maxint > (1 << 32) or struct.calcsize('P') != 4:
return
- self.assertRaises(OverflowError, u't\tt\t'.expandtabs, sys.maxint)
+ self.assertRaises((OverflowError, MemoryError),
+ u't\tt\t'.expandtabs, sys.maxint)
def test__format__(self):
def test(value, format, expected):
diff --git a/lib-python/2.7/test/test_unicodedata.py b/lib-python/2.7/test/test_unicodedata.py
--- a/lib-python/2.7/test/test_unicodedata.py
+++ b/lib-python/2.7/test/test_unicodedata.py
@@ -233,10 +233,12 @@
# been loaded in this process.
popen = subprocess.Popen(args, stderr=subprocess.PIPE)
popen.wait()
- self.assertEqual(popen.returncode, 1)
- error = "SyntaxError: (unicode error) \N escapes not supported " \
- "(can't load unicodedata module)"
- self.assertIn(error, popen.stderr.read())
+ self.assertIn(popen.returncode, [0, 1]) # at least it did not segfault
+ if test.test_support.check_impl_detail():
+ self.assertEqual(popen.returncode, 1)
+ error = "SyntaxError: (unicode error) \N escapes not supported " \
+ "(can't load unicodedata module)"
+ self.assertIn(error, popen.stderr.read())
def test_decimal_numeric_consistent(self):
# Test that decimal and numeric are consistent,
diff --git a/lib-python/2.7/test/test_unpack.py b/lib-python/2.7/test/test_unpack.py
--- a/lib-python/2.7/test/test_unpack.py
+++ b/lib-python/2.7/test/test_unpack.py
@@ -62,14 +62,14 @@
>>> a, b = t
Traceback (most recent call last):
...
- ValueError: too many values to unpack
+ ValueError: expected length 2, got 3
Unpacking tuple of wrong size
>>> a, b = l
Traceback (most recent call last):
...
- ValueError: too many values to unpack
+ ValueError: expected length 2, got 3
Unpacking sequence too short
diff --git a/lib-python/2.7/test/test_urllib2.py b/lib-python/2.7/test/test_urllib2.py
--- a/lib-python/2.7/test/test_urllib2.py
+++ b/lib-python/2.7/test/test_urllib2.py
@@ -307,6 +307,9 @@
def getresponse(self):
return MockHTTPResponse(MockFile(), {}, 200, "OK")
+ def close(self):
+ pass
+
class MockHandler:
# useful for testing handler machinery
# see add_ordered_mock_handlers() docstring
diff --git a/lib-python/2.7/test/test_warnings.py b/lib-python/2.7/test/test_warnings.py
--- a/lib-python/2.7/test/test_warnings.py
+++ b/lib-python/2.7/test/test_warnings.py
@@ -355,7 +355,8 @@
# test_support.import_fresh_module utility function
def test_accelerated(self):
self.assertFalse(original_warnings is self.module)
- self.assertFalse(hasattr(self.module.warn, 'func_code'))
+ self.assertFalse(hasattr(self.module.warn, 'func_code') and
+ hasattr(self.module.warn.func_code, 'co_filename'))
class PyWarnTests(BaseTest, WarnTests):
module = py_warnings
@@ -364,7 +365,8 @@
# test_support.import_fresh_module utility function
def test_pure_python(self):
self.assertFalse(original_warnings is self.module)
- self.assertTrue(hasattr(self.module.warn, 'func_code'))
+ self.assertTrue(hasattr(self.module.warn, 'func_code') and
+ hasattr(self.module.warn.func_code, 'co_filename'))
class WCmdLineTests(unittest.TestCase):
diff --git a/lib-python/2.7/test/test_weakref.py b/lib-python/2.7/test/test_weakref.py
--- a/lib-python/2.7/test/test_weakref.py
+++ b/lib-python/2.7/test/test_weakref.py
@@ -1,4 +1,3 @@
-import gc
import sys
import unittest
import UserList
@@ -6,6 +5,7 @@
import operator
from test import test_support
+from test.test_support import gc_collect
# Used in ReferencesTestCase.test_ref_created_during_del() .
ref_from_del = None
@@ -70,6 +70,7 @@
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del o
+ gc_collect()
self.assertTrue(ref1() is None,
"expected reference to be invalidated")
self.assertTrue(ref2() is None,
@@ -101,13 +102,16 @@
ref1 = weakref.proxy(o, self.callback)
ref2 = weakref.proxy(o, self.callback)
del o
+ gc_collect()
def check(proxy):
proxy.bar
self.assertRaises(weakref.ReferenceError, check, ref1)
self.assertRaises(weakref.ReferenceError, check, ref2)
- self.assertRaises(weakref.ReferenceError, bool, weakref.proxy(C()))
+ ref3 = weakref.proxy(C())
+ gc_collect()
+ self.assertRaises(weakref.ReferenceError, bool, ref3)
self.assertTrue(self.cbcalled == 2)
def check_basic_ref(self, factory):
@@ -124,6 +128,7 @@
o = factory()
ref = weakref.ref(o, self.callback)
del o
+ gc_collect()
self.assertTrue(self.cbcalled == 1,
"callback did not properly set 'cbcalled'")
self.assertTrue(ref() is None,
@@ -148,6 +153,7 @@
self.assertTrue(weakref.getweakrefcount(o) == 2,
"wrong weak ref count for object")
del proxy
+ gc_collect()
self.assertTrue(weakref.getweakrefcount(o) == 1,
"wrong weak ref count for object after deleting proxy")
@@ -325,6 +331,7 @@
"got wrong number of weak reference objects")
del ref1, ref2, proxy1, proxy2
+ gc_collect()
self.assertTrue(weakref.getweakrefcount(o) == 0,
"weak reference objects not unlinked from"
" referent when discarded.")
@@ -338,6 +345,7 @@
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref1
+ gc_collect()
self.assertTrue(weakref.getweakrefs(o) == [ref2],
"list of refs does not match")
@@ -345,10 +353,12 @@
ref1 = weakref.ref(o, self.callback)
ref2 = weakref.ref(o, self.callback)
del ref2
+ gc_collect()
self.assertTrue(weakref.getweakrefs(o) == [ref1],
"list of refs does not match")
del ref1
+ gc_collect()
self.assertTrue(weakref.getweakrefs(o) == [],
"list of refs not cleared")
@@ -400,13 +410,11 @@
# when the second attempt to remove the instance from the "list
# of all objects" occurs.
- import gc
-
class C(object):
pass
c = C()
- wr = weakref.ref(c, lambda ignore: gc.collect())
+ wr = weakref.ref(c, lambda ignore: gc_collect())
del c
# There endeth the first part. It gets worse.
@@ -414,7 +422,7 @@
c1 = C()
c1.i = C()
- wr = weakref.ref(c1.i, lambda ignore: gc.collect())
+ wr = weakref.ref(c1.i, lambda ignore: gc_collect())
c2 = C()
c2.c1 = c1
@@ -430,8 +438,6 @@
del c2
def test_callback_in_cycle_1(self):
- import gc
-
class J(object):
pass
@@ -467,11 +473,9 @@
# search II.__mro__, but that's NULL. The result was a segfault in
# a release build, and an assert failure in a debug build.
del I, J, II
- gc.collect()
+ gc_collect()
def test_callback_in_cycle_2(self):
- import gc
-
# This is just like test_callback_in_cycle_1, except that II is an
# old-style class. The symptom is different then: an instance of an
# old-style class looks in its own __dict__ first. 'J' happens to
@@ -496,11 +500,9 @@
I.wr = weakref.ref(J, I.acallback)
del I, J, II
- gc.collect()
+ gc_collect()
def test_callback_in_cycle_3(self):
- import gc
-
# This one broke the first patch that fixed the last two. In this
# case, the objects reachable from the callback aren't also reachable
# from the object (c1) *triggering* the callback: you can get to
@@ -520,11 +522,9 @@
c2.wr = weakref.ref(c1, c2.cb)
del c1, c2
- gc.collect()
+ gc_collect()
def test_callback_in_cycle_4(self):
- import gc
-
# Like test_callback_in_cycle_3, except c2 and c1 have different
# classes. c2's class (C) isn't reachable from c1 then, so protecting
# objects reachable from the dying object (c1) isn't enough to stop
@@ -548,11 +548,9 @@
c2.wr = weakref.ref(c1, c2.cb)
del c1, c2, C, D
- gc.collect()
+ gc_collect()
def test_callback_in_cycle_resurrection(self):
- import gc
-
# Do something nasty in a weakref callback: resurrect objects
# from dead cycles. For this to be attempted, the weakref and
# its callback must also be part of the cyclic trash (else the
@@ -583,7 +581,7 @@
del c1, c2, C # make them all trash
self.assertEqual(alist, []) # del isn't enough to reclaim anything
- gc.collect()
+ gc_collect()
# c1.wr and c2.wr were part of the cyclic trash, so should have
# been cleared without their callbacks executing. OTOH, the weakref
# to C is bound to a function local (wr), and wasn't trash, so that
@@ -593,12 +591,10 @@
self.assertEqual(wr(), None)
del alist[:]
- gc.collect()
+ gc_collect()
self.assertEqual(alist, [])
def test_callbacks_on_callback(self):
- import gc
-
# Set up weakref callbacks *on* weakref callbacks.
alist = []
def safe_callback(ignore):
@@ -626,12 +622,12 @@
del callback, c, d, C
self.assertEqual(alist, []) # del isn't enough to clean up cycles
- gc.collect()
+ gc_collect()
self.assertEqual(alist, ["safe_callback called"])
self.assertEqual(external_wr(), None)
del alist[:]
- gc.collect()
+ gc_collect()
self.assertEqual(alist, [])
def test_gc_during_ref_creation(self):
@@ -641,9 +637,11 @@
self.check_gc_during_creation(weakref.proxy)
def check_gc_during_creation(self, makeref):
- thresholds = gc.get_threshold()
- gc.set_threshold(1, 1, 1)
- gc.collect()
+ if test_support.check_impl_detail():
+ import gc
+ thresholds = gc.get_threshold()
+ gc.set_threshold(1, 1, 1)
+ gc_collect()
class A:
pass
@@ -663,7 +661,8 @@
weakref.ref(referenced, callback)
finally:
- gc.set_threshold(*thresholds)
+ if test_support.check_impl_detail():
+ gc.set_threshold(*thresholds)
def test_ref_created_during_del(self):
# Bug #1377858
@@ -683,7 +682,7 @@
r = weakref.ref(Exception)
self.assertRaises(TypeError, r.__init__, 0, 0, 0, 0, 0)
# No exception should be raised here
- gc.collect()
+ gc_collect()
def test_classes(self):
# Check that both old-style classes and new-style classes
@@ -696,12 +695,12 @@
weakref.ref(int)
a = weakref.ref(A, l.append)
A = None
- gc.collect()
+ gc_collect()
self.assertEqual(a(), None)
self.assertEqual(l, [a])
b = weakref.ref(B, l.append)
B = None
- gc.collect()
+ gc_collect()
self.assertEqual(b(), None)
self.assertEqual(l, [a, b])
@@ -722,6 +721,7 @@
self.assertTrue(mr.called)
self.assertEqual(mr.value, 24)
del o
+ gc_collect()
self.assertTrue(mr() is None)
self.assertTrue(mr.called)
@@ -738,9 +738,11 @@
self.assertEqual(weakref.getweakrefcount(o), 3)
refs = weakref.getweakrefs(o)
self.assertEqual(len(refs), 3)
- self.assertTrue(r2 is refs[0])
- self.assertIn(r1, refs[1:])
- self.assertIn(r3, refs[1:])
+ assert set(refs) == set((r1, r2, r3))
+ if test_support.check_impl_detail():
+ self.assertTrue(r2 is refs[0])
+ self.assertIn(r1, refs[1:])
+ self.assertIn(r3, refs[1:])
def test_subclass_refs_dont_conflate_callbacks(self):
class MyRef(weakref.ref):
@@ -839,15 +841,18 @@
del items1, items2
self.assertTrue(len(dict) == self.COUNT)
del objects[0]
+ gc_collect()
self.assertTrue(len(dict) == (self.COUNT - 1),
"deleting object did not cause dictionary update")
del objects, o
+ gc_collect()
self.assertTrue(len(dict) == 0,
"deleting the values did not clear the dictionary")
# regression on SF bug #447152:
dict = weakref.WeakValueDictionary()
self.assertRaises(KeyError, dict.__getitem__, 1)
dict[2] = C()
+ gc_collect()
self.assertRaises(KeyError, dict.__getitem__, 2)
def test_weak_keys(self):
@@ -868,9 +873,11 @@
del items1, items2
self.assertTrue(len(dict) == self.COUNT)
del objects[0]
+ gc_collect()
self.assertTrue(len(dict) == (self.COUNT - 1),
"deleting object did not cause dictionary update")
del objects, o
+ gc_collect()
self.assertTrue(len(dict) == 0,
"deleting the keys did not clear the dictionary")
o = Object(42)
@@ -986,13 +993,13 @@
self.assertTrue(len(weakdict) == 2)
k, v = weakdict.popitem()
self.assertTrue(len(weakdict) == 1)
- if k is key1:
+ if k == key1:
self.assertTrue(v is value1)
else:
self.assertTrue(v is value2)
k, v = weakdict.popitem()
self.assertTrue(len(weakdict) == 0)
- if k is key1:
+ if k == key1:
self.assertTrue(v is value1)
else:
self.assertTrue(v is value2)
@@ -1137,6 +1144,7 @@
for o in objs:
count += 1
del d[o]
+ gc_collect()
self.assertEqual(len(d), 0)
self.assertEqual(count, 2)
@@ -1177,6 +1185,7 @@
>>> o is o2
True
>>> del o, o2
+>>> gc_collect()
>>> print r()
None
@@ -1229,6 +1238,7 @@
>>> id2obj(a_id) is a
True
>>> del a
+>>> gc_collect()
>>> try:
... id2obj(a_id)
... except KeyError:
diff --git a/lib-python/2.7/test/test_weakset.py b/lib-python/2.7/test/test_weakset.py
--- a/lib-python/2.7/test/test_weakset.py
+++ b/lib-python/2.7/test/test_weakset.py
@@ -57,6 +57,7 @@
self.assertEqual(len(self.s), len(self.d))
self.assertEqual(len(self.fs), 1)
del self.obj
+ test_support.gc_collect()
self.assertEqual(len(self.fs), 0)
def test_contains(self):
@@ -66,6 +67,7 @@
self.assertNotIn(1, self.s)
self.assertIn(self.obj, self.fs)
del self.obj
+ test_support.gc_collect()
self.assertNotIn(SomeClass('F'), self.fs)
def test_union(self):
@@ -204,6 +206,7 @@
self.assertEqual(self.s, dup)
self.assertRaises(TypeError, self.s.add, [])
self.fs.add(Foo())
+ test_support.gc_collect()
self.assertTrue(len(self.fs) == 1)
self.fs.add(self.obj)
self.assertTrue(len(self.fs) == 1)
@@ -330,10 +333,11 @@
next(it) # Trigger internal iteration
# Destroy an item
del items[-1]
- gc.collect() # just in case
+ test_support.gc_collect()
# We have removed either the first consumed items, or another one
self.assertIn(len(list(it)), [len(items), len(items) - 1])
del it
+ test_support.gc_collect()
# The removal has been committed
self.assertEqual(len(s), len(items))
diff --git a/lib-python/2.7/test/test_xml_etree.py b/lib-python/2.7/test/test_xml_etree.py
--- a/lib-python/2.7/test/test_xml_etree.py
+++ b/lib-python/2.7/test/test_xml_etree.py
@@ -1633,10 +1633,10 @@
Check reference leak.
>>> xmltoolkit63()
- >>> count = sys.getrefcount(None)
+ >>> count = sys.getrefcount(None) #doctest: +SKIP
>>> for i in range(1000):
... xmltoolkit63()
- >>> sys.getrefcount(None) - count
+ >>> sys.getrefcount(None) - count #doctest: +SKIP
0
"""
diff --git a/lib-python/2.7/test/test_xmlrpc.py b/lib-python/2.7/test/test_xmlrpc.py
--- a/lib-python/2.7/test/test_xmlrpc.py
+++ b/lib-python/2.7/test/test_xmlrpc.py
@@ -308,7 +308,7 @@
global ADDR, PORT, URL
ADDR, PORT = serv.socket.getsockname()
#connect to IP address directly. This avoids socket.create_connection()
- #trying to connect to "localhost" using all address families, which
+     #trying to connect to "localhost" using all address families, which
#causes slowdown e.g. on vista which supports AF_INET6. The server listens
#on AF_INET only.
URL = "http://%s:%d"%(ADDR, PORT)
@@ -367,7 +367,7 @@
global ADDR, PORT, URL
ADDR, PORT = serv.socket.getsockname()
#connect to IP address directly. This avoids socket.create_connection()
- #trying to connect to "localhost" using all address families, which
+     #trying to connect to "localhost" using all address families, which
#causes slowdown e.g. on vista which supports AF_INET6. The server listens
#on AF_INET only.
URL = "http://%s:%d"%(ADDR, PORT)
@@ -435,6 +435,7 @@
def tearDown(self):
# wait on the server thread to terminate
+ test_support.gc_collect() # to close the active connections
self.evt.wait(10)
# disable traceback reporting
@@ -472,9 +473,6 @@
# protocol error; provide additional information in test output
self.fail("%s\n%s" % (e, getattr(e, "headers", "")))
- def test_unicode_host(self):
- server = xmlrpclib.ServerProxy(u"http://%s:%d/RPC2"%(ADDR, PORT))
- self.assertEqual(server.add("a", u"\xe9"), u"a\xe9")
# [ch] The test 404 is causing lots of false alarms.
def XXXtest_404(self):
@@ -589,12 +587,6 @@
# This avoids waiting for the socket timeout.
self.test_simple1()
- def test_partial_post(self):
- # Check that a partial POST doesn't make the server loop: issue #14001.
- conn = httplib.HTTPConnection(ADDR, PORT)
- conn.request('POST', '/RPC2 HTTP/1.0\r\nContent-Length: 100\r\n\r\nbye')
- conn.close()
-
class MultiPathServerTestCase(BaseServerTestCase):
threadFunc = staticmethod(http_multi_server)
request_count = 2
diff --git a/lib-python/2.7/test/test_zlib.py b/lib-python/2.7/test/test_zlib.py
--- a/lib-python/2.7/test/test_zlib.py
+++ b/lib-python/2.7/test/test_zlib.py
@@ -1,6 +1,7 @@
import unittest
from test.test_support import TESTFN, run_unittest, import_module, unlink, requires
import binascii
+import os
import random
from test.test_support import precisionbigmemtest, _1G, _4G
import sys
@@ -99,14 +100,7 @@
class BaseCompressTestCase(object):
def check_big_compress_buffer(self, size, compress_func):
- _1M = 1024 * 1024
- fmt = "%%0%dx" % (2 * _1M)
- # Generate 10MB worth of random, and expand it by repeating it.
- # The assumption is that zlib's memory is not big enough to exploit
- # such spread out redundancy.
- data = ''.join([binascii.a2b_hex(fmt % random.getrandbits(8 * _1M))
- for i in range(10)])
- data = data * (size // len(data) + 1)
+ data = os.urandom(size)
try:
compress_func(data)
finally:
diff --git a/lib-python/2.7/trace.py b/lib-python/2.7/trace.py
--- a/lib-python/2.7/trace.py
+++ b/lib-python/2.7/trace.py
@@ -559,6 +559,10 @@
if len(funcs) == 1:
dicts = [d for d in gc.get_referrers(funcs[0])
if isinstance(d, dict)]
+ if len(dicts) == 0:
+ # PyPy may store functions directly on the class
+ # (more exactly: the container is not a Python object)
+ dicts = funcs
if len(dicts) == 1:
classes = [c for c in gc.get_referrers(dicts[0])
if hasattr(c, "__bases__")]
diff --git a/lib-python/2.7/urllib2.py b/lib-python/2.7/urllib2.py
--- a/lib-python/2.7/urllib2.py
+++ b/lib-python/2.7/urllib2.py
@@ -1171,6 +1171,7 @@
except TypeError: #buffering kw not supported
r = h.getresponse()
except socket.error, err: # XXX what error?
+ h.close()
raise URLError(err)
# Pick apart the HTTPResponse object to get the addinfourl
diff --git a/lib-python/2.7/uuid.py b/lib-python/2.7/uuid.py
--- a/lib-python/2.7/uuid.py
+++ b/lib-python/2.7/uuid.py
@@ -406,8 +406,12 @@
continue
if hasattr(lib, 'uuid_generate_random'):
_uuid_generate_random = lib.uuid_generate_random
+ _uuid_generate_random.argtypes = [ctypes.c_char * 16]
+ _uuid_generate_random.restype = None
if hasattr(lib, 'uuid_generate_time'):
_uuid_generate_time = lib.uuid_generate_time
+ _uuid_generate_time.argtypes = [ctypes.c_char * 16]
+ _uuid_generate_time.restype = None
# The uuid_generate_* functions are broken on MacOS X 10.5, as noted
# in issue #8621 the function generates the same sequence of values
@@ -436,6 +440,9 @@
lib = None
_UuidCreate = getattr(lib, 'UuidCreateSequential',
getattr(lib, 'UuidCreate', None))
+ if _UuidCreate is not None:
+ _UuidCreate.argtypes = [ctypes.c_char * 16]
+ _UuidCreate.restype = ctypes.c_int
except:
pass
diff --git a/lib-python/modified-2.7/UserDict.py b/lib-python/modified-2.7/UserDict.py
deleted file mode 100644
--- a/lib-python/modified-2.7/UserDict.py
+++ /dev/null
@@ -1,189 +0,0 @@
-"""A more or less complete user-defined wrapper around dictionary objects."""
-
-# XXX This is a bit of a hack (as usual :-))
-# the actual content of the file is not changed, but we put it here to make
-# virtualenv happy (because its internal logic expects at least one of the
-# REQUIRED_MODULES to be in modified-*)
-
-class UserDict:
- def __init__(self, dict=None, **kwargs):
- self.data = {}
- if dict is not None:
- self.update(dict)
- if len(kwargs):
- self.update(kwargs)
- def __repr__(self): return repr(self.data)
- def __cmp__(self, dict):
- if isinstance(dict, UserDict):
- return cmp(self.data, dict.data)
- else:
- return cmp(self.data, dict)
- __hash__ = None # Avoid Py3k warning
- def __len__(self): return len(self.data)
- def __getitem__(self, key):
- if key in self.data:
- return self.data[key]
- if hasattr(self.__class__, "__missing__"):
- return self.__class__.__missing__(self, key)
- raise KeyError(key)
- def __setitem__(self, key, item): self.data[key] = item
- def __delitem__(self, key): del self.data[key]
- def clear(self): self.data.clear()
- def copy(self):
- if self.__class__ is UserDict:
- return UserDict(self.data.copy())
- import copy
- data = self.data
- try:
- self.data = {}
- c = copy.copy(self)
- finally:
- self.data = data
- c.update(self)
- return c
- def keys(self): return self.data.keys()
- def items(self): return self.data.items()
- def iteritems(self): return self.data.iteritems()
- def iterkeys(self): return self.data.iterkeys()
- def itervalues(self): return self.data.itervalues()
- def values(self): return self.data.values()
- def has_key(self, key): return key in self.data
- def update(self, dict=None, **kwargs):
- if dict is None:
- pass
- elif isinstance(dict, UserDict):
- self.data.update(dict.data)
- elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
- self.data.update(dict)
- else:
- for k, v in dict.items():
- self[k] = v
- if len(kwargs):
- self.data.update(kwargs)
- def get(self, key, failobj=None):
- if key not in self:
- return failobj
- return self[key]
- def setdefault(self, key, failobj=None):
- if key not in self:
- self[key] = failobj
- return self[key]
- def pop(self, key, *args):
- return self.data.pop(key, *args)
- def popitem(self):
- return self.data.popitem()
- def __contains__(self, key):
- return key in self.data
- @classmethod
- def fromkeys(cls, iterable, value=None):
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
-class IterableUserDict(UserDict):
- def __iter__(self):
- return iter(self.data)
-
-try:
- import _abcoll
-except ImportError:
- pass # e.g. no '_weakref' module on this pypy
-else:
- _abcoll.MutableMapping.register(IterableUserDict)
-
-
-class DictMixin:
- # Mixin defining all dictionary methods for classes that already have
- # a minimum dictionary interface including getitem, setitem, delitem,
- # and keys. Without knowledge of the subclass constructor, the mixin
- # does not define __init__() or copy(). In addition to the four base
- # methods, progressively more efficiency comes with defining
- # __contains__(), __iter__(), and iteritems().
-
- # second level definitions support higher levels
- def __iter__(self):
- for k in self.keys():
- yield k
- def has_key(self, key):
- try:
- self[key]
- except KeyError:
- return False
- return True
- def __contains__(self, key):
- return self.has_key(key)
-
- # third level takes advantage of second level definitions
- def iteritems(self):
- for k in self:
- yield (k, self[k])
- def iterkeys(self):
- return self.__iter__()
-
- # fourth level uses definitions from lower levels
- def itervalues(self):
- for _, v in self.iteritems():
- yield v
- def values(self):
- return [v for _, v in self.iteritems()]
- def items(self):
- return list(self.iteritems())
- def clear(self):
- for key in self.keys():
- del self[key]
- def setdefault(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- self[key] = default
- return default
- def pop(self, key, *args):
- if len(args) > 1:
- raise TypeError, "pop expected at most 2 arguments, got "\
- + repr(1 + len(args))
- try:
- value = self[key]
- except KeyError:
- if args:
- return args[0]
- raise
- del self[key]
- return value
- def popitem(self):
- try:
- k, v = self.iteritems().next()
- except StopIteration:
- raise KeyError, 'container is empty'
- del self[k]
- return (k, v)
- def update(self, other=None, **kwargs):
- # Make progressively weaker assumptions about "other"
- if other is None:
- pass
- elif hasattr(other, 'iteritems'): # iteritems saves memory and lookups
- for k, v in other.iteritems():
- self[k] = v
- elif hasattr(other, 'keys'):
- for k in other.keys():
- self[k] = other[k]
- else:
- for k, v in other:
- self[k] = v
- if kwargs:
- self.update(kwargs)
- def get(self, key, default=None):
- try:
- return self[key]
- except KeyError:
- return default
- def __repr__(self):
- return repr(dict(self.iteritems()))
- def __cmp__(self, other):
- if other is None:
- return 1
- if isinstance(other, DictMixin):
- other = dict(other.iteritems())
- return cmp(dict(self.iteritems()), other)
- def __len__(self):
- return len(self.keys())
diff --git a/lib-python/modified-2.7/_threading_local.py b/lib-python/modified-2.7/_threading_local.py
deleted file mode 100644
--- a/lib-python/modified-2.7/_threading_local.py
+++ /dev/null
@@ -1,251 +0,0 @@
-"""Thread-local objects.
-
-(Note that this module provides a Python version of the threading.local
- class. Depending on the version of Python you're using, there may be a
- faster one available. You should always import the `local` class from
- `threading`.)
-
-Thread-local objects support the management of thread-local data.
-If you have data that you want to be local to a thread, simply create
-a thread-local object and use its attributes:
-
- >>> mydata = local()
- >>> mydata.number = 42
- >>> mydata.number
- 42
-
-You can also access the local-object's dictionary:
-
- >>> mydata.__dict__
- {'number': 42}
- >>> mydata.__dict__.setdefault('widgets', [])
- []
- >>> mydata.widgets
- []
-
-What's important about thread-local objects is that their data are
-local to a thread. If we access the data in a different thread:
-
- >>> log = []
- >>> def f():
- ... items = mydata.__dict__.items()
- ... items.sort()
- ... log.append(items)
- ... mydata.number = 11
- ... log.append(mydata.number)
-
- >>> import threading
- >>> thread = threading.Thread(target=f)
- >>> thread.start()
- >>> thread.join()
- >>> log
- [[], 11]
-
-we get different data. Furthermore, changes made in the other thread
-don't affect data seen in this thread:
-
- >>> mydata.number
- 42
-
-Of course, values you get from a local object, including a __dict__
-attribute, are for whatever thread was current at the time the
-attribute was read. For that reason, you generally don't want to save
-these values across threads, as they apply only to the thread they
-came from.
-
-You can create custom local objects by subclassing the local class:
-
- >>> class MyLocal(local):
- ... number = 2
- ... initialized = False
- ... def __init__(self, **kw):
- ... if self.initialized:
- ... raise SystemError('__init__ called too many times')
- ... self.initialized = True
- ... self.__dict__.update(kw)
- ... def squared(self):
- ... return self.number ** 2
-
-This can be useful to support default values, methods and
-initialization. Note that if you define an __init__ method, it will be
-called each time the local object is used in a separate thread. This
-is necessary to initialize each thread's dictionary.
-
-Now if we create a local object:
-
- >>> mydata = MyLocal(color='red')
-
-Now we have a default number:
-
- >>> mydata.number
- 2
-
-an initial color:
-
- >>> mydata.color
- 'red'
- >>> del mydata.color
-
-And a method that operates on the data:
-
- >>> mydata.squared()
- 4
-
-As before, we can access the data in a separate thread:
-
- >>> log = []
- >>> thread = threading.Thread(target=f)
- >>> thread.start()
- >>> thread.join()
- >>> log
- [[('color', 'red'), ('initialized', True)], 11]
-
-without affecting this thread's data:
-
- >>> mydata.number
- 2
- >>> mydata.color
- Traceback (most recent call last):
- ...
- AttributeError: 'MyLocal' object has no attribute 'color'
-
-Note that subclasses can define slots, but they are not thread
-local. They are shared across threads:
-
- >>> class MyLocal(local):
- ... __slots__ = 'number'
-
- >>> mydata = MyLocal()
- >>> mydata.number = 42
- >>> mydata.color = 'red'
-
-So, the separate thread:
-
- >>> thread = threading.Thread(target=f)
- >>> thread.start()
- >>> thread.join()
-
-affects what we see:
-
- >>> mydata.number
- 11
-
->>> del mydata
-"""
-
-__all__ = ["local"]
-
-# We need to use objects from the threading module, but the threading
-# module may also want to use our `local` class, if support for locals
-# isn't compiled in to the `thread` module. This creates potential problems
-# with circular imports. For that reason, we don't import `threading`
-# until the bottom of this file (a hack sufficient to worm around the
-# potential problems). Note that almost all platforms do have support for
-# locals in the `thread` module, and there is no circular import problem
-# then, so problems introduced by fiddling the order of imports here won't
-# manifest on most boxes.
-
-class _localbase(object):
- __slots__ = '_local__key', '_local__args', '_local__lock'
-
- def __new__(cls, *args, **kw):
- self = object.__new__(cls)
- key = '_local__key', 'thread.local.' + str(id(self))
- object.__setattr__(self, '_local__key', key)
- object.__setattr__(self, '_local__args', (args, kw))
- object.__setattr__(self, '_local__lock', RLock())
-
- if (args or kw) and (cls.__init__ == object.__init__):
- raise TypeError("Initialization arguments are not supported")
-
- # We need to create the thread dict in anticipation of
- # __init__ being called, to make sure we don't call it
- # again ourselves.
- dict = object.__getattribute__(self, '__dict__')
- current_thread().__dict__[key] = dict
-
- return self
-
-def _patch(self):
- key = object.__getattribute__(self, '_local__key')
- d = current_thread().__dict__.get(key)
- if d is None:
- d = {}
- current_thread().__dict__[key] = d
- object.__setattr__(self, '__dict__', d)
-
- # we have a new instance dict, so call out __init__ if we have
- # one
- cls = type(self)
- if cls.__init__ is not object.__init__:
- args, kw = object.__getattribute__(self, '_local__args')
- cls.__init__(self, *args, **kw)
- else:
- object.__setattr__(self, '__dict__', d)
-
-class local(_localbase):
-
- def __getattribute__(self, name):
- lock = object.__getattribute__(self, '_local__lock')
- lock.acquire()
- try:
- _patch(self)
- return object.__getattribute__(self, name)
- finally:
- lock.release()
-
- def __setattr__(self, name, value):
- if name == '__dict__':
- raise AttributeError(
- "%r object attribute '__dict__' is read-only"
- % self.__class__.__name__)
- lock = object.__getattribute__(self, '_local__lock')
- lock.acquire()
- try:
- _patch(self)
- return object.__setattr__(self, name, value)
- finally:
- lock.release()
-
- def __delattr__(self, name):
- if name == '__dict__':
- raise AttributeError(
- "%r object attribute '__dict__' is read-only"
- % self.__class__.__name__)
- lock = object.__getattribute__(self, '_local__lock')
- lock.acquire()
- try:
- _patch(self)
- return object.__delattr__(self, name)
- finally:
- lock.release()
-
- def __del__(self):
- import threading
-
- key = object.__getattribute__(self, '_local__key')
-
- try:
- # We use the non-locking API since we might already hold the lock
- # (__del__ can be called at any point by the cyclic GC).
- threads = threading._enumerate()
- except:
- # If enumerating the current threads fails, as it seems to do
- # during shutdown, we'll skip cleanup under the assumption
- # that there is nothing to clean up.
- return
-
- for thread in threads:
- try:
- __dict__ = thread.__dict__
- except AttributeError:
- # Thread is dying, rest in peace.
- continue
-
- if key in __dict__:
- try:
- del __dict__[key]
- except KeyError:
- pass # didn't have anything in this thread
-
-from threading import current_thread, RLock
diff --git a/lib-python/modified-2.7/ctypes/__init__.py b/lib-python/modified-2.7/ctypes/__init__.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/__init__.py
+++ /dev/null
@@ -1,554 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-"""create and manipulate C data types in Python"""
-
-import os as _os, sys as _sys
-
-__version__ = "1.1.0"
-
-import _ffi
-from _ctypes import Union, Structure, Array
-from _ctypes import _Pointer
-from _ctypes import CFuncPtr as _CFuncPtr
-from _ctypes import __version__ as _ctypes_version
-from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
-from _ctypes import ArgumentError
-
-from struct import calcsize as _calcsize
-
-if __version__ != _ctypes_version:
- raise Exception("Version number mismatch", __version__, _ctypes_version)
-
-if _os.name in ("nt", "ce"):
- from _ctypes import FormatError
-
-DEFAULT_MODE = RTLD_LOCAL
-if _os.name == "posix" and _sys.platform == "darwin":
- # On OS X 10.3, we use RTLD_GLOBAL as default mode
- # because RTLD_LOCAL does not work at least on some
- # libraries. OS X 10.3 is Darwin 7, so we check for
- # that.
-
- if int(_os.uname()[2].split('.')[0]) < 8:
- DEFAULT_MODE = RTLD_GLOBAL
-
-from _ctypes import FUNCFLAG_CDECL as _FUNCFLAG_CDECL, \
- FUNCFLAG_PYTHONAPI as _FUNCFLAG_PYTHONAPI, \
- FUNCFLAG_USE_ERRNO as _FUNCFLAG_USE_ERRNO, \
- FUNCFLAG_USE_LASTERROR as _FUNCFLAG_USE_LASTERROR
-
-"""
-WINOLEAPI -> HRESULT
-WINOLEAPI_(type)
-
-STDMETHODCALLTYPE
-
-STDMETHOD(name)
-STDMETHOD_(type, name)
-
-STDAPICALLTYPE
-"""
-
-def create_string_buffer(init, size=None):
- """create_string_buffer(aString) -> character array
- create_string_buffer(anInteger) -> character array
- create_string_buffer(aString, anInteger) -> character array
- """
- if isinstance(init, (str, unicode)):
- if size is None:
- size = len(init)+1
- buftype = c_char * size
- buf = buftype()
- buf.value = init
- return buf
- elif isinstance(init, (int, long)):
- buftype = c_char * init
- buf = buftype()
- return buf
- raise TypeError(init)
-
-def c_buffer(init, size=None):
-## "deprecated, use create_string_buffer instead"
-## import warnings
-## warnings.warn("c_buffer is deprecated, use create_string_buffer instead",
-## DeprecationWarning, stacklevel=2)
- return create_string_buffer(init, size)
-
-_c_functype_cache = {}
-def CFUNCTYPE(restype, *argtypes, **kw):
- """CFUNCTYPE(restype, *argtypes,
- use_errno=False, use_last_error=False) -> function prototype.
-
- restype: the result type
- argtypes: a sequence specifying the argument types
-
- The function prototype can be called in different ways to create a
- callable object:
-
- prototype(integer address) -> foreign function
- prototype(callable) -> create and return a C callable function from callable
- prototype(integer index, method name[, paramflags]) -> foreign function calling a COM method
- prototype((ordinal number, dll object)[, paramflags]) -> foreign function exported by ordinal
- prototype((function name, dll object)[, paramflags]) -> foreign function exported by name
- """
- flags = _FUNCFLAG_CDECL
- if kw.pop("use_errno", False):
- flags |= _FUNCFLAG_USE_ERRNO
- if kw.pop("use_last_error", False):
- flags |= _FUNCFLAG_USE_LASTERROR
- if kw:
- raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
- try:
- return _c_functype_cache[(restype, argtypes, flags)]
- except KeyError:
- class CFunctionType(_CFuncPtr):
- _argtypes_ = argtypes
- _restype_ = restype
- _flags_ = flags
- _c_functype_cache[(restype, argtypes, flags)] = CFunctionType
- return CFunctionType
-
-if _os.name in ("nt", "ce"):
- from _ctypes import LoadLibrary as _dlopen
- from _ctypes import FUNCFLAG_STDCALL as _FUNCFLAG_STDCALL
- if _os.name == "ce":
- # 'ce' doesn't have the stdcall calling convention
- _FUNCFLAG_STDCALL = _FUNCFLAG_CDECL
-
- _win_functype_cache = {}
- def WINFUNCTYPE(restype, *argtypes, **kw):
- # docstring set later (very similar to CFUNCTYPE.__doc__)
- flags = _FUNCFLAG_STDCALL
- if kw.pop("use_errno", False):
- flags |= _FUNCFLAG_USE_ERRNO
- if kw.pop("use_last_error", False):
- flags |= _FUNCFLAG_USE_LASTERROR
- if kw:
- raise ValueError("unexpected keyword argument(s) %s" % kw.keys())
- try:
- return _win_functype_cache[(restype, argtypes, flags)]
- except KeyError:
- class WinFunctionType(_CFuncPtr):
- _argtypes_ = argtypes
- _restype_ = restype
- _flags_ = flags
- _win_functype_cache[(restype, argtypes, flags)] = WinFunctionType
- return WinFunctionType
- if WINFUNCTYPE.__doc__:
- WINFUNCTYPE.__doc__ = CFUNCTYPE.__doc__.replace("CFUNCTYPE", "WINFUNCTYPE")
-
-elif _os.name == "posix":
- from _ctypes import dlopen as _dlopen
-
-from _ctypes import sizeof, byref, addressof, alignment, resize
-from _ctypes import get_errno, set_errno
-from _ctypes import _SimpleCData
-
-def _check_size(typ, typecode=None):
- # Check if sizeof(ctypes_type) against struct.calcsize. This
- # should protect somewhat against a misconfigured libffi.
- from struct import calcsize
- if typecode is None:
- # Most _type_ codes are the same as used in struct
- typecode = typ._type_
- actual, required = sizeof(typ), calcsize(typecode)
- if actual != required:
- raise SystemError("sizeof(%s) wrong: %d instead of %d" % \
- (typ, actual, required))
-
-class py_object(_SimpleCData):
- _type_ = "O"
- def __repr__(self):
- try:
- return super(py_object, self).__repr__()
- except ValueError:
- return "%s()" % type(self).__name__
-_check_size(py_object, "P")
-
-class c_short(_SimpleCData):
- _type_ = "h"
-_check_size(c_short)
-
-class c_ushort(_SimpleCData):
- _type_ = "H"
-_check_size(c_ushort)
-
-class c_long(_SimpleCData):
- _type_ = "l"
-_check_size(c_long)
-
-class c_ulong(_SimpleCData):
- _type_ = "L"
-_check_size(c_ulong)
-
-if _calcsize("i") == _calcsize("l"):
- # if int and long have the same size, make c_int an alias for c_long
- c_int = c_long
- c_uint = c_ulong
-else:
- class c_int(_SimpleCData):
- _type_ = "i"
- _check_size(c_int)
-
- class c_uint(_SimpleCData):
- _type_ = "I"
- _check_size(c_uint)
-
-class c_float(_SimpleCData):
- _type_ = "f"
-_check_size(c_float)
-
-class c_double(_SimpleCData):
- _type_ = "d"
-_check_size(c_double)
-
-class c_longdouble(_SimpleCData):
- _type_ = "g"
-if sizeof(c_longdouble) == sizeof(c_double):
- c_longdouble = c_double
-
-if _calcsize("l") == _calcsize("q"):
- # if long and long long have the same size, make c_longlong an alias for c_long
- c_longlong = c_long
- c_ulonglong = c_ulong
-else:
- class c_longlong(_SimpleCData):
- _type_ = "q"
- _check_size(c_longlong)
-
- class c_ulonglong(_SimpleCData):
- _type_ = "Q"
- ## def from_param(cls, val):
- ## return ('d', float(val), val)
- ## from_param = classmethod(from_param)
- _check_size(c_ulonglong)
-
-class c_ubyte(_SimpleCData):
- _type_ = "B"
-c_ubyte.__ctype_le__ = c_ubyte.__ctype_be__ = c_ubyte
-# backward compatibility:
-##c_uchar = c_ubyte
-_check_size(c_ubyte)
-
-class c_byte(_SimpleCData):
- _type_ = "b"
-c_byte.__ctype_le__ = c_byte.__ctype_be__ = c_byte
-_check_size(c_byte)
-
-class c_char(_SimpleCData):
- _type_ = "c"
-c_char.__ctype_le__ = c_char.__ctype_be__ = c_char
-_check_size(c_char)
-
-class c_char_p(_SimpleCData):
- _type_ = "z"
- if _os.name == "nt":
- def __repr__(self):
- if not windll.kernel32.IsBadStringPtrA(self, -1):
- return "%s(%r)" % (self.__class__.__name__, self.value)
- return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value)
- else:
- def __repr__(self):
- return "%s(%s)" % (self.__class__.__name__, cast(self, c_void_p).value)
-_check_size(c_char_p, "P")
-
-class c_void_p(_SimpleCData):
- _type_ = "P"
-c_voidp = c_void_p # backwards compatibility (to a bug)
-_check_size(c_void_p)
-
-class c_bool(_SimpleCData):
- _type_ = "?"
-
-from _ctypes import POINTER, pointer, _pointer_type_cache
-
-try:
- from _ctypes import set_conversion_mode
-except ImportError:
- pass
-else:
- if _os.name in ("nt", "ce"):
- set_conversion_mode("mbcs", "ignore")
- else:
- set_conversion_mode("ascii", "strict")
-
- class c_wchar_p(_SimpleCData):
- _type_ = "Z"
-
- class c_wchar(_SimpleCData):
- _type_ = "u"
-
- POINTER(c_wchar).from_param = c_wchar_p.from_param #_SimpleCData.c_wchar_p_from_param
-
- def create_unicode_buffer(init, size=None):
- """create_unicode_buffer(aString) -> character array
- create_unicode_buffer(anInteger) -> character array
- create_unicode_buffer(aString, anInteger) -> character array
- """
- if isinstance(init, (str, unicode)):
- if size is None:
- size = len(init)+1
- buftype = c_wchar * size
- buf = buftype()
- buf.value = init
- return buf
- elif isinstance(init, (int, long)):
- buftype = c_wchar * init
- buf = buftype()
- return buf
- raise TypeError(init)
-
-POINTER(c_char).from_param = c_char_p.from_param #_SimpleCData.c_char_p_from_param
-
-# XXX Deprecated
-def SetPointerType(pointer, cls):
- if _pointer_type_cache.get(cls, None) is not None:
- raise RuntimeError("This type already exists in the cache")
- if id(pointer) not in _pointer_type_cache:
- raise RuntimeError("What's this???")
- pointer.set_type(cls)
- _pointer_type_cache[cls] = pointer
- del _pointer_type_cache[id(pointer)]
-
-# XXX Deprecated
-def ARRAY(typ, len):
- return typ * len
-
-################################################################
-
-
-class CDLL(object):
- """An instance of this class represents a loaded dll/shared
- library, exporting functions using the standard C calling
- convention (named 'cdecl' on Windows).
-
- The exported functions can be accessed as attributes, or by
- indexing with the function name. Examples:
-
- .qsort -> callable object
- ['qsort'] -> callable object
-
- Calling the functions releases the Python GIL during the call and
- reacquires it afterwards.
- """
- _func_flags_ = _FUNCFLAG_CDECL
- _func_restype_ = c_int
-
- def __init__(self, name, mode=DEFAULT_MODE, handle=None,
- use_errno=False,
- use_last_error=False):
- self._name = name
- flags = self._func_flags_
- if use_errno:
- flags |= _FUNCFLAG_USE_ERRNO
- if use_last_error:
- flags |= _FUNCFLAG_USE_LASTERROR
-
- class _FuncPtr(_CFuncPtr):
- _flags_ = flags
- _restype_ = self._func_restype_
- self._FuncPtr = _FuncPtr
-
- if handle is None:
- self._handle = _ffi.CDLL(name, mode)
- else:
- self._handle = handle
-
- def __repr__(self):
- return "<%s '%s', handle %r at %x>" % \
- (self.__class__.__name__, self._name,
- (self._handle),
- id(self) & (_sys.maxint*2 + 1))
-
-
- def __getattr__(self, name):
- if name.startswith('__') and name.endswith('__'):
- raise AttributeError(name)
- func = self.__getitem__(name)
- setattr(self, name, func)
- return func
-
- def __getitem__(self, name_or_ordinal):
- func = self._FuncPtr((name_or_ordinal, self))
- if not isinstance(name_or_ordinal, (int, long)):
- func.__name__ = name_or_ordinal
- return func
-
-class PyDLL(CDLL):
- """This class represents the Python library itself. It allows to
- access Python API functions. The GIL is not released, and
- Python exceptions are handled correctly.
- """
- _func_flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
-
-if _os.name in ("nt", "ce"):
-
- class WinDLL(CDLL):
- """This class represents a dll exporting functions using the
- Windows stdcall calling convention.
- """
- _func_flags_ = _FUNCFLAG_STDCALL
-
- # XXX Hm, what about HRESULT as normal parameter?
- # Mustn't it derive from c_long then?
- from _ctypes import _check_HRESULT, _SimpleCData
- class HRESULT(_SimpleCData):
- _type_ = "l"
- # _check_retval_ is called with the function's result when it
- # is used as restype. It checks for the FAILED bit, and
- # raises a WindowsError if it is set.
- #
- # The _check_retval_ method is implemented in C, so that the
- # method definition itself is not included in the traceback
- # when it raises an error - that is what we want (and Python
- # doesn't have a way to raise an exception in the caller's
- # frame).
- _check_retval_ = _check_HRESULT
-
- class OleDLL(CDLL):
- """This class represents a dll exporting functions using the
- Windows stdcall calling convention, and returning HRESULT.
- HRESULT error values are automatically raised as WindowsError
- exceptions.
- """
- _func_flags_ = _FUNCFLAG_STDCALL
- _func_restype_ = HRESULT
-
-class LibraryLoader(object):
- def __init__(self, dlltype):
- self._dlltype = dlltype
-
- def __getattr__(self, name):
- if name[0] == '_':
- raise AttributeError(name)
- dll = self._dlltype(name)
- setattr(self, name, dll)
- return dll
-
- def __getitem__(self, name):
- return getattr(self, name)
-
- def LoadLibrary(self, name):
- return self._dlltype(name)
-
-cdll = LibraryLoader(CDLL)
-pydll = LibraryLoader(PyDLL)
-
-if _os.name in ("nt", "ce"):
- pythonapi = PyDLL("python dll", None, _sys.dllhandle)
-elif _sys.platform == "cygwin":
- pythonapi = PyDLL("libpython%d.%d.dll" % _sys.version_info[:2])
-else:
- pythonapi = PyDLL(None)
-
-
-if _os.name in ("nt", "ce"):
- windll = LibraryLoader(WinDLL)
- oledll = LibraryLoader(OleDLL)
-
- if _os.name == "nt":
- GetLastError = windll.kernel32.GetLastError
- else:
- GetLastError = windll.coredll.GetLastError
- from _ctypes import get_last_error, set_last_error
-
- def WinError(code=None, descr=None):
- if code is None:
- code = GetLastError()
- if descr is None:
- descr = FormatError(code).strip()
- return WindowsError(code, descr)
-
-_pointer_type_cache[None] = c_void_p
-
-if sizeof(c_uint) == sizeof(c_void_p):
- c_size_t = c_uint
- c_ssize_t = c_int
-elif sizeof(c_ulong) == sizeof(c_void_p):
- c_size_t = c_ulong
- c_ssize_t = c_long
-elif sizeof(c_ulonglong) == sizeof(c_void_p):
- c_size_t = c_ulonglong
- c_ssize_t = c_longlong
-
-# functions
-
-from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, _cast_addr
-
-## void *memmove(void *, const void *, size_t);
-memmove = CFUNCTYPE(c_void_p, c_void_p, c_void_p, c_size_t)(_memmove_addr)
-
-## void *memset(void *, int, size_t)
-memset = CFUNCTYPE(c_void_p, c_void_p, c_int, c_size_t)(_memset_addr)
-
-def PYFUNCTYPE(restype, *argtypes):
- class CFunctionType(_CFuncPtr):
- _argtypes_ = argtypes
- _restype_ = restype
- _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
- return CFunctionType
-
-def cast(obj, typ):
- try:
- c_void_p.from_param(obj)
- except TypeError, e:
- raise ArgumentError(str(e))
- return _cast_addr(obj, obj, typ)
-
-_string_at = PYFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
-def string_at(ptr, size=-1):
- """string_at(addr[, size]) -> string
-
- Return the string at addr."""
- return _string_at(ptr, size)
-
-try:
- from _ctypes import _wstring_at_addr
-except ImportError:
- pass
-else:
- _wstring_at = PYFUNCTYPE(py_object, c_void_p, c_int)(_wstring_at_addr)
- def wstring_at(ptr, size=-1):
- """wstring_at(addr[, size]) -> string
-
- Return the string at addr."""
- return _wstring_at(ptr, size)
-
-
-if _os.name in ("nt", "ce"): # COM stuff
- def DllGetClassObject(rclsid, riid, ppv):
- try:
- ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
- except ImportError:
- return -2147221231 # CLASS_E_CLASSNOTAVAILABLE
- else:
- return ccom.DllGetClassObject(rclsid, riid, ppv)
-
- def DllCanUnloadNow():
- try:
- ccom = __import__("comtypes.server.inprocserver", globals(), locals(), ['*'])
- except ImportError:
- return 0 # S_OK
- return ccom.DllCanUnloadNow()
-
-from ctypes._endian import BigEndianStructure, LittleEndianStructure
-
-# Fill in specifically-sized types
-c_int8 = c_byte
-c_uint8 = c_ubyte
-for kind in [c_short, c_int, c_long, c_longlong]:
- if sizeof(kind) == 2: c_int16 = kind
- elif sizeof(kind) == 4: c_int32 = kind
- elif sizeof(kind) == 8: c_int64 = kind
-for kind in [c_ushort, c_uint, c_ulong, c_ulonglong]:
- if sizeof(kind) == 2: c_uint16 = kind
- elif sizeof(kind) == 4: c_uint32 = kind
- elif sizeof(kind) == 8: c_uint64 = kind
-del(kind)
-
-# XXX for whatever reasons, creating the first instance of a callback
-# function is needed for the unittests on Win64 to succeed. This MAY
-# be a compiler bug, since the problem occurs only when _ctypes is
-# compiled with the MS SDK compiler. Or an uninitialized variable?
-CFUNCTYPE(c_int)(lambda: None)
diff --git a/lib-python/modified-2.7/ctypes/_endian.py b/lib-python/modified-2.7/ctypes/_endian.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/_endian.py
+++ /dev/null
@@ -1,60 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-import sys
-from ctypes import *
-
-_array_type = type(c_int * 3)
-
-def _other_endian(typ):
- """Return the type with the 'other' byte order. Simple types like
- c_int and so on already have __ctype_be__ and __ctype_le__
- attributes which contain the types, for more complicated types
- only arrays are supported.
- """
- try:
- return getattr(typ, _OTHER_ENDIAN)
- except AttributeError:
- if type(typ) == _array_type:
- return _other_endian(typ._type_) * typ._length_
- raise TypeError("This type does not support other endian: %s" % typ)
-
-class _swapped_meta(type(Structure)):
- def __setattr__(self, attrname, value):
- if attrname == "_fields_":
- fields = []
- for desc in value:
- name = desc[0]
- typ = desc[1]
- rest = desc[2:]
- fields.append((name, _other_endian(typ)) + rest)
- value = fields
- super(_swapped_meta, self).__setattr__(attrname, value)
-
-################################################################
-
-# Note: The Structure metaclass checks for the *presence* (not the
-# value!) of a _swapped_bytes_ attribute to determine the bit order in
-# structures containing bit fields.
-
-if sys.byteorder == "little":
- _OTHER_ENDIAN = "__ctype_be__"
-
- LittleEndianStructure = Structure
-
- class BigEndianStructure(Structure):
- """Structure with big endian byte order"""
- __metaclass__ = _swapped_meta
- _swappedbytes_ = None
-
-elif sys.byteorder == "big":
- _OTHER_ENDIAN = "__ctype_le__"
-
- BigEndianStructure = Structure
- class LittleEndianStructure(Structure):
- """Structure with little endian byte order"""
- __metaclass__ = _swapped_meta
- _swappedbytes_ = None
-
-else:
- raise RuntimeError("Invalid byteorder")
diff --git a/lib-python/modified-2.7/ctypes/macholib/README.ctypes b/lib-python/modified-2.7/ctypes/macholib/README.ctypes
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/README.ctypes
+++ /dev/null
@@ -1,7 +0,0 @@
-Files in this directory from from Bob Ippolito's py2app.
-
-License: Any components of the py2app suite may be distributed under
-the MIT or PSF open source licenses.
-
-This is version 1.0, SVN revision 789, from 2006/01/25.
-The main repository is http://svn.red-bean.com/bob/macholib/trunk/macholib/
\ No newline at end of file
diff --git a/lib-python/modified-2.7/ctypes/macholib/__init__.py b/lib-python/modified-2.7/ctypes/macholib/__init__.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/__init__.py
+++ /dev/null
@@ -1,12 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-"""
-Enough Mach-O to make your head spin.
-
-See the relevant header files in /usr/include/mach-o
-
-And also Apple's documentation.
-"""
-
-__version__ = '1.0'
diff --git a/lib-python/modified-2.7/ctypes/macholib/dyld.py b/lib-python/modified-2.7/ctypes/macholib/dyld.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/dyld.py
+++ /dev/null
@@ -1,169 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-"""
-dyld emulation
-"""
-
-import os
-from framework import framework_info
-from dylib import dylib_info
-from itertools import *
-
-__all__ = [
- 'dyld_find', 'framework_find',
- 'framework_info', 'dylib_info',
-]
-
-# These are the defaults as per man dyld(1)
-#
-DEFAULT_FRAMEWORK_FALLBACK = [
- os.path.expanduser("~/Library/Frameworks"),
- "/Library/Frameworks",
- "/Network/Library/Frameworks",
- "/System/Library/Frameworks",
-]
-
-DEFAULT_LIBRARY_FALLBACK = [
- os.path.expanduser("~/lib"),
- "/usr/local/lib",
- "/lib",
- "/usr/lib",
-]
-
-def ensure_utf8(s):
- """Not all of PyObjC and Python understand unicode paths very well yet"""
- if isinstance(s, unicode):
- return s.encode('utf8')
- return s
-
-def dyld_env(env, var):
- if env is None:
- env = os.environ
- rval = env.get(var)
- if rval is None:
- return []
- return rval.split(':')
-
-def dyld_image_suffix(env=None):
- if env is None:
- env = os.environ
- return env.get('DYLD_IMAGE_SUFFIX')
-
-def dyld_framework_path(env=None):
- return dyld_env(env, 'DYLD_FRAMEWORK_PATH')
-
-def dyld_library_path(env=None):
- return dyld_env(env, 'DYLD_LIBRARY_PATH')
-
-def dyld_fallback_framework_path(env=None):
- return dyld_env(env, 'DYLD_FALLBACK_FRAMEWORK_PATH')
-
-def dyld_fallback_library_path(env=None):
- return dyld_env(env, 'DYLD_FALLBACK_LIBRARY_PATH')
-
-def dyld_image_suffix_search(iterator, env=None):
- """For a potential path iterator, add DYLD_IMAGE_SUFFIX semantics"""
- suffix = dyld_image_suffix(env)
- if suffix is None:
- return iterator
- def _inject(iterator=iterator, suffix=suffix):
- for path in iterator:
- if path.endswith('.dylib'):
- yield path[:-len('.dylib')] + suffix + '.dylib'
- else:
- yield path + suffix
- yield path
- return _inject()
-
-def dyld_override_search(name, env=None):
- # If DYLD_FRAMEWORK_PATH is set and this dylib_name is a
- # framework name, use the first file that exists in the framework
- # path if any. If there is none go on to search the DYLD_LIBRARY_PATH
- # if any.
-
- framework = framework_info(name)
-
- if framework is not None:
- for path in dyld_framework_path(env):
- yield os.path.join(path, framework['name'])
-
- # If DYLD_LIBRARY_PATH is set then use the first file that exists
- # in the path. If none use the original name.
- for path in dyld_library_path(env):
- yield os.path.join(path, os.path.basename(name))
-
-def dyld_executable_path_search(name, executable_path=None):
- # If we haven't done any searching and found a library and the
- # dylib_name starts with "@executable_path/" then construct the
- # library name.
- if name.startswith('@executable_path/') and executable_path is not None:
- yield os.path.join(executable_path, name[len('@executable_path/'):])
-
-def dyld_default_search(name, env=None):
- yield name
-
- framework = framework_info(name)
-
- if framework is not None:
- fallback_framework_path = dyld_fallback_framework_path(env)
- for path in fallback_framework_path:
- yield os.path.join(path, framework['name'])
-
- fallback_library_path = dyld_fallback_library_path(env)
- for path in fallback_library_path:
- yield os.path.join(path, os.path.basename(name))
-
- if framework is not None and not fallback_framework_path:
- for path in DEFAULT_FRAMEWORK_FALLBACK:
- yield os.path.join(path, framework['name'])
-
- if not fallback_library_path:
- for path in DEFAULT_LIBRARY_FALLBACK:
- yield os.path.join(path, os.path.basename(name))
-
-def dyld_find(name, executable_path=None, env=None):
- """
- Find a library or framework using dyld semantics
- """
- name = ensure_utf8(name)
- executable_path = ensure_utf8(executable_path)
- for path in dyld_image_suffix_search(chain(
- dyld_override_search(name, env),
- dyld_executable_path_search(name, executable_path),
- dyld_default_search(name, env),
- ), env):
- if os.path.isfile(path):
- return path
- raise ValueError("dylib %s could not be found" % (name,))
-
-def framework_find(fn, executable_path=None, env=None):
- """
- Find a framework using dyld semantics in a very loose manner.
-
- Will take input such as:
- Python
- Python.framework
- Python.framework/Versions/Current
- """
- try:
- return dyld_find(fn, executable_path=executable_path, env=env)
- except ValueError, e:
- pass
- fmwk_index = fn.rfind('.framework')
- if fmwk_index == -1:
- fmwk_index = len(fn)
- fn += '.framework'
- fn = os.path.join(fn, os.path.basename(fn[:fmwk_index]))
- try:
- return dyld_find(fn, executable_path=executable_path, env=env)
- except ValueError:
- raise e
-
-def test_dyld_find():
- env = {}
- assert dyld_find('libSystem.dylib') == '/usr/lib/libSystem.dylib'
- assert dyld_find('System.framework/System') == '/System/Library/Frameworks/System.framework/System'
-
-if __name__ == '__main__':
- test_dyld_find()
diff --git a/lib-python/modified-2.7/ctypes/macholib/dylib.py b/lib-python/modified-2.7/ctypes/macholib/dylib.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/dylib.py
+++ /dev/null
@@ -1,66 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-"""
-Generic dylib path manipulation
-"""
-
-import re
-
-__all__ = ['dylib_info']
-
-DYLIB_RE = re.compile(r"""(?x)
-(?P^.*)(?:^|/)
-(?P
- (?P\w+?)
- (?:\.(?P[^._]+))?
- (?:_(?P[^._]+))?
- \.dylib$
-)
-""")
-
-def dylib_info(filename):
- """
- A dylib name can take one of the following four forms:
- Location/Name.SomeVersion_Suffix.dylib
- Location/Name.SomeVersion.dylib
- Location/Name_Suffix.dylib
- Location/Name.dylib
-
- returns None if not found or a mapping equivalent to:
- dict(
- location='Location',
- name='Name.SomeVersion_Suffix.dylib',
- shortname='Name',
- version='SomeVersion',
- suffix='Suffix',
- )
-
- Note that SomeVersion and Suffix are optional and may be None
- if not present.
- """
- is_dylib = DYLIB_RE.match(filename)
- if not is_dylib:
- return None
- return is_dylib.groupdict()
-
-
-def test_dylib_info():
- def d(location=None, name=None, shortname=None, version=None, suffix=None):
- return dict(
- location=location,
- name=name,
- shortname=shortname,
- version=version,
- suffix=suffix
- )
- assert dylib_info('completely/invalid') is None
- assert dylib_info('completely/invalide_debug') is None
- assert dylib_info('P/Foo.dylib') == d('P', 'Foo.dylib', 'Foo')
- assert dylib_info('P/Foo_debug.dylib') == d('P', 'Foo_debug.dylib', 'Foo', suffix='debug')
- assert dylib_info('P/Foo.A.dylib') == d('P', 'Foo.A.dylib', 'Foo', 'A')
- assert dylib_info('P/Foo_debug.A.dylib') == d('P', 'Foo_debug.A.dylib', 'Foo_debug', 'A')
- assert dylib_info('P/Foo.A_debug.dylib') == d('P', 'Foo.A_debug.dylib', 'Foo', 'A', 'debug')
-
-if __name__ == '__main__':
- test_dylib_info()
diff --git a/lib-python/modified-2.7/ctypes/macholib/fetch_macholib b/lib-python/modified-2.7/ctypes/macholib/fetch_macholib
deleted file mode 100755
--- a/lib-python/modified-2.7/ctypes/macholib/fetch_macholib
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/sh
-svn export --force http://svn.red-bean.com/bob/macholib/trunk/macholib/ .
diff --git a/lib-python/modified-2.7/ctypes/macholib/fetch_macholib.bat b/lib-python/modified-2.7/ctypes/macholib/fetch_macholib.bat
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/fetch_macholib.bat
+++ /dev/null
@@ -1,1 +0,0 @@
-svn export --force http://svn.red-bean.com/bob/macholib/trunk/macholib/ .
diff --git a/lib-python/modified-2.7/ctypes/macholib/framework.py b/lib-python/modified-2.7/ctypes/macholib/framework.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/macholib/framework.py
+++ /dev/null
@@ -1,68 +0,0 @@
-######################################################################
-# This file should be kept compatible with Python 2.3, see PEP 291. #
-######################################################################
-"""
-Generic framework path manipulation
-"""
-
-import re
-
-__all__ = ['framework_info']
-
-STRICT_FRAMEWORK_RE = re.compile(r"""(?x)
-(?P^.*)(?:^|/)
-(?P
- (?P\w+).framework/
- (?:Versions/(?P[^/]+)/)?
- (?P=shortname)
- (?:_(?P[^_]+))?
-)$
-""")
-
-def framework_info(filename):
- """
- A framework name can take one of the following four forms:
- Location/Name.framework/Versions/SomeVersion/Name_Suffix
- Location/Name.framework/Versions/SomeVersion/Name
- Location/Name.framework/Name_Suffix
- Location/Name.framework/Name
-
- returns None if not found, or a mapping equivalent to:
- dict(
- location='Location',
- name='Name.framework/Versions/SomeVersion/Name_Suffix',
- shortname='Name',
- version='SomeVersion',
- suffix='Suffix',
- )
-
- Note that SomeVersion and Suffix are optional and may be None
- if not present
- """
- is_framework = STRICT_FRAMEWORK_RE.match(filename)
- if not is_framework:
- return None
- return is_framework.groupdict()
-
-def test_framework_info():
- def d(location=None, name=None, shortname=None, version=None, suffix=None):
- return dict(
- location=location,
- name=name,
- shortname=shortname,
- version=version,
- suffix=suffix
- )
- assert framework_info('completely/invalid') is None
- assert framework_info('completely/invalid/_debug') is None
- assert framework_info('P/F.framework') is None
- assert framework_info('P/F.framework/_debug') is None
- assert framework_info('P/F.framework/F') == d('P', 'F.framework/F', 'F')
- assert framework_info('P/F.framework/F_debug') == d('P', 'F.framework/F_debug', 'F', suffix='debug')
- assert framework_info('P/F.framework/Versions') is None
- assert framework_info('P/F.framework/Versions/A') is None
- assert framework_info('P/F.framework/Versions/A/F') == d('P', 'F.framework/Versions/A/F', 'F', 'A')
- assert framework_info('P/F.framework/Versions/A/F_debug') == d('P', 'F.framework/Versions/A/F_debug', 'F', 'A', 'debug')
-
-if __name__ == '__main__':
- test_framework_info()
diff --git a/lib-python/modified-2.7/ctypes/test/__init__.py b/lib-python/modified-2.7/ctypes/test/__init__.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/__init__.py
+++ /dev/null
@@ -1,221 +0,0 @@
-import os, sys, unittest, getopt, time
-
-use_resources = []
-
-class ResourceDenied(Exception):
- """Test skipped because it requested a disallowed resource.
-
- This is raised when a test calls requires() for a resource that
- has not be enabled. Resources are defined by test modules.
- """
-
-def is_resource_enabled(resource):
- """Test whether a resource is enabled.
-
- If the caller's module is __main__ then automatically return True."""
- if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
- return True
- result = use_resources is not None and \
- (resource in use_resources or "*" in use_resources)
- if not result:
- _unavail[resource] = None
- return result
-
-_unavail = {}
-def requires(resource, msg=None):
- """Raise ResourceDenied if the specified resource is not available.
-
- If the caller's module is __main__ then automatically return True."""
- # see if the caller's module is __main__ - if so, treat as if
- # the resource was set
- if sys._getframe().f_back.f_globals.get("__name__") == "__main__":
- return
- if not is_resource_enabled(resource):
- if msg is None:
- msg = "Use of the `%s' resource not enabled" % resource
- raise ResourceDenied(msg)
-
-def find_package_modules(package, mask):
- import fnmatch
- if (hasattr(package, "__loader__") and
- hasattr(package.__loader__, '_files')):
- path = package.__name__.replace(".", os.path.sep)
- mask = os.path.join(path, mask)
- for fnm in package.__loader__._files.iterkeys():
- if fnmatch.fnmatchcase(fnm, mask):
- yield os.path.splitext(fnm)[0].replace(os.path.sep, ".")
- else:
- path = package.__path__[0]
- for fnm in os.listdir(path):
- if fnmatch.fnmatchcase(fnm, mask):
- yield "%s.%s" % (package.__name__, os.path.splitext(fnm)[0])
-
-def get_tests(package, mask, verbosity, exclude=()):
- """Return a list of skipped test modules, and a list of test cases."""
- tests = []
- skipped = []
- for modname in find_package_modules(package, mask):
- if modname.split(".")[-1] in exclude:
- skipped.append(modname)
- if verbosity > 1:
- print >> sys.stderr, "Skipped %s: excluded" % modname
- continue
- try:
- mod = __import__(modname, globals(), locals(), ['*'])
- except ResourceDenied, detail:
- skipped.append(modname)
- if verbosity > 1:
- print >> sys.stderr, "Skipped %s: %s" % (modname, detail)
- continue
- for name in dir(mod):
- if name.startswith("_"):
- continue
- o = getattr(mod, name)
- if type(o) is type(unittest.TestCase) and issubclass(o, unittest.TestCase):
- tests.append(o)
- return skipped, tests
-
-def usage():
- print __doc__
- return 1
-
-def test_with_refcounts(runner, verbosity, testcase):
- """Run testcase several times, tracking reference counts."""
- import gc
- import ctypes
- ptc = ctypes._pointer_type_cache.copy()
- cfc = ctypes._c_functype_cache.copy()
- wfc = ctypes._win_functype_cache.copy()
-
- # when searching for refcount leaks, we have to manually reset any
- # caches that ctypes has.
- def cleanup():
- ctypes._pointer_type_cache = ptc.copy()
- ctypes._c_functype_cache = cfc.copy()
- ctypes._win_functype_cache = wfc.copy()
- gc.collect()
-
- test = unittest.makeSuite(testcase)
- for i in range(5):
- rc = sys.gettotalrefcount()
- runner.run(test)
- cleanup()
- COUNT = 5
- refcounts = [None] * COUNT
- for i in range(COUNT):
- rc = sys.gettotalrefcount()
- runner.run(test)
- cleanup()
- refcounts[i] = sys.gettotalrefcount() - rc
- if filter(None, refcounts):
- print "%s leaks:\n\t" % testcase, refcounts
- elif verbosity:
- print "%s: ok." % testcase
-
-class TestRunner(unittest.TextTestRunner):
- def run(self, test, skipped):
- "Run the given test case or test suite."
- # Same as unittest.TextTestRunner.run, except that it reports
- # skipped tests.
- result = self._makeResult()
- startTime = time.time()
- test(result)
- stopTime = time.time()
- timeTaken = stopTime - startTime
- result.printErrors()
- self.stream.writeln(result.separator2)
- run = result.testsRun
- if _unavail: #skipped:
- requested = _unavail.keys()
- requested.sort()
- self.stream.writeln("Ran %d test%s in %.3fs (%s module%s skipped)" %
- (run, run != 1 and "s" or "", timeTaken,
- len(skipped),
- len(skipped) != 1 and "s" or ""))
- self.stream.writeln("Unavailable resources: %s" % ", ".join(requested))
- else:
- self.stream.writeln("Ran %d test%s in %.3fs" %
- (run, run != 1 and "s" or "", timeTaken))
- self.stream.writeln()
- if not result.wasSuccessful():
- self.stream.write("FAILED (")
- failed, errored = map(len, (result.failures, result.errors))
- if failed:
- self.stream.write("failures=%d" % failed)
- if errored:
- if failed: self.stream.write(", ")
- self.stream.write("errors=%d" % errored)
- self.stream.writeln(")")
- else:
- self.stream.writeln("OK")
- return result
-
-
-def main(*packages):
- try:
- opts, args = getopt.getopt(sys.argv[1:], "rqvu:x:")
- except getopt.error:
- return usage()
-
- verbosity = 1
- search_leaks = False
- exclude = []
- for flag, value in opts:
- if flag == "-q":
- verbosity -= 1
- elif flag == "-v":
- verbosity += 1
- elif flag == "-r":
- try:
- sys.gettotalrefcount
- except AttributeError:
- print >> sys.stderr, "-r flag requires Python debug build"
- return -1
- search_leaks = True
- elif flag == "-u":
- use_resources.extend(value.split(","))
- elif flag == "-x":
- exclude.extend(value.split(","))
-
- mask = "test_*.py"
- if args:
- mask = args[0]
-
- for package in packages:
- run_tests(package, mask, verbosity, search_leaks, exclude)
-
-
-def run_tests(package, mask, verbosity, search_leaks, exclude):
- skipped, testcases = get_tests(package, mask, verbosity, exclude)
- runner = TestRunner(verbosity=verbosity)
-
- suites = [unittest.makeSuite(o) for o in testcases]
- suite = unittest.TestSuite(suites)
- result = runner.run(suite, skipped)
-
- if search_leaks:
- # hunt for refcount leaks
- runner = BasicTestRunner()
- for t in testcases:
- test_with_refcounts(runner, verbosity, t)
-
- return bool(result.errors)
-
-class BasicTestRunner:
- def run(self, test):
- result = unittest.TestResult()
- test(result)
- return result
-
-def xfail(method):
- """
- Poor's man xfail: remove it when all the failures have been fixed
- """
- def new_method(self, *args, **kwds):
- try:
- method(self, *args, **kwds)
- except:
- pass
- else:
- self.assertTrue(False, "DID NOT RAISE")
- return new_method
diff --git a/lib-python/modified-2.7/ctypes/test/runtests.py b/lib-python/modified-2.7/ctypes/test/runtests.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/runtests.py
+++ /dev/null
@@ -1,19 +0,0 @@
-"""Usage: runtests.py [-q] [-r] [-v] [-u resources] [mask]
-
-Run all tests found in this directory, and print a summary of the results.
-Command line flags:
- -q quiet mode: don't prnt anything while the tests are running
- -r run tests repeatedly, look for refcount leaks
- -u
- Add resources to the lits of allowed resources. '*' allows all
- resources.
- -v verbose mode: print the test currently executed
- -x
- Exclude specified tests.
- mask mask to select filenames containing testcases, wildcards allowed
-"""
-import sys
-import ctypes.test
-
-if __name__ == "__main__":
- sys.exit(ctypes.test.main(ctypes.test))
diff --git a/lib-python/modified-2.7/ctypes/test/test_anon.py b/lib-python/modified-2.7/ctypes/test/test_anon.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/test_anon.py
+++ /dev/null
@@ -1,60 +0,0 @@
-import unittest
-from ctypes import *
-
-class AnonTest(unittest.TestCase):
-
- def test_anon(self):
- class ANON(Union):
- _fields_ = [("a", c_int),
- ("b", c_int)]
-
- class Y(Structure):
- _fields_ = [("x", c_int),
- ("_", ANON),
- ("y", c_int)]
- _anonymous_ = ["_"]
-
- self.assertEqual(Y.a.offset, sizeof(c_int))
- self.assertEqual(Y.b.offset, sizeof(c_int))
-
- self.assertEqual(ANON.a.offset, 0)
- self.assertEqual(ANON.b.offset, 0)
-
- def test_anon_nonseq(self):
- # TypeError: _anonymous_ must be a sequence
- self.assertRaises(TypeError,
- lambda: type(Structure)("Name",
- (Structure,),
- {"_fields_": [], "_anonymous_": 42}))
-
- def test_anon_nonmember(self):
- # AttributeError: type object 'Name' has no attribute 'x'
- self.assertRaises(AttributeError,
- lambda: type(Structure)("Name",
- (Structure,),
- {"_fields_": [],
- "_anonymous_": ["x"]}))
-
- def test_nested(self):
- class ANON_S(Structure):
- _fields_ = [("a", c_int)]
-
- class ANON_U(Union):
- _fields_ = [("_", ANON_S),
- ("b", c_int)]
- _anonymous_ = ["_"]
-
- class Y(Structure):
- _fields_ = [("x", c_int),
- ("_", ANON_U),
- ("y", c_int)]
- _anonymous_ = ["_"]
-
- self.assertEqual(Y.x.offset, 0)
- self.assertEqual(Y.a.offset, sizeof(c_int))
- self.assertEqual(Y.b.offset, sizeof(c_int))
- self.assertEqual(Y._.offset, sizeof(c_int))
- self.assertEqual(Y.y.offset, sizeof(c_int) * 2)
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/lib-python/modified-2.7/ctypes/test/test_array_in_pointer.py b/lib-python/modified-2.7/ctypes/test/test_array_in_pointer.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/test_array_in_pointer.py
+++ /dev/null
@@ -1,64 +0,0 @@
-import unittest
-from ctypes import *
-from binascii import hexlify
-import re
-
-def dump(obj):
- # helper function to dump memory contents in hex, with a hyphen
- # between the bytes.
- h = hexlify(memoryview(obj))
- return re.sub(r"(..)", r"\1-", h)[:-1]
-
-
-class Value(Structure):
- _fields_ = [("val", c_byte)]
-
-class Container(Structure):
- _fields_ = [("pvalues", POINTER(Value))]
-
-class Test(unittest.TestCase):
- def test(self):
- # create an array of 4 values
- val_array = (Value * 4)()
-
- # create a container, which holds a pointer to the pvalues array.
- c = Container()
- c.pvalues = val_array
-
- # memory contains 4 NUL bytes now, that's correct
- self.assertEqual("00-00-00-00", dump(val_array))
-
- # set the values of the array through the pointer:
- for i in range(4):
- c.pvalues[i].val = i + 1
-
- values = [c.pvalues[i].val for i in range(4)]
-
- # These are the expected results: here s the bug!
- self.assertEqual(
- (values, dump(val_array)),
- ([1, 2, 3, 4], "01-02-03-04")
- )
-
- def test_2(self):
-
- val_array = (Value * 4)()
-
- # memory contains 4 NUL bytes now, that's correct
- self.assertEqual("00-00-00-00", dump(val_array))
-
- ptr = cast(val_array, POINTER(Value))
- # set the values of the array through the pointer:
- for i in range(4):
- ptr[i].val = i + 1
-
- values = [ptr[i].val for i in range(4)]
-
- # These are the expected results: here s the bug!
- self.assertEqual(
- (values, dump(val_array)),
- ([1, 2, 3, 4], "01-02-03-04")
- )
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/lib-python/modified-2.7/ctypes/test/test_arrays.py b/lib-python/modified-2.7/ctypes/test/test_arrays.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/test_arrays.py
+++ /dev/null
@@ -1,145 +0,0 @@
-import unittest
-from ctypes import *
-from test.test_support import impl_detail
-
-formats = "bBhHiIlLqQfd"
-
-# c_longdouble commented out for PyPy, look at the commend in test_longdouble
-formats = c_byte, c_ubyte, c_short, c_ushort, c_int, c_uint, \
- c_long, c_ulonglong, c_float, c_double #, c_longdouble
-
-class ArrayTestCase(unittest.TestCase):
-
- @impl_detail('long double not supported by PyPy', pypy=False)
- def test_longdouble(self):
- """
- This test is empty. It's just here to remind that we commented out
- c_longdouble in "formats". If pypy will ever supports c_longdouble, we
- should kill this test and uncomment c_longdouble inside formats.
- """
-
- def test_simple(self):
- # create classes holding simple numeric types, and check
- # various properties.
-
- init = range(15, 25)
-
- for fmt in formats:
- alen = len(init)
- int_array = ARRAY(fmt, alen)
-
- ia = int_array(*init)
- # length of instance ok?
- self.assertEqual(len(ia), alen)
-
- # slot values ok?
- values = [ia[i] for i in range(len(init))]
- self.assertEqual(values, init)
-
- # change the items
- from operator import setitem
- new_values = range(42, 42+alen)
- [setitem(ia, n, new_values[n]) for n in range(alen)]
- values = [ia[i] for i in range(len(init))]
- self.assertEqual(values, new_values)
-
- # are the items initialized to 0?
- ia = int_array()
- values = [ia[i] for i in range(len(init))]
- self.assertEqual(values, [0] * len(init))
-
- # Too many initializers should be caught
- self.assertRaises(IndexError, int_array, *range(alen*2))
-
- CharArray = ARRAY(c_char, 3)
-
- ca = CharArray("a", "b", "c")
-
- # Should this work? It doesn't:
- # CharArray("abc")
- self.assertRaises(TypeError, CharArray, "abc")
-
- self.assertEqual(ca[0], "a")
- self.assertEqual(ca[1], "b")
- self.assertEqual(ca[2], "c")
- self.assertEqual(ca[-3], "a")
- self.assertEqual(ca[-2], "b")
- self.assertEqual(ca[-1], "c")
-
- self.assertEqual(len(ca), 3)
-
- # slicing is now supported, but not extended slicing (3-argument)!
- from operator import getslice, delitem
- self.assertRaises(TypeError, getslice, ca, 0, 1, -1)
-
- # cannot delete items
- self.assertRaises(TypeError, delitem, ca, 0)
-
- def test_numeric_arrays(self):
-
- alen = 5
-
- numarray = ARRAY(c_int, alen)
-
- na = numarray()
- values = [na[i] for i in range(alen)]
- self.assertEqual(values, [0] * alen)
-
- na = numarray(*[c_int()] * alen)
- values = [na[i] for i in range(alen)]
- self.assertEqual(values, [0]*alen)
-
- na = numarray(1, 2, 3, 4, 5)
- values = [i for i in na]
- self.assertEqual(values, [1, 2, 3, 4, 5])
-
- na = numarray(*map(c_int, (1, 2, 3, 4, 5)))
- values = [i for i in na]
- self.assertEqual(values, [1, 2, 3, 4, 5])
-
- def test_classcache(self):
- self.assertTrue(not ARRAY(c_int, 3) is ARRAY(c_int, 4))
- self.assertTrue(ARRAY(c_int, 3) is ARRAY(c_int, 3))
-
- def test_from_address(self):
- # Failed with 0.9.8, reported by JUrner
- p = create_string_buffer("foo")
- sz = (c_char * 3).from_address(addressof(p))
- self.assertEqual(sz[:], "foo")
- self.assertEqual(sz[::], "foo")
- self.assertEqual(sz[::-1], "oof")
- self.assertEqual(sz[::3], "f")
- self.assertEqual(sz[1:4:2], "o")
- self.assertEqual(sz.value, "foo")
-
- try:
- create_unicode_buffer
- except NameError:
- pass
- else:
- def test_from_addressW(self):
- p = create_unicode_buffer("foo")
- sz = (c_wchar * 3).from_address(addressof(p))
- self.assertEqual(sz[:], "foo")
- self.assertEqual(sz[::], "foo")
- self.assertEqual(sz[::-1], "oof")
- self.assertEqual(sz[::3], "f")
- self.assertEqual(sz[1:4:2], "o")
- self.assertEqual(sz.value, "foo")
-
- def test_cache(self):
- # Array types are cached internally in the _ctypes extension,
- # in a WeakValueDictionary. Make sure the array type is
- # removed from the cache when the itemtype goes away. This
- # test will not fail, but will show a leak in the testsuite.
-
- # Create a new type:
- class my_int(c_int):
- pass
- # Create a new array type based on it:
- t1 = my_int * 1
- t2 = my_int * 1
- self.assertTrue(t1 is t2)
-
-if __name__ == '__main__':
- unittest.main()
diff --git a/lib-python/modified-2.7/ctypes/test/test_as_parameter.py b/lib-python/modified-2.7/ctypes/test/test_as_parameter.py
deleted file mode 100644
--- a/lib-python/modified-2.7/ctypes/test/test_as_parameter.py
+++ /dev/null
@@ -1,227 +0,0 @@
-import unittest
-from ctypes import *
-import _ctypes_test
-
-dll = CDLL(_ctypes_test.__file__)
-
-try:
- CALLBACK_FUNCTYPE = WINFUNCTYPE
-except NameError:
- # fake to enable this test on Linux
- CALLBACK_FUNCTYPE = CFUNCTYPE
-
-class POINT(Structure):
- _fields_ = [("x", c_int), ("y", c_int)]
-
-class BasicWrapTestCase(unittest.TestCase):
- def wrap(self, param):
- return param
-
- def test_wchar_parm(self):
- try:
- c_wchar
- except NameError:
- return
- f = dll._testfunc_i_bhilfd
- f.argtypes = [c_byte, c_wchar, c_int, c_long, c_float, c_double]
- result = f(self.wrap(1), self.wrap(u"x"), self.wrap(3), self.wrap(4), self.wrap(5.0), self.wrap(6.0))
- self.assertEqual(result, 139)
- self.assertTrue(type(result), int)
-
- def test_pointers(self):
- f = dll._testfunc_p_p
- f.restype = POINTER(c_int)
- f.argtypes = [POINTER(c_int)]
-
- # This only works if the value c_int(42) passed to the
- # function is still alive while the pointer (the result) is
- # used.
-
- v = c_int(42)
-
- self.assertEqual(pointer(v).contents.value, 42)
- result = f(self.wrap(pointer(v)))
- self.assertEqual(type(result), POINTER(c_int))
- self.assertEqual(result.contents.value, 42)
-
- # This on works...
- result = f(self.wrap(pointer(v)))
- self.assertEqual(result.contents.value, v.value)
-
- p = pointer(c_int(99))
- result = f(self.wrap(p))
- self.assertEqual(result.contents.value, 99)
-
- def test_shorts(self):
- f = dll._testfunc_callback_i_if
-
- args = []
- expected = [262144, 131072, 65536, 32768, 16384, 8192, 4096, 2048,
- 1024, 512, 256, 128, 64, 32, 16, 8, 4, 2, 1]
-
- def callback(v):
- args.append(v)
- return v
-
- CallBack = CFUNCTYPE(c_int, c_int)
-
- cb = CallBack(callback)
- f(self.wrap(2**18), self.wrap(cb))
- self.assertEqual(args, expected)
-
- ################################################################
-
- def test_callbacks(self):
- f = dll._testfunc_callback_i_if
- f.restype = c_int
-
- MyCallback = CFUNCTYPE(c_int, c_int)
-
- def callback(value):
- #print "called back with", value
- return value
-
- cb = MyCallback(callback)
-
- result = f(self.wrap(-10), self.wrap(cb))
- self.assertEqual(result, -18)
-
- # test with prototype
- f.argtypes = [c_int, MyCallback]
- cb = MyCallback(callback)
-
- result = f(self.wrap(-10), self.wrap(cb))
- self.assertEqual(result, -18)
-
- result = f(self.wrap(-10), self.wrap(cb))
- self.assertEqual(result, -18)
-
- AnotherCallback = CALLBACK_FUNCTYPE(c_int, c_int, c_int, c_int, c_int)
-
- # check that the prototype works: we call f with wrong
- # argument types
- cb = AnotherCallback(callback)
- self.assertRaises(ArgumentError, f, self.wrap(-10), self.wrap(cb))
-
- def test_callbacks_2(self):
- # Can also use simple datatypes as argument type specifiers
- # for the callback function.
- # In this case the call receives an instance of that type
- f = dll._testfunc_callback_i_if
- f.restype = c_int
-
- MyCallback = CFUNCTYPE(c_int, c_int)
-
- f.argtypes = [c_int, MyCallback]
-
- def callback(value):
- #print "called back with", value
- self.assertEqual(type(value), int)
- return value
-
- cb = MyCallback(callback)
- result = f(self.wrap(-10), self.wrap(cb))
- self.assertEqual(result, -18)
-
- def test_longlong_callbacks(self):
-
- f = dll._testfunc_callback_q_qf
- f.restype = c_longlong
-
- MyCallback = CFUNCTYPE(c_longlong, c_longlong)
-
- f.argtypes = [c_longlong, MyCallback]
-
- def callback(value):
- self.assertTrue(isinstance(value, (int, long)))
- return value & 0x7FFFFFFF
-
- cb = MyCallback(callback)
-
- self.assertEqual(13577625587, int(f(self.wrap(1000000000000), self.wrap(cb))))
-
- def test_byval(self):
- # without prototype
- ptin = POINT(1, 2)
- ptout = POINT()
- # EXPORT int _testfunc_byval(point in, point *pout)
- result = dll._testfunc_byval(ptin, byref(ptout))
- got = result, ptout.x, ptout.y
- expected = 3, 1, 2
- self.assertEqual(got, expected)
-
- # with prototype
- ptin = POINT(101, 102)
- ptout = POINT()
- dll._testfunc_byval.argtypes = (POINT, POINTER(POINT))
- dll._testfunc_byval.restype = c_int
- result = dll._testfunc_byval(self.wrap(ptin), byref(ptout))
- got = result, ptout.x, ptout.y
- expected = 203, 101, 102
- self.assertEqual(got, expected)
-
- def test_struct_return_2H(self):
- class S2H(Structure):
- _fields_ = [("x", c_short),
- ("y", c_short)]
- dll.ret_2h_func.restype = S2H
- dll.ret_2h_func.argtypes = [S2H]
- inp = S2H(99, 88)
- s2h = dll.ret_2h_func(self.wrap(inp))
- self.assertEqual((s2h.x, s2h.y), (99*2, 88*3))
-
- def test_struct_return_8H(self):
- class S8I(Structure):
- _fields_ = [("a", c_int),
- ("b", c_int),
- ("c", c_int),
- ("d", c_int),
- ("e", c_int),
- ("f", c_int),
- ("g", c_int),
- ("h", c_int)]
From noreply at buildbot.pypy.org Thu Apr 12 11:31:59 2012
From: noreply at buildbot.pypy.org (cfbolz)
Date: Thu, 12 Apr 2012 11:31:59 +0200 (CEST)
Subject: [pypy-commit] benchmarks default: run sympy benchmarks the normal
number of times, they are fast
Message-ID: <20120412093159.73E5582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Carl Friedrich Bolz
Branch:
Changeset: r181:062b90951d4b
Date: 2012-04-12 11:30 +0200
http://bitbucket.org/pypy/benchmarks/changeset/062b90951d4b/
Log: run sympy benchmarks the normal number of times, they are fast
diff --git a/benchmarks.py b/benchmarks.py
--- a/benchmarks.py
+++ b/benchmarks.py
@@ -49,8 +49,7 @@
for name in ['expand', 'integrate', 'sum', 'str']:
_register_new_bm('bm_sympy', 'sympy_' + name,
globals(), bm_env={'PYTHONPATH': relative('lib/sympy')},
- extra_args=['--benchmark=' + name],
- iteration_scaling=0.1)
+ extra_args=['--benchmark=' + name])
for name in ['xml', 'text']:
_register_new_bm('bm_genshi', 'genshi_' + name,
From noreply at buildbot.pypy.org Thu Apr 12 12:17:22 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 12:17:22 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: draft next numpy status update
Message-ID: <20120412101722.2311982F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: extradoc
Changeset: r4187:ab6a378f28e1
Date: 2012-04-12 10:47 +0200
http://bitbucket.org/pypy/extradoc/changeset/ab6a378f28e1/
Log: draft next numpy status update
diff --git a/blog/draft/numpy-status-update-3.rst b/blog/draft/numpy-status-update-3.rst
new file mode 100644
--- /dev/null
+++ b/blog/draft/numpy-status-update-3.rst
@@ -0,0 +1,43 @@
+NumPy on PyPy progress report
+=============================
+
+Hello.
+
+A lot of things happened in March, like `pycon`_. I was also busy doing other
+things (pictured), so apologies for the late numpy status update.
+
+However, a lot of things have happened and numpy continues to be on of the
+main points of entry for hacking on PyPy. Apologies to all the people who's
+patches I don't review in timely manner, but seriously, you do **a lot** of
+work.
+
+The list of things is definitely not exhaustive, and I might be forgetting
+important contributions. In a loose order:
+
+* Matti Picus made ``out`` parameter working for a lot (but not all)
+ functions.
+
+* We merged record dtypes support. The only missing dtypes left are complex
+ (important), datetime (less important) and object (which will probably
+ never be implemented).
+
+* Taavi Burns and others implemented lots of details, including lots of ufuncs.
+ On the completely unscientific measure of "implemented functions" on
+ `numpypy status page`_, we're close to 50% of numpy working. In reality
+ it might be more or less, but after complex dtypes we're getting very close
+ to running real programs.
+
+* Bool indexing of arrays of the same size should work, leaving only
+ arrays-of-ints indexing as the last missing element of fancy indexing.
+
+* I did some very early experiments on SSE. This work is **seriously**
+ preliminary - in fact the only implemented operation is addition of
+ float single-dimension numpy arrays. However, results are encouraging,
+ given that our assembler generator is far from ideal:
+
+ +
+
+Next step would be to just continue implementing missing features. Future
+is hard to predict, but we're not far off!
+
+.. _`pycon`: http://us.pycon.org
From noreply at buildbot.pypy.org Thu Apr 12 12:17:23 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 12:17:23 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: merge
Message-ID: <20120412101723.5AFBF82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: extradoc
Changeset: r4188:34f623b889f3
Date: 2012-04-12 12:17 +0200
http://bitbucket.org/pypy/extradoc/changeset/34f623b889f3/
Log: merge
diff --git a/blog/draft/py3k-status-update-3.rst b/blog/draft/py3k-status-update-3.rst
--- a/blog/draft/py3k-status-update-3.rst
+++ b/blog/draft/py3k-status-update-3.rst
@@ -1,22 +1,22 @@
Py3k status update #3
---------------------
-This is the third status update about my work on the `py3k branch`_, which I
+This is the third status update about our work on the `py3k branch`_, which we
can work on thanks to all of the people who donated_ to the `py3k proposal`_.
A lot of work has been done during the last month: as usual, the list of
-changes is too big to be reported in a detalied way, so this is just a summary
+changes is too big to be reported in a detailed way, so this is just a summary
of what happened.
One of the most active areas was killing old and deprecated features. In
particular, we killed support for the ``__cmp__`` special method and its
-counsins, the ``cmp`` builtin function and keyword argument for
-``list.sort()`` and ``sorted()``. Killing is easy, but then you have to fix
-all the places which breaks because of this, including all the types which
-relied on ``__cmp__`` to be comparable,, fixing all the tests which tried to
-order objects which are no longer ordeable now, or implementing new behavior
-like forbidding calling ``hash()`` on objects which implement ``__eq__`` but
-not ``__hash__``.
+counsins, the ``cmp`` builtin function and keyword argument for ``list.sort()``
+and ``sorted()``. Killing is easy, but then you have to fix all the places
+which breaks because of this, including all the types which relied on
+``__cmp__`` to be comparable and all the tests which tried to order objects
+which are no longer ordeable. New behavior, like forbidding calling ``hash()``
+on objects which implement ``__eq__`` but not ``__hash__``, also has to be
+implemented.
Among the other features, we killed lots of now-gone functions in the
``operator`` module, the builtins ``apply()``, ``reduce()`` and ``buffer``,
diff --git a/blog/draft/pycon-wrapup.rst b/blog/draft/pycon-wrapup.rst
--- a/blog/draft/pycon-wrapup.rst
+++ b/blog/draft/pycon-wrapup.rst
@@ -6,12 +6,13 @@
From the PyPy perspective, a lot at PyCon was about PyPy. Listing things:
-* David Beazley did an excellent keynote on trying to dive head-first into
- PyPy and at least partly failing. He however did not fail to explain
- bits and pieces about PyPy's architecture. `Video`_ is available.
+* David Beazley presented an excellent keynote describing his experience
+ diving head-first into PyPy and at least partly failing. He, however, did
+ not fail to explain bits and pieces about PyPy's architecture.
+ `Video`_ is available.
* We gave tons of talks, including the `tutorial`_, `why pypy by example`_
- and `pypy's JIT architecturew`_
+ and `pypy's JIT architecture`_
* We had a giant influx of new commiters, easily doubling the amount of pull
requests ever created for PyPy. The main topics for newcomers were numpy and
@@ -23,7 +24,7 @@
We would like to thank everyone who talked to us, shared ideas and especially
those who participated in sprints - we're always happy to welcome newcomers!
-I'm sure there is tons of things I forgot, but thank you all!
+I'm sure there are tons of things I forgot, but thank you all!
Cheers,
fijal
@@ -31,5 +32,5 @@
.. _`Video`: http://pyvideo.org/video/659/keynote-david-beazley
.. _`tutorial`: http://pyvideo.org/video/612/how-to-get-the-most-out-of-your-pypy
.. _`why pypy by example`: http://pyvideo.org/video/661/why-pypy-by-example
-.. _`pypy's JIT architecturew`: http://pyvideo.org/video/662/how-the-pypy-jit-works
+.. _`pypy's JIT architecture`: http://pyvideo.org/video/662/how-the-pypy-jit-works
.. _`prove him correct`: http://mrjoes.github.com/2011/12/15/sockjs-bench.html
From noreply at buildbot.pypy.org Thu Apr 12 14:15:49 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 14:15:49 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: a branch to experiment with
pinning
Message-ID: <20120412121549.7972482F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54303:cb0a5d8aa8af
Date: 2012-04-12 13:52 +0200
http://bitbucket.org/pypy/pypy/changeset/cb0a5d8aa8af/
Log: a branch to experiment with pinning
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -18,6 +18,24 @@
"""
pass
+def pin(obj):
+ pass
+
+def unpin(obj):
+ pass
+
+
+class pinned_object(object):
+ def __init__(self, obj):
+ self.obj = obj
+
+ def __enter__(self):
+ pin(self.obj)
+ return self
+
+ def __exit__(self, *args):
+ unpin(self.obj)
+
# ____________________________________________________________
# Annotation and specialization
diff --git a/pypy/rlib/test/test_rgc.py b/pypy/rlib/test/test_rgc.py
--- a/pypy/rlib/test/test_rgc.py
+++ b/pypy/rlib/test/test_rgc.py
@@ -171,3 +171,9 @@
x1 = X()
n = rgc.get_rpy_memory_usage(rgc.cast_instance_to_gcref(x1))
assert n >= 8 and n <= 64
+
+def test_pin_obj():
+ l = []
+ with rgc.pinned_object(l):
+ l.append(3)
+ assert l == [3]
From noreply at buildbot.pypy.org Thu Apr 12 14:15:51 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 14:15:51 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: Start a branch to
experiment with pinning on the GC. Boilerplate so far
Message-ID: <20120412121551.9A24682F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54304:43d51ad02ec5
Date: 2012-04-12 14:14 +0200
http://bitbucket.org/pypy/pypy/changeset/43d51ad02ec5/
Log: Start a branch to experiment with pinning on the GC. Boilerplate so
far
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -493,3 +493,25 @@
def specialize_call(self, hop):
hop.exception_is_here()
return hop.genop('gc_typeids_z', [], resulttype = hop.r_result)
+
+class PinEntry(ExtRegistryEntry):
+ _about_ = pin
+
+ def compute_result_annotation(self, s_arg):
+ pass
+
+ def specialize_call(self, hop):
+ hop.exception_cannot_occur()
+ v_obj, = hop.inputargs(hop.args_r[0])
+ hop.genop('gc_pin', [v_obj])
+
+class UnpinEntry(ExtRegistryEntry):
+ _about_ = unpin
+
+ def compute_result_annotation(self, s_arg):
+ pass
+
+ def specialize_call(self, hop):
+ hop.exception_cannot_occur()
+ v_obj, = hop.inputargs(hop.args_r[0])
+ hop.genop('gc_unpin', [v_obj])
diff --git a/pypy/rlib/test/test_rgc.py b/pypy/rlib/test/test_rgc.py
--- a/pypy/rlib/test/test_rgc.py
+++ b/pypy/rlib/test/test_rgc.py
@@ -177,3 +177,12 @@
with rgc.pinned_object(l):
l.append(3)
assert l == [3]
+
+def test_interp_pin_obj():
+ def f(i):
+ l = []
+ with rgc.pinned_object(l):
+ l.append(i)
+ return l[0]
+
+ assert interpret(f, [3]) == 3
diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py
--- a/pypy/rpython/llinterp.py
+++ b/pypy/rpython/llinterp.py
@@ -890,6 +890,12 @@
def op_gc_stack_bottom(self):
pass # marker for trackgcroot.py
+ def op_gc_pin(self, obj):
+ self.heap.pin(obj)
+
+ def op_gc_unpin(self, obj):
+ self.heap.unpin(obj)
+
def op_gc_shadowstackref_new(self): # stacklet+shadowstack
raise NotImplementedError("gc_shadowstackref_new")
def op_gc_shadowstackref_context(self):
diff --git a/pypy/rpython/lltypesystem/llheap.py b/pypy/rpython/lltypesystem/llheap.py
--- a/pypy/rpython/lltypesystem/llheap.py
+++ b/pypy/rpython/lltypesystem/llheap.py
@@ -35,3 +35,10 @@
def thread_die():
pass
+
+def pin(obj):
+ pass
+
+def unpin(obj):
+ pass
+
diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py
--- a/pypy/rpython/lltypesystem/lloperation.py
+++ b/pypy/rpython/lltypesystem/lloperation.py
@@ -469,6 +469,8 @@
'gc_assume_young_pointers': LLOp(canrun=True),
'gc_writebarrier_before_copy': LLOp(canrun=True),
'gc_heap_stats' : LLOp(canmallocgc=True),
+ 'gc_pin' : LLOp(canrun=True),
+ 'gc_unpin' : LLOp(canrun=True),
'gc_get_rpy_roots' : LLOp(),
'gc_get_rpy_referents': LLOp(),
From noreply at buildbot.pypy.org Thu Apr 12 14:16:08 2012
From: noreply at buildbot.pypy.org (mikefc)
Date: Thu, 12 Apr 2012 14:16:08 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: small changes
Message-ID: <20120412121608.BD93A82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Michael Cheng
Branch: extradoc
Changeset: r4189:ea0070e7a081
Date: 2012-04-12 22:13 +1000
http://bitbucket.org/pypy/extradoc/changeset/ea0070e7a081/
Log: small changes
diff --git a/blog/draft/numpy-status-update-3.rst b/blog/draft/numpy-status-update-3.rst
--- a/blog/draft/numpy-status-update-3.rst
+++ b/blog/draft/numpy-status-update-3.rst
@@ -6,15 +6,15 @@
A lot of things happened in March, like `pycon`_. I was also busy doing other
things (pictured), so apologies for the late numpy status update.
-However, a lot of things have happened and numpy continues to be on of the
-main points of entry for hacking on PyPy. Apologies to all the people who's
+However, a lot of things have happened and numpy continues to be one of the
+main points of entry for hacking on PyPy. Apologies to all the people whose
patches I don't review in timely manner, but seriously, you do **a lot** of
work.
The list of things is definitely not exhaustive, and I might be forgetting
important contributions. In a loose order:
-* Matti Picus made ``out`` parameter working for a lot (but not all)
+* Matti Picus made ``out`` parameter work for a lot of (but not all)
functions.
* We merged record dtypes support. The only missing dtypes left are complex
From noreply at buildbot.pypy.org Thu Apr 12 14:36:14 2012
From: noreply at buildbot.pypy.org (mikefc)
Date: Thu, 12 Apr 2012 14:36:14 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: more ToDos mentioned at end
Message-ID: <20120412123614.9001D82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Michael Cheng
Branch: extradoc
Changeset: r4190:bfc2761330f2
Date: 2012-04-12 22:35 +1000
http://bitbucket.org/pypy/extradoc/changeset/bfc2761330f2/
Log: more ToDos mentioned at end
diff --git a/blog/draft/numpy-status-update-3.rst b/blog/draft/numpy-status-update-3.rst
--- a/blog/draft/numpy-status-update-3.rst
+++ b/blog/draft/numpy-status-update-3.rst
@@ -11,7 +11,7 @@
patches I don't review in timely manner, but seriously, you do **a lot** of
work.
-The list of things is definitely not exhaustive, and I might be forgetting
+This list of changes is definitely not exhaustive, and I might be forgetting
important contributions. In a loose order:
* Matti Picus made ``out`` parameter work for a lot of (but not all)
@@ -19,7 +19,7 @@
* We merged record dtypes support. The only missing dtypes left are complex
(important), datetime (less important) and object (which will probably
- never be implemented).
+ never be implemented because XXXXXXX).
* Taavi Burns and others implemented lots of details, including lots of ufuncs.
On the completely unscientific measure of "implemented functions" on
@@ -37,7 +37,15 @@
+
-Next step would be to just continue implementing missing features. Future
-is hard to predict, but we're not far off!
+Next step would be to just continue implementing missing features such as
+
+* specialised arrays i.e. masked arrays and matrixes
+
+* core modules such as ``fft``, ``linalg``, ``random``.
+
+* numpy's testing framework
+
+The future is hard to predict, but we're not far off!
.. _`pycon`: http://us.pycon.org
+.. _`numpypy status page`: http://buildbot.pypy.org/numpy-status/latest.html
From noreply at buildbot.pypy.org Thu Apr 12 14:55:17 2012
From: noreply at buildbot.pypy.org (cfbolz)
Date: Thu, 12 Apr 2012 14:55:17 +0200 (CEST)
Subject: [pypy-commit] pypy kwargsdict-strategy: import py missing
Message-ID: <20120412125517.8F37882F4E@wyvern.cs.uni-duesseldorf.de>
Author: Carl Friedrich Bolz
Branch: kwargsdict-strategy
Changeset: r54305:7b4d81251b58
Date: 2012-04-12 14:55 +0200
http://bitbucket.org/pypy/pypy/changeset/7b4d81251b58/
Log: import py missing
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
--- a/pypy/objspace/std/test/test_kwargsdict.py
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -1,3 +1,4 @@
+import py
from pypy.conftest import gettestobjspace, option
from pypy.objspace.std.test.test_dictmultiobject import FakeSpace, W_DictMultiObject
from pypy.objspace.std.kwargsdict import *
From noreply at buildbot.pypy.org Thu Apr 12 15:13:33 2012
From: noreply at buildbot.pypy.org (cfbolz)
Date: Thu, 12 Apr 2012 15:13:33 +0200 (CEST)
Subject: [pypy-commit] pypy kwargsdict-strategy: optimize the keys method
Message-ID: <20120412131333.DCB5382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Carl Friedrich Bolz
Branch: kwargsdict-strategy
Changeset: r54306:f37a2d944ce1
Date: 2012-04-12 15:13 +0200
http://bitbucket.org/pypy/pypy/changeset/f37a2d944ce1/
Log: optimize the keys method
diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py
--- a/pypy/objspace/std/kwargsdict.py
+++ b/pypy/objspace/std/kwargsdict.py
@@ -102,8 +102,8 @@
return w_dict.getitem(w_key)
def w_keys(self, w_dict):
- l = [self.wrap(key) for key in self.unerase(w_dict.dstorage)[0]]
- return self.space.newlist(l)
+ l = self.unerase(w_dict.dstorage)[0]
+ return self.space.newlist_str(l[:])
def values(self, w_dict):
return self.unerase(w_dict.dstorage)[1][:] # to make non-resizable
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
--- a/pypy/objspace/std/test/test_kwargsdict.py
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -76,6 +76,17 @@
assert d.strategy is not strategy
assert "StringDictStrategy" == d.strategy.__class__.__name__
+def test_keys_doesnt_wrap():
+ space = FakeSpace()
+ space.newlist = None
+ strategy = KwargsDictStrategy(space)
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ w_l = d.w_keys() # does not crash
+
+
from pypy.objspace.std.test.test_dictmultiobject import BaseTestRDictImplementation, BaseTestDevolvedDictImplementation
def get_impl(self):
storage = strategy.erase(([], []))
From noreply at buildbot.pypy.org Thu Apr 12 15:23:46 2012
From: noreply at buildbot.pypy.org (cfbolz)
Date: Thu, 12 Apr 2012 15:23:46 +0200 (CEST)
Subject: [pypy-commit] pypy kwargsdict-strategy: close before impending merge
Message-ID: <20120412132346.C5E7C82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Carl Friedrich Bolz
Branch: kwargsdict-strategy
Changeset: r54307:c02cd7a449c0
Date: 2012-04-12 15:22 +0200
http://bitbucket.org/pypy/pypy/changeset/c02cd7a449c0/
Log: close before impending merge
From noreply at buildbot.pypy.org Thu Apr 12 15:23:48 2012
From: noreply at buildbot.pypy.org (cfbolz)
Date: Thu, 12 Apr 2012 15:23:48 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge kwargsdict-strategy:
Message-ID: <20120412132348.B09EA82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Carl Friedrich Bolz
Branch:
Changeset: r54308:97c57afceef4
Date: 2012-04-12 15:23 +0200
http://bitbucket.org/pypy/pypy/changeset/97c57afceef4/
Log: merge kwargsdict-strategy:
add a new dict strategy that is created for being passed into **args
arguments. This makes decorators that just receive **args and pass
them on to another call a lot faster.
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -169,9 +169,11 @@
def _combine_starstarargs_wrapped(self, w_starstararg):
# unpack the ** arguments
space = self.space
+ keywords, values_w = space.view_as_kwargs(w_starstararg)
+ if keywords is not None: # this path also taken for empty dicts
+ self._add_keywordargs_no_unwrapping(keywords, values_w)
+ return not jit.isconstant(len(self.keywords))
if space.isinstance_w(w_starstararg, space.w_dict):
- if not space.is_true(w_starstararg):
- return False # don't call unpackiterable - it's jit-opaque
keys_w = space.unpackiterable(w_starstararg)
else:
try:
@@ -186,11 +188,8 @@
"a mapping, not %s" % (typename,)))
raise
keys_w = space.unpackiterable(w_keys)
- if keys_w:
- self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
- return True
- else:
- return False # empty dict; don't disable the JIT
+ self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
+ return True
def _do_combine_starstarargs_wrapped(self, keys_w, w_starstararg):
space = self.space
@@ -227,6 +226,26 @@
self.keywords_w = self.keywords_w + keywords_w
self.keyword_names_w = keys_w
+ @jit.look_inside_iff(lambda self, keywords, keywords_w:
+ jit.isconstant(len(keywords) and
+ jit.isconstant(self.keywords)))
+ def _add_keywordargs_no_unwrapping(self, keywords, keywords_w):
+ if self.keywords is None:
+ self.keywords = keywords[:] # copy to make non-resizable
+ self.keywords_w = keywords_w[:]
+ else:
+ # looks quadratic, but the JIT should remove all of it nicely.
+ # Also, all the lists should be small
+ for key in keywords:
+ for otherkey in self.keywords:
+ if otherkey == key:
+ raise operationerrfmt(self.space.w_TypeError,
+ "got multiple values "
+ "for keyword argument "
+ "'%s'", key)
+ self.keywords = self.keywords + keywords
+ self.keywords_w = self.keywords_w + keywords_w
+
def fixedunpack(self, argcount):
"""The simplest argument parsing: get the 'argcount' arguments,
or raise a real ValueError if the length is wrong."""
@@ -385,7 +404,7 @@
# collect extra keyword arguments into the **kwarg
if has_kwarg:
- w_kwds = self.space.newdict()
+ w_kwds = self.space.newdict(kwargs=True)
if num_remainingkwds:
#
limit = len(keywords)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -914,6 +914,12 @@
"""
return None
+ def view_as_kwargs(self, w_dict):
+ """ if w_dict is a kwargs-dict, return two lists, one of unwrapped
+ strings and one of wrapped values. otherwise return (None, None)
+ """
+ return (None, None)
+
def newlist_str(self, list_s):
return self.newlist([self.wrap(s) for s in list_s])
diff --git a/pypy/interpreter/test/test_argument.py b/pypy/interpreter/test/test_argument.py
--- a/pypy/interpreter/test/test_argument.py
+++ b/pypy/interpreter/test/test_argument.py
@@ -75,7 +75,10 @@
def unpackiterable(self, it):
return list(it)
- def newdict(self):
+ def view_as_kwargs(self, x):
+ return None, None
+
+ def newdict(self, kwargs=False):
return {}
def newlist(self, l=[]):
@@ -488,6 +491,57 @@
assert len(l) == 1
assert l[0] == space.wrap(5)
+ def test_starstarargs_special(self):
+ class kwargs(object):
+ def __init__(self, k, v):
+ self.k = k
+ self.v = v
+ class MyDummySpace(DummySpace):
+ def view_as_kwargs(self, kw):
+ if isinstance(kw, kwargs):
+ return kw.k, kw.v
+ return None, None
+ space = MyDummySpace()
+ for i in range(3):
+ kwds = [("c", 3)]
+ kwds_w = dict(kwds[:i])
+ keywords = kwds_w.keys()
+ keywords_w = kwds_w.values()
+ rest = dict(kwds[i:])
+ w_kwds = kwargs(rest.keys(), rest.values())
+ if i == 2:
+ w_kwds = None
+ assert len(keywords) == len(keywords_w)
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c"]), defaults_w=[4])
+ assert l == [1, 2, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults_w=[4, 5])
+ assert l == [1, 2, 4, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults_w=[4, 5])
+ assert l == [1, 2, 3, 5]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["c", "b", "a", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["a", "b", "c1", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b"], None, "**"))
+ assert l == [1, 2, {'c': 3}]
+ excinfo = py.test.raises(OperationError, Arguments, space, [], ["a"],
+ [1], w_starstararg=kwargs(["a"], [2]))
+ assert excinfo.value.w_type is TypeError
+
+
+
class TestErrorHandling(object):
def test_missing_args(self):
# got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg,
diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py
--- a/pypy/module/pypyjit/test_pypy_c/test_call.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_call.py
@@ -244,6 +244,7 @@
print guards
assert len(guards) <= 20
+
def test_stararg_virtual(self):
def main(x):
def g(*args):
@@ -486,3 +487,38 @@
--TICK--
jump(..., descr=...)
""")
+
+ def test_kwargs_virtual2(self):
+ log = self.run("""
+ def f(*args, **kwargs):
+ kwargs['a'] = kwargs['z'] * 0
+ return g(1, *args, **kwargs)
+
+ def g(x, y, z=2, a=1):
+ return x - y + z + a
+
+ def main(stop):
+ res = 0
+ i = 0
+ while i < stop:
+ res = f(res, z=i) # ID: call
+ i += 1
+ return res""", [1000])
+ assert log.result == 500
+ loop, = log.loops_by_id('call')
+ print loop.ops_by_id('call')
+ assert loop.match("""
+ i65 = int_lt(i58, i29)
+ guard_true(i65, descr=...)
+ guard_not_invalidated(..., descr=...)
+ i66 = force_token()
+ i67 = force_token()
+ i69 = int_sub_ovf(1, i56)
+ guard_no_overflow(..., descr=...)
+ i70 = int_add_ovf(i69, i58)
+ guard_no_overflow(..., descr=...)
+ i71 = int_add(i58, 1)
+ --TICK--
+ jump(..., descr=...)
+ """)
+
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -110,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,7 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, strdict=False):
+ instance=False, strdict=False, kwargs=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -46,11 +46,15 @@
assert w_type is None
strategy = space.fromcache(StringDictStrategy)
+ elif kwargs:
+ assert w_type is None
+ from pypy.objspace.std.kwargsdict import KwargsDictStrategy
+ strategy = space.fromcache(KwargsDictStrategy)
else:
strategy = space.fromcache(EmptyDictStrategy)
-
if w_type is None:
w_type = space.w_dict
+
storage = strategy.get_empty_storage()
w_self = space.allocate_instance(W_DictMultiObject, w_type)
W_DictMultiObject.__init__(w_self, space, strategy, storage)
@@ -91,7 +95,8 @@
getitem_str delitem length \
clear w_keys values \
items iter setdefault \
- popitem listview_str listview_int".split()
+ popitem listview_str listview_int \
+ view_as_kwargs".split()
def make_method(method):
def f(self, *args):
@@ -165,6 +170,9 @@
def listview_int(self, w_dict):
return None
+ def view_as_kwargs(self, w_dict):
+ return (None, None)
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -254,6 +262,9 @@
def popitem(self, w_dict):
raise KeyError
+ def view_as_kwargs(self, w_dict):
+ return ([], [])
+
registerimplementation(W_DictMultiObject)
# DictImplementation lattice
diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/kwargsdict.py
@@ -0,0 +1,165 @@
+## ----------------------------------------------------------------------------
+## dict strategy (see dictmultiobject.py)
+
+from pypy.rlib import rerased, jit
+from pypy.objspace.std.dictmultiobject import (DictStrategy,
+ IteratorImplementation,
+ ObjectDictStrategy,
+ StringDictStrategy)
+
+
+class KwargsDictStrategy(DictStrategy):
+ erase, unerase = rerased.new_erasing_pair("kwargsdict")
+ erase = staticmethod(erase)
+ unerase = staticmethod(unerase)
+
+ def wrap(self, key):
+ return self.space.wrap(key)
+
+ def unwrap(self, wrapped):
+ return self.space.str_w(wrapped)
+
+ def get_empty_storage(self):
+ d = ([], [])
+ return self.erase(d)
+
+ def is_correct_type(self, w_obj):
+ space = self.space
+ return space.is_w(space.type(w_obj), space.w_str)
+
+ def _never_equal_to(self, w_lookup_type):
+ return False
+
+ def iter(self, w_dict):
+ return KwargsDictIterator(self.space, self, w_dict)
+
+ def w_keys(self, w_dict):
+ return self.space.newlist([self.space.wrap(key) for key in self.unerase(w_dict.dstorage)[0]])
+
+ def setitem(self, w_dict, w_key, w_value):
+ space = self.space
+ if self.is_correct_type(w_key):
+ self.setitem_str(w_dict, self.unwrap(w_key), w_value)
+ return
+ else:
+ self.switch_to_object_strategy(w_dict)
+ w_dict.setitem(w_key, w_value)
+
+ def setitem_str(self, w_dict, key, w_value):
+ self._setitem_str_indirection(w_dict, key, w_value)
+
+ @jit.look_inside_iff(lambda self, w_dict, key, w_value:
+ jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _setitem_str_indirection(self, w_dict, key, w_value):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ values_w[i] = w_value
+ break
+ else:
+ # limit the size so that the linear searches don't become too long
+ if len(keys) >= 16:
+ self.switch_to_string_strategy(w_dict)
+ w_dict.setitem_str(key, w_value)
+ else:
+ keys.append(key)
+ values_w.append(w_value)
+
+ def setdefault(self, w_dict, w_key, w_default):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.setdefault(w_key, w_default)
+
+ def delitem(self, w_dict, w_key):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.delitem(w_key)
+
+ def length(self, w_dict):
+ return len(self.unerase(w_dict.dstorage)[0])
+
+ def getitem_str(self, w_dict, key):
+ return self._getitem_str_indirection(w_dict, key)
+
+ @jit.look_inside_iff(lambda self, w_dict, key: jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _getitem_str_indirection(self, w_dict, key):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ return values_w[i]
+ return None
+
+ def getitem(self, w_dict, w_key):
+ space = self.space
+ if self.is_correct_type(w_key):
+ return self.getitem_str(w_dict, self.unwrap(w_key))
+ elif self._never_equal_to(space.type(w_key)):
+ return None
+ else:
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.getitem(w_key)
+
+ def w_keys(self, w_dict):
+ l = self.unerase(w_dict.dstorage)[0]
+ return self.space.newlist_str(l[:])
+
+ def values(self, w_dict):
+ return self.unerase(w_dict.dstorage)[1][:] # to make non-resizable
+
+ def items(self, w_dict):
+ space = self.space
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ result.append(space.newtuple([self.wrap(keys[i]), values_w[i]]))
+ return result
+
+ def popitem(self, w_dict):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ key = keys.pop()
+ w_value = values_w.pop()
+ return (self.wrap(key), w_value)
+
+ def clear(self, w_dict):
+ w_dict.dstorage = self.get_empty_storage()
+
+ def switch_to_object_strategy(self, w_dict):
+ strategy = self.space.fromcache(ObjectDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ d_new = strategy.unerase(strategy.get_empty_storage())
+ for i in range(len(keys)):
+ d_new[self.wrap(keys[i])] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = strategy.erase(d_new)
+
+ def switch_to_string_strategy(self, w_dict):
+ strategy = self.space.fromcache(StringDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ storage = strategy.get_empty_storage()
+ d_new = strategy.unerase(storage)
+ for i in range(len(keys)):
+ d_new[keys[i]] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = storage
+
+ def view_as_kwargs(self, w_dict):
+ return self.unerase(w_dict.dstorage)
+
+
+class KwargsDictIterator(IteratorImplementation):
+ def __init__(self, space, strategy, dictimplementation):
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
+ keys, values_w = strategy.unerase(self.dictimplementation.dstorage)
+ self.iterator = iter(range(len(keys)))
+ # XXX this potentially leaks
+ self.keys = keys
+ self.values_w = values_w
+
+ def next_entry(self):
+ # note that this 'for' loop only runs once, at most
+ for i in self.iterator:
+ return self.space.wrap(self.keys[i]), self.values_w[i]
+ else:
+ return None, None
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,11 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- strdict=strdict)
+ strdict=strdict, kwargs=kwargs)
def newset(self):
from pypy.objspace.std.setobject import newset
@@ -472,6 +472,11 @@
return w_obj.getitems_int()
return None
+ def view_as_kwargs(self, w_dict):
+ if type(w_dict) is W_DictMultiObject:
+ return w_dict.view_as_kwargs()
+ return (None, None)
+
def _uses_list_iter(self, w_obj):
from pypy.objspace.descroperation import list_iter
return self.lookup(w_obj, '__iter__') is list_iter(self)
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -0,0 +1,120 @@
+import py
+from pypy.conftest import gettestobjspace, option
+from pypy.objspace.std.test.test_dictmultiobject import FakeSpace, W_DictMultiObject
+from pypy.objspace.std.kwargsdict import *
+
+space = FakeSpace()
+strategy = KwargsDictStrategy(space)
+
+def test_create():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem(space.wrap("a")) == 1
+ assert d.getitem(space.wrap("b")) == 2
+ assert d.getitem(space.wrap("c")) == 3
+ assert d.w_keys() == keys
+ assert d.values() == values
+
+def test_set_existing():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("a", 4) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("b", 5) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("c", 6) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 6
+ assert d.getitem(space.wrap("a")) == 4
+ assert d.getitem(space.wrap("b")) == 5
+ assert d.getitem(space.wrap("c")) == 6
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c"]
+ assert values == [4, 5, 6]
+
+
+def test_set_new():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") is None
+ assert d.setitem_str("d", 4) is None
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") == 4
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c", "d"]
+ assert values == [1, 2, 3, 4]
+
+def test_limit_size():
+ storage = strategy.get_empty_storage()
+ d = W_DictMultiObject(space, strategy, storage)
+ for i in range(100):
+ assert d.setitem_str("d%s" % i, 4) is None
+ assert d.strategy is not strategy
+ assert "StringDictStrategy" == d.strategy.__class__.__name__
+
+def test_keys_doesnt_wrap():
+ space = FakeSpace()
+ space.newlist = None
+ strategy = KwargsDictStrategy(space)
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ w_l = d.w_keys() # does not crash
+
+
+from pypy.objspace.std.test.test_dictmultiobject import BaseTestRDictImplementation, BaseTestDevolvedDictImplementation
+def get_impl(self):
+ storage = strategy.erase(([], []))
+ return W_DictMultiObject(space, strategy, storage)
+class TestKwargsDictImplementation(BaseTestRDictImplementation):
+ StrategyClass = KwargsDictStrategy
+ get_impl = get_impl
+ def test_delitem(self):
+ pass # delitem devolves for now
+
+class TestDevolvedKwargsDictImplementation(BaseTestDevolvedDictImplementation):
+ get_impl = get_impl
+ StrategyClass = KwargsDictStrategy
+
+
+class AppTestKwargsDictStrategy(object):
+ def setup_class(cls):
+ if option.runappdirect:
+ py.test.skip("__repr__ doesn't work on appdirect")
+
+ def w_get_strategy(self, obj):
+ import __pypy__
+ r = __pypy__.internal_repr(obj)
+ return r[r.find("(") + 1: r.find(")")]
+
+ def test_create(self):
+ def f(**args):
+ return args
+ d = f(a=1)
+ assert "KwargsDictStrategy" in self.get_strategy(d)
+
From noreply at buildbot.pypy.org Thu Apr 12 15:24:48 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Thu, 12 Apr 2012 15:24:48 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: adapt sandbox interaction
test to unified site
Message-ID: <20120412132448.4554382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54309:08c38e4707f6
Date: 2012-04-12 15:24 +0200
http://bitbucket.org/pypy/pypy/changeset/08c38e4707f6/
Log: adapt sandbox interaction test to unified site
diff --git a/pypy/module/sys/state.py b/pypy/module/sys/state.py
--- a/pypy/module/sys/state.py
+++ b/pypy/module/sys/state.py
@@ -42,8 +42,6 @@
lib_python = os.path.join(prefix, 'lib-python')
python_std_lib = os.path.join(lib_python, dirname)
checkdir(python_std_lib)
- python_std_lib_modified = os.path.join(lib_python, 'modified-' + dirname)
- checkdir(python_std_lib_modified)
lib_pypy = os.path.join(prefix, 'lib_pypy')
checkdir(lib_pypy)
@@ -56,12 +54,9 @@
importlist.append(lib_extensions)
#
importlist.append(lib_pypy)
- importlist.append(python_std_lib_modified)
importlist.append(python_std_lib)
#
- lib_tk_modified = os.path.join(python_std_lib_modified, 'lib-tk')
lib_tk = os.path.join(python_std_lib, 'lib-tk')
- importlist.append(lib_tk_modified)
importlist.append(lib_tk)
#
# List here the extra platform-specific paths.
diff --git a/pypy/tool/lib_pypy.py b/pypy/tool/lib_pypy.py
--- a/pypy/tool/lib_pypy.py
+++ b/pypy/tool/lib_pypy.py
@@ -5,9 +5,7 @@
LIB_ROOT = py.path.local(pypy.__path__[0]).dirpath()
LIB_PYPY = LIB_ROOT.join('lib_pypy')
-LIB_PYTHON = LIB_ROOT.join('lib-python')
-LIB_PYTHON_VANILLA = LIB_PYTHON.join('%d.%d' % CPYTHON_VERSION[:2])
-LIB_PYTHON_MODIFIED = LIB_PYTHON.join('modified-%d.%d' % CPYTHON_VERSION[:2])
+LIB_PYTHON = LIB_ROOT.join('lib-python', '%d.%d' % CPYTHON_VERSION[:2])
def import_from_lib_pypy(modname):
diff --git a/pypy/tool/stdlib_opcode.py b/pypy/tool/stdlib_opcode.py
--- a/pypy/tool/stdlib_opcode.py
+++ b/pypy/tool/stdlib_opcode.py
@@ -106,8 +106,8 @@
opmap as host_opmap, HAVE_ARGUMENT as host_HAVE_ARGUMENT)
def load_pypy_opcode():
- from pypy.tool.lib_pypy import LIB_PYTHON_MODIFIED
- opcode_path = LIB_PYTHON_MODIFIED.join('opcode.py')
+ from pypy.tool.lib_pypy import LIB_PYTHON
+ opcode_path = LIB_PYTHON.join('opcode.py')
d = {}
execfile(str(opcode_path), d)
for name in __all__:
diff --git a/pypy/translator/sandbox/test/test_pypy_interact.py b/pypy/translator/sandbox/test/test_pypy_interact.py
--- a/pypy/translator/sandbox/test/test_pypy_interact.py
+++ b/pypy/translator/sandbox/test/test_pypy_interact.py
@@ -2,14 +2,12 @@
import os, sys, stat, errno
from pypy.translator.sandbox.pypy_interact import PyPySandboxedProc
from pypy.translator.interactive import Translation
+
from pypy.module.sys.version import CPYTHON_VERSION
+from pypy.tool.lib_pypy import LIB_PYTHON
VERSION = '%d.%d' % CPYTHON_VERSION[:2]
-SITE_PY_CONTENT = open(os.path.join(autopath.pypydir,
- '..',
- 'lib-python',
- 'modified-' + VERSION, 'site.py'),
- 'rb').read()
+SITE_PY_CONTENT = LIB_PYTHON.join('site.py').read()
ERROR_TEXT = os.strerror(errno.ENOENT)
def assert_(cond, text):
@@ -39,15 +37,14 @@
pass
else:
assert_(False, "os.stat('site') should have failed")
- st = os.stat('/bin/lib-python/modified-%s/site.py' % VERSION)
- assert_(stat.S_ISREG(st.st_mode), "bad st_mode for .../site.py")
+
try:
- os.stat('/bin/lib-python/modified-%s/site.pyc' % VERSION)
+ os.stat('/bin/lib-python/%s/site.pyc' % VERSION)
except OSError:
pass
else:
assert_(False, "os.stat('....pyc') should have failed")
- fd = os.open('/bin/lib-python/modified-%s/site.py' % VERSION,
+ fd = os.open('/bin/lib-python/%s/site.py' % VERSION,
os.O_RDONLY, 0666)
length = 8192
ofs = 0
From noreply at buildbot.pypy.org Thu Apr 12 15:37:00 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 15:37:00 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: test and more boilerplate
Message-ID: <20120412133700.51DC082F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54310:afd55b84ac33
Date: 2012-04-12 14:32 +0200
http://bitbucket.org/pypy/pypy/changeset/afd55b84ac33/
Log: test and more boilerplate
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -503,7 +503,9 @@
def specialize_call(self, hop):
hop.exception_cannot_occur()
v_obj, = hop.inputargs(hop.args_r[0])
- hop.genop('gc_pin', [v_obj])
+ v_addr = hop.genop('cast_ptr_to_adr', [v_obj],
+ resulttype=llmemory.Address)
+ hop.genop('gc_pin', [v_addr])
class UnpinEntry(ExtRegistryEntry):
_about_ = unpin
@@ -514,4 +516,6 @@
def specialize_call(self, hop):
hop.exception_cannot_occur()
v_obj, = hop.inputargs(hop.args_r[0])
- hop.genop('gc_unpin', [v_obj])
+ v_addr = hop.genop('cast_ptr_to_adr', [v_obj],
+ resulttype=llmemory.Address)
+ hop.genop('gc_unpin', [v_addr])
diff --git a/pypy/rpython/memory/gc/base.py b/pypy/rpython/memory/gc/base.py
--- a/pypy/rpython/memory/gc/base.py
+++ b/pypy/rpython/memory/gc/base.py
@@ -18,9 +18,11 @@
needs_write_barrier = False
malloc_zero_filled = False
prebuilt_gc_objects_are_static_roots = True
+ can_always_pin_objects = False
object_minimal_size = 0
gcflag_extra = 0 # or a real GC flag that is always 0 when not collecting
+
def __init__(self, config, chunk_size=DEFAULT_CHUNK_SIZE,
translated_to_c=True):
self.gcheaderbuilder = GCHeaderBuilder(self.HDR)
@@ -176,6 +178,12 @@
def can_move(self, addr):
return False
+ def pin(self, addr):
+ pass
+
+ def unpin(self, addr):
+ pass
+
def set_max_heap_size(self, size):
raise NotImplementedError
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -130,6 +130,7 @@
needs_write_barrier = True
prebuilt_gc_objects_are_static_roots = False
malloc_zero_filled = True # xxx experiment with False
+ can_always_pin_objects = True
gcflag_extra = GCFLAG_FINALIZATION_ORDERING
# All objects start with a HDR, i.e. with a field 'tid' which contains
diff --git a/pypy/rpython/memory/gcwrapper.py b/pypy/rpython/memory/gcwrapper.py
--- a/pypy/rpython/memory/gcwrapper.py
+++ b/pypy/rpython/memory/gcwrapper.py
@@ -123,6 +123,12 @@
def can_move(self, addr):
return self.gc.can_move(addr)
+ def pin(self, addr):
+ self.gc.pin(addr)
+
+ def unpin(self, addr):
+ self.gc.unpin(addr)
+
def weakref_create_getlazy(self, objgetter):
# we have to be lazy in reading the llinterp variable containing
# the 'obj' pointer, because the gc.malloc() call below could
diff --git a/pypy/rpython/memory/test/test_gc.py b/pypy/rpython/memory/test/test_gc.py
--- a/pypy/rpython/memory/test/test_gc.py
+++ b/pypy/rpython/memory/test/test_gc.py
@@ -746,6 +746,25 @@
res = self.interpret(fn, [])
assert res == ord('y')
+ def test_pinning(self):
+ def f(i):
+ s = str(i)
+ if not rgc.can_move(s):
+ return 13
+ sum = 0
+ with rgc.pinned_object(s):
+ sum += int(rgc.can_move(s))
+ sum += 10 * int(rgc.can_move(s))
+ return sum
+
+ res = self.interpret(f, [10])
+ if not self.GCClass.moving_gc:
+ assert res == 13
+ elif self.GCClass.can_always_pin_objects:
+ assert res == 10
+ else:
+ assert res == 11
+
from pypy.rlib.objectmodel import UnboxedValue
class TaggedBase(object):
From noreply at buildbot.pypy.org Thu Apr 12 15:37:01 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 15:37:01 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: enough to make the test
pass, but obviously it does not work
Message-ID: <20120412133701.92B1482F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54311:55a65a85e0d6
Date: 2012-04-12 15:32 +0200
http://bitbucket.org/pypy/pypy/changeset/55a65a85e0d6/
Log: enough to make the test pass, but obviously it does not work
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -113,8 +113,9 @@
# one bit per 'card_page_indices' indices.
GCFLAG_HAS_CARDS = first_gcflag << 5
GCFLAG_CARDS_SET = first_gcflag << 6 # <- at least one card bit is set
+GCFLAG_PINNED = first_gcflag << 7
-TID_MASK = (first_gcflag << 7) - 1
+TID_MASK = (first_gcflag << 8) - 1
FORWARDSTUB = lltype.GcStruct('forwarding_stub',
@@ -304,6 +305,13 @@
# GCFLAG_HAS_SHADOW to their future location at the next
# minor collection.
self.nursery_objects_shadows = self.AddressDict()
+ # all pinned objects in the nursery
+ self.pinned_objects = self.AddressStack()
+ # all pinned objects that were in the nursery *before* last
+ # minor collect. This is a sorted stack that should be consulted when
+ # considering next nursery ceiling
+ self.nursery_barriers = self.AddressStack()
+
#
# Allocate a nursery. In case of auto_nursery_size, start by
# allocating a very small nursery, enough to do things like look
@@ -762,8 +770,14 @@
def can_move(self, obj):
"""Overrides the parent can_move()."""
- return self.is_in_nursery(obj)
+ return (self.is_in_nursery(obj) and
+ not self.header(obj).tid & GCFLAG_PINNED)
+ def pin(self, obj):
+ self.header(obj).tid |= GCFLAG_PINNED
+
+ def unpin(self, obj):
+ self.header(obj).tid &= ~GCFLAG_PINNED
def shrink_array(self, obj, smallerlength):
#
diff --git a/pypy/rpython/memory/test/test_gc.py b/pypy/rpython/memory/test/test_gc.py
--- a/pypy/rpython/memory/test/test_gc.py
+++ b/pypy/rpython/memory/test/test_gc.py
@@ -763,7 +763,7 @@
elif self.GCClass.can_always_pin_objects:
assert res == 10
else:
- assert res == 11
+ assert res == 11 or res == 13 # sometimes fresh objs can't move
from pypy.rlib.objectmodel import UnboxedValue
From noreply at buildbot.pypy.org Thu Apr 12 15:39:11 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Thu, 12 Apr 2012 15:39:11 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: fix the note on using the
stdlib test_set
Message-ID: <20120412133911.BE83782F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54312:e0fe8d719183
Date: 2012-04-12 15:27 +0200
http://bitbucket.org/pypy/pypy/changeset/e0fe8d719183/
Log: fix the note on using the stdlib test_set
diff --git a/pypy/objspace/std/test/test_setobject.py b/pypy/objspace/std/test/test_setobject.py
--- a/pypy/objspace/std/test/test_setobject.py
+++ b/pypy/objspace/std/test/test_setobject.py
@@ -1,9 +1,10 @@
"""
The main test for the set implementation is located
-at:
- pypy-dist/lib-python/modified-2.5.2/test/test_set.py
- go there and invoke
+in the stdlibs test/test_set.py which is located in lib-python
+go there and invoke::
+
../../../pypy/bin/py.py test_set.py
+
This file just contains some basic tests that make sure, the implementation
is not too wrong.
"""
From noreply at buildbot.pypy.org Thu Apr 12 15:39:13 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Thu, 12 Apr 2012 15:39:13 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: todo note in tool/pytest
Message-ID: <20120412133913.0049282F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54313:4a9a542ed7e4
Date: 2012-04-12 15:38 +0200
http://bitbucket.org/pypy/pypy/changeset/4a9a542ed7e4/
Log: todo note in tool/pytest
diff --git a/pypy/tool/pytest/htmlreport.py b/pypy/tool/pytest/htmlreport.py
--- a/pypy/tool/pytest/htmlreport.py
+++ b/pypy/tool/pytest/htmlreport.py
@@ -75,6 +75,7 @@
def render_test_references(self, result):
dest = self.make_single_test_result(result)
+ #XXX: ask hg for differences between test and vendor branch
modified = result.ismodifiedtest() and " [mod]" or ""
return html.div(html.a(result.path.purebasename + modified,
href=self.getrelpath(dest)),
From noreply at buildbot.pypy.org Thu Apr 12 16:35:50 2012
From: noreply at buildbot.pypy.org (mattip)
Date: Thu, 12 Apr 2012 16:35:50 +0200 (CEST)
Subject: [pypy-commit] pypy win32-stdlib: rework zipfile and test_zipfile to
respect open files
Message-ID: <20120412143550.E66BD82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Matti Picus
Branch: win32-stdlib
Changeset: r54314:ba500a555707
Date: 2012-04-12 17:35 +0300
http://bitbucket.org/pypy/pypy/changeset/ba500a555707/
Log: rework zipfile and test_zipfile to respect open files
diff --git a/lib-python/2.7/test/test_zipfile.py b/lib-python/modified-2.7/test/test_zipfile.py
copy from lib-python/2.7/test/test_zipfile.py
copy to lib-python/modified-2.7/test/test_zipfile.py
--- a/lib-python/2.7/test/test_zipfile.py
+++ b/lib-python/modified-2.7/test/test_zipfile.py
@@ -234,8 +234,9 @@
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
- for line, zipline in zip(self.line_gen, zipfp.open(TESTFN)):
- self.assertEqual(zipline, line + '\n')
+ with zipfp.open(TESTFN) as f:
+ for line, zipline in zip(self.line_gen, f):
+ self.assertEqual(zipline, line + '\n')
def test_readline_read_stored(self):
# Issue #7610: calls to readline() interleaved with calls to read().
@@ -340,7 +341,8 @@
produces the expected result."""
with zipfile.ZipFile(TESTFN2, "w") as zipfp:
zipfp.write(TESTFN)
- self.assertEqual(zipfp.read(TESTFN), open(TESTFN).read())
+ with open(TESTFN) as f:
+ self.assertEqual(zipfp.read(TESTFN), f.read())
@skipUnless(zlib, "requires zlib")
def test_per_file_compression(self):
@@ -382,7 +384,8 @@
self.assertEqual(writtenfile, correctfile)
# make sure correct data is in correct file
- self.assertEqual(fdata, open(writtenfile, "rb").read())
+ with open(writtenfile, "rb") as fid:
+ self.assertEqual(fdata, fid.read())
os.remove(writtenfile)
# remove the test file subdirectories
@@ -401,24 +404,25 @@
else:
outfile = os.path.join(os.getcwd(), fpath)
- self.assertEqual(fdata, open(outfile, "rb").read())
+ with open(outfile, "rb") as fid:
+ self.assertEqual(fdata, fid.read())
os.remove(outfile)
# remove the test file subdirectories
shutil.rmtree(os.path.join(os.getcwd(), 'ziptest2dir'))
def test_writestr_compression(self):
- zipfp = zipfile.ZipFile(TESTFN2, "w")
- zipfp.writestr("a.txt", "hello world", compress_type=zipfile.ZIP_STORED)
- if zlib:
- zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
+ with zipfile.ZipFile(TESTFN2, "w") as zipfp:
+ zipfp.writestr("a.txt", "hello world", compress_type=zipfile.ZIP_STORED)
+ if zlib:
+ zipfp.writestr("b.txt", "hello world", compress_type=zipfile.ZIP_DEFLATED)
- info = zipfp.getinfo('a.txt')
- self.assertEqual(info.compress_type, zipfile.ZIP_STORED)
+ info = zipfp.getinfo('a.txt')
+ self.assertEqual(info.compress_type, zipfile.ZIP_STORED)
- if zlib:
- info = zipfp.getinfo('b.txt')
- self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
+ if zlib:
+ info = zipfp.getinfo('b.txt')
+ self.assertEqual(info.compress_type, zipfile.ZIP_DEFLATED)
def zip_test_writestr_permissions(self, f, compression):
@@ -646,7 +650,8 @@
def test_write_non_pyfile(self):
with zipfile.PyZipFile(TemporaryFile(), "w") as zipfp:
- open(TESTFN, 'w').write('most definitely not a python file')
+ with open(TESTFN, 'w') as f:
+ f.write('most definitely not a python file')
self.assertRaises(RuntimeError, zipfp.writepy, TESTFN)
os.remove(TESTFN)
@@ -795,7 +800,8 @@
self.assertRaises(RuntimeError, zipf.open, "foo.txt")
self.assertRaises(RuntimeError, zipf.testzip)
self.assertRaises(RuntimeError, zipf.writestr, "bogus.txt", "bogus")
- open(TESTFN, 'w').write('zipfile test data')
+ with open(TESTFN, 'w') as fp:
+ fp.write('zipfile test data')
self.assertRaises(RuntimeError, zipf.write, TESTFN)
def test_bad_constructor_mode(self):
@@ -803,7 +809,6 @@
self.assertRaises(RuntimeError, zipfile.ZipFile, TESTFN, "q")
def test_bad_open_mode(self):
- """Check that bad modes passed to ZipFile.open are caught."""
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
zipf.writestr("foo.txt", "O, for a Muse of Fire!")
@@ -851,7 +856,6 @@
def test_comments(self):
"""Check that comments on the archive are handled properly."""
-
# check default comment is empty
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
self.assertEqual(zipf.comment, '')
@@ -953,14 +957,16 @@
with zipfile.ZipFile(TESTFN, mode="w") as zipf:
pass
try:
- zipf = zipfile.ZipFile(TESTFN, mode="r")
+ with zipfile.ZipFile(TESTFN, mode="r") as zipf:
+ pass
except zipfile.BadZipfile:
self.fail("Unable to create empty ZIP file in 'w' mode")
with zipfile.ZipFile(TESTFN, mode="a") as zipf:
pass
try:
- zipf = zipfile.ZipFile(TESTFN, mode="r")
+ with zipfile.ZipFile(TESTFN, mode="r") as zipf:
+ pass
except:
self.fail("Unable to create empty ZIP file in 'a' mode")
@@ -1160,6 +1166,8 @@
data1 += zopen1.read(500)
data2 += zopen2.read(500)
self.assertEqual(data1, data2)
+ zopen1.close()
+ zopen2.close()
def test_different_file(self):
# Verify that (when the ZipFile is in control of creating file objects)
@@ -1207,9 +1215,9 @@
def test_store_dir(self):
os.mkdir(os.path.join(TESTFN2, "x"))
- zipf = zipfile.ZipFile(TESTFN, "w")
- zipf.write(os.path.join(TESTFN2, "x"), "x")
- self.assertTrue(zipf.filelist[0].filename.endswith("x/"))
+ with zipfile.ZipFile(TESTFN, "w") as zipf:
+ zipf.write(os.path.join(TESTFN2, "x"), "x")
+ self.assertTrue(zipf.filelist[0].filename.endswith("x/"))
def tearDown(self):
shutil.rmtree(TESTFN2)
@@ -1226,7 +1234,8 @@
for n, s in enumerate(self.seps):
self.arcdata[s] = s.join(self.line_gen) + s
self.arcfiles[s] = '%s-%d' % (TESTFN, n)
- open(self.arcfiles[s], "wb").write(self.arcdata[s])
+ with open(self.arcfiles[s], "wb") as f:
+ f.write(self.arcdata[s])
def make_test_archive(self, f, compression):
# Create the ZIP archive
@@ -1295,8 +1304,9 @@
# Read the ZIP archive
with zipfile.ZipFile(f, "r") as zipfp:
for sep, fn in self.arcfiles.items():
- for line, zipline in zip(self.line_gen, zipfp.open(fn, "rU")):
- self.assertEqual(zipline, line + '\n')
+ with zipfp.open(fn, "rU") as f:
+ for line, zipline in zip(self.line_gen, f):
+ self.assertEqual(zipline, line + '\n')
def test_read_stored(self):
for f in (TESTFN2, TemporaryFile(), StringIO()):
diff --git a/lib-python/2.7/zipfile.py b/lib-python/modified-2.7/zipfile.py
copy from lib-python/2.7/zipfile.py
copy to lib-python/modified-2.7/zipfile.py
--- a/lib-python/2.7/zipfile.py
+++ b/lib-python/modified-2.7/zipfile.py
@@ -648,6 +648,10 @@
return data
+class ZipExtFileWithClose(ZipExtFile):
+ def close(self):
+ self._fileobj.close()
+
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
@@ -843,9 +847,9 @@
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
- f = self.open(zinfo.filename, "r")
- while f.read(chunk_size): # Check CRC-32
- pass
+ with self.open(zinfo.filename, "r") as f:
+ while f.read(chunk_size): # Check CRC-32
+ pass
except BadZipfile:
return zinfo.filename
@@ -864,7 +868,9 @@
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
- return self.open(name, "r", pwd).read()
+ with self.open(name, "r", pwd) as f:
+ retval = f.read()
+ return retval
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
@@ -881,59 +887,66 @@
else:
zef_file = open(self.filename, 'rb')
- # Make sure we have an info object
- if isinstance(name, ZipInfo):
- # 'name' is already an info object
- zinfo = name
+ try:
+ # Make sure we have an info object
+ if isinstance(name, ZipInfo):
+ # 'name' is already an info object
+ zinfo = name
+ else:
+ # Get info object for name
+ zinfo = self.getinfo(name)
+
+ zef_file.seek(zinfo.header_offset, 0)
+
+ # Skip the file header:
+ fheader = zef_file.read(sizeFileHeader)
+ if fheader[0:4] != stringFileHeader:
+ raise BadZipfile, "Bad magic number for file header"
+
+ fheader = struct.unpack(structFileHeader, fheader)
+ fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
+ if fheader[_FH_EXTRA_FIELD_LENGTH]:
+ zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
+
+ if fname != zinfo.orig_filename:
+ raise BadZipfile, \
+ 'File name in directory "%s" and header "%s" differ.' % (
+ zinfo.orig_filename, fname)
+
+ # check for encrypted flag & handle password
+ is_encrypted = zinfo.flag_bits & 0x1
+ zd = None
+ if is_encrypted:
+ if not pwd:
+ pwd = self.pwd
+ if not pwd:
+ raise RuntimeError, "File %s is encrypted, " \
+ "password required for extraction" % name
+
+ zd = _ZipDecrypter(pwd)
+ # The first 12 bytes in the cypher stream is an encryption header
+ # used to strengthen the algorithm. The first 11 bytes are
+ # completely random, while the 12th contains the MSB of the CRC,
+ # or the MSB of the file time depending on the header type
+ # and is used to check the correctness of the password.
+ bytes = zef_file.read(12)
+ h = map(zd, bytes[0:12])
+ if zinfo.flag_bits & 0x8:
+ # compare against the file type from extended local headers
+ check_byte = (zinfo._raw_time >> 8) & 0xff
+ else:
+ # compare against the CRC otherwise
+ check_byte = (zinfo.CRC >> 24) & 0xff
+ if ord(h[11]) != check_byte:
+ raise RuntimeError("Bad password for file", name)
+ except:
+ if not self._filePassed:
+ zef_file.close()
+ raise
+ if self._filePassed:
+ return ZipExtFile(zef_file, mode, zinfo, zd)
else:
- # Get info object for name
- zinfo = self.getinfo(name)
-
- zef_file.seek(zinfo.header_offset, 0)
-
- # Skip the file header:
- fheader = zef_file.read(sizeFileHeader)
- if fheader[0:4] != stringFileHeader:
- raise BadZipfile, "Bad magic number for file header"
-
- fheader = struct.unpack(structFileHeader, fheader)
- fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
- if fheader[_FH_EXTRA_FIELD_LENGTH]:
- zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
-
- if fname != zinfo.orig_filename:
- raise BadZipfile, \
- 'File name in directory "%s" and header "%s" differ.' % (
- zinfo.orig_filename, fname)
-
- # check for encrypted flag & handle password
- is_encrypted = zinfo.flag_bits & 0x1
- zd = None
- if is_encrypted:
- if not pwd:
- pwd = self.pwd
- if not pwd:
- raise RuntimeError, "File %s is encrypted, " \
- "password required for extraction" % name
-
- zd = _ZipDecrypter(pwd)
- # The first 12 bytes in the cypher stream is an encryption header
- # used to strengthen the algorithm. The first 11 bytes are
- # completely random, while the 12th contains the MSB of the CRC,
- # or the MSB of the file time depending on the header type
- # and is used to check the correctness of the password.
- bytes = zef_file.read(12)
- h = map(zd, bytes[0:12])
- if zinfo.flag_bits & 0x8:
- # compare against the file type from extended local headers
- check_byte = (zinfo._raw_time >> 8) & 0xff
- else:
- # compare against the CRC otherwise
- check_byte = (zinfo.CRC >> 24) & 0xff
- if ord(h[11]) != check_byte:
- raise RuntimeError("Bad password for file", name)
-
- return ZipExtFile(zef_file, mode, zinfo, zd)
+ return ZipExtFileWithClose(zef_file, mode, zinfo, zd)
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
@@ -989,7 +1002,6 @@
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
-
source = self.open(member, pwd=pwd)
target = file(targetpath, "wb")
shutil.copyfileobj(source, target)
From noreply at buildbot.pypy.org Thu Apr 12 17:43:18 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Thu, 12 Apr 2012 17:43:18 +0200 (CEST)
Subject: [pypy-commit] pypy default: Fix the test. The issue is not
arg+memo. The issue is that you cannot
Message-ID: <20120412154318.104B182F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch:
Changeset: r54315:1da1c1632353
Date: 2012-04-12 17:41 +0200
http://bitbucket.org/pypy/pypy/changeset/1da1c1632353/
Log: Fix the test. The issue is not arg+memo. The issue is that you
cannot call the specialize:arg function f with "f(i)", even if you
just did "if i == 2" before.
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -3746,9 +3746,9 @@
return g(i)
def main(i):
if i == 2:
- return f(i)
+ return f(2)
elif i == 3:
- return f(i)
+ return f(3)
else:
raise NotImplementedError
From noreply at buildbot.pypy.org Thu Apr 12 22:02:18 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 22:02:18 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: enough support for
nursery_barriers to pass one test, still xxx left
Message-ID: <20120412200218.457FB82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54316:399757c26f46
Date: 2012-04-12 22:01 +0200
http://bitbucket.org/pypy/pypy/changeset/399757c26f46/
Log: enough support for nursery_barriers to pass one test, still xxx left
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -305,12 +305,10 @@
# GCFLAG_HAS_SHADOW to their future location at the next
# minor collection.
self.nursery_objects_shadows = self.AddressDict()
- # all pinned objects in the nursery
- self.pinned_objects = self.AddressStack()
# all pinned objects that were in the nursery *before* last
# minor collect. This is a sorted stack that should be consulted when
# considering next nursery ceiling
- self.nursery_barriers = self.AddressStack()
+ self.nursery_barriers = self.AddressDeque()
#
# Allocate a nursery. In case of auto_nursery_size, start by
@@ -452,6 +450,7 @@
def debug_rotate_nursery(self):
if self.debug_rotating_nurseries is not None:
+ ll_assert(self.nursery_barriers.empty(), "non empty nursery barriers with rotating nursery")
debug_start("gc-debug")
oldnurs = self.nursery
llarena.arena_protect(oldnurs, self._nursery_memory_size(), True)
@@ -587,22 +586,27 @@
and finally reserve 'totalsize' bytes at the start of the
now-empty nursery.
"""
- self.minor_collection()
+ if not self.nursery_top == self.nursery + self.nursery_size:
+ xxx
+ self.minor_collection(totalsize)
+ # try allocating now, otherwise we do a major collect
+ do_major_collect = False
#
- if self.get_total_memory_used() > self.next_major_collection_threshold:
+ if do_major_collect or (self.get_total_memory_used() > self.next_major_collection_threshold):
self.major_collection()
#
# The nursery might not be empty now, because of
# execute_finalizers(). If it is almost full again,
# we need to fix it with another call to minor_collection().
if self.nursery_free + totalsize > self.nursery_top:
- self.minor_collection()
+ self.minor_collection(totalsize)
#
result = self.nursery_free
self.nursery_free = result + totalsize
ll_assert(self.nursery_free <= self.nursery_top, "nursery overflow")
#
if self.debug_tiny_nursery >= 0: # for debugging
+ ll_assert(not self.nursery_barriers.non_empty(), "no support for nursery debug and pinning")
if self.nursery_top - self.nursery_free > self.debug_tiny_nursery:
self.nursery_free = self.nursery_top - self.debug_tiny_nursery
#
@@ -1245,7 +1249,7 @@
# ----------
# Nursery collection
- def minor_collection(self):
+ def minor_collection(self, min_size=0):
"""Perform a minor collection: find the objects from the nursery
that remain alive and move them out."""
#
@@ -1303,9 +1307,34 @@
#
# All live nursery objects are out, and the rest dies. Fill
# the whole nursery with zero and reset the current nursery pointer.
- llarena.arena_reset(self.nursery, self.nursery_size, 2)
+ # self.nursery_barriers are *not* freed
+ # XXX sort the nursery_barriers
+ new_barriers = self.AddressDeque()
+ prev = self.nursery
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ while self.nursery_barriers.non_empty():
+ next = self.nursery_barriers.popleft()
+ llarena.arena_reset(prev, next - prev, 2)
+ # clean the visited flag
+ obj = next + size_gc_header
+ self.header(obj).tid &= ~GCFLAG_VISITED
+ prev = prev + (next - prev) + (size_gc_header +
+ self.get_size(obj))
+ new_barriers.append(next)
+ llarena.arena_reset(prev, self.nursery_top - prev, 2)
+ self.nursery_barriers.delete()
+ self.nursery_barriers = new_barriers
self.debug_rotate_nursery()
self.nursery_free = self.nursery
+ self.nursery_barriers.append(self.nursery + self.nursery_size)
+ self.nursery_top = self.nursery_barriers.popleft()
+ while self.nursery_barriers.non_empty() and self.nursery_free + min_size > self.nursery_top:
+ cur_obj_size = size_gc_header + self.get_size(self.nursery_free +
+ size_gc_header)
+ self.nursery_free = self.nursery_free + cur_obj_size
+ self.nursery_top = self.nursery_barriers.popleft()
+ if self.nursery_free + min_size > self.nursery_top:
+ ll_assert(False, "too many pinned objects")
#
debug_print("minor collect, total memory used:",
self.get_total_memory_used())
@@ -1442,7 +1471,16 @@
return
#
size_gc_header = self.gcheaderbuilder.size_gc_header
- if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
+ if self.header(obj).tid & GCFLAG_PINNED:
+ hdr = self.header(obj)
+ if hdr.tid & GCFLAG_VISITED:
+ return
+ hdr.tid |= GCFLAG_VISITED
+ ll_assert(not self.header(obj).tid & GCFLAG_HAS_SHADOW, "support shadow with pinning")
+ ll_assert(not self.header(obj).tid & GCFLAG_HAS_CARDS, "support cards with pinning")
+ self.nursery_barriers.append(obj - size_gc_header)
+ return
+ elif self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
#
# Common case: 'obj' was not already forwarded (otherwise
# tid == -42, containing all flags), and it doesn't have the
diff --git a/pypy/rpython/memory/test/test_gc.py b/pypy/rpython/memory/test/test_gc.py
--- a/pypy/rpython/memory/test/test_gc.py
+++ b/pypy/rpython/memory/test/test_gc.py
@@ -6,8 +6,7 @@
from pypy.rpython.test.test_llinterp import get_interpreter
from pypy.rpython.lltypesystem import lltype
from pypy.rpython.lltypesystem.lloperation import llop
-from pypy.rlib.objectmodel import we_are_translated
-from pypy.rlib.objectmodel import compute_unique_id
+from pypy.rlib.objectmodel import we_are_translated, compute_unique_id
from pypy.rlib import rgc
from pypy.rlib.rstring import StringBuilder
from pypy.rlib.rarithmetic import LONG_BIT
@@ -938,5 +937,34 @@
GC_CAN_MALLOC_NONMOVABLE = True
BUT_HOW_BIG_IS_A_BIG_STRING = 11*WORD
+ # those tests are here because they'll be messy and useless
+ # on GCs that can't pin objects
+
+ def test_pinning_collect(self):
+ from pypy.rpython.lltypesystem import llmemory
+
+ TP = lltype.GcStruct('x', ('x', lltype.Signed), ('y', lltype.Signed))
+
+ def f(i):
+ e = lltype.malloc(TP)
+ e.x = 3
+ prev = llmemory.cast_ptr_to_adr(e)
+ rgc.pin(e)
+ for k in range(i):
+ lltype.malloc(TP)
+ res = int(llmemory.cast_ptr_to_adr(e) == prev)
+ for k in range(i):
+ lltype.malloc(TP)
+ res += int(llmemory.cast_ptr_to_adr(e) == prev)
+ assert e.x == 3 # noone overwrote it
+ rgc.unpin(e)
+ for k in range(i):
+ lltype.malloc(TP)
+ assert e.x == 3 # noone overwrote it
+ return res
+
+ res = self.interpret(f, [10])
+ assert res == 2
+
class TestMiniMarkGCCardMarking(TestMiniMarkGC):
GC_PARAMS = {'card_page_indices': 4}
From noreply at buildbot.pypy.org Thu Apr 12 23:32:10 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 23:32:10 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: pass more of a test and
write insert for support. crap ugly
Message-ID: <20120412213210.EC82A82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54317:aa6c3180899c
Date: 2012-04-12 23:31 +0200
http://bitbucket.org/pypy/pypy/changeset/aa6c3180899c/
Log: pass more of a test and write insert for support. crap ugly
diff --git a/pypy/rpython/lltypesystem/llarena.py b/pypy/rpython/lltypesystem/llarena.py
--- a/pypy/rpython/lltypesystem/llarena.py
+++ b/pypy/rpython/lltypesystem/llarena.py
@@ -394,7 +394,7 @@
arena_addr = getfakearenaaddress(arena_addr)
assert arena_addr.offset == 0
assert size == arena_addr.arena.nbytes
- arena_addr.arena.set_protect(inaccessible)
+ arena_addr.arena.set_protect(inaccessible)
# ____________________________________________________________
#
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -247,6 +247,8 @@
self.nursery_top = NULL
self.debug_tiny_nursery = -1
self.debug_rotating_nurseries = None
+ self.surviving_pinned_objects = NULL
+ self.nursery_barriers = NULL
#
# The ArenaCollection() handles the nonmovable objects allocation.
if ArenaCollectionClass is None:
@@ -306,7 +308,7 @@
# minor collection.
self.nursery_objects_shadows = self.AddressDict()
# all pinned objects that were in the nursery *before* last
- # minor collect. This is a sorted stack that should be consulted when
+ # minor collect. This is a sorted deque that should be consulted when
# considering next nursery ceiling
self.nursery_barriers = self.AddressDeque()
@@ -1257,6 +1259,8 @@
#
# Before everything else, remove from 'old_objects_pointing_to_young'
# the young arrays.
+ self.nursery_barriers.delete()
+ self.surviving_pinned_objects = self.AddressStack()
if self.young_rawmalloced_objects:
self.remove_young_arrays_from_old_objects_pointing_to_young()
#
@@ -1308,22 +1312,24 @@
# All live nursery objects are out, and the rest dies. Fill
# the whole nursery with zero and reset the current nursery pointer.
# self.nursery_barriers are *not* freed
- # XXX sort the nursery_barriers
- new_barriers = self.AddressDeque()
+ nursery_barriers = self.AddressDeque()
prev = self.nursery
size_gc_header = self.gcheaderbuilder.size_gc_header
- while self.nursery_barriers.non_empty():
- next = self.nursery_barriers.popleft()
- llarena.arena_reset(prev, next - prev, 2)
+ while self.surviving_pinned_objects.non_empty():
+ next = self.surviving_pinned_objects.pop()
+ assert next >= prev
+ size = llarena.getfakearenaaddress(next) - prev
+ llarena.arena_reset(prev, size, 2)
# clean the visited flag
obj = next + size_gc_header
self.header(obj).tid &= ~GCFLAG_VISITED
- prev = prev + (next - prev) + (size_gc_header +
+ prev = prev + size + (size_gc_header +
self.get_size(obj))
- new_barriers.append(next)
+ nursery_barriers.append(next)
llarena.arena_reset(prev, self.nursery_top - prev, 2)
- self.nursery_barriers.delete()
- self.nursery_barriers = new_barriers
+ self.surviving_pinned_objects.delete()
+ self.surviving_pinned_objects = NULL
+ self.nursery_barriers = nursery_barriers
self.debug_rotate_nursery()
self.nursery_free = self.nursery
self.nursery_barriers.append(self.nursery + self.nursery_size)
@@ -1478,7 +1484,8 @@
hdr.tid |= GCFLAG_VISITED
ll_assert(not self.header(obj).tid & GCFLAG_HAS_SHADOW, "support shadow with pinning")
ll_assert(not self.header(obj).tid & GCFLAG_HAS_CARDS, "support cards with pinning")
- self.nursery_barriers.append(obj - size_gc_header)
+ self.surviving_pinned_objects.insert(
+ llarena.getfakearenaaddress(obj - size_gc_header))
return
elif self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
#
diff --git a/pypy/rpython/memory/support.py b/pypy/rpython/memory/support.py
--- a/pypy/rpython/memory/support.py
+++ b/pypy/rpython/memory/support.py
@@ -173,6 +173,47 @@
chunk.items[count] = got
got = next
+ def insert(self, addr):
+ """ Insert addr in the already sorted stack to make sure
+ the smallest one is on top
+ """
+ if self.used_in_last_chunk == 0:
+ self.append(addr)
+ return
+ got = self.pop()
+ read = self.used_in_last_chunk - 1
+ if read == -1 and got <= addr:
+ self.append(addr)
+ self.append(got)
+ return
+ read_chunk = self.chunk
+ self.append(got)
+ if got > addr:
+ self.append(addr)
+ return
+ write = self.used_in_last_chunk
+ if self.used_in_last_chunk == chunk_size:
+ self.enlarge()
+ write = 0
+ self.used_in_last_chunk += 1
+ write_chunk = self.chunk
+ while got < addr and not read_chunk is null_chunk:
+ write_chunk.items[write] = got
+ write -= 1
+ if write < 0:
+ write_chunk = write_chunk.next
+ write = chunk_size - 1
+ got = read_chunk.items[read]
+ read -= 1
+ if read < 0:
+ read_chunk = read_chunk.next
+ read = chunk_size - 1
+ if got < addr:
+ write_chunk.items[write] = got
+ write_chunk.items[0] = addr
+ else:
+ write_chunk.items[write] = addr
+
cache[chunk_size] = AddressStack
return AddressStack
diff --git a/pypy/rpython/memory/test/test_gc.py b/pypy/rpython/memory/test/test_gc.py
--- a/pypy/rpython/memory/test/test_gc.py
+++ b/pypy/rpython/memory/test/test_gc.py
@@ -966,5 +966,35 @@
res = self.interpret(f, [10])
assert res == 2
+ def test_pinning_collect_2(self):
+ from pypy.rpython.lltypesystem import llmemory
+
+ TP = lltype.GcStruct('x', ('x', lltype.Signed), ('y', lltype.Signed))
+
+ def f(i):
+ e = lltype.malloc(TP)
+ e.x = 3
+ lltype.malloc(TP)
+ e2 = lltype.malloc(TP)
+ e2.x = 5
+ rgc.pin(e2)
+ rgc.pin(e)
+ prev = llmemory.cast_ptr_to_adr(e)
+ prev2 = llmemory.cast_ptr_to_adr(e2)
+ for k in range(i):
+ lltype.malloc(TP)
+ res = int(llmemory.cast_ptr_to_adr(e) == prev)
+ res += int(llmemory.cast_ptr_to_adr(e2) == prev2)
+ rgc.unpin(e)
+ for k in range(i):
+ lltype.malloc(TP)
+ rgc.unpin(e2)
+ assert e.x == 3 # noone overwrote it
+ assert e2.x == 5 # noone overwrote it
+ return res
+
+ res = self.interpret(f, [10])
+ assert res == 2
+
class TestMiniMarkGCCardMarking(TestMiniMarkGC):
GC_PARAMS = {'card_page_indices': 4}
diff --git a/pypy/rpython/memory/test/test_support.py b/pypy/rpython/memory/test/test_support.py
--- a/pypy/rpython/memory/test/test_support.py
+++ b/pypy/rpython/memory/test/test_support.py
@@ -3,7 +3,7 @@
from pypy.rpython.memory.support import get_address_deque
from pypy.rpython.test.test_llinterp import interpret
-from pypy.rpython.lltypesystem import lltype, llmemory
+from pypy.rpython.lltypesystem import lltype, llmemory, llarena
from pypy.rpython.lltypesystem.llmemory import raw_malloc, raw_free, NULL
class TestAddressStack(object):
@@ -94,6 +94,26 @@
assert a == addrs[i]
assert not ll.non_empty()
+ def test_insert(self):
+ AddressStack = get_address_stack(chunk_size=5)
+ ll = AddressStack()
+ lla = llarena.arena_malloc(10, 2)
+ addrs = [lla + i for i in range(10)]
+ ll.insert(addrs[2])
+ ll.insert(addrs[1])
+ ll.insert(addrs[5])
+ ll.insert(addrs[4])
+ ll.insert(addrs[6])
+ ll.insert(addrs[9])
+ ll.insert(addrs[0])
+ ll.insert(addrs[8])
+ ll.insert(addrs[7])
+ ll.insert(addrs[3])
+ expected = range(10)
+ for i in expected:
+ a = ll.pop()
+ assert a == addrs[i]
+
class TestAddressDeque:
def test_big_access(self):
From noreply at buildbot.pypy.org Thu Apr 12 23:37:29 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 23:37:29 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: surprisingly enough make
the test pass
Message-ID: <20120412213729.3CEFD82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54318:825c0462f962
Date: 2012-04-12 23:37 +0200
http://bitbucket.org/pypy/pypy/changeset/825c0462f962/
Log: surprisingly enough make the test pass
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -589,7 +589,17 @@
now-empty nursery.
"""
if not self.nursery_top == self.nursery + self.nursery_size:
- xxx
+ self.nursery_top = self.nursery_barriers.popleft()
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ while self.nursery_barriers.non_empty() and self.nursery_free + totalsize > self.nursery_top:
+ cur_obj_size = size_gc_header + self.get_size(
+ self.nursery_free + size_gc_header)
+ self.nursery_free = self.nursery_free + cur_obj_size
+ self.nursery_top = self.nursery_barriers.popleft()
+ if self.nursery_free + totalsize <= self.nursery_top:
+ llarena.arena_reserve(self.nursery_free, totalsize)
+ res = self.nursery_free
+ self.nursery_free = res + totalsize
self.minor_collection(totalsize)
# try allocating now, otherwise we do a major collect
do_major_collect = False
From noreply at buildbot.pypy.org Thu Apr 12 23:54:33 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Thu, 12 Apr 2012 23:54:33 +0200 (CEST)
Subject: [pypy-commit] benchmarks default: ok,
we cannot change numbers like this. It makes benchmark results hike
wildly,
Message-ID: <20120412215433.C6B4882F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch:
Changeset: r182:0a63cb1bbb5f
Date: 2012-04-12 23:54 +0200
http://bitbucket.org/pypy/benchmarks/changeset/0a63cb1bbb5f/
Log: ok, we cannot change numbers like this. It makes benchmark results
hike wildly, because of both the GC warmup and JIT warmup
diff --git a/benchmarks.py b/benchmarks.py
--- a/benchmarks.py
+++ b/benchmarks.py
@@ -49,7 +49,8 @@
for name in ['expand', 'integrate', 'sum', 'str']:
_register_new_bm('bm_sympy', 'sympy_' + name,
globals(), bm_env={'PYTHONPATH': relative('lib/sympy')},
- extra_args=['--benchmark=' + name])
+ extra_args=['--benchmark=' + name],
+ iteration_scaling=.1)
for name in ['xml', 'text']:
_register_new_bm('bm_genshi', 'genshi_' + name,
From noreply at buildbot.pypy.org Fri Apr 13 05:43:23 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 05:43:23 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: merge default into branch
Message-ID: <20120413034323.1187B82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54319:69f0499a8cb2
Date: 2012-04-11 17:41 -0700
http://bitbucket.org/pypy/pypy/changeset/69f0499a8cb2/
Log: merge default into branch
diff --git a/_pytest/__init__.py b/_pytest/__init__.py
--- a/_pytest/__init__.py
+++ b/_pytest/__init__.py
@@ -1,2 +1,2 @@
#
-__version__ = '2.1.0.dev4'
+__version__ = '2.2.4.dev2'
diff --git a/_pytest/assertion/__init__.py b/_pytest/assertion/__init__.py
--- a/_pytest/assertion/__init__.py
+++ b/_pytest/assertion/__init__.py
@@ -2,35 +2,25 @@
support for presenting detailed information in failing assertions.
"""
import py
-import imp
-import marshal
-import struct
import sys
import pytest
from _pytest.monkeypatch import monkeypatch
-from _pytest.assertion import reinterpret, util
-
-try:
- from _pytest.assertion.rewrite import rewrite_asserts
-except ImportError:
- rewrite_asserts = None
-else:
- import ast
+from _pytest.assertion import util
def pytest_addoption(parser):
group = parser.getgroup("debugconfig")
- group.addoption('--assertmode', action="store", dest="assertmode",
- choices=("on", "old", "off", "default"), default="default",
- metavar="on|old|off",
+ group.addoption('--assert', action="store", dest="assertmode",
+ choices=("rewrite", "reinterp", "plain",),
+ default="rewrite", metavar="MODE",
help="""control assertion debugging tools.
-'off' performs no assertion debugging.
-'old' reinterprets the expressions in asserts to glean information.
-'on' (the default) rewrites the assert statements in test modules to provide
-sub-expression results.""")
+'plain' performs no assertion debugging.
+'reinterp' reinterprets assert statements after they failed to provide assertion expression information.
+'rewrite' (the default) rewrites assert statements in test modules on import
+to provide assert expression information. """)
group.addoption('--no-assert', action="store_true", default=False,
- dest="noassert", help="DEPRECATED equivalent to --assertmode=off")
+ dest="noassert", help="DEPRECATED equivalent to --assert=plain")
group.addoption('--nomagic', action="store_true", default=False,
- dest="nomagic", help="DEPRECATED equivalent to --assertmode=off")
+ dest="nomagic", help="DEPRECATED equivalent to --assert=plain")
class AssertionState:
"""State for the assertion plugin."""
@@ -40,89 +30,90 @@
self.trace = config.trace.root.get("assertion")
def pytest_configure(config):
- warn_about_missing_assertion()
mode = config.getvalue("assertmode")
if config.getvalue("noassert") or config.getvalue("nomagic"):
- if mode not in ("off", "default"):
- raise pytest.UsageError("assertion options conflict")
- mode = "off"
- elif mode == "default":
- mode = "on"
- if mode != "off":
- def callbinrepr(op, left, right):
- hook_result = config.hook.pytest_assertrepr_compare(
- config=config, op=op, left=left, right=right)
- for new_expl in hook_result:
- if new_expl:
- return '\n~'.join(new_expl)
+ mode = "plain"
+ if mode == "rewrite":
+ try:
+ import ast
+ except ImportError:
+ mode = "reinterp"
+ else:
+ if sys.platform.startswith('java'):
+ mode = "reinterp"
+ if mode != "plain":
+ _load_modules(mode)
m = monkeypatch()
config._cleanup.append(m.undo)
m.setattr(py.builtin.builtins, 'AssertionError',
reinterpret.AssertionError)
- m.setattr(util, '_reprcompare', callbinrepr)
- if mode == "on" and rewrite_asserts is None:
- mode = "old"
+ hook = None
+ if mode == "rewrite":
+ hook = rewrite.AssertionRewritingHook()
+ sys.meta_path.append(hook)
+ warn_about_missing_assertion(mode)
config._assertstate = AssertionState(config, mode)
+ config._assertstate.hook = hook
config._assertstate.trace("configured with mode set to %r" % (mode,))
-def _write_pyc(co, source_path):
- if hasattr(imp, "cache_from_source"):
- # Handle PEP 3147 pycs.
- pyc = py.path.local(imp.cache_from_source(str(source_path)))
- pyc.ensure()
- else:
- pyc = source_path + "c"
- mtime = int(source_path.mtime())
- fp = pyc.open("wb")
- try:
- fp.write(imp.get_magic())
- fp.write(struct.pack(" 0 and
- item.identifier != "__future__"):
+ elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
+ item.module != "__future__"):
lineno = item.lineno
break
pos += 1
@@ -118,9 +357,9 @@
for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
- nodes = collections.deque([mod])
+ nodes = [mod]
while nodes:
- node = nodes.popleft()
+ node = nodes.pop()
for name, field in ast.iter_fields(node):
if isinstance(field, list):
new = []
@@ -143,7 +382,7 @@
"""Get a new variable."""
# Use a character invalid in python identifiers to avoid clashing.
name = "@py_assert" + str(next(self.variable_counter))
- self.variables.add(name)
+ self.variables.append(name)
return name
def assign(self, expr):
@@ -198,7 +437,8 @@
# There's already a message. Don't mess with it.
return [assert_]
self.statements = []
- self.variables = set()
+ self.cond_chain = ()
+ self.variables = []
self.variable_counter = itertools.count()
self.stack = []
self.on_failure = []
@@ -220,11 +460,11 @@
else:
raise_ = ast.Raise(exc, None, None)
body.append(raise_)
- # Delete temporary variables.
- names = [ast.Name(name, ast.Del()) for name in self.variables]
- if names:
- delete = ast.Delete(names)
- self.statements.append(delete)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, ast.Name("None", ast.Load()))
+ self.statements.append(clear)
# Fix line numbers.
for stmt in self.statements:
set_location(stmt, assert_.lineno, assert_.col_offset)
@@ -240,21 +480,38 @@
return name, self.explanation_param(expr)
def visit_BoolOp(self, boolop):
- operands = []
- explanations = []
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
self.push_format_context()
- for operand in boolop.values:
- res, explanation = self.visit(operand)
- operands.append(res)
- explanations.append(explanation)
- expls = ast.Tuple([ast.Str(expl) for expl in explanations], ast.Load())
- is_or = ast.Num(isinstance(boolop.op, ast.Or))
- expl_template = self.helper("format_boolop",
- ast.Tuple(operands, ast.Load()), expls,
- is_or)
+ # Process each operand, short-circuting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+ self.on_failure.append(ast.If(cond, fail_inner, []))
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast.Call(app, [expl_format], [], None, None)
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
expl = self.pop_format_context(expl_template)
- res = self.assign(ast.BoolOp(boolop.op, operands))
- return res, self.explanation_param(expl)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
def visit_UnaryOp(self, unary):
pattern = unary_map[unary.op.__class__]
@@ -288,7 +545,7 @@
new_star, expl = self.visit(call.starargs)
arg_expls.append("*" + expl)
if call.kwargs:
- new_kwarg, expl = self.visit(call.kwarg)
+ new_kwarg, expl = self.visit(call.kwargs)
arg_expls.append("**" + expl)
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
diff --git a/_pytest/capture.py b/_pytest/capture.py
--- a/_pytest/capture.py
+++ b/_pytest/capture.py
@@ -11,22 +11,22 @@
group._addoption('-s', action="store_const", const="no", dest="capture",
help="shortcut for --capture=no.")
+ at pytest.mark.tryfirst
+def pytest_cmdline_parse(pluginmanager, args):
+ # we want to perform capturing already for plugin/conftest loading
+ if '-s' in args or "--capture=no" in args:
+ method = "no"
+ elif hasattr(os, 'dup') and '--capture=sys' not in args:
+ method = "fd"
+ else:
+ method = "sys"
+ capman = CaptureManager(method)
+ pluginmanager.register(capman, "capturemanager")
+
def addouterr(rep, outerr):
- repr = getattr(rep, 'longrepr', None)
- if not hasattr(repr, 'addsection'):
- return
for secname, content in zip(["out", "err"], outerr):
if content:
- repr.addsection("Captured std%s" % secname, content.rstrip())
-
-def pytest_unconfigure(config):
- # registered in config.py during early conftest.py loading
- capman = config.pluginmanager.getplugin('capturemanager')
- while capman._method2capture:
- name, cap = capman._method2capture.popitem()
- # XXX logging module may wants to close it itself on process exit
- # otherwise we could do finalization here and call "reset()".
- cap.suspend()
+ rep.sections.append(("Captured std%s" % secname, content))
class NoCapture:
def startall(self):
@@ -39,8 +39,9 @@
return "", ""
class CaptureManager:
- def __init__(self):
+ def __init__(self, defaultmethod=None):
self._method2capture = {}
+ self._defaultmethod = defaultmethod
def _maketempfile(self):
f = py.std.tempfile.TemporaryFile()
@@ -65,14 +66,6 @@
else:
raise ValueError("unknown capturing method: %r" % method)
- def _getmethod_preoptionparse(self, args):
- if '-s' in args or "--capture=no" in args:
- return "no"
- elif hasattr(os, 'dup') and '--capture=sys' not in args:
- return "fd"
- else:
- return "sys"
-
def _getmethod(self, config, fspath):
if config.option.capture:
method = config.option.capture
@@ -85,16 +78,22 @@
method = "sys"
return method
+ def reset_capturings(self):
+ for name, cap in self._method2capture.items():
+ cap.reset()
+
def resumecapture_item(self, item):
method = self._getmethod(item.config, item.fspath)
if not hasattr(item, 'outerr'):
item.outerr = ('', '') # we accumulate outerr on the item
return self.resumecapture(method)
- def resumecapture(self, method):
+ def resumecapture(self, method=None):
if hasattr(self, '_capturing'):
raise ValueError("cannot resume, already capturing with %r" %
(self._capturing,))
+ if method is None:
+ method = self._defaultmethod
cap = self._method2capture.get(method)
self._capturing = method
if cap is None:
@@ -164,17 +163,6 @@
def pytest_runtest_teardown(self, item):
self.resumecapture_item(item)
- def pytest__teardown_final(self, __multicall__, session):
- method = self._getmethod(session.config, None)
- self.resumecapture(method)
- try:
- rep = __multicall__.execute()
- finally:
- outerr = self.suspendcapture()
- if rep:
- addouterr(rep, outerr)
- return rep
-
def pytest_keyboard_interrupt(self, excinfo):
if hasattr(self, '_capturing'):
self.suspendcapture()
diff --git a/_pytest/config.py b/_pytest/config.py
--- a/_pytest/config.py
+++ b/_pytest/config.py
@@ -8,13 +8,15 @@
def pytest_cmdline_parse(pluginmanager, args):
config = Config(pluginmanager)
config.parse(args)
- if config.option.debug:
- config.trace.root.setwriter(sys.stderr.write)
return config
def pytest_unconfigure(config):
- for func in config._cleanup:
- func()
+ while 1:
+ try:
+ fin = config._cleanup.pop()
+ except IndexError:
+ break
+ fin()
class Parser:
""" Parser for command line arguments. """
@@ -81,6 +83,7 @@
self._inidict[name] = (help, type, default)
self._ininames.append(name)
+
class OptionGroup:
def __init__(self, name, description="", parser=None):
self.name = name
@@ -256,11 +259,14 @@
self.hook = self.pluginmanager.hook
self._inicache = {}
self._cleanup = []
-
+
@classmethod
def fromdictargs(cls, option_dict, args):
""" constructor useable for subprocesses. """
config = cls()
+ # XXX slightly crude way to initialize capturing
+ import _pytest.capture
+ _pytest.capture.pytest_cmdline_parse(config.pluginmanager, args)
config._preparse(args, addopts=False)
config.option.__dict__.update(option_dict)
for x in config.option.plugins:
@@ -285,11 +291,10 @@
def _setinitialconftest(self, args):
# capture output during conftest init (#issue93)
- from _pytest.capture import CaptureManager
- capman = CaptureManager()
- self.pluginmanager.register(capman, 'capturemanager')
- # will be unregistered in capture.py's unconfigure()
- capman.resumecapture(capman._getmethod_preoptionparse(args))
+ # XXX introduce load_conftest hook to avoid needing to know
+ # about capturing plugin here
+ capman = self.pluginmanager.getplugin("capturemanager")
+ capman.resumecapture()
try:
try:
self._conftest.setinitial(args)
@@ -334,6 +339,7 @@
# Note that this can only be called once per testing process.
assert not hasattr(self, 'args'), (
"can only parse cmdline args at most once per Config object")
+ self._origargs = args
self._preparse(args)
self._parser.hints.extend(self.pluginmanager._hints)
args = self._parser.parse_setoption(args, self.option)
@@ -341,6 +347,14 @@
args.append(py.std.os.getcwd())
self.args = args
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+ declared but might not yet be set in which case the line becomes the
+ the first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
def getini(self, name):
""" return configuration value from an ini file. If the
specified name hasn't been registered through a prior ``parse.addini``
@@ -422,7 +436,7 @@
def getcfg(args, inibasenames):
- args = [x for x in args if str(x)[0] != "-"]
+ args = [x for x in args if not str(x).startswith("-")]
if not args:
args = [py.path.local()]
for arg in args:
diff --git a/_pytest/core.py b/_pytest/core.py
--- a/_pytest/core.py
+++ b/_pytest/core.py
@@ -16,11 +16,10 @@
"junitxml resultlog doctest").split()
class TagTracer:
- def __init__(self, prefix="[pytest] "):
+ def __init__(self):
self._tag2proc = {}
self.writer = None
self.indent = 0
- self.prefix = prefix
def get(self, name):
return TagTracerSub(self, (name,))
@@ -30,7 +29,7 @@
if args:
indent = " " * self.indent
content = " ".join(map(str, args))
- self.writer("%s%s%s\n" %(self.prefix, indent, content))
+ self.writer("%s%s [%s]\n" %(indent, content, ":".join(tags)))
try:
self._tag2proc[tags](tags, args)
except KeyError:
@@ -212,6 +211,14 @@
self.register(mod, modname)
self.consider_module(mod)
+ def pytest_configure(self, config):
+ config.addinivalue_line("markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.")
+ config.addinivalue_line("markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.")
+
def pytest_plugin_registered(self, plugin):
import pytest
dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
@@ -432,10 +439,7 @@
def _preloadplugins():
_preinit.append(PluginManager(load=True))
-def main(args=None, plugins=None):
- """ returned exit code integer, after an in-process testing run
- with the given command line arguments, preloading an optional list
- of passed in plugin objects. """
+def _prepareconfig(args=None, plugins=None):
if args is None:
args = sys.argv[1:]
elif isinstance(args, py.path.local):
@@ -449,13 +453,19 @@
else: # subsequent calls to main will create a fresh instance
_pluginmanager = PluginManager(load=True)
hook = _pluginmanager.hook
+ if plugins:
+ for plugin in plugins:
+ _pluginmanager.register(plugin)
+ return hook.pytest_cmdline_parse(
+ pluginmanager=_pluginmanager, args=args)
+
+def main(args=None, plugins=None):
+ """ returned exit code integer, after an in-process testing run
+ with the given command line arguments, preloading an optional list
+ of passed in plugin objects. """
try:
- if plugins:
- for plugin in plugins:
- _pluginmanager.register(plugin)
- config = hook.pytest_cmdline_parse(
- pluginmanager=_pluginmanager, args=args)
- exitstatus = hook.pytest_cmdline_main(config=config)
+ config = _prepareconfig(args, plugins)
+ exitstatus = config.hook.pytest_cmdline_main(config=config)
except UsageError:
e = sys.exc_info()[1]
sys.stderr.write("ERROR: %s\n" %(e.args[0],))
diff --git a/_pytest/helpconfig.py b/_pytest/helpconfig.py
--- a/_pytest/helpconfig.py
+++ b/_pytest/helpconfig.py
@@ -1,7 +1,7 @@
""" version info, help messages, tracing configuration. """
import py
import pytest
-import inspect, sys
+import os, inspect, sys
from _pytest.core import varnames
def pytest_addoption(parser):
@@ -18,7 +18,29 @@
help="trace considerations of conftest.py files."),
group.addoption('--debug',
action="store_true", dest="debug", default=False,
- help="generate and show internal debugging information.")
+ help="store internal tracing debug information in 'pytestdebug.log'.")
+
+
+def pytest_cmdline_parse(__multicall__):
+ config = __multicall__.execute()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ f = open(path, 'w')
+ config._debugfile = f
+ f.write("versions pytest-%s, py-%s, python-%s\ncwd=%s\nargs=%s\n\n" %(
+ pytest.__version__, py.__version__, ".".join(map(str, sys.version_info)),
+ os.getcwd(), config._origargs))
+ config.trace.root.setwriter(f.write)
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+ return config
+
+ at pytest.mark.trylast
+def pytest_unconfigure(config):
+ if hasattr(config, '_debugfile'):
+ config._debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" %
+ config._debugfile.name)
+ config.trace.root.setwriter(None)
def pytest_cmdline_main(config):
@@ -34,6 +56,7 @@
elif config.option.help:
config.pluginmanager.do_configure(config)
showhelp(config)
+ config.pluginmanager.do_unconfigure(config)
return 0
def showhelp(config):
@@ -91,7 +114,7 @@
verinfo = getpluginversioninfo(config)
if verinfo:
lines.extend(verinfo)
-
+
if config.option.traceconfig:
lines.append("active plugins:")
plugins = []
diff --git a/_pytest/hookspec.py b/_pytest/hookspec.py
--- a/_pytest/hookspec.py
+++ b/_pytest/hookspec.py
@@ -121,16 +121,23 @@
def pytest_itemstart(item, node=None):
""" (deprecated, use pytest_runtest_logstart). """
-def pytest_runtest_protocol(item):
- """ implements the standard runtest_setup/call/teardown protocol including
- capturing exceptions and calling reporting hooks on the results accordingly.
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nexitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
:return boolean: True if no further hook implementations should be invoked.
"""
pytest_runtest_protocol.firstresult = True
def pytest_runtest_logstart(nodeid, location):
- """ signal the start of a test run. """
+ """ signal the start of running a single test item. """
def pytest_runtest_setup(item):
""" called before ``pytest_runtest_call(item)``. """
@@ -138,8 +145,14 @@
def pytest_runtest_call(item):
""" called to execute the test ``item``. """
-def pytest_runtest_teardown(item):
- """ called after ``pytest_runtest_call``. """
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nexitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
def pytest_runtest_makereport(item, call):
""" return a :py:class:`_pytest.runner.TestReport` object
@@ -149,15 +162,8 @@
pytest_runtest_makereport.firstresult = True
def pytest_runtest_logreport(report):
- """ process item test report. """
-
-# special handling for final teardown - somewhat internal for now
-def pytest__teardown_final(session):
- """ called before test session finishes. """
-pytest__teardown_final.firstresult = True
-
-def pytest__teardown_final_logerror(report, session):
- """ called if runtest_teardown_final failed. """
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
# -------------------------------------------------------------------------
# test session related hooks
diff --git a/_pytest/junitxml.py b/_pytest/junitxml.py
--- a/_pytest/junitxml.py
+++ b/_pytest/junitxml.py
@@ -25,21 +25,39 @@
long = int
+class Junit(py.xml.Namespace):
+ pass
+
+
# We need to get the subset of the invalid unicode ranges according to
# XML 1.0 which are valid in this python build. Hence we calculate
# this dynamically instead of hardcoding it. The spec range of valid
# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
# | [#x10000-#x10FFFF]
-_illegal_unichrs = [(0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x19),
- (0xD800, 0xDFFF), (0xFDD0, 0xFFFF)]
-_illegal_ranges = [unicode("%s-%s") % (unichr(low), unichr(high))
- for (low, high) in _illegal_unichrs
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = (
+ (0x20, 0xD7FF),
+ (0xE000, 0xFFFD),
+ (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges
if low < sys.maxunicode]
-illegal_xml_re = re.compile(unicode('[%s]') %
- unicode('').join(_illegal_ranges))
-del _illegal_unichrs
-del _illegal_ranges
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') %
+ unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+def bin_xml_escape(arg):
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode('#x%02X') % i
+ else:
+ return unicode('#x%04X') % i
+ return illegal_xml_re.sub(repl, py.xml.escape(arg))
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting")
@@ -68,117 +86,97 @@
logfile = os.path.expanduser(os.path.expandvars(logfile))
self.logfile = os.path.normpath(logfile)
self.prefix = prefix
- self.test_logs = []
+ self.tests = []
self.passed = self.skipped = 0
self.failed = self.errors = 0
- self._durations = {}
def _opentestcase(self, report):
names = report.nodeid.split("::")
names[0] = names[0].replace("/", '.')
- names = tuple(names)
- d = {'time': self._durations.pop(report.nodeid, "0")}
names = [x.replace(".py", "") for x in names if x != "()"]
classnames = names[:-1]
if self.prefix:
classnames.insert(0, self.prefix)
- d['classname'] = ".".join(classnames)
- d['name'] = py.xml.escape(names[-1])
- attrs = ['%s="%s"' % item for item in sorted(d.items())]
- self.test_logs.append("\n" % " ".join(attrs))
+ self.tests.append(Junit.testcase(
+ classname=".".join(classnames),
+ name=names[-1],
+ time=getattr(report, 'duration', 0)
+ ))
- def _closetestcase(self):
- self.test_logs.append("")
-
- def appendlog(self, fmt, *args):
- def repl(matchobj):
- i = ord(matchobj.group())
- if i <= 0xFF:
- return unicode('#x%02X') % i
- else:
- return unicode('#x%04X') % i
- args = tuple([illegal_xml_re.sub(repl, py.xml.escape(arg))
- for arg in args])
- self.test_logs.append(fmt % args)
+ def append(self, obj):
+ self.tests[-1].append(obj)
def append_pass(self, report):
self.passed += 1
- self._opentestcase(report)
- self._closetestcase()
def append_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
if "xfail" in report.keywords:
- self.appendlog(
- '')
+ self.append(
+ Junit.skipped(message="xfail-marked test passes unexpectedly"))
self.skipped += 1
else:
- self.appendlog('%s',
- report.longrepr)
+ sec = dict(report.sections)
+ fail = Junit.failure(message="test failure")
+ fail.append(str(report.longrepr))
+ self.append(fail)
+ for name in ('out', 'err'):
+ content = sec.get("Captured std%s" % name)
+ if content:
+ tag = getattr(Junit, 'system-'+name)
+ self.append(tag(bin_xml_escape(content)))
self.failed += 1
- self._closetestcase()
def append_collect_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.failure(str(report.longrepr),
+ message="collection failure"))
self.errors += 1
def append_collect_skipped(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.skipped(str(report.longrepr),
+ message="collection skipped"))
self.skipped += 1
def append_error(self, report):
- self._opentestcase(report)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.error(str(report.longrepr),
+ message="test setup failure"))
self.errors += 1
def append_skipped(self, report):
- self._opentestcase(report)
if "xfail" in report.keywords:
- self.appendlog(
- '%s',
- report.keywords['xfail'])
+ self.append(Junit.skipped(str(report.keywords['xfail']),
+ message="expected test failure"))
else:
filename, lineno, skipreason = report.longrepr
if skipreason.startswith("Skipped: "):
skipreason = skipreason[9:]
- self.appendlog('%s',
- skipreason, "%s:%s: %s" % report.longrepr,
- )
- self._closetestcase()
+ self.append(
+ Junit.skipped("%s:%s: %s" % report.longrepr,
+ type="pytest.skip",
+ message=skipreason
+ ))
self.skipped += 1
def pytest_runtest_logreport(self, report):
if report.passed:
- self.append_pass(report)
+ if report.when == "call": # ignore setup/teardown
+ self._opentestcase(report)
+ self.append_pass(report)
elif report.failed:
+ self._opentestcase(report)
if report.when != "call":
self.append_error(report)
else:
self.append_failure(report)
elif report.skipped:
+ self._opentestcase(report)
self.append_skipped(report)
- def pytest_runtest_call(self, item, __multicall__):
- start = time.time()
- try:
- return __multicall__.execute()
- finally:
- self._durations[item.nodeid] = time.time() - start
-
def pytest_collectreport(self, report):
if not report.passed:
+ self._opentestcase(report)
if report.failed:
self.append_collect_failure(report)
else:
@@ -187,10 +185,11 @@
def pytest_internalerror(self, excrepr):
self.errors += 1
data = py.xml.escape(excrepr)
- self.test_logs.append(
- '\n'
- ' '
- '%s' % data)
+ self.tests.append(
+ Junit.testcase(
+ Junit.error(data, message="internal error"),
+ classname="pytest",
+ name="internal"))
def pytest_sessionstart(self, session):
self.suite_start_time = time.time()
@@ -204,17 +203,17 @@
suite_stop_time = time.time()
suite_time_delta = suite_stop_time - self.suite_start_time
numtests = self.passed + self.failed
+
logfile.write('')
- logfile.write('')
- logfile.writelines(self.test_logs)
- logfile.write('')
+ logfile.write(Junit.testsuite(
+ self.tests,
+ name="",
+ errors=self.errors,
+ failures=self.failed,
+ skips=self.skipped,
+ tests=numtests,
+ time="%.3f" % suite_time_delta,
+ ).unicode(indent=0))
logfile.close()
def pytest_terminal_summary(self, terminalreporter):
diff --git a/_pytest/main.py b/_pytest/main.py
--- a/_pytest/main.py
+++ b/_pytest/main.py
@@ -2,7 +2,7 @@
import py
import pytest, _pytest
-import os, sys
+import os, sys, imp
tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
# exitcodes for the command line
@@ -11,6 +11,8 @@
EXIT_INTERRUPTED = 2
EXIT_INTERNALERROR = 3
+name_re = py.std.re.compile("^[a-zA-Z_]\w*$")
+
def pytest_addoption(parser):
parser.addini("norecursedirs", "directory patterns to avoid for recursion",
type="args", default=('.*', 'CVS', '_darcs', '{arch}'))
@@ -27,6 +29,9 @@
action="store", type="int", dest="maxfail", default=0,
help="exit after first num failures or errors.")
+ group._addoption('--strict', action="store_true",
+ help="run pytest in strict mode, warnings become errors.")
+
group = parser.getgroup("collect", "collection")
group.addoption('--collectonly',
action="store_true", dest="collectonly",
@@ -48,7 +53,7 @@
def pytest_namespace():
collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
return dict(collect=collect)
-
+
def pytest_configure(config):
py.test.config = config # compatibiltiy
if config.option.exitfirst:
@@ -77,11 +82,11 @@
session.exitstatus = EXIT_INTERNALERROR
if excinfo.errisinstance(SystemExit):
sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(session=session,
+ exitstatus=session.exitstatus or (session._testsfailed and 1))
if not session.exitstatus and session._testsfailed:
session.exitstatus = EXIT_TESTSFAILED
- if initstate >= 2:
- config.hook.pytest_sessionfinish(session=session,
- exitstatus=session.exitstatus)
if initstate >= 1:
config.pluginmanager.do_unconfigure(config)
return session.exitstatus
@@ -101,8 +106,12 @@
def pytest_runtestloop(session):
if session.config.option.collectonly:
return True
- for item in session.session.items:
- item.config.hook.pytest_runtest_protocol(item=item)
+ for i, item in enumerate(session.items):
+ try:
+ nextitem = session.items[i+1]
+ except IndexError:
+ nextitem = None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
if session.shouldstop:
raise session.Interrupted(session.shouldstop)
return True
@@ -132,7 +141,7 @@
return getattr(pytest, name)
return property(fget, None, None,
"deprecated attribute %r, use pytest.%s" % (name,name))
-
+
class Node(object):
""" base class for all Nodes in the collection tree.
Collector subclasses have children, Items are terminal nodes."""
@@ -143,13 +152,13 @@
#: the parent collector node.
self.parent = parent
-
+
#: the test config object
self.config = config or parent.config
#: the collection this node is part of
self.session = session or parent.session
-
+
#: filesystem path where this node was collected from
self.fspath = getattr(parent, 'fspath', None)
self.ihook = self.session.gethookproxy(self.fspath)
@@ -224,13 +233,13 @@
def listchain(self):
""" return list of all parent collectors up to self,
starting from root of collection tree. """
- l = [self]
- while 1:
- x = l[0]
- if x.parent is not None: # and x.parent.parent is not None:
- l.insert(0, x.parent)
- else:
- return l
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
def listnames(self):
return [x.name for x in self.listchain()]
@@ -325,6 +334,8 @@
""" a basic test invocation item. Note that for a single function
there might be multiple test invocation items.
"""
+ nextitem = None
+
def reportinfo(self):
return self.fspath, None, ""
@@ -399,6 +410,7 @@
self._notfound = []
self._initialpaths = set()
self._initialparts = []
+ self.items = items = []
for arg in args:
parts = self._parsearg(arg)
self._initialparts.append(parts)
@@ -414,7 +426,6 @@
if not genitems:
return rep.result
else:
- self.items = items = []
if rep.passed:
for node in rep.result:
self.items.extend(self.genitems(node))
@@ -469,16 +480,29 @@
return True
def _tryconvertpyarg(self, x):
- try:
- mod = __import__(x, None, None, ['__doc__'])
- except (ValueError, ImportError):
- return x
- p = py.path.local(mod.__file__)
- if p.purebasename == "__init__":
- p = p.dirpath()
- else:
- p = p.new(basename=p.purebasename+".py")
- return str(p)
+ mod = None
+ path = [os.path.abspath('.')] + sys.path
+ for name in x.split('.'):
+ # ignore anything that's not a proper name here
+ # else something like --pyargs will mess up '.'
+ # since imp.find_module will actually sometimes work for it
+ # but it's supposed to be considered a filesystem path
+ # not a package
+ if name_re.match(name) is None:
+ return x
+ try:
+ fd, mod, type_ = imp.find_module(name, path)
+ except ImportError:
+ return x
+ else:
+ if fd is not None:
+ fd.close()
+
+ if type_[2] != imp.PKG_DIRECTORY:
+ path = [os.path.dirname(mod)]
+ else:
+ path = [mod]
+ return mod
def _parsearg(self, arg):
""" return (fspath, names) tuple after checking the file exists. """
@@ -496,7 +520,7 @@
raise pytest.UsageError(msg + arg)
parts[0] = path
return parts
-
+
def matchnodes(self, matching, names):
self.trace("matchnodes", matching, names)
self.trace.root.indent += 1
diff --git a/_pytest/mark.py b/_pytest/mark.py
--- a/_pytest/mark.py
+++ b/_pytest/mark.py
@@ -14,12 +14,37 @@
"Terminate expression with ':' to make the first match match "
"all subsequent tests (usually file-order). ")
+ group._addoption("-m",
+ action="store", dest="markexpr", default="", metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'."
+ )
+
+ group.addoption("--markers", action="store_true", help=
+ "show markers (builtin, plugin and per-project ones).")
+
+ parser.addini("markers", "markers for test functions", 'linelist')
+
+def pytest_cmdline_main(config):
+ if config.option.markers:
+ config.pluginmanager.do_configure(config)
+ tw = py.io.TerminalWriter()
+ for line in config.getini("markers"):
+ name, rest = line.split(":", 1)
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config.pluginmanager.do_unconfigure(config)
+ return 0
+pytest_cmdline_main.tryfirst = True
+
def pytest_collection_modifyitems(items, config):
keywordexpr = config.option.keyword
- if not keywordexpr:
+ matchexpr = config.option.markexpr
+ if not keywordexpr and not matchexpr:
return
selectuntil = False
- if keywordexpr[-1] == ":":
+ if keywordexpr[-1:] == ":":
selectuntil = True
keywordexpr = keywordexpr[:-1]
@@ -29,21 +54,38 @@
if keywordexpr and skipbykeyword(colitem, keywordexpr):
deselected.append(colitem)
else:
- remaining.append(colitem)
if selectuntil:
keywordexpr = None
+ if matchexpr:
+ if not matchmark(colitem, matchexpr):
+ deselected.append(colitem)
+ continue
+ remaining.append(colitem)
if deselected:
config.hook.pytest_deselected(items=deselected)
items[:] = remaining
+class BoolDict:
+ def __init__(self, mydict):
+ self._mydict = mydict
+ def __getitem__(self, name):
+ return name in self._mydict
+
+def matchmark(colitem, matchexpr):
+ return eval(matchexpr, {}, BoolDict(colitem.obj.__dict__))
+
+def pytest_configure(config):
+ if config.option.strict:
+ pytest.mark._config = config
+
def skipbykeyword(colitem, keywordexpr):
""" return True if they given keyword expression means to
skip this collector/item.
"""
if not keywordexpr:
return
-
+
itemkeywords = getkeywords(colitem)
for key in filter(None, keywordexpr.split()):
eor = key[:1] == '-'
@@ -77,15 +119,31 @@
@py.test.mark.slowtest
def test_function():
pass
-
+
will set a 'slowtest' :class:`MarkInfo` object
on the ``test_function`` object. """
def __getattr__(self, name):
if name[0] == "_":
raise AttributeError(name)
+ if hasattr(self, '_config'):
+ self._check(name)
return MarkDecorator(name)
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+ self._markers = l = set()
+ for line in self._config.getini("markers"):
+ beginning = line.split(":", 1)
+ x = beginning[0].split("(", 1)[0]
+ l.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
class MarkDecorator:
""" A decorator for test functions and test classes. When applied
it will create :class:`MarkInfo` objects which may be
@@ -133,8 +191,7 @@
holder = MarkInfo(self.markname, self.args, self.kwargs)
setattr(func, self.markname, holder)
else:
- holder.kwargs.update(self.kwargs)
- holder.args += self.args
+ holder.add(self.args, self.kwargs)
return func
kw = self.kwargs.copy()
kw.update(kwargs)
@@ -150,27 +207,20 @@
self.args = args
#: keyword argument dictionary, empty if nothing specified
self.kwargs = kwargs
+ self._arglist = [(args, kwargs.copy())]
def __repr__(self):
return "" % (
self.name, self.args, self.kwargs)
-def pytest_itemcollected(item):
- if not isinstance(item, pytest.Function):
- return
- try:
- func = item.obj.__func__
- except AttributeError:
- func = getattr(item.obj, 'im_func', item.obj)
- pyclasses = (pytest.Class, pytest.Module)
- for node in item.listchain():
- if isinstance(node, pyclasses):
- marker = getattr(node.obj, 'pytestmark', None)
- if marker is not None:
- if isinstance(marker, list):
- for mark in marker:
- mark(func)
- else:
- marker(func)
- node = node.parent
- item.keywords.update(py.builtin._getfuncdict(func))
+ def add(self, args, kwargs):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._arglist.append((args, kwargs))
+ self.args += args
+ self.kwargs.update(kwargs)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ for args, kwargs in self._arglist:
+ yield MarkInfo(self.name, args, kwargs)
+
diff --git a/_pytest/monkeypatch.py b/_pytest/monkeypatch.py
--- a/_pytest/monkeypatch.py
+++ b/_pytest/monkeypatch.py
@@ -13,6 +13,7 @@
monkeypatch.setenv(name, value, prepend=False)
monkeypatch.delenv(name, value, raising=True)
monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
All modifications will be undone after the requesting
test function has finished. The ``raising``
@@ -30,6 +31,7 @@
def __init__(self):
self._setattr = []
self._setitem = []
+ self._cwd = None
def setattr(self, obj, name, value, raising=True):
""" set attribute ``name`` on ``obj`` to ``value``, by default
@@ -83,6 +85,17 @@
self._savesyspath = sys.path[:]
sys.path.insert(0, str(path))
+ def chdir(self, path):
+ """ change the current working directory to the specified path
+ path can be a string or a py.path.local object
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
def undo(self):
""" undo previous changes. This call consumes the
undo stack. Calling it a second time has no effect unless
@@ -95,9 +108,17 @@
self._setattr[:] = []
for dictionary, name, value in self._setitem:
if value is notset:
- del dictionary[name]
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
else:
dictionary[name] = value
self._setitem[:] = []
if hasattr(self, '_savesyspath'):
sys.path[:] = self._savesyspath
+ del self._savesyspath
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
diff --git a/_pytest/nose.py b/_pytest/nose.py
--- a/_pytest/nose.py
+++ b/_pytest/nose.py
@@ -13,6 +13,7 @@
call.excinfo = call2.excinfo
+ at pytest.mark.trylast
def pytest_runtest_setup(item):
if isinstance(item, (pytest.Function)):
if isinstance(item.parent, pytest.Generator):
diff --git a/_pytest/pastebin.py b/_pytest/pastebin.py
--- a/_pytest/pastebin.py
+++ b/_pytest/pastebin.py
@@ -38,7 +38,11 @@
del tr._tw.__dict__['write']
def getproxy():
- return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes
+ if sys.version_info < (3, 0):
+ from xmlrpclib import ServerProxy
+ else:
+ from xmlrpc.client import ServerProxy
+ return ServerProxy(url.xmlrpc).pastes
def pytest_terminal_summary(terminalreporter):
if terminalreporter.config.option.pastebin != "failed":
diff --git a/_pytest/pdb.py b/_pytest/pdb.py
--- a/_pytest/pdb.py
+++ b/_pytest/pdb.py
@@ -19,11 +19,13 @@
class pytestPDB:
""" Pseudo PDB that defers to the real pdb. """
item = None
+ collector = None
def set_trace(self):
""" invoke PDB set_trace debugging, dropping any IO capturing. """
frame = sys._getframe().f_back
- item = getattr(self, 'item', None)
+ item = self.item or self.collector
+
if item is not None:
capman = item.config.pluginmanager.getplugin("capturemanager")
out, err = capman.suspendcapture()
@@ -38,6 +40,14 @@
pytestPDB.item = item
pytest_runtest_setup = pytest_runtest_call = pytest_runtest_teardown = pdbitem
+ at pytest.mark.tryfirst
+def pytest_make_collect_report(__multicall__, collector):
+ try:
+ pytestPDB.collector = collector
+ return __multicall__.execute()
+ finally:
+ pytestPDB.collector = None
+
def pytest_runtest_makereport():
pytestPDB.item = None
@@ -60,7 +70,13 @@
tw.sep(">", "traceback")
rep.toterminal(tw)
tw.sep(">", "entering PDB")
- post_mortem(call.excinfo._excinfo[2])
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ if isinstance(call.excinfo.value, py.std.doctest.UnexpectedException):
+ tb = call.excinfo.value.exc_info[2]
+ else:
+ tb = call.excinfo._excinfo[2]
+ post_mortem(tb)
rep._pdbshown = True
return rep
diff --git a/_pytest/pytester.py b/_pytest/pytester.py
--- a/_pytest/pytester.py
+++ b/_pytest/pytester.py
@@ -25,6 +25,7 @@
_pytest_fullpath
except NameError:
_pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
+ _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
def pytest_funcarg___pytest(request):
return PytestArg(request)
@@ -313,16 +314,6 @@
result.extend(session.genitems(colitem))
return result
- def inline_genitems(self, *args):
- #config = self.parseconfig(*args)
- config = self.parseconfigure(*args)
- rec = self.getreportrecorder(config)
- session = Session(config)
- config.hook.pytest_sessionstart(session=session)
- session.perform_collect()
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return session.items, rec
-
def runitem(self, source):
# used from runner functional tests
item = self.getitem(source)
@@ -343,64 +334,57 @@
l = list(args) + [p]
reprec = self.inline_run(*l)
reports = reprec.getreports("pytest_runtest_logreport")
- assert len(reports) == 1, reports
- return reports[0]
+ assert len(reports) == 3, reports # setup/call/teardown
+ return reports[1]
+
+ def inline_genitems(self, *args):
+ return self.inprocess_run(list(args) + ['--collectonly'])
def inline_run(self, *args):
- args = ("-s", ) + args # otherwise FD leakage
- config = self.parseconfig(*args)
- reprec = self.getreportrecorder(config)
- #config.pluginmanager.do_configure(config)
- config.hook.pytest_cmdline_main(config=config)
- #config.pluginmanager.do_unconfigure(config)
- return reprec
+ items, rec = self.inprocess_run(args)
+ return rec
- def config_preparse(self):
- config = self.Config()
- for plugin in self.plugins:
- if isinstance(plugin, str):
- config.pluginmanager.import_plugin(plugin)
- else:
- if isinstance(plugin, dict):
- plugin = PseudoPlugin(plugin)
- if not config.pluginmanager.isregistered(plugin):
- config.pluginmanager.register(plugin)
- return config
+ def inprocess_run(self, args, plugins=None):
+ rec = []
+ items = []
+ class Collect:
+ def pytest_configure(x, config):
+ rec.append(self.getreportrecorder(config))
+ def pytest_itemcollected(self, item):
+ items.append(item)
+ if not plugins:
+ plugins = []
+ plugins.append(Collect())
+ ret = self.pytestmain(list(args), plugins=[Collect()])
+ reprec = rec[0]
+ reprec.ret = ret
+ assert len(rec) == 1
+ return items, reprec
def parseconfig(self, *args):
- if not args:
- args = (self.tmpdir,)
- config = self.config_preparse()
- args = list(args)
+ args = [str(x) for x in args]
for x in args:
if str(x).startswith('--basetemp'):
break
else:
args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
- config.parse(args)
+ import _pytest.core
+ config = _pytest.core._prepareconfig(args, self.plugins)
+ # the in-process pytest invocation needs to avoid leaking FDs
+ # so we register a "reset_capturings" call on the capturing manager
+ # and make sure it gets called
+ config._cleanup.append(
+ config.pluginmanager.getplugin("capturemanager").reset_capturings)
+ import _pytest.config
+ self.request.addfinalizer(
+ lambda: _pytest.config.pytest_unconfigure(config))
return config
- def reparseconfig(self, args=None):
- """ this is used from tests that want to re-invoke parse(). """
- if not args:
- args = [self.tmpdir]
- oldconfig = getattr(py.test, 'config', None)
- try:
- c = py.test.config = self.Config()
- c.basetemp = py.path.local.make_numbered_dir(prefix="reparse",
- keep=0, rootdir=self.tmpdir, lock_timeout=None)
- c.parse(args)
- c.pluginmanager.do_configure(c)
- self.request.addfinalizer(lambda: c.pluginmanager.do_unconfigure(c))
- return c
- finally:
- py.test.config = oldconfig
-
def parseconfigure(self, *args):
config = self.parseconfig(*args)
config.pluginmanager.do_configure(config)
self.request.addfinalizer(lambda:
- config.pluginmanager.do_unconfigure(config))
+ config.pluginmanager.do_unconfigure(config))
return config
def getitem(self, source, funcname="test_func"):
@@ -420,7 +404,6 @@
self.makepyfile(__init__ = "#")
self.config = config = self.parseconfigure(path, *configargs)
node = self.getnode(config, path)
- #config.pluginmanager.do_unconfigure(config)
return node
def collect_by_name(self, modcol, name):
@@ -437,9 +420,16 @@
return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
def pytestmain(self, *args, **kwargs):
- ret = pytest.main(*args, **kwargs)
- if ret == 2:
- raise KeyboardInterrupt()
+ class ResetCapturing:
+ @pytest.mark.trylast
+ def pytest_unconfigure(self, config):
+ capman = config.pluginmanager.getplugin("capturemanager")
+ capman.reset_capturings()
+ plugins = kwargs.setdefault("plugins", [])
+ rc = ResetCapturing()
+ plugins.append(rc)
+ return pytest.main(*args, **kwargs)
+
def run(self, *cmdargs):
return self._run(*cmdargs)
@@ -528,6 +518,8 @@
pexpect = py.test.importorskip("pexpect", "2.4")
if hasattr(sys, 'pypy_version_info') and '64' in py.std.platform.machine():
pytest.skip("pypy-64 bit not supported")
+ if sys.platform == "darwin":
+ pytest.xfail("pexpect does not work reliably on darwin?!")
logfile = self.tmpdir.join("spawn.out")
child = pexpect.spawn(cmd, logfile=logfile.open("w"))
child.timeout = expect_timeout
@@ -540,10 +532,6 @@
return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
py.io.saferepr(out),)
-class PseudoPlugin:
- def __init__(self, vars):
- self.__dict__.update(vars)
-
class ReportRecorder(object):
def __init__(self, hook):
self.hook = hook
@@ -565,10 +553,17 @@
def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
return [x.report for x in self.getcalls(names)]
- def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport", when=None):
+ def matchreport(self, inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport", when=None):
""" return a testreport whose dotted import path matches """
l = []
for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
if when and getattr(rep, 'when', None) != when:
continue
if not inamepart or inamepart in rep.nodeid.split("::"):
diff --git a/_pytest/python.py b/_pytest/python.py
--- a/_pytest/python.py
+++ b/_pytest/python.py
@@ -4,6 +4,7 @@
import sys
import pytest
from py._code.code import TerminalRepr
+from _pytest.monkeypatch import monkeypatch
import _pytest
cutdir = py.path.local(_pytest.__file__).dirpath()
@@ -26,6 +27,24 @@
showfuncargs(config)
return 0
+
+def pytest_generate_tests(metafunc):
+ try:
+ param = metafunc.function.parametrize
+ except AttributeError:
+ return
+ for p in param:
+ metafunc.parametrize(*p.args, **p.kwargs)
+
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in multiple different argument value sets. Example: "
+ "@parametrize('arg1', [1,2]) would lead to two calls of the decorated "
+ "test function, one with arg1=1 and another with arg1=2."
+ )
+
+
@pytest.mark.trylast
def pytest_namespace():
raises.Exception = pytest.fail.Exception
@@ -138,6 +157,7 @@
obj = obj.place_as
self._fslineno = py.code.getfslineno(obj)
+ assert isinstance(self._fslineno[1], int), obj
return self._fslineno
def reportinfo(self):
@@ -155,6 +175,7 @@
else:
fspath, lineno = self._getfslineno()
modpath = self.getmodpath()
+ assert isinstance(lineno, int)
return fspath, lineno, modpath
class PyCollectorMixin(PyobjMixin, pytest.Collector):
@@ -200,6 +221,7 @@
module = self.getparent(Module).obj
clscol = self.getparent(Class)
cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
metafunc = Metafunc(funcobj, config=self.config,
cls=cls, module=module)
gentesthook = self.config.hook.pytest_generate_tests
@@ -219,6 +241,19 @@
l.append(function)
return l
+def transfer_markers(funcobj, cls, mod):
+ # XXX this should rather be code in the mark plugin or the mark
+ # plugin should merge with the python plugin.
+ for holder in (cls, mod):
+ try:
+ pytestmark = holder.pytestmark
+ except AttributeError:
+ continue
+ if isinstance(pytestmark, list):
+ for mark in pytestmark:
+ mark(funcobj)
+ else:
+ pytestmark(funcobj)
class Module(pytest.File, PyCollectorMixin):
def _getobj(self):
@@ -226,13 +261,8 @@
def _importtestmodule(self):
# we assume we are only called once per module
- from _pytest import assertion
- assertion.before_module_import(self)
try:
- try:
- mod = self.fspath.pyimport(ensuresyspath=True)
- finally:
- assertion.after_module_import(self)
+ mod = self.fspath.pyimport(ensuresyspath=True)
except SyntaxError:
excinfo = py.code.ExceptionInfo()
raise self.CollectError(excinfo.getrepr(style="short"))
@@ -244,7 +274,8 @@
" %s\n"
"which is not the same as the test file we want to collect:\n"
" %s\n"
- "HINT: use a unique basename for your test file modules"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules"
% e.args
)
#print "imported test module", mod
@@ -374,6 +405,7 @@
tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno+1))
+
class Generator(FunctionMixin, PyCollectorMixin, pytest.Collector):
def collect(self):
# test generators are seen as collectors but they also
@@ -430,6 +462,7 @@
"yielded functions (deprecated) cannot have funcargs")
else:
if callspec is not None:
+ self.callspec = callspec
self.funcargs = callspec.funcargs or {}
self._genid = callspec.id
if hasattr(callspec, "param"):
@@ -506,15 +539,59 @@
request._fillfuncargs()
_notexists = object()
-class CallSpec:
- def __init__(self, funcargs, id, param):
- self.funcargs = funcargs
- self.id = id
+
+class CallSpec2(object):
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = _notexists
+ self._globalid_args = set()
+ self._globalparam = _notexists
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" %(arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is _notexists:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti(self, valtype, argnames, valset, id):
+ for arg,val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ getattr(self, valtype)[arg] = val
+ self._idlist.append(id)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not _notexists:
+ self._idlist.append(id)
if param is not _notexists:
- self.param = param
- def __repr__(self):
- return "" %(
- self.id, getattr(self, 'param', '?'), self.funcargs)
+ assert self._globalparam is _notexists
+ self._globalparam = param
+
class Metafunc:
def __init__(self, function, config=None, cls=None, module=None):
@@ -528,31 +605,71 @@
self._calls = []
self._ids = py.builtin.set()
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+ during the collection phase. If you need to setup expensive resources
+ you may pass indirect=True and implement a funcarg factory which can
+ perform the expensive setup just before a test is actually run.
+
+ :arg argnames: an argument name or a list of argument names
+
+ :arg argvalues: a list of values for the argname or a list of tuples of
+ values for the list of argument names.
+
+ :arg indirect: if True each argvalue corresponding to an argument will
+ be passed as request.param to its respective funcarg factory so
+ that it can perform more expensive setups during the setup phase of
+ a test rather than at collection time.
+
+ :arg ids: list of string ids each corresponding to the argvalues so
+ that they are part of the test id. If no ids are provided they will
+ be generated automatically from the argvalues.
+ """
+ if not isinstance(argnames, (tuple, list)):
+ argnames = (argnames,)
+ argvalues = [(val,) for val in argvalues]
+ if not indirect:
+ #XXX should we also check for the opposite case?
+ for arg in argnames:
+ if arg not in self.funcargnames:
+ raise ValueError("%r has no argument %r" %(self.function, arg))
+ valtype = indirect and "params" or "funcargs"
+ if not ids:
+ idmaker = IDMaker()
+ ids = list(map(idmaker, argvalues))
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ for i, valset in enumerate(argvalues):
+ assert len(valset) == len(argnames)
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti(valtype, argnames, valset, ids[i])
+ newcalls.append(newcallspec)
+ self._calls = newcalls
+
def addcall(self, funcargs=None, id=_notexists, param=_notexists):
- """ add a new call to the underlying test function during the
- collection phase of a test run. Note that request.addcall() is
- called during the test collection phase prior and independently
- to actual test execution. Therefore you should perform setup
- of resources in a funcarg factory which can be instrumented
- with the ``param``.
+ """ (deprecated, use parametrize) Add a new call to the underlying
+ test function during the collection phase of a test run. Note that
+ request.addcall() is called during the test collection phase prior and
+ independently to actual test execution. You should only use addcall()
+ if you need to specify multiple arguments of a test function.
:arg funcargs: argument keyword dictionary used when invoking
the test function.
:arg id: used for reporting and identification purposes. If you
- don't supply an `id` the length of the currently
- list of calls to the test function will be used.
+ don't supply an `id` an automatic unique id will be generated.
- :arg param: will be exposed to a later funcarg factory invocation
- through the ``request.param`` attribute. It allows to
- defer test fixture setup activities to when an actual
- test is run.
+ :arg param: a parameter which will be exposed to a later funcarg factory
+ invocation through the ``request.param`` attribute.
"""
assert funcargs is None or isinstance(funcargs, dict)
if funcargs is not None:
for name in funcargs:
if name not in self.funcargnames:
pytest.fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
if id is None:
raise ValueError("id=None not allowed")
if id is _notexists:
@@ -561,11 +678,26 @@
if id in self._ids:
raise ValueError("duplicate id %r" % id)
self._ids.add(id)
- self._calls.append(CallSpec(funcargs, id, param))
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+class IDMaker:
+ def __init__(self):
+ self.counter = 0
+ def __call__(self, valset):
+ l = []
+ for val in valset:
+ if not isinstance(val, (int, str)):
+ val = "."+str(self.counter)
+ self.counter += 1
+ l.append(str(val))
+ return "-".join(l)
class FuncargRequest:
""" A request for function arguments from a test function.
-
+
Note that there is an optional ``param`` attribute in case
there was an invocation to metafunc.addcall(param=...).
If no such call was done in a ``pytest_generate_tests``
@@ -637,7 +769,7 @@
def applymarker(self, marker):
- """ apply a marker to a single test function invocation.
+ """ Apply a marker to a single test function invocation.
This method is useful if you don't want to have a keyword/marker
on all function invocations.
@@ -649,7 +781,7 @@
self._pyfuncitem.keywords[marker.markname] = marker
def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
- """ return a testing resource managed by ``setup`` &
+ """ Return a testing resource managed by ``setup`` &
``teardown`` calls. ``scope`` and ``extrakey`` determine when the
``teardown`` function will be called so that subsequent calls to
``setup`` would recreate the resource.
@@ -698,11 +830,18 @@
self._raiselookupfailed(argname)
funcargfactory = self._name2factory[argname].pop()
oldarg = self._currentarg
- self._currentarg = argname
+ mp = monkeypatch()
+ mp.setattr(self, '_currentarg', argname)
+ try:
+ param = self._pyfuncitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ pass
+ else:
+ mp.setattr(self, 'param', param, raising=False)
try:
self._funcargs[argname] = res = funcargfactory(request=self)
finally:
- self._currentarg = oldarg
+ mp.undo()
return res
def _getscopeitem(self, scope):
@@ -817,8 +956,7 @@
>>> raises(ZeroDivisionError, f, x=0)
- A third possibility is to use a string which which will
- be executed::
+ A third possibility is to use a string to be executed::
>>> raises(ZeroDivisionError, "f(0)")
diff --git a/_pytest/resultlog.py b/_pytest/resultlog.py
--- a/_pytest/resultlog.py
+++ b/_pytest/resultlog.py
@@ -63,6 +63,8 @@
self.write_log_entry(testpath, lettercode, longrepr)
def pytest_runtest_logreport(self, report):
+ if report.when != "call" and report.passed:
+ return
res = self.config.hook.pytest_report_teststatus(report=report)
code = res[1]
if code == 'x':
@@ -89,5 +91,8 @@
self.log_outcome(report, code, longrepr)
def pytest_internalerror(self, excrepr):
- path = excrepr.reprcrash.path
+ reprcrash = getattr(excrepr, 'reprcrash', None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
self.write_log_entry(path, '!', str(excrepr))
diff --git a/_pytest/runner.py b/_pytest/runner.py
--- a/_pytest/runner.py
+++ b/_pytest/runner.py
@@ -1,6 +1,6 @@
""" basic collect and runtest protocol implementations """
-import py, sys
+import py, sys, time
from py._code.code import TerminalRepr
def pytest_namespace():
@@ -14,33 +14,60 @@
#
# pytest plugin hooks
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption('--durations',
+ action="store", type="int", default=None, metavar="N",
+ help="show N slowest setup/test durations (N=0 for all)."),
+
+def pytest_terminal_summary(terminalreporter):
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, 'duration'):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" %
+ (rep.duration, rep.when, nodeid))
+
def pytest_sessionstart(session):
session._setupstate = SetupState()
-
-def pytest_sessionfinish(session, exitstatus):
- hook = session.config.hook
- rep = hook.pytest__teardown_final(session=session)
- if rep:
- hook.pytest__teardown_final_logerror(session=session, report=rep)
- session.exitstatus = 1
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
class NodeInfo:
def __init__(self, location):
self.location = location
-def pytest_runtest_protocol(item):
+def pytest_runtest_protocol(item, nextitem):
item.ihook.pytest_runtest_logstart(
nodeid=item.nodeid, location=item.location,
)
- runtestprotocol(item)
+ runtestprotocol(item, nextitem=nextitem)
return True
-def runtestprotocol(item, log=True):
+def runtestprotocol(item, log=True, nextitem=None):
rep = call_and_report(item, "setup", log)
reports = [rep]
if rep.passed:
reports.append(call_and_report(item, "call", log))
- reports.append(call_and_report(item, "teardown", log))
+ reports.append(call_and_report(item, "teardown", log,
+ nextitem=nextitem))
return reports
def pytest_runtest_setup(item):
@@ -49,16 +76,8 @@
def pytest_runtest_call(item):
item.runtest()
-def pytest_runtest_teardown(item):
- item.session._setupstate.teardown_exact(item)
-
-def pytest__teardown_final(session):
- call = CallInfo(session._setupstate.teardown_all, when="teardown")
- if call.excinfo:
- ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir)
- call.excinfo.traceback = ntraceback.filter()
- longrepr = call.excinfo.getrepr(funcargs=True)
- return TeardownErrorReport(longrepr)
+def pytest_runtest_teardown(item, nextitem):
+ item.session._setupstate.teardown_exact(item, nextitem)
def pytest_report_teststatus(report):
if report.when in ("setup", "teardown"):
@@ -74,18 +93,18 @@
#
# Implementation
-def call_and_report(item, when, log=True):
- call = call_runtest_hook(item, when)
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
hook = item.ihook
report = hook.pytest_runtest_makereport(item=item, call=call)
- if log and (when == "call" or not report.passed):
+ if log:
hook.pytest_runtest_logreport(report=report)
return report
-def call_runtest_hook(item, when):
+def call_runtest_hook(item, when, **kwds):
hookname = "pytest_runtest_" + when
ihook = getattr(item.ihook, hookname)
- return CallInfo(lambda: ihook(item=item), when=when)
+ return CallInfo(lambda: ihook(item=item, **kwds), when=when)
class CallInfo:
""" Result/Exception info a function invocation. """
@@ -95,12 +114,16 @@
#: context of invocation: one of "setup", "call",
#: "teardown", "memocollect"
self.when = when
+ self.start = time.time()
try:
- self.result = func()
- except KeyboardInterrupt:
- raise
- except:
- self.excinfo = py.code.ExceptionInfo()
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.excinfo = py.code.ExceptionInfo()
+ finally:
+ self.stop = time.time()
def __repr__(self):
if self.excinfo:
@@ -120,6 +143,10 @@
return s
class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
def toterminal(self, out):
longrepr = self.longrepr
if hasattr(self, 'node'):
@@ -139,6 +166,7 @@
def pytest_runtest_makereport(item, call):
when = call.when
+ duration = call.stop-call.start
keywords = dict([(x,1) for x in item.keywords])
excinfo = call.excinfo
if not call.excinfo:
@@ -160,14 +188,15 @@
else: # exception in setup or teardown
longrepr = item._repr_failure_py(excinfo)
return TestReport(item.nodeid, item.location,
- keywords, outcome, longrepr, when)
+ keywords, outcome, longrepr, when,
+ duration=duration)
class TestReport(BaseReport):
""" Basic test report object (also used for setup and teardown calls if
they fail).
"""
def __init__(self, nodeid, location,
- keywords, outcome, longrepr, when):
+ keywords, outcome, longrepr, when, sections=(), duration=0, **extra):
#: normalized collection node id
self.nodeid = nodeid
@@ -179,16 +208,25 @@
#: a name -> value dictionary containing all keywords and
#: markers associated with a test invocation.
self.keywords = keywords
-
+
#: test outcome, always one of "passed", "failed", "skipped".
self.outcome = outcome
#: None or a failure representation.
self.longrepr = longrepr
-
+
#: one of 'setup', 'call', 'teardown' to indicate runtest phase.
self.when = when
+ #: list of (secname, data) extra information which needs to
+ #: marshallable
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
def __repr__(self):
return "" % (
self.nodeid, self.when, self.outcome)
@@ -196,8 +234,10 @@
class TeardownErrorReport(BaseReport):
outcome = "failed"
when = "teardown"
- def __init__(self, longrepr):
+ def __init__(self, longrepr, **extra):
self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
def pytest_make_collect_report(collector):
call = CallInfo(collector._memocollect, "memocollect")
@@ -219,11 +259,13 @@
getattr(call, 'result', None))
class CollectReport(BaseReport):
- def __init__(self, nodeid, outcome, longrepr, result):
+ def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
self.nodeid = nodeid
self.outcome = outcome
self.longrepr = longrepr
self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
@property
def location(self):
@@ -277,20 +319,22 @@
self._teardown_with_finalization(None)
assert not self._finalizers
- def teardown_exact(self, item):
- if self.stack and item == self.stack[-1]:
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
self._pop_and_teardown()
- else:
- self._callfinalizers(item)
def prepare(self, colitem):
""" setup objects along the collector chain to the test-method
and teardown previously setup objects."""
needed_collectors = colitem.listchain()
- while self.stack:
- if self.stack == needed_collectors[:len(self.stack)]:
- break
- self._pop_and_teardown()
+ self._teardown_towards(needed_collectors)
+
# check if the last collection node has raised an error
for col in self.stack:
if hasattr(col, '_prepare_exc'):
diff --git a/_pytest/skipping.py b/_pytest/skipping.py
--- a/_pytest/skipping.py
+++ b/_pytest/skipping.py
@@ -9,6 +9,21 @@
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "skipif(*conditions): skip the given test function if evaluation "
+ "of all conditions has a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. "
+ )
+ config.addinivalue_line("markers",
+ "xfail(*conditions, reason=None, run=True): mark the the test function "
+ "as an expected failure. Optionally specify a reason and run=False "
+ "if you don't even want to execute the test function. Any positional "
+ "condition strings will be evaluated (like with skipif) and if one is "
+ "False the marker will not be applied."
+ )
+
def pytest_namespace():
return dict(xfail=xfail)
@@ -117,6 +132,14 @@
def pytest_runtest_makereport(__multicall__, item, call):
if not isinstance(item, pytest.Function):
return
+ # unitttest special case, see setting of _unexpectedsuccess
+ if hasattr(item, '_unexpectedsuccess'):
+ rep = __multicall__.execute()
+ if rep.when == "call":
+ # we need to translate into how py.test encodes xpass
+ rep.keywords['xfail'] = "reason: " + item._unexpectedsuccess
+ rep.outcome = "failed"
+ return rep
if not (call.excinfo and
call.excinfo.errisinstance(py.test.xfail.Exception)):
evalxfail = getattr(item, '_evalxfail', None)
@@ -169,21 +192,23 @@
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
- show_failed(terminalreporter, lines)
+ show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
+ elif char == "E":
+ show_simple(terminalreporter, lines, 'error', "ERROR %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
-def show_failed(terminalreporter, lines):
+def show_simple(terminalreporter, lines, stat, format):
tw = terminalreporter._tw
- failed = terminalreporter.stats.get("failed")
+ failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = rep.nodeid
- lines.append("FAIL %s" %(pos, ))
+ lines.append(format %(pos, ))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
diff --git a/_pytest/terminal.py b/_pytest/terminal.py
--- a/_pytest/terminal.py
+++ b/_pytest/terminal.py
@@ -15,7 +15,7 @@
group._addoption('-r',
action="store", dest="reportchars", default=None, metavar="chars",
help="show extra test summary info as specified by chars (f)ailed, "
- "(s)skipped, (x)failed, (X)passed.")
+ "(E)error, (s)skipped, (x)failed, (X)passed.")
group._addoption('-l', '--showlocals',
action="store_true", dest="showlocals", default=False,
help="show locals in tracebacks (disabled by default).")
@@ -43,7 +43,8 @@
pass
else:
stdout = os.fdopen(newfd, stdout.mode, 1)
- config._toclose = stdout
+ config._cleanup.append(lambda: stdout.close())
+
reporter = TerminalReporter(config, stdout)
config.pluginmanager.register(reporter, 'terminalreporter')
if config.option.debug or config.option.traceconfig:
@@ -52,11 +53,6 @@
reporter.write_line("[traceconfig] " + msg)
config.trace.root.setprocessor("pytest:config", mywriter)
-def pytest_unconfigure(config):
- if hasattr(config, '_toclose'):
- #print "closing", config._toclose, config._toclose.fileno()
- config._toclose.close()
-
def getreportopt(config):
reportopts = ""
optvalue = config.option.report
@@ -165,9 +161,6 @@
def pytest_deselected(self, items):
self.stats.setdefault('deselected', []).extend(items)
- def pytest__teardown_final_logerror(self, report):
- self.stats.setdefault("error", []).append(report)
-
def pytest_runtest_logstart(self, nodeid, location):
# ensure that the path is printed before the
# 1st test of a module starts running
@@ -259,7 +252,7 @@
msg = "platform %s -- Python %s" % (sys.platform, verinfo)
if hasattr(sys, 'pypy_version_info'):
verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
- msg += "[pypy-%s]" % verinfo
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
msg += " -- pytest-%s" % (py.test.__version__)
if self.verbosity > 0 or self.config.option.debug or \
getattr(self.config.option, 'pastebin', None):
@@ -289,10 +282,18 @@
# we take care to leave out Instances aka ()
# because later versions are going to get rid of them anyway
if self.config.option.verbose < 0:
- for item in items:
- nodeid = item.nodeid
- nodeid = nodeid.replace("::()::", "::")
- self._tw.line(nodeid)
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split('::', 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
return
stack = []
indent = ""
@@ -318,12 +319,17 @@
self.config.hook.pytest_terminal_summary(terminalreporter=self)
if exitstatus == 2:
self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
self.summary_deselected()
self.summary_stats()
def pytest_keyboard_interrupt(self, excinfo):
self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+ def pytest_unconfigure(self):
+ if hasattr(self, '_keyboardinterrupt_memo'):
+ self._report_keyboardinterrupt()
+
def _report_keyboardinterrupt(self):
excrepr = self._keyboardinterrupt_memo
msg = excrepr.reprcrash.message
@@ -388,7 +394,7 @@
else:
msg = self._getfailureheadline(rep)
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
def summary_errors(self):
if self.config.option.tbstyle != "no":
@@ -406,7 +412,15 @@
elif rep.when == "teardown":
msg = "ERROR at teardown of " + msg
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ for secname, content in rep.sections:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
def summary_stats(self):
session_duration = py.std.time.time() - self._sessionstarttime
@@ -417,9 +431,10 @@
keys.append(key)
parts = []
for key in keys:
- val = self.stats.get(key, None)
- if val:
- parts.append("%d %s" %(len(val), key))
+ if key: # setup/teardown reports have an empty key, ignore them
+ val = self.stats.get(key, None)
+ if val:
+ parts.append("%d %s" %(len(val), key))
line = ", ".join(parts)
# XXX coloring
msg = "%s in %.2f seconds" %(line, session_duration)
@@ -430,8 +445,15 @@
def summary_deselected(self):
if 'deselected' in self.stats:
+ l = []
+ k = self.config.option.keyword
+ if k:
+ l.append("-k%s" % k)
+ m = self.config.option.markexpr
+ if m:
+ l.append("-m %r" % m)
self.write_sep("=", "%d tests deselected by %r" %(
- len(self.stats['deselected']), self.config.option.keyword), bold=True)
+ len(self.stats['deselected']), " ".join(l)), bold=True)
def repr_pythonversion(v=None):
if v is None:
diff --git a/_pytest/tmpdir.py b/_pytest/tmpdir.py
--- a/_pytest/tmpdir.py
+++ b/_pytest/tmpdir.py
@@ -46,7 +46,7 @@
def finish(self):
self.trace("finish")
-
+
def pytest_configure(config):
mp = monkeypatch()
t = TempdirHandler(config)
@@ -64,5 +64,5 @@
name = request._pyfuncitem.name
name = py.std.re.sub("[\W]", "_", name)
x = request.config._tmpdirhandler.mktemp(name, numbered=True)
- return x.realpath()
+ return x
diff --git a/_pytest/unittest.py b/_pytest/unittest.py
--- a/_pytest/unittest.py
+++ b/_pytest/unittest.py
@@ -2,6 +2,9 @@
import pytest, py
import sys, pdb
+# for transfering markers
+from _pytest.python import transfer_markers
+
def pytest_pycollect_makeitem(collector, name, obj):
unittest = sys.modules.get('unittest')
if unittest is None:
@@ -19,7 +22,14 @@
class UnitTestCase(pytest.Class):
def collect(self):
loader = py.std.unittest.TestLoader()
+ module = self.getparent(pytest.Module).obj
+ cls = self.obj
for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ funcobj = getattr(x, 'im_func', x)
+ transfer_markers(funcobj, cls, module)
+ if hasattr(funcobj, 'todo'):
+ pytest.mark.xfail(reason=str(funcobj.todo))(funcobj)
yield TestCaseFunction(name, parent=self)
def setup(self):
@@ -37,15 +47,13 @@
class TestCaseFunction(pytest.Function):
_excinfo = None
- def __init__(self, name, parent):
- super(TestCaseFunction, self).__init__(name, parent)
- if hasattr(self._obj, 'todo'):
- getattr(self._obj, 'im_func', self._obj).xfail = \
- pytest.mark.xfail(reason=str(self._obj.todo))
-
def setup(self):
self._testcase = self.parent.obj(self.name)
self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, 'skip'):
+ pytest.skip(self._testcase.skip)
+ if hasattr(self._obj, 'skip'):
+ pytest.skip(self._obj.skip)
if hasattr(self._testcase, 'setup_method'):
self._testcase.setup_method(self._obj)
@@ -83,28 +91,37 @@
self._addexcinfo(rawexcinfo)
def addFailure(self, testcase, rawexcinfo):
self._addexcinfo(rawexcinfo)
+
def addSkip(self, testcase, reason):
try:
pytest.skip(reason)
except pytest.skip.Exception:
self._addexcinfo(sys.exc_info())
- def addExpectedFailure(self, testcase, rawexcinfo, reason):
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
try:
pytest.xfail(str(reason))
except pytest.xfail.Exception:
self._addexcinfo(sys.exc_info())
- def addUnexpectedSuccess(self, testcase, reason):
- pass
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
def addSuccess(self, testcase):
pass
+
def stopTest(self, testcase):
pass
+
def runtest(self):
self._testcase(result=self)
def _prunetraceback(self, excinfo):
pytest.Function._prunetraceback(self, excinfo)
- excinfo.traceback = excinfo.traceback.filter(lambda x:not x.frame.f_globals.get('__unittest'))
+ traceback = excinfo.traceback.filter(
+ lambda x:not x.frame.f_globals.get('__unittest'))
+ if traceback:
+ excinfo.traceback = traceback
@pytest.mark.tryfirst
def pytest_runtest_makereport(item, call):
@@ -120,14 +137,19 @@
ut = sys.modules['twisted.python.failure']
Failure__init__ = ut.Failure.__init__.im_func
check_testcase_implements_trial_reporter()
- def excstore(self, exc_value=None, exc_type=None, exc_tb=None):
+ def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
+ captureVars=None):
if exc_value is None:
self._rawexcinfo = sys.exc_info()
else:
if exc_type is None:
exc_type = type(exc_value)
self._rawexcinfo = (exc_type, exc_value, exc_tb)
- Failure__init__(self, exc_value, exc_type, exc_tb)
+ try:
+ Failure__init__(self, exc_value, exc_type, exc_tb,
+ captureVars=captureVars)
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
ut.Failure.__init__ = excstore
try:
return __multicall__.execute()
diff --git a/py/__init__.py b/py/__init__.py
--- a/py/__init__.py
+++ b/py/__init__.py
@@ -8,7 +8,7 @@
(c) Holger Krekel and others, 2004-2010
"""
-__version__ = '1.4.4.dev1'
+__version__ = '1.4.7'
from py import _apipkg
@@ -70,6 +70,11 @@
'getrawcode' : '._code.code:getrawcode',
'patch_builtins' : '._code.code:patch_builtins',
'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
},
# backports and additions of builtins
diff --git a/py/_builtin.py b/py/_builtin.py
--- a/py/_builtin.py
+++ b/py/_builtin.py
@@ -113,9 +113,12 @@
# some backward compatibility helpers
_basestring = str
- def _totext(obj, encoding=None):
+ def _totext(obj, encoding=None, errors=None):
if isinstance(obj, bytes):
- obj = obj.decode(encoding)
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
elif not isinstance(obj, str):
obj = str(obj)
return obj
@@ -142,7 +145,7 @@
del back
elif locs is None:
locs = globs
- fp = open(fn, "rb")
+ fp = open(fn, "r")
try:
source = fp.read()
finally:
diff --git a/py/_code/_assertionnew.py b/py/_code/_assertionnew.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionnew.py
@@ -0,0 +1,339 @@
+"""
+Find intermediate evalutation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to gleam useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
diff --git a/py/_code/_assertionold.py b/py/_code/_assertionold.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionold.py
@@ -0,0 +1,555 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = ''
+ self.__obj__.filename = ''
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = ''
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert '
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = ''
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, py.std.types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/py/_code/assertion.py b/py/_code/assertion.py
new file mode 100644
--- /dev/null
+++ b/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+ cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = ""
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+ from py._code._assertionnew import interpret as reinterpret
+else:
+ reinterpret = reinterpret_old
+
diff --git a/py/_code/code.py b/py/_code/code.py
--- a/py/_code/code.py
+++ b/py/_code/code.py
@@ -145,6 +145,17 @@
return self.frame.f_locals
locals = property(getlocals, None, None, "locals of underlaying frame")
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
def getfirstlinesource(self):
# on Jython this firstlineno can be -1 apparently
return max(self.frame.code.firstlineno, 0)
@@ -158,13 +169,12 @@
end = self.lineno
try:
_, end = source.getstatementrange(end)
- except IndexError:
+ except (IndexError, ValueError):
end = self.lineno + 1
# heuristic to stop displaying source on e.g.
# if something: # assume this causes a NameError
# # _this_ lines and the one
# below we don't want from entry.getsource()
- end = min(end, len(source))
for i in range(self.lineno, end):
if source[i].rstrip().endswith(':'):
end = i + 1
@@ -273,7 +283,11 @@
"""
cache = {}
for i, entry in enumerate(self):
- key = entry.frame.code.path, entry.lineno
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
#print "checking for recursion at", key
l = cache.setdefault(key, [])
if l:
@@ -308,7 +322,7 @@
self._striptext = 'AssertionError: '
self._excinfo = tup
self.type, self.value, tb = self._excinfo
- self.typename = getattr(self.type, "__name__", "???")
+ self.typename = self.type.__name__
self.traceback = py.code.Traceback(tb)
def __repr__(self):
@@ -347,14 +361,16 @@
showlocals: show locals per traceback entry
style: long|short|no|native traceback style
tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals is ignored.
"""
if style == 'native':
- import traceback
- return ''.join(traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- ))
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
fmt = FormattedExcinfo(showlocals=showlocals, style=style,
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
@@ -452,7 +468,7 @@
def repr_locals(self, locals):
if self.showlocals:
lines = []
- keys = list(locals)
+ keys = [loc for loc in locals if loc[0] != "@"]
keys.sort()
for name in keys:
value = locals[name]
@@ -506,7 +522,10 @@
def _makepath(self, path):
if not self.abspath:
- np = py.path.local().bestrelpath(path)
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
if len(np) < len(str(path)):
path = np
return path
@@ -595,6 +614,19 @@
if self.extraline:
tw.line(self.extraline)
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
class ReprEntry(TerminalRepr):
localssep = "_ "
@@ -680,19 +712,26 @@
oldbuiltins = {}
-def patch_builtins(compile=True):
- """ put compile builtins to Python's builtins. """
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
if compile:
l = oldbuiltins.setdefault('compile', [])
l.append(py.builtin.builtins.compile)
py.builtin.builtins.compile = py.code.compile
-def unpatch_builtins(compile=True):
+def unpatch_builtins(assertion=True, compile=True):
""" remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
if compile:
py.builtin.builtins.compile = oldbuiltins['compile'].pop()
-def getrawcode(obj):
+def getrawcode(obj, trycall=True):
""" return code object for given function. """
try:
return obj.__code__
@@ -701,5 +740,10 @@
obj = getattr(obj, 'func_code', obj)
obj = getattr(obj, 'f_code', obj)
obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
return obj
diff --git a/py/_code/source.py b/py/_code/source.py
--- a/py/_code/source.py
+++ b/py/_code/source.py
@@ -108,6 +108,7 @@
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
statement region which containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
"""
# XXX there must be a better than these heuristic ways ...
# XXX there may even be better heuristics :-)
@@ -116,6 +117,7 @@
# 1. find the start of the statement
from codeop import compile_command
+ end = None
for start in range(lineno, -1, -1):
if assertion:
line = self.lines[start]
@@ -139,7 +141,9 @@
trysource = self[start:end]
if trysource.isparseable():
return start, end
- return start, len(self)
+ if end is None:
+ raise IndexError("no valid source range around line %d " % (lineno,))
+ return start, end
def getblockend(self, lineno):
# XXX
@@ -257,23 +261,29 @@
def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
try:
code = py.code.Code(obj)
except TypeError:
- # fallback to
- fn = (py.std.inspect.getsourcefile(obj) or
- py.std.inspect.getfile(obj))
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
fspath = fn and py.path.local(fn) or None
+ lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
- lineno = None
- else:
- lineno = None
+ pass
else:
fspath = code.path
lineno = code.firstlineno
+ assert isinstance(lineno, int)
return fspath, lineno
#
@@ -286,7 +296,7 @@
except py.builtin._sysex:
raise
except:
- return None, None
+ return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
diff --git a/py/_error.py b/py/_error.py
--- a/py/_error.py
+++ b/py/_error.py
@@ -23,6 +23,7 @@
2: errno.ENOENT,
3: errno.ENOENT,
17: errno.EEXIST,
+ 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailiable
22: errno.ENOTDIR,
267: errno.ENOTDIR,
5: errno.EACCES, # anything better?
diff --git a/py/_iniconfig.py b/py/_iniconfig.py
--- a/py/_iniconfig.py
+++ b/py/_iniconfig.py
@@ -103,6 +103,7 @@
def _parseline(self, line, lineno):
# comments
line = line.split('#')[0].rstrip()
+ line = line.split(';')[0].rstrip()
# blank lines
if not line:
return None, None
diff --git a/py/_io/capture.py b/py/_io/capture.py
--- a/py/_io/capture.py
+++ b/py/_io/capture.py
@@ -12,7 +12,7 @@
class TextIO(StringIO):
def write(self, data):
if not isinstance(data, unicode):
- data = unicode(data, getattr(self, '_encoding', 'UTF-8'))
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
StringIO.write(self, data)
else:
TextIO = StringIO
@@ -258,6 +258,9 @@
f = getattr(self, name).tmpfile
f.seek(0)
res = f.read()
+ enc = getattr(f, 'encoding', None)
+ if enc:
+ res = py.builtin._totext(res, enc, 'replace')
f.truncate(0)
f.seek(0)
l.append(res)
diff --git a/py/_io/terminalwriter.py b/py/_io/terminalwriter.py
--- a/py/_io/terminalwriter.py
+++ b/py/_io/terminalwriter.py
@@ -105,6 +105,8 @@
Blue=44, Purple=45, Cyan=46, White=47,
bold=1, light=2, blink=5, invert=7)
+ _newline = None # the last line printed
+
# XXX deprecate stringio argument
def __init__(self, file=None, stringio=False, encoding=None):
if file is None:
@@ -112,11 +114,9 @@
self.stringio = file = py.io.TextIO()
else:
file = py.std.sys.stdout
- if hasattr(file, 'encoding'):
- encoding = file.encoding
elif hasattr(file, '__call__'):
file = WriteFile(file, encoding=encoding)
- self.encoding = encoding
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
self._file = file
self.fullwidth = get_terminal_width()
self.hasmarkup = should_do_markup(file)
@@ -182,8 +182,31 @@
return s
def line(self, s='', **kw):
+ if self._newline == False:
+ self.write("\n")
self.write(s, **kw)
self.write('\n')
+ self._newline = True
+
+ def reline(self, line, **opts):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ if not self._newline:
+ self.write("\r")
+ self.write(line, **opts)
+ # see if we need to fill up some spaces at the end
+ # xxx have a more exact lastlinelen working from self.write?
+ lenline = len(line)
+ try:
+ lastlen = self._lastlinelen
+ except AttributeError:
+ pass
+ else:
+ if lenline < lastlen:
+ self.write(" " * (lastlen - lenline + 1))
+ self._lastlinelen = lenline
+ self._newline = False
+
class Win32ConsoleWriter(TerminalWriter):
def write(self, s, **kw):
@@ -280,10 +303,10 @@
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
SetConsoleTextAttribute.restype = wintypes.BOOL
-
+
_GetConsoleScreenBufferInfo = \
ctypes.windll.kernel32.GetConsoleScreenBufferInfo
- _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
def GetConsoleInfo(handle):
diff --git a/py/_path/common.py b/py/_path/common.py
--- a/py/_path/common.py
+++ b/py/_path/common.py
@@ -64,7 +64,10 @@
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
- except (py.error.ENOENT, py.error.ENOTDIR):
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but its kind of necessary since ENOMEDIUM
+ # is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
@@ -368,6 +371,5 @@
else:
name = str(path) # path.strpath # XXX svn?
pattern = '*' + path.sep + pattern
- from fnmatch import fnmatch
- return fnmatch(name, pattern)
+ return py.std.fnmatch.fnmatch(name, pattern)
diff --git a/py/_path/local.py b/py/_path/local.py
--- a/py/_path/local.py
+++ b/py/_path/local.py
@@ -157,14 +157,16 @@
return str(self) < str(other)
def samefile(self, other):
- """ return True if 'other' references the same file as 'self'. """
- if not iswin32:
- return py.error.checked_call(
- os.path.samefile, str(self), str(other))
+ """ return True if 'other' references the same file as 'self'.
+ """
+ if not isinstance(other, py.path.local):
+ other = os.path.abspath(str(other))
if self == other:
return True
- other = os.path.abspath(str(other))
- return self == other
+ if iswin32:
+ return False # ther is no samefile
+ return py.error.checked_call(
+ os.path.samefile, str(self), str(other))
def remove(self, rec=1, ignore_errors=False):
""" remove a file or directory (or a directory tree if rec=1).
@@ -539,7 +541,11 @@
if self.basename != "__init__.py":
modfile = modfile[:-12]
- if not self.samefile(modfile):
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
raise self.ImportMismatchError(modname, modfile, self)
return mod
else:
diff --git a/py/_path/svnurl.py b/py/_path/svnurl.py
--- a/py/_path/svnurl.py
+++ b/py/_path/svnurl.py
@@ -233,6 +233,8 @@
e = sys.exc_info()[1]
if e.err.find('non-existent in that revision') != -1:
raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
elif e.err.find('File not found') != -1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('not part of a repository')!=-1:
diff --git a/py/_path/svnwc.py b/py/_path/svnwc.py
--- a/py/_path/svnwc.py
+++ b/py/_path/svnwc.py
@@ -482,10 +482,13 @@
except py.process.cmdexec.Error:
e = sys.exc_info()[1]
strerr = e.err.lower()
- if strerr.find('file not found') != -1:
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
raise py.error.ENOENT(self)
if (strerr.find('file exists') != -1 or
strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
strerr.find("can't create directory") != -1):
raise py.error.EEXIST(self)
raise
@@ -593,7 +596,7 @@
out = self._authsvn('lock').strip()
if not out:
# warning or error, raise exception
- raise Exception(out[4:])
+ raise ValueError("unknown error in svn lock command")
def unlock(self):
""" unset a previously set lock """
@@ -1066,6 +1069,8 @@
modrev = '?'
author = '?'
date = ''
+ elif itemstatus == "replaced":
+ pass
else:
#print entryel.toxml()
commitel = entryel.getElementsByTagName('commit')[0]
@@ -1148,7 +1153,11 @@
raise ValueError("Not a versioned resource")
#raise ValueError, "Not a versioned resource %r" % path
self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
- self.rev = int(d['revision'])
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
self.path = py.path.local(d['path'])
self.size = self.path.size()
if 'lastchangedrev' in d:
diff --git a/py/_xmlgen.py b/py/_xmlgen.py
--- a/py/_xmlgen.py
+++ b/py/_xmlgen.py
@@ -52,7 +52,7 @@
def unicode(self, indent=2):
l = []
SimpleUnicodeVisitor(l.append, indent).visit(self)
- return "".join(l)
+ return u("").join(l)
def __repr__(self):
name = self.__class__.__name__
@@ -122,11 +122,13 @@
if visitmethod is not None:
break
else:
- visitmethod = self.object
+ visitmethod = self.__object
self.cache[cls] = visitmethod
visitmethod(node)
- def object(self, obj):
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
#self.write(obj)
self.write(escape(unicode(obj)))
@@ -136,7 +138,8 @@
def list(self, obj):
assert id(obj) not in self.visited
self.visited[id(obj)] = 1
- map(self.visit, obj)
+ for elem in obj:
+ self.visit(elem)
def Tag(self, tag):
assert id(tag) not in self.visited
@@ -181,7 +184,11 @@
value = getattr(attrs, name)
if name.endswith('_'):
name = name[:-1]
- return ' %s="%s"' % (name, escape(unicode(value)))
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
def getstyle(self, tag):
""" return attribute list suitable for styling. """
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -483,7 +483,7 @@
return a_str.strip(' ')
elif n == 1:
return a_str.rstrip(' ')
- else:
+ else:
return a_str.lstrip(' ')
s = a.build_types(f, [int, annmodel.SomeString(no_nul=True)])
assert s.no_nul
@@ -3737,6 +3737,25 @@
s = a.build_types(f, [int])
assert s.listdef.listitem.range_step == 0
+ def test_specialize_arg_memo(self):
+ @objectmodel.specialize.memo()
+ def g(n):
+ return n
+ @objectmodel.specialize.arg(0)
+ def f(i):
+ return g(i)
+ def main(i):
+ if i == 2:
+ return f(i)
+ elif i == 3:
+ return f(i)
+ else:
+ raise NotImplementedError
+
+ a = self.RPythonAnnotator()
+ s = a.build_types(main, [int])
+ assert isinstance(s, annmodel.SomeInteger)
+
def g(n):
return [0,1,2,n]
diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py
new file mode 100644
--- /dev/null
+++ b/pypy/doc/tool/makecontributor.py
@@ -0,0 +1,133 @@
+import py
+import sys
+from collections import defaultdict
+import operator
+import re
+import mercurial.localrepo
+import mercurial.ui
+
+ROOT = py.path.local(__file__).join('..', '..', '..', '..')
+author_re = re.compile('(.*) <.*>')
+pair_programming_re = re.compile(r'^\((.*?)\)')
+excluded = set(["pypy", "convert-repo"])
+
+alias = {
+ 'Anders Chrigstrom': ['arre'],
+ 'Antonio Cuni': ['antocuni', 'anto'],
+ 'Armin Rigo': ['arigo', 'arfigo', 'armin', 'arigato'],
+ 'Maciej Fijalkowski': ['fijal'],
+ 'Carl Friedrich Bolz': ['cfbolz', 'cf'],
+ 'Samuele Pedroni': ['pedronis', 'samuele', 'samule'],
+ 'Michael Hudson': ['mwh'],
+ 'Holger Krekel': ['hpk', 'holger krekel', 'holger', 'hufpk'],
+ "Amaury Forgeot d'Arc": ['afa'],
+ 'Alex Gaynor': ['alex', 'agaynor'],
+ 'David Schneider': ['bivab', 'david'],
+ 'Christian Tismer': ['chris', 'christian', 'tismer',
+ 'tismer at christia-wjtqxl.localdomain'],
+ 'Benjamin Peterson': ['benjamin'],
+ 'Hakan Ardo': ['hakan', 'hakanardo'],
+ 'Niklaus Haldimann': ['nik'],
+ 'Alexander Schremmer': ['xoraxax'],
+ 'Anders Hammarquist': ['iko'],
+ 'David Edelsohn': ['edelsoh', 'edelsohn'],
+ 'Niko Matsakis': ['niko'],
+ 'Jakub Gustak': ['jlg'],
+ 'Guido Wesdorp': ['guido'],
+ 'Michael Foord': ['mfoord'],
+ 'Mark Pearse': ['mwp'],
+ 'Toon Verwaest': ['tverwaes'],
+ 'Eric van Riet Paap': ['ericvrp'],
+ 'Jacob Hallen': ['jacob', 'jakob'],
+ 'Anders Lehmann': ['ale', 'anders'],
+ 'Bert Freudenberg': ['bert'],
+ 'Boris Feigin': ['boris', 'boria'],
+ 'Valentino Volonghi': ['valentino', 'dialtone'],
+ 'Aurelien Campeas': ['aurelien', 'aureliene'],
+ 'Adrien Di Mascio': ['adim'],
+ 'Jacek Generowicz': ['Jacek', 'jacek'],
+ 'Jim Hunziker': ['landtuna at gmail.com'],
+ 'Kristjan Valur Jonsson': ['kristjan at kristjan-lp.ccp.ad.local'],
+ 'Laura Creighton': ['lac'],
+ 'Aaron Iles': ['aliles'],
+ 'Ludovic Aubry': ['ludal', 'ludovic'],
+ 'Lukas Diekmann': ['l.diekmann', 'ldiekmann'],
+ 'Matti Picus': ['Matti Picus matti.picus at gmail.com',
+ 'matthp', 'mattip', 'mattip>'],
+ 'Michael Cheng': ['mikefc'],
+ 'Richard Emslie': ['rxe'],
+ 'Roberto De Ioris': ['roberto at goyle'],
+ 'Roberto De Ioris': ['roberto at mrspurr'],
+ 'Sven Hager': ['hager'],
+ 'Tomo Cocoa': ['cocoatomo'],
+ }
+
+alias_map = {}
+for name, nicks in alias.iteritems():
+ for nick in nicks:
+ alias_map[nick] = name
+
+def get_canonical_author(name):
+ match = author_re.match(name)
+ if match:
+ name = match.group(1)
+ return alias_map.get(name, name)
+
+ignored_nicknames = defaultdict(int)
+
+def get_more_authors(log):
+ match = pair_programming_re.match(log)
+ if not match:
+ return set()
+ ignore_words = ['around', 'consulting', 'yesterday', 'for a bit', 'thanks',
+ 'in-progress', 'bits of', 'even a little', 'floating',]
+ sep_words = ['and', ';', '+', '/', 'with special by']
+ nicknames = match.group(1)
+ for word in ignore_words:
+ nicknames = nicknames.replace(word, '')
+ for word in sep_words:
+ nicknames = nicknames.replace(word, ',')
+ nicknames = [nick.strip().lower() for nick in nicknames.split(',')]
+ authors = set()
+ for nickname in nicknames:
+ author = alias_map.get(nickname)
+ if not author:
+ ignored_nicknames[nickname] += 1
+ else:
+ authors.add(author)
+ return authors
+
+def main(show_numbers):
+ ui = mercurial.ui.ui()
+ repo = mercurial.localrepo.localrepository(ui, str(ROOT))
+ authors_count = defaultdict(int)
+ for i in repo:
+ ctx = repo[i]
+ authors = set()
+ authors.add(get_canonical_author(ctx.user()))
+ authors.update(get_more_authors(ctx.description()))
+ for author in authors:
+ if author not in excluded:
+ authors_count[author] += 1
+
+ # uncomment the next lines to get the list of nicknamed which could not be
+ # parsed from commit logs
+ ## items = ignored_nicknames.items()
+ ## items.sort(key=operator.itemgetter(1), reverse=True)
+ ## for name, n in items:
+ ## if show_numbers:
+ ## print '%5d %s' % (n, name)
+ ## else:
+ ## print name
+
+ items = authors_count.items()
+ items.sort(key=operator.itemgetter(1), reverse=True)
+ for name, n in items:
+ if show_numbers:
+ print '%5d %s' % (n, name)
+ else:
+ print name
+
+if __name__ == '__main__':
+ show_numbers = '-n' in sys.argv
+ main(show_numbers)
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -321,7 +321,7 @@
def user_setup(self, space, w_subtype):
self.w__dict__ = space.newdict(
- instance=True, classofinstance=w_subtype)
+ instance=True)
base_user_setup(self, space, w_subtype)
def setclass(self, space, w_subtype):
diff --git a/pypy/module/cppyy/test/Makefile b/pypy/module/cppyy/test/Makefile
--- a/pypy/module/cppyy/test/Makefile
+++ b/pypy/module/cppyy/test/Makefile
@@ -46,9 +46,9 @@
ifeq ($(CINT),)
# TODO: methptrgetter causes these tests to crash, so don't use it for now
-stltypesDict.so: stltypes.cxx stltypes.h stltypes.xml
- $(genreflex) stltypes.h --selection=stltypes.xml
- g++ -o $@ stltypes_rflx.cpp stltypes.cxx -shared -lReflex $(cppflags) $(cppflags2)
+#stltypesDict.so: stltypes.cxx stltypes.h stltypes.xml
+# $(genreflex) stltypes.h --selection=stltypes.xml
+# g++ -o $@ stltypes_rflx.cpp stltypes.cxx -shared -lReflex $(cppflags) $(cppflags2)
std_streamsDict.so: std_streams.cxx std_streams.h std_streams.xml
$(genreflex) std_streams.h --selection=std_streams.xml
diff --git a/pypy/module/cppyy/test/test_stltypes.py b/pypy/module/cppyy/test/test_stltypes.py
--- a/pypy/module/cppyy/test/test_stltypes.py
+++ b/pypy/module/cppyy/test/test_stltypes.py
@@ -65,15 +65,20 @@
#-----
v = tv1()
for i in range(self.N):
+ print i, 'before'
v.push_back(i)
+ print i, 'after'
assert v.size() == i+1
assert v.at(i) == i
assert v[i] == i
+ return
+
assert v.size() == self.N
assert len(v) == self.N
v.destruct()
+
def test02_user_type_vector_type(self):
"""Test access to an std::vector"""
diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h
--- a/pypy/module/cpyext/include/patchlevel.h
+++ b/pypy/module/cpyext/include/patchlevel.h
@@ -29,7 +29,7 @@
#define PY_VERSION "2.7.2"
/* PyPy version as a string */
-#define PYPY_VERSION "1.8.1"
+#define PYPY_VERSION "1.9.1"
/* Subversion Revision number of this file (not of the repository).
* Empty since Mercurial migration. */
diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py
--- a/pypy/module/cpyext/stubs.py
+++ b/pypy/module/cpyext/stubs.py
@@ -2253,24 +2253,6 @@
"""Concat two strings giving a new Unicode string."""
raise NotImplementedError
- at cpython_api([PyObject, PyObject, Py_ssize_t], PyObject)
-def PyUnicode_Split(space, s, sep, maxsplit):
- """Split a string giving a list of Unicode strings. If sep is NULL, splitting
- will be done at all whitespace substrings. Otherwise, splits occur at the given
- separator. At most maxsplit splits will be done. If negative, no limit is
- set. Separators are not included in the resulting list.
-
- This function used an int type for maxsplit. This might require
- changes in your code for properly supporting 64-bit systems."""
- raise NotImplementedError
-
- at cpython_api([PyObject, rffi.INT_real], PyObject)
-def PyUnicode_Splitlines(space, s, keepend):
- """Split a Unicode string at line breaks, returning a list of Unicode strings.
- CRLF is considered to be one line break. If keepend is 0, the Line break
- characters are not included in the resulting strings."""
- raise NotImplementedError
-
@cpython_api([PyObject, PyObject, rffi.CCHARP], PyObject)
def PyUnicode_Translate(space, str, table, errors):
"""Translate a string by applying a character mapping table to it and return the
@@ -2287,29 +2269,6 @@
use the default error handling."""
raise NotImplementedError
- at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real], Py_ssize_t, error=-2)
-def PyUnicode_Find(space, str, substr, start, end, direction):
- """Return the first position of substr in str*[*start:end] using the given
- direction (direction == 1 means to do a forward search, direction == -1 a
- backward search). The return value is the index of the first match; a value of
- -1 indicates that no match was found, and -2 indicates that an error
- occurred and an exception has been set.
-
- This function used an int type for start and end. This
- might require changes in your code for properly supporting 64-bit
- systems."""
- raise NotImplementedError
-
- at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t], Py_ssize_t, error=-1)
-def PyUnicode_Count(space, str, substr, start, end):
- """Return the number of non-overlapping occurrences of substr in
- str[start:end]. Return -1 if an error occurred.
-
- This function returned an int type and used an int
- type for start and end. This might require changes in your code for
- properly supporting 64-bit systems."""
- raise NotImplementedError
-
@cpython_api([PyObject, PyObject, rffi.INT_real], PyObject)
def PyUnicode_RichCompare(space, left, right, op):
"""Rich compare two unicode strings and return one of the following:
diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py
--- a/pypy/module/cpyext/test/test_unicodeobject.py
+++ b/pypy/module/cpyext/test/test_unicodeobject.py
@@ -457,3 +457,31 @@
assert api.PyUnicode_Tailmatch(w_str, space.wrap("cde"), 1, 5, -1) == 1
self.raises(space, api, TypeError,
api.PyUnicode_Tailmatch, w_str, space.wrap(3), 2, 10, 1)
+
+ def test_count(self, space, api):
+ w_str = space.wrap(u"abcabdab")
+ assert api.PyUnicode_Count(w_str, space.wrap(u"ab"), 0, -1) == 2
+ assert api.PyUnicode_Count(w_str, space.wrap(u"ab"), 0, 2) == 1
+ assert api.PyUnicode_Count(w_str, space.wrap(u"ab"), -5, 30) == 2
+
+ def test_find(self, space, api):
+ w_str = space.wrap(u"abcabcd")
+ assert api.PyUnicode_Find(w_str, space.wrap(u"c"), 0, 7, 1) == 2
+ assert api.PyUnicode_Find(w_str, space.wrap(u"c"), 3, 7, 1) == 5
+ assert api.PyUnicode_Find(w_str, space.wrap(u"c"), 0, 7, -1) == 5
+ assert api.PyUnicode_Find(w_str, space.wrap(u"c"), 3, 7, -1) == 5
+ assert api.PyUnicode_Find(w_str, space.wrap(u"c"), 0, 4, -1) == 2
+ assert api.PyUnicode_Find(w_str, space.wrap(u"z"), 0, 4, -1) == -1
+
+ def test_split(self, space, api):
+ w_str = space.wrap(u"a\nb\nc\nd")
+ assert "[u'a', u'b', u'c', u'd']" == space.unwrap(space.repr(
+ api.PyUnicode_Split(w_str, space.wrap('\n'), -1)))
+ assert r"[u'a', u'b', u'c\nd']" == space.unwrap(space.repr(
+ api.PyUnicode_Split(w_str, space.wrap('\n'), 2)))
+ assert r"[u'a', u'b', u'c d']" == space.unwrap(space.repr(
+ api.PyUnicode_Split(space.wrap(u'a\nb c d'), None, 2)))
+ assert "[u'a', u'b', u'c', u'd']" == space.unwrap(space.repr(
+ api.PyUnicode_Splitlines(w_str, 0)))
+ assert r"[u'a\n', u'b\n', u'c\n', u'd']" == space.unwrap(space.repr(
+ api.PyUnicode_Splitlines(w_str, 1)))
diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py
--- a/pypy/module/cpyext/unicodeobject.py
+++ b/pypy/module/cpyext/unicodeobject.py
@@ -598,3 +598,46 @@
else:
return stringtype.stringendswith(str, substr, start, end)
+ at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t], Py_ssize_t, error=-1)
+def PyUnicode_Count(space, w_str, w_substr, start, end):
+ """Return the number of non-overlapping occurrences of substr in
+ str[start:end]. Return -1 if an error occurred."""
+ w_count = space.call_method(w_str, "count", w_substr,
+ space.wrap(start), space.wrap(end))
+ return space.int_w(w_count)
+
+ at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real],
+ Py_ssize_t, error=-2)
+def PyUnicode_Find(space, w_str, w_substr, start, end, direction):
+ """Return the first position of substr in str*[*start:end] using
+ the given direction (direction == 1 means to do a forward search,
+ direction == -1 a backward search). The return value is the index
+ of the first match; a value of -1 indicates that no match was
+ found, and -2 indicates that an error occurred and an exception
+ has been set."""
+ if rffi.cast(lltype.Signed, direction) > 0:
+ w_pos = space.call_method(w_str, "find", w_substr,
+ space.wrap(start), space.wrap(end))
+ else:
+ w_pos = space.call_method(w_str, "rfind", w_substr,
+ space.wrap(start), space.wrap(end))
+ return space.int_w(w_pos)
+
+ at cpython_api([PyObject, PyObject, Py_ssize_t], PyObject)
+def PyUnicode_Split(space, w_str, w_sep, maxsplit):
+ """Split a string giving a list of Unicode strings. If sep is
+ NULL, splitting will be done at all whitespace substrings.
+ Otherwise, splits occur at the given separator. At most maxsplit
+ splits will be done. If negative, no limit is set. Separators
+ are not included in the resulting list."""
+ if w_sep is None:
+ w_sep = space.w_None
+ return space.call_method(w_str, "split", w_sep, space.wrap(maxsplit))
+
+ at cpython_api([PyObject, rffi.INT_real], PyObject)
+def PyUnicode_Splitlines(space, w_str, keepend):
+ """Split a Unicode string at line breaks, returning a list of
+ Unicode strings. CRLF is considered to be one line break. If
+ keepend is 0, the Line break characters are not included in the
+ resulting strings."""
+ return space.call_method(w_str, "splitlines", space.wrap(keepend))
diff --git a/pypy/module/micronumpy/__init__.py b/pypy/module/micronumpy/__init__.py
--- a/pypy/module/micronumpy/__init__.py
+++ b/pypy/module/micronumpy/__init__.py
@@ -105,6 +105,7 @@
("fmod", "fmod"),
("floor", "floor"),
("ceil", "ceil"),
+ ("trunc", "trunc"),
("greater", "greater"),
("greater_equal", "greater_equal"),
("less", "less"),
@@ -132,6 +133,8 @@
('bitwise_or', 'bitwise_or'),
('bitwise_xor', 'bitwise_xor'),
('bitwise_not', 'invert'),
+ ('left_shift', 'left_shift'),
+ ('right_shift', 'right_shift'),
('invert', 'invert'),
('isnan', 'isnan'),
('isinf', 'isinf'),
diff --git a/pypy/module/micronumpy/interp_ufuncs.py b/pypy/module/micronumpy/interp_ufuncs.py
--- a/pypy/module/micronumpy/interp_ufuncs.py
+++ b/pypy/module/micronumpy/interp_ufuncs.py
@@ -546,6 +546,7 @@
("fmod", "fmod", 2, {"promote_to_float": True}),
("floor", "floor", 1, {"promote_to_float": True}),
("ceil", "ceil", 1, {"promote_to_float": True}),
+ ("trunc", "trunc", 1, {"promote_to_float": True}),
("exp", "exp", 1, {"promote_to_float": True}),
("exp2", "exp2", 1, {"promote_to_float": True}),
("expm1", "expm1", 1, {"promote_to_float": True}),
diff --git a/pypy/module/micronumpy/test/test_ufuncs.py b/pypy/module/micronumpy/test/test_ufuncs.py
--- a/pypy/module/micronumpy/test/test_ufuncs.py
+++ b/pypy/module/micronumpy/test/test_ufuncs.py
@@ -253,24 +253,17 @@
for i in range(3):
assert c[i] == a[i] - b[i]
- def test_floorceil(self):
- from _numpypy import array, floor, ceil
+ def test_floorceiltrunc(self):
+ from _numpypy import array, floor, ceil, trunc
import math
- reference = [-2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = floor(a)
- for i in range(5):
- assert b[i] == reference[i]
- reference = [-1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = ceil(a)
- assert (reference == b).all()
- inf = float("inf")
- data = [1.5, 2.9999, -1.999, inf]
- results = [math.floor(x) for x in data]
- assert (floor(data) == results).all()
- results = [math.ceil(x) for x in data]
- assert (ceil(data) == results).all()
+ ninf, inf = float("-inf"), float("inf")
+ a = array([ninf, -1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5, inf])
+ assert ([ninf, -2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == floor(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0, inf] == ceil(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == trunc(a)).all()
+ assert all([math.isnan(f(float("nan"))) for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("nan"))) == 1 for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("-nan"))) == -1 for f in floor, ceil, trunc])
def test_copysign(self):
from _numpypy import array, copysign
@@ -597,6 +590,12 @@
assert (bitwise_not(a) == ~a).all()
assert (invert(a) == ~a).all()
+ def test_shift(self):
+ from _numpypy import left_shift, right_shift
+
+ assert (left_shift([5, 1], [2, 13]) == [20, 2**13]).all()
+ assert (right_shift(10, range(5)) == [10, 5, 2, 1, 0]).all()
+
def test_comparisons(self):
import operator
from _numpypy import equal, not_equal, less, less_equal, greater, greater_equal
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -668,6 +668,13 @@
return math.ceil(v)
@simple_unary_op
+ def trunc(self, v):
+ if v < 0:
+ return math.ceil(v)
+ else:
+ return math.floor(v)
+
+ @simple_unary_op
def exp(self, v):
try:
return math.exp(v)
diff --git a/pypy/module/pyexpat/test/__init__.py b/pypy/module/pyexpat/test/__init__.py
new file mode 100644
diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py
--- a/pypy/module/sys/version.py
+++ b/pypy/module/sys/version.py
@@ -10,7 +10,7 @@
CPYTHON_VERSION = (2, 7, 2, "final", 42) #XXX # sync patchlevel.h
CPYTHON_API_VERSION = 1013 #XXX # sync with include/modsupport.h
-PYPY_VERSION = (1, 8, 1, "dev", 0) #XXX # sync patchlevel.h
+PYPY_VERSION = (1, 9, 1, "dev", 0) #XXX # sync patchlevel.h
if platform.name == 'msvc':
COMPILER_INFO = 'MSC v.%d 32 bit' % (platform.version * 10 + 600)
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -110,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/flow/flowcontext.py b/pypy/objspace/flow/flowcontext.py
--- a/pypy/objspace/flow/flowcontext.py
+++ b/pypy/objspace/flow/flowcontext.py
@@ -434,6 +434,13 @@
self.lastblock = block
self.pushvalue(w_result)
+ def BUILD_LIST_FROM_ARG(self, _, next_instr):
+ # This opcode was added with pypy-1.8. Here is a simpler
+ # version, enough for annotation.
+ last_val = self.popvalue()
+ self.pushvalue(self.space.newlist([]))
+ self.pushvalue(last_val)
+
# XXX Unimplemented 2.7 opcodes ----------------
# Set literals, set comprehensions
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,8 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, classofinstance=None,
- strdict=False):
+ instance=False, strdict=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -563,10 +562,7 @@
def listview_int(self, w_dict):
return self.unerase(w_dict.dstorage).keys()
- def w_keys(self, w_dict):
- # XXX there is no space.newlist_int yet
- space = self.space
- return space.call_function(space.w_list, w_dict)
+ # XXX there is no space.newlist_int yet to implement w_keys more efficiently
class IntIteratorImplementation(_WrappedIteratorMixin, IteratorImplementation):
pass
diff --git a/pypy/objspace/std/identitydict.py b/pypy/objspace/std/identitydict.py
--- a/pypy/objspace/std/identitydict.py
+++ b/pypy/objspace/std/identitydict.py
@@ -1,5 +1,5 @@
## ----------------------------------------------------------------------------
-## dict strategy (see dict_multiobject.py)
+## dict strategy (see dictmultiobject.py)
from pypy.rlib import rerased
from pypy.rlib.debug import mark_dict_non_null
@@ -80,8 +80,8 @@
def iter(self, w_dict):
return IdentityDictIteratorImplementation(self.space, self, w_dict)
- def keys(self, w_dict):
- return self.unerase(w_dict.dstorage).keys()
+ def w_keys(self, w_dict):
+ return self.space.newlist(self.unerase(w_dict.dstorage).keys())
class IdentityDictIteratorImplementation(_UnwrappedIteratorMixin, IteratorImplementation):
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,10 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- classofinstance=classofinstance,
strdict=strdict)
def newset(self):
diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py
--- a/pypy/objspace/std/test/test_dictmultiobject.py
+++ b/pypy/objspace/std/test/test_dictmultiobject.py
@@ -885,10 +885,9 @@
def newtuple(self, l):
return tuple(l)
- def newdict(self, module=False, instance=False, classofinstance=None):
+ def newdict(self, module=False, instance=False):
return W_DictMultiObject.allocate_and_init_instance(
- self, module=module, instance=instance,
- classofinstance=classofinstance)
+ self, module=module, instance=instance)
def finditem_str(self, w_dict, s):
return w_dict.getitem_str(s) # assume it's a multidict
@@ -968,6 +967,20 @@
assert type(self.impl.strategy) is self.StrategyClass
#assert self.impl.r_dict_content is None
+ def test_popitem(self):
+ self.fill_impl()
+ assert self.impl.length() == 2
+ a, b = self.impl.popitem()
+ assert self.impl.length() == 1
+ if a == self.string:
+ assert b == 1000
+ assert self.impl.getitem(self.string2) == 2000
+ else:
+ assert a == self.string2
+ assert b == 2000
+ assert self.impl.getitem_str(self.string) == 1000
+ self.check_not_devolved()
+
def test_setitem(self):
self.impl.setitem(self.string, 1000)
assert self.impl.length() == 1
diff --git a/pypy/pytest.ini b/pypy/pytest.ini
--- a/pypy/pytest.ini
+++ b/pypy/pytest.ini
@@ -1,2 +1,2 @@
[pytest]
-addopts = --assertmode=old -rf
+addopts = --assert=plain -rf
diff --git a/pypy/rlib/rsre/rsre_re.py b/pypy/rlib/rsre/rsre_re.py
--- a/pypy/rlib/rsre/rsre_re.py
+++ b/pypy/rlib/rsre/rsre_re.py
@@ -172,8 +172,9 @@
self._ctx = ctx
def span(self, groupnum=0):
- if not isinstance(groupnum, (int, long)):
- groupnum = self.re.groupindex[groupnum]
+# if not isinstance(groupnum, (int, long)):
+# groupnum = self.re.groupindex[groupnum]
+
return self._ctx.span(groupnum)
def start(self, groupnum=0):
@@ -182,19 +183,25 @@
def end(self, groupnum=0):
return self.span(groupnum)[1]
- def group(self, *groups):
- groups = groups or (0,)
- result = []
- for group in groups:
- frm, to = self.span(group)
- if 0 <= frm <= to:
- result.append(self._ctx._string[frm:to])
- else:
- result.append(None)
- if len(result) > 1:
- return tuple(result)
+ def group(self, group=0):
+ frm, to = self.span(group)
+ if 0 <= frm <= to:
+ return self._ctx._string[frm:to]
else:
- return result[0]
+ return None
+
+# def group(self, *groups):
+# groups = groups or (0,)
+# result = []
+# for group in groups:
+# frm, to = self.span(group)
+# if 0 <= frm <= to:
+# result.append(self._ctx._string[frm:to])
+# else:
+# result.append(None)
+# if len(result) > 1:
+# return tuple(result)
+
def groups(self, default=None):
fmarks = self._ctx.flatten_marks()
diff --git a/pypy/rlib/rsre/test/test_re.py b/pypy/rlib/rsre/test/test_re.py
--- a/pypy/rlib/rsre/test/test_re.py
+++ b/pypy/rlib/rsre/test/test_re.py
@@ -204,7 +204,7 @@
assert re.match('(a)', 'a').groups() == ('a',)
assert re.match(r'(a)', 'a').group(0) == 'a'
assert re.match(r'(a)', 'a').group(1) == 'a'
- assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
+ #assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
pat = re.compile('((a)|(b))(c)?')
assert pat.match('a').groups() == ('a', 'a', None, None)
@@ -218,13 +218,13 @@
assert m.group(0) == 'a'
assert m.group(0) == 'a'
assert m.group(1) == 'a'
- assert m.group(1, 1) == ('a', 'a')
+ #assert m.group(1, 1) == ('a', 'a')
pat = re.compile('(?:(?Pa)|(?Pb))(?Pc)?')
- assert pat.match('a').group(1, 2, 3) == ('a', None, None)
- assert pat.match('b').group('a1', 'b2', 'c3') == (
- (None, 'b', None))
- assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
+ #assert pat.match('a').group(1, 2, 3) == ('a', None, None)
+ #assert pat.match('b').group('a1', 'b2', 'c3') == (
+ # (None, 'b', None))
+ #assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
def test_bug_923(self):
# Issue923: grouping inside optional lookahead problem
diff --git a/pypy/rlib/rsre/test/test_zinterp.py b/pypy/rlib/rsre/test/test_zinterp.py
--- a/pypy/rlib/rsre/test/test_zinterp.py
+++ b/pypy/rlib/rsre/test/test_zinterp.py
@@ -1,7 +1,8 @@
# minimal test: just checks that (parts of) rsre can be translated
-from pypy.rpython.test.test_llinterp import gengraph
+from pypy.rpython.test.test_llinterp import gengraph, interpret
from pypy.rlib.rsre import rsre_core
+from pypy.rlib.rsre.rsre_re import compile
def main(n):
assert n >= 0
@@ -19,3 +20,18 @@
def test_gengraph():
t, typer, graph = gengraph(main, [int])
+
+m = compile("(a|b)aaaaa")
+
+def test_match():
+ def f(i):
+ if i:
+ s = "aaaaaa"
+ else:
+ s = "caaaaa"
+ g = m.match(s)
+ if g is None:
+ return 3
+ return int("aaaaaa" == g.group(0))
+ assert interpret(f, [3]) == 1
+ assert interpret(f, [0]) == 3
diff --git a/pypy/rpython/annlowlevel.py b/pypy/rpython/annlowlevel.py
--- a/pypy/rpython/annlowlevel.py
+++ b/pypy/rpython/annlowlevel.py
@@ -543,11 +543,11 @@
else:
assert False
+ hop.exception_cannot_occur()
if isinstance(hop.args_r[1], rpbc.NoneFrozenPBCRepr):
return hop.inputconst(PTR, null)
v_arg = hop.inputarg(hop.args_r[1], arg=1)
assert isinstance(v_arg.concretetype, T)
- hop.exception_cannot_occur()
return hop.genop(opname, [v_arg], resulttype = PTR)
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -1426,23 +1426,25 @@
self._visit_young_rawmalloced_object(obj)
return
#
- # If 'obj' was already forwarded, change it to its forwarding address.
- if self.is_forwarded(obj):
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
+ #
+ # Common case: 'obj' was not already forwarded (otherwise
+ # tid == -42, containing all flags), and it doesn't have the
+ # HAS_SHADOW flag either. We must move it out of the nursery,
+ # into a new nonmovable location.
+ totalsize = size_gc_header + self.get_size(obj)
+ newhdr = self._malloc_out_of_nursery(totalsize)
+ #
+ elif self.is_forwarded(obj):
+ #
+ # 'obj' was already forwarded. Change the original reference
+ # to point to its forwarding address, and we're done.
root.address[0] = self.get_forwarding_address(obj)
return
- #
- # First visit to 'obj': we must move it out of the nursery.
- size_gc_header = self.gcheaderbuilder.size_gc_header
- size = self.get_size(obj)
- totalsize = size_gc_header + size
- #
- if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
- #
- # Common case: allocate a new nonmovable location for it.
- newhdr = self._malloc_out_of_nursery(totalsize)
#
else:
- # The object has already a shadow.
+ # First visit to an object that has already a shadow.
newobj = self.nursery_objects_shadows.get(obj)
ll_assert(newobj != NULL, "GCFLAG_HAS_SHADOW but no shadow found")
newhdr = newobj - size_gc_header
@@ -1450,6 +1452,8 @@
# Remove the flag GCFLAG_HAS_SHADOW, so that it doesn't get
# copied to the shadow itself.
self.header(obj).tid &= ~GCFLAG_HAS_SHADOW
+ #
+ totalsize = size_gc_header + self.get_size(obj)
#
# Copy it. Note that references to other objects in the
# nursery are kept unchanged in this step.
diff --git a/pypy/rpython/module/ll_os_stat.py b/pypy/rpython/module/ll_os_stat.py
--- a/pypy/rpython/module/ll_os_stat.py
+++ b/pypy/rpython/module/ll_os_stat.py
@@ -455,6 +455,6 @@
return intmask(time), intmask(nsec)
def time_t_to_FILE_TIME(time, filetime):
- ft = (rffi.r_longlong(time) + secs_between_epochs) * 10000000
+ ft = rffi.r_longlong((time + secs_between_epochs) * 10000000)
filetime.c_dwHighDateTime = rffi.r_uint(ft >> 32)
filetime.c_dwLowDateTime = rffi.r_uint(ft) # masking off high bits
diff --git a/pypy/test_all.py b/pypy/test_all.py
old mode 100755
new mode 100644
--- a/pypy/test_all.py
+++ b/pypy/test_all.py
@@ -11,11 +11,12 @@
"""
import sys, os
-if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
- print >> sys.stderr, __doc__
- sys.exit(2)
if __name__ == '__main__':
+ if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
+ print >> sys.stderr, __doc__
+ sys.exit(2)
+
import tool.autopath
import pytest
import pytest_cov
diff --git a/pypy/tool/jitlogparser/test/__init__.py b/pypy/tool/jitlogparser/test/__init__.py
new file mode 100644
diff --git a/pypy/tool/pytest/test/test_pytestsupport.py b/pypy/tool/pytest/test/test_pytestsupport.py
--- a/pypy/tool/pytest/test/test_pytestsupport.py
+++ b/pypy/tool/pytest/test/test_pytestsupport.py
@@ -165,7 +165,10 @@
def test_one(self): exec 'blow'
""")
- ev, = sorter.getreports("pytest_runtest_logreport")
+ reports = sorter.getreports("pytest_runtest_logreport")
+ setup, ev, teardown = reports
assert ev.failed
+ assert setup.passed
+ assert teardown.passed
assert 'NameError' in ev.longrepr.reprcrash.message
assert 'blow' in ev.longrepr.reprcrash.message
diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py
--- a/pypy/translator/c/gcc/trackgcroot.py
+++ b/pypy/translator/c/gcc/trackgcroot.py
@@ -847,6 +847,10 @@
if sources:
target, = sources
+ if target.endswith('@PLT'):
+ # In -fPIC mode, all functions calls have this suffix
+ target = target[:-4]
+
if target in self.FUNCTIONS_NOT_RETURNING:
return [InsnStop(target)]
if self.format == 'mingw32' and target == '__alloca':
@@ -1137,7 +1141,7 @@
r_jump_rel_label = re.compile(r"\tj\w+\s+"+"(\d+)f"+"\s*$")
r_unaryinsn_star= re.compile(r"\t[a-z]\w*\s+[*]("+OPERAND+")\s*$")
- r_jmptable_item = re.compile(r"\t.quad\t"+LABEL+"(-\"[A-Za-z0-9$]+\")?\s*$")
+ r_jmptable_item = re.compile(r"\t.(?:quad|long)\t"+LABEL+"(-\"[A-Za-z0-9$]+\"|-"+LABEL+")?\s*$")
r_jmptable_end = re.compile(r"\t.text|\t.section\s+.text|\t\.align|"+LABEL)
r_gcroot_marker = re.compile(r"\t/[*] GCROOT ("+LOCALVARFP+") [*]/")
diff --git a/testrunner/runner.py b/testrunner/runner.py
--- a/testrunner/runner.py
+++ b/testrunner/runner.py
@@ -110,7 +110,10 @@
do_dry_run=False, timeout=None,
_win32=(sys.platform=='win32')):
args = interp + test_driver
- args += ['-p', 'resultlog', '--resultlog=%s' % logfname, test]
+ args += ['-p', 'resultlog',
+ '--resultlog=%s' % logfname,
+ '--junitxml=%s.junit' % logfname,
+ test]
args = map(str, args)
interp0 = args[0]
diff --git a/testrunner/scratchbox_runner.py b/testrunner/scratchbox_runner.py
--- a/testrunner/scratchbox_runner.py
+++ b/testrunner/scratchbox_runner.py
@@ -14,14 +14,14 @@
def dry_run_scratchbox(args, cwd, out, timeout=None):
return dry_run(args_for_scratchbox(cwd, args), cwd, out, timeout)
-import runner
-# XXX hack hack hack
-dry_run = runner.dry_run
-run = runner.run
+if __name__ == '__main__':
+ import runner
+ # XXX hack hack hack
+ dry_run = runner.dry_run
+ run = runner.run
-runner.dry_run = dry_run_scratchbox
-runner.run = run_scratchbox
+ runner.dry_run = dry_run_scratchbox
+ runner.run = run_scratchbox
-if __name__ == '__main__':
import sys
runner.main(sys.argv)
diff --git a/testrunner/test/conftest.py b/testrunner/test/conftest.py
new file mode 100644
--- /dev/null
+++ b/testrunner/test/conftest.py
@@ -0,0 +1,6 @@
+
+def pytest_runtest_makereport(__multicall__, item):
+ report = __multicall__.execute()
+ if 'out' in item.funcargs:
+ report.sections.append(('out', item.funcargs['out'].read()))
+ return report
diff --git a/testrunner/test/test_runner.py b/testrunner/test/test_runner.py
--- a/testrunner/test/test_runner.py
+++ b/testrunner/test/test_runner.py
@@ -53,49 +53,44 @@
assert not should_report_failure("F Def\n. Ghi\n. Jkl\n")
+
class TestRunHelper(object):
+ def pytest_funcarg__out(self, request):
+ tmpdir = request.getfuncargvalue('tmpdir')
+ return tmpdir.ensure('out')
- def setup_method(self, meth):
- h, self.fn = tempfile.mkstemp()
- os.close(h)
+ def test_run(self, out):
+ res = runner.run([sys.executable, "-c", "print 42"], '.', out)
+ assert res == 0
+ assert out.read() == "42\n"
- def teardown_method(self, meth):
- os.unlink(self.fn)
-
- def test_run(self):
- res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn))
- assert res == 0
- out = py.path.local(self.fn).read('r')
- assert out == "42\n"
-
- def test_error(self):
- res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', py.path.local(self.fn))
+ def test_error(self, out):
+ res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', out)
assert res == 3
- def test_signal(self):
+ def test_signal(self, out):
if sys.platform == 'win32':
py.test.skip("no death by signal on windows")
- res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', py.path.local(self.fn))
+ res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', out)
assert res == -9
- def test_timeout(self):
- res = runner.run([sys.executable, "-c", "while True: pass"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout(self, out):
+ res = runner.run([sys.executable, "-c", "while True: pass"], '.', out, timeout=3)
assert res == -999
- def test_timeout_lock(self):
- res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_lock(self, out):
+ res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', out, timeout=3)
assert res == -999
- def test_timeout_syscall(self):
- res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_syscall(self, out):
+ res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', out, timeout=3)
assert res == -999
- def test_timeout_success(self):
+ def test_timeout_success(self, out):
res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn), timeout=2)
+ out, timeout=2)
assert res == 0
- out = py.path.local(self.fn).read('r')
+ out = out.read()
assert out == "42\n"
@@ -122,7 +117,10 @@
expected = ['INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
+
'test_one']
assert self.called == (expected, '/wd', 'out', 'secs')
@@ -138,9 +136,11 @@
expected = ['/wd' + os.sep + './INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
'test_one']
-
+ assert self.called[0] == expected
assert self.called == (expected, '/wd', 'out', 'secs')
assert res == 0
@@ -251,7 +251,7 @@
assert '\n' in log
log_lines = log.splitlines()
- assert log_lines[0] == ". test_normal/test_example.py:test_one"
+ assert ". test_normal/test_example.py::test_one" in log_lines
nfailures = 0
noutcomes = 0
for line in log_lines:
From noreply at buildbot.pypy.org Fri Apr 13 05:43:24 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 05:43:24 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: remove debugging printout
Message-ID: <20120413034324.576B782F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54320:b1f5b7f6d85b
Date: 2012-04-12 11:29 -0700
http://bitbucket.org/pypy/pypy/changeset/b1f5b7f6d85b/
Log: remove debugging printout
diff --git a/pypy/module/cppyy/test/test_stltypes.py b/pypy/module/cppyy/test/test_stltypes.py
--- a/pypy/module/cppyy/test/test_stltypes.py
+++ b/pypy/module/cppyy/test/test_stltypes.py
@@ -65,9 +65,7 @@
#-----
v = tv1()
for i in range(self.N):
- print i, 'before'
v.push_back(i)
- print i, 'after'
assert v.size() == i+1
assert v.at(i) == i
assert v[i] == i
From noreply at buildbot.pypy.org Fri Apr 13 05:43:25 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 05:43:25 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: unlike for stubs (for which a
value type is used), libffi requires a pointer type for const builtin&
Message-ID: <20120413034325.9A05282F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54321:86397c9e6d24
Date: 2012-04-12 11:52 -0700
http://bitbucket.org/pypy/pypy/changeset/86397c9e6d24/
Log: unlike for stubs (for which a value type is used), libffi requires a
pointer type for const builtin&
diff --git a/pypy/module/cppyy/converter.py b/pypy/module/cppyy/converter.py
--- a/pypy/module/cppyy/converter.py
+++ b/pypy/module/cppyy/converter.py
@@ -275,6 +275,10 @@
def _unwrap_object(self, space, w_obj):
return rffi.cast(rffi.SHORT, space.int_w(w_obj))
+class ConstShortRefConverter(ShortConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class UnsignedShortConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.sshort
@@ -286,6 +290,10 @@
def _unwrap_object(self, space, w_obj):
return rffi.cast(rffi.USHORT, space.int_w(w_obj))
+class ConstUnsignedShortRefConverter(UnsignedShortConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class IntConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.sint
@@ -297,6 +305,10 @@
def _unwrap_object(self, space, w_obj):
return rffi.cast(rffi.INT, space.c_int_w(w_obj))
+class ConstIntRefConverter(IntConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class UnsignedIntConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.uint
@@ -308,6 +320,10 @@
def _unwrap_object(self, space, w_obj):
return rffi.cast(rffi.UINT, space.uint_w(w_obj))
+class ConstUnsignedIntRefConverter(UnsignedIntConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class LongConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.slong
@@ -319,6 +335,10 @@
def _unwrap_object(self, space, w_obj):
return space.int_w(w_obj)
+class ConstLongRefConverter(LongConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class UnsignedLongConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.ulong
@@ -330,6 +350,9 @@
def _unwrap_object(self, space, w_obj):
return space.uint_w(w_obj)
+class ConstUnsignedLongRefConverter(UnsignedLongConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
class FloatConverter(FloatTypeConverterMixin, TypeConverter):
_immutable_ = True
@@ -352,6 +375,10 @@
rffiptr = rffi.cast(self.rffiptype, address)
return space.wrap(float(rffiptr[0]))
+class ConstFloatRefConverter(FloatConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class DoubleConverter(FloatTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.double
@@ -367,6 +394,10 @@
def _unwrap_object(self, space, w_obj):
return space.float_w(w_obj)
+class ConstDoubleRefConverter(DoubleConverter):
+ _immutable_ = True
+ libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+
class CStringConverter(TypeConverter):
_immutable_ = True
@@ -593,10 +624,10 @@
# 1) full, exact match
# 1a) const-removed match
# 2) match of decorated, unqualified type
- # 3) accept const ref as by value
- # 4) accept ref as pointer
- # 5) generalized cases (covers basically all user classes)
- # 6) void converter, which fails on use
+ # 3) accept ref as pointer (for the stubs, const& can be
+ # by value, but that does not work for the ffi path)
+ # 4) generalized cases (covers basically all user classes)
+ # 5) void converter, which fails on use
name = capi.c_resolve_name(name)
@@ -622,14 +653,9 @@
except KeyError:
pass
- # 3) accept const ref as by value
- if compound and compound[len(compound)-1] == "&":
- try:
- return _converters[clean_name](space, default)
- except KeyError:
- pass
+ # 3) TODO: accept ref as pointer
- # 5) generalized cases (covers basically all user classes)
+ # 4) generalized cases (covers basically all user classes)
from pypy.module.cppyy import interp_cppyy
cppclass = interp_cppyy.scope_byname(space, clean_name)
if cppclass:
@@ -643,7 +669,7 @@
elif capi.c_is_enum(clean_name):
return UnsignedIntConverter(space, default)
- # 6) void converter, which fails on use
+ # 5) void converter, which fails on use
#
# return a void converter here, so that the class can be build even
# when some types are unknown; this overload will simply fail on use
@@ -654,17 +680,29 @@
_converters["char"] = CharConverter
_converters["unsigned char"] = CharConverter
_converters["short int"] = ShortConverter
+_converters["const short int&"] = ConstIntRefConverter
_converters["short"] = _converters["short int"]
+_converters["const short&"] = _converters["const short int&"]
_converters["unsigned short int"] = UnsignedShortConverter
+_converters["const unsigned short int&"] = ConstUnsignedShortRefConverter
_converters["unsigned short"] = _converters["unsigned short int"]
+_converters["const unsigned short&"] = _converters["const unsigned short int&"]
_converters["int"] = IntConverter
+_converters["const int&"] = ConstIntRefConverter
_converters["unsigned int"] = UnsignedIntConverter
+_converters["const unsigned int&"] = ConstUnsignedIntRefConverter
_converters["long int"] = LongConverter
+_converters["const long int&"] = ConstLongRefConverter
_converters["long"] = _converters["long int"]
+_converters["const long&"] = _converters["const long int&"]
_converters["unsigned long int"] = UnsignedLongConverter
+_converters["const unsigned long int&"] = ConstUnsignedLongRefConverter
_converters["unsigned long"] = _converters["unsigned long int"]
+_converters["const unsigned long&"] = _converters["const unsigned long int&"]
_converters["float"] = FloatConverter
+_converters["const float&"] = ConstFloatRefConverter
_converters["double"] = DoubleConverter
+_converters["const double&"] = ConstDoubleRefConverter
_converters["const char*"] = CStringConverter
_converters["char*"] = CStringConverter
_converters["void*"] = VoidPtrConverter
diff --git a/pypy/module/cppyy/genreflex-methptrgetter.patch b/pypy/module/cppyy/genreflex-methptrgetter.patch
--- a/pypy/module/cppyy/genreflex-methptrgetter.patch
+++ b/pypy/module/cppyy/genreflex-methptrgetter.patch
@@ -1,6 +1,6 @@
Index: cint/reflex/python/genreflex/gendict.py
===================================================================
---- cint/reflex/python/genreflex/gendict.py (revision 40448)
+--- cint/reflex/python/genreflex/gendict.py (revision 43705)
+++ cint/reflex/python/genreflex/gendict.py (working copy)
@@ -52,6 +52,7 @@
self.typedefs_for_usr = []
@@ -10,7 +10,7 @@
# The next is to avoid a known problem with gccxml that it generates a
# references to id equal '_0' which is not defined anywhere
self.xref['_0'] = {'elem':'Unknown', 'attrs':{'id':'_0','name':''}, 'subelems':[]}
-@@ -1281,6 +1282,8 @@
+@@ -1306,6 +1307,8 @@
bases = self.getBases( attrs['id'] )
if inner and attrs.has_key('demangled') and self.isUnnamedType(attrs['demangled']) :
cls = attrs['demangled']
@@ -19,7 +19,7 @@
clt = ''
else:
cls = self.genTypeName(attrs['id'],const=True,colon=True)
-@@ -1318,7 +1321,7 @@
+@@ -1343,7 +1346,7 @@
# Inner class/struct/union/enum.
for m in memList :
member = self.xref[m]
@@ -28,7 +28,7 @@
and member['attrs'].get('access') in ('private','protected') \
and not self.isUnnamedType(member['attrs'].get('demangled')):
cmem = self.genTypeName(member['attrs']['id'],const=True,colon=True)
-@@ -1956,8 +1959,15 @@
+@@ -1981,8 +1984,15 @@
else : params = '0'
s = ' .AddFunctionMember(%s, Reflex::Literal("%s"), %s%s, 0, %s, %s)' % (self.genTypeID(id), name, type, id, params, mod)
s += self.genCommentProperty(attrs)
@@ -44,7 +44,7 @@
def genMCODef(self, type, name, attrs, args):
id = attrs['id']
cl = self.genTypeName(attrs['context'],colon=True)
-@@ -2024,8 +2034,44 @@
+@@ -2049,8 +2059,44 @@
if returns == 'void' : body += ' }\n'
else : body += ' }\n'
body += '}\n'
@@ -92,7 +92,7 @@
for a in args :
Index: cint/reflex/python/genreflex/genreflex.py
===================================================================
---- cint/reflex/python/genreflex/genreflex.py (revision 40448)
+--- cint/reflex/python/genreflex/genreflex.py (revision 43705)
+++ cint/reflex/python/genreflex/genreflex.py (working copy)
@@ -108,6 +108,10 @@
Print extra debug information while processing. Keep intermediate files\n
diff --git a/pypy/module/cppyy/test/Makefile b/pypy/module/cppyy/test/Makefile
--- a/pypy/module/cppyy/test/Makefile
+++ b/pypy/module/cppyy/test/Makefile
@@ -46,17 +46,9 @@
ifeq ($(CINT),)
# TODO: methptrgetter causes these tests to crash, so don't use it for now
-#stltypesDict.so: stltypes.cxx stltypes.h stltypes.xml
-# $(genreflex) stltypes.h --selection=stltypes.xml
-# g++ -o $@ stltypes_rflx.cpp stltypes.cxx -shared -lReflex $(cppflags) $(cppflags2)
-
std_streamsDict.so: std_streams.cxx std_streams.h std_streams.xml
$(genreflex) std_streams.h --selection=std_streams.xml
g++ -o $@ std_streams_rflx.cpp std_streams.cxx -shared -lReflex $(cppflags) $(cppflags2)
-
-operatorsDict.so: operators.cxx operators.h operators.xml
- $(genreflex) operators.h --selection=operators.xml
- g++ -o $@ operators_rflx.cpp operators.cxx -shared -lReflex $(cppflags) $(cppflags2)
endif
.PHONY: clean
diff --git a/pypy/module/cppyy/test/datatypes.cxx b/pypy/module/cppyy/test/datatypes.cxx
--- a/pypy/module/cppyy/test/datatypes.cxx
+++ b/pypy/module/cppyy/test/datatypes.cxx
@@ -114,18 +114,26 @@
cppyy_test_pod*& cppyy_test_data::get_pod_ptrref() { return m_ppod; }
//- setters -----------------------------------------------------------------
-void cppyy_test_data::set_bool(bool b) { m_bool = b; }
-void cppyy_test_data::set_char(char c) { m_char = c; }
-void cppyy_test_data::set_uchar(unsigned char uc) { m_uchar = uc; }
-void cppyy_test_data::set_short(short s) { m_short = s; }
-void cppyy_test_data::set_ushort(unsigned short us) { m_ushort = us; }
-void cppyy_test_data::set_int(int i) { m_int = i; }
-void cppyy_test_data::set_uint(unsigned int ui) { m_uint = ui; }
-void cppyy_test_data::set_long(long l) { m_long = l; }
-void cppyy_test_data::set_ulong(unsigned long ul) { m_ulong = ul; }
-void cppyy_test_data::set_float(float f) { m_float = f; }
-void cppyy_test_data::set_double(double d) { m_double = d; }
-void cppyy_test_data::set_enum(what w) { m_enum = w; }
+void cppyy_test_data::set_bool(bool b) { m_bool = b; }
+void cppyy_test_data::set_char(char c) { m_char = c; }
+void cppyy_test_data::set_uchar(unsigned char uc) { m_uchar = uc; }
+void cppyy_test_data::set_short(short s) { m_short = s; }
+void cppyy_test_data::set_short_c(const short& s) { m_short = s; }
+void cppyy_test_data::set_ushort(unsigned short us) { m_ushort = us; }
+void cppyy_test_data::set_ushort_c(const unsigned short& us) { m_ushort = us; }
+void cppyy_test_data::set_int(int i) { m_int = i; }
+void cppyy_test_data::set_int_c(const int& i) { m_int = i; }
+void cppyy_test_data::set_uint(unsigned int ui) { m_uint = ui; }
+void cppyy_test_data::set_uint_c(const unsigned int& ui) { m_uint = ui; }
+void cppyy_test_data::set_long(long l) { m_long = l; }
+void cppyy_test_data::set_long_c(const long& l) { m_long = l; }
+void cppyy_test_data::set_ulong(unsigned long ul) { m_ulong = ul; }
+void cppyy_test_data::set_ulong_c(const unsigned long& ul) { m_ulong = ul; }
+void cppyy_test_data::set_float(float f) { m_float = f; }
+void cppyy_test_data::set_float_c(const float& f) { m_float = f; }
+void cppyy_test_data::set_double(double d) { m_double = d; }
+void cppyy_test_data::set_double_c(const double& d) { m_double = d; }
+void cppyy_test_data::set_enum(what w) { m_enum = w; }
char cppyy_test_data::s_char = 's';
unsigned char cppyy_test_data::s_uchar = 'u';
diff --git a/pypy/module/cppyy/test/datatypes.h b/pypy/module/cppyy/test/datatypes.h
--- a/pypy/module/cppyy/test/datatypes.h
+++ b/pypy/module/cppyy/test/datatypes.h
@@ -62,13 +62,21 @@
void set_char(char c);
void set_uchar(unsigned char uc);
void set_short(short s);
+ void set_short_c(const short& s);
void set_ushort(unsigned short us);
+ void set_ushort_c(const unsigned short& us);
void set_int(int i);
+ void set_int_c(const int& i);
void set_uint(unsigned int ui);
+ void set_uint_c(const unsigned int& ui);
void set_long(long l);
+ void set_long_c(const long& l);
void set_ulong(unsigned long ul);
+ void set_ulong_c(const unsigned long& ul);
void set_float(float f);
+ void set_float_c(const float& f);
void set_double(double d);
+ void set_double_c(const double& d);
void set_enum(what w);
public:
diff --git a/pypy/module/cppyy/test/test_datatypes.py b/pypy/module/cppyy/test/test_datatypes.py
--- a/pypy/module/cppyy/test/test_datatypes.py
+++ b/pypy/module/cppyy/test/test_datatypes.py
@@ -152,8 +152,12 @@
assert eval('c.get_%s()' % names[i]) == i
for i in range(len(names)):
- exec 'c.set_%s = %d' % (names[i],2*i)
- assert eval('c.m_%s' % names[i]) == i
+ exec 'c.set_%s(%d)' % (names[i],2*i)
+ assert eval('c.m_%s' % names[i]) == 2*i
+
+ for i in range(len(names)):
+ exec 'c.set_%s_c(%d)' % (names[i],3*i)
+ assert eval('c.m_%s' % names[i]) == 3*i
# float types through functions
c.set_float( 0.123 ); assert round(c.get_float() - 0.123, 5) == 0
@@ -161,9 +165,11 @@
# float types through data members
c.m_float = 0.123; assert round(c.get_float() - 0.123, 5) == 0
- c.set_float( 0.234 ); assert round(c.m_float - 0.234, 5) == 0
- c.m_double = 0.456; assert round(c.get_double() - 0.456, 8) == 0
- c.set_double( 0.567 ); assert round(c.m_double - 0.567, 8) == 0
+ c.set_float(0.234); assert round(c.m_float - 0.234, 5) == 0
+ c.set_float_c(0.456); assert round(c.m_float - 0.456, 5) == 0
+ c.m_double = 0.678; assert round(c.get_double() - 0.678, 8) == 0
+ c.set_double(0.890); assert round(c.m_double - 0.890, 8) == 0
+ c.set_double_c(0.012); assert round(c.m_double - 0.012, 8) == 0
# arrays; there will be pointer copies, so destroy the current ones
c.destroy_arrays()
From noreply at buildbot.pypy.org Fri Apr 13 05:43:26 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 05:43:26 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: const builtin& for CINT backend
Message-ID: <20120413034326.E57FA82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54322:6c02358185a1
Date: 2012-04-12 12:41 -0700
http://bitbucket.org/pypy/pypy/changeset/6c02358185a1/
Log: const builtin& for CINT backend
diff --git a/pypy/module/cppyy/converter.py b/pypy/module/cppyy/converter.py
--- a/pypy/module/cppyy/converter.py
+++ b/pypy/module/cppyy/converter.py
@@ -338,6 +338,13 @@
class ConstLongRefConverter(LongConverter):
_immutable_ = True
libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ typecode = 'r'
+
+ def convert_argument(self, space, w_obj, address):
+ x = rffi.cast(self.rffiptype, address)
+ x[0] = self._unwrap_object(space, w_obj)
+ ba = rffi.cast(rffi.CCHARP, address)
+ ba[capi.c_function_arg_typeoffset()] = self.typecode
class UnsignedLongConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
@@ -378,6 +385,7 @@
class ConstFloatRefConverter(FloatConverter):
_immutable_ = True
libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ typecode = 'F'
class DoubleConverter(FloatTypeConverterMixin, TypeConverter):
_immutable_ = True
@@ -397,6 +405,7 @@
class ConstDoubleRefConverter(DoubleConverter):
_immutable_ = True
libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ typecode = 'D'
class CStringConverter(TypeConverter):
diff --git a/pypy/module/cppyy/src/cintcwrapper.cxx b/pypy/module/cppyy/src/cintcwrapper.cxx
--- a/pypy/module/cppyy/src/cintcwrapper.cxx
+++ b/pypy/module/cppyy/src/cintcwrapper.cxx
@@ -167,15 +167,33 @@
for (int i = 0; i < libp->paran; ++i) {
libp->para[i].ref = libp->para[i].obj.i;
const char partype = libp->para[i].type;
- if (partype == 'p')
+ switch (partype) {
+ case 'p': {
libp->para[i].obj.i = (long)&libp->para[i].ref;
- else if (partype == 'r')
+ break;
+ }
+ case 'r': {
libp->para[i].ref = (long)&libp->para[i].obj.i;
- else if (partype == 'f') {
+ break;
+ }
+ case 'f': {
assert(sizeof(float) <= sizeof(long));
long val = libp->para[i].obj.i;
void* pval = (void*)&val;
libp->para[i].obj.d = *(float*)pval;
+ break;
+ }
+ case 'F': {
+ libp->para[i].ref = (long)&libp->para[i].obj.i;
+ libp->para[i].type = 'f';
+ break;
+ }
+ case 'D': {
+ libp->para[i].ref = (long)&libp->para[i].obj.i;
+ libp->para[i].type = 'd';
+ break;
+
+ }
}
}
}
From noreply at buildbot.pypy.org Fri Apr 13 05:43:28 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 05:43:28 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: const ref only worked b/c it
didn't; 2nd attempt with explicit tests
Message-ID: <20120413034328.5003682F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54323:1ea1ba708588
Date: 2012-04-12 20:43 -0700
http://bitbucket.org/pypy/pypy/changeset/1ea1ba708588/
Log: const ref only worked b/c it didn't; 2nd attempt with explicit tests
diff --git a/pypy/module/cppyy/converter.py b/pypy/module/cppyy/converter.py
--- a/pypy/module/cppyy/converter.py
+++ b/pypy/module/cppyy/converter.py
@@ -150,26 +150,39 @@
def convert_argument_libffi(self, space, w_obj, argchain):
argchain.arg(self._unwrap_object(space, w_obj))
+ return lltype.nullptr(rffi.VOIDP.TO)
def default_argument_libffi(self, space, argchain):
argchain.arg(self.default)
def from_memory(self, space, w_obj, w_pycppclass, offset):
address = self._get_raw_address(space, w_obj, offset)
- rffiptr = rffi.cast(self.rffiptype, address)
+ rffiptr = rffi.cast(self.c_ptrtype, address)
return space.wrap(rffiptr[0])
def to_memory(self, space, w_obj, w_value, offset):
address = self._get_raw_address(space, w_obj, offset)
- rffiptr = rffi.cast(self.rffiptype, address)
+ rffiptr = rffi.cast(self.c_ptrtype, address)
rffiptr[0] = self._unwrap_object(space, w_value)
+class ConstRefNumericTypeConverterMixin(NumericTypeConverterMixin):
+ _mixin_ = True
+ _immutable_ = True
+
+ def convert_argument_libffi(self, space, w_obj, argchain):
+ obj = self._unwrap_object(space, w_obj)
+ tbuf = lltype.malloc(self.c_ptrtype.TO, rffi.sizeof(self.c_type), flavor='raw')
+ tbuf[0] = obj
+ vbuf = rffi.cast(rffi.VOIDP, tbuf)
+ argchain.arg(vbuf)
+ return vbuf
+
class IntTypeConverterMixin(NumericTypeConverterMixin):
_mixin_ = True
_immutable_ = True
def convert_argument(self, space, w_obj, address):
- x = rffi.cast(self.rffiptype, address)
+ x = rffi.cast(self.c_ptrtype, address)
x[0] = self._unwrap_object(space, w_obj)
class FloatTypeConverterMixin(NumericTypeConverterMixin):
@@ -177,7 +190,7 @@
_immutable_ = True
def convert_argument(self, space, w_obj, address):
- x = rffi.cast(self.rffiptype, address)
+ x = rffi.cast(self.c_ptrtype, address)
x[0] = self._unwrap_object(space, w_obj)
ba = rffi.cast(rffi.CCHARP, address)
ba[capi.c_function_arg_typeoffset()] = self.typecode
@@ -212,6 +225,7 @@
def convert_argument_libffi(self, space, w_obj, argchain):
argchain.arg(self._unwrap_object(space, w_obj))
+ return lltype.nullptr(rffi.VOIDP.TO)
def from_memory(self, space, w_obj, w_pycppclass, offset):
address = rffi.cast(rffi.CCHARP, self._get_raw_address(space, w_obj, offset))
@@ -254,6 +268,7 @@
def convert_argument_libffi(self, space, w_obj, argchain):
argchain.arg(self._unwrap_object(space, w_obj))
+ return lltype.nullptr(rffi.VOIDP.TO)
def from_memory(self, space, w_obj, w_pycppclass, offset):
address = rffi.cast(rffi.CCHARP, self._get_raw_address(space, w_obj, offset))
@@ -267,7 +282,8 @@
class ShortConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.sshort
- rffiptype = rffi.SHORTP
+ c_type = rffi.SHORT
+ c_ptrtype = rffi.SHORTP
def __init__(self, space, default):
self.default = rffi.cast(rffi.SHORT, capi.c_strtoll(default))
@@ -275,73 +291,77 @@
def _unwrap_object(self, space, w_obj):
return rffi.cast(rffi.SHORT, space.int_w(w_obj))
-class ConstShortRefConverter(ShortConverter):
+class ConstShortRefConverter(ConstRefNumericTypeConverterMixin, ShortConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
class UnsignedShortConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.sshort
- rffiptype = rffi.USHORTP
+ c_type = rffi.USHORT
+ c_ptrtype = rffi.USHORTP
def __init__(self, space, default):
- self.default = rffi.cast(rffi.USHORT, capi.c_strtoull(default))
+ self.default = rffi.cast(self.c_type, capi.c_strtoull(default))
def _unwrap_object(self, space, w_obj):
- return rffi.cast(rffi.USHORT, space.int_w(w_obj))
+ return rffi.cast(self.c_type, space.int_w(w_obj))
-class ConstUnsignedShortRefConverter(UnsignedShortConverter):
+class ConstUnsignedShortRefConverter(ConstRefNumericTypeConverterMixin, UnsignedShortConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
class IntConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.sint
- rffiptype = rffi.INTP
+ c_type = rffi.INT
+ c_ptrtype = rffi.INTP
def __init__(self, space, default):
- self.default = rffi.cast(rffi.INT, capi.c_strtoll(default))
+ self.default = rffi.cast(self.c_type, capi.c_strtoll(default))
def _unwrap_object(self, space, w_obj):
- return rffi.cast(rffi.INT, space.c_int_w(w_obj))
+ return rffi.cast(self.c_type, space.c_int_w(w_obj))
-class ConstIntRefConverter(IntConverter):
+class ConstIntRefConverter(ConstRefNumericTypeConverterMixin, IntConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
class UnsignedIntConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.uint
- rffiptype = rffi.UINTP
+ c_type = rffi.UINT
+ c_ptrtype = rffi.UINTP
def __init__(self, space, default):
- self.default = rffi.cast(rffi.UINT, capi.c_strtoull(default))
+ self.default = rffi.cast(self.c_type, capi.c_strtoull(default))
def _unwrap_object(self, space, w_obj):
- return rffi.cast(rffi.UINT, space.uint_w(w_obj))
+ return rffi.cast(self.c_type, space.uint_w(w_obj))
-class ConstUnsignedIntRefConverter(UnsignedIntConverter):
+class ConstUnsignedIntRefConverter(ConstRefNumericTypeConverterMixin, UnsignedIntConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
class LongConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.slong
- rffiptype = rffi.LONGP
+ c_type = rffi.LONG
+ c_ptrtype = rffi.LONGP
def __init__(self, space, default):
- self.default = rffi.cast(rffi.LONG, capi.c_strtoll(default))
+ self.default = rffi.cast(self.c_type, capi.c_strtoll(default))
def _unwrap_object(self, space, w_obj):
return space.int_w(w_obj)
-class ConstLongRefConverter(LongConverter):
+class ConstLongRefConverter(ConstRefNumericTypeConverterMixin, LongConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
typecode = 'r'
def convert_argument(self, space, w_obj, address):
- x = rffi.cast(self.rffiptype, address)
+ x = rffi.cast(self.c_ptrtype, address)
x[0] = self._unwrap_object(space, w_obj)
ba = rffi.cast(rffi.CCHARP, address)
ba[capi.c_function_arg_typeoffset()] = self.typecode
@@ -349,23 +369,25 @@
class UnsignedLongConverter(IntTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.ulong
- rffiptype = rffi.ULONGP
+ c_type = rffi.ULONG
+ c_ptrtype = rffi.ULONGP
def __init__(self, space, default):
- self.default = rffi.cast(rffi.ULONG, capi.c_strtoull(default))
+ self.default = rffi.cast(self.c_type, capi.c_strtoull(default))
def _unwrap_object(self, space, w_obj):
return space.uint_w(w_obj)
-class ConstUnsignedLongRefConverter(UnsignedLongConverter):
+class ConstUnsignedLongRefConverter(ConstRefNumericTypeConverterMixin, UnsignedLongConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
class FloatConverter(FloatTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.float
- rffiptype = rffi.FLOATP
- typecode = 'f'
+ c_type = rffi.FLOAT
+ c_ptrtype = rffi.FLOATP
+ typecode = 'f'
def __init__(self, space, default):
if default:
@@ -379,32 +401,37 @@
def from_memory(self, space, w_obj, w_pycppclass, offset):
address = self._get_raw_address(space, w_obj, offset)
- rffiptr = rffi.cast(self.rffiptype, address)
+ rffiptr = rffi.cast(self.c_ptrtype, address)
return space.wrap(float(rffiptr[0]))
class ConstFloatRefConverter(FloatConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
typecode = 'F'
+ def convert_argument_libffi(self, space, w_obj, argchain):
+ from pypy.module.cppyy.interp_cppyy import FastCallNotPossible
+ raise FastCallNotPossible
+
class DoubleConverter(FloatTypeConverterMixin, TypeConverter):
_immutable_ = True
libffitype = libffi.types.double
- rffiptype = rffi.DOUBLEP
- typecode = 'd'
+ c_type = rffi.DOUBLE
+ c_ptrtype = rffi.DOUBLEP
+ typecode = 'd'
def __init__(self, space, default):
if default:
- self.default = rffi.cast(rffi.DOUBLE, rfloat.rstring_to_float(default))
+ self.default = rffi.cast(self.c_type, rfloat.rstring_to_float(default))
else:
- self.default = rffi.cast(rffi.DOUBLE, 0.)
+ self.default = rffi.cast(self.c_type, 0.)
def _unwrap_object(self, space, w_obj):
return space.float_w(w_obj)
-class ConstDoubleRefConverter(DoubleConverter):
+class ConstDoubleRefConverter(ConstRefNumericTypeConverterMixin, DoubleConverter):
_immutable_ = True
- libffitype = lltype.nullptr(clibffi.FFI_TYPE_P.TO)
+ libffitype = libffi.types.pointer
typecode = 'D'
@@ -438,6 +465,7 @@
def convert_argument_libffi(self, space, w_obj, argchain):
argchain.arg(get_rawobject(space, w_obj))
+ return lltype.nullptr(rffi.VOIDP.TO)
class VoidPtrPtrConverter(TypeConverter):
@@ -572,6 +600,7 @@
def convert_argument_libffi(self, space, w_obj, argchain):
argchain.arg(self._unwrap_object(space, w_obj))
+ return lltype.nullptr(rffi.VOIDP.TO)
def from_memory(self, space, w_obj, w_pycppclass, offset):
address = rffi.cast(capi.C_OBJECT, self._get_raw_address(space, w_obj, offset))
@@ -689,7 +718,7 @@
_converters["char"] = CharConverter
_converters["unsigned char"] = CharConverter
_converters["short int"] = ShortConverter
-_converters["const short int&"] = ConstIntRefConverter
+_converters["const short int&"] = ConstShortRefConverter
_converters["short"] = _converters["short int"]
_converters["const short&"] = _converters["const short int&"]
_converters["unsigned short int"] = UnsignedShortConverter
diff --git a/pypy/module/cppyy/interp_cppyy.py b/pypy/module/cppyy/interp_cppyy.py
--- a/pypy/module/cppyy/interp_cppyy.py
+++ b/pypy/module/cppyy/interp_cppyy.py
@@ -158,14 +158,21 @@
argchain = libffi.ArgChain()
argchain.arg(cppthis)
i = len(self.arg_defs)
- for i in range(len(args_w)):
- conv = self.arg_converters[i]
- w_arg = args_w[i]
- conv.convert_argument_libffi(self.space, w_arg, argchain)
- for j in range(i+1, len(self.arg_defs)):
- conv = self.arg_converters[j]
- conv.default_argument_libffi(self.space, argchain)
- return self.executor.execute_libffi(self.space, self._libffifunc, argchain)
+ refbuffers = []
+ try:
+ for i in range(len(args_w)):
+ conv = self.arg_converters[i]
+ w_arg = args_w[i]
+ refbuf = conv.convert_argument_libffi(self.space, w_arg, argchain)
+ if refbuf:
+ refbuffers.append(refbuf)
+ for j in range(i+1, len(self.arg_defs)):
+ conv = self.arg_converters[j]
+ conv.default_argument_libffi(self.space, argchain)
+ return self.executor.execute_libffi(self.space, self._libffifunc, argchain)
+ finally:
+ for refbuf in refbuffers:
+ lltype.free(refbuf, flavor='raw')
def _setup(self, cppthis):
self.arg_converters = [converter.get_converter(self.space, arg_type, arg_dflt)
diff --git a/pypy/module/cppyy/test/bench1.py b/pypy/module/cppyy/test/bench1.py
--- a/pypy/module/cppyy/test/bench1.py
+++ b/pypy/module/cppyy/test/bench1.py
@@ -72,6 +72,14 @@
addDataToInt.call(instance, i)
return i
+class CppyyInterpBench3(CppyyInterpBench1):
+ def __call__(self):
+ addDataToInt = self.cls.get_overload("addDataToIntConstRef")
+ instance = self.inst
+ for i in range(NNN):
+ addDataToInt.call(instance, i)
+ return i
+
class CppyyPythonBench1(object):
scale = 1
def __init__(self):
@@ -121,6 +129,7 @@
print "warming up ... "
interp_bench1 = CppyyInterpBench1()
interp_bench2 = CppyyInterpBench2()
+ interp_bench3 = CppyyInterpBench3()
python_bench1 = CppyyPythonBench1()
interp_bench1(); interp_bench2(); python_bench1()
@@ -130,6 +139,7 @@
# test runs ...
print_bench("cppyy interp", run_bench(interp_bench1))
print_bench("... overload", run_bench(interp_bench2))
+ print_bench("... constref", run_bench(interp_bench3))
print_bench("cppyy python", run_bench(python_bench1))
stat, t_cintex = commands.getstatusoutput("python bench1.py --pycintex")
print_bench("pycintex ", float(t_cintex))
diff --git a/pypy/module/cppyy/test/example01.cxx b/pypy/module/cppyy/test/example01.cxx
--- a/pypy/module/cppyy/test/example01.cxx
+++ b/pypy/module/cppyy/test/example01.cxx
@@ -91,6 +91,10 @@
return m_somedata + a;
}
+int example01::addDataToIntConstRef(const int& a) {
+ return m_somedata + a;
+}
+
int example01::overloadedAddDataToInt(int a, int b) {
return m_somedata + a + b;
}
diff --git a/pypy/module/cppyy/test/example01.h b/pypy/module/cppyy/test/example01.h
--- a/pypy/module/cppyy/test/example01.h
+++ b/pypy/module/cppyy/test/example01.h
@@ -39,6 +39,7 @@
public: // instance methods
int addDataToInt(int a);
+ int addDataToIntConstRef(const int& a);
int overloadedAddDataToInt(int a, int b);
int overloadedAddDataToInt(int a);
int overloadedAddDataToInt(int a, int b, int c);
diff --git a/pypy/module/cppyy/test/test_zjit.py b/pypy/module/cppyy/test/test_zjit.py
--- a/pypy/module/cppyy/test/test_zjit.py
+++ b/pypy/module/cppyy/test/test_zjit.py
@@ -1,3 +1,4 @@
+import py, os, sys
from pypy.jit.metainterp.test.support import LLJitMixin
from pypy.rlib.objectmodel import specialize, instantiate
from pypy.rlib import rarithmetic, jit
@@ -6,6 +7,18 @@
from pypy.module.cppyy import interp_cppyy, capi
+
+currpath = py.path.local(__file__).dirpath()
+test_dct = str(currpath.join("example01Dict.so"))
+
+def setup_module(mod):
+ if sys.platform == 'win32':
+ py.test.skip("win32 not supported so far")
+ err = os.system("cd '%s' && make example01Dict.so" % currpath)
+ if err:
+ raise OSError("'make' failed (see stderr)")
+
+
class FakeBase(W_Root):
typename = None
@@ -150,24 +163,22 @@
return True
class TestFastPathJIT(LLJitMixin):
- def test_simple(self):
- """Test fast path being taken for methods"""
-
+ def _run_zjit(self, method_name):
if capi.identify() == 'CINT': # CINT does not support fast path
return
space = FakeSpace()
- drv = jit.JitDriver(greens=[], reds=["i", "inst", "addDataToInt"])
+ drv = jit.JitDriver(greens=[], reds=["i", "inst", "cppmethod"])
def f():
lib = interp_cppyy.load_dictionary(space, "./example01Dict.so")
cls = interp_cppyy.scope_byname(space, "example01")
inst = cls.get_overload("example01").call(None, [FakeInt(0)])
- addDataToInt = cls.get_overload("addDataToInt")
+ cppmethod = cls.get_overload(method_name)
assert isinstance(inst, interp_cppyy.W_CPPInstance)
i = 10
while i > 0:
- drv.jit_merge_point(inst=inst, addDataToInt=addDataToInt, i=i)
- addDataToInt.call(inst, [FakeInt(i)])
+ drv.jit_merge_point(inst=inst, cppmethod=cppmethod, i=i)
+ cppmethod.call(inst, [FakeInt(i)])
i -= 1
return 7
f()
@@ -175,27 +186,17 @@
result = self.meta_interp(f, [], listops=True, backendopt=True, listcomp=True)
self.check_jitcell_token_count(1)
- def test_overload(self):
+ def test01_simple(self):
+ """Test fast path being taken for methods"""
+
+ self._run_zjit("addDataToInt")
+
+ def test02_overload(self):
"""Test fast path being taken for overloaded methods"""
- if capi.identify() == 'CINT': # CINT does not support fast path
- return
+ self._run_zjit("overloadedAddDataToInt")
- space = FakeSpace()
- drv = jit.JitDriver(greens=[], reds=["i", "inst", "addDataToInt"])
- def f():
- lib = interp_cppyy.load_dictionary(space, "./example01Dict.so")
- cls = interp_cppyy.scope_byname(space, "example01")
- inst = cls.get_overload("example01").call(None, [FakeInt(0)])
- addDataToInt = cls.get_overload("overloadedAddDataToInt")
- assert isinstance(inst, interp_cppyy.W_CPPInstance)
- i = 10
- while i > 0:
- drv.jit_merge_point(inst=inst, addDataToInt=addDataToInt, i=i)
- addDataToInt.call(inst, [FakeInt(i)])
- i -= 1
- return 7
- f()
- space = FakeSpace()
- result = self.meta_interp(f, [], listops=True, backendopt=True, listcomp=True)
- self.check_jitcell_token_count(1)
+ def test03_const_ref(self):
+ """Test fast path being taken for methods with const ref arguments"""
+
+ self._run_zjit("addDataToIntConstRef")
From noreply at buildbot.pypy.org Fri Apr 13 06:36:32 2012
From: noreply at buildbot.pypy.org (wlav)
Date: Fri, 13 Apr 2012 06:36:32 +0200 (CEST)
Subject: [pypy-commit] pypy reflex-support: merge default into branch
Message-ID: <20120413043632.9239082F4E@wyvern.cs.uni-duesseldorf.de>
Author: Wim Lavrijsen
Branch: reflex-support
Changeset: r54324:2fc2d0b8a090
Date: 2012-04-12 21:36 -0700
http://bitbucket.org/pypy/pypy/changeset/2fc2d0b8a090/
Log: merge default into branch
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -3746,9 +3746,9 @@
return g(i)
def main(i):
if i == 2:
- return f(i)
+ return f(2)
elif i == 3:
- return f(i)
+ return f(3)
else:
raise NotImplementedError
diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst
--- a/pypy/doc/cpython_differences.rst
+++ b/pypy/doc/cpython_differences.rst
@@ -158,6 +158,12 @@
.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html
.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html
+Note that this difference might show up indirectly in some cases. For
+example, a generator left pending in the middle is --- again ---
+garbage-collected later in PyPy than in CPython. You can see the
+difference if the ``yield`` keyword it is suspended at is itself
+enclosed in a ``try:`` or a ``with:`` block.
+
Using the default GC called ``minimark``, the built-in function ``id()``
works like it does in CPython. With other GCs it returns numbers that
are not real addresses (because an object can move around several times)
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -169,9 +169,11 @@
def _combine_starstarargs_wrapped(self, w_starstararg):
# unpack the ** arguments
space = self.space
+ keywords, values_w = space.view_as_kwargs(w_starstararg)
+ if keywords is not None: # this path also taken for empty dicts
+ self._add_keywordargs_no_unwrapping(keywords, values_w)
+ return not jit.isconstant(len(self.keywords))
if space.isinstance_w(w_starstararg, space.w_dict):
- if not space.is_true(w_starstararg):
- return False # don't call unpackiterable - it's jit-opaque
keys_w = space.unpackiterable(w_starstararg)
else:
try:
@@ -186,11 +188,8 @@
"a mapping, not %s" % (typename,)))
raise
keys_w = space.unpackiterable(w_keys)
- if keys_w:
- self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
- return True
- else:
- return False # empty dict; don't disable the JIT
+ self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
+ return True
def _do_combine_starstarargs_wrapped(self, keys_w, w_starstararg):
space = self.space
@@ -227,6 +226,26 @@
self.keywords_w = self.keywords_w + keywords_w
self.keyword_names_w = keys_w
+ @jit.look_inside_iff(lambda self, keywords, keywords_w:
+ jit.isconstant(len(keywords) and
+ jit.isconstant(self.keywords)))
+ def _add_keywordargs_no_unwrapping(self, keywords, keywords_w):
+ if self.keywords is None:
+ self.keywords = keywords[:] # copy to make non-resizable
+ self.keywords_w = keywords_w[:]
+ else:
+ # looks quadratic, but the JIT should remove all of it nicely.
+ # Also, all the lists should be small
+ for key in keywords:
+ for otherkey in self.keywords:
+ if otherkey == key:
+ raise operationerrfmt(self.space.w_TypeError,
+ "got multiple values "
+ "for keyword argument "
+ "'%s'", key)
+ self.keywords = self.keywords + keywords
+ self.keywords_w = self.keywords_w + keywords_w
+
def fixedunpack(self, argcount):
"""The simplest argument parsing: get the 'argcount' arguments,
or raise a real ValueError if the length is wrong."""
@@ -385,7 +404,7 @@
# collect extra keyword arguments into the **kwarg
if has_kwarg:
- w_kwds = self.space.newdict()
+ w_kwds = self.space.newdict(kwargs=True)
if num_remainingkwds:
#
limit = len(keywords)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -914,6 +914,12 @@
"""
return None
+ def view_as_kwargs(self, w_dict):
+ """ if w_dict is a kwargs-dict, return two lists, one of unwrapped
+ strings and one of wrapped values. otherwise return (None, None)
+ """
+ return (None, None)
+
def newlist_str(self, list_s):
return self.newlist([self.wrap(s) for s in list_s])
diff --git a/pypy/interpreter/test/test_argument.py b/pypy/interpreter/test/test_argument.py
--- a/pypy/interpreter/test/test_argument.py
+++ b/pypy/interpreter/test/test_argument.py
@@ -75,7 +75,10 @@
def unpackiterable(self, it):
return list(it)
- def newdict(self):
+ def view_as_kwargs(self, x):
+ return None, None
+
+ def newdict(self, kwargs=False):
return {}
def newlist(self, l=[]):
@@ -488,6 +491,57 @@
assert len(l) == 1
assert l[0] == space.wrap(5)
+ def test_starstarargs_special(self):
+ class kwargs(object):
+ def __init__(self, k, v):
+ self.k = k
+ self.v = v
+ class MyDummySpace(DummySpace):
+ def view_as_kwargs(self, kw):
+ if isinstance(kw, kwargs):
+ return kw.k, kw.v
+ return None, None
+ space = MyDummySpace()
+ for i in range(3):
+ kwds = [("c", 3)]
+ kwds_w = dict(kwds[:i])
+ keywords = kwds_w.keys()
+ keywords_w = kwds_w.values()
+ rest = dict(kwds[i:])
+ w_kwds = kwargs(rest.keys(), rest.values())
+ if i == 2:
+ w_kwds = None
+ assert len(keywords) == len(keywords_w)
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c"]), defaults_w=[4])
+ assert l == [1, 2, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults_w=[4, 5])
+ assert l == [1, 2, 4, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults_w=[4, 5])
+ assert l == [1, 2, 3, 5]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["c", "b", "a", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["a", "b", "c1", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b"], None, "**"))
+ assert l == [1, 2, {'c': 3}]
+ excinfo = py.test.raises(OperationError, Arguments, space, [], ["a"],
+ [1], w_starstararg=kwargs(["a"], [2]))
+ assert excinfo.value.w_type is TypeError
+
+
+
class TestErrorHandling(object):
def test_missing_args(self):
# got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg,
diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py
--- a/pypy/module/pypyjit/test_pypy_c/test_call.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_call.py
@@ -244,6 +244,7 @@
print guards
assert len(guards) <= 20
+
def test_stararg_virtual(self):
def main(x):
def g(*args):
@@ -486,3 +487,38 @@
--TICK--
jump(..., descr=...)
""")
+
+ def test_kwargs_virtual2(self):
+ log = self.run("""
+ def f(*args, **kwargs):
+ kwargs['a'] = kwargs['z'] * 0
+ return g(1, *args, **kwargs)
+
+ def g(x, y, z=2, a=1):
+ return x - y + z + a
+
+ def main(stop):
+ res = 0
+ i = 0
+ while i < stop:
+ res = f(res, z=i) # ID: call
+ i += 1
+ return res""", [1000])
+ assert log.result == 500
+ loop, = log.loops_by_id('call')
+ print loop.ops_by_id('call')
+ assert loop.match("""
+ i65 = int_lt(i58, i29)
+ guard_true(i65, descr=...)
+ guard_not_invalidated(..., descr=...)
+ i66 = force_token()
+ i67 = force_token()
+ i69 = int_sub_ovf(1, i56)
+ guard_no_overflow(..., descr=...)
+ i70 = int_add_ovf(i69, i58)
+ guard_no_overflow(..., descr=...)
+ i71 = int_add(i58, 1)
+ --TICK--
+ jump(..., descr=...)
+ """)
+
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -110,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,7 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, strdict=False):
+ instance=False, strdict=False, kwargs=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -46,11 +46,15 @@
assert w_type is None
strategy = space.fromcache(StringDictStrategy)
+ elif kwargs:
+ assert w_type is None
+ from pypy.objspace.std.kwargsdict import KwargsDictStrategy
+ strategy = space.fromcache(KwargsDictStrategy)
else:
strategy = space.fromcache(EmptyDictStrategy)
-
if w_type is None:
w_type = space.w_dict
+
storage = strategy.get_empty_storage()
w_self = space.allocate_instance(W_DictMultiObject, w_type)
W_DictMultiObject.__init__(w_self, space, strategy, storage)
@@ -91,7 +95,8 @@
getitem_str delitem length \
clear w_keys values \
items iter setdefault \
- popitem listview_str listview_int".split()
+ popitem listview_str listview_int \
+ view_as_kwargs".split()
def make_method(method):
def f(self, *args):
@@ -165,6 +170,9 @@
def listview_int(self, w_dict):
return None
+ def view_as_kwargs(self, w_dict):
+ return (None, None)
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -254,6 +262,9 @@
def popitem(self, w_dict):
raise KeyError
+ def view_as_kwargs(self, w_dict):
+ return ([], [])
+
registerimplementation(W_DictMultiObject)
# DictImplementation lattice
diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/kwargsdict.py
@@ -0,0 +1,165 @@
+## ----------------------------------------------------------------------------
+## dict strategy (see dictmultiobject.py)
+
+from pypy.rlib import rerased, jit
+from pypy.objspace.std.dictmultiobject import (DictStrategy,
+ IteratorImplementation,
+ ObjectDictStrategy,
+ StringDictStrategy)
+
+
+class KwargsDictStrategy(DictStrategy):
+ erase, unerase = rerased.new_erasing_pair("kwargsdict")
+ erase = staticmethod(erase)
+ unerase = staticmethod(unerase)
+
+ def wrap(self, key):
+ return self.space.wrap(key)
+
+ def unwrap(self, wrapped):
+ return self.space.str_w(wrapped)
+
+ def get_empty_storage(self):
+ d = ([], [])
+ return self.erase(d)
+
+ def is_correct_type(self, w_obj):
+ space = self.space
+ return space.is_w(space.type(w_obj), space.w_str)
+
+ def _never_equal_to(self, w_lookup_type):
+ return False
+
+ def iter(self, w_dict):
+ return KwargsDictIterator(self.space, self, w_dict)
+
+ def w_keys(self, w_dict):
+ return self.space.newlist([self.space.wrap(key) for key in self.unerase(w_dict.dstorage)[0]])
+
+ def setitem(self, w_dict, w_key, w_value):
+ space = self.space
+ if self.is_correct_type(w_key):
+ self.setitem_str(w_dict, self.unwrap(w_key), w_value)
+ return
+ else:
+ self.switch_to_object_strategy(w_dict)
+ w_dict.setitem(w_key, w_value)
+
+ def setitem_str(self, w_dict, key, w_value):
+ self._setitem_str_indirection(w_dict, key, w_value)
+
+ @jit.look_inside_iff(lambda self, w_dict, key, w_value:
+ jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _setitem_str_indirection(self, w_dict, key, w_value):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ values_w[i] = w_value
+ break
+ else:
+ # limit the size so that the linear searches don't become too long
+ if len(keys) >= 16:
+ self.switch_to_string_strategy(w_dict)
+ w_dict.setitem_str(key, w_value)
+ else:
+ keys.append(key)
+ values_w.append(w_value)
+
+ def setdefault(self, w_dict, w_key, w_default):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.setdefault(w_key, w_default)
+
+ def delitem(self, w_dict, w_key):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.delitem(w_key)
+
+ def length(self, w_dict):
+ return len(self.unerase(w_dict.dstorage)[0])
+
+ def getitem_str(self, w_dict, key):
+ return self._getitem_str_indirection(w_dict, key)
+
+ @jit.look_inside_iff(lambda self, w_dict, key: jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _getitem_str_indirection(self, w_dict, key):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ return values_w[i]
+ return None
+
+ def getitem(self, w_dict, w_key):
+ space = self.space
+ if self.is_correct_type(w_key):
+ return self.getitem_str(w_dict, self.unwrap(w_key))
+ elif self._never_equal_to(space.type(w_key)):
+ return None
+ else:
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.getitem(w_key)
+
+ def w_keys(self, w_dict):
+ l = self.unerase(w_dict.dstorage)[0]
+ return self.space.newlist_str(l[:])
+
+ def values(self, w_dict):
+ return self.unerase(w_dict.dstorage)[1][:] # to make non-resizable
+
+ def items(self, w_dict):
+ space = self.space
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ result.append(space.newtuple([self.wrap(keys[i]), values_w[i]]))
+ return result
+
+ def popitem(self, w_dict):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ key = keys.pop()
+ w_value = values_w.pop()
+ return (self.wrap(key), w_value)
+
+ def clear(self, w_dict):
+ w_dict.dstorage = self.get_empty_storage()
+
+ def switch_to_object_strategy(self, w_dict):
+ strategy = self.space.fromcache(ObjectDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ d_new = strategy.unerase(strategy.get_empty_storage())
+ for i in range(len(keys)):
+ d_new[self.wrap(keys[i])] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = strategy.erase(d_new)
+
+ def switch_to_string_strategy(self, w_dict):
+ strategy = self.space.fromcache(StringDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ storage = strategy.get_empty_storage()
+ d_new = strategy.unerase(storage)
+ for i in range(len(keys)):
+ d_new[keys[i]] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = storage
+
+ def view_as_kwargs(self, w_dict):
+ return self.unerase(w_dict.dstorage)
+
+
+class KwargsDictIterator(IteratorImplementation):
+ def __init__(self, space, strategy, dictimplementation):
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
+ keys, values_w = strategy.unerase(self.dictimplementation.dstorage)
+ self.iterator = iter(range(len(keys)))
+ # XXX this potentially leaks
+ self.keys = keys
+ self.values_w = values_w
+
+ def next_entry(self):
+ # note that this 'for' loop only runs once, at most
+ for i in self.iterator:
+ return self.space.wrap(self.keys[i]), self.values_w[i]
+ else:
+ return None, None
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,11 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- strdict=strdict)
+ strdict=strdict, kwargs=kwargs)
def newset(self):
from pypy.objspace.std.setobject import newset
@@ -472,6 +472,11 @@
return w_obj.getitems_int()
return None
+ def view_as_kwargs(self, w_dict):
+ if type(w_dict) is W_DictMultiObject:
+ return w_dict.view_as_kwargs()
+ return (None, None)
+
def _uses_list_iter(self, w_obj):
from pypy.objspace.descroperation import list_iter
return self.lookup(w_obj, '__iter__') is list_iter(self)
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -0,0 +1,120 @@
+import py
+from pypy.conftest import gettestobjspace, option
+from pypy.objspace.std.test.test_dictmultiobject import FakeSpace, W_DictMultiObject
+from pypy.objspace.std.kwargsdict import *
+
+space = FakeSpace()
+strategy = KwargsDictStrategy(space)
+
+def test_create():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem(space.wrap("a")) == 1
+ assert d.getitem(space.wrap("b")) == 2
+ assert d.getitem(space.wrap("c")) == 3
+ assert d.w_keys() == keys
+ assert d.values() == values
+
+def test_set_existing():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("a", 4) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("b", 5) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("c", 6) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 6
+ assert d.getitem(space.wrap("a")) == 4
+ assert d.getitem(space.wrap("b")) == 5
+ assert d.getitem(space.wrap("c")) == 6
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c"]
+ assert values == [4, 5, 6]
+
+
+def test_set_new():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") is None
+ assert d.setitem_str("d", 4) is None
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") == 4
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c", "d"]
+ assert values == [1, 2, 3, 4]
+
+def test_limit_size():
+ storage = strategy.get_empty_storage()
+ d = W_DictMultiObject(space, strategy, storage)
+ for i in range(100):
+ assert d.setitem_str("d%s" % i, 4) is None
+ assert d.strategy is not strategy
+ assert "StringDictStrategy" == d.strategy.__class__.__name__
+
+def test_keys_doesnt_wrap():
+ space = FakeSpace()
+ space.newlist = None
+ strategy = KwargsDictStrategy(space)
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ w_l = d.w_keys() # does not crash
+
+
+from pypy.objspace.std.test.test_dictmultiobject import BaseTestRDictImplementation, BaseTestDevolvedDictImplementation
+def get_impl(self):
+ storage = strategy.erase(([], []))
+ return W_DictMultiObject(space, strategy, storage)
+class TestKwargsDictImplementation(BaseTestRDictImplementation):
+ StrategyClass = KwargsDictStrategy
+ get_impl = get_impl
+ def test_delitem(self):
+ pass # delitem devolves for now
+
+class TestDevolvedKwargsDictImplementation(BaseTestDevolvedDictImplementation):
+ get_impl = get_impl
+ StrategyClass = KwargsDictStrategy
+
+
+class AppTestKwargsDictStrategy(object):
+ def setup_class(cls):
+ if option.runappdirect:
+ py.test.skip("__repr__ doesn't work on appdirect")
+
+ def w_get_strategy(self, obj):
+ import __pypy__
+ r = __pypy__.internal_repr(obj)
+ return r[r.find("(") + 1: r.find(")")]
+
+ def test_create(self):
+ def f(**args):
+ return args
+ d = f(a=1)
+ assert "KwargsDictStrategy" in self.get_strategy(d)
+
From noreply at buildbot.pypy.org Fri Apr 13 11:00:53 2012
From: noreply at buildbot.pypy.org (bivab)
Date: Fri, 13 Apr 2012 11:00:53 +0200 (CEST)
Subject: [pypy-commit] pypy arm-backend-2: de-tab backend
Message-ID: <20120413090053.35BFC82F4F@wyvern.cs.uni-duesseldorf.de>
Author: David Schneider
Branch: arm-backend-2
Changeset: r54326:c2f86113b607
Date: 2012-04-13 07:58 +0000
http://bitbucket.org/pypy/pypy/changeset/c2f86113b607/
Log: de-tab backend
diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py
--- a/pypy/jit/backend/arm/assembler.py
+++ b/pypy/jit/backend/arm/assembler.py
@@ -929,7 +929,7 @@
effectinfo = op.getdescr().get_extra_info()
oopspecindex = effectinfo.oopspecindex
asm_llong_operations[oopspecindex](self, op, arglocs, regalloc, fcond)
- return fcond
+ return fcond
def regalloc_emit_math(self, op, arglocs, fcond, regalloc):
effectinfo = op.getdescr().get_extra_info()
diff --git a/pypy/jit/backend/arm/instruction_builder.py b/pypy/jit/backend/arm/instruction_builder.py
--- a/pypy/jit/backend/arm/instruction_builder.py
+++ b/pypy/jit/backend/arm/instruction_builder.py
@@ -365,10 +365,10 @@
size = 0x3
n |= size << 20
def f(self, dd, dn, dm):
- N = (dn >> 4) & 0x1
- M = (dm >> 4) & 0x1
- D = (dd >> 4) & 0x1
- Q = 0 # we want doubleword regs
+ N = (dn >> 4) & 0x1
+ M = (dm >> 4) & 0x1
+ D = (dd >> 4) & 0x1
+ Q = 0 # we want doubleword regs
instr = (n
| D << 22
| (dn & 0xf) << 16
@@ -377,7 +377,7 @@
| Q << 6
| M << 5
| (dm & 0xf))
-
+
self.write32(instr)
return f
diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py
--- a/pypy/jit/backend/arm/opassembler.py
+++ b/pypy/jit/backend/arm/opassembler.py
@@ -1272,14 +1272,14 @@
assert arg.is_vfp_reg()
assert res.is_reg()
self.mc.VCVT_float_to_int(r.vfp_ip.value, arg.value)
- self.mc.VMOV_rc(res.value, r.ip.value, r.vfp_ip.value)
+ self.mc.VMOV_rc(res.value, r.ip.value, r.vfp_ip.value)
return fcond
def emit_op_cast_int_to_float(self, op, arglocs, regalloc, fcond):
arg, res = arglocs
assert res.is_vfp_reg()
assert arg.is_reg()
- self.mc.MOV_ri(r.ip.value, 0)
+ self.mc.MOV_ri(r.ip.value, 0)
self.mc.VMOV_cr(res.value, arg.value, r.ip.value)
self.mc.VCVT_int_to_float(res.value, res.value)
return fcond
@@ -1301,9 +1301,9 @@
emit_op_convert_float_bytes_to_longlong = gen_emit_unary_float_op('float_bytes_to_longlong', 'VMOV_cc')
def emit_op_read_timestamp(self, op, arglocs, regalloc, fcond):
- tmp = arglocs[0]
+ tmp = arglocs[0]
res = arglocs[1]
self.mc.MRC(15, 0, tmp.value, 15, 12, 1)
- self.mc.MOV_ri(r.ip.value, 0)
+ self.mc.MOV_ri(r.ip.value, 0)
self.mc.VMOV_cr(res.value, tmp.value, r.ip.value)
return fcond
diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py
--- a/pypy/jit/backend/arm/regalloc.py
+++ b/pypy/jit/backend/arm/regalloc.py
@@ -378,7 +378,7 @@
return self.assembler.regalloc_emit_llong(op, args, fcond, self)
def perform_math(self, op, args, fcond):
- return self.assembler.regalloc_emit_math(op, args, self, fcond)
+ return self.assembler.regalloc_emit_math(op, args, self, fcond)
def force_spill_var(self, var):
if var.type == FLOAT:
@@ -548,16 +548,16 @@
if effectinfo is not None:
oopspecindex = effectinfo.oopspecindex
if oopspecindex in (EffectInfo.OS_LLONG_ADD,
- EffectInfo.OS_LLONG_SUB,
- EffectInfo.OS_LLONG_AND,
- EffectInfo.OS_LLONG_OR,
- EffectInfo.OS_LLONG_XOR):
+ EffectInfo.OS_LLONG_SUB,
+ EffectInfo.OS_LLONG_AND,
+ EffectInfo.OS_LLONG_OR,
+ EffectInfo.OS_LLONG_XOR):
args = self._prepare_llong_binop_xx(op, fcond)
- self.perform_llong(op, args, fcond)
+ self.perform_llong(op, args, fcond)
return
if oopspecindex == EffectInfo.OS_LLONG_TO_INT:
args = self._prepare_llong_to_int(op, fcond)
- self.perform_llong(op, args, fcond)
+ self.perform_llong(op, args, fcond)
return
if oopspecindex == EffectInfo.OS_MATH_SQRT:
args = self.prepare_op_math_sqrt(op, fcond)
@@ -587,9 +587,9 @@
return self._prepare_call(op)
def _prepare_llong_binop_xx(self, op, fcond):
- # arg 0 is the address of the function
+ # arg 0 is the address of the function
loc0 = self._ensure_value_is_boxed(op.getarg(1))
- loc1 = self._ensure_value_is_boxed(op.getarg(2))
+ loc1 = self._ensure_value_is_boxed(op.getarg(2))
self.possibly_free_vars_for_op(op)
self.free_temp_vars()
res = self.vfprm.force_allocate_reg(op.result)
@@ -1205,7 +1205,7 @@
return []
prepare_op_convert_float_bytes_to_longlong = prepare_float_op(base=False,
- name='prepare_op_convert_float_bytes_to_longlong')
+ name='prepare_op_convert_float_bytes_to_longlong')
def prepare_op_read_timestamp(self, op, fcond):
loc = self.get_scratch_reg(INT)
From noreply at buildbot.pypy.org Fri Apr 13 11:00:51 2012
From: noreply at buildbot.pypy.org (bivab)
Date: Fri, 13 Apr 2012 11:00:51 +0200 (CEST)
Subject: [pypy-commit] pypy arm-backend-2: merge default
Message-ID: <20120413090051.96AAA82F4E@wyvern.cs.uni-duesseldorf.de>
Author: David Schneider
Branch: arm-backend-2
Changeset: r54325:308075361821
Date: 2012-04-10 13:16 +0000
http://bitbucket.org/pypy/pypy/changeset/308075361821/
Log: merge default
diff --git a/_pytest/__init__.py b/_pytest/__init__.py
--- a/_pytest/__init__.py
+++ b/_pytest/__init__.py
@@ -1,2 +1,2 @@
#
-__version__ = '2.1.0.dev4'
+__version__ = '2.2.4.dev2'
diff --git a/_pytest/assertion/__init__.py b/_pytest/assertion/__init__.py
--- a/_pytest/assertion/__init__.py
+++ b/_pytest/assertion/__init__.py
@@ -2,35 +2,25 @@
support for presenting detailed information in failing assertions.
"""
import py
-import imp
-import marshal
-import struct
import sys
import pytest
from _pytest.monkeypatch import monkeypatch
-from _pytest.assertion import reinterpret, util
-
-try:
- from _pytest.assertion.rewrite import rewrite_asserts
-except ImportError:
- rewrite_asserts = None
-else:
- import ast
+from _pytest.assertion import util
def pytest_addoption(parser):
group = parser.getgroup("debugconfig")
- group.addoption('--assertmode', action="store", dest="assertmode",
- choices=("on", "old", "off", "default"), default="default",
- metavar="on|old|off",
+ group.addoption('--assert', action="store", dest="assertmode",
+ choices=("rewrite", "reinterp", "plain",),
+ default="rewrite", metavar="MODE",
help="""control assertion debugging tools.
-'off' performs no assertion debugging.
-'old' reinterprets the expressions in asserts to glean information.
-'on' (the default) rewrites the assert statements in test modules to provide
-sub-expression results.""")
+'plain' performs no assertion debugging.
+'reinterp' reinterprets assert statements after they failed to provide assertion expression information.
+'rewrite' (the default) rewrites assert statements in test modules on import
+to provide assert expression information. """)
group.addoption('--no-assert', action="store_true", default=False,
- dest="noassert", help="DEPRECATED equivalent to --assertmode=off")
+ dest="noassert", help="DEPRECATED equivalent to --assert=plain")
group.addoption('--nomagic', action="store_true", default=False,
- dest="nomagic", help="DEPRECATED equivalent to --assertmode=off")
+ dest="nomagic", help="DEPRECATED equivalent to --assert=plain")
class AssertionState:
"""State for the assertion plugin."""
@@ -40,89 +30,90 @@
self.trace = config.trace.root.get("assertion")
def pytest_configure(config):
- warn_about_missing_assertion()
mode = config.getvalue("assertmode")
if config.getvalue("noassert") or config.getvalue("nomagic"):
- if mode not in ("off", "default"):
- raise pytest.UsageError("assertion options conflict")
- mode = "off"
- elif mode == "default":
- mode = "on"
- if mode != "off":
- def callbinrepr(op, left, right):
- hook_result = config.hook.pytest_assertrepr_compare(
- config=config, op=op, left=left, right=right)
- for new_expl in hook_result:
- if new_expl:
- return '\n~'.join(new_expl)
+ mode = "plain"
+ if mode == "rewrite":
+ try:
+ import ast
+ except ImportError:
+ mode = "reinterp"
+ else:
+ if sys.platform.startswith('java'):
+ mode = "reinterp"
+ if mode != "plain":
+ _load_modules(mode)
m = monkeypatch()
config._cleanup.append(m.undo)
m.setattr(py.builtin.builtins, 'AssertionError',
reinterpret.AssertionError)
- m.setattr(util, '_reprcompare', callbinrepr)
- if mode == "on" and rewrite_asserts is None:
- mode = "old"
+ hook = None
+ if mode == "rewrite":
+ hook = rewrite.AssertionRewritingHook()
+ sys.meta_path.append(hook)
+ warn_about_missing_assertion(mode)
config._assertstate = AssertionState(config, mode)
+ config._assertstate.hook = hook
config._assertstate.trace("configured with mode set to %r" % (mode,))
-def _write_pyc(co, source_path):
- if hasattr(imp, "cache_from_source"):
- # Handle PEP 3147 pycs.
- pyc = py.path.local(imp.cache_from_source(str(source_path)))
- pyc.ensure()
- else:
- pyc = source_path + "c"
- mtime = int(source_path.mtime())
- fp = pyc.open("wb")
- try:
- fp.write(imp.get_magic())
- fp.write(struct.pack(" 0 and
- item.identifier != "__future__"):
+ elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
+ item.module != "__future__"):
lineno = item.lineno
break
pos += 1
@@ -118,9 +357,9 @@
for alias in aliases]
mod.body[pos:pos] = imports
# Collect asserts.
- nodes = collections.deque([mod])
+ nodes = [mod]
while nodes:
- node = nodes.popleft()
+ node = nodes.pop()
for name, field in ast.iter_fields(node):
if isinstance(field, list):
new = []
@@ -143,7 +382,7 @@
"""Get a new variable."""
# Use a character invalid in python identifiers to avoid clashing.
name = "@py_assert" + str(next(self.variable_counter))
- self.variables.add(name)
+ self.variables.append(name)
return name
def assign(self, expr):
@@ -198,7 +437,8 @@
# There's already a message. Don't mess with it.
return [assert_]
self.statements = []
- self.variables = set()
+ self.cond_chain = ()
+ self.variables = []
self.variable_counter = itertools.count()
self.stack = []
self.on_failure = []
@@ -220,11 +460,11 @@
else:
raise_ = ast.Raise(exc, None, None)
body.append(raise_)
- # Delete temporary variables.
- names = [ast.Name(name, ast.Del()) for name in self.variables]
- if names:
- delete = ast.Delete(names)
- self.statements.append(delete)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store()) for name in self.variables]
+ clear = ast.Assign(variables, ast.Name("None", ast.Load()))
+ self.statements.append(clear)
# Fix line numbers.
for stmt in self.statements:
set_location(stmt, assert_.lineno, assert_.col_offset)
@@ -240,21 +480,38 @@
return name, self.explanation_param(expr)
def visit_BoolOp(self, boolop):
- operands = []
- explanations = []
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
self.push_format_context()
- for operand in boolop.values:
- res, explanation = self.visit(operand)
- operands.append(res)
- explanations.append(explanation)
- expls = ast.Tuple([ast.Str(expl) for expl in explanations], ast.Load())
- is_or = ast.Num(isinstance(boolop.op, ast.Or))
- expl_template = self.helper("format_boolop",
- ast.Tuple(operands, ast.Load()), expls,
- is_or)
+ # Process each operand, short-circuting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+ self.on_failure.append(ast.If(cond, fail_inner, []))
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast.Call(app, [expl_format], [], None, None)
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
expl = self.pop_format_context(expl_template)
- res = self.assign(ast.BoolOp(boolop.op, operands))
- return res, self.explanation_param(expl)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
def visit_UnaryOp(self, unary):
pattern = unary_map[unary.op.__class__]
@@ -288,7 +545,7 @@
new_star, expl = self.visit(call.starargs)
arg_expls.append("*" + expl)
if call.kwargs:
- new_kwarg, expl = self.visit(call.kwarg)
+ new_kwarg, expl = self.visit(call.kwargs)
arg_expls.append("**" + expl)
expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
diff --git a/_pytest/capture.py b/_pytest/capture.py
--- a/_pytest/capture.py
+++ b/_pytest/capture.py
@@ -11,22 +11,22 @@
group._addoption('-s', action="store_const", const="no", dest="capture",
help="shortcut for --capture=no.")
+ at pytest.mark.tryfirst
+def pytest_cmdline_parse(pluginmanager, args):
+ # we want to perform capturing already for plugin/conftest loading
+ if '-s' in args or "--capture=no" in args:
+ method = "no"
+ elif hasattr(os, 'dup') and '--capture=sys' not in args:
+ method = "fd"
+ else:
+ method = "sys"
+ capman = CaptureManager(method)
+ pluginmanager.register(capman, "capturemanager")
+
def addouterr(rep, outerr):
- repr = getattr(rep, 'longrepr', None)
- if not hasattr(repr, 'addsection'):
- return
for secname, content in zip(["out", "err"], outerr):
if content:
- repr.addsection("Captured std%s" % secname, content.rstrip())
-
-def pytest_unconfigure(config):
- # registered in config.py during early conftest.py loading
- capman = config.pluginmanager.getplugin('capturemanager')
- while capman._method2capture:
- name, cap = capman._method2capture.popitem()
- # XXX logging module may wants to close it itself on process exit
- # otherwise we could do finalization here and call "reset()".
- cap.suspend()
+ rep.sections.append(("Captured std%s" % secname, content))
class NoCapture:
def startall(self):
@@ -39,8 +39,9 @@
return "", ""
class CaptureManager:
- def __init__(self):
+ def __init__(self, defaultmethod=None):
self._method2capture = {}
+ self._defaultmethod = defaultmethod
def _maketempfile(self):
f = py.std.tempfile.TemporaryFile()
@@ -65,14 +66,6 @@
else:
raise ValueError("unknown capturing method: %r" % method)
- def _getmethod_preoptionparse(self, args):
- if '-s' in args or "--capture=no" in args:
- return "no"
- elif hasattr(os, 'dup') and '--capture=sys' not in args:
- return "fd"
- else:
- return "sys"
-
def _getmethod(self, config, fspath):
if config.option.capture:
method = config.option.capture
@@ -85,16 +78,22 @@
method = "sys"
return method
+ def reset_capturings(self):
+ for name, cap in self._method2capture.items():
+ cap.reset()
+
def resumecapture_item(self, item):
method = self._getmethod(item.config, item.fspath)
if not hasattr(item, 'outerr'):
item.outerr = ('', '') # we accumulate outerr on the item
return self.resumecapture(method)
- def resumecapture(self, method):
+ def resumecapture(self, method=None):
if hasattr(self, '_capturing'):
raise ValueError("cannot resume, already capturing with %r" %
(self._capturing,))
+ if method is None:
+ method = self._defaultmethod
cap = self._method2capture.get(method)
self._capturing = method
if cap is None:
@@ -164,17 +163,6 @@
def pytest_runtest_teardown(self, item):
self.resumecapture_item(item)
- def pytest__teardown_final(self, __multicall__, session):
- method = self._getmethod(session.config, None)
- self.resumecapture(method)
- try:
- rep = __multicall__.execute()
- finally:
- outerr = self.suspendcapture()
- if rep:
- addouterr(rep, outerr)
- return rep
-
def pytest_keyboard_interrupt(self, excinfo):
if hasattr(self, '_capturing'):
self.suspendcapture()
diff --git a/_pytest/config.py b/_pytest/config.py
--- a/_pytest/config.py
+++ b/_pytest/config.py
@@ -8,13 +8,15 @@
def pytest_cmdline_parse(pluginmanager, args):
config = Config(pluginmanager)
config.parse(args)
- if config.option.debug:
- config.trace.root.setwriter(sys.stderr.write)
return config
def pytest_unconfigure(config):
- for func in config._cleanup:
- func()
+ while 1:
+ try:
+ fin = config._cleanup.pop()
+ except IndexError:
+ break
+ fin()
class Parser:
""" Parser for command line arguments. """
@@ -81,6 +83,7 @@
self._inidict[name] = (help, type, default)
self._ininames.append(name)
+
class OptionGroup:
def __init__(self, name, description="", parser=None):
self.name = name
@@ -256,11 +259,14 @@
self.hook = self.pluginmanager.hook
self._inicache = {}
self._cleanup = []
-
+
@classmethod
def fromdictargs(cls, option_dict, args):
""" constructor useable for subprocesses. """
config = cls()
+ # XXX slightly crude way to initialize capturing
+ import _pytest.capture
+ _pytest.capture.pytest_cmdline_parse(config.pluginmanager, args)
config._preparse(args, addopts=False)
config.option.__dict__.update(option_dict)
for x in config.option.plugins:
@@ -285,11 +291,10 @@
def _setinitialconftest(self, args):
# capture output during conftest init (#issue93)
- from _pytest.capture import CaptureManager
- capman = CaptureManager()
- self.pluginmanager.register(capman, 'capturemanager')
- # will be unregistered in capture.py's unconfigure()
- capman.resumecapture(capman._getmethod_preoptionparse(args))
+ # XXX introduce load_conftest hook to avoid needing to know
+ # about capturing plugin here
+ capman = self.pluginmanager.getplugin("capturemanager")
+ capman.resumecapture()
try:
try:
self._conftest.setinitial(args)
@@ -334,6 +339,7 @@
# Note that this can only be called once per testing process.
assert not hasattr(self, 'args'), (
"can only parse cmdline args at most once per Config object")
+ self._origargs = args
self._preparse(args)
self._parser.hints.extend(self.pluginmanager._hints)
args = self._parser.parse_setoption(args, self.option)
@@ -341,6 +347,14 @@
args.append(py.std.os.getcwd())
self.args = args
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+ declared but might not yet be set in which case the line becomes the
+ the first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
def getini(self, name):
""" return configuration value from an ini file. If the
specified name hasn't been registered through a prior ``parse.addini``
@@ -422,7 +436,7 @@
def getcfg(args, inibasenames):
- args = [x for x in args if str(x)[0] != "-"]
+ args = [x for x in args if not str(x).startswith("-")]
if not args:
args = [py.path.local()]
for arg in args:
diff --git a/_pytest/core.py b/_pytest/core.py
--- a/_pytest/core.py
+++ b/_pytest/core.py
@@ -16,11 +16,10 @@
"junitxml resultlog doctest").split()
class TagTracer:
- def __init__(self, prefix="[pytest] "):
+ def __init__(self):
self._tag2proc = {}
self.writer = None
self.indent = 0
- self.prefix = prefix
def get(self, name):
return TagTracerSub(self, (name,))
@@ -30,7 +29,7 @@
if args:
indent = " " * self.indent
content = " ".join(map(str, args))
- self.writer("%s%s%s\n" %(self.prefix, indent, content))
+ self.writer("%s%s [%s]\n" %(indent, content, ":".join(tags)))
try:
self._tag2proc[tags](tags, args)
except KeyError:
@@ -212,6 +211,14 @@
self.register(mod, modname)
self.consider_module(mod)
+ def pytest_configure(self, config):
+ config.addinivalue_line("markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.")
+ config.addinivalue_line("markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.")
+
def pytest_plugin_registered(self, plugin):
import pytest
dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
@@ -432,10 +439,7 @@
def _preloadplugins():
_preinit.append(PluginManager(load=True))
-def main(args=None, plugins=None):
- """ returned exit code integer, after an in-process testing run
- with the given command line arguments, preloading an optional list
- of passed in plugin objects. """
+def _prepareconfig(args=None, plugins=None):
if args is None:
args = sys.argv[1:]
elif isinstance(args, py.path.local):
@@ -449,13 +453,19 @@
else: # subsequent calls to main will create a fresh instance
_pluginmanager = PluginManager(load=True)
hook = _pluginmanager.hook
+ if plugins:
+ for plugin in plugins:
+ _pluginmanager.register(plugin)
+ return hook.pytest_cmdline_parse(
+ pluginmanager=_pluginmanager, args=args)
+
+def main(args=None, plugins=None):
+ """ returned exit code integer, after an in-process testing run
+ with the given command line arguments, preloading an optional list
+ of passed in plugin objects. """
try:
- if plugins:
- for plugin in plugins:
- _pluginmanager.register(plugin)
- config = hook.pytest_cmdline_parse(
- pluginmanager=_pluginmanager, args=args)
- exitstatus = hook.pytest_cmdline_main(config=config)
+ config = _prepareconfig(args, plugins)
+ exitstatus = config.hook.pytest_cmdline_main(config=config)
except UsageError:
e = sys.exc_info()[1]
sys.stderr.write("ERROR: %s\n" %(e.args[0],))
diff --git a/_pytest/helpconfig.py b/_pytest/helpconfig.py
--- a/_pytest/helpconfig.py
+++ b/_pytest/helpconfig.py
@@ -1,7 +1,7 @@
""" version info, help messages, tracing configuration. """
import py
import pytest
-import inspect, sys
+import os, inspect, sys
from _pytest.core import varnames
def pytest_addoption(parser):
@@ -18,7 +18,29 @@
help="trace considerations of conftest.py files."),
group.addoption('--debug',
action="store_true", dest="debug", default=False,
- help="generate and show internal debugging information.")
+ help="store internal tracing debug information in 'pytestdebug.log'.")
+
+
+def pytest_cmdline_parse(__multicall__):
+ config = __multicall__.execute()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ f = open(path, 'w')
+ config._debugfile = f
+ f.write("versions pytest-%s, py-%s, python-%s\ncwd=%s\nargs=%s\n\n" %(
+ pytest.__version__, py.__version__, ".".join(map(str, sys.version_info)),
+ os.getcwd(), config._origargs))
+ config.trace.root.setwriter(f.write)
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+ return config
+
+ at pytest.mark.trylast
+def pytest_unconfigure(config):
+ if hasattr(config, '_debugfile'):
+ config._debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" %
+ config._debugfile.name)
+ config.trace.root.setwriter(None)
def pytest_cmdline_main(config):
@@ -34,6 +56,7 @@
elif config.option.help:
config.pluginmanager.do_configure(config)
showhelp(config)
+ config.pluginmanager.do_unconfigure(config)
return 0
def showhelp(config):
@@ -91,7 +114,7 @@
verinfo = getpluginversioninfo(config)
if verinfo:
lines.extend(verinfo)
-
+
if config.option.traceconfig:
lines.append("active plugins:")
plugins = []
diff --git a/_pytest/hookspec.py b/_pytest/hookspec.py
--- a/_pytest/hookspec.py
+++ b/_pytest/hookspec.py
@@ -121,16 +121,23 @@
def pytest_itemstart(item, node=None):
""" (deprecated, use pytest_runtest_logstart). """
-def pytest_runtest_protocol(item):
- """ implements the standard runtest_setup/call/teardown protocol including
- capturing exceptions and calling reporting hooks on the results accordingly.
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nexitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
:return boolean: True if no further hook implementations should be invoked.
"""
pytest_runtest_protocol.firstresult = True
def pytest_runtest_logstart(nodeid, location):
- """ signal the start of a test run. """
+ """ signal the start of running a single test item. """
def pytest_runtest_setup(item):
""" called before ``pytest_runtest_call(item)``. """
@@ -138,8 +145,14 @@
def pytest_runtest_call(item):
""" called to execute the test ``item``. """
-def pytest_runtest_teardown(item):
- """ called after ``pytest_runtest_call``. """
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nexitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
def pytest_runtest_makereport(item, call):
""" return a :py:class:`_pytest.runner.TestReport` object
@@ -149,15 +162,8 @@
pytest_runtest_makereport.firstresult = True
def pytest_runtest_logreport(report):
- """ process item test report. """
-
-# special handling for final teardown - somewhat internal for now
-def pytest__teardown_final(session):
- """ called before test session finishes. """
-pytest__teardown_final.firstresult = True
-
-def pytest__teardown_final_logerror(report, session):
- """ called if runtest_teardown_final failed. """
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
# -------------------------------------------------------------------------
# test session related hooks
diff --git a/_pytest/junitxml.py b/_pytest/junitxml.py
--- a/_pytest/junitxml.py
+++ b/_pytest/junitxml.py
@@ -25,21 +25,39 @@
long = int
+class Junit(py.xml.Namespace):
+ pass
+
+
# We need to get the subset of the invalid unicode ranges according to
# XML 1.0 which are valid in this python build. Hence we calculate
# this dynamically instead of hardcoding it. The spec range of valid
# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
# | [#x10000-#x10FFFF]
-_illegal_unichrs = [(0x00, 0x08), (0x0B, 0x0C), (0x0E, 0x19),
- (0xD800, 0xDFFF), (0xFDD0, 0xFFFF)]
-_illegal_ranges = [unicode("%s-%s") % (unichr(low), unichr(high))
- for (low, high) in _illegal_unichrs
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = (
+ (0x20, 0xD7FF),
+ (0xE000, 0xFFFD),
+ (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges
if low < sys.maxunicode]
-illegal_xml_re = re.compile(unicode('[%s]') %
- unicode('').join(_illegal_ranges))
-del _illegal_unichrs
-del _illegal_ranges
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') %
+ unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+def bin_xml_escape(arg):
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode('#x%02X') % i
+ else:
+ return unicode('#x%04X') % i
+ return illegal_xml_re.sub(repl, py.xml.escape(arg))
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting")
@@ -68,117 +86,97 @@
logfile = os.path.expanduser(os.path.expandvars(logfile))
self.logfile = os.path.normpath(logfile)
self.prefix = prefix
- self.test_logs = []
+ self.tests = []
self.passed = self.skipped = 0
self.failed = self.errors = 0
- self._durations = {}
def _opentestcase(self, report):
names = report.nodeid.split("::")
names[0] = names[0].replace("/", '.')
- names = tuple(names)
- d = {'time': self._durations.pop(report.nodeid, "0")}
names = [x.replace(".py", "") for x in names if x != "()"]
classnames = names[:-1]
if self.prefix:
classnames.insert(0, self.prefix)
- d['classname'] = ".".join(classnames)
- d['name'] = py.xml.escape(names[-1])
- attrs = ['%s="%s"' % item for item in sorted(d.items())]
- self.test_logs.append("\n" % " ".join(attrs))
+ self.tests.append(Junit.testcase(
+ classname=".".join(classnames),
+ name=names[-1],
+ time=getattr(report, 'duration', 0)
+ ))
- def _closetestcase(self):
- self.test_logs.append("")
-
- def appendlog(self, fmt, *args):
- def repl(matchobj):
- i = ord(matchobj.group())
- if i <= 0xFF:
- return unicode('#x%02X') % i
- else:
- return unicode('#x%04X') % i
- args = tuple([illegal_xml_re.sub(repl, py.xml.escape(arg))
- for arg in args])
- self.test_logs.append(fmt % args)
+ def append(self, obj):
+ self.tests[-1].append(obj)
def append_pass(self, report):
self.passed += 1
- self._opentestcase(report)
- self._closetestcase()
def append_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
if "xfail" in report.keywords:
- self.appendlog(
- '')
+ self.append(
+ Junit.skipped(message="xfail-marked test passes unexpectedly"))
self.skipped += 1
else:
- self.appendlog('%s',
- report.longrepr)
+ sec = dict(report.sections)
+ fail = Junit.failure(message="test failure")
+ fail.append(str(report.longrepr))
+ self.append(fail)
+ for name in ('out', 'err'):
+ content = sec.get("Captured std%s" % name)
+ if content:
+ tag = getattr(Junit, 'system-'+name)
+ self.append(tag(bin_xml_escape(content)))
self.failed += 1
- self._closetestcase()
def append_collect_failure(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.failure(str(report.longrepr),
+ message="collection failure"))
self.errors += 1
def append_collect_skipped(self, report):
- self._opentestcase(report)
#msg = str(report.longrepr.reprtraceback.extraline)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.skipped(str(report.longrepr),
+ message="collection skipped"))
self.skipped += 1
def append_error(self, report):
- self._opentestcase(report)
- self.appendlog('%s',
- report.longrepr)
- self._closetestcase()
+ self.append(Junit.error(str(report.longrepr),
+ message="test setup failure"))
self.errors += 1
def append_skipped(self, report):
- self._opentestcase(report)
if "xfail" in report.keywords:
- self.appendlog(
- '%s',
- report.keywords['xfail'])
+ self.append(Junit.skipped(str(report.keywords['xfail']),
+ message="expected test failure"))
else:
filename, lineno, skipreason = report.longrepr
if skipreason.startswith("Skipped: "):
skipreason = skipreason[9:]
- self.appendlog('%s',
- skipreason, "%s:%s: %s" % report.longrepr,
- )
- self._closetestcase()
+ self.append(
+ Junit.skipped("%s:%s: %s" % report.longrepr,
+ type="pytest.skip",
+ message=skipreason
+ ))
self.skipped += 1
def pytest_runtest_logreport(self, report):
if report.passed:
- self.append_pass(report)
+ if report.when == "call": # ignore setup/teardown
+ self._opentestcase(report)
+ self.append_pass(report)
elif report.failed:
+ self._opentestcase(report)
if report.when != "call":
self.append_error(report)
else:
self.append_failure(report)
elif report.skipped:
+ self._opentestcase(report)
self.append_skipped(report)
- def pytest_runtest_call(self, item, __multicall__):
- start = time.time()
- try:
- return __multicall__.execute()
- finally:
- self._durations[item.nodeid] = time.time() - start
-
def pytest_collectreport(self, report):
if not report.passed:
+ self._opentestcase(report)
if report.failed:
self.append_collect_failure(report)
else:
@@ -187,10 +185,11 @@
def pytest_internalerror(self, excrepr):
self.errors += 1
data = py.xml.escape(excrepr)
- self.test_logs.append(
- '\n'
- ' '
- '%s' % data)
+ self.tests.append(
+ Junit.testcase(
+ Junit.error(data, message="internal error"),
+ classname="pytest",
+ name="internal"))
def pytest_sessionstart(self, session):
self.suite_start_time = time.time()
@@ -204,17 +203,17 @@
suite_stop_time = time.time()
suite_time_delta = suite_stop_time - self.suite_start_time
numtests = self.passed + self.failed
+
logfile.write('')
- logfile.write('')
- logfile.writelines(self.test_logs)
- logfile.write('')
+ logfile.write(Junit.testsuite(
+ self.tests,
+ name="",
+ errors=self.errors,
+ failures=self.failed,
+ skips=self.skipped,
+ tests=numtests,
+ time="%.3f" % suite_time_delta,
+ ).unicode(indent=0))
logfile.close()
def pytest_terminal_summary(self, terminalreporter):
diff --git a/_pytest/main.py b/_pytest/main.py
--- a/_pytest/main.py
+++ b/_pytest/main.py
@@ -2,7 +2,7 @@
import py
import pytest, _pytest
-import os, sys
+import os, sys, imp
tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
# exitcodes for the command line
@@ -11,6 +11,8 @@
EXIT_INTERRUPTED = 2
EXIT_INTERNALERROR = 3
+name_re = py.std.re.compile("^[a-zA-Z_]\w*$")
+
def pytest_addoption(parser):
parser.addini("norecursedirs", "directory patterns to avoid for recursion",
type="args", default=('.*', 'CVS', '_darcs', '{arch}'))
@@ -27,6 +29,9 @@
action="store", type="int", dest="maxfail", default=0,
help="exit after first num failures or errors.")
+ group._addoption('--strict', action="store_true",
+ help="run pytest in strict mode, warnings become errors.")
+
group = parser.getgroup("collect", "collection")
group.addoption('--collectonly',
action="store_true", dest="collectonly",
@@ -48,7 +53,7 @@
def pytest_namespace():
collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
return dict(collect=collect)
-
+
def pytest_configure(config):
py.test.config = config # compatibiltiy
if config.option.exitfirst:
@@ -77,11 +82,11 @@
session.exitstatus = EXIT_INTERNALERROR
if excinfo.errisinstance(SystemExit):
sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(session=session,
+ exitstatus=session.exitstatus or (session._testsfailed and 1))
if not session.exitstatus and session._testsfailed:
session.exitstatus = EXIT_TESTSFAILED
- if initstate >= 2:
- config.hook.pytest_sessionfinish(session=session,
- exitstatus=session.exitstatus)
if initstate >= 1:
config.pluginmanager.do_unconfigure(config)
return session.exitstatus
@@ -101,8 +106,12 @@
def pytest_runtestloop(session):
if session.config.option.collectonly:
return True
- for item in session.session.items:
- item.config.hook.pytest_runtest_protocol(item=item)
+ for i, item in enumerate(session.items):
+ try:
+ nextitem = session.items[i+1]
+ except IndexError:
+ nextitem = None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
if session.shouldstop:
raise session.Interrupted(session.shouldstop)
return True
@@ -132,7 +141,7 @@
return getattr(pytest, name)
return property(fget, None, None,
"deprecated attribute %r, use pytest.%s" % (name,name))
-
+
class Node(object):
""" base class for all Nodes in the collection tree.
Collector subclasses have children, Items are terminal nodes."""
@@ -143,13 +152,13 @@
#: the parent collector node.
self.parent = parent
-
+
#: the test config object
self.config = config or parent.config
#: the collection this node is part of
self.session = session or parent.session
-
+
#: filesystem path where this node was collected from
self.fspath = getattr(parent, 'fspath', None)
self.ihook = self.session.gethookproxy(self.fspath)
@@ -224,13 +233,13 @@
def listchain(self):
""" return list of all parent collectors up to self,
starting from root of collection tree. """
- l = [self]
- while 1:
- x = l[0]
- if x.parent is not None: # and x.parent.parent is not None:
- l.insert(0, x.parent)
- else:
- return l
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
def listnames(self):
return [x.name for x in self.listchain()]
@@ -325,6 +334,8 @@
""" a basic test invocation item. Note that for a single function
there might be multiple test invocation items.
"""
+ nextitem = None
+
def reportinfo(self):
return self.fspath, None, ""
@@ -399,6 +410,7 @@
self._notfound = []
self._initialpaths = set()
self._initialparts = []
+ self.items = items = []
for arg in args:
parts = self._parsearg(arg)
self._initialparts.append(parts)
@@ -414,7 +426,6 @@
if not genitems:
return rep.result
else:
- self.items = items = []
if rep.passed:
for node in rep.result:
self.items.extend(self.genitems(node))
@@ -469,16 +480,29 @@
return True
def _tryconvertpyarg(self, x):
- try:
- mod = __import__(x, None, None, ['__doc__'])
- except (ValueError, ImportError):
- return x
- p = py.path.local(mod.__file__)
- if p.purebasename == "__init__":
- p = p.dirpath()
- else:
- p = p.new(basename=p.purebasename+".py")
- return str(p)
+ mod = None
+ path = [os.path.abspath('.')] + sys.path
+ for name in x.split('.'):
+ # ignore anything that's not a proper name here
+ # else something like --pyargs will mess up '.'
+ # since imp.find_module will actually sometimes work for it
+ # but it's supposed to be considered a filesystem path
+ # not a package
+ if name_re.match(name) is None:
+ return x
+ try:
+ fd, mod, type_ = imp.find_module(name, path)
+ except ImportError:
+ return x
+ else:
+ if fd is not None:
+ fd.close()
+
+ if type_[2] != imp.PKG_DIRECTORY:
+ path = [os.path.dirname(mod)]
+ else:
+ path = [mod]
+ return mod
def _parsearg(self, arg):
""" return (fspath, names) tuple after checking the file exists. """
@@ -496,7 +520,7 @@
raise pytest.UsageError(msg + arg)
parts[0] = path
return parts
-
+
def matchnodes(self, matching, names):
self.trace("matchnodes", matching, names)
self.trace.root.indent += 1
diff --git a/_pytest/mark.py b/_pytest/mark.py
--- a/_pytest/mark.py
+++ b/_pytest/mark.py
@@ -14,12 +14,37 @@
"Terminate expression with ':' to make the first match match "
"all subsequent tests (usually file-order). ")
+ group._addoption("-m",
+ action="store", dest="markexpr", default="", metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'."
+ )
+
+ group.addoption("--markers", action="store_true", help=
+ "show markers (builtin, plugin and per-project ones).")
+
+ parser.addini("markers", "markers for test functions", 'linelist')
+
+def pytest_cmdline_main(config):
+ if config.option.markers:
+ config.pluginmanager.do_configure(config)
+ tw = py.io.TerminalWriter()
+ for line in config.getini("markers"):
+ name, rest = line.split(":", 1)
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config.pluginmanager.do_unconfigure(config)
+ return 0
+pytest_cmdline_main.tryfirst = True
+
def pytest_collection_modifyitems(items, config):
keywordexpr = config.option.keyword
- if not keywordexpr:
+ matchexpr = config.option.markexpr
+ if not keywordexpr and not matchexpr:
return
selectuntil = False
- if keywordexpr[-1] == ":":
+ if keywordexpr[-1:] == ":":
selectuntil = True
keywordexpr = keywordexpr[:-1]
@@ -29,21 +54,38 @@
if keywordexpr and skipbykeyword(colitem, keywordexpr):
deselected.append(colitem)
else:
- remaining.append(colitem)
if selectuntil:
keywordexpr = None
+ if matchexpr:
+ if not matchmark(colitem, matchexpr):
+ deselected.append(colitem)
+ continue
+ remaining.append(colitem)
if deselected:
config.hook.pytest_deselected(items=deselected)
items[:] = remaining
+class BoolDict:
+ def __init__(self, mydict):
+ self._mydict = mydict
+ def __getitem__(self, name):
+ return name in self._mydict
+
+def matchmark(colitem, matchexpr):
+ return eval(matchexpr, {}, BoolDict(colitem.obj.__dict__))
+
+def pytest_configure(config):
+ if config.option.strict:
+ pytest.mark._config = config
+
def skipbykeyword(colitem, keywordexpr):
""" return True if they given keyword expression means to
skip this collector/item.
"""
if not keywordexpr:
return
-
+
itemkeywords = getkeywords(colitem)
for key in filter(None, keywordexpr.split()):
eor = key[:1] == '-'
@@ -77,15 +119,31 @@
@py.test.mark.slowtest
def test_function():
pass
-
+
will set a 'slowtest' :class:`MarkInfo` object
on the ``test_function`` object. """
def __getattr__(self, name):
if name[0] == "_":
raise AttributeError(name)
+ if hasattr(self, '_config'):
+ self._check(name)
return MarkDecorator(name)
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+ self._markers = l = set()
+ for line in self._config.getini("markers"):
+ beginning = line.split(":", 1)
+ x = beginning[0].split("(", 1)[0]
+ l.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
class MarkDecorator:
""" A decorator for test functions and test classes. When applied
it will create :class:`MarkInfo` objects which may be
@@ -133,8 +191,7 @@
holder = MarkInfo(self.markname, self.args, self.kwargs)
setattr(func, self.markname, holder)
else:
- holder.kwargs.update(self.kwargs)
- holder.args += self.args
+ holder.add(self.args, self.kwargs)
return func
kw = self.kwargs.copy()
kw.update(kwargs)
@@ -150,27 +207,20 @@
self.args = args
#: keyword argument dictionary, empty if nothing specified
self.kwargs = kwargs
+ self._arglist = [(args, kwargs.copy())]
def __repr__(self):
+ return "<MarkInfo %r args=%r kwargs=%r>" % (
self.name, self.args, self.kwargs)
-def pytest_itemcollected(item):
- if not isinstance(item, pytest.Function):
- return
- try:
- func = item.obj.__func__
- except AttributeError:
- func = getattr(item.obj, 'im_func', item.obj)
- pyclasses = (pytest.Class, pytest.Module)
- for node in item.listchain():
- if isinstance(node, pyclasses):
- marker = getattr(node.obj, 'pytestmark', None)
- if marker is not None:
- if isinstance(marker, list):
- for mark in marker:
- mark(func)
- else:
- marker(func)
- node = node.parent
- item.keywords.update(py.builtin._getfuncdict(func))
+ def add(self, args, kwargs):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._arglist.append((args, kwargs))
+ self.args += args
+ self.kwargs.update(kwargs)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ for args, kwargs in self._arglist:
+ yield MarkInfo(self.name, args, kwargs)
+
diff --git a/_pytest/monkeypatch.py b/_pytest/monkeypatch.py
--- a/_pytest/monkeypatch.py
+++ b/_pytest/monkeypatch.py
@@ -13,6 +13,7 @@
monkeypatch.setenv(name, value, prepend=False)
monkeypatch.delenv(name, value, raising=True)
monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
All modifications will be undone after the requesting
test function has finished. The ``raising``
@@ -30,6 +31,7 @@
def __init__(self):
self._setattr = []
self._setitem = []
+ self._cwd = None
def setattr(self, obj, name, value, raising=True):
""" set attribute ``name`` on ``obj`` to ``value``, by default
@@ -83,6 +85,17 @@
self._savesyspath = sys.path[:]
sys.path.insert(0, str(path))
+ def chdir(self, path):
+ """ change the current working directory to the specified path
+ path can be a string or a py.path.local object
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
def undo(self):
""" undo previous changes. This call consumes the
undo stack. Calling it a second time has no effect unless
@@ -95,9 +108,17 @@
self._setattr[:] = []
for dictionary, name, value in self._setitem:
if value is notset:
- del dictionary[name]
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
else:
dictionary[name] = value
self._setitem[:] = []
if hasattr(self, '_savesyspath'):
sys.path[:] = self._savesyspath
+ del self._savesyspath
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
diff --git a/_pytest/nose.py b/_pytest/nose.py
--- a/_pytest/nose.py
+++ b/_pytest/nose.py
@@ -13,6 +13,7 @@
call.excinfo = call2.excinfo
+@pytest.mark.trylast
def pytest_runtest_setup(item):
if isinstance(item, (pytest.Function)):
if isinstance(item.parent, pytest.Generator):
diff --git a/_pytest/pastebin.py b/_pytest/pastebin.py
--- a/_pytest/pastebin.py
+++ b/_pytest/pastebin.py
@@ -38,7 +38,11 @@
del tr._tw.__dict__['write']
def getproxy():
- return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes
+ if sys.version_info < (3, 0):
+ from xmlrpclib import ServerProxy
+ else:
+ from xmlrpc.client import ServerProxy
+ return ServerProxy(url.xmlrpc).pastes
def pytest_terminal_summary(terminalreporter):
if terminalreporter.config.option.pastebin != "failed":
diff --git a/_pytest/pdb.py b/_pytest/pdb.py
--- a/_pytest/pdb.py
+++ b/_pytest/pdb.py
@@ -19,11 +19,13 @@
class pytestPDB:
""" Pseudo PDB that defers to the real pdb. """
item = None
+ collector = None
def set_trace(self):
""" invoke PDB set_trace debugging, dropping any IO capturing. """
frame = sys._getframe().f_back
- item = getattr(self, 'item', None)
+ item = self.item or self.collector
+
if item is not None:
capman = item.config.pluginmanager.getplugin("capturemanager")
out, err = capman.suspendcapture()
@@ -38,6 +40,14 @@
pytestPDB.item = item
pytest_runtest_setup = pytest_runtest_call = pytest_runtest_teardown = pdbitem
+@pytest.mark.tryfirst
+def pytest_make_collect_report(__multicall__, collector):
+ try:
+ pytestPDB.collector = collector
+ return __multicall__.execute()
+ finally:
+ pytestPDB.collector = None
+
def pytest_runtest_makereport():
pytestPDB.item = None
@@ -60,7 +70,13 @@
tw.sep(">", "traceback")
rep.toterminal(tw)
tw.sep(">", "entering PDB")
- post_mortem(call.excinfo._excinfo[2])
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ if isinstance(call.excinfo.value, py.std.doctest.UnexpectedException):
+ tb = call.excinfo.value.exc_info[2]
+ else:
+ tb = call.excinfo._excinfo[2]
+ post_mortem(tb)
rep._pdbshown = True
return rep
diff --git a/_pytest/pytester.py b/_pytest/pytester.py
--- a/_pytest/pytester.py
+++ b/_pytest/pytester.py
@@ -25,6 +25,7 @@
_pytest_fullpath
except NameError:
_pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
+ _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
def pytest_funcarg___pytest(request):
return PytestArg(request)
@@ -313,16 +314,6 @@
result.extend(session.genitems(colitem))
return result
- def inline_genitems(self, *args):
- #config = self.parseconfig(*args)
- config = self.parseconfigure(*args)
- rec = self.getreportrecorder(config)
- session = Session(config)
- config.hook.pytest_sessionstart(session=session)
- session.perform_collect()
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return session.items, rec
-
def runitem(self, source):
# used from runner functional tests
item = self.getitem(source)
@@ -343,64 +334,57 @@
l = list(args) + [p]
reprec = self.inline_run(*l)
reports = reprec.getreports("pytest_runtest_logreport")
- assert len(reports) == 1, reports
- return reports[0]
+ assert len(reports) == 3, reports # setup/call/teardown
+ return reports[1]
+
+ def inline_genitems(self, *args):
+ return self.inprocess_run(list(args) + ['--collectonly'])
def inline_run(self, *args):
- args = ("-s", ) + args # otherwise FD leakage
- config = self.parseconfig(*args)
- reprec = self.getreportrecorder(config)
- #config.pluginmanager.do_configure(config)
- config.hook.pytest_cmdline_main(config=config)
- #config.pluginmanager.do_unconfigure(config)
- return reprec
+ items, rec = self.inprocess_run(args)
+ return rec
- def config_preparse(self):
- config = self.Config()
- for plugin in self.plugins:
- if isinstance(plugin, str):
- config.pluginmanager.import_plugin(plugin)
- else:
- if isinstance(plugin, dict):
- plugin = PseudoPlugin(plugin)
- if not config.pluginmanager.isregistered(plugin):
- config.pluginmanager.register(plugin)
- return config
+ def inprocess_run(self, args, plugins=None):
+ rec = []
+ items = []
+ class Collect:
+ def pytest_configure(x, config):
+ rec.append(self.getreportrecorder(config))
+ def pytest_itemcollected(self, item):
+ items.append(item)
+ if not plugins:
+ plugins = []
+ plugins.append(Collect())
+ ret = self.pytestmain(list(args), plugins=[Collect()])
+ reprec = rec[0]
+ reprec.ret = ret
+ assert len(rec) == 1
+ return items, reprec
def parseconfig(self, *args):
- if not args:
- args = (self.tmpdir,)
- config = self.config_preparse()
- args = list(args)
+ args = [str(x) for x in args]
for x in args:
if str(x).startswith('--basetemp'):
break
else:
args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
- config.parse(args)
+ import _pytest.core
+ config = _pytest.core._prepareconfig(args, self.plugins)
+ # the in-process pytest invocation needs to avoid leaking FDs
+ # so we register a "reset_capturings" call on the capturing manager
+ # and make sure it gets called
+ config._cleanup.append(
+ config.pluginmanager.getplugin("capturemanager").reset_capturings)
+ import _pytest.config
+ self.request.addfinalizer(
+ lambda: _pytest.config.pytest_unconfigure(config))
return config
- def reparseconfig(self, args=None):
- """ this is used from tests that want to re-invoke parse(). """
- if not args:
- args = [self.tmpdir]
- oldconfig = getattr(py.test, 'config', None)
- try:
- c = py.test.config = self.Config()
- c.basetemp = py.path.local.make_numbered_dir(prefix="reparse",
- keep=0, rootdir=self.tmpdir, lock_timeout=None)
- c.parse(args)
- c.pluginmanager.do_configure(c)
- self.request.addfinalizer(lambda: c.pluginmanager.do_unconfigure(c))
- return c
- finally:
- py.test.config = oldconfig
-
def parseconfigure(self, *args):
config = self.parseconfig(*args)
config.pluginmanager.do_configure(config)
self.request.addfinalizer(lambda:
- config.pluginmanager.do_unconfigure(config))
+ config.pluginmanager.do_unconfigure(config))
return config
def getitem(self, source, funcname="test_func"):
@@ -420,7 +404,6 @@
self.makepyfile(__init__ = "#")
self.config = config = self.parseconfigure(path, *configargs)
node = self.getnode(config, path)
- #config.pluginmanager.do_unconfigure(config)
return node
def collect_by_name(self, modcol, name):
@@ -437,9 +420,16 @@
return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
def pytestmain(self, *args, **kwargs):
- ret = pytest.main(*args, **kwargs)
- if ret == 2:
- raise KeyboardInterrupt()
+ class ResetCapturing:
+ @pytest.mark.trylast
+ def pytest_unconfigure(self, config):
+ capman = config.pluginmanager.getplugin("capturemanager")
+ capman.reset_capturings()
+ plugins = kwargs.setdefault("plugins", [])
+ rc = ResetCapturing()
+ plugins.append(rc)
+ return pytest.main(*args, **kwargs)
+
def run(self, *cmdargs):
return self._run(*cmdargs)
@@ -528,6 +518,8 @@
pexpect = py.test.importorskip("pexpect", "2.4")
if hasattr(sys, 'pypy_version_info') and '64' in py.std.platform.machine():
pytest.skip("pypy-64 bit not supported")
+ if sys.platform == "darwin":
+ pytest.xfail("pexpect does not work reliably on darwin?!")
logfile = self.tmpdir.join("spawn.out")
child = pexpect.spawn(cmd, logfile=logfile.open("w"))
child.timeout = expect_timeout
@@ -540,10 +532,6 @@
return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
py.io.saferepr(out),)
-class PseudoPlugin:
- def __init__(self, vars):
- self.__dict__.update(vars)
-
class ReportRecorder(object):
def __init__(self, hook):
self.hook = hook
@@ -565,10 +553,17 @@
def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
return [x.report for x in self.getcalls(names)]
- def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport", when=None):
+ def matchreport(self, inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport", when=None):
""" return a testreport whose dotted import path matches """
l = []
for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
if when and getattr(rep, 'when', None) != when:
continue
if not inamepart or inamepart in rep.nodeid.split("::"):
diff --git a/_pytest/python.py b/_pytest/python.py
--- a/_pytest/python.py
+++ b/_pytest/python.py
@@ -4,6 +4,7 @@
import sys
import pytest
from py._code.code import TerminalRepr
+from _pytest.monkeypatch import monkeypatch
import _pytest
cutdir = py.path.local(_pytest.__file__).dirpath()
@@ -26,6 +27,24 @@
showfuncargs(config)
return 0
+
+def pytest_generate_tests(metafunc):
+ try:
+ param = metafunc.function.parametrize
+ except AttributeError:
+ return
+ for p in param:
+ metafunc.parametrize(*p.args, **p.kwargs)
+
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in multiple different argument value sets. Example: "
+ "@parametrize('arg1', [1,2]) would lead to two calls of the decorated "
+ "test function, one with arg1=1 and another with arg1=2."
+ )
+
+
@pytest.mark.trylast
def pytest_namespace():
raises.Exception = pytest.fail.Exception
@@ -138,6 +157,7 @@
obj = obj.place_as
self._fslineno = py.code.getfslineno(obj)
+ assert isinstance(self._fslineno[1], int), obj
return self._fslineno
def reportinfo(self):
@@ -155,6 +175,7 @@
else:
fspath, lineno = self._getfslineno()
modpath = self.getmodpath()
+ assert isinstance(lineno, int)
return fspath, lineno, modpath
class PyCollectorMixin(PyobjMixin, pytest.Collector):
@@ -200,6 +221,7 @@
module = self.getparent(Module).obj
clscol = self.getparent(Class)
cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
metafunc = Metafunc(funcobj, config=self.config,
cls=cls, module=module)
gentesthook = self.config.hook.pytest_generate_tests
@@ -219,6 +241,19 @@
l.append(function)
return l
+def transfer_markers(funcobj, cls, mod):
+ # XXX this should rather be code in the mark plugin or the mark
+ # plugin should merge with the python plugin.
+ for holder in (cls, mod):
+ try:
+ pytestmark = holder.pytestmark
+ except AttributeError:
+ continue
+ if isinstance(pytestmark, list):
+ for mark in pytestmark:
+ mark(funcobj)
+ else:
+ pytestmark(funcobj)
class Module(pytest.File, PyCollectorMixin):
def _getobj(self):
@@ -226,13 +261,8 @@
def _importtestmodule(self):
# we assume we are only called once per module
- from _pytest import assertion
- assertion.before_module_import(self)
try:
- try:
- mod = self.fspath.pyimport(ensuresyspath=True)
- finally:
- assertion.after_module_import(self)
+ mod = self.fspath.pyimport(ensuresyspath=True)
except SyntaxError:
excinfo = py.code.ExceptionInfo()
raise self.CollectError(excinfo.getrepr(style="short"))
@@ -244,7 +274,8 @@
" %s\n"
"which is not the same as the test file we want to collect:\n"
" %s\n"
- "HINT: use a unique basename for your test file modules"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules"
% e.args
)
#print "imported test module", mod
@@ -374,6 +405,7 @@
tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno+1))
+
class Generator(FunctionMixin, PyCollectorMixin, pytest.Collector):
def collect(self):
# test generators are seen as collectors but they also
@@ -430,6 +462,7 @@
"yielded functions (deprecated) cannot have funcargs")
else:
if callspec is not None:
+ self.callspec = callspec
self.funcargs = callspec.funcargs or {}
self._genid = callspec.id
if hasattr(callspec, "param"):
@@ -506,15 +539,59 @@
request._fillfuncargs()
_notexists = object()
-class CallSpec:
- def __init__(self, funcargs, id, param):
- self.funcargs = funcargs
- self.id = id
+
+class CallSpec2(object):
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = _notexists
+ self._globalid_args = set()
+ self._globalparam = _notexists
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" %(arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is _notexists:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti(self, valtype, argnames, valset, id):
+ for arg,val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ getattr(self, valtype)[arg] = val
+ self._idlist.append(id)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not _notexists:
+ self._idlist.append(id)
if param is not _notexists:
- self.param = param
- def __repr__(self):
- return "<CallSpec id=%r param=%r funcargs=%r>" %(
- self.id, getattr(self, 'param', '?'), self.funcargs)
+ assert self._globalparam is _notexists
+ self._globalparam = param
+
class Metafunc:
def __init__(self, function, config=None, cls=None, module=None):
@@ -528,31 +605,71 @@
self._calls = []
self._ids = py.builtin.set()
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+ during the collection phase. If you need to setup expensive resources
+ you may pass indirect=True and implement a funcarg factory which can
+ perform the expensive setup just before a test is actually run.
+
+ :arg argnames: an argument name or a list of argument names
+
+ :arg argvalues: a list of values for the argname or a list of tuples of
+ values for the list of argument names.
+
+ :arg indirect: if True each argvalue corresponding to an argument will
+ be passed as request.param to its respective funcarg factory so
+ that it can perform more expensive setups during the setup phase of
+ a test rather than at collection time.
+
+ :arg ids: list of string ids each corresponding to the argvalues so
+ that they are part of the test id. If no ids are provided they will
+ be generated automatically from the argvalues.
+ """
+ if not isinstance(argnames, (tuple, list)):
+ argnames = (argnames,)
+ argvalues = [(val,) for val in argvalues]
+ if not indirect:
+ #XXX should we also check for the opposite case?
+ for arg in argnames:
+ if arg not in self.funcargnames:
+ raise ValueError("%r has no argument %r" %(self.function, arg))
+ valtype = indirect and "params" or "funcargs"
+ if not ids:
+ idmaker = IDMaker()
+ ids = list(map(idmaker, argvalues))
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ for i, valset in enumerate(argvalues):
+ assert len(valset) == len(argnames)
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti(valtype, argnames, valset, ids[i])
+ newcalls.append(newcallspec)
+ self._calls = newcalls
+
def addcall(self, funcargs=None, id=_notexists, param=_notexists):
- """ add a new call to the underlying test function during the
- collection phase of a test run. Note that request.addcall() is
- called during the test collection phase prior and independently
- to actual test execution. Therefore you should perform setup
- of resources in a funcarg factory which can be instrumented
- with the ``param``.
+ """ (deprecated, use parametrize) Add a new call to the underlying
+ test function during the collection phase of a test run. Note that
+ request.addcall() is called during the test collection phase prior and
+ independently to actual test execution. You should only use addcall()
+ if you need to specify multiple arguments of a test function.
:arg funcargs: argument keyword dictionary used when invoking
the test function.
:arg id: used for reporting and identification purposes. If you
- don't supply an `id` the length of the currently
- list of calls to the test function will be used.
+ don't supply an `id` an automatic unique id will be generated.
- :arg param: will be exposed to a later funcarg factory invocation
- through the ``request.param`` attribute. It allows to
- defer test fixture setup activities to when an actual
- test is run.
+ :arg param: a parameter which will be exposed to a later funcarg factory
+ invocation through the ``request.param`` attribute.
"""
assert funcargs is None or isinstance(funcargs, dict)
if funcargs is not None:
for name in funcargs:
if name not in self.funcargnames:
pytest.fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
if id is None:
raise ValueError("id=None not allowed")
if id is _notexists:
@@ -561,11 +678,26 @@
if id in self._ids:
raise ValueError("duplicate id %r" % id)
self._ids.add(id)
- self._calls.append(CallSpec(funcargs, id, param))
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+class IDMaker:
+ def __init__(self):
+ self.counter = 0
+ def __call__(self, valset):
+ l = []
+ for val in valset:
+ if not isinstance(val, (int, str)):
+ val = "."+str(self.counter)
+ self.counter += 1
+ l.append(str(val))
+ return "-".join(l)
class FuncargRequest:
""" A request for function arguments from a test function.
-
+
Note that there is an optional ``param`` attribute in case
there was an invocation to metafunc.addcall(param=...).
If no such call was done in a ``pytest_generate_tests``
@@ -637,7 +769,7 @@
def applymarker(self, marker):
- """ apply a marker to a single test function invocation.
+ """ Apply a marker to a single test function invocation.
This method is useful if you don't want to have a keyword/marker
on all function invocations.
@@ -649,7 +781,7 @@
self._pyfuncitem.keywords[marker.markname] = marker
def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
- """ return a testing resource managed by ``setup`` &
+ """ Return a testing resource managed by ``setup`` &
``teardown`` calls. ``scope`` and ``extrakey`` determine when the
``teardown`` function will be called so that subsequent calls to
``setup`` would recreate the resource.
@@ -698,11 +830,18 @@
self._raiselookupfailed(argname)
funcargfactory = self._name2factory[argname].pop()
oldarg = self._currentarg
- self._currentarg = argname
+ mp = monkeypatch()
+ mp.setattr(self, '_currentarg', argname)
+ try:
+ param = self._pyfuncitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ pass
+ else:
+ mp.setattr(self, 'param', param, raising=False)
try:
self._funcargs[argname] = res = funcargfactory(request=self)
finally:
- self._currentarg = oldarg
+ mp.undo()
return res
def _getscopeitem(self, scope):
@@ -817,8 +956,7 @@
>>> raises(ZeroDivisionError, f, x=0)
- A third possibility is to use a string which which will
- be executed::
+ A third possibility is to use a string to be executed::
>>> raises(ZeroDivisionError, "f(0)")
diff --git a/_pytest/resultlog.py b/_pytest/resultlog.py
--- a/_pytest/resultlog.py
+++ b/_pytest/resultlog.py
@@ -63,6 +63,8 @@
self.write_log_entry(testpath, lettercode, longrepr)
def pytest_runtest_logreport(self, report):
+ if report.when != "call" and report.passed:
+ return
res = self.config.hook.pytest_report_teststatus(report=report)
code = res[1]
if code == 'x':
@@ -89,5 +91,8 @@
self.log_outcome(report, code, longrepr)
def pytest_internalerror(self, excrepr):
- path = excrepr.reprcrash.path
+ reprcrash = getattr(excrepr, 'reprcrash', None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
self.write_log_entry(path, '!', str(excrepr))
diff --git a/_pytest/runner.py b/_pytest/runner.py
--- a/_pytest/runner.py
+++ b/_pytest/runner.py
@@ -1,6 +1,6 @@
""" basic collect and runtest protocol implementations """
-import py, sys
+import py, sys, time
from py._code.code import TerminalRepr
def pytest_namespace():
@@ -14,33 +14,60 @@
#
# pytest plugin hooks
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption('--durations',
+ action="store", type="int", default=None, metavar="N",
+ help="show N slowest setup/test durations (N=0 for all)."),
+
+def pytest_terminal_summary(terminalreporter):
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, 'duration'):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" %
+ (rep.duration, rep.when, nodeid))
+
def pytest_sessionstart(session):
session._setupstate = SetupState()
-
-def pytest_sessionfinish(session, exitstatus):
- hook = session.config.hook
- rep = hook.pytest__teardown_final(session=session)
- if rep:
- hook.pytest__teardown_final_logerror(session=session, report=rep)
- session.exitstatus = 1
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
class NodeInfo:
def __init__(self, location):
self.location = location
-def pytest_runtest_protocol(item):
+def pytest_runtest_protocol(item, nextitem):
item.ihook.pytest_runtest_logstart(
nodeid=item.nodeid, location=item.location,
)
- runtestprotocol(item)
+ runtestprotocol(item, nextitem=nextitem)
return True
-def runtestprotocol(item, log=True):
+def runtestprotocol(item, log=True, nextitem=None):
rep = call_and_report(item, "setup", log)
reports = [rep]
if rep.passed:
reports.append(call_and_report(item, "call", log))
- reports.append(call_and_report(item, "teardown", log))
+ reports.append(call_and_report(item, "teardown", log,
+ nextitem=nextitem))
return reports
def pytest_runtest_setup(item):
@@ -49,16 +76,8 @@
def pytest_runtest_call(item):
item.runtest()
-def pytest_runtest_teardown(item):
- item.session._setupstate.teardown_exact(item)
-
-def pytest__teardown_final(session):
- call = CallInfo(session._setupstate.teardown_all, when="teardown")
- if call.excinfo:
- ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir)
- call.excinfo.traceback = ntraceback.filter()
- longrepr = call.excinfo.getrepr(funcargs=True)
- return TeardownErrorReport(longrepr)
+def pytest_runtest_teardown(item, nextitem):
+ item.session._setupstate.teardown_exact(item, nextitem)
def pytest_report_teststatus(report):
if report.when in ("setup", "teardown"):
@@ -74,18 +93,18 @@
#
# Implementation
-def call_and_report(item, when, log=True):
- call = call_runtest_hook(item, when)
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
hook = item.ihook
report = hook.pytest_runtest_makereport(item=item, call=call)
- if log and (when == "call" or not report.passed):
+ if log:
hook.pytest_runtest_logreport(report=report)
return report
-def call_runtest_hook(item, when):
+def call_runtest_hook(item, when, **kwds):
hookname = "pytest_runtest_" + when
ihook = getattr(item.ihook, hookname)
- return CallInfo(lambda: ihook(item=item), when=when)
+ return CallInfo(lambda: ihook(item=item, **kwds), when=when)
class CallInfo:
""" Result/Exception info a function invocation. """
@@ -95,12 +114,16 @@
#: context of invocation: one of "setup", "call",
#: "teardown", "memocollect"
self.when = when
+ self.start = time.time()
try:
- self.result = func()
- except KeyboardInterrupt:
- raise
- except:
- self.excinfo = py.code.ExceptionInfo()
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.excinfo = py.code.ExceptionInfo()
+ finally:
+ self.stop = time.time()
def __repr__(self):
if self.excinfo:
@@ -120,6 +143,10 @@
return s
class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
def toterminal(self, out):
longrepr = self.longrepr
if hasattr(self, 'node'):
@@ -139,6 +166,7 @@
def pytest_runtest_makereport(item, call):
when = call.when
+ duration = call.stop-call.start
keywords = dict([(x,1) for x in item.keywords])
excinfo = call.excinfo
if not call.excinfo:
@@ -160,14 +188,15 @@
else: # exception in setup or teardown
longrepr = item._repr_failure_py(excinfo)
return TestReport(item.nodeid, item.location,
- keywords, outcome, longrepr, when)
+ keywords, outcome, longrepr, when,
+ duration=duration)
class TestReport(BaseReport):
""" Basic test report object (also used for setup and teardown calls if
they fail).
"""
def __init__(self, nodeid, location,
- keywords, outcome, longrepr, when):
+ keywords, outcome, longrepr, when, sections=(), duration=0, **extra):
#: normalized collection node id
self.nodeid = nodeid
@@ -179,16 +208,25 @@
#: a name -> value dictionary containing all keywords and
#: markers associated with a test invocation.
self.keywords = keywords
-
+
#: test outcome, always one of "passed", "failed", "skipped".
self.outcome = outcome
#: None or a failure representation.
self.longrepr = longrepr
-
+
#: one of 'setup', 'call', 'teardown' to indicate runtest phase.
self.when = when
+ #: list of (secname, data) extra information which needs to
+ #: marshallable
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
def __repr__(self):
return "<TestReport %r when=%r outcome=%r>" % (
self.nodeid, self.when, self.outcome)
@@ -196,8 +234,10 @@
class TeardownErrorReport(BaseReport):
outcome = "failed"
when = "teardown"
- def __init__(self, longrepr):
+ def __init__(self, longrepr, **extra):
self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
def pytest_make_collect_report(collector):
call = CallInfo(collector._memocollect, "memocollect")
@@ -219,11 +259,13 @@
getattr(call, 'result', None))
class CollectReport(BaseReport):
- def __init__(self, nodeid, outcome, longrepr, result):
+ def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
self.nodeid = nodeid
self.outcome = outcome
self.longrepr = longrepr
self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
@property
def location(self):
@@ -277,20 +319,22 @@
self._teardown_with_finalization(None)
assert not self._finalizers
- def teardown_exact(self, item):
- if self.stack and item == self.stack[-1]:
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
self._pop_and_teardown()
- else:
- self._callfinalizers(item)
def prepare(self, colitem):
""" setup objects along the collector chain to the test-method
and teardown previously setup objects."""
needed_collectors = colitem.listchain()
- while self.stack:
- if self.stack == needed_collectors[:len(self.stack)]:
- break
- self._pop_and_teardown()
+ self._teardown_towards(needed_collectors)
+
# check if the last collection node has raised an error
for col in self.stack:
if hasattr(col, '_prepare_exc'):
diff --git a/_pytest/skipping.py b/_pytest/skipping.py
--- a/_pytest/skipping.py
+++ b/_pytest/skipping.py
@@ -9,6 +9,21 @@
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "skipif(*conditions): skip the given test function if evaluation "
+ "of all conditions has a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. "
+ )
+ config.addinivalue_line("markers",
+ "xfail(*conditions, reason=None, run=True): mark the the test function "
+ "as an expected failure. Optionally specify a reason and run=False "
+ "if you don't even want to execute the test function. Any positional "
+ "condition strings will be evaluated (like with skipif) and if one is "
+ "False the marker will not be applied."
+ )
+
def pytest_namespace():
return dict(xfail=xfail)
@@ -117,6 +132,14 @@
def pytest_runtest_makereport(__multicall__, item, call):
if not isinstance(item, pytest.Function):
return
+    # unittest special case, see setting of _unexpectedsuccess
+ if hasattr(item, '_unexpectedsuccess'):
+ rep = __multicall__.execute()
+ if rep.when == "call":
+ # we need to translate into how py.test encodes xpass
+ rep.keywords['xfail'] = "reason: " + item._unexpectedsuccess
+ rep.outcome = "failed"
+ return rep
if not (call.excinfo and
call.excinfo.errisinstance(py.test.xfail.Exception)):
evalxfail = getattr(item, '_evalxfail', None)
@@ -169,21 +192,23 @@
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
- show_failed(terminalreporter, lines)
+ show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
+ elif char == "E":
+ show_simple(terminalreporter, lines, 'error', "ERROR %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
-def show_failed(terminalreporter, lines):
+def show_simple(terminalreporter, lines, stat, format):
tw = terminalreporter._tw
- failed = terminalreporter.stats.get("failed")
+ failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = rep.nodeid
- lines.append("FAIL %s" %(pos, ))
+ lines.append(format %(pos, ))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
diff --git a/_pytest/terminal.py b/_pytest/terminal.py
--- a/_pytest/terminal.py
+++ b/_pytest/terminal.py
@@ -15,7 +15,7 @@
group._addoption('-r',
action="store", dest="reportchars", default=None, metavar="chars",
help="show extra test summary info as specified by chars (f)ailed, "
- "(s)skipped, (x)failed, (X)passed.")
+ "(E)error, (s)skipped, (x)failed, (X)passed.")
group._addoption('-l', '--showlocals',
action="store_true", dest="showlocals", default=False,
help="show locals in tracebacks (disabled by default).")
@@ -43,7 +43,8 @@
pass
else:
stdout = os.fdopen(newfd, stdout.mode, 1)
- config._toclose = stdout
+ config._cleanup.append(lambda: stdout.close())
+
reporter = TerminalReporter(config, stdout)
config.pluginmanager.register(reporter, 'terminalreporter')
if config.option.debug or config.option.traceconfig:
@@ -52,11 +53,6 @@
reporter.write_line("[traceconfig] " + msg)
config.trace.root.setprocessor("pytest:config", mywriter)
-def pytest_unconfigure(config):
- if hasattr(config, '_toclose'):
- #print "closing", config._toclose, config._toclose.fileno()
- config._toclose.close()
-
def getreportopt(config):
reportopts = ""
optvalue = config.option.report
@@ -165,9 +161,6 @@
def pytest_deselected(self, items):
self.stats.setdefault('deselected', []).extend(items)
- def pytest__teardown_final_logerror(self, report):
- self.stats.setdefault("error", []).append(report)
-
def pytest_runtest_logstart(self, nodeid, location):
# ensure that the path is printed before the
# 1st test of a module starts running
@@ -259,7 +252,7 @@
msg = "platform %s -- Python %s" % (sys.platform, verinfo)
if hasattr(sys, 'pypy_version_info'):
verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
- msg += "[pypy-%s]" % verinfo
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
msg += " -- pytest-%s" % (py.test.__version__)
if self.verbosity > 0 or self.config.option.debug or \
getattr(self.config.option, 'pastebin', None):
@@ -289,10 +282,18 @@
# we take care to leave out Instances aka ()
# because later versions are going to get rid of them anyway
if self.config.option.verbose < 0:
- for item in items:
- nodeid = item.nodeid
- nodeid = nodeid.replace("::()::", "::")
- self._tw.line(nodeid)
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split('::', 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
return
stack = []
indent = ""
@@ -318,12 +319,17 @@
self.config.hook.pytest_terminal_summary(terminalreporter=self)
if exitstatus == 2:
self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
self.summary_deselected()
self.summary_stats()
def pytest_keyboard_interrupt(self, excinfo):
self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+ def pytest_unconfigure(self):
+ if hasattr(self, '_keyboardinterrupt_memo'):
+ self._report_keyboardinterrupt()
+
def _report_keyboardinterrupt(self):
excrepr = self._keyboardinterrupt_memo
msg = excrepr.reprcrash.message
@@ -388,7 +394,7 @@
else:
msg = self._getfailureheadline(rep)
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
def summary_errors(self):
if self.config.option.tbstyle != "no":
@@ -406,7 +412,15 @@
elif rep.when == "teardown":
msg = "ERROR at teardown of " + msg
self.write_sep("_", msg)
- rep.toterminal(self._tw)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ for secname, content in rep.sections:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
def summary_stats(self):
session_duration = py.std.time.time() - self._sessionstarttime
@@ -417,9 +431,10 @@
keys.append(key)
parts = []
for key in keys:
- val = self.stats.get(key, None)
- if val:
- parts.append("%d %s" %(len(val), key))
+ if key: # setup/teardown reports have an empty key, ignore them
+ val = self.stats.get(key, None)
+ if val:
+ parts.append("%d %s" %(len(val), key))
line = ", ".join(parts)
# XXX coloring
msg = "%s in %.2f seconds" %(line, session_duration)
@@ -430,8 +445,15 @@
def summary_deselected(self):
if 'deselected' in self.stats:
+ l = []
+ k = self.config.option.keyword
+ if k:
+ l.append("-k%s" % k)
+ m = self.config.option.markexpr
+ if m:
+ l.append("-m %r" % m)
self.write_sep("=", "%d tests deselected by %r" %(
- len(self.stats['deselected']), self.config.option.keyword), bold=True)
+ len(self.stats['deselected']), " ".join(l)), bold=True)
def repr_pythonversion(v=None):
if v is None:
diff --git a/_pytest/tmpdir.py b/_pytest/tmpdir.py
--- a/_pytest/tmpdir.py
+++ b/_pytest/tmpdir.py
@@ -46,7 +46,7 @@
def finish(self):
self.trace("finish")
-
+
def pytest_configure(config):
mp = monkeypatch()
t = TempdirHandler(config)
@@ -64,5 +64,5 @@
name = request._pyfuncitem.name
name = py.std.re.sub("[\W]", "_", name)
x = request.config._tmpdirhandler.mktemp(name, numbered=True)
- return x.realpath()
+ return x
diff --git a/_pytest/unittest.py b/_pytest/unittest.py
--- a/_pytest/unittest.py
+++ b/_pytest/unittest.py
@@ -2,6 +2,9 @@
import pytest, py
import sys, pdb
+# for transferring markers
+from _pytest.python import transfer_markers
+
def pytest_pycollect_makeitem(collector, name, obj):
unittest = sys.modules.get('unittest')
if unittest is None:
@@ -19,7 +22,14 @@
class UnitTestCase(pytest.Class):
def collect(self):
loader = py.std.unittest.TestLoader()
+ module = self.getparent(pytest.Module).obj
+ cls = self.obj
for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ funcobj = getattr(x, 'im_func', x)
+ transfer_markers(funcobj, cls, module)
+ if hasattr(funcobj, 'todo'):
+ pytest.mark.xfail(reason=str(funcobj.todo))(funcobj)
yield TestCaseFunction(name, parent=self)
def setup(self):
@@ -37,15 +47,13 @@
class TestCaseFunction(pytest.Function):
_excinfo = None
- def __init__(self, name, parent):
- super(TestCaseFunction, self).__init__(name, parent)
- if hasattr(self._obj, 'todo'):
- getattr(self._obj, 'im_func', self._obj).xfail = \
- pytest.mark.xfail(reason=str(self._obj.todo))
-
def setup(self):
self._testcase = self.parent.obj(self.name)
self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, 'skip'):
+ pytest.skip(self._testcase.skip)
+ if hasattr(self._obj, 'skip'):
+ pytest.skip(self._obj.skip)
if hasattr(self._testcase, 'setup_method'):
self._testcase.setup_method(self._obj)
@@ -83,28 +91,37 @@
self._addexcinfo(rawexcinfo)
def addFailure(self, testcase, rawexcinfo):
self._addexcinfo(rawexcinfo)
+
def addSkip(self, testcase, reason):
try:
pytest.skip(reason)
except pytest.skip.Exception:
self._addexcinfo(sys.exc_info())
- def addExpectedFailure(self, testcase, rawexcinfo, reason):
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
try:
pytest.xfail(str(reason))
except pytest.xfail.Exception:
self._addexcinfo(sys.exc_info())
- def addUnexpectedSuccess(self, testcase, reason):
- pass
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
def addSuccess(self, testcase):
pass
+
def stopTest(self, testcase):
pass
+
def runtest(self):
self._testcase(result=self)
def _prunetraceback(self, excinfo):
pytest.Function._prunetraceback(self, excinfo)
- excinfo.traceback = excinfo.traceback.filter(lambda x:not x.frame.f_globals.get('__unittest'))
+ traceback = excinfo.traceback.filter(
+ lambda x:not x.frame.f_globals.get('__unittest'))
+ if traceback:
+ excinfo.traceback = traceback
@pytest.mark.tryfirst
def pytest_runtest_makereport(item, call):
@@ -120,14 +137,19 @@
ut = sys.modules['twisted.python.failure']
Failure__init__ = ut.Failure.__init__.im_func
check_testcase_implements_trial_reporter()
- def excstore(self, exc_value=None, exc_type=None, exc_tb=None):
+ def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
+ captureVars=None):
if exc_value is None:
self._rawexcinfo = sys.exc_info()
else:
if exc_type is None:
exc_type = type(exc_value)
self._rawexcinfo = (exc_type, exc_value, exc_tb)
- Failure__init__(self, exc_value, exc_type, exc_tb)
+ try:
+ Failure__init__(self, exc_value, exc_type, exc_tb,
+ captureVars=captureVars)
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
ut.Failure.__init__ = excstore
try:
return __multicall__.execute()
diff --git a/dotviewer/graphparse.py b/dotviewer/graphparse.py
--- a/dotviewer/graphparse.py
+++ b/dotviewer/graphparse.py
@@ -93,6 +93,7 @@
return result
def parse_plain(graph_id, plaincontent, links={}, fixedfont=False):
+ plaincontent = plaincontent.replace('\r\n', '\n') # fix Windows EOL
lines = plaincontent.splitlines(True)
for i in range(len(lines)-2, -1, -1):
if lines[i].endswith('\\\n'): # line ending in '\'
diff --git a/lib_pypy/datetime.py b/lib_pypy/datetime.py
--- a/lib_pypy/datetime.py
+++ b/lib_pypy/datetime.py
@@ -968,8 +968,7 @@
self._checkOverflow(t.year)
result = date(t.year, t.month, t.day)
return result
- raise TypeError
- # XXX Should be 'return NotImplemented', but there's a bug in 2.2...
+ return NotImplemented # note that this doesn't work on CPython 2.2
__radd__ = __add__
diff --git a/py/__init__.py b/py/__init__.py
--- a/py/__init__.py
+++ b/py/__init__.py
@@ -8,7 +8,7 @@
(c) Holger Krekel and others, 2004-2010
"""
-__version__ = '1.4.4.dev1'
+__version__ = '1.4.7'
from py import _apipkg
@@ -70,6 +70,11 @@
'getrawcode' : '._code.code:getrawcode',
'patch_builtins' : '._code.code:patch_builtins',
'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
},
# backports and additions of builtins
diff --git a/py/_builtin.py b/py/_builtin.py
--- a/py/_builtin.py
+++ b/py/_builtin.py
@@ -113,9 +113,12 @@
# some backward compatibility helpers
_basestring = str
- def _totext(obj, encoding=None):
+ def _totext(obj, encoding=None, errors=None):
if isinstance(obj, bytes):
- obj = obj.decode(encoding)
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
elif not isinstance(obj, str):
obj = str(obj)
return obj
@@ -142,7 +145,7 @@
del back
elif locs is None:
locs = globs
- fp = open(fn, "rb")
+ fp = open(fn, "r")
try:
source = fp.read()
finally:
diff --git a/py/_code/_assertionnew.py b/py/_code/_assertionnew.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionnew.py
@@ -0,0 +1,339 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+    """Interpret AST nodes to glean useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
diff --git a/py/_code/_assertionold.py b/py/_code/_assertionold.py
new file mode 100644
--- /dev/null
+++ b/py/_code/_assertionold.py
@@ -0,0 +1,555 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = ''
+ self.__obj__.filename = ''
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = ''
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert '
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = ''
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, py.std.types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/py/_code/assertion.py b/py/_code/assertion.py
new file mode 100644
--- /dev/null
+++ b/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+ cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = ""
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+ from py._code._assertionnew import interpret as reinterpret
+else:
+ reinterpret = reinterpret_old
+
diff --git a/py/_code/code.py b/py/_code/code.py
--- a/py/_code/code.py
+++ b/py/_code/code.py
@@ -145,6 +145,17 @@
return self.frame.f_locals
locals = property(getlocals, None, None, "locals of underlaying frame")
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
def getfirstlinesource(self):
# on Jython this firstlineno can be -1 apparently
return max(self.frame.code.firstlineno, 0)
@@ -158,13 +169,12 @@
end = self.lineno
try:
_, end = source.getstatementrange(end)
- except IndexError:
+ except (IndexError, ValueError):
end = self.lineno + 1
# heuristic to stop displaying source on e.g.
# if something: # assume this causes a NameError
# # _this_ lines and the one
# below we don't want from entry.getsource()
- end = min(end, len(source))
for i in range(self.lineno, end):
if source[i].rstrip().endswith(':'):
end = i + 1
@@ -273,7 +283,11 @@
"""
cache = {}
for i, entry in enumerate(self):
- key = entry.frame.code.path, entry.lineno
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
#print "checking for recursion at", key
l = cache.setdefault(key, [])
if l:
@@ -308,7 +322,7 @@
self._striptext = 'AssertionError: '
self._excinfo = tup
self.type, self.value, tb = self._excinfo
- self.typename = getattr(self.type, "__name__", "???")
+ self.typename = self.type.__name__
self.traceback = py.code.Traceback(tb)
def __repr__(self):
@@ -347,14 +361,16 @@
showlocals: show locals per traceback entry
style: long|short|no|native traceback style
tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals is ignored.
"""
if style == 'native':
- import traceback
- return ''.join(traceback.format_exception(
- self.type,
- self.value,
- self.traceback[0]._rawentry,
- ))
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
fmt = FormattedExcinfo(showlocals=showlocals, style=style,
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
@@ -452,7 +468,7 @@
def repr_locals(self, locals):
if self.showlocals:
lines = []
- keys = list(locals)
+ keys = [loc for loc in locals if loc[0] != "@"]
keys.sort()
for name in keys:
value = locals[name]
@@ -506,7 +522,10 @@
def _makepath(self, path):
if not self.abspath:
- np = py.path.local().bestrelpath(path)
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
if len(np) < len(str(path)):
path = np
return path
@@ -595,6 +614,19 @@
if self.extraline:
tw.line(self.extraline)
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
class ReprEntry(TerminalRepr):
localssep = "_ "
@@ -680,19 +712,26 @@
oldbuiltins = {}
-def patch_builtins(compile=True):
- """ put compile builtins to Python's builtins. """
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
if compile:
l = oldbuiltins.setdefault('compile', [])
l.append(py.builtin.builtins.compile)
py.builtin.builtins.compile = py.code.compile
-def unpatch_builtins(compile=True):
+def unpatch_builtins(assertion=True, compile=True):
""" remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
if compile:
py.builtin.builtins.compile = oldbuiltins['compile'].pop()
-def getrawcode(obj):
+def getrawcode(obj, trycall=True):
""" return code object for given function. """
try:
return obj.__code__
@@ -701,5 +740,10 @@
obj = getattr(obj, 'func_code', obj)
obj = getattr(obj, 'f_code', obj)
obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
return obj
diff --git a/py/_code/source.py b/py/_code/source.py
--- a/py/_code/source.py
+++ b/py/_code/source.py
@@ -108,6 +108,7 @@
def getstatementrange(self, lineno, assertion=False):
""" return (start, end) tuple which spans the minimal
statement region which containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
"""
# XXX there must be a better than these heuristic ways ...
# XXX there may even be better heuristics :-)
@@ -116,6 +117,7 @@
# 1. find the start of the statement
from codeop import compile_command
+ end = None
for start in range(lineno, -1, -1):
if assertion:
line = self.lines[start]
@@ -139,7 +141,9 @@
trysource = self[start:end]
if trysource.isparseable():
return start, end
- return start, len(self)
+ if end is None:
+ raise IndexError("no valid source range around line %d " % (lineno,))
+ return start, end
def getblockend(self, lineno):
# XXX
@@ -257,23 +261,29 @@
def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
try:
code = py.code.Code(obj)
except TypeError:
- # fallback to
- fn = (py.std.inspect.getsourcefile(obj) or
- py.std.inspect.getfile(obj))
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
fspath = fn and py.path.local(fn) or None
+ lineno = -1
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
- lineno = None
- else:
- lineno = None
+ pass
else:
fspath = code.path
lineno = code.firstlineno
+ assert isinstance(lineno, int)
return fspath, lineno
#
@@ -286,7 +296,7 @@
except py.builtin._sysex:
raise
except:
- return None, None
+ return None, -1
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
diff --git a/py/_error.py b/py/_error.py
--- a/py/_error.py
+++ b/py/_error.py
@@ -23,6 +23,7 @@
2: errno.ENOENT,
3: errno.ENOENT,
17: errno.EEXIST,
+ 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailiable
22: errno.ENOTDIR,
267: errno.ENOTDIR,
5: errno.EACCES, # anything better?
diff --git a/py/_iniconfig.py b/py/_iniconfig.py
--- a/py/_iniconfig.py
+++ b/py/_iniconfig.py
@@ -103,6 +103,7 @@
def _parseline(self, line, lineno):
# comments
line = line.split('#')[0].rstrip()
+ line = line.split(';')[0].rstrip()
# blank lines
if not line:
return None, None
diff --git a/py/_io/capture.py b/py/_io/capture.py
--- a/py/_io/capture.py
+++ b/py/_io/capture.py
@@ -12,7 +12,7 @@
class TextIO(StringIO):
def write(self, data):
if not isinstance(data, unicode):
- data = unicode(data, getattr(self, '_encoding', 'UTF-8'))
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
StringIO.write(self, data)
else:
TextIO = StringIO
@@ -258,6 +258,9 @@
f = getattr(self, name).tmpfile
f.seek(0)
res = f.read()
+ enc = getattr(f, 'encoding', None)
+ if enc:
+ res = py.builtin._totext(res, enc, 'replace')
f.truncate(0)
f.seek(0)
l.append(res)
diff --git a/py/_io/terminalwriter.py b/py/_io/terminalwriter.py
--- a/py/_io/terminalwriter.py
+++ b/py/_io/terminalwriter.py
@@ -105,6 +105,8 @@
Blue=44, Purple=45, Cyan=46, White=47,
bold=1, light=2, blink=5, invert=7)
+ _newline = None # the last line printed
+
# XXX deprecate stringio argument
def __init__(self, file=None, stringio=False, encoding=None):
if file is None:
@@ -112,11 +114,9 @@
self.stringio = file = py.io.TextIO()
else:
file = py.std.sys.stdout
- if hasattr(file, 'encoding'):
- encoding = file.encoding
elif hasattr(file, '__call__'):
file = WriteFile(file, encoding=encoding)
- self.encoding = encoding
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
self._file = file
self.fullwidth = get_terminal_width()
self.hasmarkup = should_do_markup(file)
@@ -182,8 +182,31 @@
return s
def line(self, s='', **kw):
+ if self._newline == False:
+ self.write("\n")
self.write(s, **kw)
self.write('\n')
+ self._newline = True
+
+ def reline(self, line, **opts):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ if not self._newline:
+ self.write("\r")
+ self.write(line, **opts)
+ # see if we need to fill up some spaces at the end
+ # xxx have a more exact lastlinelen working from self.write?
+ lenline = len(line)
+ try:
+ lastlen = self._lastlinelen
+ except AttributeError:
+ pass
+ else:
+ if lenline < lastlen:
+ self.write(" " * (lastlen - lenline + 1))
+ self._lastlinelen = lenline
+ self._newline = False
+
class Win32ConsoleWriter(TerminalWriter):
def write(self, s, **kw):
@@ -280,10 +303,10 @@
SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
SetConsoleTextAttribute.restype = wintypes.BOOL
-
+
_GetConsoleScreenBufferInfo = \
ctypes.windll.kernel32.GetConsoleScreenBufferInfo
- _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
_GetConsoleScreenBufferInfo.restype = wintypes.BOOL
def GetConsoleInfo(handle):
diff --git a/py/_path/common.py b/py/_path/common.py
--- a/py/_path/common.py
+++ b/py/_path/common.py
@@ -64,7 +64,10 @@
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
- except (py.error.ENOENT, py.error.ENOTDIR):
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but its kind of necessary since ENOMEDIUM
+ # is not accessible in python
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
@@ -368,6 +371,5 @@
else:
name = str(path) # path.strpath # XXX svn?
pattern = '*' + path.sep + pattern
- from fnmatch import fnmatch
- return fnmatch(name, pattern)
+ return py.std.fnmatch.fnmatch(name, pattern)
diff --git a/py/_path/local.py b/py/_path/local.py
--- a/py/_path/local.py
+++ b/py/_path/local.py
@@ -157,14 +157,16 @@
return str(self) < str(other)
def samefile(self, other):
- """ return True if 'other' references the same file as 'self'. """
- if not iswin32:
- return py.error.checked_call(
- os.path.samefile, str(self), str(other))
+ """ return True if 'other' references the same file as 'self'.
+ """
+ if not isinstance(other, py.path.local):
+ other = os.path.abspath(str(other))
if self == other:
return True
- other = os.path.abspath(str(other))
- return self == other
+ if iswin32:
+ return False # ther is no samefile
+ return py.error.checked_call(
+ os.path.samefile, str(self), str(other))
def remove(self, rec=1, ignore_errors=False):
""" remove a file or directory (or a directory tree if rec=1).
@@ -539,7 +541,11 @@
if self.basename != "__init__.py":
modfile = modfile[:-12]
- if not self.samefile(modfile):
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
raise self.ImportMismatchError(modname, modfile, self)
return mod
else:
diff --git a/py/_path/svnurl.py b/py/_path/svnurl.py
--- a/py/_path/svnurl.py
+++ b/py/_path/svnurl.py
@@ -233,6 +233,8 @@
e = sys.exc_info()[1]
if e.err.find('non-existent in that revision') != -1:
raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
elif e.err.find('File not found') != -1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('not part of a repository')!=-1:
diff --git a/py/_path/svnwc.py b/py/_path/svnwc.py
--- a/py/_path/svnwc.py
+++ b/py/_path/svnwc.py
@@ -482,10 +482,13 @@
except py.process.cmdexec.Error:
e = sys.exc_info()[1]
strerr = e.err.lower()
- if strerr.find('file not found') != -1:
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
raise py.error.ENOENT(self)
if (strerr.find('file exists') != -1 or
strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
strerr.find("can't create directory") != -1):
raise py.error.EEXIST(self)
raise
@@ -593,7 +596,7 @@
out = self._authsvn('lock').strip()
if not out:
# warning or error, raise exception
- raise Exception(out[4:])
+ raise ValueError("unknown error in svn lock command")
def unlock(self):
""" unset a previously set lock """
@@ -1066,6 +1069,8 @@
modrev = '?'
author = '?'
date = ''
+ elif itemstatus == "replaced":
+ pass
else:
#print entryel.toxml()
commitel = entryel.getElementsByTagName('commit')[0]
@@ -1148,7 +1153,11 @@
raise ValueError("Not a versioned resource")
#raise ValueError, "Not a versioned resource %r" % path
self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
- self.rev = int(d['revision'])
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
self.path = py.path.local(d['path'])
self.size = self.path.size()
if 'lastchangedrev' in d:
diff --git a/py/_xmlgen.py b/py/_xmlgen.py
--- a/py/_xmlgen.py
+++ b/py/_xmlgen.py
@@ -52,7 +52,7 @@
def unicode(self, indent=2):
l = []
SimpleUnicodeVisitor(l.append, indent).visit(self)
- return "".join(l)
+ return u("").join(l)
def __repr__(self):
name = self.__class__.__name__
@@ -122,11 +122,13 @@
if visitmethod is not None:
break
else:
- visitmethod = self.object
+ visitmethod = self.__object
self.cache[cls] = visitmethod
visitmethod(node)
- def object(self, obj):
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
#self.write(obj)
self.write(escape(unicode(obj)))
@@ -136,7 +138,8 @@
def list(self, obj):
assert id(obj) not in self.visited
self.visited[id(obj)] = 1
- map(self.visit, obj)
+ for elem in obj:
+ self.visit(elem)
def Tag(self, tag):
assert id(tag) not in self.visited
@@ -181,7 +184,11 @@
value = getattr(attrs, name)
if name.endswith('_'):
name = name[:-1]
- return ' %s="%s"' % (name, escape(unicode(value)))
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
def getstyle(self, tag):
""" return attribute list suitable for styling. """
diff --git a/py/bin/_findpy.py b/py/bin/_findpy.py
deleted file mode 100644
--- a/py/bin/_findpy.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-
-#
-# find and import a version of 'py'
-#
-import sys
-import os
-from os.path import dirname as opd, exists, join, basename, abspath
-
-def searchpy(current):
- while 1:
- last = current
- initpy = join(current, '__init__.py')
- if not exists(initpy):
- pydir = join(current, 'py')
- # recognize py-package and ensure it is importable
- if exists(pydir) and exists(join(pydir, '__init__.py')):
- #for p in sys.path:
- # if p == current:
- # return True
- if current != sys.path[0]: # if we are already first, then ok
- sys.stderr.write("inserting into sys.path: %s\n" % current)
- sys.path.insert(0, current)
- return True
- current = opd(current)
- if last == current:
- return False
-
-if not searchpy(abspath(os.curdir)):
- if not searchpy(opd(abspath(sys.argv[0]))):
- if not searchpy(opd(__file__)):
- pass # let's hope it is just on sys.path
-
-import py
-import pytest
-
-if __name__ == '__main__':
- print ("py lib is at %s" % py.__file__)
diff --git a/py/bin/py.test b/py/bin/py.test
deleted file mode 100755
--- a/py/bin/py.test
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from _findpy import pytest
-raise SystemExit(pytest.main())
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -483,7 +483,7 @@
return a_str.strip(' ')
elif n == 1:
return a_str.rstrip(' ')
- else:
+ else:
return a_str.lstrip(' ')
s = a.build_types(f, [int, annmodel.SomeString(no_nul=True)])
assert s.no_nul
@@ -3737,6 +3737,25 @@
s = a.build_types(f, [int])
assert s.listdef.listitem.range_step == 0
+ def test_specialize_arg_memo(self):
+ @objectmodel.specialize.memo()
+ def g(n):
+ return n
+ @objectmodel.specialize.arg(0)
+ def f(i):
+ return g(i)
+ def main(i):
+ if i == 2:
+ return f(i)
+ elif i == 3:
+ return f(i)
+ else:
+ raise NotImplementedError
+
+ a = self.RPythonAnnotator()
+ s = a.build_types(main, [int])
+ assert isinstance(s, annmodel.SomeInteger)
+
def g(n):
return [0,1,2,n]
diff --git a/pypy/doc/project-ideas.rst b/pypy/doc/project-ideas.rst
--- a/pypy/doc/project-ideas.rst
+++ b/pypy/doc/project-ideas.rst
@@ -149,6 +149,22 @@
exported. This would give us a one-size-fits-all generic .so file to be
imported by any application that wants to load .so files :-)
+Optimising cpyext (CPython C-API compatibility layer)
+-----------------------------------------------------
+
+A lot of work has gone into PyPy's implementation of CPython's C-API over
+the last years to let it reach a practical level of compatibility, so that
+C extensions for CPython work on PyPy without major rewrites. However,
+there are still many edges and corner cases where it misbehaves, and it has
+not received any substantial optimisation so far.
+
+The objective of this project is to fix bugs in cpyext and to optimise
+several performance critical parts of it, such as the reference counting
+support and other heavily used C-API functions. The net result would be to
+have CPython extensions run much faster on PyPy than they currently do, or
+to make them work at all if they currently don't. A part of this work would
+be to get cpyext into a shape where it supports running Cython generated
+extensions.
.. _`issue tracker`: http://bugs.pypy.org
.. _`mailing list`: http://mail.python.org/mailman/listinfo/pypy-dev
diff --git a/pypy/doc/stackless.rst b/pypy/doc/stackless.rst
--- a/pypy/doc/stackless.rst
+++ b/pypy/doc/stackless.rst
@@ -199,17 +199,11 @@
The following features (present in some past Stackless version of PyPy)
are for the time being not supported any more:
-* Tasklets and channels (currently ``stackless.py`` seems to import,
- but you have tasklets on top of coroutines on top of greenlets on
- top of continulets on top of stacklets, and it's probably not too
- hard to cut two of these levels by adapting ``stackless.py`` to
- use directly continulets)
-
* Coroutines (could be rewritten at app-level)
-* Pickling and unpickling continulets (*)
-
-* Continuing execution of a continulet in a different thread (*)
+* Continuing execution of a continulet in a different thread
+ (but if it is "simple enough", you can pickle it and unpickle it
+ in the other thread).
* Automatic unlimited stack (must be emulated__ so far)
@@ -217,15 +211,6 @@
.. __: `recursion depth limit`_
-(*) Pickling, as well as changing threads, could be implemented by using
-a "soft" stack switching mode again. We would get either "hard" or
-"soft" switches, similarly to Stackless Python 3rd version: you get a
-"hard" switch (like now) when the C stack contains non-trivial C frames
-to save, and a "soft" switch (like previously) when it contains only
-simple calls from Python to Python. Soft-switched continulets would
-also consume a bit less RAM, and the switch might be a bit faster too
-(unsure about that; what is the Stackless Python experience?).
-
Recursion depth limit
+++++++++++++++++++++
diff --git a/pypy/doc/tool/makecontributor.py b/pypy/doc/tool/makecontributor.py
new file mode 100644
--- /dev/null
+++ b/pypy/doc/tool/makecontributor.py
@@ -0,0 +1,133 @@
+import py
+import sys
+from collections import defaultdict
+import operator
+import re
+import mercurial.localrepo
+import mercurial.ui
+
+ROOT = py.path.local(__file__).join('..', '..', '..', '..')
+author_re = re.compile('(.*) <.*>')
+pair_programming_re = re.compile(r'^\((.*?)\)')
+excluded = set(["pypy", "convert-repo"])
+
+alias = {
+ 'Anders Chrigstrom': ['arre'],
+ 'Antonio Cuni': ['antocuni', 'anto'],
+ 'Armin Rigo': ['arigo', 'arfigo', 'armin', 'arigato'],
+ 'Maciej Fijalkowski': ['fijal'],
+ 'Carl Friedrich Bolz': ['cfbolz', 'cf'],
+ 'Samuele Pedroni': ['pedronis', 'samuele', 'samule'],
+ 'Michael Hudson': ['mwh'],
+ 'Holger Krekel': ['hpk', 'holger krekel', 'holger', 'hufpk'],
+ "Amaury Forgeot d'Arc": ['afa'],
+ 'Alex Gaynor': ['alex', 'agaynor'],
+ 'David Schneider': ['bivab', 'david'],
+ 'Christian Tismer': ['chris', 'christian', 'tismer',
+ 'tismer at christia-wjtqxl.localdomain'],
+ 'Benjamin Peterson': ['benjamin'],
+ 'Hakan Ardo': ['hakan', 'hakanardo'],
+ 'Niklaus Haldimann': ['nik'],
+ 'Alexander Schremmer': ['xoraxax'],
+ 'Anders Hammarquist': ['iko'],
+ 'David Edelsohn': ['edelsoh', 'edelsohn'],
+ 'Niko Matsakis': ['niko'],
+ 'Jakub Gustak': ['jlg'],
+ 'Guido Wesdorp': ['guido'],
+ 'Michael Foord': ['mfoord'],
+ 'Mark Pearse': ['mwp'],
+ 'Toon Verwaest': ['tverwaes'],
+ 'Eric van Riet Paap': ['ericvrp'],
+ 'Jacob Hallen': ['jacob', 'jakob'],
+ 'Anders Lehmann': ['ale', 'anders'],
+ 'Bert Freudenberg': ['bert'],
+ 'Boris Feigin': ['boris', 'boria'],
+ 'Valentino Volonghi': ['valentino', 'dialtone'],
+ 'Aurelien Campeas': ['aurelien', 'aureliene'],
+ 'Adrien Di Mascio': ['adim'],
+ 'Jacek Generowicz': ['Jacek', 'jacek'],
+ 'Jim Hunziker': ['landtuna at gmail.com'],
+ 'Kristjan Valur Jonsson': ['kristjan at kristjan-lp.ccp.ad.local'],
+ 'Laura Creighton': ['lac'],
+ 'Aaron Iles': ['aliles'],
+ 'Ludovic Aubry': ['ludal', 'ludovic'],
+ 'Lukas Diekmann': ['l.diekmann', 'ldiekmann'],
+ 'Matti Picus': ['Matti Picus matti.picus at gmail.com',
+ 'matthp', 'mattip', 'mattip>'],
+ 'Michael Cheng': ['mikefc'],
+ 'Richard Emslie': ['rxe'],
+ 'Roberto De Ioris': ['roberto at goyle'],
+ 'Roberto De Ioris': ['roberto at mrspurr'],
+ 'Sven Hager': ['hager'],
+ 'Tomo Cocoa': ['cocoatomo'],
+ }
+
+alias_map = {}
+for name, nicks in alias.iteritems():
+ for nick in nicks:
+ alias_map[nick] = name
+
+def get_canonical_author(name):
+ match = author_re.match(name)
+ if match:
+ name = match.group(1)
+ return alias_map.get(name, name)
+
+ignored_nicknames = defaultdict(int)
+
+def get_more_authors(log):
+ match = pair_programming_re.match(log)
+ if not match:
+ return set()
+ ignore_words = ['around', 'consulting', 'yesterday', 'for a bit', 'thanks',
+ 'in-progress', 'bits of', 'even a little', 'floating',]
+ sep_words = ['and', ';', '+', '/', 'with special by']
+ nicknames = match.group(1)
+ for word in ignore_words:
+ nicknames = nicknames.replace(word, '')
+ for word in sep_words:
+ nicknames = nicknames.replace(word, ',')
+ nicknames = [nick.strip().lower() for nick in nicknames.split(',')]
+ authors = set()
+ for nickname in nicknames:
+ author = alias_map.get(nickname)
+ if not author:
+ ignored_nicknames[nickname] += 1
+ else:
+ authors.add(author)
+ return authors
+
+def main(show_numbers):
+ ui = mercurial.ui.ui()
+ repo = mercurial.localrepo.localrepository(ui, str(ROOT))
+ authors_count = defaultdict(int)
+ for i in repo:
+ ctx = repo[i]
+ authors = set()
+ authors.add(get_canonical_author(ctx.user()))
+ authors.update(get_more_authors(ctx.description()))
+ for author in authors:
+ if author not in excluded:
+ authors_count[author] += 1
+
+ # uncomment the next lines to get the list of nicknamed which could not be
+ # parsed from commit logs
+ ## items = ignored_nicknames.items()
+ ## items.sort(key=operator.itemgetter(1), reverse=True)
+ ## for name, n in items:
+ ## if show_numbers:
+ ## print '%5d %s' % (n, name)
+ ## else:
+ ## print name
+
+ items = authors_count.items()
+ items.sort(key=operator.itemgetter(1), reverse=True)
+ for name, n in items:
+ if show_numbers:
+ print '%5d %s' % (n, name)
+ else:
+ print name
+
+if __name__ == '__main__':
+ show_numbers = '-n' in sys.argv
+ main(show_numbers)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -296,6 +296,7 @@
self.check_signal_action = None # changed by the signal module
self.user_del_action = UserDelAction(self)
self.frame_trace_action = FrameTraceAction(self)
+ self._code_of_sys_exc_info = None
from pypy.interpreter.pycode import cpython_magic, default_magic
self.our_magic = default_magic
@@ -467,9 +468,9 @@
if name not in modules:
modules.append(name)
- # a bit of custom logic: time2 or rctime take precedence over time
+ # a bit of custom logic: rctime take precedence over time
# XXX this could probably be done as a "requires" in the config
- if ('time2' in modules or 'rctime' in modules) and 'time' in modules:
+ if 'rctime' in modules and 'time' in modules:
modules.remove('time')
if not self.config.objspace.nofaking:
diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py
--- a/pypy/interpreter/executioncontext.py
+++ b/pypy/interpreter/executioncontext.py
@@ -154,6 +154,7 @@
#operationerr.print_detailed_traceback(self.space)
def _convert_exc(self, operr):
+ # Only for the flow object space
return operr
def sys_exc_info(self): # attn: the result is not the wrapped sys.exc_info() !!!
@@ -166,6 +167,11 @@
frame = self.getnextframe_nohidden(frame)
return None
+ def set_sys_exc_info(self, operror):
+ frame = self.gettopframe_nohidden()
+ if frame: # else, the exception goes nowhere and is lost
+ frame.last_exception = operror
+
def settrace(self, w_func):
"""Set the global trace function."""
if self.space.is_w(w_func, self.space.w_None):
diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py
--- a/pypy/interpreter/function.py
+++ b/pypy/interpreter/function.py
@@ -113,6 +113,12 @@
from pypy.interpreter.pycode import PyCode
code = self.getcode() # hook for the jit
+ #
+ if (jit.we_are_jitted() and code is self.space._code_of_sys_exc_info
+ and nargs == 0):
+ from pypy.module.sys.vm import exc_info_direct
+ return exc_info_direct(self.space, frame)
+ #
fast_natural_arity = code.fast_natural_arity
if nargs == fast_natural_arity:
if nargs == 0:
diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py
--- a/pypy/interpreter/gateway.py
+++ b/pypy/interpreter/gateway.py
@@ -874,6 +874,12 @@
fn.add_to_table()
if gateway.as_classmethod:
fn = ClassMethod(space.wrap(fn))
+ #
+ from pypy.module.sys.vm import exc_info
+ if code._bltin is exc_info:
+ assert space._code_of_sys_exc_info is None
+ space._code_of_sys_exc_info = code
+ #
return fn
diff --git a/pypy/interpreter/pyparser/parsestring.py b/pypy/interpreter/pyparser/parsestring.py
--- a/pypy/interpreter/pyparser/parsestring.py
+++ b/pypy/interpreter/pyparser/parsestring.py
@@ -2,34 +2,39 @@
from pypy.interpreter import unicodehelper
from pypy.rlib.rstring import StringBuilder
-def parsestr(space, encoding, s, unicode_literals=False):
- # compiler.transformer.Transformer.decode_literal depends on what
- # might seem like minor details of this function -- changes here
- # must be reflected there.
+def parsestr(space, encoding, s, unicode_literal=False):
+ """Parses a string or unicode literal, and return a wrapped value.
+
+ If encoding=iso8859-1, the source string is also in this encoding.
+ If encoding=None, the source string is ascii only.
+ In other cases, the source string is in utf-8 encoding.
+
+ When a bytes string is returned, it will be encoded with the
+ original encoding.
+
+ Yes, it's very inefficient.
+ Yes, CPython has very similar code.
+ """
# we use ps as "pointer to s"
# q is the virtual last char index of the string
ps = 0
quote = s[ps]
rawmode = False
- unicode = unicode_literals
# string decoration handling
- o = ord(quote)
- isalpha = (o>=97 and o<=122) or (o>=65 and o<=90)
- if isalpha or quote == '_':
- if quote == 'b' or quote == 'B':
- ps += 1
- quote = s[ps]
- unicode = False
- elif quote == 'u' or quote == 'U':
- ps += 1
- quote = s[ps]
- unicode = True
- if quote == 'r' or quote == 'R':
- ps += 1
- quote = s[ps]
- rawmode = True
+ if quote == 'b' or quote == 'B':
+ ps += 1
+ quote = s[ps]
+ unicode_literal = False
+ elif quote == 'u' or quote == 'U':
+ ps += 1
+ quote = s[ps]
+ unicode_literal = True
+ if quote == 'r' or quote == 'R':
+ ps += 1
+ quote = s[ps]
+ rawmode = True
if quote != "'" and quote != '"':
raise_app_valueerror(space,
'Internal error: parser passed unquoted literal')
@@ -46,21 +51,28 @@
'unmatched triple quotes in literal')
q -= 2
- if unicode: # XXX Py_UnicodeFlag is ignored for now
+ if unicode_literal: # XXX Py_UnicodeFlag is ignored for now
if encoding is None or encoding == "iso-8859-1":
+ # 'unicode_escape' expects latin-1 bytes, string is ready.
buf = s
bufp = ps
bufq = q
u = None
else:
- # "\XX" may become "\u005c\uHHLL" (12 bytes)
+ # String is utf8-encoded, but 'unicode_escape' expects
+ # latin-1; So multibyte sequences must be escaped.
lis = [] # using a list to assemble the value
end = q
+ # Worst case: "\XX" may become "\u005c\uHHLL" (12 bytes)
while ps < end:
if s[ps] == '\\':
lis.append(s[ps])
ps += 1
if ord(s[ps]) & 0x80:
+ # A multibyte sequence will follow, it will be
+ # escaped like \u1234. To avoid confusion with
+ # the backslash we just wrote, we emit "\u005c"
+ # instead.
lis.append("u005c")
if ord(s[ps]) & 0x80: # XXX inefficient
w, ps = decode_utf8(space, s, ps, end, "utf-16-be")
@@ -86,13 +98,11 @@
need_encoding = (encoding is not None and
encoding != "utf-8" and encoding != "iso-8859-1")
- # XXX add strchr like interface to rtyper
assert 0 <= ps <= q
substr = s[ps : q]
if rawmode or '\\' not in s[ps:]:
if need_encoding:
w_u = space.wrap(unicodehelper.PyUnicode_DecodeUTF8(space, substr))
- #w_v = space.wrap(space.unwrap(w_u).encode(encoding)) this works
w_v = unicodehelper.PyUnicode_AsEncodedString(space, w_u, space.wrap(encoding))
return w_v
else:
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py
--- a/pypy/interpreter/typedef.py
+++ b/pypy/interpreter/typedef.py
@@ -321,7 +321,7 @@
def user_setup(self, space, w_subtype):
self.w__dict__ = space.newdict(
- instance=True, classofinstance=w_subtype)
+ instance=True)
base_user_setup(self, space, w_subtype)
def setclass(self, space, w_subtype):
diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py
--- a/pypy/jit/backend/llgraph/llimpl.py
+++ b/pypy/jit/backend/llgraph/llimpl.py
@@ -1809,6 +1809,7 @@
if specialize_as_constant:
def specialize_call(self, hop):
llvalue = func(hop.args_s[0].const)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.typeOf(llvalue), llvalue)
else:
# specialize as direct_call
@@ -1825,6 +1826,7 @@
sm = ootype._static_meth(FUNCTYPE, _name=func.__name__, _callable=func)
cfunc = hop.inputconst(FUNCTYPE, sm)
args_v = hop.inputargs(*hop.args_r)
+ hop.exception_is_here()
return hop.genop('direct_call', [cfunc] + args_v, hop.r_result)
diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py
--- a/pypy/jit/backend/test/runner_test.py
+++ b/pypy/jit/backend/test/runner_test.py
@@ -27,6 +27,12 @@
def constfloat(x):
return ConstFloat(longlong.getfloatstorage(x))
+def boxlonglong(ll):
+ if longlong.is_64_bit:
+ return BoxInt(ll)
+ else:
+ return BoxFloat(ll)
+
class Runner(object):
@@ -1782,6 +1788,11 @@
[boxfloat(2.5)], t).value
assert res == longlong2float.float2longlong(2.5)
+ bytes = longlong2float.float2longlong(2.5)
+ res = self.execute_operation(rop.CONVERT_LONGLONG_BYTES_TO_FLOAT,
+ [boxlonglong(res)], 'float').value
+ assert longlong.getrealfloat(res) == 2.5
+
def test_ooops_non_gc(self):
x = lltype.malloc(lltype.Struct('x'), flavor='raw')
v = heaptracker.adr2int(llmemory.cast_ptr_to_adr(x))
diff --git a/pypy/jit/backend/test/test_random.py b/pypy/jit/backend/test/test_random.py
--- a/pypy/jit/backend/test/test_random.py
+++ b/pypy/jit/backend/test/test_random.py
@@ -328,6 +328,15 @@
def produce_into(self, builder, r):
self.put(builder, [r.choice(builder.intvars)])
+class CastLongLongToFloatOperation(AbstractFloatOperation):
+ def produce_into(self, builder, r):
+ if longlong.is_64_bit:
+ self.put(builder, [r.choice(builder.intvars)])
+ else:
+ if not builder.floatvars:
+ raise CannotProduceOperation
+ self.put(builder, [r.choice(builder.floatvars)])
+
class CastFloatToIntOperation(AbstractFloatOperation):
def produce_into(self, builder, r):
if not builder.floatvars:
@@ -450,6 +459,7 @@
OPERATIONS.append(CastFloatToIntOperation(rop.CAST_FLOAT_TO_INT))
OPERATIONS.append(CastIntToFloatOperation(rop.CAST_INT_TO_FLOAT))
OPERATIONS.append(CastFloatToIntOperation(rop.CONVERT_FLOAT_BYTES_TO_LONGLONG))
+OPERATIONS.append(CastLongLongToFloatOperation(rop.CONVERT_LONGLONG_BYTES_TO_FLOAT))
OperationBuilder.OPERATIONS = OPERATIONS
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py
--- a/pypy/jit/backend/x86/assembler.py
+++ b/pypy/jit/backend/x86/assembler.py
@@ -1251,6 +1251,15 @@
else:
self.mov(loc0, resloc)
+ def genop_convert_longlong_bytes_to_float(self, op, arglocs, resloc):
+ loc0, = arglocs
+ if longlong.is_64_bit:
+ assert isinstance(resloc, RegLoc)
+ assert isinstance(loc0, RegLoc)
+ self.mc.MOVD(resloc, loc0)
+ else:
+ self.mov(loc0, resloc)
+
def genop_guard_int_is_true(self, op, guard_op, guard_token, arglocs, resloc):
guard_opnum = guard_op.getopnum()
self.mc.CMP(arglocs[0], imm0)
diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py
--- a/pypy/jit/backend/x86/regalloc.py
+++ b/pypy/jit/backend/x86/regalloc.py
@@ -719,7 +719,20 @@
loc0 = self.xrm.loc(arg0)
loc1 = self.xrm.force_allocate_reg(op.result, forbidden_vars=[arg0])
self.Perform(op, [loc0], loc1)
- self.xrm.possibly_free_var(op.getarg(0))
+ self.xrm.possibly_free_var(arg0)
+
+ def consider_convert_longlong_bytes_to_float(self, op):
+ if longlong.is_64_bit:
+ loc0 = self.rm.make_sure_var_in_reg(op.getarg(0))
+ loc1 = self.xrm.force_allocate_reg(op.result)
+ self.Perform(op, [loc0], loc1)
+ self.rm.possibly_free_var(op.getarg(0))
+ else:
+ arg0 = op.getarg(0)
+ loc0 = self.xrm.make_sure_var_in_reg(arg0)
+ loc1 = self.xrm.force_allocate_reg(op.result, forbidden_vars=[arg0])
+ self.Perform(op, [loc0], loc1)
+ self.xrm.possibly_free_var(arg0)
def _consider_llong_binop_xx(self, op):
# must force both arguments into xmm registers, because we don't
diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py
--- a/pypy/jit/codewriter/jtransform.py
+++ b/pypy/jit/codewriter/jtransform.py
@@ -295,6 +295,7 @@
return op
rewrite_op_convert_float_bytes_to_longlong = _noop_rewrite
+ rewrite_op_convert_longlong_bytes_to_float = _noop_rewrite
# ----------
# Various kinds of calls
diff --git a/pypy/jit/codewriter/test/test_flatten.py b/pypy/jit/codewriter/test/test_flatten.py
--- a/pypy/jit/codewriter/test/test_flatten.py
+++ b/pypy/jit/codewriter/test/test_flatten.py
@@ -968,20 +968,22 @@
int_return %i2
""", transform=True)
- def test_convert_float_bytes_to_int(self):
- from pypy.rlib.longlong2float import float2longlong
+ def test_convert_float_bytes(self):
+ from pypy.rlib.longlong2float import float2longlong, longlong2float
def f(x):
- return float2longlong(x)
+ ll = float2longlong(x)
+ return longlong2float(ll)
if longlong.is_64_bit:
- result_var = "%i0"
- return_op = "int_return"
+ tmp_var = "%i0"
+ result_var = "%f1"
else:
- result_var = "%f1"
- return_op = "float_return"
+ tmp_var = "%f1"
+ result_var = "%f2"
self.encoding_test(f, [25.0], """
- convert_float_bytes_to_longlong %%f0 -> %(result_var)s
- %(return_op)s %(result_var)s
- """ % {"result_var": result_var, "return_op": return_op})
+ convert_float_bytes_to_longlong %%f0 -> %(tmp_var)s
+ convert_longlong_bytes_to_float %(tmp_var)s -> %(result_var)s
+ float_return %(result_var)s
+ """ % {"result_var": result_var, "tmp_var": tmp_var}, transform=True)
def check_force_cast(FROM, TO, operations, value):
diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py
--- a/pypy/jit/metainterp/blackhole.py
+++ b/pypy/jit/metainterp/blackhole.py
@@ -672,6 +672,11 @@
a = longlong.getrealfloat(a)
return longlong2float.float2longlong(a)
+ @arguments(LONGLONG_TYPECODE, returns="f")
+ def bhimpl_convert_longlong_bytes_to_float(a):
+ a = longlong2float.longlong2float(a)
+ return longlong.getfloatstorage(a)
+
# ----------
# control flow operations
diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py
--- a/pypy/jit/metainterp/pyjitpl.py
+++ b/pypy/jit/metainterp/pyjitpl.py
@@ -224,6 +224,7 @@
'float_neg', 'float_abs',
'cast_ptr_to_int', 'cast_int_to_ptr',
'convert_float_bytes_to_longlong',
+ 'convert_longlong_bytes_to_float',
]:
exec py.code.Source('''
@arguments("box")
diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py
--- a/pypy/jit/metainterp/resoperation.py
+++ b/pypy/jit/metainterp/resoperation.py
@@ -420,6 +420,7 @@
'CAST_FLOAT_TO_SINGLEFLOAT/1',
'CAST_SINGLEFLOAT_TO_FLOAT/1',
'CONVERT_FLOAT_BYTES_TO_LONGLONG/1',
+ 'CONVERT_LONGLONG_BYTES_TO_FLOAT/1',
#
'INT_LT/2b',
'INT_LE/2b',
diff --git a/pypy/jit/metainterp/test/test_ajit.py b/pypy/jit/metainterp/test/test_ajit.py
--- a/pypy/jit/metainterp/test/test_ajit.py
+++ b/pypy/jit/metainterp/test/test_ajit.py
@@ -1,3 +1,4 @@
+import math
import sys
import py
@@ -15,7 +16,7 @@
loop_invariant, elidable, promote, jit_debug, assert_green,
AssertGreenFailed, unroll_safe, current_trace_length, look_inside_iff,
isconstant, isvirtual, promote_string, set_param, record_known_class)
-from pypy.rlib.longlong2float import float2longlong
+from pypy.rlib.longlong2float import float2longlong, longlong2float
from pypy.rlib.rarithmetic import ovfcheck, is_valid_int
from pypy.rpython.lltypesystem import lltype, llmemory, rffi
from pypy.rpython.ootypesystem import ootype
@@ -3795,15 +3796,15 @@
res = self.interp_operations(g, [1])
assert res == 3
- def test_float2longlong(self):
+ def test_float_bytes(self):
def f(n):
- return float2longlong(n)
+ ll = float2longlong(n)
+ return longlong2float(ll)
for x in [2.5, float("nan"), -2.5, float("inf")]:
# There are tests elsewhere to verify the correctness of this.
- expected = float2longlong(x)
res = self.interp_operations(f, [x])
- assert longlong.getfloatstorage(res) == expected
+ assert res == x or math.isnan(x) and math.isnan(res)
class TestLLtype(BaseLLtypeTests, LLJitMixin):
diff --git a/pypy/module/_ssl/interp_ssl.py b/pypy/module/_ssl/interp_ssl.py
--- a/pypy/module/_ssl/interp_ssl.py
+++ b/pypy/module/_ssl/interp_ssl.py
@@ -432,7 +432,8 @@
raise _ssl_seterror(self.space, self, length)
try:
# this is actually an immutable bytes sequence
- return self.space.wrap(rffi.charp2str(buf_ptr[0]))
+ return self.space.wrap(rffi.charpsize2str(buf_ptr[0],
+ length))
finally:
libssl_OPENSSL_free(buf_ptr[0])
else:
diff --git a/pypy/module/array/interp_array.py b/pypy/module/array/interp_array.py
--- a/pypy/module/array/interp_array.py
+++ b/pypy/module/array/interp_array.py
@@ -11,7 +11,7 @@
from pypy.objspace.std.register_all import register_all
from pypy.rlib.rarithmetic import ovfcheck
from pypy.rlib.unroll import unrolling_iterable
-from pypy.rlib.objectmodel import specialize
+from pypy.rlib.objectmodel import specialize, keepalive_until_here
from pypy.rpython.lltypesystem import lltype, rffi
@@ -145,18 +145,24 @@
unroll_typecodes = unrolling_iterable(types.keys())
class ArrayBuffer(RWBuffer):
- def __init__(self, data, bytes):
- self.data = data
- self.len = bytes
+ def __init__(self, array):
+ self.array = array
def getlength(self):
- return self.len
+ return self.array.len * self.array.itemsize
def getitem(self, index):
- return self.data[index]
+ array = self.array
+ data = array._charbuf_start()
+ char = data[index]
+ array._charbuf_stop()
+ return char
def setitem(self, index, char):
- self.data[index] = char
+ array = self.array
+ data = array._charbuf_start()
+ data[index] = char
+ array._charbuf_stop()
def make_array(mytype):
@@ -278,9 +284,10 @@
oldlen = self.len
new = len(s) / mytype.bytes
self.setlen(oldlen + new)
- cbuf = self.charbuf()
+ cbuf = self._charbuf_start()
for i in range(len(s)):
cbuf[oldlen * mytype.bytes + i] = s[i]
+ self._charbuf_stop()
def fromlist(self, w_lst):
s = self.len
@@ -310,8 +317,11 @@
else:
self.fromsequence(w_iterable)
- def charbuf(self):
- return rffi.cast(rffi.CCHARP, self.buffer)
+ def _charbuf_start(self):
+ return rffi.cast(rffi.CCHARP, self.buffer)
+
+ def _charbuf_stop(self):
+ keepalive_until_here(self)
def w_getitem(self, space, idx):
item = self.buffer[idx]
@@ -530,8 +540,10 @@
self.fromstring(space.str_w(w_s))
def array_tostring__Array(space, self):
- cbuf = self.charbuf()
- return self.space.wrap(rffi.charpsize2str(cbuf, self.len * mytype.bytes))
+ cbuf = self._charbuf_start()
+ s = rffi.charpsize2str(cbuf, self.len * mytype.bytes)
+ self._charbuf_stop()
+ return self.space.wrap(s)
def array_fromfile__Array_ANY_ANY(space, self, w_f, w_n):
if not isinstance(w_f, W_File):
@@ -613,8 +625,7 @@
# Misc methods
def buffer__Array(space, self):
- b = ArrayBuffer(self.charbuf(), self.len * mytype.bytes)
- return space.wrap(b)
+ return space.wrap(ArrayBuffer(self))
def array_buffer_info__Array(space, self):
w_ptr = space.wrap(rffi.cast(lltype.Unsigned, self.buffer))
@@ -649,7 +660,7 @@
raise OperationError(space.w_RuntimeError, space.wrap(msg))
if self.len == 0:
return
- bytes = self.charbuf()
+ bytes = self._charbuf_start()
tmp = [bytes[0]] * mytype.bytes
for start in range(0, self.len * mytype.bytes, mytype.bytes):
stop = start + mytype.bytes - 1
@@ -657,6 +668,7 @@
tmp[i] = bytes[start + i]
for i in range(mytype.bytes):
bytes[stop - i] = tmp[i]
+ self._charbuf_stop()
def repr__Array(space, self):
if self.len == 0:
diff --git a/pypy/module/array/test/test_array.py b/pypy/module/array/test/test_array.py
--- a/pypy/module/array/test/test_array.py
+++ b/pypy/module/array/test/test_array.py
@@ -433,7 +433,25 @@
a = self.array('h', 'Hi')
buf = buffer(a)
assert buf[1] == 'i'
- #raises(TypeError, buf.__setitem__, 1, 'o')
+
+ def test_buffer_write(self):
+ a = self.array('c', 'hello')
+ buf = buffer(a)
+ print repr(buf)
+ try:
+ buf[3] = 'L'
+ except TypeError:
+ skip("buffer(array) returns a read-only buffer on CPython")
+ assert a.tostring() == 'helLo'
+
+ def test_buffer_keepalive(self):
+ buf = buffer(self.array('c', 'text'))
+ assert buf[2] == 'x'
+ #
+ a = self.array('c', 'foobarbaz')
+ buf = buffer(a)
+ a.fromstring('some extra text')
+ assert buf[:] == 'foobarbazsome extra text'
def test_list_methods(self):
assert repr(self.array('i')) == "array('i')"
diff --git a/pypy/module/cpyext/bufferobject.py b/pypy/module/cpyext/bufferobject.py
--- a/pypy/module/cpyext/bufferobject.py
+++ b/pypy/module/cpyext/bufferobject.py
@@ -2,8 +2,10 @@
from pypy.module.cpyext.api import (
cpython_api, Py_ssize_t, cpython_struct, bootstrap_function,
PyObjectFields, PyObject)
-from pypy.module.cpyext.pyobject import make_typedescr, Py_DecRef
+from pypy.module.cpyext.pyobject import make_typedescr, Py_DecRef, make_ref
from pypy.interpreter.buffer import Buffer, StringBuffer, SubBuffer
+from pypy.interpreter.error import OperationError
+from pypy.module.array.interp_array import ArrayBuffer
PyBufferObjectStruct = lltype.ForwardReference()
@@ -41,26 +43,38 @@
py_buf.c_b_offset = w_obj.offset
w_obj = w_obj.buffer
+ # If w_obj already allocated a fixed buffer, use it, and keep a
+ # reference to w_obj.
+ # Otherwise, b_base stays NULL, and we own the b_ptr.
+
if isinstance(w_obj, StringBuffer):
- py_buf.c_b_base = rffi.cast(PyObject, 0) # space.wrap(w_obj.value)
- py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, rffi.str2charp(w_obj.as_str()))
+ py_buf.c_b_base = lltype.nullptr(PyObject.TO)
+ py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, rffi.str2charp(w_obj.value))
+ py_buf.c_b_size = w_obj.getlength()
+ elif isinstance(w_obj, ArrayBuffer):
+ w_base = w_obj.array
+ py_buf.c_b_base = make_ref(space, w_base)
+ py_buf.c_b_ptr = rffi.cast(rffi.VOIDP, w_obj.array._charbuf_start())
py_buf.c_b_size = w_obj.getlength()
else:
- raise Exception("Fail fail fail fail fail")
+ raise OperationError(space.w_NotImplementedError, space.wrap(
+ "buffer flavor not supported"))
def buffer_realize(space, py_obj):
"""
Creates the buffer in the PyPy interpreter from a cpyext representation.
"""
- raise Exception("realize fail fail fail")
-
+ raise OperationError(space.w_NotImplementedError, space.wrap(
+ "Don't know how to realize a buffer"))
@cpython_api([PyObject], lltype.Void, external=False)
def buffer_dealloc(space, py_obj):
py_buf = rffi.cast(PyBufferObject, py_obj)
- Py_DecRef(space, py_buf.c_b_base)
- rffi.free_charp(rffi.cast(rffi.CCHARP, py_buf.c_b_ptr))
+ if py_buf.c_b_base:
+ Py_DecRef(space, py_buf.c_b_base)
+ else:
+ rffi.free_charp(rffi.cast(rffi.CCHARP, py_buf.c_b_ptr))
from pypy.module.cpyext.object import PyObject_dealloc
PyObject_dealloc(space, py_obj)
diff --git a/pypy/module/cpyext/include/patchlevel.h b/pypy/module/cpyext/include/patchlevel.h
--- a/pypy/module/cpyext/include/patchlevel.h
+++ b/pypy/module/cpyext/include/patchlevel.h
@@ -29,7 +29,7 @@
#define PY_VERSION "2.7.2"
/* PyPy version as a string */
-#define PYPY_VERSION "1.8.1"
+#define PYPY_VERSION "1.9.1"
/* Subversion Revision number of this file (not of the repository).
* Empty since Mercurial migration. */
diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py
--- a/pypy/module/cpyext/pyerrors.py
+++ b/pypy/module/cpyext/pyerrors.py
@@ -2,6 +2,7 @@
from pypy.rpython.lltypesystem import rffi, lltype
from pypy.interpreter.error import OperationError
+from pypy.interpreter import pytraceback
from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL, CONST_STRING
from pypy.module.exceptions.interp_exceptions import W_RuntimeWarning
from pypy.module.cpyext.pyobject import (
@@ -315,3 +316,65 @@
It may be called without holding the interpreter lock."""
space.check_signal_action.set_interrupt()
+@cpython_api([PyObjectP, PyObjectP, PyObjectP], lltype.Void)
+def PyErr_GetExcInfo(space, ptype, pvalue, ptraceback):
+ """---Cython extension---
+
+ Retrieve the exception info, as known from ``sys.exc_info()``. This
+ refers to an exception that was already caught, not to an exception
+ that was freshly raised. Returns new references for the three
+ objects, any of which may be *NULL*. Does not modify the exception
+ info state.
+
+ .. note::
+
+ This function is not normally used by code that wants to handle
+ exceptions. Rather, it can be used when code needs to save and
+ restore the exception state temporarily. Use
+ :c:func:`PyErr_SetExcInfo` to restore or clear the exception
+ state.
+ """
+ ec = space.getexecutioncontext()
+ operror = ec.sys_exc_info()
+ if operror:
+ ptype[0] = make_ref(space, operror.w_type)
+ pvalue[0] = make_ref(space, operror.get_w_value(space))
+ ptraceback[0] = make_ref(space, space.wrap(operror.get_traceback()))
+ else:
+ ptype[0] = lltype.nullptr(PyObject.TO)
+ pvalue[0] = lltype.nullptr(PyObject.TO)
+ ptraceback[0] = lltype.nullptr(PyObject.TO)
+
+@cpython_api([PyObject, PyObject, PyObject], lltype.Void)
+def PyErr_SetExcInfo(space, w_type, w_value, w_traceback):
+ """---Cython extension---
+
+ Set the exception info, as known from ``sys.exc_info()``. This refers
+ to an exception that was already caught, not to an exception that was
+ freshly raised. This function steals the references of the arguments.
+ To clear the exception state, pass *NULL* for all three arguments.
+ For general rules about the three arguments, see :c:func:`PyErr_Restore`.
+
+ .. note::
+
+ This function is not normally used by code that wants to handle
+ exceptions. Rather, it can be used when code needs to save and
+ restore the exception state temporarily. Use
+ :c:func:`PyErr_GetExcInfo` to read the exception state.
+ """
+ if w_value is None or space.is_w(w_value, space.w_None):
+ operror = None
+ else:
+ tb = None
+ if w_traceback is not None:
+ try:
+ tb = pytraceback.check_traceback(space, w_traceback, '?')
+ except OperationError: # catch and ignore bogus objects
+ pass
+ operror = OperationError(w_type, w_value, tb)
+ #
+ ec = space.getexecutioncontext()
+ ec.set_sys_exc_info(operror)
+ Py_DecRef(space, w_type)
+ Py_DecRef(space, w_value)
+ Py_DecRef(space, w_traceback)
diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py
--- a/pypy/module/cpyext/slotdefs.py
+++ b/pypy/module/cpyext/slotdefs.py
@@ -167,14 +167,16 @@
if rffi.cast(lltype.Signed, res) == -1:
space.fromcache(State).check_and_raise_exception(always=True)
+# Warning, confusing function name (like CPython). Used only for sq_contains.
def wrap_objobjproc(space, w_self, w_args, func):
func_target = rffi.cast(objobjproc, func)
check_num_args(space, w_args, 1)
w_value, = space.fixedview(w_args)
res = generic_cpy_call(space, func_target, w_self, w_value)
- if rffi.cast(lltype.Signed, res) == -1:
+ res = rffi.cast(lltype.Signed, res)
+ if res == -1:
space.fromcache(State).check_and_raise_exception(always=True)
- return space.wrap(res)
+ return space.wrap(bool(res))
def wrap_objobjargproc(space, w_self, w_args, func):
func_target = rffi.cast(objobjargproc, func)
@@ -183,7 +185,7 @@
res = generic_cpy_call(space, func_target, w_self, w_key, w_value)
if rffi.cast(lltype.Signed, res) == -1:
space.fromcache(State).check_and_raise_exception(always=True)
- return space.wrap(res)
+ return space.w_None
def wrap_delitem(space, w_self, w_args, func):
func_target = rffi.cast(objobjargproc, func)
diff --git a/pypy/module/cpyext/test/foo.c b/pypy/module/cpyext/test/foo.c
--- a/pypy/module/cpyext/test/foo.c
+++ b/pypy/module/cpyext/test/foo.c
@@ -176,6 +176,8 @@
{NULL} /* Sentinel */
};
+PyDoc_STRVAR(foo_doc, "foo is for testing.");
+
static PyTypeObject footype = {
PyVarObject_HEAD_INIT(NULL, 0)
"foo.foo", /*tp_name*/
@@ -198,7 +200,7 @@
(setattrofunc)foo_setattro, /*tp_setattro*/
0, /*tp_as_buffer*/
Py_TPFLAGS_DEFAULT, /*tp_flags*/
- 0, /*tp_doc*/
+ foo_doc, /*tp_doc*/
0, /*tp_traverse*/
0, /*tp_clear*/
0, /*tp_richcompare*/
diff --git a/pypy/module/cpyext/test/test_bufferobject.py b/pypy/module/cpyext/test/test_bufferobject.py
--- a/pypy/module/cpyext/test/test_bufferobject.py
+++ b/pypy/module/cpyext/test/test_bufferobject.py
@@ -48,3 +48,17 @@
])
b = module.buffer_new()
raises(AttributeError, getattr, b, 'x')
+
+ def test_array_buffer(self):
+ module = self.import_extension('foo', [
+ ("roundtrip", "METH_O",
+ """
+ PyBufferObject *buf = (PyBufferObject *)args;
+ return PyString_FromStringAndSize(buf->b_ptr, buf->b_size);
+ """),
+ ])
+ import array
+ a = array.array('c', 'text')
+ b = buffer(a)
+ assert module.roundtrip(b) == 'text'
+
diff --git a/pypy/module/cpyext/test/test_pyerrors.py b/pypy/module/cpyext/test/test_pyerrors.py
--- a/pypy/module/cpyext/test/test_pyerrors.py
+++ b/pypy/module/cpyext/test/test_pyerrors.py
@@ -218,3 +218,51 @@
assert e.filename == "blyf"
assert e.errno == errno.EBADF
assert e.strerror == os.strerror(errno.EBADF)
+
+ def test_GetSetExcInfo(self):
+ import sys
+ module = self.import_extension('foo', [
+ ("getset_exc_info", "METH_VARARGS",
+ r'''
+ PyObject *type, *val, *tb;
+ PyObject *new_type, *new_val, *new_tb;
+ PyObject *result;
+
+ if (!PyArg_ParseTuple(args, "OOO", &new_type, &new_val, &new_tb))
+ return NULL;
+
+ PyErr_GetExcInfo(&type, &val, &tb);
+
+ Py_INCREF(new_type);
+ Py_INCREF(new_val);
+ Py_INCREF(new_tb);
+ PyErr_SetExcInfo(new_type, new_val, new_tb);
+
+ result = Py_BuildValue("OOO",
+ type ? type : Py_None,
+ val ? val : Py_None,
+ tb ? tb : Py_None);
+ Py_XDECREF(type);
+ Py_XDECREF(val);
+ Py_XDECREF(tb);
+ return result;
+ '''
+ ),
+ ])
+ try:
+ raise ValueError(5)
+ except ValueError, old_exc:
+ new_exc = TypeError("TEST")
+ orig_sys_exc_info = sys.exc_info()
+ orig_exc_info = module.getset_exc_info(new_exc.__class__,
+ new_exc, None)
+ new_sys_exc_info = sys.exc_info()
+ new_exc_info = module.getset_exc_info(*orig_exc_info)
+ reset_sys_exc_info = sys.exc_info()
+
+ assert orig_exc_info[0] is old_exc.__class__
+ assert orig_exc_info[1] is old_exc
+ assert orig_exc_info == orig_sys_exc_info
+ assert orig_exc_info == reset_sys_exc_info
+ assert new_exc_info == (new_exc.__class__, new_exc, None)
+ assert new_exc_info == new_sys_exc_info
diff --git a/pypy/module/cpyext/test/test_typeobject.py b/pypy/module/cpyext/test/test_typeobject.py
--- a/pypy/module/cpyext/test/test_typeobject.py
+++ b/pypy/module/cpyext/test/test_typeobject.py
@@ -20,6 +20,7 @@
assert type(obj) is module.fooType
print "type of obj has type", type(type(obj))
print "type of type of obj has type", type(type(type(obj)))
+ assert module.fooType.__doc__ == "foo is for testing."
def test_typeobject_method_descriptor(self):
module = self.import_module(name='foo')
@@ -414,8 +415,11 @@
static int
mp_ass_subscript(PyObject *self, PyObject *key, PyObject *value)
{
- PyErr_SetNone(PyExc_ZeroDivisionError);
- return -1;
+ if (PyInt_Check(key)) {
+ PyErr_SetNone(PyExc_ZeroDivisionError);
+ return -1;
+ }
+ return 0;
}
PyMappingMethods tp_as_mapping;
static PyTypeObject Foo_Type = {
@@ -425,6 +429,36 @@
''')
obj = module.new_obj()
raises(ZeroDivisionError, obj.__setitem__, 5, None)
+ res = obj.__setitem__('foo', None)
+ assert res is None
+
+ def test_sq_contains(self):
+ module = self.import_extension('foo', [
+ ("new_obj", "METH_NOARGS",
+ '''
+ PyObject *obj;
+ Foo_Type.tp_as_sequence = &tp_as_sequence;
+ tp_as_sequence.sq_contains = sq_contains;
+ if (PyType_Ready(&Foo_Type) < 0) return NULL;
+ obj = PyObject_New(PyObject, &Foo_Type);
+ return obj;
+ '''
+ )],
+ '''
+ static int
+ sq_contains(PyObject *self, PyObject *value)
+ {
+ return 42;
+ }
+ PySequenceMethods tp_as_sequence;
+ static PyTypeObject Foo_Type = {
+ PyVarObject_HEAD_INIT(NULL, 0)
+ "foo.foo",
+ };
+ ''')
+ obj = module.new_obj()
+ res = "foo" in obj
+ assert res is True
def test_tp_iter(self):
module = self.import_extension('foo', [
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py
--- a/pypy/module/cpyext/typeobject.py
+++ b/pypy/module/cpyext/typeobject.py
@@ -307,6 +307,8 @@
if not space.is_true(space.issubtype(self, space.w_type)):
self.flag_cpytype = True
self.flag_heaptype = False
+ if pto.c_tp_doc:
+ self.w_doc = space.wrap(rffi.charp2str(pto.c_tp_doc))
@bootstrap_function
def init_typeobject(space):
@@ -624,7 +626,6 @@
Creates an interpreter type from a PyTypeObject structure.
"""
# missing:
- # setting __doc__ if not defined and tp_doc defined
# inheriting tp_as_* slots
# unsupported:
# tp_mro, tp_subclasses
diff --git a/pypy/module/micronumpy/__init__.py b/pypy/module/micronumpy/__init__.py
--- a/pypy/module/micronumpy/__init__.py
+++ b/pypy/module/micronumpy/__init__.py
@@ -29,6 +29,7 @@
'flatiter': 'interp_numarray.W_FlatIterator',
'isna': 'interp_numarray.isna',
'concatenate': 'interp_numarray.concatenate',
+ 'repeat': 'interp_numarray.repeat',
'set_string_function': 'appbridge.set_string_function',
@@ -99,9 +100,12 @@
("exp2", "exp2"),
("expm1", "expm1"),
("fabs", "fabs"),
+ ("fmax", "fmax"),
+ ("fmin", "fmin"),
("fmod", "fmod"),
("floor", "floor"),
("ceil", "ceil"),
+ ("trunc", "trunc"),
("greater", "greater"),
("greater_equal", "greater_equal"),
("less", "less"),
@@ -122,12 +126,16 @@
("sinh", "sinh"),
("subtract", "subtract"),
('sqrt', 'sqrt'),
+ ('square', 'square'),
("tan", "tan"),
("tanh", "tanh"),
('bitwise_and', 'bitwise_and'),
('bitwise_or', 'bitwise_or'),
('bitwise_xor', 'bitwise_xor'),
('bitwise_not', 'invert'),
+ ('left_shift', 'left_shift'),
+ ('right_shift', 'right_shift'),
+ ('invert', 'invert'),
('isnan', 'isnan'),
('isinf', 'isinf'),
('isneginf', 'isneginf'),
diff --git a/pypy/module/micronumpy/interp_numarray.py b/pypy/module/micronumpy/interp_numarray.py
--- a/pypy/module/micronumpy/interp_numarray.py
+++ b/pypy/module/micronumpy/interp_numarray.py
@@ -673,6 +673,10 @@
def compute_first_step(self, sig, frame):
pass
+ @unwrap_spec(repeats=int)
+ def descr_repeat(self, space, repeats, w_axis=None):
+ return repeat(space, self, repeats, w_axis)
+
def convert_to_array(space, w_obj):
if isinstance(w_obj, BaseArray):
return w_obj
@@ -1261,6 +1265,31 @@
return convert_to_array(space, w_obj2).descr_dot(space, w_arr)
return w_arr.descr_dot(space, w_obj2)
+ at unwrap_spec(repeats=int)
+def repeat(space, w_arr, repeats, w_axis=None):
+ arr = convert_to_array(space, w_arr)
+ if space.is_w(w_axis, space.w_None):
+ arr = arr.descr_flatten(space).get_concrete()
+ orig_size = arr.shape[0]
+ shape = [arr.shape[0] * repeats]
+ res = W_NDimArray(shape, arr.find_dtype())
+ for i in range(repeats):
+ Chunks([Chunk(i, shape[0] - repeats + i, repeats,
+ orig_size)]).apply(res).setslice(space, arr)
+ else:
+ arr = arr.get_concrete()
+ axis = space.int_w(w_axis)
+ shape = arr.shape[:]
+ chunks = [Chunk(0, i, 1, i) for i in shape]
+ orig_size = shape[axis]
+ shape[axis] *= repeats
+ res = W_NDimArray(shape, arr.find_dtype())
+ for i in range(repeats):
+ chunks[axis] = Chunk(i, shape[axis] - repeats + i, repeats,
+ orig_size)
+ Chunks(chunks).apply(res).setslice(space, arr)
+ return res
+
@unwrap_spec(axis=int)
def concatenate(space, w_args, axis=0):
args_w = space.listview(w_args)
@@ -1386,6 +1415,7 @@
tolist = interp2app(BaseArray.descr_tolist),
take = interp2app(BaseArray.descr_take),
compress = interp2app(BaseArray.descr_compress),
+ repeat = interp2app(BaseArray.descr_repeat),
)
diff --git a/pypy/module/micronumpy/interp_ufuncs.py b/pypy/module/micronumpy/interp_ufuncs.py
--- a/pypy/module/micronumpy/interp_ufuncs.py
+++ b/pypy/module/micronumpy/interp_ufuncs.py
@@ -3,9 +3,11 @@
from pypy.interpreter.gateway import interp2app, unwrap_spec, NoneNotWrapped
from pypy.interpreter.typedef import TypeDef, GetSetProperty, interp_attrproperty
from pypy.module.micronumpy import interp_boxes, interp_dtype, support, loop
+from pypy.rlib import jit
from pypy.rlib.rarithmetic import LONG_BIT
from pypy.tool.sourcetools import func_with_new_name
+
class W_Ufunc(Wrappable):
_attrs_ = ["name", "promote_to_float", "promote_bools", "identity"]
_immutable_fields_ = ["promote_to_float", "promote_bools", "name"]
@@ -28,7 +30,7 @@
return self.identity
def descr_call(self, space, __args__):
- from interp_numarray import BaseArray
+ from interp_numarray import BaseArray
args_w, kwds_w = __args__.unpack()
# it occurs to me that we don't support any datatypes that
# require casting, change it later when we do
@@ -179,7 +181,7 @@
elif out.shape != shape:
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, expecting [%s]' +
- ' , got [%s]',
+ ' , got [%s]',
",".join([str(x) for x in shape]),
",".join([str(x) for x in out.shape]),
)
@@ -204,7 +206,7 @@
else:
arr = ReduceArray(self.func, self.name, self.identity, obj, dtype)
val = loop.compute(arr)
- return val
+ return val
def do_axis_reduce(self, obj, dtype, axis, result):
from pypy.module.micronumpy.interp_numarray import AxisReduce
@@ -253,7 +255,7 @@
if isinstance(w_obj, Scalar):
arr = self.func(calc_dtype, w_obj.value.convert_to(calc_dtype))
if isinstance(out,Scalar):
- out.value=arr
+ out.value = arr
elif isinstance(out, BaseArray):
out.fill(space, arr)
else:
@@ -265,7 +267,7 @@
if not broadcast_shape or broadcast_shape != out.shape:
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, could not broadcast [%s]' +
- ' to [%s]',
+ ' to [%s]',
",".join([str(x) for x in w_obj.shape]),
",".join([str(x) for x in out.shape]),
)
@@ -292,10 +294,11 @@
self.func = func
self.comparison_func = comparison_func
+ @jit.unroll_safe
def call(self, space, args_w):
from pypy.module.micronumpy.interp_numarray import (Call2,
convert_to_array, Scalar, shape_agreement, BaseArray)
- if len(args_w)>2:
+ if len(args_w) > 2:
[w_lhs, w_rhs, w_out] = args_w
else:
[w_lhs, w_rhs] = args_w
@@ -326,7 +329,7 @@
w_rhs.value.convert_to(calc_dtype)
)
if isinstance(out,Scalar):
- out.value=arr
+ out.value = arr
elif isinstance(out, BaseArray):
out.fill(space, arr)
else:
@@ -337,7 +340,7 @@
if out and out.shape != shape_agreement(space, new_shape, out.shape):
raise operationerrfmt(space.w_ValueError,
'output parameter shape mismatch, could not broadcast [%s]' +
- ' to [%s]',
+ ' to [%s]',
",".join([str(x) for x in new_shape]),
",".join([str(x) for x in out.shape]),
)
@@ -347,7 +350,6 @@
w_lhs.add_invalidates(w_res)
w_rhs.add_invalidates(w_res)
if out:
- #out.add_invalidates(w_res) #causes a recursion loop
w_res.get_concrete()
return w_res
@@ -539,14 +541,18 @@
("reciprocal", "reciprocal", 1),
("fabs", "fabs", 1, {"promote_to_float": True}),
+ ("fmax", "fmax", 2, {"promote_to_float": True}),
+ ("fmin", "fmin", 2, {"promote_to_float": True}),
("fmod", "fmod", 2, {"promote_to_float": True}),
("floor", "floor", 1, {"promote_to_float": True}),
("ceil", "ceil", 1, {"promote_to_float": True}),
+ ("trunc", "trunc", 1, {"promote_to_float": True}),
("exp", "exp", 1, {"promote_to_float": True}),
("exp2", "exp2", 1, {"promote_to_float": True}),
("expm1", "expm1", 1, {"promote_to_float": True}),
('sqrt', 'sqrt', 1, {'promote_to_float': True}),
+ ('square', 'square', 1, {'promote_to_float': True}),
("sin", "sin", 1, {"promote_to_float": True}),
("cos", "cos", 1, {"promote_to_float": True}),
diff --git a/pypy/module/micronumpy/signature.py b/pypy/module/micronumpy/signature.py
--- a/pypy/module/micronumpy/signature.py
+++ b/pypy/module/micronumpy/signature.py
@@ -107,6 +107,10 @@
arr.compute_first_step(self, f)
return f
+ def debug_repr(self):
+ # should be overridden, but in case it isn't, provide a default
+ return str(self)
+
class ConcreteSignature(Signature):
_immutable_fields_ = ['dtype']
@@ -207,7 +211,7 @@
def _create_iter(self, iterlist, arraylist, arr, transforms):
from pypy.module.micronumpy.interp_numarray import VirtualSlice
assert isinstance(arr, VirtualSlice)
- transforms = transforms + [ViewTransform(arr.chunks)]
+ transforms = [ViewTransform(arr.chunks)] + transforms
self.child._create_iter(iterlist, arraylist, arr.child, transforms)
def eval(self, frame, arr):
@@ -215,6 +219,9 @@
assert isinstance(arr, VirtualSlice)
return self.child.eval(frame, arr.child)
+ def debug_repr(self):
+ return 'VirtualSlice(%s)' % self.child.debug_repr()
+
class Call1(Signature):
_immutable_fields_ = ['unfunc', 'name', 'child', 'res', 'dtype']
@@ -270,7 +277,7 @@
from pypy.module.micronumpy.interp_numarray import Call1
assert isinstance(arr, Call1)
- vtransforms = transforms + [BroadcastTransform(arr.values.shape)]
+ vtransforms = [BroadcastTransform(arr.values.shape)] + transforms
self.child._create_iter(iterlist, arraylist, arr.values, vtransforms)
self.res._create_iter(iterlist, arraylist, arr.res, transforms)
@@ -348,7 +355,7 @@
from pypy.module.micronumpy.interp_numarray import ResultArray
assert isinstance(arr, ResultArray)
- rtransforms = transforms + [BroadcastTransform(arr.left.shape)]
+ rtransforms = [BroadcastTransform(arr.left.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -375,7 +382,7 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- ltransforms = transforms + [BroadcastTransform(arr.shape)]
+ ltransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, ltransforms)
self.right._create_iter(iterlist, arraylist, arr.right, transforms)
@@ -388,7 +395,7 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -401,8 +408,8 @@
from pypy.module.micronumpy.interp_numarray import Call2
assert isinstance(arr, Call2)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
- ltransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
+ ltransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, ltransforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
@@ -424,7 +431,7 @@
frame.cur_value = self.binfunc(self.calc_dtype, frame.cur_value, rval)
def debug_repr(self):
- return 'ReduceSig(%s)' % (self.name, self.right.debug_repr())
+ return 'ReduceSig(%s, %s)' % (self.name, self.right.debug_repr())
class SliceloopSignature(Call2):
def eval(self, frame, arr):
@@ -448,7 +455,7 @@
from pypy.module.micronumpy.interp_numarray import SliceArray
assert isinstance(arr, SliceArray)
- rtransforms = transforms + [BroadcastTransform(arr.shape)]
+ rtransforms = [BroadcastTransform(arr.shape)] + transforms
self.left._create_iter(iterlist, arraylist, arr.left, transforms)
self.right._create_iter(iterlist, arraylist, arr.right, rtransforms)
diff --git a/pypy/module/micronumpy/strides.py b/pypy/module/micronumpy/strides.py
--- a/pypy/module/micronumpy/strides.py
+++ b/pypy/module/micronumpy/strides.py
@@ -1,6 +1,7 @@
from pypy.rlib import jit
from pypy.interpreter.error import OperationError
+ at jit.look_inside_iff(lambda chunks: jit.isconstant(len(chunks)))
def enumerate_chunks(chunks):
result = []
i = -1
@@ -85,9 +86,9 @@
space.isinstance_w(w_item_or_slice, space.w_slice)):
raise OperationError(space.w_IndexError,
space.wrap('unsupported iterator index'))
-
+
start, stop, step, lngth = space.decode_index4(w_item_or_slice, size)
-
+
coords = [0] * len(shape)
i = start
if order == 'C':
diff --git a/pypy/module/micronumpy/support.py b/pypy/module/micronumpy/support.py
--- a/pypy/module/micronumpy/support.py
+++ b/pypy/module/micronumpy/support.py
@@ -1,5 +1,9 @@
+from pypy.rlib import jit
+
+
+ at jit.unroll_safe
def product(s):
i = 1
for x in s:
i *= x
- return i
\ No newline at end of file
+ return i
diff --git a/pypy/module/micronumpy/test/test_numarray.py b/pypy/module/micronumpy/test/test_numarray.py
--- a/pypy/module/micronumpy/test/test_numarray.py
+++ b/pypy/module/micronumpy/test/test_numarray.py
@@ -395,11 +395,19 @@
assert a[3] == 0.
def test_newaxis(self):
- from _numpypy import array
+ import math
+ from _numpypy import array, cos, zeros
from numpypy.core.numeric import newaxis
a = array(range(5))
b = array([range(5)])
assert (a[newaxis] == b).all()
+ a = array(range(3))
+ b = array([1, 3])
+ expected = zeros((3, 2))
+ for x in range(3):
+ for y in range(2):
+ expected[x, y] = math.cos(a[x]) * math.cos(b[y])
+ assert ((cos(a)[:,newaxis] * cos(b).T) == expected).all()
def test_newaxis_slice(self):
from _numpypy import array
@@ -1338,6 +1346,10 @@
dims_disagree = raises(ValueError, concatenate, (a1, b1), axis=0)
assert str(dims_disagree.value) == \
"array dimensions must agree except for axis being concatenated"
+ a = array([1, 2, 3, 4, 5, 6])
+ a = (a + a)[::2]
+ b = concatenate((a[:3], a[-3:]))
+ assert (b == [2, 6, 10, 2, 6, 10]).all()
def test_std(self):
from _numpypy import array
@@ -1387,6 +1399,16 @@
assert (ones(1) + ones(1)).nbytes == 8
assert array(3.0).nbytes == 8
+ def test_repeat(self):
+ from _numpypy import repeat, array
+ assert (repeat([[1, 2], [3, 4]], 3) == [1, 1, 1, 2, 2, 2,
+ 3, 3, 3, 4, 4, 4]).all()
+ assert (repeat([[1, 2], [3, 4]], 2, axis=0) == [[1, 2], [1, 2], [3, 4],
+ [3, 4]]).all()
+ assert (repeat([[1, 2], [3, 4]], 2, axis=1) == [[1, 1, 2, 2], [3, 3,
+ 4, 4]]).all()
+ assert (array([1, 2]).repeat(2) == array([1, 1, 2, 2])).all()
+
class AppTestMultiDim(BaseNumpyAppTest):
def test_init(self):
diff --git a/pypy/module/micronumpy/test/test_ufuncs.py b/pypy/module/micronumpy/test/test_ufuncs.py
--- a/pypy/module/micronumpy/test/test_ufuncs.py
+++ b/pypy/module/micronumpy/test/test_ufuncs.py
@@ -135,6 +135,38 @@
assert fabs(float('-inf')) == float('inf')
assert isnan(fabs(float('nan')))
+ def test_fmax(self):
+ from _numpypy import fmax
+ import math
+
+ nnan, nan, inf, ninf = float('-nan'), float('nan'), float('inf'), float('-inf')
+
+ a = [ninf, -5, 0, 5, inf]
+ assert (fmax(a, [ninf]*5) == a).all()
+ assert (fmax(a, [inf]*5) == [inf]*5).all()
+ assert (fmax(a, [1]*5) == [1, 1, 1, 5, inf]).all()
+ assert math.isnan(fmax(nan, 0))
+ assert math.isnan(fmax(0, nan))
+ assert math.isnan(fmax(nan, nan))
+ # The numpy docs specify that the FIRST NaN should be used if both are NaN
+ assert math.copysign(1.0, fmax(nnan, nan)) == -1.0
+
+ def test_fmin(self):
+ from _numpypy import fmin
+ import math
+
+ nnan, nan, inf, ninf = float('-nan'), float('nan'), float('inf'), float('-inf')
+
+ a = [ninf, -5, 0, 5, inf]
+ assert (fmin(a, [ninf]*5) == [ninf]*5).all()
+ assert (fmin(a, [inf]*5) == a).all()
+ assert (fmin(a, [1]*5) == [ninf, -5, 0, 1, 1]).all()
+ assert math.isnan(fmin(nan, 0))
+ assert math.isnan(fmin(0, nan))
+ assert math.isnan(fmin(nan, nan))
+ # The numpy docs specify that the FIRST NaN should be used if both are NaN
+ assert math.copysign(1.0, fmin(nnan, nan)) == -1.0
+
def test_fmod(self):
from _numpypy import fmod
import math
@@ -221,24 +253,17 @@
for i in range(3):
assert c[i] == a[i] - b[i]
- def test_floorceil(self):
- from _numpypy import array, floor, ceil
+ def test_floorceiltrunc(self):
+ from _numpypy import array, floor, ceil, trunc
import math
- reference = [-2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = floor(a)
- for i in range(5):
- assert b[i] == reference[i]
- reference = [-1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0]
- a = array([-1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5])
- b = ceil(a)
- assert (reference == b).all()
- inf = float("inf")
- data = [1.5, 2.9999, -1.999, inf]
- results = [math.floor(x) for x in data]
- assert (floor(data) == results).all()
- results = [math.ceil(x) for x in data]
- assert (ceil(data) == results).all()
+ ninf, inf = float("-inf"), float("inf")
+ a = array([ninf, -1.4, -1.5, -1.0, 0.0, 1.0, 1.4, 0.5, inf])
+ assert ([ninf, -2.0, -2.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == floor(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 2.0, 1.0, inf] == ceil(a)).all()
+ assert ([ninf, -1.0, -1.0, -1.0, 0.0, 1.0, 1.0, 0.0, inf] == trunc(a)).all()
+ assert all([math.isnan(f(float("nan"))) for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("nan"))) == 1 for f in floor, ceil, trunc])
+ assert all([math.copysign(1, f(float("-nan"))) == -1 for f in floor, ceil, trunc])
def test_copysign(self):
from _numpypy import array, copysign
@@ -455,6 +480,19 @@
assert math.isnan(sqrt(-1))
assert math.isnan(sqrt(nan))
+ def test_square(self):
+ import math
+ from _numpypy import square
+
+ nan, inf, ninf = float("nan"), float("inf"), float("-inf")
+
+ assert math.isnan(square(nan))
+ assert math.isinf(square(inf))
+ assert math.isinf(square(ninf))
+ assert square(ninf) > 0
+ assert [square(x) for x in range(-5, 5)] == [x*x for x in range(-5, 5)]
+ assert math.isinf(square(1e300))
+
def test_radians(self):
import math
from _numpypy import radians, array
@@ -546,10 +584,17 @@
raises(TypeError, 'array([1.0]) & 1')
def test_unary_bitops(self):
- from _numpypy import bitwise_not, array
+ from _numpypy import bitwise_not, invert, array
a = array([1, 2, 3, 4])
assert (~a == [-2, -3, -4, -5]).all()
assert (bitwise_not(a) == ~a).all()
+ assert (invert(a) == ~a).all()
+
+ def test_shift(self):
+ from _numpypy import left_shift, right_shift
+
+ assert (left_shift([5, 1], [2, 13]) == [20, 2**13]).all()
+ assert (right_shift(10, range(5)) == [10, 5, 2, 1, 0]).all()
def test_comparisons(self):
import operator
diff --git a/pypy/module/micronumpy/types.py b/pypy/module/micronumpy/types.py
--- a/pypy/module/micronumpy/types.py
+++ b/pypy/module/micronumpy/types.py
@@ -631,6 +631,22 @@
return math.fabs(v)
@simple_binary_op
+ def fmax(self, v1, v2):
+ if math.isnan(v1):
+ return v1
+ elif math.isnan(v2):
+ return v2
+ return max(v1, v2)
+
+ @simple_binary_op
+ def fmin(self, v1, v2):
+ if math.isnan(v1):
+ return v1
+ elif math.isnan(v2):
+ return v2
+ return min(v1, v2)
+
+ @simple_binary_op
def fmod(self, v1, v2):
try:
return math.fmod(v1, v2)
@@ -652,6 +668,13 @@
return math.ceil(v)
@simple_unary_op
+ def trunc(self, v):
+ if v < 0:
+ return math.ceil(v)
+ else:
+ return math.floor(v)
+
+ @simple_unary_op
def exp(self, v):
try:
return math.exp(v)
@@ -741,6 +764,10 @@
except ValueError:
return rfloat.NAN
+ @simple_unary_op
+ def square(self, v):
+ return v*v
+
@raw_unary_op
def isnan(self, v):
return rfloat.isnan(v)
diff --git a/pypy/module/pyexpat/test/__init__.py b/pypy/module/pyexpat/test/__init__.py
new file mode 100644
diff --git a/pypy/module/pypyjit/test_pypy_c/test_misc.py b/pypy/module/pypyjit/test_pypy_c/test_misc.py
--- a/pypy/module/pypyjit/test_pypy_c/test_misc.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_misc.py
@@ -212,7 +212,7 @@
i19 = int_add(i12, 1)
setfield_gc(p9, i19, descr=)
guard_nonnull_class(p17, 146982464, descr=...)
- i21 = getfield_gc(p17, descr=)
+ i21 = getfield_gc(p17, descr=)
i23 = int_lt(0, i21)
guard_true(i23, descr=...)
i24 = getfield_gc(p17, descr=)
@@ -351,3 +351,23 @@
# the following assertion fails if the loop was cancelled due
# to "abort: vable escape"
assert len(log.loops_by_id("eval")) == 1
+
+ def test_sys_exc_info(self):
+ def main():
+ i = 1
+ lst = [i]
+ while i < 1000:
+ try:
+ return lst[i]
+ except:
+ e = sys.exc_info()[1] # ID: exc_info
+ if not isinstance(e, IndexError):
+ raise
+ i += 1
+ return 42
+
+ log = self.run(main)
+ assert log.result == 42
+ # the following assertion fails if the loop was cancelled due
+ # to "abort: vable escape"
+ assert len(log.loops_by_id("exc_info")) == 1
diff --git a/pypy/module/pypyjit/test_pypy_c/test_string.py b/pypy/module/pypyjit/test_pypy_c/test_string.py
--- a/pypy/module/pypyjit/test_pypy_c/test_string.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_string.py
@@ -198,3 +198,37 @@
i49 = call(ConstClass(_ll_2_str_eq_nonnull__rpy_stringPtr_rpy_stringPtr), p35, ConstPtr(ptr48), descr=)
guard_value(i49, 1, descr=...)
''')
+
+ def test_remove_duplicate_method_calls(self):
+ def main(n):
+ lst = []
+ for i in range(n):
+ s = 'Hello %d' % i
+ t = s.lower() # ID: callone
+ u = s.lower() # ID: calltwo
+ lst.append(t)
+ lst.append(u)
+ return len(','.join(lst))
+ log = self.run(main, [1000])
+ assert log.result == main(1000)
+ loops = log.loops_by_filename(self.filepath)
+ loop, = loops
+ loop.match_by_id('callone', '''
+ p114 = call(ConstClass(ll_lower__rpy_stringPtr), p113, descr=)
+ guard_no_exception(descr=...)
+ ''')
+ loop.match_by_id('calltwo', '') # nothing
+
+ def test_move_method_call_out_of_loop(self):
+ def main(n):
+ lst = []
+ s = 'Hello %d' % n
+ for i in range(n):
+ t = s.lower() # ID: callone
+ lst.append(t)
+ return len(','.join(lst))
+ log = self.run(main, [1000])
+ assert log.result == main(1000)
+ loops = log.loops_by_filename(self.filepath)
+ loop, = loops
+ loop.match_by_id('callone', '') # nothing
diff --git a/pypy/module/sys/test/test_sysmodule.py b/pypy/module/sys/test/test_sysmodule.py
--- a/pypy/module/sys/test/test_sysmodule.py
+++ b/pypy/module/sys/test/test_sysmodule.py
@@ -595,3 +595,124 @@
assert len(frames) == 1
_, other_frame = frames.popitem()
assert other_frame.f_code.co_name in ('other_thread', '?')
+
+
+class AppTestSysExcInfoDirect:
+
+ def setup_method(self, meth):
+ self.checking = not option.runappdirect
+ if self.checking:
+ self.seen = []
+ from pypy.module.sys import vm
+ def exc_info_with_tb(*args):
+ self.seen.append("n") # not optimized
+ return self.old[0](*args)
+ def exc_info_without_tb(*args):
+ self.seen.append("y") # optimized
+ return self.old[1](*args)
+ self.old = [vm.exc_info_with_tb, vm.exc_info_without_tb]
+ vm.exc_info_with_tb = exc_info_with_tb
+ vm.exc_info_without_tb = exc_info_without_tb
+ #
+ from pypy.rlib import jit
+ self.old2 = [jit.we_are_jitted]
+ jit.we_are_jitted = lambda: True
+
+ def teardown_method(self, meth):
+ if self.checking:
+ from pypy.module.sys import vm
+ from pypy.rlib import jit
+ vm.exc_info_with_tb = self.old[0]
+ vm.exc_info_without_tb = self.old[1]
+ jit.we_are_jitted = self.old2[0]
+ #
+ assert ''.join(self.seen) == meth.expected
+
+ def test_returns_none(self):
+ import sys
+ assert sys.exc_info() == (None, None, None)
+ assert sys.exc_info()[0] is None
+ assert sys.exc_info()[1] is None
+ assert sys.exc_info()[2] is None
+ assert sys.exc_info()[:2] == (None, None)
+ assert sys.exc_info()[:3] == (None, None, None)
+ assert sys.exc_info()[0:2] == (None, None)
+ assert sys.exc_info()[2:4] == (None,)
+ test_returns_none.expected = 'nnnnnnnn'
+
+ def test_returns_subscr(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ assert sys.exc_info()[0] is KeyError # y
+ assert sys.exc_info()[1] is e # y
+ assert sys.exc_info()[2] is not None # n
+ assert sys.exc_info()[-3] is KeyError # y
+ assert sys.exc_info()[-2] is e # y
+ assert sys.exc_info()[-1] is not None # n
+ test_returns_subscr.expected = 'yynyyn'
+
+ def test_returns_slice_2(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ foo = sys.exc_info() # n
+ assert sys.exc_info()[:0] == () # y
+ assert sys.exc_info()[:1] == foo[:1] # y
+ assert sys.exc_info()[:2] == foo[:2] # y
+ assert sys.exc_info()[:3] == foo # n
+ assert sys.exc_info()[:4] == foo # n
+ assert sys.exc_info()[:-1] == foo[:2] # y
+ assert sys.exc_info()[:-2] == foo[:1] # y
+ assert sys.exc_info()[:-3] == () # y
+ test_returns_slice_2.expected = 'nyyynnyyy'
+
+ def test_returns_slice_3(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ foo = sys.exc_info() # n
+ assert sys.exc_info()[2:2] == () # y
+ assert sys.exc_info()[0:1] == foo[:1] # y
+ assert sys.exc_info()[1:2] == foo[1:2] # y
+ assert sys.exc_info()[0:3] == foo # n
+ assert sys.exc_info()[2:4] == foo[2:] # n
+ assert sys.exc_info()[0:-1] == foo[:2] # y
+ assert sys.exc_info()[0:-2] == foo[:1] # y
+ assert sys.exc_info()[5:-3] == () # y
+ test_returns_slice_3.expected = 'nyyynnyyy'
+
+ def test_strange_invocation(self):
+ import sys
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ a = []; k = {}
+ assert sys.exc_info(*a)[:0] == ()
+ assert sys.exc_info(**k)[:0] == ()
+ test_strange_invocation.expected = 'nn'
+
+ def test_call_in_subfunction(self):
+ import sys
+ def g():
+ # this case is not optimized, because we need to search the
+ # frame chain. it's probably not worth the complications
+ return sys.exc_info()[1]
+ e = KeyError("boom")
+ try:
+ raise e
+ except:
+ assert g() is e
+ test_call_in_subfunction.expected = 'n'
+
+
+class AppTestSysExcInfoDirectCallMethod(AppTestSysExcInfoDirect):
+ def setup_class(cls):
+ cls.space = gettestobjspace(**{"objspace.opcodes.CALL_METHOD": True})
diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py
--- a/pypy/module/sys/version.py
+++ b/pypy/module/sys/version.py
@@ -10,7 +10,7 @@
CPYTHON_VERSION = (2, 7, 2, "final", 42) #XXX # sync patchlevel.h
CPYTHON_API_VERSION = 1013 #XXX # sync with include/modsupport.h
-PYPY_VERSION = (1, 8, 1, "dev", 0) #XXX # sync patchlevel.h
+PYPY_VERSION = (1, 9, 1, "dev", 0) #XXX # sync patchlevel.h
if platform.name == 'msvc':
COMPILER_INFO = 'MSC v.%d 32 bit' % (platform.version * 10 + 600)
diff --git a/pypy/module/sys/vm.py b/pypy/module/sys/vm.py
--- a/pypy/module/sys/vm.py
+++ b/pypy/module/sys/vm.py
@@ -89,6 +89,9 @@
"""Return the (type, value, traceback) of the most recent exception
caught by an except clause in the current stack frame or in an older stack
frame."""
+ return exc_info_with_tb(space) # indirection for the tests
+
+def exc_info_with_tb(space):
operror = space.getexecutioncontext().sys_exc_info()
if operror is None:
return space.newtuple([space.w_None,space.w_None,space.w_None])
@@ -96,6 +99,59 @@
return space.newtuple([operror.w_type, operror.get_w_value(space),
space.wrap(operror.get_traceback())])
+def exc_info_without_tb(space, frame):
+ operror = frame.last_exception
+ return space.newtuple([operror.w_type, operror.get_w_value(space),
+ space.w_None])
+
+def exc_info_direct(space, frame):
+ from pypy.tool import stdlib_opcode
+ # In order to make the JIT happy, we try to return (exc, val, None)
+ # instead of (exc, val, tb). We can do that only if we recognize
+ # the following pattern in the bytecode:
+ # CALL_FUNCTION/CALL_METHOD <-- invoking me
+ # LOAD_CONST 0, 1, -2 or -3
+ # BINARY_SUBSCR
+ # or:
+ # CALL_FUNCTION/CALL_METHOD
+ # LOAD_CONST <=2
+ # SLICE_2
+ # or:
+ # CALL_FUNCTION/CALL_METHOD
+ # LOAD_CONST any integer
+ # LOAD_CONST <=2
+ # SLICE_3
+ need_all_three_args = True
+ co = frame.getcode().co_code
+ p = frame.last_instr
+ if (ord(co[p]) == stdlib_opcode.CALL_FUNCTION or
+ ord(co[p]) == stdlib_opcode.CALL_METHOD):
+ if ord(co[p+3]) == stdlib_opcode.LOAD_CONST:
+ lo = ord(co[p+4])
+ hi = ord(co[p+5])
+ w_constant = frame.getconstant_w((hi * 256) | lo)
+ if space.isinstance_w(w_constant, space.w_int):
+ constant = space.int_w(w_constant)
+ if ord(co[p+6]) == stdlib_opcode.BINARY_SUBSCR:
+ if -3 <= constant <= 1 and constant != -1:
+ need_all_three_args = False
+ elif ord(co[p+6]) == stdlib_opcode.SLICE+2:
+ if constant <= 2:
+ need_all_three_args = False
+ elif (ord(co[p+6]) == stdlib_opcode.LOAD_CONST and
+ ord(co[p+9]) == stdlib_opcode.SLICE+3):
+ lo = ord(co[p+7])
+ hi = ord(co[p+8])
+ w_constant = frame.getconstant_w((hi * 256) | lo)
+ if space.isinstance_w(w_constant, space.w_int):
+ if space.int_w(w_constant) <= 2:
+ need_all_three_args = False
+ #
+ if need_all_three_args or frame.last_exception is None or frame.hide():
+ return exc_info_with_tb(space)
+ else:
+ return exc_info_without_tb(space, frame)
+
def exc_clear(space):
"""Clear global information on the current exception. Subsequent calls
to exc_info() will return (None,None,None) until another exception is
diff --git a/pypy/module/test_lib_pypy/test_datetime.py b/pypy/module/test_lib_pypy/test_datetime.py
--- a/pypy/module/test_lib_pypy/test_datetime.py
+++ b/pypy/module/test_lib_pypy/test_datetime.py
@@ -44,3 +44,9 @@
assert type(dt.microsecond) is int
copy.copy(dt)
+
+def test_radd():
+ class X(object):
+ def __radd__(self, other):
+ return "radd"
+ assert datetime.date(10, 10, 10) + X() == "radd"
diff --git a/pypy/module/thread/test/test_ll_thread.py b/pypy/module/thread/test/test_ll_thread.py
--- a/pypy/module/thread/test/test_ll_thread.py
+++ b/pypy/module/thread/test/test_ll_thread.py
@@ -66,7 +66,6 @@
def test_gc_locking(self):
import time
from pypy.rlib.objectmodel import invoke_around_extcall
- from pypy.rlib.objectmodel import we_are_translated
from pypy.rlib.debug import ll_assert
class State:
@@ -129,8 +128,6 @@
state.finished = 0
# the next line installs before_extcall() and after_extcall()
# to be called automatically around external function calls.
- # When not translated it does not work around time.sleep(),
- # so we have to call them manually for this test.
invoke_around_extcall(before_extcall, after_extcall)
g(10, 1)
@@ -142,13 +139,9 @@
willing_to_wait_more -= 1
done = len(state.answers) == expected
- if not we_are_translated(): before_extcall()
time.sleep(0.01)
- if not we_are_translated(): after_extcall()
- if not we_are_translated(): before_extcall()
time.sleep(0.1)
- if not we_are_translated(): after_extcall()
return len(state.answers)
@@ -160,12 +153,11 @@
answers = fn()
assert answers == expected
-class TestRunDirectly(AbstractThreadTests):
- def getcompiled(self, f, argtypes):
- return f
-
- def test_start_new_thread(self):
- py.test.skip("deadlocks occasionally -- why???")
+#class TestRunDirectly(AbstractThreadTests):
+# def getcompiled(self, f, argtypes):
+# return f
+# These are disabled because they crash occasionally for bad reasons
+# related to the fact that ll2ctypes is not at all thread-safe
class TestUsingBoehm(AbstractThreadTests):
gcpolicy = 'boehm'
diff --git a/pypy/objspace/descroperation.py b/pypy/objspace/descroperation.py
--- a/pypy/objspace/descroperation.py
+++ b/pypy/objspace/descroperation.py
@@ -43,6 +43,13 @@
return w_eq
type_eq._annspecialcase_ = 'specialize:memo'
+def list_iter(space):
+ "Utility that returns the app-level descriptor list.__iter__."
+ w_src, w_iter = space.lookup_in_type_where(space.w_list,
+ '__iter__')
+ return w_iter
+list_iter._annspecialcase_ = 'specialize:memo'
+
def raiseattrerror(space, w_obj, name, w_descr=None):
w_type = space.type(w_obj)
typename = w_type.getname(space)
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -86,6 +86,7 @@
return s_None
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
# ____________________________________________________________
@@ -109,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/flow/flowcontext.py b/pypy/objspace/flow/flowcontext.py
--- a/pypy/objspace/flow/flowcontext.py
+++ b/pypy/objspace/flow/flowcontext.py
@@ -434,6 +434,13 @@
self.lastblock = block
self.pushvalue(w_result)
+ def BUILD_LIST_FROM_ARG(self, _, next_instr):
+ # This opcode was added with pypy-1.8. Here is a simpler
+ # version, enough for annotation.
+ last_val = self.popvalue()
+ self.pushvalue(self.space.newlist([]))
+ self.pushvalue(last_val)
+
# XXX Unimplemented 2.7 opcodes ----------------
# Set literals, set comprehensions
diff --git a/pypy/objspace/std/celldict.py b/pypy/objspace/std/celldict.py
--- a/pypy/objspace/std/celldict.py
+++ b/pypy/objspace/std/celldict.py
@@ -163,7 +163,8 @@
class ModuleDictIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
dict_w = strategy.unerase(dictimplementation.dstorage)
self.iterator = dict_w.iteritems()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,8 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, classofinstance=None,
- strdict=False):
+ instance=False, strdict=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -247,7 +246,7 @@
return 0
def iter(self, w_dict):
- return EmptyIteratorImplementation(self.space, w_dict)
+ return EmptyIteratorImplementation(self.space, self, w_dict)
def clear(self, w_dict):
return
@@ -263,8 +262,9 @@
# Iterator Implementation base classes
class IteratorImplementation(object):
- def __init__(self, space, implementation):
+ def __init__(self, space, strategy, implementation):
self.space = space
+ self.strategy = strategy
self.dictimplementation = implementation
self.len = implementation.length()
self.pos = 0
@@ -280,7 +280,20 @@
if self.pos < self.len:
result = self.next_entry()
self.pos += 1
- return result
+ if self.strategy is self.dictimplementation.strategy:
+ return result # common case
+ else:
+ # waaa, obscure case: the strategy changed, but not the
+ # length of the dict. The (key, value) pair in 'result'
+ # might be out-of-date. We try to explicitly look up
+ # the key in the dict.
+ w_key = result[0]
+ w_value = self.dictimplementation.getitem(w_key)
+ if w_value is None:
+ self.len = -1 # Make this error state sticky
+ raise OperationError(self.space.w_RuntimeError,
+ self.space.wrap("dictionary changed during iteration"))
+ return (w_key, w_value)
# no more entries
self.dictimplementation = None
return None, None
@@ -489,7 +502,7 @@
_mixin_ = True
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
self.iterator = strategy.unerase(dictimplementation.dstorage).iteritems()
def next_entry(self):
@@ -503,7 +516,7 @@
_mixin_ = True
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
self.iterator = strategy.unerase(dictimplementation.dstorage).iteritems()
def next_entry(self):
@@ -549,10 +562,7 @@
def listview_int(self, w_dict):
return self.unerase(w_dict.dstorage).keys()
- def w_keys(self, w_dict):
- # XXX there is no space.newlist_int yet
- space = self.space
- return space.call_function(space.w_list, w_dict)
+ # XXX there is no space.newlist_int yet to implement w_keys more efficiently
class IntIteratorImplementation(_WrappedIteratorMixin, IteratorImplementation):
pass
diff --git a/pypy/objspace/std/dictproxyobject.py b/pypy/objspace/std/dictproxyobject.py
--- a/pypy/objspace/std/dictproxyobject.py
+++ b/pypy/objspace/std/dictproxyobject.py
@@ -20,7 +20,17 @@
def getitem(self, w_dict, w_key):
space = self.space
w_lookup_type = space.type(w_key)
- if space.is_w(w_lookup_type, space.w_str):
+ if (space.is_w(w_lookup_type, space.w_str) or # Most common path first
+ space.abstract_issubclass_w(w_lookup_type, space.w_str)):
+ return self.getitem_str(w_dict, space.str_w(w_key))
+ elif space.abstract_issubclass_w(w_lookup_type, space.w_unicode):
+ try:
+ w_key = space.str(w_key)
+ except OperationError, e:
+ if not e.match(space, space.w_UnicodeEncodeError):
+ raise
+ # non-ascii unicode is never equal to a byte string
+ return None
return self.getitem_str(w_dict, space.str_w(w_key))
else:
return None
@@ -98,7 +108,8 @@
class DictProxyIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
w_type = strategy.unerase(dictimplementation.dstorage)
self.iterator = w_type.dict_w.iteritems()
diff --git a/pypy/objspace/std/identitydict.py b/pypy/objspace/std/identitydict.py
--- a/pypy/objspace/std/identitydict.py
+++ b/pypy/objspace/std/identitydict.py
@@ -1,5 +1,5 @@
## ----------------------------------------------------------------------------
-## dict strategy (see dict_multiobject.py)
+## dict strategy (see dictmultiobject.py)
from pypy.rlib import rerased
from pypy.rlib.debug import mark_dict_non_null
@@ -80,8 +80,8 @@
def iter(self, w_dict):
return IdentityDictIteratorImplementation(self.space, self, w_dict)
- def keys(self, w_dict):
- return self.unerase(w_dict.dstorage).keys()
+ def w_keys(self, w_dict):
+ return self.space.newlist(self.unerase(w_dict.dstorage).keys())
class IdentityDictIteratorImplementation(_UnwrappedIteratorMixin, IteratorImplementation):
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py
--- a/pypy/objspace/std/mapdict.py
+++ b/pypy/objspace/std/mapdict.py
@@ -703,7 +703,8 @@
class MapDictIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
+ IteratorImplementation.__init__(
+ self, space, strategy, dictimplementation)
w_obj = strategy.unerase(dictimplementation.dstorage)
self.w_obj = w_obj
self.orig_map = self.curr_map = w_obj._get_mapdict_map()
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,10 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False, classofinstance=None,
+ def newdict(self, module=False, instance=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- classofinstance=classofinstance,
strdict=strdict)
def newset(self):
@@ -439,6 +438,8 @@
t = w_obj.getitems()
elif isinstance(w_obj, W_AbstractTupleObject):
t = w_obj.getitems_copy()
+ elif isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ t = w_obj.getitems()
else:
return ObjSpace.unpackiterable(self, w_obj, expected_length)
if expected_length != -1 and len(t) != expected_length:
@@ -456,6 +457,8 @@
return w_obj.listview_str()
if isinstance(w_obj, W_StringObject):
return w_obj.listview_str()
+ if isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems_str()
return None
def listview_int(self, w_obj):
@@ -465,8 +468,14 @@
return w_obj.listview_int()
if type(w_obj) is W_SetObject or type(w_obj) is W_FrozensetObject:
return w_obj.listview_int()
+ if isinstance(w_obj, W_ListObject) and self._uses_list_iter(w_obj):
+ return w_obj.getitems_int()
return None
+ def _uses_list_iter(self, w_obj):
+ from pypy.objspace.descroperation import list_iter
+ return self.lookup(w_obj, '__iter__') is list_iter(self)
+
def sliceindices(self, w_slice, w_length):
if isinstance(w_slice, W_SliceObject):
a, b, c = w_slice.indices3(self, self.int_w(w_length))
diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py
--- a/pypy/objspace/std/setobject.py
+++ b/pypy/objspace/std/setobject.py
@@ -359,7 +359,7 @@
w_set.sstorage = w_other.get_storage_copy()
def iter(self, w_set):
- return EmptyIteratorImplementation(self.space, w_set)
+ return EmptyIteratorImplementation(self.space, self, w_set)
def popitem(self, w_set):
raise OperationError(self.space.w_KeyError,
@@ -784,8 +784,9 @@
d_obj[w_item] = None
class IteratorImplementation(object):
- def __init__(self, space, implementation):
+ def __init__(self, space, strategy, implementation):
self.space = space
+ self.strategy = strategy
self.setimplementation = implementation
self.len = implementation.length()
self.pos = 0
@@ -801,7 +802,17 @@
if self.pos < self.len:
result = self.next_entry()
self.pos += 1
- return result
+ if self.strategy is self.setimplementation.strategy:
+ return result # common case
+ else:
+ # waaa, obscure case: the strategy changed, but not the
+ # length of the set. The 'result' might be out-of-date.
+ # We try to explicitly look it up in the set.
+ if not self.setimplementation.has_key(result):
+ self.len = -1 # Make this error state sticky
+ raise OperationError(self.space.w_RuntimeError,
+ self.space.wrap("dictionary changed during iteration"))
+ return result
# no more entries
self.setimplementation = None
return None
@@ -823,7 +834,7 @@
class StringIteratorImplementation(IteratorImplementation):
def __init__(self, space, strategy, w_set):
- IteratorImplementation.__init__(self, space, w_set)
+ IteratorImplementation.__init__(self, space, strategy, w_set)
d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
@@ -835,9 +846,9 @@
class IntegerIteratorImplementation(IteratorImplementation):
#XXX same implementation in dictmultiobject on dictstrategy-branch
- def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
- d = strategy.unerase(dictimplementation.sstorage)
+ def __init__(self, space, strategy, w_set):
+ IteratorImplementation.__init__(self, space, strategy, w_set)
+ d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
def next_entry(self):
@@ -848,9 +859,9 @@
return None
class RDictIteratorImplementation(IteratorImplementation):
- def __init__(self, space, strategy, dictimplementation):
- IteratorImplementation.__init__(self, space, dictimplementation)
- d = strategy.unerase(dictimplementation.sstorage)
+ def __init__(self, space, strategy, w_set):
+ IteratorImplementation.__init__(self, space, strategy, w_set)
+ d = strategy.unerase(w_set.sstorage)
self.iterator = d.iterkeys()
def next_entry(self):
diff --git a/pypy/objspace/std/test/test_dictmultiobject.py b/pypy/objspace/std/test/test_dictmultiobject.py
--- a/pypy/objspace/std/test/test_dictmultiobject.py
+++ b/pypy/objspace/std/test/test_dictmultiobject.py
@@ -804,6 +804,33 @@
assert "IntDictStrategy" in self.get_strategy(d)
assert d[1L] == "hi"
+ def test_iter_dict_length_change(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ d[7] = 8
+ # 'd' is now length 4
+ raises(RuntimeError, it.next)
+
+ def test_iter_dict_strategy_only_change_1(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ assert d.get(Foo()) is None # this changes the strategy of 'd'
+ lst = list(it) # but iterating still works
+ assert sorted(lst) == [(1, 2), (3, 4), (5, 6)]
+
+ def test_iter_dict_strategy_only_change_2(self):
+ d = {1: 2, 3: 4, 5: 6}
+ it = d.iteritems()
+ d['foo'] = 'bar'
+ del d[1]
+ # 'd' is still length 3, but its strategy changed. we are
+ # getting a RuntimeError because iterating over the old storage
+ # gives us (1, 2), but 1 is not in the dict any longer.
+ raises(RuntimeError, list, it)
+
class FakeString(str):
hash_count = 0
@@ -858,10 +885,9 @@
def newtuple(self, l):
return tuple(l)
- def newdict(self, module=False, instance=False, classofinstance=None):
+ def newdict(self, module=False, instance=False):
return W_DictMultiObject.allocate_and_init_instance(
- self, module=module, instance=instance,
- classofinstance=classofinstance)
+ self, module=module, instance=instance)
def finditem_str(self, w_dict, s):
return w_dict.getitem_str(s) # assume it's a multidict
@@ -941,6 +967,20 @@
assert type(self.impl.strategy) is self.StrategyClass
#assert self.impl.r_dict_content is None
+ def test_popitem(self):
+ self.fill_impl()
+ assert self.impl.length() == 2
+ a, b = self.impl.popitem()
+ assert self.impl.length() == 1
+ if a == self.string:
+ assert b == 1000
+ assert self.impl.getitem(self.string2) == 2000
+ else:
+ assert a == self.string2
+ assert b == 2000
+ assert self.impl.getitem_str(self.string) == 1000
+ self.check_not_devolved()
+
def test_setitem(self):
self.impl.setitem(self.string, 1000)
assert self.impl.length() == 1
diff --git a/pypy/objspace/std/test/test_dictproxy.py b/pypy/objspace/std/test/test_dictproxy.py
--- a/pypy/objspace/std/test/test_dictproxy.py
+++ b/pypy/objspace/std/test/test_dictproxy.py
@@ -25,6 +25,16 @@
key, value = NotEmpty.__dict__.popitem()
assert (key == 'a' and value == 1) or (key == 'b' and value == 4)
+ def test_dictproxy_getitem(self):
+ class NotEmpty(object):
+ a = 1
+ assert 'a' in NotEmpty.__dict__
+ class substr(str): pass
+ assert substr('a') in NotEmpty.__dict__
+ assert u'a' in NotEmpty.__dict__
+ assert NotEmpty.__dict__[u'a'] == 1
+ assert u'\xe9' not in NotEmpty.__dict__
+
def test_dictproxyeq(self):
class a(object):
pass
diff --git a/pypy/objspace/std/test/test_listobject.py b/pypy/objspace/std/test/test_listobject.py
--- a/pypy/objspace/std/test/test_listobject.py
+++ b/pypy/objspace/std/test/test_listobject.py
@@ -1186,14 +1186,23 @@
# of dicts, because the OrderedDict in the stdlib relies on this.
# we extend the use case to lists and sets, i.e. all types that have
# strategies, to avoid surprizes depending on the strategy.
- for base, arg in [(list, []), (list, [5]), (list, ['x']),
- (set, []), (set, [5]), (set, ['x']),
- (dict, []), (dict, [(5,6)]), (dict, [('x',7)])]:
+ class X: pass
+ for base, arg in [
+ (list, []), (list, [5]), (list, ['x']), (list, [X]),
+ (set, []), (set, [5]), (set, ['x']), (set, [X]),
+ (dict, []), (dict, [(5,6)]), (dict, [('x',7)]), (dict, [(X,8)]),
+ ]:
print base, arg
class SubClass(base):
def __iter__(self):
return iter("foobar")
assert list(SubClass(arg)) == ['f', 'o', 'o', 'b', 'a', 'r']
+ class Sub2(base):
+ pass
+ assert list(Sub2(arg)) == list(base(arg))
+ s = set()
+ s.update(Sub2(arg))
+ assert s == set(base(arg))
class AppTestForRangeLists(AppTestW_ListObject):
diff --git a/pypy/objspace/std/test/test_setobject.py b/pypy/objspace/std/test/test_setobject.py
--- a/pypy/objspace/std/test/test_setobject.py
+++ b/pypy/objspace/std/test/test_setobject.py
@@ -907,3 +907,30 @@
return [5, 3, 4][i]
s = set([10,3,2]).intersection(Obj())
assert list(s) == [3]
+
+ def test_iter_set_length_change(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ s.add(7)
+ # 's' is now length 4
+ raises(RuntimeError, it.next)
+
+ def test_iter_set_strategy_only_change_1(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ class Foo(object):
+ def __eq__(self, other):
+ return False
+ assert Foo() not in s # this changes the strategy of 'd'
+ lst = list(s) # but iterating still works
+ assert sorted(lst) == [1, 3, 5]
+
+ def test_iter_set_strategy_only_change_2(self):
+ s = set([1, 3, 5])
+ it = iter(s)
+ s.add('foo')
+ s.remove(1)
+ # 's' is still length 3, but its strategy changed. we are
+ # getting a RuntimeError because iterating over the old storage
+ # gives us 1, but 1 is not in the set any longer.
+ raises(RuntimeError, list, it)
diff --git a/pypy/pytest.ini b/pypy/pytest.ini
--- a/pypy/pytest.ini
+++ b/pypy/pytest.ini
@@ -1,2 +1,2 @@
[pytest]
-addopts = --assertmode=old -rf
+addopts = --assert=plain -rf
diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py
--- a/pypy/rlib/jit.py
+++ b/pypy/rlib/jit.py
@@ -382,7 +382,7 @@
pass
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
vref_None = non_virtual_ref(None)
diff --git a/pypy/rlib/jit_hooks.py b/pypy/rlib/jit_hooks.py
--- a/pypy/rlib/jit_hooks.py
+++ b/pypy/rlib/jit_hooks.py
@@ -22,6 +22,7 @@
c_name = hop.inputconst(lltype.Void, 'access_helper')
args_v = [hop.inputarg(arg, arg=i)
for i, arg in enumerate(hop.args_r)]
+ hop.exception_cannot_occur()
return hop.genop('jit_marker', [c_name, c_func] + args_v,
resulttype=hop.r_result)
return helper
diff --git a/pypy/rlib/longlong2float.py b/pypy/rlib/longlong2float.py
--- a/pypy/rlib/longlong2float.py
+++ b/pypy/rlib/longlong2float.py
@@ -21,7 +21,7 @@
FLOAT_ARRAY_PTR = lltype.Ptr(lltype.Array(rffi.FLOAT))
# these definitions are used only in tests, when not translated
-def longlong2float_emulator(llval):
+def longlong2float(llval):
with lltype.scoped_alloc(DOUBLE_ARRAY_PTR.TO, 1) as d_array:
ll_array = rffi.cast(LONGLONG_ARRAY_PTR, d_array)
ll_array[0] = llval
@@ -51,12 +51,6 @@
eci = ExternalCompilationInfo(includes=['string.h', 'assert.h'],
post_include_bits=["""
-static double pypy__longlong2float(long long x) {
- double dd;
- assert(sizeof(double) == 8 && sizeof(long long) == 8);
- memcpy(&dd, &x, 8);
- return dd;
-}
static float pypy__uint2singlefloat(unsigned int x) {
float ff;
assert(sizeof(float) == 4 && sizeof(unsigned int) == 4);
@@ -71,12 +65,6 @@
}
"""])
-longlong2float = rffi.llexternal(
- "pypy__longlong2float", [rffi.LONGLONG], rffi.DOUBLE,
- _callable=longlong2float_emulator, compilation_info=eci,
- _nowrapper=True, elidable_function=True, sandboxsafe=True,
- oo_primitive="pypy__longlong2float")
-
uint2singlefloat = rffi.llexternal(
"pypy__uint2singlefloat", [rffi.UINT], rffi.FLOAT,
_callable=uint2singlefloat_emulator, compilation_info=eci,
@@ -99,4 +87,17 @@
def specialize_call(self, hop):
[v_float] = hop.inputargs(lltype.Float)
- return hop.genop("convert_float_bytes_to_longlong", [v_float], resulttype=hop.r_result)
+ hop.exception_cannot_occur()
+ return hop.genop("convert_float_bytes_to_longlong", [v_float], resulttype=lltype.SignedLongLong)
+
+class LongLong2FloatEntry(ExtRegistryEntry):
+ _about_ = longlong2float
+
+ def compute_result_annotation(self, s_longlong):
+ assert annmodel.SomeInteger(knowntype=r_int64).contains(s_longlong)
+ return annmodel.SomeFloat()
+
+ def specialize_call(self, hop):
+ [v_longlong] = hop.inputargs(lltype.SignedLongLong)
+ hop.exception_cannot_occur()
+ return hop.genop("convert_longlong_bytes_to_float", [v_longlong], resulttype=lltype.Float)
diff --git a/pypy/rlib/objectmodel.py b/pypy/rlib/objectmodel.py
--- a/pypy/rlib/objectmodel.py
+++ b/pypy/rlib/objectmodel.py
@@ -215,6 +215,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Bool, hop.s_result.const)
# ____________________________________________________________
@@ -397,6 +398,7 @@
r_obj, = hop.args_r
v_obj, = hop.inputargs(r_obj)
ll_fn = r_obj.get_ll_hash_function()
+ hop.exception_is_here()
return hop.gendirectcall(ll_fn, v_obj)
class Entry(ExtRegistryEntry):
@@ -419,6 +421,7 @@
from pypy.rpython.error import TyperError
raise TyperError("compute_identity_hash() cannot be applied to"
" %r" % (vobj.concretetype,))
+ hop.exception_cannot_occur()
return hop.genop('gc_identityhash', [vobj], resulttype=lltype.Signed)
class Entry(ExtRegistryEntry):
@@ -441,6 +444,7 @@
from pypy.rpython.error import TyperError
raise TyperError("compute_unique_id() cannot be applied to"
" %r" % (vobj.concretetype,))
+ hop.exception_cannot_occur()
return hop.genop('gc_id', [vobj], resulttype=lltype.Signed)
class Entry(ExtRegistryEntry):
@@ -452,6 +456,7 @@
def specialize_call(self, hop):
vobj, = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
if hop.rtyper.type_system.name == 'lltypesystem':
from pypy.rpython.lltypesystem import lltype
if isinstance(vobj.concretetype, lltype.Ptr):
diff --git a/pypy/rlib/rbigint.py b/pypy/rlib/rbigint.py
--- a/pypy/rlib/rbigint.py
+++ b/pypy/rlib/rbigint.py
@@ -85,7 +85,7 @@
s_DIGIT = self.bookkeeper.valueoftype(type(NULLDIGIT))
assert s_DIGIT.contains(s_list.listdef.listitem.s_value)
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
class rbigint(object):
diff --git a/pypy/rlib/rerased.py b/pypy/rlib/rerased.py
--- a/pypy/rlib/rerased.py
+++ b/pypy/rlib/rerased.py
@@ -100,6 +100,7 @@
def specialize_call(self, hop):
bk = hop.rtyper.annotator.bookkeeper
s_obj = identity.get_input_annotation(bk)
+ hop.exception_cannot_occur()
return hop.r_result.rtype_erase(hop, s_obj)
class Entry(ExtRegistryEntry):
@@ -110,6 +111,7 @@
return identity.leave_tunnel(self.bookkeeper)
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
if hop.r_result.lowleveltype is lltype.Void:
return hop.inputconst(lltype.Void, None)
[v] = hop.inputargs(hop.args_r[0])
@@ -214,6 +216,7 @@
return hop.genop('cast_opaque_ptr', [v], resulttype=hop.r_result)
def rtype_unerase_int(self, hop, v):
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_unerase_int, v)
def rtype_erase_int(self, hop):
@@ -264,6 +267,7 @@
def rtype_unerase_int(self, hop, v):
c_one = hop.inputconst(lltype.Signed, 1)
+ hop.exception_cannot_occur()
v2 = hop.genop('oounbox_int', [v], resulttype=hop.r_result)
return hop.genop('int_rshift', [v2, c_one], resulttype=lltype.Signed)
diff --git a/pypy/rlib/rgc.py b/pypy/rlib/rgc.py
--- a/pypy/rlib/rgc.py
+++ b/pypy/rlib/rgc.py
@@ -382,6 +382,7 @@
def compute_result_annotation(self):
return s_list_of_gcrefs()
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_roots', [], resulttype = hop.r_result)
class Entry(ExtRegistryEntry):
@@ -392,6 +393,7 @@
return s_list_of_gcrefs()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_referents', vlist,
resulttype = hop.r_result)
@@ -402,6 +404,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_memory_usage', vlist,
resulttype = hop.r_result)
@@ -412,6 +415,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_get_rpy_type_index', vlist,
resulttype = hop.r_result)
@@ -430,6 +434,7 @@
return annmodel.SomeBool()
def specialize_call(self, hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_is_rpy_instance', vlist,
resulttype = hop.r_result)
@@ -449,6 +454,7 @@
classrepr = getclassrepr(hop.rtyper, classdef)
vtable = classrepr.getvtable()
assert lltype.typeOf(vtable) == rclass.CLASSTYPE
+ hop.exception_cannot_occur()
return Constant(vtable, concretetype=rclass.CLASSTYPE)
class Entry(ExtRegistryEntry):
diff --git a/pypy/rlib/rsre/rsre_re.py b/pypy/rlib/rsre/rsre_re.py
--- a/pypy/rlib/rsre/rsre_re.py
+++ b/pypy/rlib/rsre/rsre_re.py
@@ -172,8 +172,9 @@
self._ctx = ctx
def span(self, groupnum=0):
- if not isinstance(groupnum, (int, long)):
- groupnum = self.re.groupindex[groupnum]
+# if not isinstance(groupnum, (int, long)):
+# groupnum = self.re.groupindex[groupnum]
+
return self._ctx.span(groupnum)
def start(self, groupnum=0):
@@ -182,19 +183,25 @@
def end(self, groupnum=0):
return self.span(groupnum)[1]
- def group(self, *groups):
- groups = groups or (0,)
- result = []
- for group in groups:
- frm, to = self.span(group)
- if 0 <= frm <= to:
- result.append(self._ctx._string[frm:to])
- else:
- result.append(None)
- if len(result) > 1:
- return tuple(result)
+ def group(self, group=0):
+ frm, to = self.span(group)
+ if 0 <= frm <= to:
+ return self._ctx._string[frm:to]
else:
- return result[0]
+ return None
+
+# def group(self, *groups):
+# groups = groups or (0,)
+# result = []
+# for group in groups:
+# frm, to = self.span(group)
+# if 0 <= frm <= to:
+# result.append(self._ctx._string[frm:to])
+# else:
+# result.append(None)
+# if len(result) > 1:
+# return tuple(result)
+
def groups(self, default=None):
fmarks = self._ctx.flatten_marks()
diff --git a/pypy/rlib/rsre/test/test_re.py b/pypy/rlib/rsre/test/test_re.py
--- a/pypy/rlib/rsre/test/test_re.py
+++ b/pypy/rlib/rsre/test/test_re.py
@@ -204,7 +204,7 @@
assert re.match('(a)', 'a').groups() == ('a',)
assert re.match(r'(a)', 'a').group(0) == 'a'
assert re.match(r'(a)', 'a').group(1) == 'a'
- assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
+ #assert re.match(r'(a)', 'a').group(1, 1) == ('a', 'a')
pat = re.compile('((a)|(b))(c)?')
assert pat.match('a').groups() == ('a', 'a', None, None)
@@ -218,13 +218,13 @@
assert m.group(0) == 'a'
assert m.group(0) == 'a'
assert m.group(1) == 'a'
- assert m.group(1, 1) == ('a', 'a')
+ #assert m.group(1, 1) == ('a', 'a')
pat = re.compile('(?:(?Pa)|(?Pb))(?Pc)?')
- assert pat.match('a').group(1, 2, 3) == ('a', None, None)
- assert pat.match('b').group('a1', 'b2', 'c3') == (
- (None, 'b', None))
- assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
+ #assert pat.match('a').group(1, 2, 3) == ('a', None, None)
+ #assert pat.match('b').group('a1', 'b2', 'c3') == (
+ # (None, 'b', None))
+ #assert pat.match('ac').group(1, 'b2', 3) == ('a', None, 'c')
def test_bug_923(self):
# Issue923: grouping inside optional lookahead problem
diff --git a/pypy/rlib/rsre/test/test_zinterp.py b/pypy/rlib/rsre/test/test_zinterp.py
--- a/pypy/rlib/rsre/test/test_zinterp.py
+++ b/pypy/rlib/rsre/test/test_zinterp.py
@@ -1,7 +1,8 @@
# minimal test: just checks that (parts of) rsre can be translated
-from pypy.rpython.test.test_llinterp import gengraph
+from pypy.rpython.test.test_llinterp import gengraph, interpret
from pypy.rlib.rsre import rsre_core
+from pypy.rlib.rsre.rsre_re import compile
def main(n):
assert n >= 0
@@ -19,3 +20,18 @@
def test_gengraph():
t, typer, graph = gengraph(main, [int])
+
+m = compile("(a|b)aaaaa")
+
+def test_match():
+ def f(i):
+ if i:
+ s = "aaaaaa"
+ else:
+ s = "caaaaa"
+ g = m.match(s)
+ if g is None:
+ return 3
+ return int("aaaaaa" == g.group(0))
+ assert interpret(f, [3]) == 1
+ assert interpret(f, [0]) == 3
diff --git a/pypy/rlib/rstring.py b/pypy/rlib/rstring.py
--- a/pypy/rlib/rstring.py
+++ b/pypy/rlib/rstring.py
@@ -245,5 +245,5 @@
raise ValueError("Value is not no_nul")
def specialize_call(self, hop):
- pass
+ hop.exception_cannot_occur()
diff --git a/pypy/rlib/test/test_longlong2float.py b/pypy/rlib/test/test_longlong2float.py
--- a/pypy/rlib/test/test_longlong2float.py
+++ b/pypy/rlib/test/test_longlong2float.py
@@ -2,6 +2,7 @@
from pypy.rlib.longlong2float import longlong2float, float2longlong
from pypy.rlib.longlong2float import uint2singlefloat, singlefloat2uint
from pypy.rlib.rarithmetic import r_singlefloat
+from pypy.rpython.test.test_llinterp import interpret
def fn(f1):
@@ -31,6 +32,18 @@
res = fn2(x)
assert repr(res) == repr(x)
+def test_interpreted():
+ def f(f1):
+ try:
+ ll = float2longlong(f1)
+ return longlong2float(ll)
+ except Exception:
+ return 500
+
+ for x in enum_floats():
+ res = interpret(f, [x])
+ assert repr(res) == repr(x)
+
# ____________________________________________________________
def fnsingle(f1):
diff --git a/pypy/rpython/annlowlevel.py b/pypy/rpython/annlowlevel.py
--- a/pypy/rpython/annlowlevel.py
+++ b/pypy/rpython/annlowlevel.py
@@ -543,11 +543,11 @@
else:
assert False
+ hop.exception_cannot_occur()
if isinstance(hop.args_r[1], rpbc.NoneFrozenPBCRepr):
return hop.inputconst(PTR, null)
v_arg = hop.inputarg(hop.args_r[1], arg=1)
assert isinstance(v_arg.concretetype, T)
- hop.exception_cannot_occur()
return hop.genop(opname, [v_arg], resulttype = PTR)
diff --git a/pypy/rpython/controllerentry.py b/pypy/rpython/controllerentry.py
--- a/pypy/rpython/controllerentry.py
+++ b/pypy/rpython/controllerentry.py
@@ -201,6 +201,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
assert hop.s_result.is_constant()
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Bool, hop.s_result.const)
# ____________________________________________________________
diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py
--- a/pypy/rpython/lltypesystem/lloperation.py
+++ b/pypy/rpython/lltypesystem/lloperation.py
@@ -130,6 +130,7 @@
def specialize_call(self, hop):
from pypy.rpython.lltypesystem import lltype
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
def enum_ops_without_sideeffects(raising_is_ok=False):
@@ -350,6 +351,7 @@
'truncate_longlong_to_int':LLOp(canfold=True),
'force_cast': LLOp(sideeffects=False), # only for rffi.cast()
'convert_float_bytes_to_longlong': LLOp(canfold=True),
+ 'convert_longlong_bytes_to_float': LLOp(canfold=True),
# __________ pointer operations __________
diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py
--- a/pypy/rpython/lltypesystem/opimpl.py
+++ b/pypy/rpython/lltypesystem/opimpl.py
@@ -431,6 +431,10 @@
from pypy.rlib.longlong2float import float2longlong
return float2longlong(a)
+def op_convert_longlong_bytes_to_float(a):
+ from pypy.rlib.longlong2float import longlong2float
+ return longlong2float(a)
+
def op_unichar_eq(x, y):
assert isinstance(x, unicode) and len(x) == 1
diff --git a/pypy/rpython/lltypesystem/rbuiltin.py b/pypy/rpython/lltypesystem/rbuiltin.py
--- a/pypy/rpython/lltypesystem/rbuiltin.py
+++ b/pypy/rpython/lltypesystem/rbuiltin.py
@@ -9,6 +9,7 @@
from pypy.rpython.rbool import bool_repr
def rtype_builtin_isinstance(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(lltype.Bool, hop.s_result.const)
if hop.args_r[0] == pyobj_repr or hop.args_r[1] == pyobj_repr:
@@ -33,6 +34,7 @@
return my_instantiate()
def rtype_instantiate(hop):
+ hop.exception_cannot_occur()
s_class = hop.args_s[0]
assert isinstance(s_class, annmodel.SomePBC)
if len(s_class.descriptions) != 1:
@@ -46,6 +48,7 @@
return rclass.rtype_new_instance(hop.rtyper, classdef, hop.llops)
def rtype_builtin_hasattr(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(lltype.Bool, hop.s_result.const)
if hop.args_r[0] == pyobj_repr:
@@ -56,6 +59,7 @@
raise TyperError("hasattr is only suported on a constant or on PyObject")
def rtype_builtin___import__(hop):
+ xxx # should not be used any more
args_v = hop.inputargs(*[pyobj_repr for ign in hop.args_r])
c = hop.inputconst(pyobj_repr, __import__)
return hop.genop('simple_call', [c] + args_v, resulttype = pyobj_repr)
diff --git a/pypy/rpython/lltypesystem/rclass.py b/pypy/rpython/lltypesystem/rclass.py
--- a/pypy/rpython/lltypesystem/rclass.py
+++ b/pypy/rpython/lltypesystem/rclass.py
@@ -746,4 +746,5 @@
assert isinstance(TYPE, GcStruct)
assert lltype._castdepth(TYPE, OBJECT) > 0
hop.rtyper.set_type_for_typeptr(vtable, TYPE)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Void, None)
diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py
--- a/pypy/rpython/lltypesystem/rstr.py
+++ b/pypy/rpython/lltypesystem/rstr.py
@@ -765,7 +765,11 @@
def _ll_stringslice(s1, start, stop):
lgt = stop - start
assert start >= 0
- assert lgt >= 0
+ # If start > stop, return a empty string. This can happen if the start
+ # is greater than the length of the string. Use < instead of <= to avoid
+ # creating another path for the JIT when start == stop.
+ if lgt < 0:
+ return s1.empty()
newstr = s1.malloc(lgt)
s1.copy_contents(s1, newstr, start, 0, lgt)
return newstr
diff --git a/pypy/rpython/lltypesystem/rtuple.py b/pypy/rpython/lltypesystem/rtuple.py
--- a/pypy/rpython/lltypesystem/rtuple.py
+++ b/pypy/rpython/lltypesystem/rtuple.py
@@ -55,6 +55,7 @@
vtup = hop.inputarg(self, 0)
LIST = hop.r_result.lowleveltype.TO
cno = inputconst(Signed, nitems)
+ hop.exception_is_here()
vlist = hop.gendirectcall(LIST.ll_newlist, cno)
v_func = hop.inputconst(Void, rlist.dum_nocheck)
for index in range(nitems):
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -1426,23 +1426,25 @@
self._visit_young_rawmalloced_object(obj)
return
#
- # If 'obj' was already forwarded, change it to its forwarding address.
- if self.is_forwarded(obj):
+ size_gc_header = self.gcheaderbuilder.size_gc_header
+ if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
+ #
+ # Common case: 'obj' was not already forwarded (otherwise
+ # tid == -42, containing all flags), and it doesn't have the
+ # HAS_SHADOW flag either. We must move it out of the nursery,
+ # into a new nonmovable location.
+ totalsize = size_gc_header + self.get_size(obj)
+ newhdr = self._malloc_out_of_nursery(totalsize)
+ #
+ elif self.is_forwarded(obj):
+ #
+ # 'obj' was already forwarded. Change the original reference
+ # to point to its forwarding address, and we're done.
root.address[0] = self.get_forwarding_address(obj)
return
- #
- # First visit to 'obj': we must move it out of the nursery.
- size_gc_header = self.gcheaderbuilder.size_gc_header
- size = self.get_size(obj)
- totalsize = size_gc_header + size
- #
- if self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
- #
- # Common case: allocate a new nonmovable location for it.
- newhdr = self._malloc_out_of_nursery(totalsize)
#
else:
- # The object has already a shadow.
+ # First visit to an object that has already a shadow.
newobj = self.nursery_objects_shadows.get(obj)
ll_assert(newobj != NULL, "GCFLAG_HAS_SHADOW but no shadow found")
newhdr = newobj - size_gc_header
@@ -1450,6 +1452,8 @@
# Remove the flag GCFLAG_HAS_SHADOW, so that it doesn't get
# copied to the shadow itself.
self.header(obj).tid &= ~GCFLAG_HAS_SHADOW
+ #
+ totalsize = size_gc_header + self.get_size(obj)
#
# Copy it. Note that references to other objects in the
# nursery are kept unchanged in this step.
diff --git a/pypy/rpython/module/ll_os_stat.py b/pypy/rpython/module/ll_os_stat.py
--- a/pypy/rpython/module/ll_os_stat.py
+++ b/pypy/rpython/module/ll_os_stat.py
@@ -455,6 +455,6 @@
return intmask(time), intmask(nsec)
def time_t_to_FILE_TIME(time, filetime):
- ft = (rffi.r_longlong(time) + secs_between_epochs) * 10000000
+ ft = rffi.r_longlong((time + secs_between_epochs) * 10000000)
filetime.c_dwHighDateTime = rffi.r_uint(ft >> 32)
filetime.c_dwLowDateTime = rffi.r_uint(ft) # masking off high bits
diff --git a/pypy/rpython/module/r_os_stat.py b/pypy/rpython/module/r_os_stat.py
--- a/pypy/rpython/module/r_os_stat.py
+++ b/pypy/rpython/module/r_os_stat.py
@@ -65,4 +65,5 @@
r_StatResult = hop.rtyper.getrepr(ll_os_stat.s_StatResult)
[v_result] = hop.inputargs(r_StatResult.r_tuple)
# no-op conversion from r_StatResult.r_tuple to r_StatResult
+ hop.exception_cannot_occur()
return v_result
diff --git a/pypy/rpython/ootypesystem/ooregistry.py b/pypy/rpython/ootypesystem/ooregistry.py
--- a/pypy/rpython/ootypesystem/ooregistry.py
+++ b/pypy/rpython/ootypesystem/ooregistry.py
@@ -22,6 +22,7 @@
annmodel.SomeOOInstance,
annmodel.SomeString))
vlist = hop.inputargs(hop.args_r[0], ootype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('oostring', vlist, resulttype = ootype.String)
class Entry_oounicode(ExtRegistryEntry):
@@ -38,6 +39,7 @@
assert isinstance(hop.args_s[0], (annmodel.SomeUnicodeCodePoint,
annmodel.SomeOOInstance))
vlist = hop.inputargs(hop.args_r[0], ootype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('oounicode', vlist, resulttype = ootype.Unicode)
diff --git a/pypy/rpython/ootypesystem/rbuiltin.py b/pypy/rpython/ootypesystem/rbuiltin.py
--- a/pypy/rpython/ootypesystem/rbuiltin.py
+++ b/pypy/rpython/ootypesystem/rbuiltin.py
@@ -7,12 +7,14 @@
from pypy.rpython.error import TyperError
def rtype_new(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
vlist = hop.inputargs(ootype.Void)
return hop.genop('new', vlist,
resulttype = hop.r_result.lowleveltype)
def rtype_oonewarray(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
vlist = hop.inputarg(ootype.Void, arg=0)
vlength = hop.inputarg(ootype.Signed, arg=1)
@@ -20,23 +22,27 @@
resulttype = hop.r_result.lowleveltype)
def rtype_null(hop):
+ hop.exception_cannot_occur()
assert hop.args_s[0].is_constant()
TYPE = hop.args_s[0].const
nullvalue = ootype.null(TYPE)
return hop.inputconst(TYPE, nullvalue)
def rtype_classof(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
vlist = hop.inputargs(hop.args_r[0])
return hop.genop('classof', vlist,
resulttype = ootype.Class)
def rtype_subclassof(hop):
+ hop.exception_cannot_occur()
vlist = hop.inputargs(rootype.ooclass_repr, rootype.ooclass_repr)
return hop.genop('subclassof', vlist,
resulttype = ootype.Bool)
def rtype_instanceof(hop):
+ hop.exception_cannot_occur()
INSTANCE = hop.args_v[1].value
v_inst = hop.inputarg(hop.args_r[0], arg=0)
c_cls = hop.inputconst(ootype.Void, INSTANCE)
@@ -44,23 +50,27 @@
resulttype=ootype.Bool)
def rtype_runtimenew(hop):
+ hop.exception_cannot_occur()
vlist = hop.inputargs(rootype.ooclass_repr)
return hop.genop('runtimenew', vlist,
resulttype = hop.r_result.lowleveltype)
def rtype_ooupcast(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.Instance)
assert isinstance(hop.args_s[1], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('ooupcast', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_oodowncast(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.Instance)
assert isinstance(hop.args_s[1], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('oodowncast', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_cast_to_object(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0], annmodel.SomeOOStaticMeth) or \
isinstance(hop.args_s[0], annmodel.SomeOOClass) or \
isinstance(hop.args_s[0].ootype, ootype.OOType)
@@ -68,12 +78,14 @@
return hop.genop('cast_to_object', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_cast_from_object(hop):
+ hop.exception_cannot_occur()
assert isinstance(hop.args_s[0].const, ootype.OOType)
assert isinstance(hop.args_s[1], annmodel.SomeOOObject)
v_inst = hop.inputarg(hop.args_r[1], arg=1)
return hop.genop('cast_from_object', [v_inst], resulttype = hop.r_result.lowleveltype)
def rtype_builtin_isinstance(hop):
+ hop.exception_cannot_occur()
if hop.s_result.is_constant():
return hop.inputconst(ootype.Bool, hop.s_result.const)
@@ -99,6 +111,7 @@
return ootype.subclassof(c1, class_)
def rtype_instantiate(hop):
+ hop.exception_cannot_occur()
if hop.args_s[0].is_constant():
## INSTANCE = hop.s_result.rtyper_makerepr(hop.rtyper).lowleveltype
## v_instance = hop.inputconst(ootype.Void, INSTANCE)
diff --git a/pypy/rpython/ootypesystem/rstr.py b/pypy/rpython/ootypesystem/rstr.py
--- a/pypy/rpython/ootypesystem/rstr.py
+++ b/pypy/rpython/ootypesystem/rstr.py
@@ -222,6 +222,10 @@
length = s.ll_strlen()
if stop > length:
stop = length
+ # If start > stop, return a empty string. This can happen if the start
+ # is greater than the length of the string.
+ if start > stop:
+ start = stop
return s.ll_substring(start, stop-start)
def ll_stringslice_minusone(s):
diff --git a/pypy/rpython/ootypesystem/rtuple.py b/pypy/rpython/ootypesystem/rtuple.py
--- a/pypy/rpython/ootypesystem/rtuple.py
+++ b/pypy/rpython/ootypesystem/rtuple.py
@@ -39,6 +39,7 @@
RESULT = hop.r_result.lowleveltype
c_resulttype = inputconst(ootype.Void, RESULT)
c_length = inputconst(ootype.Signed, len(self.items_r))
+ hop.exception_is_here()
if isinstance(RESULT, ootype.Array):
v_list = hop.genop('oonewarray', [c_resulttype, c_length], resulttype=RESULT)
else:
diff --git a/pypy/rpython/rbool.py b/pypy/rpython/rbool.py
--- a/pypy/rpython/rbool.py
+++ b/pypy/rpython/rbool.py
@@ -34,6 +34,7 @@
def rtype_float(_, hop):
vlist = hop.inputargs(Float)
+ hop.exception_cannot_occur()
return vlist[0]
#
diff --git a/pypy/rpython/rbuiltin.py b/pypy/rpython/rbuiltin.py
--- a/pypy/rpython/rbuiltin.py
+++ b/pypy/rpython/rbuiltin.py
@@ -111,25 +111,32 @@
raise TyperError("don't know about built-in function %r" % (
self.builtinfunc,))
+ def _call(self, hop2, **kwds_i):
+ bltintyper = self.findbltintyper(hop2.rtyper)
+ hop2.llops._called_exception_is_here_or_cannot_occur = False
+ v_result = bltintyper(hop2, **kwds_i)
+ if not hop2.llops._called_exception_is_here_or_cannot_occur:
+ raise TyperError("missing hop.exception_cannot_occur() or "
+ "hop.exception_is_here() in %s" % bltintyper)
+ return v_result
+
def rtype_simple_call(self, hop):
- bltintyper = self.findbltintyper(hop.rtyper)
hop2 = hop.copy()
hop2.r_s_popfirstarg()
- return bltintyper(hop2)
+ return self._call(hop2)
def rtype_call_args(self, hop):
# calling a built-in function with keyword arguments:
# mostly for rpython.objectmodel.hint()
hop, kwds_i = call_args_expand(hop)
- bltintyper = self.findbltintyper(hop.rtyper)
hop2 = hop.copy()
hop2.r_s_popfirstarg()
hop2.r_s_popfirstarg()
# the RPython-level keyword args are passed with an 'i_' prefix and
# the corresponding value is an *index* in the hop2 arguments,
# to be used with hop.inputarg(arg=..)
- return bltintyper(hop2, **kwds_i)
+ return self._call(hop2, **kwds_i)
class BuiltinMethodRepr(Repr):
@@ -198,6 +205,7 @@
# ____________________________________________________________
def rtype_builtin_bool(hop):
+ # not called any more?
assert hop.nb_args == 1
return hop.args_r[0].rtype_is_true(hop)
@@ -241,6 +249,7 @@
def rtype_builtin_min(hop):
v1, v2 = hop.inputargs(hop.r_result, hop.r_result)
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_min, v1, v2)
def ll_min(i1, i2):
@@ -250,6 +259,7 @@
def rtype_builtin_max(hop):
v1, v2 = hop.inputargs(hop.r_result, hop.r_result)
+ hop.exception_cannot_occur()
return hop.gendirectcall(ll_max, v1, v2)
def ll_max(i1, i2):
@@ -264,6 +274,7 @@
pass
def rtype_OSError__init__(hop):
+ hop.exception_cannot_occur()
if hop.nb_args == 2:
raise TyperError("OSError() should not be called with "
"a single argument")
@@ -274,6 +285,7 @@
r_self.setfield(v_self, 'errno', v_errno, hop.llops)
def rtype_WindowsError__init__(hop):
+ hop.exception_cannot_occur()
if hop.nb_args == 2:
raise TyperError("WindowsError() should not be called with "
"a single argument")
@@ -442,6 +454,7 @@
assert hop.args_s[0].is_constant()
TGT = hop.args_s[0].const
v_type, v_value = hop.inputargs(lltype.Void, hop.args_r[1])
+ hop.exception_cannot_occur()
return gen_cast(hop.llops, TGT, v_value)
_cast_to_Signed = {
@@ -523,11 +536,13 @@
def rtype_identity_hash(hop):
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('gc_identityhash', vlist, resulttype=lltype.Signed)
def rtype_runtime_type_info(hop):
assert isinstance(hop.args_r[0], rptr.PtrRepr)
vlist = hop.inputargs(hop.args_r[0])
+ hop.exception_cannot_occur()
return hop.genop('runtime_type_info', vlist,
resulttype = hop.r_result.lowleveltype)
@@ -558,6 +573,7 @@
def rtype_raw_malloc(hop):
v_size, = hop.inputargs(lltype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('raw_malloc', [v_size], resulttype=llmemory.Address)
def rtype_raw_malloc_usage(hop):
@@ -586,6 +602,7 @@
if s_addr.is_null_address():
raise TyperError("raw_memclear(x, n) where x is the constant NULL")
v_list = hop.inputargs(llmemory.Address, lltype.Signed)
+ hop.exception_cannot_occur()
return hop.genop('raw_memclear', v_list)
BUILTIN_TYPER[llmemory.raw_malloc] = rtype_raw_malloc
@@ -596,6 +613,7 @@
def rtype_offsetof(hop):
TYPE, field = hop.inputargs(lltype.Void, lltype.Void)
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Signed,
llmemory.offsetof(TYPE.value, field.value))
@@ -605,6 +623,7 @@
# non-gc objects
def rtype_free_non_gc_object(hop):
+ hop.exception_cannot_occur()
vinst, = hop.inputargs(hop.args_r[0])
flavor = hop.args_r[0].gcflavor
assert flavor != 'gc'
@@ -617,6 +636,7 @@
# keepalive_until_here
def rtype_keepalive_until_here(hop):
+ hop.exception_cannot_occur()
for v in hop.args_v:
hop.genop('keepalive', [v], resulttype=lltype.Void)
return hop.inputconst(lltype.Void, None)
diff --git a/pypy/rpython/rfloat.py b/pypy/rpython/rfloat.py
--- a/pypy/rpython/rfloat.py
+++ b/pypy/rpython/rfloat.py
@@ -136,7 +136,10 @@
hop.exception_cannot_occur()
return hop.genop('cast_float_to_int', vlist, resulttype=Signed)
- rtype_float = rtype_pos
+ def rtype_float(_, hop):
+ vlist = hop.inputargs(Float)
+ hop.exception_cannot_occur()
+ return vlist[0]
# version picked by specialisation based on which
# type system rtyping is using, from .ll_str module
diff --git a/pypy/rpython/rint.py b/pypy/rpython/rint.py
--- a/pypy/rpython/rint.py
+++ b/pypy/rpython/rint.py
@@ -310,6 +310,8 @@
if hop.has_implicit_exception(ValueError):
hop.exception_is_here()
hop.gendirectcall(ll_check_chr, vlist[0])
+ else:
+ hop.exception_cannot_occur()
return hop.genop('cast_int_to_char', vlist, resulttype=Char)
def rtype_unichr(_, hop):
@@ -317,6 +319,8 @@
if hop.has_implicit_exception(ValueError):
hop.exception_is_here()
hop.gendirectcall(ll_check_unichr, vlist[0])
+ else:
+ hop.exception_cannot_occur()
return hop.genop('cast_int_to_unichar', vlist, resulttype=UniChar)
def rtype_is_true(self, hop):
diff --git a/pypy/rpython/rlist.py b/pypy/rpython/rlist.py
--- a/pypy/rpython/rlist.py
+++ b/pypy/rpython/rlist.py
@@ -115,6 +115,7 @@
def rtype_bltn_list(self, hop):
v_lst = hop.inputarg(self, 0)
cRESLIST = hop.inputconst(Void, hop.r_result.LIST)
+ hop.exception_is_here()
return hop.gendirectcall(ll_copy, cRESLIST, v_lst)
def rtype_len(self, hop):
diff --git a/pypy/rpython/rrange.py b/pypy/rpython/rrange.py
--- a/pypy/rpython/rrange.py
+++ b/pypy/rpython/rrange.py
@@ -107,8 +107,10 @@
if isinstance(hop.r_result, AbstractRangeRepr):
if hop.r_result.step != 0:
c_rng = hop.inputconst(Void, hop.r_result.RANGE)
+ hop.exception_is_here()
return hop.gendirectcall(hop.r_result.ll_newrange, c_rng, vstart, vstop)
else:
+ hop.exception_is_here()
return hop.gendirectcall(hop.r_result.ll_newrangest, vstart, vstop, vstep)
else:
# cannot build a RANGE object, needs a real list
@@ -117,6 +119,7 @@
if isinstance(ITEMTYPE, Ptr):
ITEMTYPE = ITEMTYPE.TO
cLIST = hop.inputconst(Void, ITEMTYPE)
+ hop.exception_is_here()
return hop.gendirectcall(ll_range2list, cLIST, vstart, vstop, vstep)
rtype_builtin_xrange = rtype_builtin_range
@@ -212,4 +215,5 @@
[v_index, v_item])
def rtype_builtin_enumerate(hop):
+ hop.exception_cannot_occur()
return hop.r_result.r_baseiter.newiter(hop)
diff --git a/pypy/rpython/rstr.py b/pypy/rpython/rstr.py
--- a/pypy/rpython/rstr.py
+++ b/pypy/rpython/rstr.py
@@ -288,6 +288,8 @@
def rtype_unicode(self, hop):
if hop.args_s[0].is_constant():
+ # convertion errors occur during annotation, so cannot any more:
+ hop.exception_cannot_occur()
return hop.inputconst(hop.r_result, hop.s_result.const)
repr = hop.args_r[0].repr
v_str = hop.inputarg(repr, 0)
diff --git a/pypy/rpython/rtyper.py b/pypy/rpython/rtyper.py
--- a/pypy/rpython/rtyper.py
+++ b/pypy/rpython/rtyper.py
@@ -846,6 +846,7 @@
return result
def exception_is_here(self):
+ self.llops._called_exception_is_here_or_cannot_occur = True
if self.llops.llop_raising_exceptions is not None:
raise TyperError("cannot catch an exception at more than one llop")
if not self.exceptionlinks:
@@ -861,6 +862,7 @@
self.llops.llop_raising_exceptions = len(self.llops)
def exception_cannot_occur(self):
+ self.llops._called_exception_is_here_or_cannot_occur = True
if self.llops.llop_raising_exceptions is not None:
raise TyperError("cannot catch an exception at more than one llop")
if not self.exceptionlinks:
diff --git a/pypy/rpython/test/test_extregistry.py b/pypy/rpython/test/test_extregistry.py
--- a/pypy/rpython/test/test_extregistry.py
+++ b/pypy/rpython/test/test_extregistry.py
@@ -114,6 +114,7 @@
_about_ = dummy_func
s_result_annotation = annmodel.SomeInteger()
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
return hop.inputconst(lltype.Signed, 42)
def func():
diff --git a/pypy/rpython/test/test_rclass.py b/pypy/rpython/test/test_rclass.py
--- a/pypy/rpython/test/test_rclass.py
+++ b/pypy/rpython/test/test_rclass.py
@@ -1085,6 +1085,7 @@
return annmodel.SomeInteger()
def specialize_call(self, hop):
[v_instance] = hop.inputargs(*hop.args_r)
+ hop.exception_is_here()
return hop.gendirectcall(ll_my_gethash, v_instance)
def f(n):
diff --git a/pypy/rpython/test/test_rstr.py b/pypy/rpython/test/test_rstr.py
--- a/pypy/rpython/test/test_rstr.py
+++ b/pypy/rpython/test/test_rstr.py
@@ -477,7 +477,11 @@
s1 = s[:3]
s2 = s[3:]
s3 = s[3:10]
- return s1+s2 == s and s2+s1 == const('lohel') and s1+s3 == s
+ s4 = s[42:44]
+ return (s1+s2 == s and
+ s2+s1 == const('lohel') and
+ s1+s3 == s and
+ s4 == const(''))
res = self.interpret(fn, [0])
assert res
diff --git a/pypy/test_all.py b/pypy/test_all.py
old mode 100755
new mode 100644
--- a/pypy/test_all.py
+++ b/pypy/test_all.py
@@ -11,11 +11,12 @@
"""
import sys, os
-if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
- print >> sys.stderr, __doc__
- sys.exit(2)
if __name__ == '__main__':
+ if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.':
+ print >> sys.stderr, __doc__
+ sys.exit(2)
+
import tool.autopath
import pytest
import pytest_cov
diff --git a/pypy/tool/clean_old_branches.py b/pypy/tool/clean_old_branches.py
--- a/pypy/tool/clean_old_branches.py
+++ b/pypy/tool/clean_old_branches.py
@@ -38,7 +38,7 @@
closed_heads.reverse()
for head, branch in closed_heads:
- print '\t', branch
+ print '\t', head, '\t', branch
print
print 'The branches listed above will be merged to "closed-branches".'
print 'You need to run this script in a clean working copy where you'
diff --git a/pypy/tool/jitlogparser/test/__init__.py b/pypy/tool/jitlogparser/test/__init__.py
new file mode 100644
diff --git a/pypy/tool/pytest/test/test_pytestsupport.py b/pypy/tool/pytest/test/test_pytestsupport.py
--- a/pypy/tool/pytest/test/test_pytestsupport.py
+++ b/pypy/tool/pytest/test/test_pytestsupport.py
@@ -165,7 +165,10 @@
def test_one(self): exec 'blow'
""")
- ev, = sorter.getreports("pytest_runtest_logreport")
+ reports = sorter.getreports("pytest_runtest_logreport")
+ setup, ev, teardown = reports
assert ev.failed
+ assert setup.passed
+ assert teardown.passed
assert 'NameError' in ev.longrepr.reprcrash.message
assert 'blow' in ev.longrepr.reprcrash.message
diff --git a/pypy/translator/c/gcc/test/test_asmgcroot.py b/pypy/translator/c/gcc/test/test_asmgcroot.py
--- a/pypy/translator/c/gcc/test/test_asmgcroot.py
+++ b/pypy/translator/c/gcc/test/test_asmgcroot.py
@@ -7,10 +7,17 @@
from pypy import conftest
from pypy.translator.tool.cbuild import ExternalCompilationInfo
from pypy.translator.platform import platform as compiler
+from pypy.rlib.rarithmetic import is_emulated_long
from pypy.rpython.lltypesystem import lltype, rffi
from pypy.rlib.entrypoint import entrypoint, secondary_entrypoints
from pypy.rpython.lltypesystem.lloperation import llop
+_MSVC = compiler.name == "msvc"
+_MINGW = compiler.name == "mingw32"
+_WIN32 = _MSVC or _MINGW
+_WIN64 = _WIN32 and is_emulated_long
+# XXX get rid of 'is_emulated_long' and have a real config here.
+
class AbstractTestAsmGCRoot:
# the asmgcroot gc transformer doesn't generate gc_reload_possibly_moved
# instructions:
@@ -18,8 +25,8 @@
@classmethod
def make_config(cls):
- if compiler.name == "msvc":
- py.test.skip("all asmgcroot tests disabled for MSVC")
+ if _MSVC and _WIN64:
+ py.test.skip("all asmgcroot tests disabled for MSVC X64")
from pypy.config.pypyoption import get_pypy_config
config = get_pypy_config(translating=True)
config.translation.gc = cls.gcpolicy
diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py
--- a/pypy/translator/c/gcc/trackgcroot.py
+++ b/pypy/translator/c/gcc/trackgcroot.py
@@ -847,6 +847,10 @@
if sources:
target, = sources
+ if target.endswith('@PLT'):
+ # In -fPIC mode, all functions calls have this suffix
+ target = target[:-4]
+
if target in self.FUNCTIONS_NOT_RETURNING:
return [InsnStop(target)]
if self.format == 'mingw32' and target == '__alloca':
@@ -1137,7 +1141,7 @@
r_jump_rel_label = re.compile(r"\tj\w+\s+"+"(\d+)f"+"\s*$")
r_unaryinsn_star= re.compile(r"\t[a-z]\w*\s+[*]("+OPERAND+")\s*$")
- r_jmptable_item = re.compile(r"\t.quad\t"+LABEL+"(-\"[A-Za-z0-9$]+\")?\s*$")
+ r_jmptable_item = re.compile(r"\t.(?:quad|long)\t"+LABEL+"(-\"[A-Za-z0-9$]+\"|-"+LABEL+")?\s*$")
r_jmptable_end = re.compile(r"\t.text|\t.section\s+.text|\t\.align|"+LABEL)
r_gcroot_marker = re.compile(r"\t/[*] GCROOT ("+LOCALVARFP+") [*]/")
diff --git a/pypy/translator/c/src/float.h b/pypy/translator/c/src/float.h
--- a/pypy/translator/c/src/float.h
+++ b/pypy/translator/c/src/float.h
@@ -43,5 +43,6 @@
#define OP_CAST_FLOAT_TO_LONGLONG(x,r) r = (long long)(x)
#define OP_CAST_FLOAT_TO_ULONGLONG(x,r) r = (unsigned long long)(x)
#define OP_CONVERT_FLOAT_BYTES_TO_LONGLONG(x,r) memcpy(&r, &x, sizeof(double))
+#define OP_CONVERT_LONGLONG_BYTES_TO_FLOAT(x,r) memcpy(&r, &x, sizeof(long long))
#endif
diff --git a/pypy/translator/c/test/test_extfunc.py b/pypy/translator/c/test/test_extfunc.py
--- a/pypy/translator/c/test/test_extfunc.py
+++ b/pypy/translator/c/test/test_extfunc.py
@@ -919,4 +919,5 @@
t, cbuilder = self.compile(does_stuff)
data = cbuilder.cmdexec('')
res = os.nice(0) + 3
+ if res > 19: res = 19 # xxx Linux specific, probably
assert data.startswith('os.nice returned %d\n' % res)
diff --git a/pypy/translator/cli/dotnet.py b/pypy/translator/cli/dotnet.py
--- a/pypy/translator/cli/dotnet.py
+++ b/pypy/translator/cli/dotnet.py
@@ -459,6 +459,7 @@
def specialize_call(self, hop):
+ hop.exception_cannot_occur()
assert hop.args_s[1].is_constant()
TYPE = hop.args_s[1].const
v_obj = hop.inputarg(hop.args_r[0], arg=0)
@@ -507,6 +508,7 @@
def specialize_call(self, hop):
v_obj, = hop.inputargs(*hop.args_r)
+ hop.exception_cannot_occur()
return hop.genop('same_as', [v_obj], hop.r_result.lowleveltype)
def new_array(type, length):
@@ -608,6 +610,7 @@
def specialize_call(self, hop):
v_type, = hop.inputargs(*hop.args_r)
+ hop.exception_cannot_occur()
return hop.genop('cli_typeof', [v_type], hop.r_result.lowleveltype)
@@ -626,6 +629,7 @@
v_obj, = hop.inputargs(*hop.args_r)
methodname = hop.args_r[0].methodname
c_methodname = hop.inputconst(ootype.Void, methodname)
+ hop.exception_cannot_occur()
return hop.genop('cli_eventhandler', [v_obj, c_methodname], hop.r_result.lowleveltype)
@@ -647,6 +651,7 @@
def specialize_call(self, hop):
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('oodowncast', [v_inst], resulttype = hop.r_result.lowleveltype)
@@ -668,6 +673,7 @@
def specialize_call(self, hop):
assert isinstance(hop.args_s[0], annmodel.SomeOOInstance)
v_inst = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('ooupcast', [v_inst], resulttype = hop.r_result.lowleveltype)
@@ -701,6 +707,7 @@
def specialize_call(self, hop):
v_obj = hop.inputarg(hop.args_r[0], arg=0)
+ hop.exception_cannot_occur()
return hop.genop('oodowncast', [v_obj], hop.r_result.lowleveltype)
diff --git a/pypy/translator/jvm/opcodes.py b/pypy/translator/jvm/opcodes.py
--- a/pypy/translator/jvm/opcodes.py
+++ b/pypy/translator/jvm/opcodes.py
@@ -243,4 +243,5 @@
'force_cast': [PushAllArgs, CastPrimitive, StoreResult],
'convert_float_bytes_to_longlong': jvm.PYPYDOUBLEBYTESTOLONG,
+ 'convert_longlong_bytes_to_float': jvm.PYPYLONGBYTESTODOUBLE,
})
diff --git a/pypy/translator/jvm/typesystem.py b/pypy/translator/jvm/typesystem.py
--- a/pypy/translator/jvm/typesystem.py
+++ b/pypy/translator/jvm/typesystem.py
@@ -942,6 +942,7 @@
PYPYULONGTODOUBLE = Method.s(jPyPy, 'ulong_to_double', (jLong,), jDouble)
PYPYLONGBITWISENEGATE = Method.v(jPyPy, 'long_bitwise_negate', (jLong,), jLong)
PYPYDOUBLEBYTESTOLONG = Method.v(jPyPy, 'pypy__float2longlong', (jDouble,), jLong)
+PYPYLONGBYTESTODOUBLE = Method.v(jPyPy, 'pypy__longlong2float', (jLong,), jDouble)
PYPYSTRTOINT = Method.v(jPyPy, 'str_to_int', (jString,), jInt)
PYPYSTRTOUINT = Method.v(jPyPy, 'str_to_uint', (jString,), jInt)
PYPYSTRTOLONG = Method.v(jPyPy, 'str_to_long', (jString,), jLong)
diff --git a/testrunner/runner.py b/testrunner/runner.py
--- a/testrunner/runner.py
+++ b/testrunner/runner.py
@@ -110,7 +110,10 @@
do_dry_run=False, timeout=None,
_win32=(sys.platform=='win32')):
args = interp + test_driver
- args += ['-p', 'resultlog', '--resultlog=%s' % logfname, test]
+ args += ['-p', 'resultlog',
+ '--resultlog=%s' % logfname,
+ '--junitxml=%s.junit' % logfname,
+ test]
args = map(str, args)
interp0 = args[0]
diff --git a/testrunner/scratchbox_runner.py b/testrunner/scratchbox_runner.py
--- a/testrunner/scratchbox_runner.py
+++ b/testrunner/scratchbox_runner.py
@@ -14,14 +14,14 @@
def dry_run_scratchbox(args, cwd, out, timeout=None):
return dry_run(args_for_scratchbox(cwd, args), cwd, out, timeout)
-import runner
-# XXX hack hack hack
-dry_run = runner.dry_run
-run = runner.run
+if __name__ == '__main__':
+ import runner
+ # XXX hack hack hack
+ dry_run = runner.dry_run
+ run = runner.run
-runner.dry_run = dry_run_scratchbox
-runner.run = run_scratchbox
+ runner.dry_run = dry_run_scratchbox
+ runner.run = run_scratchbox
-if __name__ == '__main__':
import sys
runner.main(sys.argv)
diff --git a/testrunner/test/conftest.py b/testrunner/test/conftest.py
new file mode 100644
--- /dev/null
+++ b/testrunner/test/conftest.py
@@ -0,0 +1,6 @@
+
+def pytest_runtest_makereport(__multicall__, item):
+ report = __multicall__.execute()
+ if 'out' in item.funcargs:
+ report.sections.append(('out', item.funcargs['out'].read()))
+ return report
diff --git a/testrunner/test/test_runner.py b/testrunner/test/test_runner.py
--- a/testrunner/test/test_runner.py
+++ b/testrunner/test/test_runner.py
@@ -53,49 +53,44 @@
assert not should_report_failure("F Def\n. Ghi\n. Jkl\n")
+
class TestRunHelper(object):
+ def pytest_funcarg__out(self, request):
+ tmpdir = request.getfuncargvalue('tmpdir')
+ return tmpdir.ensure('out')
- def setup_method(self, meth):
- h, self.fn = tempfile.mkstemp()
- os.close(h)
+ def test_run(self, out):
+ res = runner.run([sys.executable, "-c", "print 42"], '.', out)
+ assert res == 0
+ assert out.read() == "42\n"
- def teardown_method(self, meth):
- os.unlink(self.fn)
-
- def test_run(self):
- res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn))
- assert res == 0
- out = py.path.local(self.fn).read('r')
- assert out == "42\n"
-
- def test_error(self):
- res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', py.path.local(self.fn))
+ def test_error(self, out):
+ res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', out)
assert res == 3
- def test_signal(self):
+ def test_signal(self, out):
if sys.platform == 'win32':
py.test.skip("no death by signal on windows")
- res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', py.path.local(self.fn))
+ res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', out)
assert res == -9
- def test_timeout(self):
- res = runner.run([sys.executable, "-c", "while True: pass"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout(self, out):
+ res = runner.run([sys.executable, "-c", "while True: pass"], '.', out, timeout=3)
assert res == -999
- def test_timeout_lock(self):
- res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_lock(self, out):
+ res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', out, timeout=3)
assert res == -999
- def test_timeout_syscall(self):
- res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', py.path.local(self.fn), timeout=3)
+ def test_timeout_syscall(self, out):
+ res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', out, timeout=3)
assert res == -999
- def test_timeout_success(self):
+ def test_timeout_success(self, out):
res = runner.run([sys.executable, "-c", "print 42"], '.',
- py.path.local(self.fn), timeout=2)
+ out, timeout=2)
assert res == 0
- out = py.path.local(self.fn).read('r')
+ out = out.read()
assert out == "42\n"
@@ -122,7 +117,10 @@
expected = ['INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
+
'test_one']
assert self.called == (expected, '/wd', 'out', 'secs')
@@ -138,9 +136,11 @@
expected = ['/wd' + os.sep + './INTERP', 'IARG',
'driver', 'darg',
+ '-p', 'resultlog',
'--resultlog=LOGFILE',
+ '--junitxml=LOGFILE.junit',
'test_one']
-
+ assert self.called[0] == expected
assert self.called == (expected, '/wd', 'out', 'secs')
assert res == 0
@@ -251,7 +251,7 @@
assert '\n' in log
log_lines = log.splitlines()
- assert log_lines[0] == ". test_normal/test_example.py:test_one"
+ assert ". test_normal/test_example.py::test_one" in log_lines
nfailures = 0
noutcomes = 0
for line in log_lines:
From noreply at buildbot.pypy.org Fri Apr 13 11:00:54 2012
From: noreply at buildbot.pypy.org (bivab)
Date: Fri, 13 Apr 2012 11:00:54 +0200 (CEST)
Subject: [pypy-commit] pypy arm-backend-2: implement
convert_longlong_bytes_to_float
Message-ID: <20120413090054.7976C82F4E@wyvern.cs.uni-duesseldorf.de>
Author: David Schneider
Branch: arm-backend-2
Changeset: r54327:ad3a31a28410
Date: 2012-04-13 08:03 +0000
http://bitbucket.org/pypy/pypy/changeset/ad3a31a28410/
Log: implement convert_longlong_bytes_to_float
diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py
--- a/pypy/jit/backend/arm/opassembler.py
+++ b/pypy/jit/backend/arm/opassembler.py
@@ -1299,6 +1299,7 @@
return fcond
emit_op_convert_float_bytes_to_longlong = gen_emit_unary_float_op('float_bytes_to_longlong', 'VMOV_cc')
+ emit_op_convert_longlong_bytes_to_float = gen_emit_unary_float_op('longlong_bytes_to_float', 'VMOV_cc')
def emit_op_read_timestamp(self, op, arglocs, regalloc, fcond):
tmp = arglocs[0]
diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py
--- a/pypy/jit/backend/arm/regalloc.py
+++ b/pypy/jit/backend/arm/regalloc.py
@@ -1206,6 +1206,8 @@
prepare_op_convert_float_bytes_to_longlong = prepare_float_op(base=False,
name='prepare_op_convert_float_bytes_to_longlong')
+ prepare_op_convert_longlong_bytes_to_float = prepare_float_op(base=False,
+ name='prepare_op_convert_longlong_bytes_to_float')
def prepare_op_read_timestamp(self, op, fcond):
loc = self.get_scratch_reg(INT)
From noreply at buildbot.pypy.org Fri Apr 13 11:00:55 2012
From: noreply at buildbot.pypy.org (bivab)
Date: Fri, 13 Apr 2012 11:00:55 +0200 (CEST)
Subject: [pypy-commit] pypy arm-backend-2: fix math_sqrt
Message-ID: <20120413090055.C648182F4E@wyvern.cs.uni-duesseldorf.de>
Author: David Schneider
Branch: arm-backend-2
Changeset: r54328:6bac78dc2ff5
Date: 2012-04-13 08:06 +0000
http://bitbucket.org/pypy/pypy/changeset/6bac78dc2ff5/
Log: fix math_sqrt
diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py
--- a/pypy/jit/backend/arm/assembler.py
+++ b/pypy/jit/backend/arm/assembler.py
@@ -934,7 +934,7 @@
def regalloc_emit_math(self, op, arglocs, fcond, regalloc):
effectinfo = op.getdescr().get_extra_info()
oopspecindex = effectinfo.oopspecindex
- asm_math_operations[oopspecindex](self, op, arglocs, resloc)
+ asm_math_operations[oopspecindex](self, op, arglocs, regalloc, fcond)
return fcond
From noreply at buildbot.pypy.org Fri Apr 13 12:52:59 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 12:52:59 +0200 (CEST)
Subject: [pypy-commit] pypy pytest: apply patch from upstream that fixes
armins py25 issue
Message-ID: <20120413105259.E012582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: pytest
Changeset: r54329:9b3c28e1af6c
Date: 2012-04-13 12:46 +0200
http://bitbucket.org/pypy/pypy/changeset/9b3c28e1af6c/
Log: apply patch from upstream that fixes armins py25 issue
diff --git a/_pytest/assertion/oldinterpret.py b/_pytest/assertion/oldinterpret.py
--- a/_pytest/assertion/oldinterpret.py
+++ b/_pytest/assertion/oldinterpret.py
@@ -1,8 +1,7 @@
import py
import sys, inspect
from compiler import parse, ast, pycodegen
-from _pytest.assertion.util import format_explanation
-from _pytest.assertion.reinterpret import BuiltinAssertionError
+from _pytest.assertion.util import format_explanation, BuiltinAssertionError
passthroughex = py.builtin._sysex
diff --git a/_pytest/assertion/reinterpret.py b/_pytest/assertion/reinterpret.py
--- a/_pytest/assertion/reinterpret.py
+++ b/_pytest/assertion/reinterpret.py
@@ -1,7 +1,6 @@
import sys
import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
+from _pytest.assertion.util import BuiltinAssertionError
class AssertionError(BuiltinAssertionError):
def __init__(self, *args):
diff --git a/_pytest/assertion/util.py b/_pytest/assertion/util.py
--- a/_pytest/assertion/util.py
+++ b/_pytest/assertion/util.py
@@ -2,6 +2,7 @@
import py
+BuiltinAssertionError = py.builtin.builtins.AssertionError
# The _reprcompare attribute on the util module is used by the new assertion
# interpretation code and assertion rewriter to detect this plugin was
From noreply at buildbot.pypy.org Fri Apr 13 12:53:01 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 12:53:01 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge from pytest
Message-ID: <20120413105301.400F082F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch:
Changeset: r54330:8bafd99b776b
Date: 2012-04-13 12:47 +0200
http://bitbucket.org/pypy/pypy/changeset/8bafd99b776b/
Log: merge from pytest
diff --git a/_pytest/assertion/oldinterpret.py b/_pytest/assertion/oldinterpret.py
--- a/_pytest/assertion/oldinterpret.py
+++ b/_pytest/assertion/oldinterpret.py
@@ -1,8 +1,7 @@
import py
import sys, inspect
from compiler import parse, ast, pycodegen
-from _pytest.assertion.util import format_explanation
-from _pytest.assertion.reinterpret import BuiltinAssertionError
+from _pytest.assertion.util import format_explanation, BuiltinAssertionError
passthroughex = py.builtin._sysex
diff --git a/_pytest/assertion/reinterpret.py b/_pytest/assertion/reinterpret.py
--- a/_pytest/assertion/reinterpret.py
+++ b/_pytest/assertion/reinterpret.py
@@ -1,7 +1,6 @@
import sys
import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
+from _pytest.assertion.util import BuiltinAssertionError
class AssertionError(BuiltinAssertionError):
def __init__(self, *args):
diff --git a/_pytest/assertion/util.py b/_pytest/assertion/util.py
--- a/_pytest/assertion/util.py
+++ b/_pytest/assertion/util.py
@@ -2,6 +2,7 @@
import py
+BuiltinAssertionError = py.builtin.builtins.AssertionError
# The _reprcompare attribute on the util module is used by the new assertion
# interpretation code and assertion rewriter to detect this plugin was
From noreply at buildbot.pypy.org Fri Apr 13 12:53:02 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 12:53:02 +0200 (CEST)
Subject: [pypy-commit] pypy default: merge upstream
Message-ID: <20120413105302.CEE1D82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch:
Changeset: r54331:f7ddf20f141f
Date: 2012-04-13 12:52 +0200
http://bitbucket.org/pypy/pypy/changeset/f7ddf20f141f/
Log: merge upstream
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -3746,9 +3746,9 @@
return g(i)
def main(i):
if i == 2:
- return f(i)
+ return f(2)
elif i == 3:
- return f(i)
+ return f(3)
else:
raise NotImplementedError
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -169,9 +169,11 @@
def _combine_starstarargs_wrapped(self, w_starstararg):
# unpack the ** arguments
space = self.space
+ keywords, values_w = space.view_as_kwargs(w_starstararg)
+ if keywords is not None: # this path also taken for empty dicts
+ self._add_keywordargs_no_unwrapping(keywords, values_w)
+ return not jit.isconstant(len(self.keywords))
if space.isinstance_w(w_starstararg, space.w_dict):
- if not space.is_true(w_starstararg):
- return False # don't call unpackiterable - it's jit-opaque
keys_w = space.unpackiterable(w_starstararg)
else:
try:
@@ -186,11 +188,8 @@
"a mapping, not %s" % (typename,)))
raise
keys_w = space.unpackiterable(w_keys)
- if keys_w:
- self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
- return True
- else:
- return False # empty dict; don't disable the JIT
+ self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
+ return True
def _do_combine_starstarargs_wrapped(self, keys_w, w_starstararg):
space = self.space
@@ -227,6 +226,26 @@
self.keywords_w = self.keywords_w + keywords_w
self.keyword_names_w = keys_w
+ @jit.look_inside_iff(lambda self, keywords, keywords_w:
+ jit.isconstant(len(keywords) and
+ jit.isconstant(self.keywords)))
+ def _add_keywordargs_no_unwrapping(self, keywords, keywords_w):
+ if self.keywords is None:
+ self.keywords = keywords[:] # copy to make non-resizable
+ self.keywords_w = keywords_w[:]
+ else:
+ # looks quadratic, but the JIT should remove all of it nicely.
+ # Also, all the lists should be small
+ for key in keywords:
+ for otherkey in self.keywords:
+ if otherkey == key:
+ raise operationerrfmt(self.space.w_TypeError,
+ "got multiple values "
+ "for keyword argument "
+ "'%s'", key)
+ self.keywords = self.keywords + keywords
+ self.keywords_w = self.keywords_w + keywords_w
+
def fixedunpack(self, argcount):
"""The simplest argument parsing: get the 'argcount' arguments,
or raise a real ValueError if the length is wrong."""
@@ -385,7 +404,7 @@
# collect extra keyword arguments into the **kwarg
if has_kwarg:
- w_kwds = self.space.newdict()
+ w_kwds = self.space.newdict(kwargs=True)
if num_remainingkwds:
#
limit = len(keywords)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -914,6 +914,12 @@
"""
return None
+ def view_as_kwargs(self, w_dict):
+ """ if w_dict is a kwargs-dict, return two lists, one of unwrapped
+ strings and one of wrapped values. otherwise return (None, None)
+ """
+ return (None, None)
+
def newlist_str(self, list_s):
return self.newlist([self.wrap(s) for s in list_s])
diff --git a/pypy/interpreter/test/test_argument.py b/pypy/interpreter/test/test_argument.py
--- a/pypy/interpreter/test/test_argument.py
+++ b/pypy/interpreter/test/test_argument.py
@@ -75,7 +75,10 @@
def unpackiterable(self, it):
return list(it)
- def newdict(self):
+ def view_as_kwargs(self, x):
+ return None, None
+
+ def newdict(self, kwargs=False):
return {}
def newlist(self, l=[]):
@@ -488,6 +491,57 @@
assert len(l) == 1
assert l[0] == space.wrap(5)
+ def test_starstarargs_special(self):
+ class kwargs(object):
+ def __init__(self, k, v):
+ self.k = k
+ self.v = v
+ class MyDummySpace(DummySpace):
+ def view_as_kwargs(self, kw):
+ if isinstance(kw, kwargs):
+ return kw.k, kw.v
+ return None, None
+ space = MyDummySpace()
+ for i in range(3):
+ kwds = [("c", 3)]
+ kwds_w = dict(kwds[:i])
+ keywords = kwds_w.keys()
+ keywords_w = kwds_w.values()
+ rest = dict(kwds[i:])
+ w_kwds = kwargs(rest.keys(), rest.values())
+ if i == 2:
+ w_kwds = None
+ assert len(keywords) == len(keywords_w)
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c"]), defaults_w=[4])
+ assert l == [1, 2, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults_w=[4, 5])
+ assert l == [1, 2, 4, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults_w=[4, 5])
+ assert l == [1, 2, 3, 5]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["c", "b", "a", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["a", "b", "c1", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b"], None, "**"))
+ assert l == [1, 2, {'c': 3}]
+ excinfo = py.test.raises(OperationError, Arguments, space, [], ["a"],
+ [1], w_starstararg=kwargs(["a"], [2]))
+ assert excinfo.value.w_type is TypeError
+
+
+
class TestErrorHandling(object):
def test_missing_args(self):
# got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg,
diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py
--- a/pypy/module/pypyjit/test_pypy_c/test_call.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_call.py
@@ -244,6 +244,7 @@
print guards
assert len(guards) <= 20
+
def test_stararg_virtual(self):
def main(x):
def g(*args):
@@ -486,3 +487,38 @@
--TICK--
jump(..., descr=...)
""")
+
+ def test_kwargs_virtual2(self):
+ log = self.run("""
+ def f(*args, **kwargs):
+ kwargs['a'] = kwargs['z'] * 0
+ return g(1, *args, **kwargs)
+
+ def g(x, y, z=2, a=1):
+ return x - y + z + a
+
+ def main(stop):
+ res = 0
+ i = 0
+ while i < stop:
+ res = f(res, z=i) # ID: call
+ i += 1
+ return res""", [1000])
+ assert log.result == 500
+ loop, = log.loops_by_id('call')
+ print loop.ops_by_id('call')
+ assert loop.match("""
+ i65 = int_lt(i58, i29)
+ guard_true(i65, descr=...)
+ guard_not_invalidated(..., descr=...)
+ i66 = force_token()
+ i67 = force_token()
+ i69 = int_sub_ovf(1, i56)
+ guard_no_overflow(..., descr=...)
+ i70 = int_add_ovf(i69, i58)
+ guard_no_overflow(..., descr=...)
+ i71 = int_add(i58, 1)
+ --TICK--
+ jump(..., descr=...)
+ """)
+
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -110,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,7 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, strdict=False):
+ instance=False, strdict=False, kwargs=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -46,11 +46,15 @@
assert w_type is None
strategy = space.fromcache(StringDictStrategy)
+ elif kwargs:
+ assert w_type is None
+ from pypy.objspace.std.kwargsdict import KwargsDictStrategy
+ strategy = space.fromcache(KwargsDictStrategy)
else:
strategy = space.fromcache(EmptyDictStrategy)
-
if w_type is None:
w_type = space.w_dict
+
storage = strategy.get_empty_storage()
w_self = space.allocate_instance(W_DictMultiObject, w_type)
W_DictMultiObject.__init__(w_self, space, strategy, storage)
@@ -91,7 +95,8 @@
getitem_str delitem length \
clear w_keys values \
items iter setdefault \
- popitem listview_str listview_int".split()
+ popitem listview_str listview_int \
+ view_as_kwargs".split()
def make_method(method):
def f(self, *args):
@@ -165,6 +170,9 @@
def listview_int(self, w_dict):
return None
+ def view_as_kwargs(self, w_dict):
+ return (None, None)
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -254,6 +262,9 @@
def popitem(self, w_dict):
raise KeyError
+ def view_as_kwargs(self, w_dict):
+ return ([], [])
+
registerimplementation(W_DictMultiObject)
# DictImplementation lattice
diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/kwargsdict.py
@@ -0,0 +1,165 @@
+## ----------------------------------------------------------------------------
+## dict strategy (see dictmultiobject.py)
+
+from pypy.rlib import rerased, jit
+from pypy.objspace.std.dictmultiobject import (DictStrategy,
+ IteratorImplementation,
+ ObjectDictStrategy,
+ StringDictStrategy)
+
+
+class KwargsDictStrategy(DictStrategy):
+ erase, unerase = rerased.new_erasing_pair("kwargsdict")
+ erase = staticmethod(erase)
+ unerase = staticmethod(unerase)
+
+ def wrap(self, key):
+ return self.space.wrap(key)
+
+ def unwrap(self, wrapped):
+ return self.space.str_w(wrapped)
+
+ def get_empty_storage(self):
+ d = ([], [])
+ return self.erase(d)
+
+ def is_correct_type(self, w_obj):
+ space = self.space
+ return space.is_w(space.type(w_obj), space.w_str)
+
+ def _never_equal_to(self, w_lookup_type):
+ return False
+
+ def iter(self, w_dict):
+ return KwargsDictIterator(self.space, self, w_dict)
+
+ def w_keys(self, w_dict):
+ return self.space.newlist([self.space.wrap(key) for key in self.unerase(w_dict.dstorage)[0]])
+
+ def setitem(self, w_dict, w_key, w_value):
+ space = self.space
+ if self.is_correct_type(w_key):
+ self.setitem_str(w_dict, self.unwrap(w_key), w_value)
+ return
+ else:
+ self.switch_to_object_strategy(w_dict)
+ w_dict.setitem(w_key, w_value)
+
+ def setitem_str(self, w_dict, key, w_value):
+ self._setitem_str_indirection(w_dict, key, w_value)
+
+ @jit.look_inside_iff(lambda self, w_dict, key, w_value:
+ jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _setitem_str_indirection(self, w_dict, key, w_value):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ values_w[i] = w_value
+ break
+ else:
+ # limit the size so that the linear searches don't become too long
+ if len(keys) >= 16:
+ self.switch_to_string_strategy(w_dict)
+ w_dict.setitem_str(key, w_value)
+ else:
+ keys.append(key)
+ values_w.append(w_value)
+
+ def setdefault(self, w_dict, w_key, w_default):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.setdefault(w_key, w_default)
+
+ def delitem(self, w_dict, w_key):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.delitem(w_key)
+
+ def length(self, w_dict):
+ return len(self.unerase(w_dict.dstorage)[0])
+
+ def getitem_str(self, w_dict, key):
+ return self._getitem_str_indirection(w_dict, key)
+
+ @jit.look_inside_iff(lambda self, w_dict, key: jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _getitem_str_indirection(self, w_dict, key):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ return values_w[i]
+ return None
+
+ def getitem(self, w_dict, w_key):
+ space = self.space
+ if self.is_correct_type(w_key):
+ return self.getitem_str(w_dict, self.unwrap(w_key))
+ elif self._never_equal_to(space.type(w_key)):
+ return None
+ else:
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.getitem(w_key)
+
+ def w_keys(self, w_dict):
+ l = self.unerase(w_dict.dstorage)[0]
+ return self.space.newlist_str(l[:])
+
+ def values(self, w_dict):
+ return self.unerase(w_dict.dstorage)[1][:] # to make non-resizable
+
+ def items(self, w_dict):
+ space = self.space
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ result.append(space.newtuple([self.wrap(keys[i]), values_w[i]]))
+ return result
+
+ def popitem(self, w_dict):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ key = keys.pop()
+ w_value = values_w.pop()
+ return (self.wrap(key), w_value)
+
+ def clear(self, w_dict):
+ w_dict.dstorage = self.get_empty_storage()
+
+ def switch_to_object_strategy(self, w_dict):
+ strategy = self.space.fromcache(ObjectDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ d_new = strategy.unerase(strategy.get_empty_storage())
+ for i in range(len(keys)):
+ d_new[self.wrap(keys[i])] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = strategy.erase(d_new)
+
+ def switch_to_string_strategy(self, w_dict):
+ strategy = self.space.fromcache(StringDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ storage = strategy.get_empty_storage()
+ d_new = strategy.unerase(storage)
+ for i in range(len(keys)):
+ d_new[keys[i]] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = storage
+
+ def view_as_kwargs(self, w_dict):
+ return self.unerase(w_dict.dstorage)
+
+
+class KwargsDictIterator(IteratorImplementation):
+ def __init__(self, space, strategy, dictimplementation):
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
+ keys, values_w = strategy.unerase(self.dictimplementation.dstorage)
+ self.iterator = iter(range(len(keys)))
+ # XXX this potentially leaks
+ self.keys = keys
+ self.values_w = values_w
+
+ def next_entry(self):
+ # note that this 'for' loop only runs once, at most
+ for i in self.iterator:
+ return self.space.wrap(self.keys[i]), self.values_w[i]
+ else:
+ return None, None
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,11 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- strdict=strdict)
+ strdict=strdict, kwargs=kwargs)
def newset(self):
from pypy.objspace.std.setobject import newset
@@ -472,6 +472,11 @@
return w_obj.getitems_int()
return None
+ def view_as_kwargs(self, w_dict):
+ if type(w_dict) is W_DictMultiObject:
+ return w_dict.view_as_kwargs()
+ return (None, None)
+
def _uses_list_iter(self, w_obj):
from pypy.objspace.descroperation import list_iter
return self.lookup(w_obj, '__iter__') is list_iter(self)
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -0,0 +1,120 @@
+import py
+from pypy.conftest import gettestobjspace, option
+from pypy.objspace.std.test.test_dictmultiobject import FakeSpace, W_DictMultiObject
+from pypy.objspace.std.kwargsdict import *
+
+space = FakeSpace()
+strategy = KwargsDictStrategy(space)
+
+def test_create():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem(space.wrap("a")) == 1
+ assert d.getitem(space.wrap("b")) == 2
+ assert d.getitem(space.wrap("c")) == 3
+ assert d.w_keys() == keys
+ assert d.values() == values
+
+def test_set_existing():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("a", 4) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("b", 5) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("c", 6) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 6
+ assert d.getitem(space.wrap("a")) == 4
+ assert d.getitem(space.wrap("b")) == 5
+ assert d.getitem(space.wrap("c")) == 6
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c"]
+ assert values == [4, 5, 6]
+
+
+def test_set_new():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") is None
+ assert d.setitem_str("d", 4) is None
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") == 4
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c", "d"]
+ assert values == [1, 2, 3, 4]
+
+def test_limit_size():
+ storage = strategy.get_empty_storage()
+ d = W_DictMultiObject(space, strategy, storage)
+ for i in range(100):
+ assert d.setitem_str("d%s" % i, 4) is None
+ assert d.strategy is not strategy
+ assert "StringDictStrategy" == d.strategy.__class__.__name__
+
+def test_keys_doesnt_wrap():
+ space = FakeSpace()
+ space.newlist = None
+ strategy = KwargsDictStrategy(space)
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ w_l = d.w_keys() # does not crash
+
+
+from pypy.objspace.std.test.test_dictmultiobject import BaseTestRDictImplementation, BaseTestDevolvedDictImplementation
+def get_impl(self):
+ storage = strategy.erase(([], []))
+ return W_DictMultiObject(space, strategy, storage)
+class TestKwargsDictImplementation(BaseTestRDictImplementation):
+ StrategyClass = KwargsDictStrategy
+ get_impl = get_impl
+ def test_delitem(self):
+ pass # delitem devolves for now
+
+class TestDevolvedKwargsDictImplementation(BaseTestDevolvedDictImplementation):
+ get_impl = get_impl
+ StrategyClass = KwargsDictStrategy
+
+
+class AppTestKwargsDictStrategy(object):
+ def setup_class(cls):
+ if option.runappdirect:
+ py.test.skip("__repr__ doesn't work on appdirect")
+
+ def w_get_strategy(self, obj):
+ import __pypy__
+ r = __pypy__.internal_repr(obj)
+ return r[r.find("(") + 1: r.find(")")]
+
+ def test_create(self):
+ def f(**args):
+ return args
+ d = f(a=1)
+ assert "KwargsDictStrategy" in self.get_strategy(d)
+
From noreply at buildbot.pypy.org Fri Apr 13 16:06:26 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 13 Apr 2012 16:06:26 +0200 (CEST)
Subject: [pypy-commit] pypy stm-gc: Intermediate check-in.
Message-ID: <20120413140626.33D6982F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stm-gc
Changeset: r54332:2abd27c473de
Date: 2012-04-13 15:21 +0200
http://bitbucket.org/pypy/pypy/changeset/2abd27c473de/
Log: Intermediate check-in.
diff --git a/pypy/rpython/memory/gc/stmgc.py b/pypy/rpython/memory/gc/stmgc.py
--- a/pypy/rpython/memory/gc/stmgc.py
+++ b/pypy/rpython/memory/gc/stmgc.py
@@ -33,7 +33,10 @@
# - The LOCAL objects might be YOUNG or OLD depending on whether they
# already survived a collection. YOUNG LOCAL objects are either in
# the nursery or, if they are big, raw-malloced. OLD LOCAL objects
-# are in the shared area.
+# are in the shared area. Getting the write barrier right for both
+# this and the general STM mechanisms is tricky, so for now this GC
+# is not actually generational (slow when running long transactions
+# or before running transactions at all).
#
GCFLAG_GLOBAL = first_gcflag << 0 # keep in sync with et.c
GCFLAG_WAS_COPIED = first_gcflag << 1 # keep in sync with et.c
@@ -65,7 +68,7 @@
TRANSLATION_PARAMS = {
'stm_operations': 'use_real_one',
- 'nursery_size': 4*1024*1024, # 4 MB
+ 'nursery_size': 32*1024*1024, # 32 MB
"page_size": 1024*WORD, # copied from minimark.py
"arena_size": 65536*WORD, # copied from minimark.py
@@ -120,32 +123,20 @@
self.main_thread_tls = StmGCTLS(self, in_main_thread=True)
self.main_thread_tls.start_transaction()
+ @always_inline
def get_tls(self):
from pypy.rpython.memory.gc.stmtls import StmGCTLS
tls = self.stm_operations.get_tls()
return StmGCTLS.cast_address_to_tls_object(tls)
+ def enter_transactional_mode(self):
+ self.main_thread_tls.enter_transactional_mode()
+
+ def leave_transactional_mode(self):
+ self.main_thread_tls.leave_transactional_mode()
+
# ----------
- @always_inline
- def allocate_bump_pointer(self, size):
- tls = self.collector.get_tls()
- free = tls.nursery_free
- top = tls.nursery_top
- if (top - free) < llmemory.raw_malloc_usage(size):
- free = self.local_collection(size)
- tls.nursery_free = free + size
- return free
-
- @dont_inline
- def local_collection(self, size):
- tls = self.collector.get_tls()
- if not tls.nursery_free:
- fatalerror("malloc in a non-main thread but outside a transaction")
- #...
- xxxxxxxxx
-
-
def malloc_fixedsize_clear(self, typeid, size,
needs_finalizer=False,
is_finalizer_light=False,
@@ -159,7 +150,7 @@
# Get the memory from the nursery.
size_gc_header = self.gcheaderbuilder.size_gc_header
totalsize = size_gc_header + size
- result = self.allocate_bump_pointer(totalsize)
+ result = self.get_tls().allocate_bump_pointer(totalsize)
#
# Build the object.
llarena.arena_reserve(result, totalsize)
@@ -180,7 +171,7 @@
nonvarsize = size_gc_header + size
totalsize = nonvarsize + itemsize * length
totalsize = llarena.round_up_for_allocation(totalsize)
- result = self.allocate_bump_pointer(totalsize)
+ result = self.get_tls().allocate_bump_pointer(totalsize)
llarena.arena_reserve(result, totalsize)
obj = result + size_gc_header
self.init_gc_object(result, typeid, flags=0)
@@ -315,7 +306,7 @@
def _stm_write_barrier_global(obj):
if not stm_operations.in_transaction():
return obj
- # we need to find of make a local copy
+ # we need to find or make a local copy
hdr = self.header(obj)
if hdr.tid & GCFLAG_WAS_COPIED == 0:
# in this case, we are sure that we don't have a copy
diff --git a/pypy/rpython/memory/gc/stmtls.py b/pypy/rpython/memory/gc/stmtls.py
--- a/pypy/rpython/memory/gc/stmtls.py
+++ b/pypy/rpython/memory/gc/stmtls.py
@@ -3,9 +3,11 @@
from pypy.rpython.annlowlevel import cast_base_ptr_to_instance, base_ptr_lltype
from pypy.rlib.objectmodel import we_are_translated, free_non_gc_object
from pypy.rlib.rarithmetic import r_uint
-from pypy.rlib.debug import ll_assert
+from pypy.rlib.debug import ll_assert, debug_start, debug_stop, fatalerror
from pypy.rpython.memory.gc.stmgc import WORD, NULL
+from pypy.rpython.memory.gc.stmgc import always_inline, dont_inline
+from pypy.rpython.memory.gc.stmgc import GCFLAG_GLOBAL
class StmGCTLS(object):
@@ -34,21 +36,18 @@
self.nursery_size = self.gc.nursery_size
self.nursery_start = self._alloc_nursery(self.nursery_size)
#
- # --- the local raw-malloced objects, young and old
- self.rawmalloced_young_objects = self.null_address_dict()
- self.rawmalloced_old_objects = None
- self.rawmalloced_total_size = r_uint(0)
- # --- the local objects with weakrefs, young and old
- self.young_objects_with_weakrefs = self.AddressStack()
- self.old_objects_with_weakrefs = self.AddressStack()
- # --- support for id and identityhash: maps nursery objects with
- # GCFLAG_HAS_SHADOW to their future location at the next
- # local collection
- self.nursery_objects_shadows = self.AddressDict()
+ # --- the local raw-malloced objects (chained list via hdr.version)
+ self.rawmalloced_objects = NULL
+ # --- the local "normal" old objects (chained list via hdr.version)
+ self.old_objects = NULL
+ # --- the local objects with weakrefs (chained list via hdr.version)
+ #self.young_objects_with_weakrefs = NULL
+ #self.old_objects_with_weakrefs = NULL
#
self._register_with_C_code()
def teardown_thread(self):
+ self._cleanup_state()
self._unregister_with_C_code()
self._free_nursery(self.nursery_start)
free_non_gc_object(self)
@@ -67,9 +66,9 @@
tls = cast_instance_to_base_ptr(self)
tlsaddr = llmemory.cast_ptr_to_adr(tls)
else:
- n = 10000 + len(self.nontranslated_dict)
+ n = 10000 + len(StmGCTLS.nontranslated_dict)
tlsaddr = rffi.cast(llmemory.Address, n)
- self.nontranslated_dict[n] = self
+ StmGCTLS.nontranslated_dict[n] = self
self.stm_operations.set_tls(tlsaddr, int(self.in_main_thread))
def _unregister_with_C_code(self):
@@ -78,16 +77,40 @@
self.stm_operations.del_tls()
@staticmethod
- def cast_address_to_tls_object(self, tlsaddr):
+ @always_inline
+ def cast_address_to_tls_object(tlsaddr):
if we_are_translated():
tls = llmemory.cast_adr_to_ptr(tlsaddr, base_ptr_lltype())
return cast_base_ptr_to_instance(tls)
else:
n = rffi.cast(lltype.Signed, tlsaddr)
- return self.nontranslated_dict[n]
+ return StmGCTLS.nontranslated_dict[n]
+
+ def _disable_mallocs(self):
+ ll_assert(bool(self.nursery_free), "disable_mallocs: already disabled")
+ self.nursery_pending_clear = self.nursery_free - self.nursery_start
+ self.nursery_free = NULL
+ self.nursery_top = NULL
# ------------------------------------------------------------
+ def enter_transactional_mode(self):
+ """Called on the main thread, just before spawning the other
+ threads."""
+ self.local_collection()
+ if not self.local_nursery_is_empty():
+ self.local_collection(run_finalizers=False)
+ self._promote_locals_to_globals()
+ self._disable_mallocs()
+
+ def leave_transactional_mode(self):
+ """Restart using the main thread for mallocs."""
+ if not we_are_translated():
+ for key, value in StmGCTLS.nontranslated_dict.items():
+ if value is not self:
+ del StmGCTLS.nontranslated_dict[key]
+ self.start_transaction()
+
def start_transaction(self):
"""Enter a thread: performs any pending cleanups, and set
up a fresh state for allocating. Called at the start of
@@ -96,29 +119,88 @@
# end_of_transaction_collection() are not balanced: if a
# transaction is aborted, the latter might never be called.
# Be ready here to clean up any state.
+ self._cleanup_state()
if self.nursery_free:
clear_size = self.nursery_free - self.nursery_start
else:
clear_size = self.nursery_pending_clear
+ self.nursery_pending_clear = 0
if clear_size > 0:
llarena.arena_reset(self.nursery_start, clear_size, 2)
- self.nursery_pending_clear = 0
- if self.rawmalloced_young_objects:
- xxx
- if self.rawmalloced_old_objects:
- xxx
self.nursery_free = self.nursery_start
self.nursery_top = self.nursery_start + self.nursery_size
+ def local_nursery_is_empty(self):
+ ll_assert(self.nursery_free, "local_nursery_is_empty: gc not running")
+ return self.nursery_free == self.nursery_start
+
# ------------------------------------------------------------
- def local_collection(self):
+ def local_collection(self, run_finalizers=True):
"""Do a local collection. Finds all surviving young objects
and make them old. Also looks for roots from the stack.
The flag GCFLAG_WAS_COPIED is kept and the C tree is updated
if the local young object moves.
"""
- xxx
+ #
+ debug_start("gc-local")
+ #
+ # First, find the roots that point to young objects. All nursery
+ # objects found are copied out of the nursery, and the occasional
+ # young raw-malloced object is flagged with GCFLAG_VISITED.
+ # Note that during this step, we ignore references to further
+ # young objects; only objects directly referenced by roots
+ # are copied out or flagged. They are also added to the list
+ # 'old_objects_pointing_to_young'.
+ self.collect_roots_in_nursery()
+ #
+ while True:
+ # If we are using card marking, do a partial trace of the arrays
+ # that are flagged with GCFLAG_CARDS_SET.
+ if self.card_page_indices > 0:
+ self.collect_cardrefs_to_nursery()
+ #
+ # Now trace objects from 'old_objects_pointing_to_young'.
+ # All nursery objects they reference are copied out of the
+ # nursery, and again added to 'old_objects_pointing_to_young'.
+ # All young raw-malloced object found are flagged GCFLAG_VISITED.
+ # We proceed until 'old_objects_pointing_to_young' is empty.
+ self.collect_oldrefs_to_nursery()
+ #
+ # We have to loop back if collect_oldrefs_to_nursery caused
+ # new objects to show up in old_objects_with_cards_set
+ if self.card_page_indices > 0:
+ if self.old_objects_with_cards_set.non_empty():
+ continue
+ break
+ #
+ # Now all live nursery objects should be out. Update the young
+ # weakrefs' targets.
+ if self.young_objects_with_weakrefs.non_empty():
+ self.invalidate_young_weakrefs()
+ if self.young_objects_with_light_finalizers.non_empty():
+ self.deal_with_young_objects_with_finalizers()
+ #
+ # Clear this mapping.
+ if self.nursery_objects_shadows.length() > 0:
+ self.nursery_objects_shadows.clear()
+ #
+ # Walk the list of young raw-malloced objects, and either free
+ # them or make them old.
+ if self.young_rawmalloced_objects:
+ self.free_young_rawmalloced_objects()
+ #
+ # All live nursery objects are out, and the rest dies. Fill
+ # the whole nursery with zero and reset the current nursery pointer.
+ llarena.arena_reset(self.nursery, self.nursery_size, 2)
+ self.debug_rotate_nursery()
+ self.nursery_free = self.nursery
+ #
+ debug_print("minor collect, total memory used:",
+ self.get_total_memory_used())
+ if self.DEBUG >= 2:
+ self.debug_check_consistency() # expensive!
+ debug_stop("gc-minor")
def end_of_transaction_collection(self):
"""Do an end-of-transaction collection. Finds all surviving
@@ -133,3 +215,49 @@
xxx
# ------------------------------------------------------------
+
+ @always_inline
+ def allocate_bump_pointer(self, size):
+ free = self.nursery_free
+ top = self.nursery_top
+ if (top - free) < llmemory.raw_malloc_usage(size):
+ free = self.allocate_object_of_size(size)
+ self.nursery_free = free + size
+ return free
+
+ @dont_inline
+ def allocate_object_of_size(self, size):
+ if not self.nursery_free:
+ fatalerror("malloc in a non-main thread but outside a transaction")
+ if size > self.nursery_size:
+ fatalerror("object too large to ever fit in the nursery")
+ while True:
+ self.local_collection()
+ free = self.nursery_free
+ top = self.nursery_top
+ if (top - free) < llmemory.raw_malloc_usage(size):
+ continue # try again
+ return free
+
+ # ------------------------------------------------------------
+
+ def _promote_locals_to_globals(self):
+ ll_assert(self.local_nursery_is_empty(), "nursery must be empty [1]")
+ #
+ obj = self.old_objects
+ self.old_objects = NULL
+ while obj:
+ hdr = self.header(obj)
+ hdr.tid |= GCFLAG_GLOBAL
+ obj = hdr.version
+ #
+ obj = self.rawmalloced_objects
+ self.rawmalloced_objects = NULL
+ while obj:
+ hdr = self.header(obj)
+ hdr.tid |= GCFLAG_GLOBAL
+ obj = hdr.version
+
+ def _cleanup_state(self):
+ if self.rawmalloced_objects:
+ xxx # free the rawmalloced_objects still around
diff --git a/pypy/rpython/memory/gc/test/test_stmgc.py b/pypy/rpython/memory/gc/test/test_stmgc.py
--- a/pypy/rpython/memory/gc/test/test_stmgc.py
+++ b/pypy/rpython/memory/gc/test/test_stmgc.py
@@ -133,7 +133,7 @@
return llmemory.offsetof(WR, 'wadr')
-class TestBasic:
+class StmGCTests:
GCClass = StmGC
def setup_method(self, meth):
@@ -155,6 +155,7 @@
if key != 0:
self.gc.stm_operations.threadnum = key
self.gc.teardown_thread()
+ self.gc.stm_operations.threadnum = 0
# ----------
# test helpers
@@ -187,14 +188,18 @@
meth = getattr(self.gc, 'read_int%d' % WORD)
return meth(obj, offset)
+
+class TestBasic(StmGCTests):
+
def test_gc_creation_works(self):
pass
def test_allocate_bump_pointer(self):
- a3 = self.gc.allocate_bump_pointer(3)
- a4 = self.gc.allocate_bump_pointer(4)
- a5 = self.gc.allocate_bump_pointer(5)
- a6 = self.gc.allocate_bump_pointer(6)
+ tls = self.gc.main_thread_tls
+ a3 = tls.allocate_bump_pointer(3)
+ a4 = tls.allocate_bump_pointer(4)
+ a5 = tls.allocate_bump_pointer(5)
+ a6 = tls.allocate_bump_pointer(6)
assert a4 - a3 == 3
assert a5 - a4 == 4
assert a6 - a5 == 5
@@ -210,7 +215,7 @@
def test_malloc_main_vs_thread(self):
gcref = self.gc.malloc_fixedsize_clear(123, llmemory.sizeof(S))
obj = llmemory.cast_ptr_to_adr(gcref)
- assert self.gc.header(obj).tid & GCFLAG_GLOBAL != 0
+ assert self.gc.header(obj).tid & GCFLAG_GLOBAL == 0
#
self.select_thread(1)
gcref = self.gc.malloc_fixedsize_clear(123, llmemory.sizeof(S))
@@ -588,7 +593,3 @@
assert a == sr1_adr
a = self.gc.stm_normalize_global(tr1_adr)
assert a == sr1_adr
-
- def test_alloc_a_lot_from_main_thread(self):
- for i in range(1000):
- sr1, sr1_adr = self.malloc(SR)
diff --git a/pypy/rpython/memory/gc/test/test_stmtls.py b/pypy/rpython/memory/gc/test/test_stmtls.py
new file mode 100644
--- /dev/null
+++ b/pypy/rpython/memory/gc/test/test_stmtls.py
@@ -0,0 +1,50 @@
+import py
+from pypy.rpython.lltypesystem import lltype, llmemory, llarena, llgroup, rffi
+from pypy.rpython.memory.gc.stmtls import StmGCTLS, WORD
+from pypy.rpython.memory.gc.test.test_stmgc import StmGCTests
+
+
+S = lltype.GcStruct('S', ('a', lltype.Signed), ('b', lltype.Signed),
+ ('c', lltype.Signed))
+
+
+class TestStmGCTLS(StmGCTests):
+ current_stack = ()
+
+ def stack_add(self, p):
+ if self.current_stack == ():
+ self.current_stack = []
+ self.current_stack.append(p)
+
+ def stack_pop(self):
+ return self.current_stack.pop()
+
+ # ----------
+
+ def test_creation_works(self):
+ pass
+
+ def test_allocate_bump_pointer(self):
+ tls = self.gc.main_thread_tls
+ a3 = tls.allocate_bump_pointer(3)
+ a4 = tls.allocate_bump_pointer(4)
+ a5 = tls.allocate_bump_pointer(5)
+ a6 = tls.allocate_bump_pointer(6)
+ assert a4 - a3 == 3
+ assert a5 - a4 == 4
+ assert a6 - a5 == 5
+
+ def test_local_collection(self):
+ s1, _ = self.malloc(S); s1.a = 111
+ s2, _ = self.malloc(S); s2.a = 222
+ self.stack_add(s2)
+ self.gc.main_thread_tls.local_collection()
+ s3 = self.stack_pop()
+ assert s3.a == 222
+ xxxx # raises...
+ s1.a
+ s2.a
+
+ def test_alloc_a_lot(self):
+ for i in range(1000):
+ sr1, sr1_adr = self.malloc(SR)
From noreply at buildbot.pypy.org Fri Apr 13 16:06:27 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 13 Apr 2012 16:06:27 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: Just move the blocks around
to keep the number of flag checks
Message-ID: <20120413140627.742A382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: gc-minimark-pinning
Changeset: r54333:549ac1d84b87
Date: 2012-04-13 16:06 +0200
http://bitbucket.org/pypy/pypy/changeset/549ac1d84b87/
Log: Just move the blocks around to keep the number of flag checks
minimal in the common case.
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -1487,7 +1487,23 @@
return
#
size_gc_header = self.gcheaderbuilder.size_gc_header
- if self.header(obj).tid & GCFLAG_PINNED:
+ if self.header(obj).tid & (GCFLAG_HAS_SHADOW|GCFLAG_PINNED) == 0:
+ #
+ # Common case: 'obj' was not already forwarded (otherwise
+ # tid == -42, containing all flags), and it doesn't have the
+ # HAS_SHADOW or PINNED flags either. We must move it out of
+ # the nursery, into a new nonmovable location.
+ totalsize = size_gc_header + self.get_size(obj)
+ newhdr = self._malloc_out_of_nursery(totalsize)
+ #
+ elif self.is_forwarded(obj):
+ #
+ # 'obj' was already forwarded. Change the original reference
+ # to point to its forwarding address, and we're done.
+ root.address[0] = self.get_forwarding_address(obj)
+ return
+ #
+ elif self.header(obj).tid & GCFLAG_PINNED:
hdr = self.header(obj)
if hdr.tid & GCFLAG_VISITED:
return
@@ -1497,21 +1513,6 @@
self.surviving_pinned_objects.insert(
llarena.getfakearenaaddress(obj - size_gc_header))
return
- elif self.header(obj).tid & GCFLAG_HAS_SHADOW == 0:
- #
- # Common case: 'obj' was not already forwarded (otherwise
- # tid == -42, containing all flags), and it doesn't have the
- # HAS_SHADOW flag either. We must move it out of the nursery,
- # into a new nonmovable location.
- totalsize = size_gc_header + self.get_size(obj)
- newhdr = self._malloc_out_of_nursery(totalsize)
- #
- elif self.is_forwarded(obj):
- #
- # 'obj' was already forwarded. Change the original reference
- # to point to its forwarding address, and we're done.
- root.address[0] = self.get_forwarding_address(obj)
- return
#
else:
# First visit to an object that has already a shadow.
From noreply at buildbot.pypy.org Fri Apr 13 16:09:10 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 13 Apr 2012 16:09:10 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: missing a "return"?
Message-ID: <20120413140910.15D1B82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: gc-minimark-pinning
Changeset: r54334:d6855594831c
Date: 2012-04-13 16:08 +0200
http://bitbucket.org/pypy/pypy/changeset/d6855594831c/
Log: missing a "return"?
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -600,6 +600,7 @@
llarena.arena_reserve(self.nursery_free, totalsize)
res = self.nursery_free
self.nursery_free = res + totalsize
+ XXX # it's missing a 'return' somewhere think
self.minor_collection(totalsize)
# try allocating now, otherwise we do a major collect
do_major_collect = False
From noreply at buildbot.pypy.org Fri Apr 13 16:17:44 2012
From: noreply at buildbot.pypy.org (bivab)
Date: Fri, 13 Apr 2012 16:17:44 +0200 (CEST)
Subject: [pypy-commit] pypy default: (cfbolz, bivab): simplify
Message-ID: <20120413141744.D160682F4E@wyvern.cs.uni-duesseldorf.de>
Author: David Schneider
Branch:
Changeset: r54335:83dbfcb6f927
Date: 2012-04-13 16:16 +0200
http://bitbucket.org/pypy/pypy/changeset/83dbfcb6f927/
Log: (cfbolz, bivab): simplify
diff --git a/pypy/translator/driver.py b/pypy/translator/driver.py
--- a/pypy/translator/driver.py
+++ b/pypy/translator/driver.py
@@ -115,12 +115,10 @@
backend, ts = self.get_backend_and_type_system()
for task in self.tasks:
explicit_task = task
- parts = task.split('_')
- if len(parts) == 1:
- if task in ('annotate',):
- expose_task(task)
+ if task == 'annotate':
+ expose_task(task)
else:
- task, postfix = parts
+ task, postfix = task.split('_')
if task in ('rtype', 'backendopt', 'llinterpret',
'pyjitpl'):
if ts:
From noreply at buildbot.pypy.org Fri Apr 13 17:04:33 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Fri, 13 Apr 2012 17:04:33 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: of course,
put the return there
Message-ID: <20120413150433.B0CA282F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54336:dd9a443b9106
Date: 2012-04-13 16:53 +0200
http://bitbucket.org/pypy/pypy/changeset/dd9a443b9106/
Log: of course, put the return there
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -600,7 +600,7 @@
llarena.arena_reserve(self.nursery_free, totalsize)
res = self.nursery_free
self.nursery_free = res + totalsize
- XXX # it's missing a 'return' somewhere think
+ return res
self.minor_collection(totalsize)
# try allocating now, otherwise we do a major collect
do_major_collect = False
From noreply at buildbot.pypy.org Fri Apr 13 17:04:35 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Fri, 13 Apr 2012 17:04:35 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: make sure I remember
Message-ID: <20120413150435.0CB0282F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54337:a2282c475c4d
Date: 2012-04-13 16:56 +0200
http://bitbucket.org/pypy/pypy/changeset/a2282c475c4d/
Log: make sure I remember
diff --git a/TODO b/TODO
new file mode 100644
--- /dev/null
+++ b/TODO
@@ -0,0 +1,8 @@
+
+* implement limit on no of pinned objects + replace insert with qsort
+
+* use pinning
+
+* make sure objects with shadows work
+
+* implement tracing for pinned objects (check that it works)
\ No newline at end of file
From noreply at buildbot.pypy.org Fri Apr 13 18:21:59 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Fri, 13 Apr 2012 18:21:59 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: transform gc support
Message-ID: <20120413162159.D114782F4F@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54339:175be1a2c933
Date: 2012-04-13 17:27 +0200
http://bitbucket.org/pypy/pypy/changeset/175be1a2c933/
Log: transform gc support
diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py
--- a/pypy/rpython/memory/gctransform/framework.py
+++ b/pypy/rpython/memory/gctransform/framework.py
@@ -446,6 +446,10 @@
[s_gc,
annmodel.SomeInteger(nonneg=True)],
annmodel.s_None)
+ self.pin_ptr = getfn(GCClass.pin,
+ [s_gc, annmodel.SomeAddress()], annmodel.s_None)
+ self.unpin_ptr = getfn(GCClass.unpin,
+ [s_gc, annmodel.SomeAddress()], annmodel.s_None)
self.write_barrier_ptr = None
self.write_barrier_from_array_ptr = None
@@ -747,6 +751,14 @@
hop.genop("direct_call", [self.can_move_ptr, self.c_const_gc, v_addr],
resultvar=op.result)
+ def gct_gc_pin(self, hop):
+ op = hop.spaceop
+ hop.genop("direct_call", [self.pin_ptr, self.c_const_gc, op.args[0]])
+
+ def gct_gc_unpin(self, hop):
+ op = hop.spaceop
+ hop.genop("direct_call", [self.unpin_ptr, self.c_const_gc, op.args[0]])
+
def gct_shrink_array(self, hop):
if self.shrink_array_ptr is None:
return GCTransformer.gct_shrink_array(self, hop)
From noreply at buildbot.pypy.org Fri Apr 13 18:21:58 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Fri, 13 Apr 2012 18:21:58 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: rpython fixes and
transformed test
Message-ID: <20120413162158.89C1182F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54338:0ac3b06d13d8
Date: 2012-04-13 17:23 +0200
http://bitbucket.org/pypy/pypy/changeset/0ac3b06d13d8/
Log: rpython fixes and transformed test
diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py
--- a/pypy/rpython/memory/gc/minimark.py
+++ b/pypy/rpython/memory/gc/minimark.py
@@ -247,8 +247,6 @@
self.nursery_top = NULL
self.debug_tiny_nursery = -1
self.debug_rotating_nurseries = None
- self.surviving_pinned_objects = NULL
- self.nursery_barriers = NULL
#
# The ArenaCollection() handles the nonmovable objects allocation.
if ArenaCollectionClass is None:
@@ -452,7 +450,7 @@
def debug_rotate_nursery(self):
if self.debug_rotating_nurseries is not None:
- ll_assert(self.nursery_barriers.empty(), "non empty nursery barriers with rotating nursery")
+ ll_assert(not self.nursery_barriers.non_empty(), "non empty nursery barriers with rotating nursery")
debug_start("gc-debug")
oldnurs = self.nursery
llarena.arena_protect(oldnurs, self._nursery_memory_size(), True)
@@ -1339,7 +1337,6 @@
nursery_barriers.append(next)
llarena.arena_reset(prev, self.nursery_top - prev, 2)
self.surviving_pinned_objects.delete()
- self.surviving_pinned_objects = NULL
self.nursery_barriers = nursery_barriers
self.debug_rotate_nursery()
self.nursery_free = self.nursery
diff --git a/pypy/rpython/memory/test/test_transformed_gc.py b/pypy/rpython/memory/test/test_transformed_gc.py
--- a/pypy/rpython/memory/test/test_transformed_gc.py
+++ b/pypy/rpython/memory/test/test_transformed_gc.py
@@ -43,6 +43,7 @@
gcpolicy = None
GC_CAN_MOVE = False
GC_CAN_MALLOC_NONMOVABLE = True
+ GC_CAN_ALWAYS_PIN = False
taggedpointers = False
def setup_class(cls):
@@ -726,6 +727,28 @@
res = fn([])
assert res == ord('y')
+ def define_pinning(cls):
+ def f(i, j):
+ s = str(i)
+ if not rgc.can_move(s):
+ return 13
+ sum = 0
+ with rgc.pinned_object(s):
+ sum += int(rgc.can_move(s))
+ sum += 10 * int(rgc.can_move(s))
+ return sum
+ return f
+
+ def test_pinning(self):
+ res = self.runner("pinning")([10, 0])
+ if not self.GC_CAN_MOVE:
+ assert res == 13
+ elif self.GC_CAN_ALWAYS_PIN:
+ assert res == 10
+ else:
+ assert res == 11 or res == 13 # sometimes fresh objs can't move
+
+
class GenericMovingGCTests(GenericGCTests):
GC_CAN_MOVE = True
GC_CAN_MALLOC_NONMOVABLE = False
@@ -1273,6 +1296,7 @@
class TestMiniMarkGC(TestHybridGC):
gcname = "minimark"
GC_CAN_TEST_ID = True
+ GC_CAN_ALWAYS_PIN = True
class gcpolicy(gc.FrameworkGcPolicy):
class transformerclass(framework.FrameworkGCTransformer):
From noreply at buildbot.pypy.org Fri Apr 13 18:22:01 2012
From: noreply at buildbot.pypy.org (fijal)
Date: Fri, 13 Apr 2012 18:22:01 +0200 (CEST)
Subject: [pypy-commit] pypy gc-minimark-pinning: use pinning, easy (?)
Message-ID: <20120413162201.26A2B82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Maciej Fijalkowski
Branch: gc-minimark-pinning
Changeset: r54340:049d60b7295d
Date: 2012-04-13 17:29 +0200
http://bitbucket.org/pypy/pypy/changeset/049d60b7295d/
Log: use pinning, easy (?)
diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py
--- a/pypy/rpython/lltypesystem/rffi.py
+++ b/pypy/rpython/lltypesystem/rffi.py
@@ -714,6 +714,7 @@
string is already nonmovable. Must be followed by a
free_nonmovingbuffer call.
"""
+ rgc.pin(data)
if rgc.can_move(data):
count = len(data)
buf = lltype.malloc(TYPEP.TO, count, flavor='raw')
@@ -743,7 +744,9 @@
offsetof(STRTYPE, 'chars') + itemoffsetof(STRTYPE.chars, 0)
followed_2nd_path = (buf == cast(TYPEP, data_start))
keepalive_until_here(data)
- if not followed_2nd_path:
+ if followed_2nd_path:
+ rgc.unpin(data)
+ else:
lltype.free(buf, flavor='raw')
free_nonmovingbuffer._annenforceargs_ = [strtype, None]
From noreply at buildbot.pypy.org Fri Apr 13 19:06:12 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 13 Apr 2012 19:06:12 +0200 (CEST)
Subject: [pypy-commit] pypy stm-gc: Random progress.
Message-ID: <20120413170612.5EAA482F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: stm-gc
Changeset: r54341:e1e05308a3c9
Date: 2012-04-13 19:05 +0200
http://bitbucket.org/pypy/pypy/changeset/e1e05308a3c9/
Log: Random progress.
diff --git a/pypy/rpython/memory/gc/stmgc.py b/pypy/rpython/memory/gc/stmgc.py
--- a/pypy/rpython/memory/gc/stmgc.py
+++ b/pypy/rpython/memory/gc/stmgc.py
@@ -43,6 +43,7 @@
GCFLAG_HAS_SHADOW = first_gcflag << 2
GCFLAG_FIXED_HASH = first_gcflag << 3
GCFLAG_WEAKREF = first_gcflag << 4
+GCFLAG_VISITED = first_gcflag << 5
def always_inline(fn):
@@ -70,18 +71,18 @@
'stm_operations': 'use_real_one',
'nursery_size': 32*1024*1024, # 32 MB
- "page_size": 1024*WORD, # copied from minimark.py
- "arena_size": 65536*WORD, # copied from minimark.py
- "small_request_threshold": 35*WORD, # copied from minimark.py
+ #"page_size": 1024*WORD, # copied from minimark.py
+ #"arena_size": 65536*WORD, # copied from minimark.py
+ #"small_request_threshold": 35*WORD, # copied from minimark.py
}
def __init__(self, config,
stm_operations='use_emulator',
nursery_size=1024,
- page_size=16*WORD,
- arena_size=64*WORD,
- small_request_threshold=5*WORD,
- ArenaCollectionClass=None,
+ #page_size=16*WORD,
+ #arena_size=64*WORD,
+ #small_request_threshold=5*WORD,
+ #ArenaCollectionClass=None,
**kwds):
MovingGCBase.__init__(self, config, **kwds)
#
@@ -95,9 +96,7 @@
from pypy.rpython.memory.gc import stmshared
self.stm_operations = stm_operations
self.nursery_size = nursery_size
- self.sharedarea = stmshared.StmGCSharedArea(self, ArenaCollectionClass,
- page_size, arena_size,
- small_request_threshold)
+ self.sharedarea = stmshared.StmGCSharedArea(self)
#
def _get_size(obj): # indirection to hide 'self'
return self.get_size(obj)
diff --git a/pypy/rpython/memory/gc/stmshared.py b/pypy/rpython/memory/gc/stmshared.py
--- a/pypy/rpython/memory/gc/stmshared.py
+++ b/pypy/rpython/memory/gc/stmshared.py
@@ -1,18 +1,58 @@
+from pypy.rpython.lltypesystem import lltype, llmemory, llarena
+from pypy.rlib.objectmodel import free_non_gc_object
+
+NULL = llmemory.NULL
class StmGCSharedArea(object):
+ _alloc_flavor_ = 'raw'
- def __init__(self, gc, ArenaCollectionClass,
- page_size, arena_size, small_request_threshold):
+ def __init__(self, gc):
self.gc = gc
- # The ArenaCollection() handles the nonmovable objects allocation.
- # It contains all small GCFLAG_GLOBAL objects. The non-small ones
- # are directly malloc'ed.
- if ArenaCollectionClass is None:
- from pypy.rpython.memory.gc import minimarkpage
- ArenaCollectionClass = minimarkpage.ArenaCollection
- self.ac = ArenaCollectionClass(arena_size, page_size,
- small_request_threshold)
def setup(self):
pass
+
+
+class StmGCThreadLocalAllocator(object):
+ """A thread-local allocator for the shared area.
+ This is an optimization only: it lets us use thread-local variables
+ to keep track of what we allocated.
+ """
+ _alloc_flavor_ = 'raw'
+
+ def __init__(self, sharedarea):
+ self.gc = sharedarea.gc
+ self.sharedarea = sharedarea
+ self.chained_list = NULL
+ self.special_stack = self.gc.AddressStack()
+
+ def delete(self):
+ self.special_stack.delete()
+ free_non_gc_object(self)
+
+ def malloc_regular(self, size):
+ """Malloc for an object where the 'version' field can be used
+ internally for a chained list."""
+ adr1 = llarena.arena_malloc(size, 0)
+ adr2 = adr1 + self.gc.gcheaderbuilder
+ hdr = llmemory.cast_adr_to_ptr(adr1, lltype.Ptr(self.gc.HDR))
+ hdr.version = self.chained_list
+ self.chained_list = adr2
+ return adr2
+
+ def malloc_special(self, size):
+ """Malloc for an object where the 'version' field cannot be
+ used internally. It's the rare case here."""
+ adr1 = llarena.arena_malloc(size, 0)
+ adr2 = adr1 + self.gc.gcheaderbuilder.size_gc_header
+ self.special_stack.append(adr2)
+ return adr2
+
+ def free_object(self, adr2):
+ adr1 = adr2 - self.gc.gcheaderbuilder.size_gc_header
+ llarena.arena_free(adr1)
+
+ def replace_special_stack(self, new_special_stack):
+ self.special_stack.delete()
+ self.special_stack = new_special_stack
diff --git a/pypy/rpython/memory/gc/stmtls.py b/pypy/rpython/memory/gc/stmtls.py
--- a/pypy/rpython/memory/gc/stmtls.py
+++ b/pypy/rpython/memory/gc/stmtls.py
@@ -7,7 +7,7 @@
from pypy.rpython.memory.gc.stmgc import WORD, NULL
from pypy.rpython.memory.gc.stmgc import always_inline, dont_inline
-from pypy.rpython.memory.gc.stmgc import GCFLAG_GLOBAL
+from pypy.rpython.memory.gc.stmgc import GCFLAG_GLOBAL, GCFLAG_VISITED
class StmGCTLS(object):
@@ -19,6 +19,7 @@
nontranslated_dict = {}
def __init__(self, gc, in_main_thread):
+ from pypy.rpython.memory.gc.stmshared import StmGCThreadLocalAllocator
self.gc = gc
self.in_main_thread = in_main_thread
self.stm_operations = self.gc.stm_operations
@@ -37,13 +38,16 @@
self.nursery_start = self._alloc_nursery(self.nursery_size)
#
# --- the local raw-malloced objects (chained list via hdr.version)
- self.rawmalloced_objects = NULL
+ #self.rawmalloced_objects = NULL
# --- the local "normal" old objects (chained list via hdr.version)
self.old_objects = NULL
# --- the local objects with weakrefs (chained list via hdr.version)
#self.young_objects_with_weakrefs = NULL
#self.old_objects_with_weakrefs = NULL
#
+ # --- a thread-local allocator for the shared area
+ self.sharedarea_tls = StmGCThreadLocalAllocator(gc.sharedarea)
+ #
self._register_with_C_code()
def teardown_thread(self):
@@ -137,70 +141,52 @@
# ------------------------------------------------------------
def local_collection(self, run_finalizers=True):
- """Do a local collection. Finds all surviving young objects
- and make them old. Also looks for roots from the stack.
- The flag GCFLAG_WAS_COPIED is kept and the C tree is updated
- if the local young object moves.
+ """Do a local collection. This should be equivalent to a minor
+ collection only, but the GC is not generational so far, so it is
+ for now the same as a full collection --- but only on LOCAL
+ objects, not touching the GLOBAL objects. More precisely, this
+ finds all YOUNG LOCAL objects, move them out of the nursery if
+ necessary, and make them OLD LOCAL objects. This starts from
+ the roots from the stack. The flag GCFLAG_WAS_COPIED is kept
+ and the C tree is updated if the local young objects move.
"""
#
debug_start("gc-local")
#
- # First, find the roots that point to young objects. All nursery
- # objects found are copied out of the nursery, and the occasional
- # young raw-malloced object is flagged with GCFLAG_VISITED.
- # Note that during this step, we ignore references to further
- # young objects; only objects directly referenced by roots
- # are copied out or flagged. They are also added to the list
- # 'old_objects_pointing_to_young'.
+ # Linked list of LOCAL objects pending a visit. Note that no
+ # GLOBAL object can at any point contain a reference to a LOCAL
+ # object.
+ self.pending_list = NULL
+ #
+ # First, find the roots that point to LOCAL objects. All YOUNG
+ # (i.e. nursery) objects found are copied out of the nursery.
+ # All OLD objects found are flagged with GCFLAG_VISITED. At this
+ # point, the content of the objects is not modified; the objects
+ # are merely added to the chained list 'pending_list'.
self.collect_roots_in_nursery()
#
- while True:
- # If we are using card marking, do a partial trace of the arrays
- # that are flagged with GCFLAG_CARDS_SET.
- if self.card_page_indices > 0:
- self.collect_cardrefs_to_nursery()
- #
- # Now trace objects from 'old_objects_pointing_to_young'.
- # All nursery objects they reference are copied out of the
- # nursery, and again added to 'old_objects_pointing_to_young'.
- # All young raw-malloced object found are flagged GCFLAG_VISITED.
- # We proceed until 'old_objects_pointing_to_young' is empty.
- self.collect_oldrefs_to_nursery()
- #
- # We have to loop back if collect_oldrefs_to_nursery caused
- # new objects to show up in old_objects_with_cards_set
- if self.card_page_indices > 0:
- if self.old_objects_with_cards_set.non_empty():
- continue
- break
+ # Also find the roots that are the local copy of GCFLAG_WAS_COPIED
+ # objects.
+ self.collect_roots_from_tldict()
#
- # Now all live nursery objects should be out. Update the young
- # weakrefs' targets.
- if self.young_objects_with_weakrefs.non_empty():
- self.invalidate_young_weakrefs()
- if self.young_objects_with_light_finalizers.non_empty():
- self.deal_with_young_objects_with_finalizers()
+ # Now repeat following objects until 'pending_list' is empty.
+ self.collect_oldrefs_to_nursery()
#
- # Clear this mapping.
- if self.nursery_objects_shadows.length() > 0:
- self.nursery_objects_shadows.clear()
+ # Walk the list of LOCAL raw-malloced objects, and free them if
+ # necessary.
+ #self.free_local_rawmalloced_objects()
#
- # Walk the list of young raw-malloced objects, and either free
- # them or make them old.
- if self.young_rawmalloced_objects:
- self.free_young_rawmalloced_objects()
+ # Ask the ArenaCollection to visit all objects. Free the ones
+ # that have not been visited above, and reset GCFLAG_VISITED on
+ # the others.
+ self.ac.mass_free(self._free_if_unvisited)
#
# All live nursery objects are out, and the rest dies. Fill
# the whole nursery with zero and reset the current nursery pointer.
llarena.arena_reset(self.nursery, self.nursery_size, 2)
- self.debug_rotate_nursery()
- self.nursery_free = self.nursery
+ self.nursery_free = self.nursery_start
#
- debug_print("minor collect, total memory used:",
- self.get_total_memory_used())
- if self.DEBUG >= 2:
- self.debug_check_consistency() # expensive!
- debug_stop("gc-minor")
+ debug_stop("gc-local")
def end_of_transaction_collection(self):
"""Do an end-of-transaction collection. Finds all surviving
@@ -229,7 +215,7 @@
def allocate_object_of_size(self, size):
if not self.nursery_free:
fatalerror("malloc in a non-main thread but outside a transaction")
- if size > self.nursery_size:
+ if size > self.nursery_size // 8 * 7:
fatalerror("object too large to ever fit in the nursery")
while True:
self.local_collection()
@@ -251,12 +237,12 @@
hdr.tid |= GCFLAG_GLOBAL
obj = hdr.version
#
- obj = self.rawmalloced_objects
- self.rawmalloced_objects = NULL
- while obj:
- hdr = self.header(obj)
- hdr.tid |= GCFLAG_GLOBAL
- obj = hdr.version
+## obj = self.rawmalloced_objects
+## self.rawmalloced_objects = NULL
+## while obj:
+## hdr = self.header(obj)
+## hdr.tid |= GCFLAG_GLOBAL
+## obj = hdr.version
def _cleanup_state(self):
if self.rawmalloced_objects:
diff --git a/pypy/rpython/memory/gc/test/test_stmtls.py b/pypy/rpython/memory/gc/test/test_stmtls.py
--- a/pypy/rpython/memory/gc/test/test_stmtls.py
+++ b/pypy/rpython/memory/gc/test/test_stmtls.py
@@ -2,18 +2,42 @@
from pypy.rpython.lltypesystem import lltype, llmemory, llarena, llgroup, rffi
from pypy.rpython.memory.gc.stmtls import StmGCTLS, WORD
from pypy.rpython.memory.gc.test.test_stmgc import StmGCTests
+from pypy.rpython.memory.support import get_address_stack, get_address_deque
S = lltype.GcStruct('S', ('a', lltype.Signed), ('b', lltype.Signed),
('c', lltype.Signed))
-class TestStmGCTLS(StmGCTests):
- current_stack = ()
+class FakeStmOperations:
+ def set_tls(self, tlsaddr, num):
+ pass
+ def del_tls(self, tlsaddr):
+ pass
+
+class FakeSharedArea:
+ pass
+
+class FakeGC:
+ from pypy.rpython.memory.support import AddressDict, null_address_dict
+ AddressStack = get_address_stack()
+ AddressDeque = get_address_deque()
+ nursery_size = 128
+ stm_operations = FakeStmOperations()
+ sharedarea = FakeSharedArea()
+
+
+class TestStmGCTLS(object):
+
+ def setup_method(self, meth):
+ self.current_stack = []
+ self.gc = FakeGC()
+ self.gc.sharedarea.gc = self.gc
+ self.gctls_main = StmGCTLS(self.gc, in_main_thread=True)
+ self.gctls_thrd = StmGCTLS(self.gc, in_main_thread=False)
+ self.gc.main_thread_tls = self.gctls_main
def stack_add(self, p):
- if self.current_stack == ():
- self.current_stack = []
self.current_stack.append(p)
def stack_pop(self):
diff --git a/pypy/translator/c/src/allocator.h b/pypy/translator/c/src/allocator.h
--- a/pypy/translator/c/src/allocator.h
+++ b/pypy/translator/c/src/allocator.h
@@ -1,3 +1,14 @@
+#if defined(RPY_STM)
+
+
+/* XXX no special malloc function, use the thread-safe system-provided one */
+#define PyObject_Malloc malloc
+#define PyObject_Realloc realloc
+#define PyObject_Free free
+
+
+#else
+
/* allocation functions prototypes */
void *PyObject_Malloc(size_t n);
@@ -29,3 +40,4 @@
#endif
#endif
+#endif
diff --git a/pypy/translator/stm/stmgcintf.py b/pypy/translator/stm/stmgcintf.py
--- a/pypy/translator/stm/stmgcintf.py
+++ b/pypy/translator/stm/stmgcintf.py
@@ -11,7 +11,8 @@
eci = ExternalCompilationInfo(
include_dirs = [cdir, cdir2],
includes = ['src_stm/et.h', 'src_stm/et.c'],
- pre_include_bits = ['#define PYPY_LONG_BIT %d' % LONG_BIT],
+ pre_include_bits = ['#define PYPY_LONG_BIT %d' % LONG_BIT,
+ '#define RPY_STM 1'],
separate_module_sources = ['\n'], # hack for test_rffi_stm
)
From noreply at buildbot.pypy.org Fri Apr 13 19:24:24 2012
From: noreply at buildbot.pypy.org (arigo)
Date: Fri, 13 Apr 2012 19:24:24 +0200 (CEST)
Subject: [pypy-commit] extradoc extradoc: Update.
Message-ID: <20120413172424.650AD82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Armin Rigo
Branch: extradoc
Changeset: r4191:028584929b5b
Date: 2012-04-13 19:24 +0200
http://bitbucket.org/pypy/extradoc/changeset/028584929b5b/
Log: Update.
diff --git a/planning/stm.txt b/planning/stm.txt
--- a/planning/stm.txt
+++ b/planning/stm.txt
@@ -8,8 +8,35 @@
|
-Overview
---------
+Python Interface
+----------------
+
+Planned interface refactoring: integrate the states of "running
+transactionally" and "not running transactionally".
+
+Running normally the program is done in one big top-level transaction.
+The "transaction" module should be provisionally renamed to "ame". At
+any point (including in sub-transactions) we can create an instance of
+'ame.RandomOrder()', and call methods on it:
+
+ - 'add(callable, *args, **kwds)'
+ - 'run()'
+
+When run() is called the current transaction is suspended and all
+added transactions are run (including the ones added by them). These
+transactions commit into the current transaction. When run() returns,
+the parent transaction continues running. If at any point any
+transaction becomes inevitable, its parent transaction does the same,
+and so on.
+
+This should give a model that composes more naturally than the current
+one. Also, it opens the door to other 'ame' objects; e.g. an
+'ame.Sequential()' would run the subtransactions in the same order as
+they have been added.
+
+
+Overview of the GC
+------------------
A saner approach (and likely better results that now): integrate with
the GC. Here is the basic plan.
From noreply at buildbot.pypy.org Fri Apr 13 22:03:21 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:03:21 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: merge from default
Message-ID: <20120413200321.F24E182F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54342:817ce54a56bb
Date: 2012-04-13 13:07 +0200
http://bitbucket.org/pypy/pypy/changeset/817ce54a56bb/
Log: merge from default
diff --git a/_pytest/assertion/oldinterpret.py b/_pytest/assertion/oldinterpret.py
--- a/_pytest/assertion/oldinterpret.py
+++ b/_pytest/assertion/oldinterpret.py
@@ -1,8 +1,7 @@
import py
import sys, inspect
from compiler import parse, ast, pycodegen
-from _pytest.assertion.util import format_explanation
-from _pytest.assertion.reinterpret import BuiltinAssertionError
+from _pytest.assertion.util import format_explanation, BuiltinAssertionError
passthroughex = py.builtin._sysex
diff --git a/_pytest/assertion/reinterpret.py b/_pytest/assertion/reinterpret.py
--- a/_pytest/assertion/reinterpret.py
+++ b/_pytest/assertion/reinterpret.py
@@ -1,7 +1,6 @@
import sys
import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
+from _pytest.assertion.util import BuiltinAssertionError
class AssertionError(BuiltinAssertionError):
def __init__(self, *args):
diff --git a/_pytest/assertion/util.py b/_pytest/assertion/util.py
--- a/_pytest/assertion/util.py
+++ b/_pytest/assertion/util.py
@@ -2,6 +2,7 @@
import py
+BuiltinAssertionError = py.builtin.builtins.AssertionError
# The _reprcompare attribute on the util module is used by the new assertion
# interpretation code and assertion rewriter to detect this plugin was
diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py
--- a/pypy/annotation/test/test_annrpython.py
+++ b/pypy/annotation/test/test_annrpython.py
@@ -3746,9 +3746,9 @@
return g(i)
def main(i):
if i == 2:
- return f(i)
+ return f(2)
elif i == 3:
- return f(i)
+ return f(3)
else:
raise NotImplementedError
diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py
--- a/pypy/interpreter/argument.py
+++ b/pypy/interpreter/argument.py
@@ -169,9 +169,11 @@
def _combine_starstarargs_wrapped(self, w_starstararg):
# unpack the ** arguments
space = self.space
+ keywords, values_w = space.view_as_kwargs(w_starstararg)
+ if keywords is not None: # this path also taken for empty dicts
+ self._add_keywordargs_no_unwrapping(keywords, values_w)
+ return not jit.isconstant(len(self.keywords))
if space.isinstance_w(w_starstararg, space.w_dict):
- if not space.is_true(w_starstararg):
- return False # don't call unpackiterable - it's jit-opaque
keys_w = space.unpackiterable(w_starstararg)
else:
try:
@@ -186,11 +188,8 @@
"a mapping, not %s" % (typename,)))
raise
keys_w = space.unpackiterable(w_keys)
- if keys_w:
- self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
- return True
- else:
- return False # empty dict; don't disable the JIT
+ self._do_combine_starstarargs_wrapped(keys_w, w_starstararg)
+ return True
def _do_combine_starstarargs_wrapped(self, keys_w, w_starstararg):
space = self.space
@@ -227,6 +226,26 @@
self.keywords_w = self.keywords_w + keywords_w
self.keyword_names_w = keys_w
+ @jit.look_inside_iff(lambda self, keywords, keywords_w:
+ jit.isconstant(len(keywords) and
+ jit.isconstant(self.keywords)))
+ def _add_keywordargs_no_unwrapping(self, keywords, keywords_w):
+ if self.keywords is None:
+ self.keywords = keywords[:] # copy to make non-resizable
+ self.keywords_w = keywords_w[:]
+ else:
+ # looks quadratic, but the JIT should remove all of it nicely.
+ # Also, all the lists should be small
+ for key in keywords:
+ for otherkey in self.keywords:
+ if otherkey == key:
+ raise operationerrfmt(self.space.w_TypeError,
+ "got multiple values "
+ "for keyword argument "
+ "'%s'", key)
+ self.keywords = self.keywords + keywords
+ self.keywords_w = self.keywords_w + keywords_w
+
def fixedunpack(self, argcount):
"""The simplest argument parsing: get the 'argcount' arguments,
or raise a real ValueError if the length is wrong."""
@@ -385,7 +404,7 @@
# collect extra keyword arguments into the **kwarg
if has_kwarg:
- w_kwds = self.space.newdict()
+ w_kwds = self.space.newdict(kwargs=True)
if num_remainingkwds:
#
limit = len(keywords)
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py
--- a/pypy/interpreter/baseobjspace.py
+++ b/pypy/interpreter/baseobjspace.py
@@ -914,6 +914,12 @@
"""
return None
+ def view_as_kwargs(self, w_dict):
+ """ if w_dict is a kwargs-dict, return two lists, one of unwrapped
+ strings and one of wrapped values. otherwise return (None, None)
+ """
+ return (None, None)
+
def newlist_str(self, list_s):
return self.newlist([self.wrap(s) for s in list_s])
diff --git a/pypy/interpreter/test/test_argument.py b/pypy/interpreter/test/test_argument.py
--- a/pypy/interpreter/test/test_argument.py
+++ b/pypy/interpreter/test/test_argument.py
@@ -75,7 +75,10 @@
def unpackiterable(self, it):
return list(it)
- def newdict(self):
+ def view_as_kwargs(self, x):
+ return None, None
+
+ def newdict(self, kwargs=False):
return {}
def newlist(self, l=[]):
@@ -488,6 +491,57 @@
assert len(l) == 1
assert l[0] == space.wrap(5)
+ def test_starstarargs_special(self):
+ class kwargs(object):
+ def __init__(self, k, v):
+ self.k = k
+ self.v = v
+ class MyDummySpace(DummySpace):
+ def view_as_kwargs(self, kw):
+ if isinstance(kw, kwargs):
+ return kw.k, kw.v
+ return None, None
+ space = MyDummySpace()
+ for i in range(3):
+ kwds = [("c", 3)]
+ kwds_w = dict(kwds[:i])
+ keywords = kwds_w.keys()
+ keywords_w = kwds_w.values()
+ rest = dict(kwds[i:])
+ w_kwds = kwargs(rest.keys(), rest.values())
+ if i == 2:
+ w_kwds = None
+ assert len(keywords) == len(keywords_w)
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c"]), defaults_w=[4])
+ assert l == [1, 2, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults_w=[4, 5])
+ assert l == [1, 2, 4, 3]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults_w=[4, 5])
+ assert l == [1, 2, 3, 5]
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["c", "b", "a", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None, None]
+ py.test.raises(ArgErr, args._match_signature, None, l,
+ Signature(["a", "b", "c1", "d"]), defaults_w=[4, 5])
+ args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds)
+ l = [None, None, None]
+ args._match_signature(None, l, Signature(["a", "b"], None, "**"))
+ assert l == [1, 2, {'c': 3}]
+ excinfo = py.test.raises(OperationError, Arguments, space, [], ["a"],
+ [1], w_starstararg=kwargs(["a"], [2]))
+ assert excinfo.value.w_type is TypeError
+
+
+
class TestErrorHandling(object):
def test_missing_args(self):
# got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg,
diff --git a/pypy/module/pypyjit/test_pypy_c/test_call.py b/pypy/module/pypyjit/test_pypy_c/test_call.py
--- a/pypy/module/pypyjit/test_pypy_c/test_call.py
+++ b/pypy/module/pypyjit/test_pypy_c/test_call.py
@@ -244,6 +244,7 @@
print guards
assert len(guards) <= 20
+
def test_stararg_virtual(self):
def main(x):
def g(*args):
@@ -486,3 +487,38 @@
--TICK--
jump(..., descr=...)
""")
+
+ def test_kwargs_virtual2(self):
+ log = self.run("""
+ def f(*args, **kwargs):
+ kwargs['a'] = kwargs['z'] * 0
+ return g(1, *args, **kwargs)
+
+ def g(x, y, z=2, a=1):
+ return x - y + z + a
+
+ def main(stop):
+ res = 0
+ i = 0
+ while i < stop:
+ res = f(res, z=i) # ID: call
+ i += 1
+ return res""", [1000])
+ assert log.result == 500
+ loop, = log.loops_by_id('call')
+ print loop.ops_by_id('call')
+ assert loop.match("""
+ i65 = int_lt(i58, i29)
+ guard_true(i65, descr=...)
+ guard_not_invalidated(..., descr=...)
+ i66 = force_token()
+ i67 = force_token()
+ i69 = int_sub_ovf(1, i56)
+ guard_no_overflow(..., descr=...)
+ i70 = int_add_ovf(i69, i58)
+ guard_no_overflow(..., descr=...)
+ i71 = int_add(i58, 1)
+ --TICK--
+ jump(..., descr=...)
+ """)
+
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py
--- a/pypy/objspace/fake/objspace.py
+++ b/pypy/objspace/fake/objspace.py
@@ -110,7 +110,7 @@
"NOT_RPYTHON"
raise NotImplementedError
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return w_some_obj()
diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py
--- a/pypy/objspace/std/dictmultiobject.py
+++ b/pypy/objspace/std/dictmultiobject.py
@@ -33,7 +33,7 @@
@staticmethod
def allocate_and_init_instance(space, w_type=None, module=False,
- instance=False, strdict=False):
+ instance=False, strdict=False, kwargs=False):
if space.config.objspace.std.withcelldict and module:
from pypy.objspace.std.celldict import ModuleDictStrategy
@@ -46,11 +46,15 @@
assert w_type is None
strategy = space.fromcache(StringDictStrategy)
+ elif kwargs:
+ assert w_type is None
+ from pypy.objspace.std.kwargsdict import KwargsDictStrategy
+ strategy = space.fromcache(KwargsDictStrategy)
else:
strategy = space.fromcache(EmptyDictStrategy)
-
if w_type is None:
w_type = space.w_dict
+
storage = strategy.get_empty_storage()
w_self = space.allocate_instance(W_DictMultiObject, w_type)
W_DictMultiObject.__init__(w_self, space, strategy, storage)
@@ -91,7 +95,8 @@
getitem_str delitem length \
clear w_keys values \
items iter setdefault \
- popitem listview_str listview_int".split()
+ popitem listview_str listview_int \
+ view_as_kwargs".split()
def make_method(method):
def f(self, *args):
@@ -165,6 +170,9 @@
def listview_int(self, w_dict):
return None
+ def view_as_kwargs(self, w_dict):
+ return (None, None)
+
class EmptyDictStrategy(DictStrategy):
erase, unerase = rerased.new_erasing_pair("empty")
@@ -254,6 +262,9 @@
def popitem(self, w_dict):
raise KeyError
+ def view_as_kwargs(self, w_dict):
+ return ([], [])
+
registerimplementation(W_DictMultiObject)
# DictImplementation lattice
diff --git a/pypy/objspace/std/kwargsdict.py b/pypy/objspace/std/kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/kwargsdict.py
@@ -0,0 +1,165 @@
+## ----------------------------------------------------------------------------
+## dict strategy (see dictmultiobject.py)
+
+from pypy.rlib import rerased, jit
+from pypy.objspace.std.dictmultiobject import (DictStrategy,
+ IteratorImplementation,
+ ObjectDictStrategy,
+ StringDictStrategy)
+
+
+class KwargsDictStrategy(DictStrategy):
+ erase, unerase = rerased.new_erasing_pair("kwargsdict")
+ erase = staticmethod(erase)
+ unerase = staticmethod(unerase)
+
+ def wrap(self, key):
+ return self.space.wrap(key)
+
+ def unwrap(self, wrapped):
+ return self.space.str_w(wrapped)
+
+ def get_empty_storage(self):
+ d = ([], [])
+ return self.erase(d)
+
+ def is_correct_type(self, w_obj):
+ space = self.space
+ return space.is_w(space.type(w_obj), space.w_str)
+
+ def _never_equal_to(self, w_lookup_type):
+ return False
+
+ def iter(self, w_dict):
+ return KwargsDictIterator(self.space, self, w_dict)
+
+ def w_keys(self, w_dict):
+ return self.space.newlist([self.space.wrap(key) for key in self.unerase(w_dict.dstorage)[0]])
+
+ def setitem(self, w_dict, w_key, w_value):
+ space = self.space
+ if self.is_correct_type(w_key):
+ self.setitem_str(w_dict, self.unwrap(w_key), w_value)
+ return
+ else:
+ self.switch_to_object_strategy(w_dict)
+ w_dict.setitem(w_key, w_value)
+
+ def setitem_str(self, w_dict, key, w_value):
+ self._setitem_str_indirection(w_dict, key, w_value)
+
+ @jit.look_inside_iff(lambda self, w_dict, key, w_value:
+ jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _setitem_str_indirection(self, w_dict, key, w_value):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ values_w[i] = w_value
+ break
+ else:
+ # limit the size so that the linear searches don't become too long
+ if len(keys) >= 16:
+ self.switch_to_string_strategy(w_dict)
+ w_dict.setitem_str(key, w_value)
+ else:
+ keys.append(key)
+ values_w.append(w_value)
+
+ def setdefault(self, w_dict, w_key, w_default):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.setdefault(w_key, w_default)
+
+ def delitem(self, w_dict, w_key):
+ # XXX could do better, but is it worth it?
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.delitem(w_key)
+
+ def length(self, w_dict):
+ return len(self.unerase(w_dict.dstorage)[0])
+
+ def getitem_str(self, w_dict, key):
+ return self._getitem_str_indirection(w_dict, key)
+
+ @jit.look_inside_iff(lambda self, w_dict, key: jit.isconstant(self.length(w_dict)) and jit.isconstant(key))
+ def _getitem_str_indirection(self, w_dict, key):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ if keys[i] == key:
+ return values_w[i]
+ return None
+
+ def getitem(self, w_dict, w_key):
+ space = self.space
+ if self.is_correct_type(w_key):
+ return self.getitem_str(w_dict, self.unwrap(w_key))
+ elif self._never_equal_to(space.type(w_key)):
+ return None
+ else:
+ self.switch_to_object_strategy(w_dict)
+ return w_dict.getitem(w_key)
+
+ def w_keys(self, w_dict):
+ l = self.unerase(w_dict.dstorage)[0]
+ return self.space.newlist_str(l[:])
+
+ def values(self, w_dict):
+ return self.unerase(w_dict.dstorage)[1][:] # to make non-resizable
+
+ def items(self, w_dict):
+ space = self.space
+ keys, values_w = self.unerase(w_dict.dstorage)
+ result = []
+ for i in range(len(keys)):
+ result.append(space.newtuple([self.wrap(keys[i]), values_w[i]]))
+ return result
+
+ def popitem(self, w_dict):
+ keys, values_w = self.unerase(w_dict.dstorage)
+ key = keys.pop()
+ w_value = values_w.pop()
+ return (self.wrap(key), w_value)
+
+ def clear(self, w_dict):
+ w_dict.dstorage = self.get_empty_storage()
+
+ def switch_to_object_strategy(self, w_dict):
+ strategy = self.space.fromcache(ObjectDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ d_new = strategy.unerase(strategy.get_empty_storage())
+ for i in range(len(keys)):
+ d_new[self.wrap(keys[i])] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = strategy.erase(d_new)
+
+ def switch_to_string_strategy(self, w_dict):
+ strategy = self.space.fromcache(StringDictStrategy)
+ keys, values_w = self.unerase(w_dict.dstorage)
+ storage = strategy.get_empty_storage()
+ d_new = strategy.unerase(storage)
+ for i in range(len(keys)):
+ d_new[keys[i]] = values_w[i]
+ w_dict.strategy = strategy
+ w_dict.dstorage = storage
+
+ def view_as_kwargs(self, w_dict):
+ return self.unerase(w_dict.dstorage)
+
+
+class KwargsDictIterator(IteratorImplementation):
+ def __init__(self, space, strategy, dictimplementation):
+ IteratorImplementation.__init__(self, space, strategy, dictimplementation)
+ keys, values_w = strategy.unerase(self.dictimplementation.dstorage)
+ self.iterator = iter(range(len(keys)))
+ # XXX this potentially leaks
+ self.keys = keys
+ self.values_w = values_w
+
+ def next_entry(self):
+ # note that this 'for' loop only runs once, at most
+ for i in self.iterator:
+ return self.space.wrap(self.keys[i]), self.values_w[i]
+ else:
+ return None, None
diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py
--- a/pypy/objspace/std/objspace.py
+++ b/pypy/objspace/std/objspace.py
@@ -313,11 +313,11 @@
def newlist_str(self, list_s):
return W_ListObject.newlist_str(self, list_s)
- def newdict(self, module=False, instance=False,
+ def newdict(self, module=False, instance=False, kwargs=False,
strdict=False):
return W_DictMultiObject.allocate_and_init_instance(
self, module=module, instance=instance,
- strdict=strdict)
+ strdict=strdict, kwargs=kwargs)
def newset(self):
from pypy.objspace.std.setobject import newset
@@ -472,6 +472,11 @@
return w_obj.getitems_int()
return None
+ def view_as_kwargs(self, w_dict):
+ if type(w_dict) is W_DictMultiObject:
+ return w_dict.view_as_kwargs()
+ return (None, None)
+
def _uses_list_iter(self, w_obj):
from pypy.objspace.descroperation import list_iter
return self.lookup(w_obj, '__iter__') is list_iter(self)
diff --git a/pypy/objspace/std/test/test_kwargsdict.py b/pypy/objspace/std/test/test_kwargsdict.py
new file mode 100644
--- /dev/null
+++ b/pypy/objspace/std/test/test_kwargsdict.py
@@ -0,0 +1,120 @@
+import py
+from pypy.conftest import gettestobjspace, option
+from pypy.objspace.std.test.test_dictmultiobject import FakeSpace, W_DictMultiObject
+from pypy.objspace.std.kwargsdict import *
+
+space = FakeSpace()
+strategy = KwargsDictStrategy(space)
+
+def test_create():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem(space.wrap("a")) == 1
+ assert d.getitem(space.wrap("b")) == 2
+ assert d.getitem(space.wrap("c")) == 3
+ assert d.w_keys() == keys
+ assert d.values() == values
+
+def test_set_existing():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("a", 4) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("b", 5) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 3
+ assert d.setitem_str("c", 6) is None
+ assert d.getitem_str("a") == 4
+ assert d.getitem_str("b") == 5
+ assert d.getitem_str("c") == 6
+ assert d.getitem(space.wrap("a")) == 4
+ assert d.getitem(space.wrap("b")) == 5
+ assert d.getitem(space.wrap("c")) == 6
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c"]
+ assert values == [4, 5, 6]
+
+
+def test_set_new():
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") is None
+ assert d.setitem_str("d", 4) is None
+ assert d.getitem_str("a") == 1
+ assert d.getitem_str("b") == 2
+ assert d.getitem_str("c") == 3
+ assert d.getitem_str("d") == 4
+ assert d.w_keys() == keys
+ assert d.values() == values
+ assert keys == ["a", "b", "c", "d"]
+ assert values == [1, 2, 3, 4]
+
+def test_limit_size():
+ storage = strategy.get_empty_storage()
+ d = W_DictMultiObject(space, strategy, storage)
+ for i in range(100):
+ assert d.setitem_str("d%s" % i, 4) is None
+ assert d.strategy is not strategy
+ assert "StringDictStrategy" == d.strategy.__class__.__name__
+
+def test_keys_doesnt_wrap():
+ space = FakeSpace()
+ space.newlist = None
+ strategy = KwargsDictStrategy(space)
+ keys = ["a", "b", "c"]
+ values = [1, 2, 3]
+ storage = strategy.erase((keys, values))
+ d = W_DictMultiObject(space, strategy, storage)
+ w_l = d.w_keys() # does not crash
+
+
+from pypy.objspace.std.test.test_dictmultiobject import BaseTestRDictImplementation, BaseTestDevolvedDictImplementation
+def get_impl(self):
+ storage = strategy.erase(([], []))
+ return W_DictMultiObject(space, strategy, storage)
+class TestKwargsDictImplementation(BaseTestRDictImplementation):
+ StrategyClass = KwargsDictStrategy
+ get_impl = get_impl
+ def test_delitem(self):
+ pass # delitem devolves for now
+
+class TestDevolvedKwargsDictImplementation(BaseTestDevolvedDictImplementation):
+ get_impl = get_impl
+ StrategyClass = KwargsDictStrategy
+
+
+class AppTestKwargsDictStrategy(object):
+ def setup_class(cls):
+ if option.runappdirect:
+ py.test.skip("__repr__ doesn't work on appdirect")
+
+ def w_get_strategy(self, obj):
+ import __pypy__
+ r = __pypy__.internal_repr(obj)
+ return r[r.find("(") + 1: r.find(")")]
+
+ def test_create(self):
+ def f(**args):
+ return args
+ d = f(a=1)
+ assert "KwargsDictStrategy" in self.get_strategy(d)
+
From noreply at buildbot.pypy.org Fri Apr 13 22:03:23 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:03:23 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: reenable the ability to run
stdlib tests
Message-ID: <20120413200323.96AE982F4F@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54343:f8735b33c95a
Date: 2012-04-13 21:51 +0200
http://bitbucket.org/pypy/pypy/changeset/f8735b33c95a/
Log: reenable the ability to run stdlib tests
diff --git a/lib-python/conftest.py b/lib-python/conftest.py
--- a/lib-python/conftest.py
+++ b/lib-python/conftest.py
@@ -17,8 +17,7 @@
from pypy.conftest import gettestobjspace, option as pypy_option
from pypy.tool.pytest import appsupport
-from pypy.tool.pytest.confpath import pypydir, libpythondir, \
- regrtestdir, modregrtestdir, testresultdir
+from pypy.tool.pytest.confpath import pypydir, testdir, testresultdir
pytest_plugins = "resultlog",
rsyncdirs = ['.', '../pypy/']
@@ -76,14 +75,11 @@
compiler = property(compiler)
def ismodified(self):
- return modregrtestdir.join(self.basename).check()
+ #XXX: ask hg
+ return None
def getfspath(self):
- fn = modregrtestdir.join(self.basename)
- if fn.check():
- return fn
- fn = regrtestdir.join(self.basename)
- return fn
+ return testdir.join(self.basename)
def run_file(self, space):
fspath = self.getfspath()
@@ -526,7 +522,7 @@
listed_names = dict.fromkeys([regrtest.basename for regrtest in testmap])
listed_names['test_support.py'] = True # ignore this
missing = []
- for path in regrtestdir.listdir(fil='test_*.py'):
+ for path in testdir.listdir(fil='test_*.py'):
name = path.basename
if name not in listed_names:
missing.append(' RegrTest(%r),' % (name,))
@@ -547,7 +543,7 @@
regrtest = parent.config._basename2spec.get(path.basename, None)
if regrtest is None:
return
- if path.dirpath() not in (modregrtestdir, regrtestdir):
+ if path.dirpath() != testdir:
return
return RunFileExternal(path.basename, parent=parent, regrtest=regrtest)
@@ -715,14 +711,3 @@
lst.append('core')
return lst
-#
-# Sanity check (could be done more nicely too)
-#
-import os
-samefile = getattr(os.path, 'samefile',
- lambda x,y : str(x) == str(y))
-if samefile(os.getcwd(), str(regrtestdir.dirpath())):
- raise NotImplementedError(
- "Cannot run py.test with this current directory:\n"
- "the app-level sys.path will contain %s before %s)." % (
- regrtestdir.dirpath(), modregrtestdir.dirpath()))
diff --git a/pypy/tool/pytest/confpath.py b/pypy/tool/pytest/confpath.py
--- a/pypy/tool/pytest/confpath.py
+++ b/pypy/tool/pytest/confpath.py
@@ -7,6 +7,4 @@
distdir = pypydir.dirpath()
testresultdir = distdir.join('testresult')
assert pypydir.check(dir=1)
-libpythondir = lib_pypy.LIB_PYTHON
-regrtestdir = lib_pypy.LIB_PYTHON_VANILLA.join('test')
-modregrtestdir = lib_pypy.LIB_PYTHON_MODIFIED.join('test')
+testdir = lib_pypy.LIB_PYTHON.join('test')
From noreply at buildbot.pypy.org Fri Apr 13 22:03:24 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:03:24 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: fix tests for initial
sys.path
Message-ID: <20120413200324.CCE6582F50@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54344:39affc663dc5
Date: 2012-04-13 22:02 +0200
http://bitbucket.org/pypy/pypy/changeset/39affc663dc5/
Log: fix tests for initial sys.path
diff --git a/pypy/module/sys/test/test_initialpath.py b/pypy/module/sys/test/test_initialpath.py
--- a/pypy/module/sys/test/test_initialpath.py
+++ b/pypy/module/sys/test/test_initialpath.py
@@ -3,12 +3,10 @@
from pypy.module.sys.version import PYPY_VERSION, CPYTHON_VERSION
def build_hierarchy(prefix):
- dirname = '%d.%d' % (CPYTHON_VERSION[0],
- CPYTHON_VERSION[1])
+ dirname = '%d.%d' % CPYTHON_VERSION[:2]
a = prefix.join('lib_pypy').ensure(dir=1)
- b = prefix.join('lib-python', 'modified-%s' % dirname).ensure(dir=1)
- c = prefix.join('lib-python', dirname).ensure(dir=1)
- return a, b, c
+ b = prefix.join('lib-python', dirname).ensure(dir=1)
+ return a, b
def test_stdlib_in_prefix(tmpdir):
@@ -18,10 +16,7 @@
assert path[:len(dirs)] == map(str, dirs)
def test_include_libtk(tmpdir):
- lib_pypy, lib_python_modified, lib_python = build_hierarchy(tmpdir)
- lib_tk_modified = lib_python_modified.join('lib-tk')
+ lib_pypy, lib_python = build_hierarchy(tmpdir)
lib_tk = lib_python.join('lib-tk')
path = getinitialpath(None, str(tmpdir))
- i = path.index(str(lib_tk_modified))
- j = path.index(str(lib_tk))
- assert i < j
+ assert lib_tk in path
From noreply at buildbot.pypy.org Fri Apr 13 22:16:17 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:16:17 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: remove unneeded comment from
UserDict
Message-ID: <20120413201617.B123D82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54345:d0611e06277d
Date: 2012-04-13 22:06 +0200
http://bitbucket.org/pypy/pypy/changeset/d0611e06277d/
Log: remove unneeded comment from UserDict
diff --git a/lib-python/2.7/UserDict.py b/lib-python/2.7/UserDict.py
--- a/lib-python/2.7/UserDict.py
+++ b/lib-python/2.7/UserDict.py
@@ -1,10 +1,5 @@
"""A more or less complete user-defined wrapper around dictionary objects."""
-# XXX This is a bit of a hack (as usual :-))
-# the actual content of the file is not changed, but we put it here to make
-# virtualenv happy (because its internal logic expects at least one of the
-# REQUIRED_MODULES to be in modified-*)
-
class UserDict:
def __init__(self, dict=None, **kwargs):
self.data = {}
From noreply at buildbot.pypy.org Fri Apr 13 22:16:20 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:16:20 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: revert stdlib test package
__init__ to stdlib version
Message-ID: <20120413201620.1C85382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54346:1f72d5b26f8c
Date: 2012-04-13 22:08 +0200
http://bitbucket.org/pypy/pypy/changeset/1f72d5b26f8c/
Log: revert stdlib test package __init__ to stdlib version
diff --git a/lib-python/2.7/test/__init__.py b/lib-python/2.7/test/__init__.py
--- a/lib-python/2.7/test/__init__.py
+++ b/lib-python/2.7/test/__init__.py
@@ -1,11 +1,1 @@
-"""
-This package only contains the tests that we have modified for PyPy.
-It uses the 'official' hack to include the rest of the standard
-'test' package from CPython.
-
-This assumes that sys.path is configured to contain
-'lib-python/modified-2.7.0' before 'lib-python/2.7.0'.
-"""
-
-from pkgutil import extend_path
-__path__ = extend_path(__path__, __name__)
+# Dummy file to make this directory a package.
From noreply at buildbot.pypy.org Fri Apr 13 22:16:22 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:16:22 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: test.regrtest: undo the
findtestdir change, keep some simplifications for now
Message-ID: <20120413201622.7E58782F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54347:092e9fa2cf0e
Date: 2012-04-13 22:14 +0200
http://bitbucket.org/pypy/pypy/changeset/092e9fa2cf0e/
Log: test.regrtest: undo the findtestdir change, keep some
simplifications for now
diff --git a/lib-python/2.7/test/regrtest.py b/lib-python/2.7/test/regrtest.py
--- a/lib-python/2.7/test/regrtest.py
+++ b/lib-python/2.7/test/regrtest.py
@@ -680,13 +680,8 @@
def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS):
"""Return a list of all applicable test modules."""
- if testdir:
- testdirs = [testdir]
- else:
- testdirs = findtestdirs()
- names = {}
- for testdir in testdirs:
- names.update(dict.fromkeys(os.listdir(testdir)))
+ testdir = findtestdir(testdir)
+ names = os.listdir(testdir)
tests = []
others = set(stdtests) | nottests
for name in names:
@@ -1085,19 +1080,8 @@
# Collect cyclic trash.
gc.collect()
-def findtestdirs():
- # XXX hacking: returns a list of both the '2.7.0/test' and the
- # 'modified-2.7.0/test' directories, as full paths.
- testdir = os.path.abspath(os.path.dirname(__file__) or os.curdir)
- assert os.path.basename(testdir).lower() == 'test'
- maindir = os.path.dirname(testdir)
- libpythondir = os.path.dirname(maindir)
- maindirname = os.path.basename(maindir).lower()
- if maindirname.startswith('modified-'):
- maindirname = maindirname[len('modified-'):]
- testdir1 = os.path.join(libpythondir, maindirname, 'test')
- testdir2 = os.path.join(libpythondir, 'modified-'+maindirname, 'test')
- return [testdir1, testdir2]
+def findtestdir(path=None):
+ return path or os.path.dirname(__file__) or os.curdir
def removepy(names):
if not names:
From noreply at buildbot.pypy.org Fri Apr 13 22:16:24 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:16:24 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: remove the modified-
expectations from test_app_main
Message-ID: <20120413201624.F3C4C82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54348:c1ed24dc186f
Date: 2012-04-13 22:15 +0200
http://bitbucket.org/pypy/pypy/changeset/c1ed24dc186f/
Log: remove the modified- expectations from test_app_main
diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py
--- a/pypy/translator/goal/test2/test_app_main.py
+++ b/pypy/translator/goal/test2/test_app_main.py
@@ -803,7 +803,6 @@
fake_exe = prefix.join('bin/pypy-c').ensure(file=1)
expected_path = [str(prefix.join(subdir).ensure(dir=1))
for subdir in ('lib_pypy',
- 'lib-python/modified-%s' % cpy_ver,
'lib-python/%s' % cpy_ver)]
self.w_goal_dir = self.space.wrap(goal_dir)
@@ -837,9 +836,9 @@
app_main.os = os
pypy_c = os.path.join(self.trunkdir, 'pypy', 'translator', 'goal', 'pypy-c')
newpath = app_main.get_library_path(pypy_c)
- # we get at least lib_pypy, lib-python/modified-X.Y.Z,
+ # we get at least lib_pypy
# lib-python/X.Y.Z, and maybe more (e.g. plat-linux2)
- assert len(newpath) >= 3
+ assert len(newpath) >= 2
for p in newpath:
assert p.startswith(self.trunkdir)
finally:
From noreply at buildbot.pypy.org Fri Apr 13 22:55:53 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:55:53 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: fix vanilla stdlib ref in
stdlib___future__
Message-ID: <20120413205553.5241A82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54349:4ed884067730
Date: 2012-04-13 22:53 +0200
http://bitbucket.org/pypy/pypy/changeset/4ed884067730/
Log: fix vanilla stdlib ref in stdlib___future__
diff --git a/pypy/tool/stdlib___future__.py b/pypy/tool/stdlib___future__.py
--- a/pypy/tool/stdlib___future__.py
+++ b/pypy/tool/stdlib___future__.py
@@ -1,8 +1,8 @@
# load __future__.py constants
def load_module():
- from pypy.tool.lib_pypy import LIB_PYTHON_VANILLA
- module_path = LIB_PYTHON_VANILLA.join('__future__.py')
+ from pypy.tool.lib_pypy import LIB_PYTHON
+ module_path = LIB_PYTHON.join('__future__.py')
execfile(str(module_path), globals())
load_module()
From noreply at buildbot.pypy.org Fri Apr 13 22:55:54 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Fri, 13 Apr 2012 22:55:54 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: fix MODIFID/VANILLA refs of
lib-pypy test
Message-ID: <20120413205554.93D3582F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54350:77f9d750e08f
Date: 2012-04-13 22:55 +0200
http://bitbucket.org/pypy/pypy/changeset/77f9d750e08f/
Log: fix MODIFID/VANILLA refs of lib-pypy test
diff --git a/pypy/tool/test/test_lib_pypy.py b/pypy/tool/test/test_lib_pypy.py
--- a/pypy/tool/test/test_lib_pypy.py
+++ b/pypy/tool/test/test_lib_pypy.py
@@ -7,8 +7,6 @@
def test_lib_python_exists():
assert lib_pypy.LIB_PYTHON.check(dir=1)
- assert lib_pypy.LIB_PYTHON_VANILLA.check(dir=1)
- assert lib_pypy.LIB_PYTHON_MODIFIED.check(dir=1)
def test_import_from_lib_pypy():
_functools = lib_pypy.import_from_lib_pypy('_functools')
From noreply at buildbot.pypy.org Sat Apr 14 10:01:28 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 10:01:28 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: reverting merge so we dont
get the deletions when merging from vendor/stdlib
Message-ID: <20120414080128.C6ADA46E1E4@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54352:087b58bc32e9
Date: 2012-04-14 09:57 +0200
http://bitbucket.org/pypy/pypy/changeset/087b58bc32e9/
Log: reverting merge so we dont get the deletions when merging from
vendor/stdlib
From noreply at buildbot.pypy.org Sat Apr 14 10:01:27 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 10:01:27 +0200 (CEST)
Subject: [pypy-commit] pypy vendor/stdlib: remove everything thats not in
the stdlib or tells about the version from the vendor/stdlib branch
Message-ID: <20120414080127.791E282F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: vendor/stdlib
Changeset: r54351:93769ea57f9b
Date: 2012-04-14 09:54 +0200
http://bitbucket.org/pypy/pypy/changeset/93769ea57f9b/
Log: remove everything thats not in the stdlib or tells about the version
from the vendor/stdlib branch
diff too long, truncating to 10000 out of 1166718 lines
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
--- a/.gitignore
+++ /dev/null
@@ -1,21 +0,0 @@
-.hg
-.svn
-
-*.pyc
-*.pyo
-*~
-
-bin/pypy-c
-include/*.h
-lib_pypy/ctypes_config_cache/_[^_]*_*.py
-pypy/_cache
-pypy/doc/*.html
-pypy/doc/config/*.html
-pypy/doc/discussion/*.html
-pypy/module/cpyext/src/*.o
-pypy/module/cpyext/test/*.o
-pypy/module/test_lib_pypy/ctypes_tests/*.o
-pypy/translator/c/src/dtoa.o
-pypy/translator/goal/pypy-c
-pypy/translator/goal/target*-c
-release/
\ No newline at end of file
diff --git a/.hgignore b/.hgignore
deleted file mode 100644
--- a/.hgignore
+++ /dev/null
@@ -1,76 +0,0 @@
-syntax: glob
-*.py[co]
-*~
-.*.swp
-.idea
-.project
-.pydevproject
-
-syntax: regexp
-^testresult$
-^site-packages$
-^site-packages/.*$
-^site-packages/.*$
-^bin$
-^pypy/bin/pypy-c
-^pypy/module/cpyext/src/.+\.o$
-^pypy/module/cpyext/src/.+\.obj$
-^pypy/module/cpyext/test/.+\.errors$
-^pypy/module/cpyext/test/.+\.o$
-^pypy/module/cpyext/test/.+\.obj$
-^pypy/module/cpyext/test/.+\.manifest$
-^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$
-^pypy/doc/.+\.html$
-^pypy/doc/config/.+\.rst$
-^pypy/doc/basicblock\.asc$
-^pypy/doc/.+\.svninfo$
-^pypy/translator/c/src/libffi_msvc/.+\.obj$
-^pypy/translator/c/src/libffi_msvc/.+\.dll$
-^pypy/translator/c/src/libffi_msvc/.+\.lib$
-^pypy/translator/c/src/libffi_msvc/.+\.exp$
-^pypy/translator/c/src/cjkcodecs/.+\.o$
-^pypy/translator/c/src/cjkcodecs/.+\.obj$
-^pypy/translator/jvm/\.project$
-^pypy/translator/jvm/\.classpath$
-^pypy/translator/jvm/eclipse-bin$
-^pypy/translator/jvm/src/pypy/.+\.class$
-^pypy/translator/benchmark/docutils$
-^pypy/translator/benchmark/templess$
-^pypy/translator/benchmark/gadfly$
-^pypy/translator/benchmark/mako$
-^pypy/translator/benchmark/bench-custom\.benchmark_result$
-^pypy/translator/benchmark/shootout_benchmarks$
-^pypy/translator/goal/pypy-translation-snapshot$
-^pypy/translator/goal/pypy-c
-^pypy/translator/goal/pypy-jvm
-^pypy/translator/goal/pypy-jvm.jar
-^pypy/translator/goal/.+\.exe$
-^pypy/translator/goal/.+\.dll$
-^pypy/translator/goal/target.+-c$
-^pypy/_cache$
-^pypy/doc/statistic/.+\.html$
-^pypy/doc/statistic/.+\.eps$
-^pypy/doc/statistic/.+\.pdf$
-^pypy/translator/cli/src/pypylib\.dll$
-^pypy/translator/cli/src/query\.exe$
-^pypy/translator/cli/src/main\.exe$
-^lib_pypy/ctypes_config_cache/_.+_cache\.py$
-^lib_pypy/ctypes_config_cache/_.+_.+_\.py$
-^pypy/translator/cli/query-descriptions$
-^pypy/doc/discussion/.+\.html$
-^include/.+\.h$
-^include/.+\.inl$
-^pypy/doc/_build/.*$
-^pypy/doc/config/.+\.html$
-^pypy/doc/config/style\.css$
-^pypy/doc/jit/.+\.html$
-^pypy/doc/jit/style\.css$
-^pypy/doc/image/lattice1\.png$
-^pypy/doc/image/lattice2\.png$
-^pypy/doc/image/lattice3\.png$
-^pypy/doc/image/stackless_informal\.png$
-^pypy/doc/image/parsing_example.+\.png$
-^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$
-^compiled
-^.git/
-^release/
diff --git a/.hgsubstate b/.hgsubstate
deleted file mode 100644
diff --git a/.hgtags b/.hgtags
deleted file mode 100644
--- a/.hgtags
+++ /dev/null
@@ -1,4 +0,0 @@
-b590cf6de4190623aad9aa698694c22e614d67b9 release-1.5
-b48df0bf4e75b81d98f19ce89d4a7dc3e1dab5e5 benchmarked
-d8ac7d23d3ec5f9a0fa1264972f74a010dbfd07f release-1.6
-ff4af8f318821f7f5ca998613a60fca09aa137da release-1.7
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
--- a/LICENSE
+++ /dev/null
@@ -1,270 +0,0 @@
-License for files in the pypy/ directory
-==================================================
-
-Except when otherwise stated (look for LICENSE files in directories or
-information at the beginning of each file) all software and
-documentation in the 'pypy' directories is licensed as follows:
-
- The MIT License
-
- Permission is hereby granted, free of charge, to any person
- obtaining a copy of this software and associated documentation
- files (the "Software"), to deal in the Software without
- restriction, including without limitation the rights to use,
- copy, modify, merge, publish, distribute, sublicense, and/or
- sell copies of the Software, and to permit persons to whom the
- Software is furnished to do so, subject to the following conditions:
-
- The above copyright notice and this permission notice shall be included
- in all copies or substantial portions of the Software.
-
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
- OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
- THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
- LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
- FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
- DEALINGS IN THE SOFTWARE.
-
-
-PyPy Copyright holders 2003-2012
------------------------------------
-
-Except when otherwise stated (look for LICENSE files or information at
-the beginning of each file) the files in the 'pypy' directory are each
-copyrighted by one or more of the following people and organizations:
-
- Armin Rigo
- Maciej Fijalkowski
- Carl Friedrich Bolz
- Amaury Forgeot d'Arc
- Antonio Cuni
- Samuele Pedroni
- Michael Hudson
- Holger Krekel
- Alex Gaynor
- Christian Tismer
- Hakan Ardo
- Benjamin Peterson
- David Schneider
- Eric van Riet Paap
- Anders Chrigstrom
- Richard Emslie
- Dan Villiom Podlaski Christiansen
- Alexander Schremmer
- Lukas Diekmann
- Aurelien Campeas
- Anders Lehmann
- Camillo Bruni
- Niklaus Haldimann
- Sven Hager
- Leonardo Santagada
- Toon Verwaest
- Seo Sanghyeon
- Justin Peel
- Lawrence Oluyede
- Bartosz Skowron
- Jakub Gustak
- Guido Wesdorp
- Daniel Roberts
- Laura Creighton
- Adrien Di Mascio
- Ludovic Aubry
- Niko Matsakis
- Wim Lavrijsen
- Matti Picus
- Jason Creighton
- Jacob Hallen
- Alex Martelli
- Anders Hammarquist
- Jan de Mooij
- Stephan Diehl
- Michael Foord
- Stefan Schwarzer
- Tomek Meka
- Patrick Maupin
- Bob Ippolito
- Bruno Gola
- Alexandre Fayolle
- Marius Gedminas
- Simon Burton
- David Edelsohn
- Jean-Paul Calderone
- John Witulski
- Timo Paulssen
- holger krekel
- Dario Bertini
- Mark Pearse
- Andreas Stührk
- Jean-Philippe St. Pierre
- Guido van Rossum
- Pavel Vinogradov
- Valentino Volonghi
- Paul deGrandis
- Ilya Osadchiy
- Ronny Pfannschmidt
- Adrian Kuhn
- tav
- Georg Brandl
- Philip Jenvey
- Gerald Klix
- Wanja Saatkamp
- Boris Feigin
- Oscar Nierstrasz
- David Malcolm
- Eugene Oden
- Henry Mason
- Jeff Terrace
- Lukas Renggli
- Guenter Jantzen
- Ned Batchelder
- Bert Freudenberg
- Amit Regmi
- Ben Young
- Nicolas Chauvat
- Andrew Durdin
- Michael Schneider
- Nicholas Riley
- Rocco Moretti
- Gintautas Miliauskas
- Michael Twomey
- Igor Trindade Oliveira
- Lucian Branescu Mihaila
- Olivier Dormond
- Jared Grubb
- Karl Bartel
- Gabriel Lavoie
- Victor Stinner
- Brian Dorsey
- Stuart Williams
- Toby Watson
- Antoine Pitrou
- Justas Sadzevicius
- Neil Shepperd
- Mikael Schönenberg
- Gasper Zejn
- Jonathan David Riehl
- Elmo Mäntynen
- Anders Qvist
- Beatrice During
- Alexander Sedov
- Corbin Simpson
- Vincent Legoll
- Romain Guillebert
- Alan McIntyre
- Alex Perry
- Jens-Uwe Mager
- Simon Cross
- Dan Stromberg
- Guillebert Romain
- Carl Meyer
- Pieter Zieschang
- Alejandro J. Cura
- Sylvain Thenault
- Christoph Gerum
- Travis Francis Athougies
- Henrik Vendelbo
- Lutz Paelike
- Jacob Oscarson
- Martin Blais
- Lucio Torre
- Lene Wagner
- Miguel de Val Borro
- Artur Lisiecki
- Bruno Gola
- Ignas Mikalajunas
- Stefano Rivera
- Joshua Gilbert
- Godefroid Chappelle
- Yusei Tahara
- Christopher Armstrong
- Stephan Busemann
- Gustavo Niemeyer
- William Leslie
- Akira Li
- Kristjan Valur Jonsson
- Bobby Impollonia
- Michael Hudson-Doyle
- Laurence Tratt
- Yasir Suhail
- Andrew Thompson
- Anders Sigfridsson
- Floris Bruynooghe
- Jacek Generowicz
- Dan Colish
- Zooko Wilcox-O Hearn
- Dan Loewenherz
- Chris Lambacher
- Dinu Gherman
- Brett Cannon
- Daniel Neuhäuser
- Michael Chermside
- Konrad Delong
- Anna Ravencroft
- Greg Price
- Armin Ronacher
- Christian Muirhead
- Jim Baker
- Rodrigo Araújo
- Romain Guillebert
-
- Heinrich-Heine University, Germany
- Open End AB (formerly AB Strakt), Sweden
- merlinux GmbH, Germany
- tismerysoft GmbH, Germany
- Logilab Paris, France
- DFKI GmbH, Germany
- Impara, Germany
- Change Maker, Sweden
-
-The PyPy Logo as used by http://speed.pypy.org and others was created
-by Samuel Reis and is distributed on terms of Creative Commons Share Alike
-License.
-
-License for 'lib-python/2.7.0' and 'lib-python/2.7.0-modified'
-==============================================================
-
-Except when otherwise stated (look for LICENSE files or
-copyright/license information at the beginning of each file) the files
-in the 'lib-python/2.7.0' and 'lib-python/2.7.0-modified' directories
-are all copyrighted by the Python Software Foundation and licensed under
-the Python Software License of which you can find a copy here:
-http://www.python.org/doc/Copyright.html
-
-License for 'pypy/translator/jvm/src/jna.jar'
-=============================================
-
-The file 'pypy/translator/jvm/src/jna.jar' is licensed under the GNU
-Lesser General Public License of which you can find a copy here:
-http://www.gnu.org/licenses/lgpl.html
-
-License for 'pypy/translator/jvm/src/jasmin.jar'
-================================================
-
-The file 'pypy/translator/jvm/src/jasmin.jar' is copyright (c) 1996-2004 Jon Meyer
-and distributed with permission. The use of Jasmin by PyPy does not imply
-that PyPy is endorsed by Jon Meyer nor any of Jasmin's contributors. Furthermore,
-the following disclaimer applies to Jasmin:
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED
-WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
-INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
-TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-License for 'pypy/module/unicodedata/'
-======================================
-
-The following files are from the website of The Unicode Consortium
-at http://www.unicode.org/. For the terms of use of these files, see
-http://www.unicode.org/terms_of_use.html . Or they are derived from
-files from the above website, and the same terms of use apply.
-
- CompositionExclusions-*.txt
- EastAsianWidth-*.txt
- LineBreak-*.txt
- UnicodeData-*.txt
- UnihanNumeric-*.txt
diff --git a/README b/README
deleted file mode 100644
--- a/README
+++ /dev/null
@@ -1,24 +0,0 @@
-=====================================
-PyPy: Python in Python Implementation
-=====================================
-
-Welcome to PyPy!
-
-PyPy is both an implementation of the Python programming language, and
-an extensive compiler framework for dynamic language implementations.
-You can build self-contained Python implementations which execute
-independently from CPython.
-
-The home page is:
-
- http://pypy.org/
-
-The getting-started document will help guide you:
-
- http://doc.pypy.org/en/latest/getting-started.html
-
-It will also point you to the rest of the documentation which is generated
-from files in the pypy/doc directory within the source repositories. Enjoy
-and send us feedback!
-
- the pypy-dev team
diff --git a/_pytest/__init__.py b/_pytest/__init__.py
deleted file mode 100644
--- a/_pytest/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-#
-__version__ = '2.2.4.dev2'
diff --git a/_pytest/assertion/__init__.py b/_pytest/assertion/__init__.py
deleted file mode 100644
--- a/_pytest/assertion/__init__.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-support for presenting detailed information in failing assertions.
-"""
-import py
-import sys
-import pytest
-from _pytest.monkeypatch import monkeypatch
-from _pytest.assertion import util
-
-def pytest_addoption(parser):
- group = parser.getgroup("debugconfig")
- group.addoption('--assert', action="store", dest="assertmode",
- choices=("rewrite", "reinterp", "plain",),
- default="rewrite", metavar="MODE",
- help="""control assertion debugging tools.
-'plain' performs no assertion debugging.
-'reinterp' reinterprets assert statements after they failed to provide assertion expression information.
-'rewrite' (the default) rewrites assert statements in test modules on import
-to provide assert expression information. """)
- group.addoption('--no-assert', action="store_true", default=False,
- dest="noassert", help="DEPRECATED equivalent to --assert=plain")
- group.addoption('--nomagic', action="store_true", default=False,
- dest="nomagic", help="DEPRECATED equivalent to --assert=plain")
-
-class AssertionState:
- """State for the assertion plugin."""
-
- def __init__(self, config, mode):
- self.mode = mode
- self.trace = config.trace.root.get("assertion")
-
-def pytest_configure(config):
- mode = config.getvalue("assertmode")
- if config.getvalue("noassert") or config.getvalue("nomagic"):
- mode = "plain"
- if mode == "rewrite":
- try:
- import ast
- except ImportError:
- mode = "reinterp"
- else:
- if sys.platform.startswith('java'):
- mode = "reinterp"
- if mode != "plain":
- _load_modules(mode)
- m = monkeypatch()
- config._cleanup.append(m.undo)
- m.setattr(py.builtin.builtins, 'AssertionError',
- reinterpret.AssertionError)
- hook = None
- if mode == "rewrite":
- hook = rewrite.AssertionRewritingHook()
- sys.meta_path.append(hook)
- warn_about_missing_assertion(mode)
- config._assertstate = AssertionState(config, mode)
- config._assertstate.hook = hook
- config._assertstate.trace("configured with mode set to %r" % (mode,))
-
-def pytest_unconfigure(config):
- hook = config._assertstate.hook
- if hook is not None:
- sys.meta_path.remove(hook)
-
-def pytest_collection(session):
- # this hook is only called when test modules are collected
- # so for example not in the master process of pytest-xdist
- # (which does not collect test modules)
- hook = session.config._assertstate.hook
- if hook is not None:
- hook.set_session(session)
-
-def pytest_runtest_setup(item):
- def callbinrepr(op, left, right):
- hook_result = item.ihook.pytest_assertrepr_compare(
- config=item.config, op=op, left=left, right=right)
- for new_expl in hook_result:
- if new_expl:
- res = '\n~'.join(new_expl)
- if item.config.getvalue("assertmode") == "rewrite":
- # The result will be fed back a python % formatting
- # operation, which will fail if there are extraneous
- # '%'s in the string. Escape them here.
- res = res.replace("%", "%%")
- return res
- util._reprcompare = callbinrepr
-
-def pytest_runtest_teardown(item):
- util._reprcompare = None
-
-def pytest_sessionfinish(session):
- hook = session.config._assertstate.hook
- if hook is not None:
- hook.session = None
-
-def _load_modules(mode):
- """Lazily import assertion related code."""
- global rewrite, reinterpret
- from _pytest.assertion import reinterpret
- if mode == "rewrite":
- from _pytest.assertion import rewrite
-
-def warn_about_missing_assertion(mode):
- try:
- assert False
- except AssertionError:
- pass
- else:
- if mode == "rewrite":
- specifically = ("assertions which are not in test modules "
- "will be ignored")
- else:
- specifically = "failing tests may report as passing"
-
- sys.stderr.write("WARNING: " + specifically +
- " because assert statements are not executed "
- "by the underlying Python interpreter "
- "(are you using python -O?)\n")
-
-pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/_pytest/assertion/newinterpret.py b/_pytest/assertion/newinterpret.py
deleted file mode 100644
--- a/_pytest/assertion/newinterpret.py
+++ /dev/null
@@ -1,333 +0,0 @@
-"""
-Find intermediate evalutation results in assert statements through builtin AST.
-This should replace oldinterpret.py eventually.
-"""
-
-import sys
-import ast
-
-import py
-from _pytest.assertion import util
-from _pytest.assertion.reinterpret import BuiltinAssertionError
-
-
-if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
- # See http://bugs.jython.org/issue1497
- _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
- "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
- "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
- "List", "Tuple")
- _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
- "AugAssign", "Print", "For", "While", "If", "With", "Raise",
- "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
- "Exec", "Global", "Expr", "Pass", "Break", "Continue")
- _expr_nodes = set(getattr(ast, name) for name in _exprs)
- _stmt_nodes = set(getattr(ast, name) for name in _stmts)
- def _is_ast_expr(node):
- return node.__class__ in _expr_nodes
- def _is_ast_stmt(node):
- return node.__class__ in _stmt_nodes
-else:
- def _is_ast_expr(node):
- return isinstance(node, ast.expr)
- def _is_ast_stmt(node):
- return isinstance(node, ast.stmt)
-
-
-class Failure(Exception):
- """Error found while interpreting AST."""
-
- def __init__(self, explanation=""):
- self.cause = sys.exc_info()
- self.explanation = explanation
-
-
-def interpret(source, frame, should_fail=False):
- mod = ast.parse(source)
- visitor = DebugInterpreter(frame)
- try:
- visitor.visit(mod)
- except Failure:
- failure = sys.exc_info()[1]
- return getfailure(failure)
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --assert=plain)")
-
-def run(offending_line, frame=None):
- if frame is None:
- frame = py.code.Frame(sys._getframe(1))
- return interpret(offending_line, frame)
-
-def getfailure(e):
- explanation = util.format_explanation(e.explanation)
- value = e.cause[1]
- if str(value):
- lines = explanation.split('\n')
- lines[0] += " << %s" % (value,)
- explanation = '\n'.join(lines)
- text = "%s: %s" % (e.cause[0].__name__, explanation)
- if text.startswith('AssertionError: assert '):
- text = text[16:]
- return text
-
-operator_map = {
- ast.BitOr : "|",
- ast.BitXor : "^",
- ast.BitAnd : "&",
- ast.LShift : "<<",
- ast.RShift : ">>",
- ast.Add : "+",
- ast.Sub : "-",
- ast.Mult : "*",
- ast.Div : "/",
- ast.FloorDiv : "//",
- ast.Mod : "%",
- ast.Eq : "==",
- ast.NotEq : "!=",
- ast.Lt : "<",
- ast.LtE : "<=",
- ast.Gt : ">",
- ast.GtE : ">=",
- ast.Pow : "**",
- ast.Is : "is",
- ast.IsNot : "is not",
- ast.In : "in",
- ast.NotIn : "not in"
-}
-
-unary_map = {
- ast.Not : "not %s",
- ast.Invert : "~%s",
- ast.USub : "-%s",
- ast.UAdd : "+%s"
-}
-
-
-class DebugInterpreter(ast.NodeVisitor):
- """Interpret AST nodes to gleam useful debugging information. """
-
- def __init__(self, frame):
- self.frame = frame
-
- def generic_visit(self, node):
- # Fallback when we don't have a special implementation.
- if _is_ast_expr(node):
- mod = ast.Expression(node)
- co = self._compile(mod)
- try:
- result = self.frame.eval(co)
- except Exception:
- raise Failure()
- explanation = self.frame.repr(result)
- return explanation, result
- elif _is_ast_stmt(node):
- mod = ast.Module([node])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co)
- except Exception:
- raise Failure()
- return None, None
- else:
- raise AssertionError("can't handle %s" %(node,))
-
- def _compile(self, source, mode="eval"):
- return compile(source, "", mode)
-
- def visit_Expr(self, expr):
- return self.visit(expr.value)
-
- def visit_Module(self, mod):
- for stmt in mod.body:
- self.visit(stmt)
-
- def visit_Name(self, name):
- explanation, result = self.generic_visit(name)
- # See if the name is local.
- source = "%r in locals() is not globals()" % (name.id,)
- co = self._compile(source)
- try:
- local = self.frame.eval(co)
- except Exception:
- # have to assume it isn't
- local = None
- if local is None or not self.frame.is_true(local):
- return name.id, result
- return explanation, result
-
- def visit_Compare(self, comp):
- left = comp.left
- left_explanation, left_result = self.visit(left)
- for op, next_op in zip(comp.ops, comp.comparators):
- next_explanation, next_result = self.visit(next_op)
- op_symbol = operator_map[op.__class__]
- explanation = "%s %s %s" % (left_explanation, op_symbol,
- next_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=next_result)
- except Exception:
- raise Failure(explanation)
- try:
- if not self.frame.is_true(result):
- break
- except KeyboardInterrupt:
- raise
- except:
- break
- left_explanation, left_result = next_explanation, next_result
-
- if util._reprcompare is not None:
- res = util._reprcompare(op_symbol, left_result, next_result)
- if res:
- explanation = res
- return explanation, result
-
- def visit_BoolOp(self, boolop):
- is_or = isinstance(boolop.op, ast.Or)
- explanations = []
- for operand in boolop.values:
- explanation, result = self.visit(operand)
- explanations.append(explanation)
- if result == is_or:
- break
- name = is_or and " or " or " and "
- explanation = "(" + name.join(explanations) + ")"
- return explanation, result
-
- def visit_UnaryOp(self, unary):
- pattern = unary_map[unary.op.__class__]
- operand_explanation, operand_result = self.visit(unary.operand)
- explanation = pattern % (operand_explanation,)
- co = self._compile(pattern % ("__exprinfo_expr",))
- try:
- result = self.frame.eval(co, __exprinfo_expr=operand_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_BinOp(self, binop):
- left_explanation, left_result = self.visit(binop.left)
- right_explanation, right_result = self.visit(binop.right)
- symbol = operator_map[binop.op.__class__]
- explanation = "(%s %s %s)" % (left_explanation, symbol,
- right_explanation)
- source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_left=left_result,
- __exprinfo_right=right_result)
- except Exception:
- raise Failure(explanation)
- return explanation, result
-
- def visit_Call(self, call):
- func_explanation, func = self.visit(call.func)
- arg_explanations = []
- ns = {"__exprinfo_func" : func}
- arguments = []
- for arg in call.args:
- arg_explanation, arg_result = self.visit(arg)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- arguments.append(arg_name)
- arg_explanations.append(arg_explanation)
- for keyword in call.keywords:
- arg_explanation, arg_result = self.visit(keyword.value)
- arg_name = "__exprinfo_%s" % (len(ns),)
- ns[arg_name] = arg_result
- keyword_source = "%s=%%s" % (keyword.arg)
- arguments.append(keyword_source % (arg_name,))
- arg_explanations.append(keyword_source % (arg_explanation,))
- if call.starargs:
- arg_explanation, arg_result = self.visit(call.starargs)
- arg_name = "__exprinfo_star"
- ns[arg_name] = arg_result
- arguments.append("*%s" % (arg_name,))
- arg_explanations.append("*%s" % (arg_explanation,))
- if call.kwargs:
- arg_explanation, arg_result = self.visit(call.kwargs)
- arg_name = "__exprinfo_kwds"
- ns[arg_name] = arg_result
- arguments.append("**%s" % (arg_name,))
- arg_explanations.append("**%s" % (arg_explanation,))
- args_explained = ", ".join(arg_explanations)
- explanation = "%s(%s)" % (func_explanation, args_explained)
- args = ", ".join(arguments)
- source = "__exprinfo_func(%s)" % (args,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, **ns)
- except Exception:
- raise Failure(explanation)
- pattern = "%s\n{%s = %s\n}"
- rep = self.frame.repr(result)
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def _is_builtin_name(self, name):
- pattern = "%r not in globals() and %r not in locals()"
- source = pattern % (name.id, name.id)
- co = self._compile(source)
- try:
- return self.frame.eval(co)
- except Exception:
- return False
-
- def visit_Attribute(self, attr):
- if not isinstance(attr.ctx, ast.Load):
- return self.generic_visit(attr)
- source_explanation, source_result = self.visit(attr.value)
- explanation = "%s.%s" % (source_explanation, attr.attr)
- source = "__exprinfo_expr.%s" % (attr.attr,)
- co = self._compile(source)
- try:
- result = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- raise Failure(explanation)
- explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
- self.frame.repr(result),
- source_explanation, attr.attr)
- # Check if the attr is from an instance.
- source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
- source = source % (attr.attr,)
- co = self._compile(source)
- try:
- from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
- except Exception:
- from_instance = None
- if from_instance is None or self.frame.is_true(from_instance):
- rep = self.frame.repr(result)
- pattern = "%s\n{%s = %s\n}"
- explanation = pattern % (rep, rep, explanation)
- return explanation, result
-
- def visit_Assert(self, assrt):
- test_explanation, test_result = self.visit(assrt.test)
- explanation = "assert %s" % (test_explanation,)
- if not self.frame.is_true(test_result):
- try:
- raise BuiltinAssertionError
- except Exception:
- raise Failure(explanation)
- return explanation, test_result
-
- def visit_Assign(self, assign):
- value_explanation, value_result = self.visit(assign.value)
- explanation = "... = %s" % (value_explanation,)
- name = ast.Name("__exprinfo_expr", ast.Load(),
- lineno=assign.value.lineno,
- col_offset=assign.value.col_offset)
- new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
- col_offset=assign.col_offset)
- mod = ast.Module([new_assign])
- co = self._compile(mod, "exec")
- try:
- self.frame.exec_(co, __exprinfo_expr=value_result)
- except Exception:
- raise Failure(explanation)
- return explanation, value_result
diff --git a/_pytest/assertion/oldinterpret.py b/_pytest/assertion/oldinterpret.py
deleted file mode 100644
--- a/_pytest/assertion/oldinterpret.py
+++ /dev/null
@@ -1,552 +0,0 @@
-import py
-import sys, inspect
-from compiler import parse, ast, pycodegen
-from _pytest.assertion.util import format_explanation
-from _pytest.assertion.reinterpret import BuiltinAssertionError
-
-passthroughex = py.builtin._sysex
-
-class Failure:
- def __init__(self, node):
- self.exc, self.value, self.tb = sys.exc_info()
- self.node = node
-
-class View(object):
- """View base class.
-
- If C is a subclass of View, then C(x) creates a proxy object around
- the object x. The actual class of the proxy is not C in general,
- but a *subclass* of C determined by the rules below. To avoid confusion
- we call view class the class of the proxy (a subclass of C, so of View)
- and object class the class of x.
-
- Attributes and methods not found in the proxy are automatically read on x.
- Other operations like setting attributes are performed on the proxy, as
- determined by its view class. The object x is available from the proxy
- as its __obj__ attribute.
-
- The view class selection is determined by the __view__ tuples and the
- optional __viewkey__ method. By default, the selected view class is the
- most specific subclass of C whose __view__ mentions the class of x.
- If no such subclass is found, the search proceeds with the parent
- object classes. For example, C(True) will first look for a subclass
- of C with __view__ = (..., bool, ...) and only if it doesn't find any
- look for one with __view__ = (..., int, ...), and then ..., object,...
- If everything fails the class C itself is considered to be the default.
-
- Alternatively, the view class selection can be driven by another aspect
- of the object x, instead of the class of x, by overriding __viewkey__.
- See last example at the end of this module.
- """
-
- _viewcache = {}
- __view__ = ()
-
- def __new__(rootclass, obj, *args, **kwds):
- self = object.__new__(rootclass)
- self.__obj__ = obj
- self.__rootclass__ = rootclass
- key = self.__viewkey__()
- try:
- self.__class__ = self._viewcache[key]
- except KeyError:
- self.__class__ = self._selectsubclass(key)
- return self
-
- def __getattr__(self, attr):
- # attributes not found in the normal hierarchy rooted on View
- # are looked up in the object's real class
- return getattr(self.__obj__, attr)
-
- def __viewkey__(self):
- return self.__obj__.__class__
-
- def __matchkey__(self, key, subclasses):
- if inspect.isclass(key):
- keys = inspect.getmro(key)
- else:
- keys = [key]
- for key in keys:
- result = [C for C in subclasses if key in C.__view__]
- if result:
- return result
- return []
-
- def _selectsubclass(self, key):
- subclasses = list(enumsubclasses(self.__rootclass__))
- for C in subclasses:
- if not isinstance(C.__view__, tuple):
- C.__view__ = (C.__view__,)
- choices = self.__matchkey__(key, subclasses)
- if not choices:
- return self.__rootclass__
- elif len(choices) == 1:
- return choices[0]
- else:
- # combine the multiple choices
- return type('?', tuple(choices), {})
-
- def __repr__(self):
- return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
-
-
-def enumsubclasses(cls):
- for subcls in cls.__subclasses__():
- for subsubclass in enumsubclasses(subcls):
- yield subsubclass
- yield cls
-
-
-class Interpretable(View):
- """A parse tree node with a few extra methods."""
- explanation = None
-
- def is_builtin(self, frame):
- return False
-
- def eval(self, frame):
- # fall-back for unknown expression nodes
- try:
- expr = ast.Expression(self.__obj__)
- expr.filename = ''
- self.__obj__.filename = ''
- co = pycodegen.ExpressionCodeGenerator(expr).getCode()
- result = frame.eval(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.result = result
- self.explanation = self.explanation or frame.repr(self.result)
-
- def run(self, frame):
- # fall-back for unknown statement nodes
- try:
- expr = ast.Module(None, ast.Stmt([self.__obj__]))
- expr.filename = ''
- co = pycodegen.ModuleCodeGenerator(expr).getCode()
- frame.exec_(co)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- def nice_explanation(self):
- return format_explanation(self.explanation)
-
-
-class Name(Interpretable):
- __view__ = ast.Name
-
- def is_local(self, frame):
- source = '%r in locals() is not globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_global(self, frame):
- source = '%r in globals()' % self.name
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def is_builtin(self, frame):
- source = '%r not in locals() and %r not in globals()' % (
- self.name, self.name)
- try:
- return frame.is_true(frame.eval(source))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- super(Name, self).eval(frame)
- if not self.is_local(frame):
- self.explanation = self.name
-
-class Compare(Interpretable):
- __view__ = ast.Compare
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- for operation, expr2 in self.ops:
- if hasattr(self, 'result'):
- # shortcutting in chained expressions
- if not frame.is_true(self.result):
- break
- expr2 = Interpretable(expr2)
- expr2.eval(frame)
- self.explanation = "%s %s %s" % (
- expr.explanation, operation, expr2.explanation)
- source = "__exprinfo_left %s __exprinfo_right" % operation
- try:
- self.result = frame.eval(source,
- __exprinfo_left=expr.result,
- __exprinfo_right=expr2.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- expr = expr2
-
-class And(Interpretable):
- __view__ = ast.And
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if not frame.is_true(expr.result):
- break
- self.explanation = '(' + ' and '.join(explanations) + ')'
-
-class Or(Interpretable):
- __view__ = ast.Or
-
- def eval(self, frame):
- explanations = []
- for expr in self.nodes:
- expr = Interpretable(expr)
- expr.eval(frame)
- explanations.append(expr.explanation)
- self.result = expr.result
- if frame.is_true(expr.result):
- break
- self.explanation = '(' + ' or '.join(explanations) + ')'
-
-
-# == Unary operations ==
-keepalive = []
-for astclass, astpattern in {
- ast.Not : 'not __exprinfo_expr',
- ast.Invert : '(~__exprinfo_expr)',
- }.items():
-
- class UnaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.explanation = astpattern.replace('__exprinfo_expr',
- expr.explanation)
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(UnaryArith)
-
-# == Binary operations ==
-for astclass, astpattern in {
- ast.Add : '(__exprinfo_left + __exprinfo_right)',
- ast.Sub : '(__exprinfo_left - __exprinfo_right)',
- ast.Mul : '(__exprinfo_left * __exprinfo_right)',
- ast.Div : '(__exprinfo_left / __exprinfo_right)',
- ast.Mod : '(__exprinfo_left % __exprinfo_right)',
- ast.Power : '(__exprinfo_left ** __exprinfo_right)',
- }.items():
-
- class BinaryArith(Interpretable):
- __view__ = astclass
-
- def eval(self, frame, astpattern=astpattern):
- left = Interpretable(self.left)
- left.eval(frame)
- right = Interpretable(self.right)
- right.eval(frame)
- self.explanation = (astpattern
- .replace('__exprinfo_left', left .explanation)
- .replace('__exprinfo_right', right.explanation))
- try:
- self.result = frame.eval(astpattern,
- __exprinfo_left=left.result,
- __exprinfo_right=right.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
- keepalive.append(BinaryArith)
-
-
-class CallFunc(Interpretable):
- __view__ = ast.CallFunc
-
- def is_bool(self, frame):
- source = 'isinstance(__exprinfo_value, bool)'
- try:
- return frame.is_true(frame.eval(source,
- __exprinfo_value=self.result))
- except passthroughex:
- raise
- except:
- return False
-
- def eval(self, frame):
- node = Interpretable(self.node)
- node.eval(frame)
- explanations = []
- vars = {'__exprinfo_fn': node.result}
- source = '__exprinfo_fn('
- for a in self.args:
- if isinstance(a, ast.Keyword):
- keyword = a.name
- a = a.expr
- else:
- keyword = None
- a = Interpretable(a)
- a.eval(frame)
- argname = '__exprinfo_%d' % len(vars)
- vars[argname] = a.result
- if keyword is None:
- source += argname + ','
- explanations.append(a.explanation)
- else:
- source += '%s=%s,' % (keyword, argname)
- explanations.append('%s=%s' % (keyword, a.explanation))
- if self.star_args:
- star_args = Interpretable(self.star_args)
- star_args.eval(frame)
- argname = '__exprinfo_star'
- vars[argname] = star_args.result
- source += '*' + argname + ','
- explanations.append('*' + star_args.explanation)
- if self.dstar_args:
- dstar_args = Interpretable(self.dstar_args)
- dstar_args.eval(frame)
- argname = '__exprinfo_kwds'
- vars[argname] = dstar_args.result
- source += '**' + argname + ','
- explanations.append('**' + dstar_args.explanation)
- self.explanation = "%s(%s)" % (
- node.explanation, ', '.join(explanations))
- if source.endswith(','):
- source = source[:-1]
- source += ')'
- try:
- self.result = frame.eval(source, **vars)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- if not node.is_builtin(frame) or not self.is_bool(frame):
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-class Getattr(Interpretable):
- __view__ = ast.Getattr
-
- def eval(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- source = '__exprinfo_expr.%s' % self.attrname
- try:
- self.result = frame.eval(source, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
- self.explanation = '%s.%s' % (expr.explanation, self.attrname)
- # if the attribute comes from the instance, its value is interesting
- source = ('hasattr(__exprinfo_expr, "__dict__") and '
- '%r in __exprinfo_expr.__dict__' % self.attrname)
- try:
- from_instance = frame.is_true(
- frame.eval(source, __exprinfo_expr=expr.result))
- except passthroughex:
- raise
- except:
- from_instance = True
- if from_instance:
- r = frame.repr(self.result)
- self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
-
-# == Re-interpretation of full statements ==
-
-class Assert(Interpretable):
- __view__ = ast.Assert
-
- def run(self, frame):
- test = Interpretable(self.test)
- test.eval(frame)
- # print the result as 'assert '
- self.result = test.result
- self.explanation = 'assert ' + test.explanation
- if not frame.is_true(test.result):
- try:
- raise BuiltinAssertionError
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Assign(Interpretable):
- __view__ = ast.Assign
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = '... = ' + expr.explanation
- # fall-back-run the rest of the assignment
- ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
- mod = ast.Module(None, ast.Stmt([ass]))
- mod.filename = ''
- co = pycodegen.ModuleCodeGenerator(mod).getCode()
- try:
- frame.exec_(co, __exprinfo_expr=expr.result)
- except passthroughex:
- raise
- except:
- raise Failure(self)
-
-class Discard(Interpretable):
- __view__ = ast.Discard
-
- def run(self, frame):
- expr = Interpretable(self.expr)
- expr.eval(frame)
- self.result = expr.result
- self.explanation = expr.explanation
-
-class Stmt(Interpretable):
- __view__ = ast.Stmt
-
- def run(self, frame):
- for stmt in self.nodes:
- stmt = Interpretable(stmt)
- stmt.run(frame)
-
-
-def report_failure(e):
- explanation = e.node.nice_explanation()
- if explanation:
- explanation = ", in: " + explanation
- else:
- explanation = ""
- sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
-
-def check(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- expr = parse(s, 'eval')
- assert isinstance(expr, ast.Expression)
- node = Interpretable(expr.node)
- try:
- node.eval(frame)
- except passthroughex:
- raise
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
- else:
- if not frame.is_true(node.result):
- sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
-
-
-###########################################################
-# API / Entry points
-# #########################################################
-
-def interpret(source, frame, should_fail=False):
- module = Interpretable(parse(source, 'exec').node)
- #print "got module", module
- if isinstance(frame, py.std.types.FrameType):
- frame = py.code.Frame(frame)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- return getfailure(e)
- except passthroughex:
- raise
- except:
- import traceback
- traceback.print_exc()
- if should_fail:
- return ("(assertion failed, but when it was re-run for "
- "printing intermediate values, it did not fail. Suggestions: "
- "compute assert expression before the assert or use --assert=plain)")
- else:
- return None
-
-def getmsg(excinfo):
- if isinstance(excinfo, tuple):
- excinfo = py.code.ExceptionInfo(excinfo)
- #frame, line = gettbline(tb)
- #frame = py.code.Frame(frame)
- #return interpret(line, frame)
-
- tb = excinfo.traceback[-1]
- source = str(tb.statement).strip()
- x = interpret(source, tb.frame, should_fail=True)
- if not isinstance(x, str):
- raise TypeError("interpret returned non-string %r" % (x,))
- return x
-
-def getfailure(e):
- explanation = e.node.nice_explanation()
- if str(e.value):
- lines = explanation.split('\n')
- lines[0] += " << %s" % (e.value,)
- explanation = '\n'.join(lines)
- text = "%s: %s" % (e.exc.__name__, explanation)
- if text.startswith('AssertionError: assert '):
- text = text[16:]
- return text
-
-def run(s, frame=None):
- if frame is None:
- frame = sys._getframe(1)
- frame = py.code.Frame(frame)
- module = Interpretable(parse(s, 'exec').node)
- try:
- module.run(frame)
- except Failure:
- e = sys.exc_info()[1]
- report_failure(e)
-
-
-if __name__ == '__main__':
- # example:
- def f():
- return 5
- def g():
- return 3
- def h(x):
- return 'never'
- check("f() * g() == 5")
- check("not f()")
- check("not (f() and g() or 0)")
- check("f() == g()")
- i = 4
- check("i == f()")
- check("len(f()) == 0")
- check("isinstance(2+3+4, float)")
-
- run("x = i")
- check("x == 5")
-
- run("assert not f(), 'oops'")
- run("a, b, c = 1, 2")
- run("a, b, c = f()")
-
- check("max([f(),g()]) == 4")
- check("'hello'[g()] == 'h'")
- run("'guk%d' % h(f())")
diff --git a/_pytest/assertion/reinterpret.py b/_pytest/assertion/reinterpret.py
deleted file mode 100644
--- a/_pytest/assertion/reinterpret.py
+++ /dev/null
@@ -1,48 +0,0 @@
-import sys
-import py
-
-BuiltinAssertionError = py.builtin.builtins.AssertionError
-
-class AssertionError(BuiltinAssertionError):
- def __init__(self, *args):
- BuiltinAssertionError.__init__(self, *args)
- if args:
- try:
- self.msg = str(args[0])
- except py.builtin._sysex:
- raise
- except:
- self.msg = "<[broken __repr__] %s at %0xd>" %(
- args[0].__class__, id(args[0]))
- else:
- f = py.code.Frame(sys._getframe(1))
- try:
- source = f.code.fullsource
- if source is not None:
- try:
- source = source.getstatement(f.lineno, assertion=True)
- except IndexError:
- source = None
- else:
- source = str(source.deindent()).strip()
- except py.error.ENOENT:
- source = None
- # this can also occur during reinterpretation, when the
- # co_filename is set to "".
- if source:
- self.msg = reinterpret(source, f, should_fail=True)
- else:
- self.msg = ""
- if not self.args:
- self.args = (self.msg,)
-
-if sys.version_info > (3, 0):
- AssertionError.__module__ = "builtins"
- reinterpret_old = "old reinterpretation not available for py3"
-else:
- from _pytest.assertion.oldinterpret import interpret as reinterpret_old
-if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
- from _pytest.assertion.newinterpret import interpret as reinterpret
-else:
- reinterpret = reinterpret_old
-
diff --git a/_pytest/assertion/rewrite.py b/_pytest/assertion/rewrite.py
deleted file mode 100644
--- a/_pytest/assertion/rewrite.py
+++ /dev/null
@@ -1,597 +0,0 @@
-"""Rewrite assertion AST to produce nice error messages"""
-
-import ast
-import errno
-import itertools
-import imp
-import marshal
-import os
-import struct
-import sys
-import types
-
-import py
-from _pytest.assertion import util
-
-
-# Windows gives ENOENT in places *nix gives ENOTDIR.
-if sys.platform.startswith("win"):
- PATH_COMPONENT_NOT_DIR = errno.ENOENT
-else:
- PATH_COMPONENT_NOT_DIR = errno.ENOTDIR
-
-# py.test caches rewritten pycs in __pycache__.
-if hasattr(imp, "get_tag"):
- PYTEST_TAG = imp.get_tag() + "-PYTEST"
-else:
- if hasattr(sys, "pypy_version_info"):
- impl = "pypy"
- elif sys.platform == "java":
- impl = "jython"
- else:
- impl = "cpython"
- ver = sys.version_info
- PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
- del ver, impl
-
-PYC_EXT = ".py" + "c" if __debug__ else "o"
-PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
-
-REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
-
-class AssertionRewritingHook(object):
- """Import hook which rewrites asserts."""
-
- def __init__(self):
- self.session = None
- self.modules = {}
-
- def set_session(self, session):
- self.fnpats = session.config.getini("python_files")
- self.session = session
-
- def find_module(self, name, path=None):
- if self.session is None:
- return None
- sess = self.session
- state = sess.config._assertstate
- state.trace("find_module called for: %s" % name)
- names = name.rsplit(".", 1)
- lastname = names[-1]
- pth = None
- if path is not None and len(path) == 1:
- pth = path[0]
- if pth is None:
- try:
- fd, fn, desc = imp.find_module(lastname, path)
- except ImportError:
- return None
- if fd is not None:
- fd.close()
- tp = desc[2]
- if tp == imp.PY_COMPILED:
- if hasattr(imp, "source_from_cache"):
- fn = imp.source_from_cache(fn)
- else:
- fn = fn[:-1]
- elif tp != imp.PY_SOURCE:
- # Don't know what this is.
- return None
- else:
- fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
- fn_pypath = py.path.local(fn)
- # Is this a test file?
- if not sess.isinitpath(fn):
- # We have to be very careful here because imports in this code can
- # trigger a cycle.
- self.session = None
- try:
- for pat in self.fnpats:
- if fn_pypath.fnmatch(pat):
- state.trace("matched test file %r" % (fn,))
- break
- else:
- return None
- finally:
- self.session = sess
- else:
- state.trace("matched test file (was specified on cmdline): %r" % (fn,))
- # The requested module looks like a test file, so rewrite it. This is
- # the most magical part of the process: load the source, rewrite the
- # asserts, and load the rewritten source. We also cache the rewritten
- # module code in a special pyc. We must be aware of the possibility of
- # concurrent py.test processes rewriting and loading pycs. To avoid
- # tricky race conditions, we maintain the following invariant: The
- # cached pyc is always a complete, valid pyc. Operations on it must be
- # atomic. POSIX's atomic rename comes in handy.
- write = not sys.dont_write_bytecode
- cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
- if write:
- try:
- os.mkdir(cache_dir)
- except OSError:
- e = sys.exc_info()[1].errno
- if e == errno.EEXIST:
- # Either the __pycache__ directory already exists (the
- # common case) or it's blocked by a non-dir node. In the
- # latter case, we'll ignore it in _write_pyc.
- pass
- elif e == PATH_COMPONENT_NOT_DIR:
- # One of the path components was not a directory, likely
- # because we're in a zip file.
- write = False
- elif e == errno.EACCES:
- state.trace("read only directory: %r" % (fn_pypath.dirname,))
- write = False
- else:
- raise
- cache_name = fn_pypath.basename[:-3] + PYC_TAIL
- pyc = os.path.join(cache_dir, cache_name)
- # Notice that even if we're in a read-only directory, I'm going to check
- # for a cached pyc. This may not be optimal...
- co = _read_pyc(fn_pypath, pyc)
- if co is None:
- state.trace("rewriting %r" % (fn,))
- co = _rewrite_test(state, fn_pypath)
- if co is None:
- # Probably a SyntaxError in the test.
- return None
- if write:
- _make_rewritten_pyc(state, fn_pypath, pyc, co)
- else:
- state.trace("found cached rewritten pyc for %r" % (fn,))
- self.modules[name] = co, pyc
- return self
-
- def load_module(self, name):
- co, pyc = self.modules.pop(name)
- # I wish I could just call imp.load_compiled here, but __file__ has to
- # be set properly. In Python 3.2+, this all would be handled correctly
- # by load_compiled.
- mod = sys.modules[name] = imp.new_module(name)
- try:
- mod.__file__ = co.co_filename
- # Normally, this attribute is 3.2+.
- mod.__cached__ = pyc
- py.builtin.exec_(co, mod.__dict__)
- except:
- del sys.modules[name]
- raise
- return sys.modules[name]
-
-def _write_pyc(co, source_path, pyc):
- # Technically, we don't have to have the same pyc format as (C)Python, since
- # these "pycs" should never be seen by builtin import. However, there's
- # little reason deviate, and I hope sometime to be able to use
- # imp.load_compiled to load them. (See the comment in load_module above.)
- mtime = int(source_path.mtime())
- try:
- fp = open(pyc, "wb")
- except IOError:
- err = sys.exc_info()[1].errno
- if err == PATH_COMPONENT_NOT_DIR:
- # This happens when we get a EEXIST in find_module creating the
- # __pycache__ directory and __pycache__ is by some non-dir node.
- return False
- raise
- try:
- fp.write(imp.get_magic())
- fp.write(struct.pack(">",
- ast.Add : "+",
- ast.Sub : "-",
- ast.Mult : "*",
- ast.Div : "/",
- ast.FloorDiv : "//",
- ast.Mod : "%",
- ast.Eq : "==",
- ast.NotEq : "!=",
- ast.Lt : "<",
- ast.LtE : "<=",
- ast.Gt : ">",
- ast.GtE : ">=",
- ast.Pow : "**",
- ast.Is : "is",
- ast.IsNot : "is not",
- ast.In : "in",
- ast.NotIn : "not in"
-}
-
-
-def set_location(node, lineno, col_offset):
- """Set node location information recursively."""
- def _fix(node, lineno, col_offset):
- if "lineno" in node._attributes:
- node.lineno = lineno
- if "col_offset" in node._attributes:
- node.col_offset = col_offset
- for child in ast.iter_child_nodes(node):
- _fix(child, lineno, col_offset)
- _fix(node, lineno, col_offset)
- return node
-
-
-class AssertionRewriter(ast.NodeVisitor):
-
- def run(self, mod):
- """Find all assert statements in *mod* and rewrite them."""
- if not mod.body:
- # Nothing to do.
- return
- # Insert some special imports at the top of the module but after any
- # docstrings and __future__ imports.
- aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
- ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
- expect_docstring = True
- pos = 0
- lineno = 0
- for item in mod.body:
- if (expect_docstring and isinstance(item, ast.Expr) and
- isinstance(item.value, ast.Str)):
- doc = item.value.s
- if "PYTEST_DONT_REWRITE" in doc:
- # The module has disabled assertion rewriting.
- return
- lineno += len(doc) - 1
- expect_docstring = False
- elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
- item.module != "__future__"):
- lineno = item.lineno
- break
- pos += 1
- imports = [ast.Import([alias], lineno=lineno, col_offset=0)
- for alias in aliases]
- mod.body[pos:pos] = imports
- # Collect asserts.
- nodes = [mod]
- while nodes:
- node = nodes.pop()
- for name, field in ast.iter_fields(node):
- if isinstance(field, list):
- new = []
- for i, child in enumerate(field):
- if isinstance(child, ast.Assert):
- # Transform assert.
- new.extend(self.visit(child))
- else:
- new.append(child)
- if isinstance(child, ast.AST):
- nodes.append(child)
- setattr(node, name, new)
- elif (isinstance(field, ast.AST) and
- # Don't recurse into expressions as they can't contain
- # asserts.
- not isinstance(field, ast.expr)):
- nodes.append(field)
-
- def variable(self):
- """Get a new variable."""
- # Use a character invalid in python identifiers to avoid clashing.
- name = "@py_assert" + str(next(self.variable_counter))
- self.variables.append(name)
- return name
-
- def assign(self, expr):
- """Give *expr* a name."""
- name = self.variable()
- self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
- return ast.Name(name, ast.Load())
-
- def display(self, expr):
- """Call py.io.saferepr on the expression."""
- return self.helper("saferepr", expr)
-
- def helper(self, name, *args):
- """Call a helper in this module."""
- py_name = ast.Name("@pytest_ar", ast.Load())
- attr = ast.Attribute(py_name, "_" + name, ast.Load())
- return ast.Call(attr, list(args), [], None, None)
-
- def builtin(self, name):
- """Return the builtin called *name*."""
- builtin_name = ast.Name("@py_builtins", ast.Load())
- return ast.Attribute(builtin_name, name, ast.Load())
-
- def explanation_param(self, expr):
- specifier = "py" + str(next(self.variable_counter))
- self.explanation_specifiers[specifier] = expr
- return "%(" + specifier + ")s"
-
- def push_format_context(self):
- self.explanation_specifiers = {}
- self.stack.append(self.explanation_specifiers)
-
- def pop_format_context(self, expl_expr):
- current = self.stack.pop()
- if self.stack:
- self.explanation_specifiers = self.stack[-1]
- keys = [ast.Str(key) for key in current.keys()]
- format_dict = ast.Dict(keys, list(current.values()))
- form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
- name = "@py_format" + str(next(self.variable_counter))
- self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
- return ast.Name(name, ast.Load())
-
- def generic_visit(self, node):
- """Handle expressions we don't have custom code for."""
- assert isinstance(node, ast.expr)
- res = self.assign(node)
- return res, self.explanation_param(self.display(res))
-
- def visit_Assert(self, assert_):
- if assert_.msg:
- # There's already a message. Don't mess with it.
- return [assert_]
- self.statements = []
- self.cond_chain = ()
- self.variables = []
- self.variable_counter = itertools.count()
- self.stack = []
- self.on_failure = []
- self.push_format_context()
- # Rewrite assert into a bunch of statements.
- top_condition, explanation = self.visit(assert_.test)
- # Create failure message.
- body = self.on_failure
- negation = ast.UnaryOp(ast.Not(), top_condition)
- self.statements.append(ast.If(negation, body, []))
- explanation = "assert " + explanation
- template = ast.Str(explanation)
- msg = self.pop_format_context(template)
- fmt = self.helper("format_explanation", msg)
- err_name = ast.Name("AssertionError", ast.Load())
- exc = ast.Call(err_name, [fmt], [], None, None)
- if sys.version_info[0] >= 3:
- raise_ = ast.Raise(exc, None)
- else:
- raise_ = ast.Raise(exc, None, None)
- body.append(raise_)
- # Clear temporary variables by setting them to None.
- if self.variables:
- variables = [ast.Name(name, ast.Store()) for name in self.variables]
- clear = ast.Assign(variables, ast.Name("None", ast.Load()))
- self.statements.append(clear)
- # Fix line numbers.
- for stmt in self.statements:
- set_location(stmt, assert_.lineno, assert_.col_offset)
- return self.statements
-
- def visit_Name(self, name):
- # Check if the name is local or not.
- locs = ast.Call(self.builtin("locals"), [], [], None, None)
- globs = ast.Call(self.builtin("globals"), [], [], None, None)
- ops = [ast.In(), ast.IsNot()]
- test = ast.Compare(ast.Str(name.id), ops, [locs, globs])
- expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
- return name, self.explanation_param(expr)
-
- def visit_BoolOp(self, boolop):
- res_var = self.variable()
- expl_list = self.assign(ast.List([], ast.Load()))
- app = ast.Attribute(expl_list, "append", ast.Load())
- is_or = int(isinstance(boolop.op, ast.Or))
- body = save = self.statements
- fail_save = self.on_failure
- levels = len(boolop.values) - 1
- self.push_format_context()
- # Process each operand, short-circuting if needed.
- for i, v in enumerate(boolop.values):
- if i:
- fail_inner = []
- self.on_failure.append(ast.If(cond, fail_inner, []))
- self.on_failure = fail_inner
- self.push_format_context()
- res, expl = self.visit(v)
- body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
- expl_format = self.pop_format_context(ast.Str(expl))
- call = ast.Call(app, [expl_format], [], None, None)
- self.on_failure.append(ast.Expr(call))
- if i < levels:
- cond = res
- if is_or:
- cond = ast.UnaryOp(ast.Not(), cond)
- inner = []
- self.statements.append(ast.If(cond, inner, []))
- self.statements = body = inner
- self.statements = save
- self.on_failure = fail_save
- expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
- expl = self.pop_format_context(expl_template)
- return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
-
- def visit_UnaryOp(self, unary):
- pattern = unary_map[unary.op.__class__]
- operand_res, operand_expl = self.visit(unary.operand)
- res = self.assign(ast.UnaryOp(unary.op, operand_res))
- return res, pattern % (operand_expl,)
-
- def visit_BinOp(self, binop):
- symbol = binop_map[binop.op.__class__]
- left_expr, left_expl = self.visit(binop.left)
- right_expr, right_expl = self.visit(binop.right)
- explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
- res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
- return res, explanation
-
- def visit_Call(self, call):
- new_func, func_expl = self.visit(call.func)
- arg_expls = []
- new_args = []
- new_kwargs = []
- new_star = new_kwarg = None
- for arg in call.args:
- res, expl = self.visit(arg)
- new_args.append(res)
- arg_expls.append(expl)
- for keyword in call.keywords:
- res, expl = self.visit(keyword.value)
- new_kwargs.append(ast.keyword(keyword.arg, res))
- arg_expls.append(keyword.arg + "=" + expl)
- if call.starargs:
- new_star, expl = self.visit(call.starargs)
- arg_expls.append("*" + expl)
- if call.kwargs:
- new_kwarg, expl = self.visit(call.kwargs)
- arg_expls.append("**" + expl)
- expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
- new_call = ast.Call(new_func, new_args, new_kwargs, new_star, new_kwarg)
- res = self.assign(new_call)
- res_expl = self.explanation_param(self.display(res))
- outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
- return res, outer_expl
-
- def visit_Attribute(self, attr):
- if not isinstance(attr.ctx, ast.Load):
- return self.generic_visit(attr)
- value, value_expl = self.visit(attr.value)
- res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
- res_expl = self.explanation_param(self.display(res))
- pat = "%s\n{%s = %s.%s\n}"
- expl = pat % (res_expl, res_expl, value_expl, attr.attr)
- return res, expl
-
- def visit_Compare(self, comp):
- self.push_format_context()
- left_res, left_expl = self.visit(comp.left)
- res_variables = [self.variable() for i in range(len(comp.ops))]
- load_names = [ast.Name(v, ast.Load()) for v in res_variables]
- store_names = [ast.Name(v, ast.Store()) for v in res_variables]
- it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
- expls = []
- syms = []
- results = [left_res]
- for i, op, next_operand in it:
- next_res, next_expl = self.visit(next_operand)
- results.append(next_res)
- sym = binop_map[op.__class__]
- syms.append(ast.Str(sym))
- expl = "%s %s %s" % (left_expl, sym, next_expl)
- expls.append(ast.Str(expl))
- res_expr = ast.Compare(left_res, [op], [next_res])
- self.statements.append(ast.Assign([store_names[i]], res_expr))
- left_res, left_expl = next_res, next_expl
- # Use py.code._reprcompare if that's available.
- expl_call = self.helper("call_reprcompare",
- ast.Tuple(syms, ast.Load()),
- ast.Tuple(load_names, ast.Load()),
- ast.Tuple(expls, ast.Load()),
- ast.Tuple(results, ast.Load()))
- if len(comp.ops) > 1:
- res = ast.BoolOp(ast.And(), load_names)
- else:
- res = load_names[0]
- return res, self.explanation_param(self.pop_format_context(expl_call))
diff --git a/_pytest/assertion/util.py b/_pytest/assertion/util.py
deleted file mode 100644
--- a/_pytest/assertion/util.py
+++ /dev/null
@@ -1,213 +0,0 @@
-"""Utilities for assertion debugging"""
-
-import py
-
-
-# The _reprcompare attribute on the util module is used by the new assertion
-# interpretation code and assertion rewriter to detect this plugin was
-# loaded and in turn call the hooks defined here as part of the
-# DebugInterpreter.
-_reprcompare = None
-
-def format_explanation(explanation):
- """This formats an explanation
-
- Normally all embedded newlines are escaped, however there are
- three exceptions: \n{, \n} and \n~. The first two are intended
- cover nested explanations, see function and attribute explanations
- for examples (.visit_Call(), visit_Attribute()). The last one is
- for when one explanation needs to span multiple lines, e.g. when
- displaying diffs.
- """
- # simplify 'assert False where False = ...'
- where = 0
- while True:
- start = where = explanation.find("False\n{False = ", where)
- if where == -1:
- break
- level = 0
- for i, c in enumerate(explanation[start:]):
- if c == "{":
- level += 1
- elif c == "}":
- level -= 1
- if not level:
- break
- else:
- raise AssertionError("unbalanced braces: %r" % (explanation,))
- end = start + i
- where = end
- if explanation[end - 1] == '\n':
- explanation = (explanation[:start] + explanation[start+15:end-1] +
- explanation[end+1:])
- where -= 17
- raw_lines = (explanation or '').split('\n')
- # escape newlines not followed by {, } and ~
- lines = [raw_lines[0]]
- for l in raw_lines[1:]:
- if l.startswith('{') or l.startswith('}') or l.startswith('~'):
- lines.append(l)
- else:
- lines[-1] += '\\n' + l
-
- result = lines[:1]
- stack = [0]
- stackcnt = [0]
- for line in lines[1:]:
- if line.startswith('{'):
- if stackcnt[-1]:
- s = 'and '
- else:
- s = 'where '
- stack.append(len(result))
- stackcnt[-1] += 1
- stackcnt.append(0)
- result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
- elif line.startswith('}'):
- assert line.startswith('}')
- stack.pop()
- stackcnt.pop()
- result[stack[-1]] += line[1:]
- else:
- assert line.startswith('~')
- result.append(' '*len(stack) + line[1:])
- assert len(stack) == 1
- return '\n'.join(result)
-
-
-# Provide basestring in python3
-try:
- basestring = basestring
-except NameError:
- basestring = str
-
-
-def assertrepr_compare(op, left, right):
- """return specialised explanations for some operators/operands"""
- width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op
- left_repr = py.io.saferepr(left, maxsize=int(width/2))
- right_repr = py.io.saferepr(right, maxsize=width-len(left_repr))
- summary = '%s %s %s' % (left_repr, op, right_repr)
-
- issequence = lambda x: isinstance(x, (list, tuple))
- istext = lambda x: isinstance(x, basestring)
- isdict = lambda x: isinstance(x, dict)
- isset = lambda x: isinstance(x, set)
-
- explanation = None
- try:
- if op == '==':
- if istext(left) and istext(right):
- explanation = _diff_text(left, right)
- elif issequence(left) and issequence(right):
- explanation = _compare_eq_sequence(left, right)
- elif isset(left) and isset(right):
- explanation = _compare_eq_set(left, right)
- elif isdict(left) and isdict(right):
- explanation = _diff_text(py.std.pprint.pformat(left),
- py.std.pprint.pformat(right))
- elif op == 'not in':
- if istext(left) and istext(right):
- explanation = _notin_text(left, right)
- except py.builtin._sysex:
- raise
- except:
- excinfo = py.code.ExceptionInfo()
- explanation = ['(pytest_assertion plugin: representation of '
- 'details failed. Probably an object has a faulty __repr__.)',
- str(excinfo)
- ]
-
-
- if not explanation:
- return None
-
- # Don't include pageloads of data, should be configurable
- if len(''.join(explanation)) > 80*8:
- explanation = ['Detailed information too verbose, truncated']
-
- return [summary] + explanation
-
-
-def _diff_text(left, right):
- """Return the explanation for the diff between text
-
- This will skip leading and trailing characters which are
- identical to keep the diff minimal.
- """
- explanation = []
- i = 0 # just in case left or right has zero length
- for i in range(min(len(left), len(right))):
- if left[i] != right[i]:
- break
- if i > 42:
- i -= 10 # Provide some context
- explanation = ['Skipping %s identical '
- 'leading characters in diff' % i]
- left = left[i:]
- right = right[i:]
- if len(left) == len(right):
- for i in range(len(left)):
- if left[-i] != right[-i]:
- break
- if i > 42:
- i -= 10 # Provide some context
- explanation += ['Skipping %s identical '
- 'trailing characters in diff' % i]
- left = left[:-i]
- right = right[:-i]
- explanation += [line.strip('\n')
- for line in py.std.difflib.ndiff(left.splitlines(),
- right.splitlines())]
- return explanation
-
-
-def _compare_eq_sequence(left, right):
- explanation = []
- for i in range(min(len(left), len(right))):
- if left[i] != right[i]:
- explanation += ['At index %s diff: %r != %r' %
- (i, left[i], right[i])]
- break
- if len(left) > len(right):
- explanation += ['Left contains more items, '
- 'first extra item: %s' % py.io.saferepr(left[len(right)],)]
- elif len(left) < len(right):
- explanation += ['Right contains more items, '
- 'first extra item: %s' % py.io.saferepr(right[len(left)],)]
- return explanation # + _diff_text(py.std.pprint.pformat(left),
- # py.std.pprint.pformat(right))
-
-
-def _compare_eq_set(left, right):
- explanation = []
- diff_left = left - right
- diff_right = right - left
- if diff_left:
- explanation.append('Extra items in the left set:')
- for item in diff_left:
- explanation.append(py.io.saferepr(item))
- if diff_right:
- explanation.append('Extra items in the right set:')
- for item in diff_right:
- explanation.append(py.io.saferepr(item))
- return explanation
-
-
-def _notin_text(term, text):
- index = text.find(term)
- head = text[:index]
- tail = text[index+len(term):]
- correct_text = head + tail
- diff = _diff_text(correct_text, text)
- newdiff = ['%s is contained here:' % py.io.saferepr(term, maxsize=42)]
- for line in diff:
- if line.startswith('Skipping'):
- continue
- if line.startswith('- '):
- continue
- if line.startswith('+ '):
- newdiff.append(' ' + line[2:])
- else:
- newdiff.append(line)
- return newdiff
diff --git a/_pytest/capture.py b/_pytest/capture.py
deleted file mode 100644
--- a/_pytest/capture.py
+++ /dev/null
@@ -1,214 +0,0 @@
-""" per-test stdout/stderr capturing mechanisms, ``capsys`` and ``capfd`` function arguments. """
-
-import pytest, py
-import os
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption('--capture', action="store", default=None,
- metavar="method", type="choice", choices=['fd', 'sys', 'no'],
- help="per-test capturing method: one of fd (default)|sys|no.")
- group._addoption('-s', action="store_const", const="no", dest="capture",
- help="shortcut for --capture=no.")
-
- at pytest.mark.tryfirst
-def pytest_cmdline_parse(pluginmanager, args):
- # we want to perform capturing already for plugin/conftest loading
- if '-s' in args or "--capture=no" in args:
- method = "no"
- elif hasattr(os, 'dup') and '--capture=sys' not in args:
- method = "fd"
- else:
- method = "sys"
- capman = CaptureManager(method)
- pluginmanager.register(capman, "capturemanager")
-
-def addouterr(rep, outerr):
- for secname, content in zip(["out", "err"], outerr):
- if content:
- rep.sections.append(("Captured std%s" % secname, content))
-
-class NoCapture:
- def startall(self):
- pass
- def resume(self):
- pass
- def reset(self):
- pass
- def suspend(self):
- return "", ""
-
-class CaptureManager:
- def __init__(self, defaultmethod=None):
- self._method2capture = {}
- self._defaultmethod = defaultmethod
-
- def _maketempfile(self):
- f = py.std.tempfile.TemporaryFile()
- newf = py.io.dupfile(f, encoding="UTF-8")
- f.close()
- return newf
-
- def _makestringio(self):
- return py.io.TextIO()
-
- def _getcapture(self, method):
- if method == "fd":
- return py.io.StdCaptureFD(now=False,
- out=self._maketempfile(), err=self._maketempfile()
- )
- elif method == "sys":
- return py.io.StdCapture(now=False,
- out=self._makestringio(), err=self._makestringio()
- )
- elif method == "no":
- return NoCapture()
- else:
- raise ValueError("unknown capturing method: %r" % method)
-
- def _getmethod(self, config, fspath):
- if config.option.capture:
- method = config.option.capture
- else:
- try:
- method = config._conftest.rget("option_capture", path=fspath)
- except KeyError:
- method = "fd"
- if method == "fd" and not hasattr(os, 'dup'): # e.g. jython
- method = "sys"
- return method
-
- def reset_capturings(self):
- for name, cap in self._method2capture.items():
- cap.reset()
-
- def resumecapture_item(self, item):
- method = self._getmethod(item.config, item.fspath)
- if not hasattr(item, 'outerr'):
- item.outerr = ('', '') # we accumulate outerr on the item
- return self.resumecapture(method)
-
- def resumecapture(self, method=None):
- if hasattr(self, '_capturing'):
- raise ValueError("cannot resume, already capturing with %r" %
- (self._capturing,))
- if method is None:
- method = self._defaultmethod
- cap = self._method2capture.get(method)
- self._capturing = method
- if cap is None:
- self._method2capture[method] = cap = self._getcapture(method)
- cap.startall()
- else:
- cap.resume()
-
- def suspendcapture(self, item=None):
- self.deactivate_funcargs()
- if hasattr(self, '_capturing'):
- method = self._capturing
- cap = self._method2capture.get(method)
- if cap is not None:
- outerr = cap.suspend()
- del self._capturing
- if item:
- outerr = (item.outerr[0] + outerr[0],
- item.outerr[1] + outerr[1])
- return outerr
- if hasattr(item, 'outerr'):
- return item.outerr
- return "", ""
-
- def activate_funcargs(self, pyfuncitem):
- if not hasattr(pyfuncitem, 'funcargs'):
- return
- assert not hasattr(self, '_capturing_funcargs')
- self._capturing_funcargs = capturing_funcargs = []
- for name, capfuncarg in pyfuncitem.funcargs.items():
- if name in ('capsys', 'capfd'):
- capturing_funcargs.append(capfuncarg)
- capfuncarg._start()
-
- def deactivate_funcargs(self):
- capturing_funcargs = getattr(self, '_capturing_funcargs', None)
- if capturing_funcargs is not None:
- while capturing_funcargs:
- capfuncarg = capturing_funcargs.pop()
- capfuncarg._finalize()
- del self._capturing_funcargs
-
- def pytest_make_collect_report(self, __multicall__, collector):
- method = self._getmethod(collector.config, collector.fspath)
- try:
- self.resumecapture(method)
- except ValueError:
- return # recursive collect, XXX refactor capturing
- # to allow for more lightweight recursive capturing
- try:
- rep = __multicall__.execute()
- finally:
- outerr = self.suspendcapture()
- addouterr(rep, outerr)
- return rep
-
- @pytest.mark.tryfirst
- def pytest_runtest_setup(self, item):
- self.resumecapture_item(item)
-
- @pytest.mark.tryfirst
- def pytest_runtest_call(self, item):
- self.resumecapture_item(item)
- self.activate_funcargs(item)
-
- @pytest.mark.tryfirst
- def pytest_runtest_teardown(self, item):
- self.resumecapture_item(item)
-
- def pytest_keyboard_interrupt(self, excinfo):
- if hasattr(self, '_capturing'):
- self.suspendcapture()
-
- @pytest.mark.tryfirst
- def pytest_runtest_makereport(self, __multicall__, item, call):
- self.deactivate_funcargs()
- rep = __multicall__.execute()
- outerr = self.suspendcapture(item)
- if not rep.passed:
- addouterr(rep, outerr)
- if not rep.passed or rep.when == "teardown":
- outerr = ('', '')
- item.outerr = outerr
- return rep
-
-def pytest_funcarg__capsys(request):
- """enables capturing of writes to sys.stdout/sys.stderr and makes
- captured output available via ``capsys.readouterr()`` method calls
- which return a ``(out, err)`` tuple.
- """
- return CaptureFuncarg(py.io.StdCapture)
-
-def pytest_funcarg__capfd(request):
- """enables capturing of writes to file descriptors 1 and 2 and makes
- captured output available via ``capsys.readouterr()`` method calls
- which return a ``(out, err)`` tuple.
- """
- if not hasattr(os, 'dup'):
- py.test.skip("capfd funcarg needs os.dup")
- return CaptureFuncarg(py.io.StdCaptureFD)
-
-class CaptureFuncarg:
- def __init__(self, captureclass):
- self.capture = captureclass(now=False)
-
- def _start(self):
- self.capture.startall()
-
- def _finalize(self):
- if hasattr(self, 'capture'):
- self.capture.reset()
- del self.capture
-
- def readouterr(self):
- return self.capture.readouterr()
-
- def close(self):
- self._finalize()
diff --git a/_pytest/config.py b/_pytest/config.py
deleted file mode 100644
--- a/_pytest/config.py
+++ /dev/null
@@ -1,463 +0,0 @@
-""" command line options, ini-file and conftest.py processing. """
-
-import py
-import sys, os
-from _pytest.core import PluginManager
-import pytest
-
-def pytest_cmdline_parse(pluginmanager, args):
- config = Config(pluginmanager)
- config.parse(args)
- return config
-
-def pytest_unconfigure(config):
- while 1:
- try:
- fin = config._cleanup.pop()
- except IndexError:
- break
- fin()
-
-class Parser:
- """ Parser for command line arguments. """
-
- def __init__(self, usage=None, processopt=None):
- self._anonymous = OptionGroup("custom options", parser=self)
- self._groups = []
- self._processopt = processopt
- self._usage = usage
- self._inidict = {}
- self._ininames = []
- self.hints = []
-
- def processoption(self, option):
- if self._processopt:
- if option.dest:
- self._processopt(option)
-
- def addnote(self, note):
- self._notes.append(note)
-
- def getgroup(self, name, description="", after=None):
- """ get (or create) a named option Group.
-
- :name: unique name of the option group.
- :description: long description for --help output.
- :after: name of other group, used for ordering --help output.
- """
- for group in self._groups:
- if group.name == name:
- return group
- group = OptionGroup(name, description, parser=self)
- i = 0
- for i, grp in enumerate(self._groups):
- if grp.name == after:
- break
- self._groups.insert(i+1, group)
- return group
-
- def addoption(self, *opts, **attrs):
- """ add an optparse-style option. """
- self._anonymous.addoption(*opts, **attrs)
-
- def parse(self, args):
- self.optparser = optparser = MyOptionParser(self)
- groups = self._groups + [self._anonymous]
- for group in groups:
- if group.options:
- desc = group.description or group.name
- optgroup = py.std.optparse.OptionGroup(optparser, desc)
- optgroup.add_options(group.options)
- optparser.add_option_group(optgroup)
- return self.optparser.parse_args([str(x) for x in args])
-
- def parse_setoption(self, args, option):
- parsedoption, args = self.parse(args)
- for name, value in parsedoption.__dict__.items():
- setattr(option, name, value)
- return args
-
- def addini(self, name, help, type=None, default=None):
- """ add an ini-file option with the given name and description. """
- assert type in (None, "pathlist", "args", "linelist")
- self._inidict[name] = (help, type, default)
- self._ininames.append(name)
-
-
-class OptionGroup:
- def __init__(self, name, description="", parser=None):
- self.name = name
- self.description = description
- self.options = []
- self.parser = parser
-
- def addoption(self, *optnames, **attrs):
- """ add an option to this group. """
- option = py.std.optparse.Option(*optnames, **attrs)
- self._addoption_instance(option, shortupper=False)
-
- def _addoption(self, *optnames, **attrs):
- option = py.std.optparse.Option(*optnames, **attrs)
- self._addoption_instance(option, shortupper=True)
-
- def _addoption_instance(self, option, shortupper=False):
- if not shortupper:
- for opt in option._short_opts:
- if opt[0] == '-' and opt[1].islower():
- raise ValueError("lowercase shortoptions reserved")
- if self.parser:
- self.parser.processoption(option)
- self.options.append(option)
-
-
-class MyOptionParser(py.std.optparse.OptionParser):
- def __init__(self, parser):
- self._parser = parser
- py.std.optparse.OptionParser.__init__(self, usage=parser._usage,
- add_help_option=False)
- def format_epilog(self, formatter):
- hints = self._parser.hints
- if hints:
- s = "\n".join(["hint: " + x for x in hints]) + "\n"
- s = "\n" + s + "\n"
- return s
- return ""
-
-class Conftest(object):
- """ the single place for accessing values and interacting
- towards conftest modules from py.test objects.
- """
- def __init__(self, onimport=None, confcutdir=None):
- self._path2confmods = {}
- self._onimport = onimport
- self._conftestpath2mod = {}
- self._confcutdir = confcutdir
-
- def setinitial(self, args):
- """ try to find a first anchor path for looking up global values
- from conftests. This function is usually called _before_
- argument parsing. conftest files may add command line options
- and we thus have no completely safe way of determining
- which parts of the arguments are actually related to options
- and which are file system paths. We just try here to get
- bootstrapped ...
- """
- current = py.path.local()
- opt = '--confcutdir'
- for i in range(len(args)):
- opt1 = str(args[i])
- if opt1.startswith(opt):
- if opt1 == opt:
- if len(args) > i:
- p = current.join(args[i+1], abs=True)
- elif opt1.startswith(opt + "="):
- p = current.join(opt1[len(opt)+1:], abs=1)
- self._confcutdir = p
- break
- for arg in args + [current]:
- if hasattr(arg, 'startswith') and arg.startswith("--"):
- continue
- anchor = current.join(arg, abs=1)
- if anchor.check(): # we found some file object
- self._path2confmods[None] = self.getconftestmodules(anchor)
- # let's also consider test* dirs
- if anchor.check(dir=1):
- for x in anchor.listdir("test*"):
- if x.check(dir=1):
- self.getconftestmodules(x)
- break
- else:
- assert 0, "no root of filesystem?"
-
- def getconftestmodules(self, path):
- """ return a list of imported conftest modules for the given path. """
- try:
- clist = self._path2confmods[path]
- except KeyError:
- if path is None:
- raise ValueError("missing default confest.")
- dp = path.dirpath()
- clist = []
- if dp != path:
- cutdir = self._confcutdir
- if cutdir and path != cutdir and not path.relto(cutdir):
- pass
- else:
- conftestpath = path.join("conftest.py")
- if conftestpath.check(file=1):
- clist.append(self.importconftest(conftestpath))
- clist[:0] = self.getconftestmodules(dp)
- self._path2confmods[path] = clist
- # be defensive: avoid changes from caller side to
- # affect us by always returning a copy of the actual list
- return clist[:]
-
- def rget(self, name, path=None):
- mod, value = self.rget_with_confmod(name, path)
- return value
-
- def rget_with_confmod(self, name, path=None):
- modules = self.getconftestmodules(path)
- modules.reverse()
- for mod in modules:
- try:
- return mod, getattr(mod, name)
- except AttributeError:
- continue
- raise KeyError(name)
-
- def importconftest(self, conftestpath):
- assert conftestpath.check(), conftestpath
- try:
- return self._conftestpath2mod[conftestpath]
- except KeyError:
- pkgpath = conftestpath.pypkgpath()
- if pkgpath is None:
- _ensure_removed_sysmodule(conftestpath.purebasename)
- self._conftestpath2mod[conftestpath] = mod = conftestpath.pyimport()
- dirpath = conftestpath.dirpath()
- if dirpath in self._path2confmods:
- for path, mods in self._path2confmods.items():
- if path and path.relto(dirpath) or path == dirpath:
- assert mod not in mods
- mods.append(mod)
- self._postimport(mod)
- return mod
-
- def _postimport(self, mod):
- if self._onimport:
- self._onimport(mod)
- return mod
-
-def _ensure_removed_sysmodule(modname):
- try:
- del sys.modules[modname]
- except KeyError:
- pass
-
-class CmdOptions(object):
- """ holds cmdline options as attributes."""
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
- def __repr__(self):
- return "" %(self.__dict__,)
-
-class Config(object):
- """ access to configuration values, pluginmanager and plugin hooks. """
- def __init__(self, pluginmanager=None):
- #: command line option values, usually added via parser.addoption(...)
- #: or parser.getgroup(...).addoption(...) calls
- self.option = CmdOptions()
- self._parser = Parser(
- usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]",
- processopt=self._processopt,
- )
- #: a pluginmanager instance
- self.pluginmanager = pluginmanager or PluginManager(load=True)
- self.trace = self.pluginmanager.trace.root.get("config")
- self._conftest = Conftest(onimport=self._onimportconftest)
- self.hook = self.pluginmanager.hook
- self._inicache = {}
- self._cleanup = []
-
- @classmethod
- def fromdictargs(cls, option_dict, args):
- """ constructor useable for subprocesses. """
- config = cls()
- # XXX slightly crude way to initialize capturing
- import _pytest.capture
- _pytest.capture.pytest_cmdline_parse(config.pluginmanager, args)
- config._preparse(args, addopts=False)
- config.option.__dict__.update(option_dict)
- for x in config.option.plugins:
- config.pluginmanager.consider_pluginarg(x)
- return config
-
- def _onimportconftest(self, conftestmodule):
- self.trace("loaded conftestmodule %r" %(conftestmodule,))
- self.pluginmanager.consider_conftest(conftestmodule)
-
- def _processopt(self, opt):
- if hasattr(opt, 'default') and opt.dest:
- if not hasattr(self.option, opt.dest):
- setattr(self.option, opt.dest, opt.default)
-
- def _getmatchingplugins(self, fspath):
- allconftests = self._conftest._conftestpath2mod.values()
- plugins = [x for x in self.pluginmanager.getplugins()
- if x not in allconftests]
- plugins += self._conftest.getconftestmodules(fspath)
- return plugins
-
- def _setinitialconftest(self, args):
- # capture output during conftest init (#issue93)
- # XXX introduce load_conftest hook to avoid needing to know
- # about capturing plugin here
- capman = self.pluginmanager.getplugin("capturemanager")
- capman.resumecapture()
- try:
- try:
- self._conftest.setinitial(args)
- finally:
- out, err = capman.suspendcapture() # logging might have got it
- except:
- sys.stdout.write(out)
- sys.stderr.write(err)
- raise
-
- def _initini(self, args):
- self.inicfg = getcfg(args, ["pytest.ini", "tox.ini", "setup.cfg"])
- self._parser.addini('addopts', 'extra command line options', 'args')
- self._parser.addini('minversion', 'minimally required pytest version')
-
- def _preparse(self, args, addopts=True):
- self._initini(args)
- if addopts:
- args[:] = self.getini("addopts") + args
- self._checkversion()
- self.pluginmanager.consider_preparse(args)
- self.pluginmanager.consider_setuptools_entrypoints()
- self.pluginmanager.consider_env()
- self._setinitialconftest(args)
- self.pluginmanager.do_addoption(self._parser)
- if addopts:
- self.hook.pytest_cmdline_preparse(config=self, args=args)
-
- def _checkversion(self):
- minver = self.inicfg.get('minversion', None)
- if minver:
- ver = minver.split(".")
- myver = pytest.__version__.split(".")
- if myver < ver:
- raise pytest.UsageError(
- "%s:%d: requires pytest-%s, actual pytest-%s'" %(
- self.inicfg.config.path, self.inicfg.lineof('minversion'),
- minver, pytest.__version__))
-
- def parse(self, args):
- # parse given cmdline arguments into this config object.
- # Note that this can only be called once per testing process.
- assert not hasattr(self, 'args'), (
- "can only parse cmdline args at most once per Config object")
- self._origargs = args
- self._preparse(args)
- self._parser.hints.extend(self.pluginmanager._hints)
- args = self._parser.parse_setoption(args, self.option)
- if not args:
- args.append(py.std.os.getcwd())
- self.args = args
-
- def addinivalue_line(self, name, line):
- """ add a line to an ini-file option. The option must have been
- declared but might not yet be set in which case the line becomes the
- the first line in its value. """
- x = self.getini(name)
- assert isinstance(x, list)
- x.append(line) # modifies the cached list inline
-
- def getini(self, name):
- """ return configuration value from an ini file. If the
- specified name hasn't been registered through a prior ``parse.addini``
- call (usually from a plugin), a ValueError is raised. """
- try:
- return self._inicache[name]
- except KeyError:
- self._inicache[name] = val = self._getini(name)
- return val
-
- def _getini(self, name):
- try:
- description, type, default = self._parser._inidict[name]
- except KeyError:
- raise ValueError("unknown configuration value: %r" %(name,))
- try:
- value = self.inicfg[name]
- except KeyError:
- if default is not None:
- return default
- if type is None:
- return ''
- return []
- if type == "pathlist":
- dp = py.path.local(self.inicfg.config.path).dirpath()
- l = []
- for relpath in py.std.shlex.split(value):
- l.append(dp.join(relpath, abs=True))
- return l
- elif type == "args":
- return py.std.shlex.split(value)
- elif type == "linelist":
- return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
- else:
- assert type is None
- return value
-
- def _getconftest_pathlist(self, name, path=None):
- try:
- mod, relroots = self._conftest.rget_with_confmod(name, path)
- except KeyError:
- return None
- modpath = py.path.local(mod.__file__).dirpath()
- l = []
- for relroot in relroots:
- if not isinstance(relroot, py.path.local):
- relroot = relroot.replace("/", py.path.local.sep)
- relroot = modpath.join(relroot, abs=True)
- l.append(relroot)
- return l
-
- def _getconftest(self, name, path=None, check=False):
- if check:
- self._checkconftest(name)
- return self._conftest.rget(name, path)
-
- def getvalue(self, name, path=None):
- """ return ``name`` value looked set from command line options.
-
- (deprecated) if we can't find the option also lookup
- the name in a matching conftest file.
- """
- try:
- return getattr(self.option, name)
- except AttributeError:
- return self._getconftest(name, path, check=False)
-
- def getvalueorskip(self, name, path=None):
- """ (deprecated) return getvalue(name) or call
- py.test.skip if no value exists. """
- __tracebackhide__ = True
- try:
- val = self.getvalue(name, path)
- if val is None:
- raise KeyError(name)
- return val
- except KeyError:
- py.test.skip("no %r value found" %(name,))
-
-
-def getcfg(args, inibasenames):
- args = [x for x in args if not str(x).startswith("-")]
- if not args:
- args = [py.path.local()]
- for arg in args:
- arg = py.path.local(arg)
- for base in arg.parts(reverse=True):
- for inibasename in inibasenames:
- p = base.join(inibasename)
- if p.check():
- iniconfig = py.iniconfig.IniConfig(p)
- if 'pytest' in iniconfig.sections:
- return iniconfig['pytest']
- return {}
-
-def findupwards(current, basename):
- current = py.path.local(current)
- while 1:
- p = current.join(basename)
- if p.check():
- return p
- p = current.dirpath()
- if p == current:
- return
- current = p
-
diff --git a/_pytest/core.py b/_pytest/core.py
deleted file mode 100644
--- a/_pytest/core.py
+++ /dev/null
@@ -1,477 +0,0 @@
-"""
-pytest PluginManager, basic initialization and tracing.
-(c) Holger Krekel 2004-2010
-"""
-import sys, os
-import inspect
-import py
-from _pytest import hookspec # the extension point definitions
-
-assert py.__version__.split(".")[:2] >= ['1', '4'], ("installation problem: "
- "%s is too old, remove or upgrade 'py'" % (py.__version__))
-
-default_plugins = (
- "config mark main terminal runner python pdb unittest capture skipping "
- "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion genscript "
- "junitxml resultlog doctest").split()
-
-class TagTracer:
- def __init__(self):
- self._tag2proc = {}
- self.writer = None
- self.indent = 0
-
- def get(self, name):
- return TagTracerSub(self, (name,))
-
- def processmessage(self, tags, args):
- if self.writer is not None:
- if args:
- indent = " " * self.indent
- content = " ".join(map(str, args))
- self.writer("%s%s [%s]\n" %(indent, content, ":".join(tags)))
- try:
- self._tag2proc[tags](tags, args)
- except KeyError:
- pass
-
- def setwriter(self, writer):
- self.writer = writer
-
- def setprocessor(self, tags, processor):
- if isinstance(tags, str):
- tags = tuple(tags.split(":"))
- else:
- assert isinstance(tags, tuple)
- self._tag2proc[tags] = processor
-
-class TagTracerSub:
- def __init__(self, root, tags):
- self.root = root
- self.tags = tags
- def __call__(self, *args):
- self.root.processmessage(self.tags, args)
- def setmyprocessor(self, processor):
- self.root.setprocessor(self.tags, processor)
- def get(self, name):
- return self.__class__(self.root, self.tags + (name,))
-
-class PluginManager(object):
- def __init__(self, load=False):
- self._name2plugin = {}
- self._listattrcache = {}
- self._plugins = []
- self._hints = []
- self.trace = TagTracer().get("pluginmanage")
- self._plugin_distinfo = []
- if os.environ.get('PYTEST_DEBUG'):
- err = sys.stderr
- encoding = getattr(err, 'encoding', 'utf8')
- try:
- err = py.io.dupfile(err, encoding=encoding)
- except Exception:
- pass
- self.trace.root.setwriter(err.write)
- self.hook = HookRelay([hookspec], pm=self)
- self.register(self)
- if load:
- for spec in default_plugins:
- self.import_plugin(spec)
-
- def register(self, plugin, name=None, prepend=False):
- assert not self.isregistered(plugin), plugin
- name = name or getattr(plugin, '__name__', str(id(plugin)))
- if name in self._name2plugin:
- return False
- #self.trace("registering", name, plugin)
- self._name2plugin[name] = plugin
- self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self})
- self.hook.pytest_plugin_registered(manager=self, plugin=plugin)
- if not prepend:
- self._plugins.append(plugin)
- else:
- self._plugins.insert(0, plugin)
- return True
-
- def unregister(self, plugin=None, name=None):
- if plugin is None:
- plugin = self.getplugin(name=name)
- self._plugins.remove(plugin)
- self.hook.pytest_plugin_unregistered(plugin=plugin)
- for name, value in list(self._name2plugin.items()):
- if value == plugin:
- del self._name2plugin[name]
-
- def isregistered(self, plugin, name=None):
- if self.getplugin(name) is not None:
- return True
- for val in self._name2plugin.values():
- if plugin == val:
- return True
-
- def addhooks(self, spec):
- self.hook._addhooks(spec, prefix="pytest_")
-
- def getplugins(self):
- return list(self._plugins)
-
- def skipifmissing(self, name):
- if not self.hasplugin(name):
- py.test.skip("plugin %r is missing" % name)
-
- def hasplugin(self, name):
- return bool(self.getplugin(name))
-
- def getplugin(self, name):
- if name is None:
- return None
- try:
- return self._name2plugin[name]
- except KeyError:
- return self._name2plugin.get("_pytest." + name, None)
-
- # API for bootstrapping
- #
- def _envlist(self, varname):
- val = py.std.os.environ.get(varname, None)
- if val is not None:
- return val.split(',')
- return ()
-
- def consider_env(self):
- for spec in self._envlist("PYTEST_PLUGINS"):
- self.import_plugin(spec)
-
- def consider_setuptools_entrypoints(self):
- try:
- from pkg_resources import iter_entry_points, DistributionNotFound
- except ImportError:
- return # XXX issue a warning
- for ep in iter_entry_points('pytest11'):
- name = ep.name
- if name.startswith("pytest_"):
- name = name[7:]
- if ep.name in self._name2plugin or name in self._name2plugin:
- continue
- try:
- plugin = ep.load()
- except DistributionNotFound:
- continue
- self._plugin_distinfo.append((ep.dist, plugin))
- self.register(plugin, name=name)
-
- def consider_preparse(self, args):
- for opt1,opt2 in zip(args, args[1:]):
- if opt1 == "-p":
- self.consider_pluginarg(opt2)
-
- def consider_pluginarg(self, arg):
- if arg.startswith("no:"):
- name = arg[3:]
- if self.getplugin(name) is not None:
- self.unregister(None, name=name)
- self._name2plugin[name] = -1
- else:
- if self.getplugin(arg) is None:
- self.import_plugin(arg)
-
- def consider_conftest(self, conftestmodule):
- if self.register(conftestmodule, name=conftestmodule.__file__):
- self.consider_module(conftestmodule)
-
- def consider_module(self, mod):
- attr = getattr(mod, "pytest_plugins", ())
- if attr:
- if not isinstance(attr, (list, tuple)):
- attr = (attr,)
- for spec in attr:
- self.import_plugin(spec)
-
- def import_plugin(self, modname):
- assert isinstance(modname, str)
- if self.getplugin(modname) is not None:
- return
- try:
- #self.trace("importing", modname)
- mod = importplugin(modname)
- except KeyboardInterrupt:
- raise
- except ImportError:
- if modname.startswith("pytest_"):
- return self.import_plugin(modname[7:])
- raise
- except:
- e = py.std.sys.exc_info()[1]
- if not hasattr(py.test, 'skip'):
- raise
- elif not isinstance(e, py.test.skip.Exception):
- raise
- self._hints.append("skipped plugin %r: %s" %((modname, e.msg)))
- else:
- self.register(mod, modname)
- self.consider_module(mod)
-
- def pytest_configure(self, config):
- config.addinivalue_line("markers",
- "tryfirst: mark a hook implementation function such that the "
- "plugin machinery will try to call it first/as early as possible.")
- config.addinivalue_line("markers",
- "trylast: mark a hook implementation function such that the "
- "plugin machinery will try to call it last/as late as possible.")
-
- def pytest_plugin_registered(self, plugin):
- import pytest
- dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
- if dic:
- self._setns(pytest, dic)
- if hasattr(self, '_config'):
- self.call_plugin(plugin, "pytest_addoption",
- {'parser': self._config._parser})
- self.call_plugin(plugin, "pytest_configure",
- {'config': self._config})
-
- def _setns(self, obj, dic):
- import pytest
- for name, value in dic.items():
- if isinstance(value, dict):
- mod = getattr(obj, name, None)
- if mod is None:
- modname = "pytest.%s" % name
- mod = py.std.types.ModuleType(modname)
- sys.modules[modname] = mod
- mod.__all__ = []
- setattr(obj, name, mod)
- obj.__all__.append(name)
- self._setns(mod, value)
- else:
- setattr(obj, name, value)
- obj.__all__.append(name)
- #if obj != pytest:
- # pytest.__all__.append(name)
- setattr(pytest, name, value)
-
- def pytest_terminal_summary(self, terminalreporter):
- tw = terminalreporter._tw
- if terminalreporter.config.option.traceconfig:
- for hint in self._hints:
- tw.line("hint: %s" % hint)
-
- def do_addoption(self, parser):
- mname = "pytest_addoption"
- methods = reversed(self.listattr(mname))
- MultiCall(methods, {'parser': parser}).execute()
-
- def do_configure(self, config):
- assert not hasattr(self, '_config')
- self._config = config
- config.hook.pytest_configure(config=self._config)
-
- def do_unconfigure(self, config):
- config = self._config
- del self._config
- config.hook.pytest_unconfigure(config=config)
- config.pluginmanager.unregister(self)
-
- def notify_exception(self, excinfo, option=None):
- if option and option.fulltrace:
- style = "long"
- else:
- style = "native"
- excrepr = excinfo.getrepr(funcargs=True,
- showlocals=getattr(option, 'showlocals', False),
- style=style,
- )
- res = self.hook.pytest_internalerror(excrepr=excrepr)
- if not py.builtin.any(res):
- for line in str(excrepr).split("\n"):
- sys.stderr.write("INTERNALERROR> %s\n" %line)
- sys.stderr.flush()
-
- def listattr(self, attrname, plugins=None):
- if plugins is None:
- plugins = self._plugins
- key = (attrname,) + tuple(plugins)
- try:
- return list(self._listattrcache[key])
- except KeyError:
- pass
- l = []
- last = []
- for plugin in plugins:
- try:
- meth = getattr(plugin, attrname)
- if hasattr(meth, 'tryfirst'):
- last.append(meth)
- elif hasattr(meth, 'trylast'):
- l.insert(0, meth)
- else:
- l.append(meth)
- except AttributeError:
- continue
- l.extend(last)
- self._listattrcache[key] = list(l)
- return l
-
- def call_plugin(self, plugin, methname, kwargs):
- return MultiCall(methods=self.listattr(methname, plugins=[plugin]),
- kwargs=kwargs, firstresult=True).execute()
-
-
-def importplugin(importspec):
- name = importspec
- try:
- mod = "_pytest." + name
- return __import__(mod, None, None, '__doc__')
- except ImportError:
- #e = py.std.sys.exc_info()[1]
- #if str(e).find(name) == -1:
- # raise
- pass #
- return __import__(importspec, None, None, '__doc__')
-
-class MultiCall:
- """ execute a call into multiple python functions/methods. """
- def __init__(self, methods, kwargs, firstresult=False):
- self.methods = list(methods)
- self.kwargs = kwargs
- self.results = []
- self.firstresult = firstresult
-
- def __repr__(self):
- status = "%d results, %d meths" % (len(self.results), len(self.methods))
- return "" %(status, self.kwargs)
-
- def execute(self):
- while self.methods:
- method = self.methods.pop()
- kwargs = self.getkwargs(method)
- res = method(**kwargs)
- if res is not None:
- self.results.append(res)
- if self.firstresult:
- return res
- if not self.firstresult:
- return self.results
-
- def getkwargs(self, method):
- kwargs = {}
- for argname in varnames(method):
- try:
- kwargs[argname] = self.kwargs[argname]
- except KeyError:
- if argname == "__multicall__":
- kwargs[argname] = self
- return kwargs
-
-def varnames(func):
- try:
- return func._varnames
- except AttributeError:
- pass
- if not inspect.isfunction(func) and not inspect.ismethod(func):
- func = getattr(func, '__call__', func)
- ismethod = inspect.ismethod(func)
- rawcode = py.code.getrawcode(func)
- try:
- x = rawcode.co_varnames[ismethod:rawcode.co_argcount]
- except AttributeError:
- x = ()
- py.builtin._getfuncdict(func)['_varnames'] = x
- return x
-
-class HookRelay:
- def __init__(self, hookspecs, pm, prefix="pytest_"):
- if not isinstance(hookspecs, list):
- hookspecs = [hookspecs]
- self._hookspecs = []
- self._pm = pm
- self.trace = pm.trace.root.get("hook")
- for hookspec in hookspecs:
- self._addhooks(hookspec, prefix)
-
- def _addhooks(self, hookspecs, prefix):
- self._hookspecs.append(hookspecs)
- added = False
- for name, method in vars(hookspecs).items():
- if name.startswith(prefix):
- firstresult = getattr(method, 'firstresult', False)
- hc = HookCaller(self, name, firstresult=firstresult)
- setattr(self, name, hc)
- added = True
- #print ("setting new hook", name)
- if not added:
- raise ValueError("did not find new %r hooks in %r" %(
- prefix, hookspecs,))
-
-
-class HookCaller:
- def __init__(self, hookrelay, name, firstresult):
- self.hookrelay = hookrelay
- self.name = name
- self.firstresult = firstresult
- self.trace = self.hookrelay.trace
-
- def __repr__(self):
- return "" %(self.name,)
-
- def __call__(self, **kwargs):
- methods = self.hookrelay._pm.listattr(self.name)
- return self._docall(methods, kwargs)
-
- def pcall(self, plugins, **kwargs):
- methods = self.hookrelay._pm.listattr(self.name, plugins=plugins)
- return self._docall(methods, kwargs)
-
- def _docall(self, methods, kwargs):
- self.trace(self.name, kwargs)
- self.trace.root.indent += 1
- mc = MultiCall(methods, kwargs, firstresult=self.firstresult)
- try:
- res = mc.execute()
- if res:
- self.trace("finish", self.name, "-->", res)
- finally:
- self.trace.root.indent -= 1
- return res
-
-_preinit = []
-
-def _preloadplugins():
- _preinit.append(PluginManager(load=True))
-
-def _prepareconfig(args=None, plugins=None):
- if args is None:
- args = sys.argv[1:]
- elif isinstance(args, py.path.local):
- args = [str(args)]
- elif not isinstance(args, (tuple, list)):
- if not isinstance(args, str):
- raise ValueError("not a string or argument list: %r" % (args,))
- args = py.std.shlex.split(args)
- if _preinit:
- _pluginmanager = _preinit.pop(0)
- else: # subsequent calls to main will create a fresh instance
- _pluginmanager = PluginManager(load=True)
- hook = _pluginmanager.hook
- if plugins:
- for plugin in plugins:
- _pluginmanager.register(plugin)
- return hook.pytest_cmdline_parse(
- pluginmanager=_pluginmanager, args=args)
-
-def main(args=None, plugins=None):
- """ returned exit code integer, after an in-process testing run
- with the given command line arguments, preloading an optional list
- of passed in plugin objects. """
- try:
- config = _prepareconfig(args, plugins)
- exitstatus = config.hook.pytest_cmdline_main(config=config)
- except UsageError:
- e = sys.exc_info()[1]
- sys.stderr.write("ERROR: %s\n" %(e.args[0],))
- exitstatus = 3
- return exitstatus
-
-class UsageError(Exception):
- """ error in py.test usage or invocation"""
-
diff --git a/_pytest/doctest.py b/_pytest/doctest.py
deleted file mode 100644
--- a/_pytest/doctest.py
+++ /dev/null
@@ -1,87 +0,0 @@
-""" discover and run doctests in modules and test files."""
-
-import pytest, py
-from py._code.code import TerminalRepr, ReprFileLocation
-
-def pytest_addoption(parser):
- group = parser.getgroup("collect")
- group.addoption("--doctest-modules",
- action="store_true", default=False,
- help="run doctests in all .py modules",
- dest="doctestmodules")
- group.addoption("--doctest-glob",
- action="store", default="test*.txt", metavar="pat",
- help="doctests file matching pattern, default: test*.txt",
- dest="doctestglob")
-
-def pytest_collect_file(path, parent):
- config = parent.config
- if path.ext == ".py":
- if config.option.doctestmodules:
- return DoctestModule(path, parent)
- elif (path.ext in ('.txt', '.rst') and parent.session.isinitpath(path)) or \
- path.check(fnmatch=config.getvalue("doctestglob")):
- return DoctestTextfile(path, parent)
-
-class ReprFailDoctest(TerminalRepr):
- def __init__(self, reprlocation, lines):
- self.reprlocation = reprlocation
- self.lines = lines
- def toterminal(self, tw):
- for line in self.lines:
- tw.line(line)
- self.reprlocation.toterminal(tw)
-
-class DoctestItem(pytest.Item):
- def repr_failure(self, excinfo):
- doctest = py.std.doctest
- if excinfo.errisinstance((doctest.DocTestFailure,
- doctest.UnexpectedException)):
- doctestfailure = excinfo.value
- example = doctestfailure.example
- test = doctestfailure.test
- filename = test.filename
- lineno = test.lineno + example.lineno + 1
- message = excinfo.type.__name__
- reprlocation = ReprFileLocation(filename, lineno, message)
- checker = py.std.doctest.OutputChecker()
- REPORT_UDIFF = py.std.doctest.REPORT_UDIFF
- filelines = py.path.local(filename).readlines(cr=0)
- i = max(test.lineno, max(0, lineno - 10)) # XXX?
- lines = []
- for line in filelines[i:lineno]:
- lines.append("%03d %s" % (i+1, line))
- i += 1
- if excinfo.errisinstance(doctest.DocTestFailure):
- lines += checker.output_difference(example,
- doctestfailure.got, REPORT_UDIFF).split("\n")
- else:
- inner_excinfo = py.code.ExceptionInfo(excinfo.value.exc_info)
- lines += ["UNEXPECTED EXCEPTION: %s" %
- repr(inner_excinfo.value)]
- lines += py.std.traceback.format_exception(*excinfo.value.exc_info)
- return ReprFailDoctest(reprlocation, lines)
- else:
- return super(DoctestItem, self).repr_failure(excinfo)
-
- def reportinfo(self):
- return self.fspath, None, "[doctest]"
-
-class DoctestTextfile(DoctestItem, pytest.File):
- def runtest(self):
- doctest = py.std.doctest
- failed, tot = doctest.testfile(
- str(self.fspath), module_relative=False,
- optionflags=doctest.ELLIPSIS,
- raise_on_error=True, verbose=0)
-
-class DoctestModule(DoctestItem, pytest.File):
- def runtest(self):
- doctest = py.std.doctest
- if self.fspath.basename == "conftest.py":
- module = self.config._conftest.importconftest(self.fspath)
- else:
- module = self.fspath.pyimport()
- failed, tot = doctest.testmod(
- module, raise_on_error=True, verbose=0,
- optionflags=doctest.ELLIPSIS)
diff --git a/_pytest/genscript.py b/_pytest/genscript.py
deleted file mode 100755
--- a/_pytest/genscript.py
+++ /dev/null
@@ -1,69 +0,0 @@
-""" generate a single-file self-contained version of py.test """
-import py
-
-def find_toplevel(name):
- for syspath in py.std.sys.path:
- base = py.path.local(syspath)
- lib = base/name
- if lib.check(dir=1):
- return lib
- mod = base.join("%s.py" % name)
- if mod.check(file=1):
- return mod
- raise LookupError(name)
-
-def pkgname(toplevel, rootpath, path):
- parts = path.parts()[len(rootpath.parts()):]
- return '.'.join([toplevel] + [x.purebasename for x in parts])
-
-def pkg_to_mapping(name):
- toplevel = find_toplevel(name)
- name2src = {}
- if toplevel.check(file=1): # module
- name2src[toplevel.purebasename] = toplevel.read()
- else: # package
- for pyfile in toplevel.visit('*.py'):
- pkg = pkgname(name, toplevel, pyfile)
- name2src[pkg] = pyfile.read()
- return name2src
-
-def compress_mapping(mapping):
- data = py.std.pickle.dumps(mapping, 2)
- data = py.std.zlib.compress(data, 9)
- data = py.std.base64.encodestring(data)
- data = data.decode('ascii')
- return data
-
-
-def compress_packages(names):
- mapping = {}
- for name in names:
- mapping.update(pkg_to_mapping(name))
- return compress_mapping(mapping)
-
-def generate_script(entry, packages):
- data = compress_packages(packages)
- tmpl = py.path.local(__file__).dirpath().join('standalonetemplate.py')
- exe = tmpl.read()
- exe = exe.replace('@SOURCES@', data)
- exe = exe.replace('@ENTRY@', entry)
- return exe
-
-
-def pytest_addoption(parser):
- group = parser.getgroup("debugconfig")
- group.addoption("--genscript", action="store", default=None,
- dest="genscript", metavar="path",
- help="create standalone py.test script at given target path.")
-
-def pytest_cmdline_main(config):
- genscript = config.getvalue("genscript")
- if genscript:
- script = generate_script(
- 'import py; raise SystemExit(py.test.cmdline.main())',
- ['py', '_pytest', 'pytest'],
- )
-
- genscript = py.path.local(genscript)
- genscript.write(script)
- return 0
diff --git a/_pytest/helpconfig.py b/_pytest/helpconfig.py
deleted file mode 100644
--- a/_pytest/helpconfig.py
+++ /dev/null
@@ -1,197 +0,0 @@
-""" version info, help messages, tracing configuration. """
-import py
-import pytest
-import os, inspect, sys
-from _pytest.core import varnames
-
-def pytest_addoption(parser):
- group = parser.getgroup('debugconfig')
- group.addoption('--version', action="store_true",
- help="display pytest lib version and import information.")
- group._addoption("-h", "--help", action="store_true", dest="help",
- help="show help message and configuration info")
- group._addoption('-p', action="append", dest="plugins", default = [],
- metavar="name",
- help="early-load given plugin (multi-allowed).")
- group.addoption('--traceconfig',
- action="store_true", dest="traceconfig", default=False,
- help="trace considerations of conftest.py files."),
- group.addoption('--debug',
- action="store_true", dest="debug", default=False,
- help="store internal tracing debug information in 'pytestdebug.log'.")
-
-
-def pytest_cmdline_parse(__multicall__):
- config = __multicall__.execute()
- if config.option.debug:
- path = os.path.abspath("pytestdebug.log")
- f = open(path, 'w')
- config._debugfile = f
- f.write("versions pytest-%s, py-%s, python-%s\ncwd=%s\nargs=%s\n\n" %(
- pytest.__version__, py.__version__, ".".join(map(str, sys.version_info)),
- os.getcwd(), config._origargs))
- config.trace.root.setwriter(f.write)
- sys.stderr.write("writing pytestdebug information to %s\n" % path)
- return config
-
- at pytest.mark.trylast
-def pytest_unconfigure(config):
- if hasattr(config, '_debugfile'):
- config._debugfile.close()
- sys.stderr.write("wrote pytestdebug information to %s\n" %
- config._debugfile.name)
- config.trace.root.setwriter(None)
-
-
-def pytest_cmdline_main(config):
- if config.option.version:
- p = py.path.local(pytest.__file__)
- sys.stderr.write("This is py.test version %s, imported from %s\n" %
- (pytest.__version__, p))
- plugininfo = getpluginversioninfo(config)
- if plugininfo:
- for line in plugininfo:
- sys.stderr.write(line + "\n")
- return 0
- elif config.option.help:
- config.pluginmanager.do_configure(config)
- showhelp(config)
- config.pluginmanager.do_unconfigure(config)
- return 0
-
-def showhelp(config):
- tw = py.io.TerminalWriter()
- tw.write(config._parser.optparser.format_help())
- tw.line()
- tw.line()
- #tw.sep( "=", "config file settings")
- tw.line("[pytest] ini-options in the next "
- "pytest.ini|tox.ini|setup.cfg file:")
- tw.line()
-
- for name in config._parser._ininames:
- help, type, default = config._parser._inidict[name]
- if type is None:
- type = "string"
- spec = "%s (%s)" % (name, type)
- line = " %-24s %s" %(spec, help)
- tw.line(line[:tw.fullwidth])
-
- tw.line() ; tw.line()
- #tw.sep("=")
- return
-
- tw.line("conftest.py options:")
- tw.line()
- conftestitems = sorted(config._parser._conftestdict.items())
- for name, help in conftest_options + conftestitems:
- line = " %-15s %s" %(name, help)
- tw.line(line[:tw.fullwidth])
- tw.line()
- #tw.sep( "=")
-
-conftest_options = [
- ('pytest_plugins', 'list of plugin names to load'),
-]
-
-def getpluginversioninfo(config):
- lines = []
- plugininfo = config.pluginmanager._plugin_distinfo
- if plugininfo:
- lines.append("setuptools registered plugins:")
- for dist, plugin in plugininfo:
- loc = getattr(plugin, '__file__', repr(plugin))
- content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
- lines.append(" " + content)
- return lines
-
-def pytest_report_header(config):
- lines = []
- if config.option.debug or config.option.traceconfig:
- lines.append("using: pytest-%s pylib-%s" %
- (pytest.__version__,py.__version__))
-
- verinfo = getpluginversioninfo(config)
- if verinfo:
- lines.extend(verinfo)
-
- if config.option.traceconfig:
- lines.append("active plugins:")
- plugins = []
- items = config.pluginmanager._name2plugin.items()
- for name, plugin in items:
- if hasattr(plugin, '__file__'):
- r = plugin.__file__
- else:
- r = repr(plugin)
- lines.append(" %-20s: %s" %(name, r))
- return lines
-
-
-# =====================================================
-# validate plugin syntax and hooks
-# =====================================================
-
-def pytest_plugin_registered(manager, plugin):
- methods = collectattr(plugin)
- hooks = {}
- for hookspec in manager.hook._hookspecs:
- hooks.update(collectattr(hookspec))
-
- stringio = py.io.TextIO()
- def Print(*args):
- if args:
- stringio.write(" ".join(map(str, args)))
- stringio.write("\n")
-
- fail = False
- while methods:
- name, method = methods.popitem()
- #print "checking", name
- if isgenerichook(name):
- continue
- if name not in hooks:
- if not getattr(method, 'optionalhook', False):
- Print("found unknown hook:", name)
- fail = True
- else:
- #print "checking", method
- method_args = list(varnames(method))
- if '__multicall__' in method_args:
- method_args.remove('__multicall__')
- hook = hooks[name]
- hookargs = varnames(hook)
- for arg in method_args:
- if arg not in hookargs:
- Print("argument %r not available" %(arg, ))
- Print("actual definition: %s" %(formatdef(method)))
- Print("available hook arguments: %s" %
- ", ".join(hookargs))
- fail = True
- break
- #if not fail:
- # print "matching hook:", formatdef(method)
- if fail:
- name = getattr(plugin, '__name__', plugin)
- raise PluginValidationError("%s:\n%s" % (name, stringio.getvalue()))
-
-class PluginValidationError(Exception):
- """ plugin failed validation. """
-
-def isgenerichook(name):
- return name == "pytest_plugins" or \
- name.startswith("pytest_funcarg__")
-
-def collectattr(obj):
- methods = {}
- for apiname in dir(obj):
- if apiname.startswith("pytest_"):
- methods[apiname] = getattr(obj, apiname)
- return methods
-
-def formatdef(func):
- return "%s%s" % (
- func.__name__,
- inspect.formatargspec(*inspect.getargspec(func))
- )
-
diff --git a/_pytest/hookspec.py b/_pytest/hookspec.py
deleted file mode 100644
--- a/_pytest/hookspec.py
+++ /dev/null
@@ -1,228 +0,0 @@
-""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
-
-# -------------------------------------------------------------------------
-# Initialization
-# -------------------------------------------------------------------------
-
-def pytest_addhooks(pluginmanager):
- """called at plugin load time to allow adding new hooks via a call to
- pluginmanager.registerhooks(module)."""
-
-
-def pytest_namespace():
- """return dict of name->object to be made globally available in
- the py.test/pytest namespace. This hook is called before command
- line options are parsed.
- """
-
-def pytest_cmdline_parse(pluginmanager, args):
- """return initialized config object, parsing the specified args. """
-pytest_cmdline_parse.firstresult = True
-
-def pytest_cmdline_preparse(config, args):
- """modify command line arguments before option parsing. """
-
-def pytest_addoption(parser):
- """add optparse-style options and ini-style config values via calls
- to ``parser.addoption`` and ``parser.addini(...)``.
- """
-
-def pytest_cmdline_main(config):
- """ called for performing the main command line action. The default
- implementation will invoke the configure hooks and runtest_mainloop. """
-pytest_cmdline_main.firstresult = True
-
-def pytest_configure(config):
- """ called after command line options have been parsed.
- and all plugins and initial conftest files been loaded.
- """
-
-def pytest_unconfigure(config):
- """ called before test process is exited. """
-
-def pytest_runtestloop(session):
- """ called for performing the main runtest loop
- (after collection finished). """
-pytest_runtestloop.firstresult = True
-
-# -------------------------------------------------------------------------
-# collection hooks
-# -------------------------------------------------------------------------
-
-def pytest_collection(session):
- """ perform the collection protocol for the given session. """
-pytest_collection.firstresult = True
-
-def pytest_collection_modifyitems(session, config, items):
- """ called after collection has been performed, may filter or re-order
- the items in-place."""
-
-def pytest_collection_finish(session):
- """ called after collection has been performed and modified. """
-
-def pytest_ignore_collect(path, config):
- """ return True to prevent considering this path for collection.
- This hook is consulted for all files and directories prior to calling
- more specific hooks.
- """
-pytest_ignore_collect.firstresult = True
-
-def pytest_collect_directory(path, parent):
- """ called before traversing a directory for collection files. """
-pytest_collect_directory.firstresult = True
-
-def pytest_collect_file(path, parent):
- """ return collection Node or None for the given path. Any new node
- needs to have the specified ``parent`` as a parent."""
-
-# logging hooks for collection
-def pytest_collectstart(collector):
- """ collector starts collecting. """
-
-def pytest_itemcollected(item):
- """ we just collected a test item. """
-
-def pytest_collectreport(report):
- """ collector finished collecting. """
-
-def pytest_deselected(items):
- """ called for test items deselected by keyword. """
-
-def pytest_make_collect_report(collector):
- """ perform ``collector.collect()`` and return a CollectReport. """
-pytest_make_collect_report.firstresult = True
-
-# -------------------------------------------------------------------------
-# Python test function related hooks
-# -------------------------------------------------------------------------
-
-def pytest_pycollect_makemodule(path, parent):
- """ return a Module collector or None for the given path.
- This hook will be called for each matching test module path.
- The pytest_collect_file hook needs to be used if you want to
- create test modules for files that do not match as a test module.
- """
-pytest_pycollect_makemodule.firstresult = True
-
-def pytest_pycollect_makeitem(collector, name, obj):
- """ return custom item/collector for a python object in a module, or None. """
-pytest_pycollect_makeitem.firstresult = True
-
-def pytest_pyfunc_call(pyfuncitem):
- """ call underlying test function. """
-pytest_pyfunc_call.firstresult = True
-
-def pytest_generate_tests(metafunc):
- """ generate (multiple) parametrized calls to a test function."""
-
-# -------------------------------------------------------------------------
-# generic runtest related hooks
-# -------------------------------------------------------------------------
-def pytest_itemstart(item, node=None):
- """ (deprecated, use pytest_runtest_logstart). """
-
-def pytest_runtest_protocol(item, nextitem):
- """ implements the runtest_setup/call/teardown protocol for
- the given test item, including capturing exceptions and calling
- reporting hooks.
-
- :arg item: test item for which the runtest protocol is performed.
-
- :arg nexitem: the scheduled-to-be-next test item (or None if this
- is the end my friend). This argument is passed on to
- :py:func:`pytest_runtest_teardown`.
-
- :return boolean: True if no further hook implementations should be invoked.
- """
-pytest_runtest_protocol.firstresult = True
-
-def pytest_runtest_logstart(nodeid, location):
- """ signal the start of running a single test item. """
-
-def pytest_runtest_setup(item):
- """ called before ``pytest_runtest_call(item)``. """
-
-def pytest_runtest_call(item):
- """ called to execute the test ``item``. """
-
-def pytest_runtest_teardown(item, nextitem):
- """ called after ``pytest_runtest_call``.
-
- :arg nexitem: the scheduled-to-be-next test item (None if no further
- test item is scheduled). This argument can be used to
- perform exact teardowns, i.e. calling just enough finalizers
- so that nextitem only needs to call setup-functions.
- """
-
-def pytest_runtest_makereport(item, call):
- """ return a :py:class:`_pytest.runner.TestReport` object
- for the given :py:class:`pytest.Item` and
- :py:class:`_pytest.runner.CallInfo`.
- """
-pytest_runtest_makereport.firstresult = True
-
-def pytest_runtest_logreport(report):
- """ process a test setup/call/teardown report relating to
- the respective phase of executing a test. """
-
-# -------------------------------------------------------------------------
-# test session related hooks
-# -------------------------------------------------------------------------
-
-def pytest_sessionstart(session):
- """ before session.main() is called. """
-
-def pytest_sessionfinish(session, exitstatus):
- """ whole test run finishes. """
-
-
-# -------------------------------------------------------------------------
-# hooks for customising the assert methods
-# -------------------------------------------------------------------------
-
-def pytest_assertrepr_compare(config, op, left, right):
- """return explanation for comparisons in failing assert expressions.
-
- Return None for no custom explanation, otherwise return a list
- of strings. The strings will be joined by newlines but any newlines
- *in* a string will be escaped. Note that all but the first line will
- be indented sligthly, the intention is for the first line to be a summary.
- """
-
-# -------------------------------------------------------------------------
-# hooks for influencing reporting (invoked from _pytest_terminal)
-# -------------------------------------------------------------------------
-
-def pytest_report_header(config):
- """ return a string to be displayed as header info for terminal reporting."""
-
-def pytest_report_teststatus(report):
- """ return result-category, shortletter and verbose word for reporting."""
-pytest_report_teststatus.firstresult = True
-
-def pytest_terminal_summary(terminalreporter):
- """ add additional section in terminal summary reporting. """
-
-# -------------------------------------------------------------------------
-# doctest hooks
-# -------------------------------------------------------------------------
-
-def pytest_doctest_prepare_content(content):
- """ return processed content for a given doctest"""
-pytest_doctest_prepare_content.firstresult = True
-
-# -------------------------------------------------------------------------
-# error handling and internal debugging hooks
-# -------------------------------------------------------------------------
-
-def pytest_plugin_registered(plugin, manager):
- """ a new py lib plugin got registered. """
-
-def pytest_plugin_unregistered(plugin):
- """ a py lib plugin got unregistered. """
-
-def pytest_internalerror(excrepr):
- """ called for internal errors. """
-
-def pytest_keyboard_interrupt(excinfo):
- """ called for keyboard interrupt. """
diff --git a/_pytest/junitxml.py b/_pytest/junitxml.py
deleted file mode 100644
--- a/_pytest/junitxml.py
+++ /dev/null
@@ -1,220 +0,0 @@
-""" report test results in JUnit-XML format, for use with Hudson and build integration servers.
-
-Based on initial code from Ross Lawley.
-"""
-
-import py
-import os
-import re
-import sys
-import time
-
-
-# Python 2.X and 3.X compatibility
-try:
- unichr(65)
-except NameError:
- unichr = chr
-try:
- unicode('A')
-except NameError:
- unicode = str
-try:
- long(1)
-except NameError:
- long = int
-
-
-class Junit(py.xml.Namespace):
- pass
-
-
-# We need to get the subset of the invalid unicode ranges according to
-# XML 1.0 which are valid in this python build. Hence we calculate
-# this dynamically instead of hardcoding it. The spec range of valid
-# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
-# | [#x10000-#x10FFFF]
-_legal_chars = (0x09, 0x0A, 0x0d)
-_legal_ranges = (
- (0x20, 0xD7FF),
- (0xE000, 0xFFFD),
- (0x10000, 0x10FFFF),
-)
-_legal_xml_re = [unicode("%s-%s") % (unichr(low), unichr(high))
- for (low, high) in _legal_ranges
- if low < sys.maxunicode]
-_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
-illegal_xml_re = re.compile(unicode('[^%s]') %
- unicode('').join(_legal_xml_re))
-del _legal_chars
-del _legal_ranges
-del _legal_xml_re
-
-def bin_xml_escape(arg):
- def repl(matchobj):
- i = ord(matchobj.group())
- if i <= 0xFF:
- return unicode('#x%02X') % i
- else:
- return unicode('#x%04X') % i
- return illegal_xml_re.sub(repl, py.xml.escape(arg))
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting")
- group.addoption('--junitxml', action="store", dest="xmlpath",
- metavar="path", default=None,
- help="create junit-xml style report file at given path.")
- group.addoption('--junitprefix', action="store", dest="junitprefix",
- metavar="str", default=None,
- help="prepend prefix to classnames in junit-xml output")
-
-def pytest_configure(config):
- xmlpath = config.option.xmlpath
- if xmlpath:
- config._xml = LogXML(xmlpath, config.option.junitprefix)
- config.pluginmanager.register(config._xml)
-
-def pytest_unconfigure(config):
- xml = getattr(config, '_xml', None)
- if xml:
- del config._xml
- config.pluginmanager.unregister(xml)
-
-
-class LogXML(object):
- def __init__(self, logfile, prefix):
- logfile = os.path.expanduser(os.path.expandvars(logfile))
- self.logfile = os.path.normpath(logfile)
- self.prefix = prefix
- self.tests = []
- self.passed = self.skipped = 0
- self.failed = self.errors = 0
-
- def _opentestcase(self, report):
- names = report.nodeid.split("::")
- names[0] = names[0].replace("/", '.')
- names = [x.replace(".py", "") for x in names if x != "()"]
- classnames = names[:-1]
- if self.prefix:
- classnames.insert(0, self.prefix)
- self.tests.append(Junit.testcase(
- classname=".".join(classnames),
- name=names[-1],
- time=getattr(report, 'duration', 0)
- ))
-
- def append(self, obj):
- self.tests[-1].append(obj)
-
- def append_pass(self, report):
- self.passed += 1
-
- def append_failure(self, report):
- #msg = str(report.longrepr.reprtraceback.extraline)
- if "xfail" in report.keywords:
- self.append(
- Junit.skipped(message="xfail-marked test passes unexpectedly"))
- self.skipped += 1
- else:
- sec = dict(report.sections)
- fail = Junit.failure(message="test failure")
- fail.append(str(report.longrepr))
- self.append(fail)
- for name in ('out', 'err'):
- content = sec.get("Captured std%s" % name)
- if content:
- tag = getattr(Junit, 'system-'+name)
- self.append(tag(bin_xml_escape(content)))
- self.failed += 1
-
- def append_collect_failure(self, report):
- #msg = str(report.longrepr.reprtraceback.extraline)
- self.append(Junit.failure(str(report.longrepr),
- message="collection failure"))
- self.errors += 1
-
- def append_collect_skipped(self, report):
- #msg = str(report.longrepr.reprtraceback.extraline)
- self.append(Junit.skipped(str(report.longrepr),
- message="collection skipped"))
- self.skipped += 1
-
- def append_error(self, report):
- self.append(Junit.error(str(report.longrepr),
- message="test setup failure"))
- self.errors += 1
-
- def append_skipped(self, report):
- if "xfail" in report.keywords:
- self.append(Junit.skipped(str(report.keywords['xfail']),
- message="expected test failure"))
- else:
- filename, lineno, skipreason = report.longrepr
- if skipreason.startswith("Skipped: "):
- skipreason = skipreason[9:]
- self.append(
- Junit.skipped("%s:%s: %s" % report.longrepr,
- type="pytest.skip",
- message=skipreason
- ))
- self.skipped += 1
-
- def pytest_runtest_logreport(self, report):
- if report.passed:
- if report.when == "call": # ignore setup/teardown
- self._opentestcase(report)
- self.append_pass(report)
- elif report.failed:
- self._opentestcase(report)
- if report.when != "call":
- self.append_error(report)
- else:
- self.append_failure(report)
- elif report.skipped:
- self._opentestcase(report)
- self.append_skipped(report)
-
- def pytest_collectreport(self, report):
- if not report.passed:
- self._opentestcase(report)
- if report.failed:
- self.append_collect_failure(report)
- else:
- self.append_collect_skipped(report)
-
- def pytest_internalerror(self, excrepr):
- self.errors += 1
- data = py.xml.escape(excrepr)
- self.tests.append(
- Junit.testcase(
- Junit.error(data, message="internal error"),
- classname="pytest",
- name="internal"))
-
- def pytest_sessionstart(self, session):
- self.suite_start_time = time.time()
-
- def pytest_sessionfinish(self, session, exitstatus, __multicall__):
- if py.std.sys.version_info[0] < 3:
- logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8')
- else:
- logfile = open(self.logfile, 'w', encoding='utf-8')
-
- suite_stop_time = time.time()
- suite_time_delta = suite_stop_time - self.suite_start_time
- numtests = self.passed + self.failed
-
- logfile.write('')
- logfile.write(Junit.testsuite(
- self.tests,
- name="",
- errors=self.errors,
- failures=self.failed,
- skips=self.skipped,
- tests=numtests,
- time="%.3f" % suite_time_delta,
- ).unicode(indent=0))
- logfile.close()
-
- def pytest_terminal_summary(self, terminalreporter):
- terminalreporter.write_sep("-", "generated xml file: %s" % (self.logfile))
diff --git a/_pytest/main.py b/_pytest/main.py
deleted file mode 100644
--- a/_pytest/main.py
+++ /dev/null
@@ -1,576 +0,0 @@
-""" core implementation of testing process: init, session, runtest loop. """
-
-import py
-import pytest, _pytest
-import os, sys, imp
-tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
-
-# exitcodes for the command line
-EXIT_OK = 0
-EXIT_TESTSFAILED = 1
-EXIT_INTERRUPTED = 2
-EXIT_INTERNALERROR = 3
-
-name_re = py.std.re.compile("^[a-zA-Z_]\w*$")
-
-def pytest_addoption(parser):
- parser.addini("norecursedirs", "directory patterns to avoid for recursion",
- type="args", default=('.*', 'CVS', '_darcs', '{arch}'))
- #parser.addini("dirpatterns",
- # "patterns specifying possible locations of test files",
- # type="linelist", default=["**/test_*.txt",
- # "**/test_*.py", "**/*_test.py"]
- #)
- group = parser.getgroup("general", "running and selection options")
- group._addoption('-x', '--exitfirst', action="store_true", default=False,
- dest="exitfirst",
- help="exit instantly on first error or failed test."),
- group._addoption('--maxfail', metavar="num",
- action="store", type="int", dest="maxfail", default=0,
- help="exit after first num failures or errors.")
-
- group._addoption('--strict', action="store_true",
- help="run pytest in strict mode, warnings become errors.")
-
- group = parser.getgroup("collect", "collection")
- group.addoption('--collectonly',
- action="store_true", dest="collectonly",
- help="only collect tests, don't execute them."),
- group.addoption('--pyargs', action="store_true",
- help="try to interpret all arguments as python packages.")
- group.addoption("--ignore", action="append", metavar="path",
- help="ignore path during collection (multi-allowed).")
- group.addoption('--confcutdir', dest="confcutdir", default=None,
- metavar="dir",
- help="only load conftest.py's relative to specified dir.")
-
- group = parser.getgroup("debugconfig",
- "test session debugging and configuration")
- group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
- help="base temporary directory for this test run.")
-
-
-def pytest_namespace():
- collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
- return dict(collect=collect)
-
-def pytest_configure(config):
- py.test.config = config # compatibiltiy
- if config.option.exitfirst:
- config.option.maxfail = 1
-
-def wrap_session(config, doit):
- """Skeleton command line program"""
- session = Session(config)
- session.exitstatus = EXIT_OK
- initstate = 0
- try:
- config.pluginmanager.do_configure(config)
- initstate = 1
- config.hook.pytest_sessionstart(session=session)
- initstate = 2
- doit(config, session)
- except pytest.UsageError:
- raise
- except KeyboardInterrupt:
- excinfo = py.code.ExceptionInfo()
- config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
- session.exitstatus = EXIT_INTERRUPTED
- except:
- excinfo = py.code.ExceptionInfo()
- config.pluginmanager.notify_exception(excinfo, config.option)
- session.exitstatus = EXIT_INTERNALERROR
- if excinfo.errisinstance(SystemExit):
- sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
- if initstate >= 2:
- config.hook.pytest_sessionfinish(session=session,
- exitstatus=session.exitstatus or (session._testsfailed and 1))
- if not session.exitstatus and session._testsfailed:
- session.exitstatus = EXIT_TESTSFAILED
- if initstate >= 1:
- config.pluginmanager.do_unconfigure(config)
- return session.exitstatus
-
-def pytest_cmdline_main(config):
- return wrap_session(config, _main)
-
-def _main(config, session):
- """ default command line protocol for initialization, session,
- running tests and reporting. """
- config.hook.pytest_collection(session=session)
- config.hook.pytest_runtestloop(session=session)
-
-def pytest_collection(session):
- return session.perform_collect()
-
-def pytest_runtestloop(session):
- if session.config.option.collectonly:
- return True
- for i, item in enumerate(session.items):
- try:
- nextitem = session.items[i+1]
- except IndexError:
- nextitem = None
- item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
- if session.shouldstop:
- raise session.Interrupted(session.shouldstop)
- return True
-
-def pytest_ignore_collect(path, config):
- p = path.dirpath()
- ignore_paths = config._getconftest_pathlist("collect_ignore", path=p)
- ignore_paths = ignore_paths or []
- excludeopt = config.getvalue("ignore")
- if excludeopt:
- ignore_paths.extend([py.path.local(x) for x in excludeopt])
- return path in ignore_paths
-
-class HookProxy:
- def __init__(self, fspath, config):
- self.fspath = fspath
- self.config = config
- def __getattr__(self, name):
- hookmethod = getattr(self.config.hook, name)
- def call_matching_hooks(**kwargs):
- plugins = self.config._getmatchingplugins(self.fspath)
- return hookmethod.pcall(plugins, **kwargs)
- return call_matching_hooks
-
-def compatproperty(name):
- def fget(self):
- return getattr(pytest, name)
- return property(fget, None, None,
- "deprecated attribute %r, use pytest.%s" % (name,name))
-
-class Node(object):
- """ base class for all Nodes in the collection tree.
- Collector subclasses have children, Items are terminal nodes."""
-
- def __init__(self, name, parent=None, config=None, session=None):
- #: a unique name with the scope of the parent
- self.name = name
-
- #: the parent collector node.
- self.parent = parent
-
- #: the test config object
- self.config = config or parent.config
-
- #: the collection this node is part of
- self.session = session or parent.session
-
- #: filesystem path where this node was collected from
- self.fspath = getattr(parent, 'fspath', None)
- self.ihook = self.session.gethookproxy(self.fspath)
- self.keywords = {self.name: True}
-
- Module = compatproperty("Module")
- Class = compatproperty("Class")
- Instance = compatproperty("Instance")
- Function = compatproperty("Function")
- File = compatproperty("File")
- Item = compatproperty("Item")
-
- def _getcustomclass(self, name):
- cls = getattr(self, name)
- if cls != getattr(pytest, name):
- py.log._apiwarn("2.0", "use of node.%s is deprecated, "
- "use pytest_pycollect_makeitem(...) to create custom "
- "collection nodes" % name)
- return cls
-
- def __repr__(self):
- return "<%s %r>" %(self.__class__.__name__, getattr(self, 'name', None))
-
- # methods for ordering nodes
- @property
- def nodeid(self):
- try:
- return self._nodeid
- except AttributeError:
- self._nodeid = x = self._makeid()
- return x
-
- def _makeid(self):
- return self.parent.nodeid + "::" + self.name
-
- def __eq__(self, other):
- if not isinstance(other, Node):
- return False
- return self.__class__ == other.__class__ and \
- self.name == other.name and self.parent == other.parent
-
- def __ne__(self, other):
- return not self == other
-
- def __hash__(self):
- return hash((self.name, self.parent))
-
- def setup(self):
- pass
-
- def teardown(self):
- pass
-
- def _memoizedcall(self, attrname, function):
- exattrname = "_ex_" + attrname
- failure = getattr(self, exattrname, None)
- if failure is not None:
- py.builtin._reraise(failure[0], failure[1], failure[2])
- if hasattr(self, attrname):
- return getattr(self, attrname)
- try:
- res = function()
- except py.builtin._sysex:
- raise
- except:
- failure = py.std.sys.exc_info()
- setattr(self, exattrname, failure)
- raise
- setattr(self, attrname, res)
- return res
-
- def listchain(self):
- """ return list of all parent collectors up to self,
- starting from root of collection tree. """
- chain = []
- item = self
- while item is not None:
- chain.append(item)
- item = item.parent
- chain.reverse()
- return chain
-
- def listnames(self):
- return [x.name for x in self.listchain()]
-
- def getplugins(self):
- return self.config._getmatchingplugins(self.fspath)
-
- def getparent(self, cls):
- current = self
- while current and not isinstance(current, cls):
- current = current.parent
- return current
-
- def _prunetraceback(self, excinfo):
- pass
-
- def _repr_failure_py(self, excinfo, style=None):
- if self.config.option.fulltrace:
- style="long"
- else:
- self._prunetraceback(excinfo)
- # XXX should excinfo.getrepr record all data and toterminal()
- # process it?
- if style is None:
- if self.config.option.tbstyle == "short":
- style = "short"
- else:
- style = "long"
- return excinfo.getrepr(funcargs=True,
- showlocals=self.config.option.showlocals,
- style=style)
-
- repr_failure = _repr_failure_py
-
-class Collector(Node):
- """ Collector instances create children through collect()
- and thus iteratively build a tree.
- """
- class CollectError(Exception):
- """ an error during collection, contains a custom message. """
-
- def collect(self):
- """ returns a list of children (items and collectors)
- for this collection node.
- """
- raise NotImplementedError("abstract")
-
- def repr_failure(self, excinfo):
- """ represent a collection failure. """
- if excinfo.errisinstance(self.CollectError):
- exc = excinfo.value
- return str(exc.args[0])
- return self._repr_failure_py(excinfo, style="short")
-
- def _memocollect(self):
- """ internal helper method to cache results of calling collect(). """
- return self._memoizedcall('_collected', lambda: list(self.collect()))
-
- def _prunetraceback(self, excinfo):
- if hasattr(self, 'fspath'):
- path = self.fspath
- traceback = excinfo.traceback
- ntraceback = traceback.cut(path=self.fspath)
- if ntraceback == traceback:
- ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
- excinfo.traceback = ntraceback.filter()
-
-class FSCollector(Collector):
- def __init__(self, fspath, parent=None, config=None, session=None):
- fspath = py.path.local(fspath) # xxx only for test_resultlog.py?
- name = fspath.basename
- if parent is not None:
- rel = fspath.relto(parent.fspath)
- if rel:
- name = rel
- name = name.replace(os.sep, "/")
- super(FSCollector, self).__init__(name, parent, config, session)
- self.fspath = fspath
-
- def _makeid(self):
- if self == self.session:
- return "."
- relpath = self.session.fspath.bestrelpath(self.fspath)
- if os.sep != "/":
- relpath = relpath.replace(os.sep, "/")
- return relpath
-
-class File(FSCollector):
- """ base class for collecting tests from a file. """
-
-class Item(Node):
- """ a basic test invocation item. Note that for a single function
- there might be multiple test invocation items.
- """
- nextitem = None
-
- def reportinfo(self):
- return self.fspath, None, ""
-
- @property
- def location(self):
- try:
- return self._location
- except AttributeError:
- location = self.reportinfo()
- # bestrelpath is a quite slow function
- cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
- try:
- fspath = cache[location[0]]
- except KeyError:
- fspath = self.session.fspath.bestrelpath(location[0])
- cache[location[0]] = fspath
- location = (fspath, location[1], str(location[2]))
- self._location = location
- return location
-
-class NoMatch(Exception):
- """ raised if matching cannot locate a matching names. """
-
-class Session(FSCollector):
- class Interrupted(KeyboardInterrupt):
- """ signals an interrupted test run. """
- __module__ = 'builtins' # for py3
-
- def __init__(self, config):
- super(Session, self).__init__(py.path.local(), parent=None,
- config=config, session=self)
- assert self.config.pluginmanager.register(self, name="session", prepend=True)
- self._testsfailed = 0
- self.shouldstop = False
- self.trace = config.trace.root.get("collection")
- self._norecursepatterns = config.getini("norecursedirs")
-
- def pytest_collectstart(self):
- if self.shouldstop:
- raise self.Interrupted(self.shouldstop)
-
- def pytest_runtest_logreport(self, report):
- if report.failed and 'xfail' not in getattr(report, 'keywords', []):
- self._testsfailed += 1
- maxfail = self.config.getvalue("maxfail")
- if maxfail and self._testsfailed >= maxfail:
- self.shouldstop = "stopping after %d failures" % (
- self._testsfailed)
- pytest_collectreport = pytest_runtest_logreport
-
- def isinitpath(self, path):
- return path in self._initialpaths
-
- def gethookproxy(self, fspath):
- return HookProxy(fspath, self.config)
-
- def perform_collect(self, args=None, genitems=True):
- hook = self.config.hook
- try:
- items = self._perform_collect(args, genitems)
- hook.pytest_collection_modifyitems(session=self,
- config=self.config, items=items)
- finally:
- hook.pytest_collection_finish(session=self)
- return items
-
- def _perform_collect(self, args, genitems):
- if args is None:
- args = self.config.args
- self.trace("perform_collect", self, args)
- self.trace.root.indent += 1
- self._notfound = []
- self._initialpaths = set()
- self._initialparts = []
- self.items = items = []
- for arg in args:
- parts = self._parsearg(arg)
- self._initialparts.append(parts)
- self._initialpaths.add(parts[0])
- self.ihook.pytest_collectstart(collector=self)
- rep = self.ihook.pytest_make_collect_report(collector=self)
- self.ihook.pytest_collectreport(report=rep)
- self.trace.root.indent -= 1
- if self._notfound:
- for arg, exc in self._notfound:
- line = "(no name %r in any of %r)" % (arg, exc.args[0])
- raise pytest.UsageError("not found: %s\n%s" %(arg, line))
- if not genitems:
- return rep.result
- else:
- if rep.passed:
- for node in rep.result:
- self.items.extend(self.genitems(node))
- return items
-
- def collect(self):
- for parts in self._initialparts:
- arg = "::".join(map(str, parts))
- self.trace("processing argument", arg)
- self.trace.root.indent += 1
- try:
- for x in self._collect(arg):
- yield x
- except NoMatch:
- # we are inside a make_report hook so
- # we cannot directly pass through the exception
- self._notfound.append((arg, sys.exc_info()[1]))
- self.trace.root.indent -= 1
- break
- self.trace.root.indent -= 1
-
- def _collect(self, arg):
- names = self._parsearg(arg)
- path = names.pop(0)
- if path.check(dir=1):
- assert not names, "invalid arg %r" %(arg,)
- for path in path.visit(fil=lambda x: x.check(file=1),
- rec=self._recurse, bf=True, sort=True):
- for x in self._collectfile(path):
- yield x
- else:
- assert path.check(file=1)
- for x in self.matchnodes(self._collectfile(path), names):
- yield x
-
- def _collectfile(self, path):
- ihook = self.gethookproxy(path)
- if not self.isinitpath(path):
- if ihook.pytest_ignore_collect(path=path, config=self.config):
- return ()
- return ihook.pytest_collect_file(path=path, parent=self)
-
- def _recurse(self, path):
- ihook = self.gethookproxy(path.dirpath())
- if ihook.pytest_ignore_collect(path=path, config=self.config):
- return
- for pat in self._norecursepatterns:
- if path.check(fnmatch=pat):
- return False
- ihook = self.gethookproxy(path)
- ihook.pytest_collect_directory(path=path, parent=self)
- return True
-
- def _tryconvertpyarg(self, x):
- mod = None
- path = [os.path.abspath('.')] + sys.path
- for name in x.split('.'):
- # ignore anything that's not a proper name here
- # else something like --pyargs will mess up '.'
- # since imp.find_module will actually sometimes work for it
- # but it's supposed to be considered a filesystem path
- # not a package
- if name_re.match(name) is None:
- return x
- try:
- fd, mod, type_ = imp.find_module(name, path)
- except ImportError:
- return x
- else:
- if fd is not None:
- fd.close()
-
- if type_[2] != imp.PKG_DIRECTORY:
- path = [os.path.dirname(mod)]
- else:
- path = [mod]
- return mod
-
- def _parsearg(self, arg):
- """ return (fspath, names) tuple after checking the file exists. """
- arg = str(arg)
- if self.config.option.pyargs:
- arg = self._tryconvertpyarg(arg)
- parts = str(arg).split("::")
- relpath = parts[0].replace("/", os.sep)
- path = self.fspath.join(relpath, abs=True)
- if not path.check():
- if self.config.option.pyargs:
- msg = "file or package not found: "
- else:
- msg = "file not found: "
- raise pytest.UsageError(msg + arg)
- parts[0] = path
- return parts
-
- def matchnodes(self, matching, names):
- self.trace("matchnodes", matching, names)
- self.trace.root.indent += 1
- nodes = self._matchnodes(matching, names)
- num = len(nodes)
- self.trace("matchnodes finished -> ", num, "nodes")
- self.trace.root.indent -= 1
- if num == 0:
- raise NoMatch(matching, names[:1])
- return nodes
-
- def _matchnodes(self, matching, names):
- if not matching or not names:
- return matching
- name = names[0]
- assert name
- nextnames = names[1:]
- resultnodes = []
- for node in matching:
- if isinstance(node, pytest.Item):
- if not names:
- resultnodes.append(node)
- continue
- assert isinstance(node, pytest.Collector)
- node.ihook.pytest_collectstart(collector=node)
- rep = node.ihook.pytest_make_collect_report(collector=node)
- if rep.passed:
- has_matched = False
- for x in rep.result:
- if x.name == name:
- resultnodes.extend(self.matchnodes([x], nextnames))
- has_matched = True
- # XXX accept IDs that don't have "()" for class instances
- if not has_matched and len(rep.result) == 1 and x.name == "()":
- nextnames.insert(0, name)
- resultnodes.extend(self.matchnodes([x], nextnames))
- node.ihook.pytest_collectreport(report=rep)
- return resultnodes
-
- def genitems(self, node):
- self.trace("genitems", node)
- if isinstance(node, pytest.Item):
- node.ihook.pytest_itemcollected(item=node)
- yield node
- else:
- assert isinstance(node, pytest.Collector)
- node.ihook.pytest_collectstart(collector=node)
- rep = node.ihook.pytest_make_collect_report(collector=node)
- if rep.passed:
- for subnode in rep.result:
- for x in self.genitems(subnode):
- yield x
- node.ihook.pytest_collectreport(report=rep)
diff --git a/_pytest/mark.py b/_pytest/mark.py
deleted file mode 100644
--- a/_pytest/mark.py
+++ /dev/null
@@ -1,226 +0,0 @@
-""" generic mechanism for marking and selecting python functions. """
-import pytest, py
-
-def pytest_namespace():
- return {'mark': MarkGenerator()}
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption('-k',
- action="store", dest="keyword", default='', metavar="KEYWORDEXPR",
- help="only run tests which match given keyword expression. "
- "An expression consists of space-separated terms. "
- "Each term must match. Precede a term with '-' to negate. "
- "Terminate expression with ':' to make the first match match "
- "all subsequent tests (usually file-order). ")
-
- group._addoption("-m",
- action="store", dest="markexpr", default="", metavar="MARKEXPR",
- help="only run tests matching given mark expression. "
- "example: -m 'mark1 and not mark2'."
- )
-
- group.addoption("--markers", action="store_true", help=
- "show markers (builtin, plugin and per-project ones).")
-
- parser.addini("markers", "markers for test functions", 'linelist')
-
-def pytest_cmdline_main(config):
- if config.option.markers:
- config.pluginmanager.do_configure(config)
- tw = py.io.TerminalWriter()
- for line in config.getini("markers"):
- name, rest = line.split(":", 1)
- tw.write("@pytest.mark.%s:" % name, bold=True)
- tw.line(rest)
- tw.line()
- config.pluginmanager.do_unconfigure(config)
- return 0
-pytest_cmdline_main.tryfirst = True
-
-def pytest_collection_modifyitems(items, config):
- keywordexpr = config.option.keyword
- matchexpr = config.option.markexpr
- if not keywordexpr and not matchexpr:
- return
- selectuntil = False
- if keywordexpr[-1:] == ":":
- selectuntil = True
- keywordexpr = keywordexpr[:-1]
-
- remaining = []
- deselected = []
- for colitem in items:
- if keywordexpr and skipbykeyword(colitem, keywordexpr):
- deselected.append(colitem)
- else:
- if selectuntil:
- keywordexpr = None
- if matchexpr:
- if not matchmark(colitem, matchexpr):
- deselected.append(colitem)
- continue
- remaining.append(colitem)
-
- if deselected:
- config.hook.pytest_deselected(items=deselected)
- items[:] = remaining
-
-class BoolDict:
- def __init__(self, mydict):
- self._mydict = mydict
- def __getitem__(self, name):
- return name in self._mydict
-
-def matchmark(colitem, matchexpr):
- return eval(matchexpr, {}, BoolDict(colitem.obj.__dict__))
-
-def pytest_configure(config):
- if config.option.strict:
- pytest.mark._config = config
-
-def skipbykeyword(colitem, keywordexpr):
- """ return True if they given keyword expression means to
- skip this collector/item.
- """
- if not keywordexpr:
- return
-
- itemkeywords = getkeywords(colitem)
- for key in filter(None, keywordexpr.split()):
- eor = key[:1] == '-'
- if eor:
- key = key[1:]
- if not (eor ^ matchonekeyword(key, itemkeywords)):
- return True
-
-def getkeywords(node):
- keywords = {}
- while node is not None:
- keywords.update(node.keywords)
- node = node.parent
- return keywords
-
-
-def matchonekeyword(key, itemkeywords):
- for elem in key.split("."):
- for kw in itemkeywords:
- if elem in kw:
- break
- else:
- return False
- return True
-
-class MarkGenerator:
- """ Factory for :class:`MarkDecorator` objects - exposed as
- a ``py.test.mark`` singleton instance. Example::
-
- import py
- @py.test.mark.slowtest
- def test_function():
- pass
-
- will set a 'slowtest' :class:`MarkInfo` object
- on the ``test_function`` object. """
-
- def __getattr__(self, name):
- if name[0] == "_":
- raise AttributeError(name)
- if hasattr(self, '_config'):
- self._check(name)
- return MarkDecorator(name)
-
- def _check(self, name):
- try:
- if name in self._markers:
- return
- except AttributeError:
- pass
- self._markers = l = set()
- for line in self._config.getini("markers"):
- beginning = line.split(":", 1)
- x = beginning[0].split("(", 1)[0]
- l.add(x)
- if name not in self._markers:
- raise AttributeError("%r not a registered marker" % (name,))
-
-class MarkDecorator:
- """ A decorator for test functions and test classes. When applied
- it will create :class:`MarkInfo` objects which may be
- :ref:`retrieved by hooks as item keywords `.
- MarkDecorator instances are often created like this::
-
- mark1 = py.test.mark.NAME # simple MarkDecorator
- mark2 = py.test.mark.NAME(name1=value) # parametrized MarkDecorator
-
- and can then be applied as decorators to test functions::
-
- @mark2
- def test_function():
- pass
- """
- def __init__(self, name, args=None, kwargs=None):
- self.markname = name
- self.args = args or ()
- self.kwargs = kwargs or {}
-
- def __repr__(self):
- d = self.__dict__.copy()
- name = d.pop('markname')
- return "" %(name, d)
-
- def __call__(self, *args, **kwargs):
- """ if passed a single callable argument: decorate it with mark info.
- otherwise add *args/**kwargs in-place to mark information. """
- if args:
- func = args[0]
- if len(args) == 1 and hasattr(func, '__call__') or \
- hasattr(func, '__bases__'):
- if hasattr(func, '__bases__'):
- if hasattr(func, 'pytestmark'):
- l = func.pytestmark
- if not isinstance(l, list):
- func.pytestmark = [l, self]
- else:
- l.append(self)
- else:
- func.pytestmark = [self]
- else:
- holder = getattr(func, self.markname, None)
- if holder is None:
- holder = MarkInfo(self.markname, self.args, self.kwargs)
- setattr(func, self.markname, holder)
- else:
- holder.add(self.args, self.kwargs)
- return func
- kw = self.kwargs.copy()
- kw.update(kwargs)
- args = self.args + args
- return self.__class__(self.markname, args=args, kwargs=kw)
-
-class MarkInfo:
- """ Marking object created by :class:`MarkDecorator` instances. """
- def __init__(self, name, args, kwargs):
- #: name of attribute
- self.name = name
- #: positional argument list, empty if none specified
- self.args = args
- #: keyword argument dictionary, empty if nothing specified
- self.kwargs = kwargs
- self._arglist = [(args, kwargs.copy())]
-
- def __repr__(self):
- return "" % (
- self.name, self.args, self.kwargs)
-
- def add(self, args, kwargs):
- """ add a MarkInfo with the given args and kwargs. """
- self._arglist.append((args, kwargs))
- self.args += args
- self.kwargs.update(kwargs)
-
- def __iter__(self):
- """ yield MarkInfo objects each relating to a marking-call. """
- for args, kwargs in self._arglist:
- yield MarkInfo(self.name, args, kwargs)
-
diff --git a/_pytest/monkeypatch.py b/_pytest/monkeypatch.py
deleted file mode 100644
--- a/_pytest/monkeypatch.py
+++ /dev/null
@@ -1,124 +0,0 @@
-""" monkeypatching and mocking functionality. """
-
-import os, sys
-
-def pytest_funcarg__monkeypatch(request):
- """The returned ``monkeypatch`` funcarg provides these
- helper methods to modify objects, dictionaries or os.environ::
-
- monkeypatch.setattr(obj, name, value, raising=True)
- monkeypatch.delattr(obj, name, raising=True)
- monkeypatch.setitem(mapping, name, value)
- monkeypatch.delitem(obj, name, raising=True)
- monkeypatch.setenv(name, value, prepend=False)
- monkeypatch.delenv(name, value, raising=True)
- monkeypatch.syspath_prepend(path)
- monkeypatch.chdir(path)
-
- All modifications will be undone after the requesting
- test function has finished. The ``raising``
- parameter determines if a KeyError or AttributeError
- will be raised if the set/deletion operation has no target.
- """
- mpatch = monkeypatch()
- request.addfinalizer(mpatch.undo)
- return mpatch
-
-notset = object()
-
-class monkeypatch:
- """ object keeping a record of setattr/item/env/syspath changes. """
- def __init__(self):
- self._setattr = []
- self._setitem = []
- self._cwd = None
-
- def setattr(self, obj, name, value, raising=True):
- """ set attribute ``name`` on ``obj`` to ``value``, by default
- raise AttributeEror if the attribute did not exist. """
- oldval = getattr(obj, name, notset)
- if raising and oldval is notset:
- raise AttributeError("%r has no attribute %r" %(obj, name))
- self._setattr.insert(0, (obj, name, oldval))
- setattr(obj, name, value)
-
- def delattr(self, obj, name, raising=True):
- """ delete attribute ``name`` from ``obj``, by default raise
- AttributeError it the attribute did not previously exist. """
- if not hasattr(obj, name):
- if raising:
- raise AttributeError(name)
- else:
- self._setattr.insert(0, (obj, name, getattr(obj, name, notset)))
- delattr(obj, name)
-
- def setitem(self, dic, name, value):
- """ set dictionary entry ``name`` to value. """
- self._setitem.insert(0, (dic, name, dic.get(name, notset)))
- dic[name] = value
-
- def delitem(self, dic, name, raising=True):
- """ delete ``name`` from dict, raise KeyError if it doesn't exist."""
- if name not in dic:
- if raising:
- raise KeyError(name)
- else:
- self._setitem.insert(0, (dic, name, dic.get(name, notset)))
- del dic[name]
-
- def setenv(self, name, value, prepend=None):
- """ set environment variable ``name`` to ``value``. if ``prepend``
- is a character, read the current environment variable value
- and prepend the ``value`` adjoined with the ``prepend`` character."""
- value = str(value)
- if prepend and name in os.environ:
- value = value + prepend + os.environ[name]
- self.setitem(os.environ, name, value)
-
- def delenv(self, name, raising=True):
- """ delete ``name`` from environment, raise KeyError it not exists."""
- self.delitem(os.environ, name, raising=raising)
-
- def syspath_prepend(self, path):
- """ prepend ``path`` to ``sys.path`` list of import locations. """
- if not hasattr(self, '_savesyspath'):
- self._savesyspath = sys.path[:]
- sys.path.insert(0, str(path))
-
- def chdir(self, path):
- """ change the current working directory to the specified path
- path can be a string or a py.path.local object
- """
- if self._cwd is None:
- self._cwd = os.getcwd()
- if hasattr(path, "chdir"):
- path.chdir()
- else:
- os.chdir(path)
-
- def undo(self):
- """ undo previous changes. This call consumes the
- undo stack. Calling it a second time has no effect unless
- you do more monkeypatching after the undo call."""
- for obj, name, value in self._setattr:
- if value is not notset:
- setattr(obj, name, value)
- else:
- delattr(obj, name)
- self._setattr[:] = []
- for dictionary, name, value in self._setitem:
- if value is notset:
- try:
- del dictionary[name]
- except KeyError:
- pass # was already deleted, so we have the desired state
- else:
- dictionary[name] = value
- self._setitem[:] = []
- if hasattr(self, '_savesyspath'):
- sys.path[:] = self._savesyspath
- del self._savesyspath
-
- if self._cwd is not None:
- os.chdir(self._cwd)
- self._cwd = None
diff --git a/_pytest/nose.py b/_pytest/nose.py
deleted file mode 100644
--- a/_pytest/nose.py
+++ /dev/null
@@ -1,48 +0,0 @@
-""" run test suites written for nose. """
-
-import pytest, py
-import inspect
-import sys
-
-def pytest_runtest_makereport(__multicall__, item, call):
- SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None)
- if SkipTest:
- if call.excinfo and call.excinfo.errisinstance(SkipTest):
- # let's substitute the excinfo with a py.test.skip one
- call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when)
- call.excinfo = call2.excinfo
-
-
-@pytest.mark.trylast
-def pytest_runtest_setup(item):
- if isinstance(item, (pytest.Function)):
- if isinstance(item.parent, pytest.Generator):
- gen = item.parent
- if not hasattr(gen, '_nosegensetup'):
- call_optional(gen.obj, 'setup')
- if isinstance(gen.parent, pytest.Instance):
- call_optional(gen.parent.obj, 'setup')
- gen._nosegensetup = True
- if not call_optional(item.obj, 'setup'):
- # call module level setup if there is no object level one
- call_optional(item.parent.obj, 'setup')
-
-def pytest_runtest_teardown(item):
- if isinstance(item, pytest.Function):
- if not call_optional(item.obj, 'teardown'):
- call_optional(item.parent.obj, 'teardown')
- #if hasattr(item.parent, '_nosegensetup'):
- # #call_optional(item._nosegensetup, 'teardown')
- # del item.parent._nosegensetup
-
-def pytest_make_collect_report(collector):
- if isinstance(collector, pytest.Generator):
- call_optional(collector.obj, 'setup')
-
-def call_optional(obj, name):
- method = getattr(obj, name, None)
- if method:
- # If there's any problems allow the exception to raise rather than
- # silently ignoring them
- method()
- return True
diff --git a/_pytest/pastebin.py b/_pytest/pastebin.py
deleted file mode 100644
--- a/_pytest/pastebin.py
+++ /dev/null
@@ -1,67 +0,0 @@
-""" submit failure or test session information to a pastebin service. """
-import py, sys
-
-class url:
- base = "http://paste.pocoo.org"
- xmlrpc = base + "/xmlrpc/"
- show = base + "/show/"
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting")
- group._addoption('--pastebin', metavar="mode",
- action='store', dest="pastebin", default=None,
- type="choice", choices=['failed', 'all'],
- help="send failed|all info to Pocoo pastebin service.")
-
-def pytest_configure(__multicall__, config):
- import tempfile
- __multicall__.execute()
- if config.option.pastebin == "all":
- config._pastebinfile = tempfile.TemporaryFile('w+')
- tr = config.pluginmanager.getplugin('terminalreporter')
- oldwrite = tr._tw.write
- def tee_write(s, **kwargs):
- oldwrite(s, **kwargs)
- config._pastebinfile.write(str(s))
- tr._tw.write = tee_write
-
-def pytest_unconfigure(config):
- if hasattr(config, '_pastebinfile'):
- config._pastebinfile.seek(0)
- sessionlog = config._pastebinfile.read()
- config._pastebinfile.close()
- del config._pastebinfile
- proxyid = getproxy().newPaste("python", sessionlog)
- pastebinurl = "%s%s" % (url.show, proxyid)
- sys.stderr.write("pastebin session-log: %s\n" % pastebinurl)
- tr = config.pluginmanager.getplugin('terminalreporter')
- del tr._tw.__dict__['write']
-
-def getproxy():
- if sys.version_info < (3, 0):
- from xmlrpclib import ServerProxy
- else:
- from xmlrpc.client import ServerProxy
- return ServerProxy(url.xmlrpc).pastes
-
-def pytest_terminal_summary(terminalreporter):
- if terminalreporter.config.option.pastebin != "failed":
- return
- tr = terminalreporter
- if 'failed' in tr.stats:
- terminalreporter.write_sep("=", "Sending information to Paste Service")
- if tr.config.option.debug:
- terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,))
- serverproxy = getproxy()
- for rep in terminalreporter.stats.get('failed'):
- try:
- msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
- except AttributeError:
- msg = tr._getfailureheadline(rep)
- tw = py.io.TerminalWriter(stringio=True)
- rep.toterminal(tw)
- s = tw.stringio.getvalue()
- assert len(s)
- proxyid = serverproxy.newPaste("python", s)
- pastebinurl = "%s%s" % (url.show, proxyid)
- tr.write_line("%s --> %s" %(msg, pastebinurl))
diff --git a/_pytest/pdb.py b/_pytest/pdb.py
deleted file mode 100644
--- a/_pytest/pdb.py
+++ /dev/null
@@ -1,95 +0,0 @@
-""" interactive debugging with PDB, the Python Debugger. """
-
-import pytest, py
-import sys
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group._addoption('--pdb',
- action="store_true", dest="usepdb", default=False,
- help="start the interactive Python debugger on errors.")
-
-def pytest_namespace():
- return {'set_trace': pytestPDB().set_trace}
-
-def pytest_configure(config):
- if config.getvalue("usepdb"):
- config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
-
-class pytestPDB:
- """ Pseudo PDB that defers to the real pdb. """
- item = None
- collector = None
-
- def set_trace(self):
- """ invoke PDB set_trace debugging, dropping any IO capturing. """
- frame = sys._getframe().f_back
- item = self.item or self.collector
-
- if item is not None:
- capman = item.config.pluginmanager.getplugin("capturemanager")
- out, err = capman.suspendcapture()
- if hasattr(item, 'outerr'):
- item.outerr = (item.outerr[0] + out, item.outerr[1] + err)
- tw = py.io.TerminalWriter()
- tw.line()
- tw.sep(">", "PDB set_trace (IO-capturing turned off)")
- py.std.pdb.Pdb().set_trace(frame)
-
-def pdbitem(item):
- pytestPDB.item = item
-pytest_runtest_setup = pytest_runtest_call = pytest_runtest_teardown = pdbitem
-
-@pytest.mark.tryfirst
-def pytest_make_collect_report(__multicall__, collector):
- try:
- pytestPDB.collector = collector
- return __multicall__.execute()
- finally:
- pytestPDB.collector = None
-
-def pytest_runtest_makereport():
- pytestPDB.item = None
-
-class PdbInvoke:
- @pytest.mark.tryfirst
- def pytest_runtest_makereport(self, item, call, __multicall__):
- rep = __multicall__.execute()
- if not call.excinfo or \
- call.excinfo.errisinstance(pytest.skip.Exception) or \
- call.excinfo.errisinstance(py.std.bdb.BdbQuit):
- return rep
- if "xfail" in rep.keywords:
- return rep
- # we assume that the above execute() suspended capturing
- # XXX we re-use the TerminalReporter's terminalwriter
- # because this seems to avoid some encoding related troubles
- # for not completely clear reasons.
- tw = item.config.pluginmanager.getplugin("terminalreporter")._tw
- tw.line()
- tw.sep(">", "traceback")
- rep.toterminal(tw)
- tw.sep(">", "entering PDB")
- # A doctest.UnexpectedException is not useful for post_mortem.
- # Use the underlying exception instead:
- if isinstance(call.excinfo.value, py.std.doctest.UnexpectedException):
- tb = call.excinfo.value.exc_info[2]
- else:
- tb = call.excinfo._excinfo[2]
- post_mortem(tb)
- rep._pdbshown = True
- return rep
-
-def post_mortem(t):
- pdb = py.std.pdb
- class Pdb(pdb.Pdb):
- def get_stack(self, f, t):
- stack, i = pdb.Pdb.get_stack(self, f, t)
- if f is None:
- i = max(0, len(stack) - 1)
- while i and stack[i][0].f_locals.get("__tracebackhide__", False):
- i-=1
- return stack, i
- p = Pdb()
- p.reset()
- p.interaction(None, t)
diff --git a/_pytest/pytester.py b/_pytest/pytester.py
deleted file mode 100644
--- a/_pytest/pytester.py
+++ /dev/null
@@ -1,680 +0,0 @@
-""" (disabled by default) support for testing py.test and py.test plugins. """
-
-import py, pytest
-import sys, os
-import re
-import inspect
-import time
-from fnmatch import fnmatch
-from _pytest.main import Session, EXIT_OK
-from py.builtin import print_
-from _pytest.core import HookRelay
-
-def pytest_addoption(parser):
- group = parser.getgroup("pylib")
- group.addoption('--no-tools-on-path',
- action="store_true", dest="notoolsonpath", default=False,
- help=("discover tools on PATH instead of going through py.cmdline.")
- )
-
-def pytest_configure(config):
- # This might be called multiple times. Only take the first.
- global _pytest_fullpath
- import pytest
- try:
- _pytest_fullpath
- except NameError:
- _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
- _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
-
-def pytest_funcarg___pytest(request):
- return PytestArg(request)
-
-class PytestArg:
- def __init__(self, request):
- self.request = request
-
- def gethookrecorder(self, hook):
- hookrecorder = HookRecorder(hook._pm)
- hookrecorder.start_recording(hook._hookspecs)
- self.request.addfinalizer(hookrecorder.finish_recording)
- return hookrecorder
-
-class ParsedCall:
- def __init__(self, name, locals):
- assert '_name' not in locals
- self.__dict__.update(locals)
- self.__dict__.pop('self')
- self._name = name
-
- def __repr__(self):
- d = self.__dict__.copy()
- del d['_name']
- return "<ParsedCall %r(**%r)>" %(self._name, d)
-
-class HookRecorder:
- def __init__(self, pluginmanager):
- self._pluginmanager = pluginmanager
- self.calls = []
- self._recorders = {}
-
- def start_recording(self, hookspecs):
- if not isinstance(hookspecs, (list, tuple)):
- hookspecs = [hookspecs]
- for hookspec in hookspecs:
- assert hookspec not in self._recorders
- class RecordCalls:
- _recorder = self
- for name, method in vars(hookspec).items():
- if name[0] != "_":
- setattr(RecordCalls, name, self._makecallparser(method))
- recorder = RecordCalls()
- self._recorders[hookspec] = recorder
- self._pluginmanager.register(recorder)
- self.hook = HookRelay(hookspecs, pm=self._pluginmanager,
- prefix="pytest_")
-
- def finish_recording(self):
- for recorder in self._recorders.values():
- self._pluginmanager.unregister(recorder)
- self._recorders.clear()
-
- def _makecallparser(self, method):
- name = method.__name__
- args, varargs, varkw, default = py.std.inspect.getargspec(method)
- if not args or args[0] != "self":
- args.insert(0, 'self')
- fspec = py.std.inspect.formatargspec(args, varargs, varkw, default)
- # we use exec because we want to have early type
- # errors on wrong input arguments, using
- # *args/**kwargs delays this and gives errors
- # elsewhere
- exec (py.code.compile("""
- def %(name)s%(fspec)s:
- self._recorder.calls.append(
- ParsedCall(%(name)r, locals()))
- """ % locals()))
- return locals()[name]
-
- def getcalls(self, names):
- if isinstance(names, str):
- names = names.split()
- for name in names:
- for cls in self._recorders:
- if name in vars(cls):
- break
- else:
- raise ValueError("callname %r not found in %r" %(
- name, self._recorders.keys()))
- l = []
- for call in self.calls:
- if call._name in names:
- l.append(call)
- return l
-
- def contains(self, entries):
- __tracebackhide__ = True
- from py.builtin import print_
- i = 0
- entries = list(entries)
- backlocals = py.std.sys._getframe(1).f_locals
- while entries:
- name, check = entries.pop(0)
- for ind, call in enumerate(self.calls[i:]):
- if call._name == name:
- print_("NAMEMATCH", name, call)
- if eval(check, backlocals, call.__dict__):
- print_("CHECKERMATCH", repr(check), "->", call)
- else:
- print_("NOCHECKERMATCH", repr(check), "-", call)
- continue
- i += ind + 1
- break
- print_("NONAMEMATCH", name, "with", call)
- else:
- py.test.fail("could not find %r check %r" % (name, check))
-
- def popcall(self, name):
- __tracebackhide__ = True
- for i, call in enumerate(self.calls):
- if call._name == name:
- del self.calls[i]
- return call
- lines = ["could not find call %r, in:" % (name,)]
- lines.extend([" %s" % str(x) for x in self.calls])
- py.test.fail("\n".join(lines))
-
- def getcall(self, name):
- l = self.getcalls(name)
- assert len(l) == 1, (name, l)
- return l[0]
-
-
-def pytest_funcarg__linecomp(request):
- return LineComp()
-
-def pytest_funcarg__LineMatcher(request):
- return LineMatcher
-
-def pytest_funcarg__testdir(request):
- tmptestdir = TmpTestdir(request)
- return tmptestdir
-
-rex_outcome = re.compile("(\d+) (\w+)")
-class RunResult:
- def __init__(self, ret, outlines, errlines, duration):
- self.ret = ret
- self.outlines = outlines
- self.errlines = errlines
- self.stdout = LineMatcher(outlines)
- self.stderr = LineMatcher(errlines)
- self.duration = duration
-
- def parseoutcomes(self):
- for line in reversed(self.outlines):
- if 'seconds' in line:
- outcomes = rex_outcome.findall(line)
- if outcomes:
- d = {}
- for num, cat in outcomes:
- d[cat] = int(num)
- return d
-
-class TmpTestdir:
- def __init__(self, request):
- self.request = request
- self.Config = request.config.__class__
- self._pytest = request.getfuncargvalue("_pytest")
- # XXX remove duplication with tmpdir plugin
- basetmp = request.config._tmpdirhandler.ensuretemp("testdir")
- name = request.function.__name__
- for i in range(100):
- try:
- tmpdir = basetmp.mkdir(name + str(i))
- except py.error.EEXIST:
- continue
- break
- # we need to create another subdir
- # because Directory.collect() currently loads
- # conftest.py from sibling directories
- self.tmpdir = tmpdir.mkdir(name)
- self.plugins = []
- self._syspathremove = []
- self.chdir() # always chdir
- self.request.addfinalizer(self.finalize)
-
- def __repr__(self):
- return "<TmpTestdir %r>" % (self.tmpdir,)
-
- def finalize(self):
- for p in self._syspathremove:
- py.std.sys.path.remove(p)
- if hasattr(self, '_olddir'):
- self._olddir.chdir()
- # delete modules that have been loaded from tmpdir
- for name, mod in list(sys.modules.items()):
- if mod:
- fn = getattr(mod, '__file__', None)
- if fn and fn.startswith(str(self.tmpdir)):
- del sys.modules[name]
-
- def getreportrecorder(self, obj):
- if hasattr(obj, 'config'):
- obj = obj.config
- if hasattr(obj, 'hook'):
- obj = obj.hook
- assert hasattr(obj, '_hookspecs'), obj
- reprec = ReportRecorder(obj)
- reprec.hookrecorder = self._pytest.gethookrecorder(obj)
- reprec.hook = reprec.hookrecorder.hook
- return reprec
-
- def chdir(self):
- old = self.tmpdir.chdir()
- if not hasattr(self, '_olddir'):
- self._olddir = old
-
- def _makefile(self, ext, args, kwargs):
- items = list(kwargs.items())
- if args:
- source = py.builtin._totext("\n").join(
- map(py.builtin._totext, args)) + py.builtin._totext("\n")
- basename = self.request.function.__name__
- items.insert(0, (basename, source))
- ret = None
- for name, value in items:
- p = self.tmpdir.join(name).new(ext=ext)
- source = py.builtin._totext(py.code.Source(value)).lstrip()
- p.write(source.encode("utf-8"), "wb")
- if ret is None:
- ret = p
- return ret
-
-
- def makefile(self, ext, *args, **kwargs):
- return self._makefile(ext, args, kwargs)
-
- def makeini(self, source):
- return self.makefile('cfg', setup=source)
-
- def makeconftest(self, source):
- return self.makepyfile(conftest=source)
-
- def makeini(self, source):
- return self.makefile('.ini', tox=source)
-
- def getinicfg(self, source):
- p = self.makeini(source)
- return py.iniconfig.IniConfig(p)['pytest']
-
- def makepyfile(self, *args, **kwargs):
- return self._makefile('.py', args, kwargs)
-
- def maketxtfile(self, *args, **kwargs):
- return self._makefile('.txt', args, kwargs)
-
- def syspathinsert(self, path=None):
- if path is None:
- path = self.tmpdir
- py.std.sys.path.insert(0, str(path))
- self._syspathremove.append(str(path))
-
- def mkdir(self, name):
- return self.tmpdir.mkdir(name)
-
- def mkpydir(self, name):
- p = self.mkdir(name)
- p.ensure("__init__.py")
- return p
-
- Session = Session
- def getnode(self, config, arg):
- session = Session(config)
- assert '::' not in str(arg)
- p = py.path.local(arg)
- x = session.fspath.bestrelpath(p)
- config.hook.pytest_sessionstart(session=session)
- res = session.perform_collect([x], genitems=False)[0]
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return res
-
- def getpathnode(self, path):
- config = self.parseconfigure(path)
- session = Session(config)
- x = session.fspath.bestrelpath(path)
- config.hook.pytest_sessionstart(session=session)
- res = session.perform_collect([x], genitems=False)[0]
- config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
- return res
-
- def genitems(self, colitems):
- session = colitems[0].session
- result = []
- for colitem in colitems:
- result.extend(session.genitems(colitem))
- return result
-
- def runitem(self, source):
- # used from runner functional tests
- item = self.getitem(source)
- # the test class where we are called from wants to provide the runner
- testclassinstance = py.builtin._getimself(self.request.function)
- runner = testclassinstance.getrunner()
- return runner(item)
-
- def inline_runsource(self, source, *cmdlineargs):
- p = self.makepyfile(source)
- l = list(cmdlineargs) + [p]
- return self.inline_run(*l)
-
- def inline_runsource1(self, *args):
- args = list(args)
- source = args.pop()
- p = self.makepyfile(source)
- l = list(args) + [p]
- reprec = self.inline_run(*l)
- reports = reprec.getreports("pytest_runtest_logreport")
- assert len(reports) == 3, reports # setup/call/teardown
- return reports[1]
-
- def inline_genitems(self, *args):
- return self.inprocess_run(list(args) + ['--collectonly'])
-
- def inline_run(self, *args):
- items, rec = self.inprocess_run(args)
- return rec
-
- def inprocess_run(self, args, plugins=None):
- rec = []
- items = []
- class Collect:
- def pytest_configure(x, config):
- rec.append(self.getreportrecorder(config))
- def pytest_itemcollected(self, item):
- items.append(item)
- if not plugins:
- plugins = []
- plugins.append(Collect())
- ret = self.pytestmain(list(args), plugins=[Collect()])
- reprec = rec[0]
- reprec.ret = ret
- assert len(rec) == 1
- return items, reprec
-
- def parseconfig(self, *args):
- args = [str(x) for x in args]
- for x in args:
- if str(x).startswith('--basetemp'):
- break
- else:
- args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
- import _pytest.core
- config = _pytest.core._prepareconfig(args, self.plugins)
- # the in-process pytest invocation needs to avoid leaking FDs
- # so we register a "reset_capturings" callmon the capturing manager
- # and make sure it gets called
- config._cleanup.append(
- config.pluginmanager.getplugin("capturemanager").reset_capturings)
- import _pytest.config
- self.request.addfinalizer(
- lambda: _pytest.config.pytest_unconfigure(config))
- return config
-
- def parseconfigure(self, *args):
- config = self.parseconfig(*args)
- config.pluginmanager.do_configure(config)
- self.request.addfinalizer(lambda:
- config.pluginmanager.do_unconfigure(config))
- return config
-
- def getitem(self, source, funcname="test_func"):
- for item in self.getitems(source):
- if item.name == funcname:
- return item
- assert 0, "%r item not found in module:\n%s" %(funcname, source)
-
- def getitems(self, source):
- modcol = self.getmodulecol(source)
- return self.genitems([modcol])
-
- def getmodulecol(self, source, configargs=(), withinit=False):
- kw = {self.request.function.__name__: py.code.Source(source).strip()}
- path = self.makepyfile(**kw)
- if withinit:
- self.makepyfile(__init__ = "#")
- self.config = config = self.parseconfigure(path, *configargs)
- node = self.getnode(config, path)
- return node
-
- def collect_by_name(self, modcol, name):
- for colitem in modcol._memocollect():
- if colitem.name == name:
- return colitem
-
- def popen(self, cmdargs, stdout, stderr, **kw):
- env = os.environ.copy()
- env['PYTHONPATH'] = os.pathsep.join(filter(None, [
- str(os.getcwd()), env.get('PYTHONPATH', '')]))
- kw['env'] = env
- #print "env", env
- return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw)
-
- def pytestmain(self, *args, **kwargs):
- class ResetCapturing:
- @pytest.mark.trylast
- def pytest_unconfigure(self, config):
- capman = config.pluginmanager.getplugin("capturemanager")
- capman.reset_capturings()
- plugins = kwargs.setdefault("plugins", [])
- rc = ResetCapturing()
- plugins.append(rc)
- return pytest.main(*args, **kwargs)
-
- def run(self, *cmdargs):
- return self._run(*cmdargs)
-
- def _run(self, *cmdargs):
- cmdargs = [str(x) for x in cmdargs]
- p1 = self.tmpdir.join("stdout")
- p2 = self.tmpdir.join("stderr")
- print_("running", cmdargs, "curdir=", py.path.local())
- f1 = p1.open("wb")
- f2 = p2.open("wb")
- now = time.time()
- popen = self.popen(cmdargs, stdout=f1, stderr=f2,
- close_fds=(sys.platform != "win32"))
- ret = popen.wait()
- f1.close()
- f2.close()
- out = p1.read("rb")
- out = getdecoded(out).splitlines()
- err = p2.read("rb")
- err = getdecoded(err).splitlines()
- def dump_lines(lines, fp):
- try:
- for line in lines:
- py.builtin.print_(line, file=fp)
- except UnicodeEncodeError:
- print("couldn't print to %s because of encoding" % (fp,))
- dump_lines(out, sys.stdout)
- dump_lines(err, sys.stderr)
- return RunResult(ret, out, err, time.time()-now)
-
- def runpybin(self, scriptname, *args):
- fullargs = self._getpybinargs(scriptname) + args
- return self.run(*fullargs)
-
- def _getpybinargs(self, scriptname):
- if not self.request.config.getvalue("notoolsonpath"):
- # XXX we rely on script refering to the correct environment
- # we cannot use "(py.std.sys.executable,script)"
- # becaue on windows the script is e.g. a py.test.exe
- return (py.std.sys.executable, _pytest_fullpath,)
- else:
- py.test.skip("cannot run %r with --no-tools-on-path" % scriptname)
-
- def runpython(self, script, prepend=True):
- if prepend:
- s = self._getsysprepend()
- if s:
- script.write(s + "\n" + script.read())
- return self.run(sys.executable, script)
-
- def _getsysprepend(self):
- if self.request.config.getvalue("notoolsonpath"):
- s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath())
- else:
- s = ""
- return s
-
- def runpython_c(self, command):
- command = self._getsysprepend() + command
- return self.run(py.std.sys.executable, "-c", command)
-
- def runpytest(self, *args):
- p = py.path.local.make_numbered_dir(prefix="runpytest-",
- keep=None, rootdir=self.tmpdir)
- args = ('--basetemp=%s' % p, ) + args
- #for x in args:
- # if '--confcutdir' in str(x):
- # break
- #else:
- # pass
- # args = ('--confcutdir=.',) + args
- plugins = [x for x in self.plugins if isinstance(x, str)]
- if plugins:
- args = ('-p', plugins[0]) + args
- return self.runpybin("py.test", *args)
-
- def spawn_pytest(self, string, expect_timeout=10.0):
- if self.request.config.getvalue("notoolsonpath"):
- py.test.skip("--no-tools-on-path prevents running pexpect-spawn tests")
- basetemp = self.tmpdir.mkdir("pexpect")
- invoke = " ".join(map(str, self._getpybinargs("py.test")))
- cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
- return self.spawn(cmd, expect_timeout=expect_timeout)
-
- def spawn(self, cmd, expect_timeout=10.0):
- pexpect = py.test.importorskip("pexpect", "2.4")
- if hasattr(sys, 'pypy_version_info') and '64' in py.std.platform.machine():
- pytest.skip("pypy-64 bit not supported")
- if sys.platform == "darwin":
- pytest.xfail("pexpect does not work reliably on darwin?!")
- logfile = self.tmpdir.join("spawn.out")
- child = pexpect.spawn(cmd, logfile=logfile.open("w"))
- child.timeout = expect_timeout
- return child
-
-def getdecoded(out):
- try:
- return out.decode("utf-8")
- except UnicodeDecodeError:
- return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
- py.io.saferepr(out),)
-
-class ReportRecorder(object):
- def __init__(self, hook):
- self.hook = hook
- self.pluginmanager = hook._pm
- self.pluginmanager.register(self)
-
- def getcall(self, name):
- return self.hookrecorder.getcall(name)
-
- def popcall(self, name):
- return self.hookrecorder.popcall(name)
-
- def getcalls(self, names):
- """ return list of ParsedCall instances matching the given eventname. """
- return self.hookrecorder.getcalls(names)
-
- # functionality for test reports
-
- def getreports(self, names="pytest_runtest_logreport pytest_collectreport"):
- return [x.report for x in self.getcalls(names)]
-
- def matchreport(self, inamepart="",
- names="pytest_runtest_logreport pytest_collectreport", when=None):
- """ return a testreport whose dotted import path matches """
- l = []
- for rep in self.getreports(names=names):
- try:
- if not when and rep.when != "call" and rep.passed:
- # setup/teardown passing reports - let's ignore those
- continue
- except AttributeError:
- pass
- if when and getattr(rep, 'when', None) != when:
- continue
- if not inamepart or inamepart in rep.nodeid.split("::"):
- l.append(rep)
- if not l:
- raise ValueError("could not find test report matching %r: no test reports at all!" %
- (inamepart,))
- if len(l) > 1:
- raise ValueError("found more than one testreport matching %r: %s" %(
- inamepart, l))
- return l[0]
-
- def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'):
- return [rep for rep in self.getreports(names) if rep.failed]
-
- def getfailedcollections(self):
- return self.getfailures('pytest_collectreport')
-
- def listoutcomes(self):
- passed = []
- skipped = []
- failed = []
- for rep in self.getreports("pytest_runtest_logreport"):
- if rep.passed:
- if rep.when == "call":
- passed.append(rep)
- elif rep.skipped:
- skipped.append(rep)
- elif rep.failed:
- failed.append(rep)
- return passed, skipped, failed
-
- def countoutcomes(self):
- return [len(x) for x in self.listoutcomes()]
-
- def assertoutcome(self, passed=0, skipped=0, failed=0):
- realpassed, realskipped, realfailed = self.listoutcomes()
- assert passed == len(realpassed)
- assert skipped == len(realskipped)
- assert failed == len(realfailed)
-
- def clear(self):
- self.hookrecorder.calls[:] = []
-
- def unregister(self):
- self.pluginmanager.unregister(self)
- self.hookrecorder.finish_recording()
-
-class LineComp:
- def __init__(self):
- self.stringio = py.io.TextIO()
-
- def assert_contains_lines(self, lines2):
- """ assert that lines2 are contained (linearly) in lines1.
- return a list of extralines found.
- """
- __tracebackhide__ = True
- val = self.stringio.getvalue()
- self.stringio.truncate(0)
- self.stringio.seek(0)
- lines1 = val.split("\n")
- return LineMatcher(lines1).fnmatch_lines(lines2)
-
-class LineMatcher:
- def __init__(self, lines):
- self.lines = lines
-
- def str(self):
- return "\n".join(self.lines)
-
- def _getlines(self, lines2):
- if isinstance(lines2, str):
- lines2 = py.code.Source(lines2)
- if isinstance(lines2, py.code.Source):
- lines2 = lines2.strip().lines
- return lines2
-
- def fnmatch_lines_random(self, lines2):
- lines2 = self._getlines(lines2)
- for line in lines2:
- for x in self.lines:
- if line == x or fnmatch(x, line):
- print_("matched: ", repr(line))
- break
- else:
- raise ValueError("line %r not found in output" % line)
-
- def fnmatch_lines(self, lines2):
- def show(arg1, arg2):
- py.builtin.print_(arg1, arg2, file=py.std.sys.stderr)
- lines2 = self._getlines(lines2)
- lines1 = self.lines[:]
- nextline = None
- extralines = []
- __tracebackhide__ = True
- for line in lines2:
- nomatchprinted = False
- while lines1:
- nextline = lines1.pop(0)
- if line == nextline:
- show("exact match:", repr(line))
- break
- elif fnmatch(nextline, line):
- show("fnmatch:", repr(line))
- show(" with:", repr(nextline))
- break
- else:
- if not nomatchprinted:
- show("nomatch:", repr(line))
- nomatchprinted = True
- show(" and:", repr(nextline))
- extralines.append(nextline)
- else:
- py.test.fail("remains unmatched: %r, see stderr" % (line,))
diff --git a/_pytest/python.py b/_pytest/python.py
deleted file mode 100644
--- a/_pytest/python.py
+++ /dev/null
@@ -1,1009 +0,0 @@
-""" Python test discovery, setup and run of test functions. """
-import py
-import inspect
-import sys
-import pytest
-from py._code.code import TerminalRepr
-from _pytest.monkeypatch import monkeypatch
-
-import _pytest
-cutdir = py.path.local(_pytest.__file__).dirpath()
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group.addoption('--funcargs',
- action="store_true", dest="showfuncargs", default=False,
- help="show available function arguments, sorted by plugin")
- parser.addini("python_files", type="args",
- default=('test_*.py', '*_test.py'),
- help="glob-style file patterns for Python test module discovery")
- parser.addini("python_classes", type="args", default=("Test",),
- help="prefixes for Python test class discovery")
- parser.addini("python_functions", type="args", default=("test",),
- help="prefixes for Python test function and method discovery")
-
-def pytest_cmdline_main(config):
- if config.option.showfuncargs:
- showfuncargs(config)
- return 0
-
-
-def pytest_generate_tests(metafunc):
- try:
- param = metafunc.function.parametrize
- except AttributeError:
- return
- for p in param:
- metafunc.parametrize(*p.args, **p.kwargs)
-
-def pytest_configure(config):
- config.addinivalue_line("markers",
- "parametrize(argnames, argvalues): call a test function multiple "
- "times passing in multiple different argument value sets. Example: "
- "@parametrize('arg1', [1,2]) would lead to two calls of the decorated "
- "test function, one with arg1=1 and another with arg1=2."
- )
-
-
- at pytest.mark.trylast
-def pytest_namespace():
- raises.Exception = pytest.fail.Exception
- return {
- 'raises' : raises,
- 'collect': {
- 'Module': Module, 'Class': Class, 'Instance': Instance,
- 'Function': Function, 'Generator': Generator,
- '_fillfuncargs': fillfuncargs}
- }
-
-def pytest_funcarg__pytestconfig(request):
- """ the pytest config object with access to command line opts."""
- return request.config
-
-def pytest_pyfunc_call(__multicall__, pyfuncitem):
- if not __multicall__.execute():
- testfunction = pyfuncitem.obj
- if pyfuncitem._isyieldedfunction():
- testfunction(*pyfuncitem._args)
- else:
- funcargs = pyfuncitem.funcargs
- testfunction(**funcargs)
-
-def pytest_collect_file(path, parent):
- ext = path.ext
- pb = path.purebasename
- if ext == ".py":
- if not parent.session.isinitpath(path):
- for pat in parent.config.getini('python_files'):
- if path.fnmatch(pat):
- break
- else:
- return
- return parent.ihook.pytest_pycollect_makemodule(
- path=path, parent=parent)
-
-def pytest_pycollect_makemodule(path, parent):
- return Module(path, parent)
-
-def pytest_pycollect_makeitem(__multicall__, collector, name, obj):
- res = __multicall__.execute()
- if res is not None:
- return res
- if inspect.isclass(obj):
- #if hasattr(collector.obj, 'unittest'):
- # return # we assume it's a mixin class for a TestCase derived one
- if collector.classnamefilter(name):
- if not hasinit(obj):
- Class = collector._getcustomclass("Class")
- return Class(name, parent=collector)
- elif collector.funcnamefilter(name) and hasattr(obj, '__call__'):
- if is_generator(obj):
- return Generator(name, parent=collector)
- else:
- return collector._genfunctions(name, obj)
-
-def is_generator(func):
- try:
- return py.code.getrawcode(func).co_flags & 32 # generator function
- except AttributeError: # builtin functions have no bytecode
- # assume them to not be generators
- return False
-
-class PyobjMixin(object):
- def obj():
- def fget(self):
- try:
- return self._obj
- except AttributeError:
- self._obj = obj = self._getobj()
- return obj
- def fset(self, value):
- self._obj = value
- return property(fget, fset, None, "underlying python object")
- obj = obj()
-
- def _getobj(self):
- return getattr(self.parent.obj, self.name)
-
- def getmodpath(self, stopatmodule=True, includemodule=False):
- """ return python path relative to the containing module. """
- chain = self.listchain()
- chain.reverse()
- parts = []
- for node in chain:
- if isinstance(node, Instance):
- continue
- name = node.name
- if isinstance(node, Module):
- assert name.endswith(".py")
- name = name[:-3]
- if stopatmodule:
- if includemodule:
- parts.append(name)
- break
- parts.append(name)
- parts.reverse()
- s = ".".join(parts)
- return s.replace(".[", "[")
-
- def _getfslineno(self):
- try:
- return self._fslineno
- except AttributeError:
- pass
- obj = self.obj
- # xxx let decorators etc specify a sane ordering
- if hasattr(obj, 'place_as'):
- obj = obj.place_as
-
- self._fslineno = py.code.getfslineno(obj)
- assert isinstance(self._fslineno[1], int), obj
- return self._fslineno
-
- def reportinfo(self):
- # XXX caching?
- obj = self.obj
- if hasattr(obj, 'compat_co_firstlineno'):
- # nose compatibility
- fspath = sys.modules[obj.__module__].__file__
- if fspath.endswith(".pyc"):
- fspath = fspath[:-1]
- #assert 0
- #fn = inspect.getsourcefile(obj) or inspect.getfile(obj)
- lineno = obj.compat_co_firstlineno
- modpath = obj.__module__
- else:
- fspath, lineno = self._getfslineno()
- modpath = self.getmodpath()
- assert isinstance(lineno, int)
- return fspath, lineno, modpath
-
-class PyCollectorMixin(PyobjMixin, pytest.Collector):
-
- def funcnamefilter(self, name):
- for prefix in self.config.getini("python_functions"):
- if name.startswith(prefix):
- return True
-
- def classnamefilter(self, name):
- for prefix in self.config.getini("python_classes"):
- if name.startswith(prefix):
- return True
-
- def collect(self):
- # NB. we avoid random getattrs and peek in the __dict__ instead
- # (XXX originally introduced from a PyPy need, still true?)
- dicts = [getattr(self.obj, '__dict__', {})]
- for basecls in inspect.getmro(self.obj.__class__):
- dicts.append(basecls.__dict__)
- seen = {}
- l = []
- for dic in dicts:
- for name, obj in dic.items():
- if name in seen:
- continue
- seen[name] = True
- if name[0] != "_":
- res = self.makeitem(name, obj)
- if res is None:
- continue
- if not isinstance(res, list):
- res = [res]
- l.extend(res)
- l.sort(key=lambda item: item.reportinfo()[:2])
- return l
-
- def makeitem(self, name, obj):
- return self.ihook.pytest_pycollect_makeitem(
- collector=self, name=name, obj=obj)
-
- def _genfunctions(self, name, funcobj):
- module = self.getparent(Module).obj
- clscol = self.getparent(Class)
- cls = clscol and clscol.obj or None
- transfer_markers(funcobj, cls, module)
- metafunc = Metafunc(funcobj, config=self.config,
- cls=cls, module=module)
- gentesthook = self.config.hook.pytest_generate_tests
- extra = [module]
- if cls is not None:
- extra.append(cls())
- plugins = self.getplugins() + extra
- gentesthook.pcall(plugins, metafunc=metafunc)
- Function = self._getcustomclass("Function")
- if not metafunc._calls:
- return Function(name, parent=self)
- l = []
- for callspec in metafunc._calls:
- subname = "%s[%s]" %(name, callspec.id)
- function = Function(name=subname, parent=self,
- callspec=callspec, callobj=funcobj, keywords={callspec.id:True})
- l.append(function)
- return l
-
-def transfer_markers(funcobj, cls, mod):
- # XXX this should rather be code in the mark plugin or the mark
- # plugin should merge with the python plugin.
- for holder in (cls, mod):
- try:
- pytestmark = holder.pytestmark
- except AttributeError:
- continue
- if isinstance(pytestmark, list):
- for mark in pytestmark:
- mark(funcobj)
- else:
- pytestmark(funcobj)
-
-class Module(pytest.File, PyCollectorMixin):
- def _getobj(self):
- return self._memoizedcall('_obj', self._importtestmodule)
-
- def _importtestmodule(self):
- # we assume we are only called once per module
- try:
- mod = self.fspath.pyimport(ensuresyspath=True)
- except SyntaxError:
- excinfo = py.code.ExceptionInfo()
- raise self.CollectError(excinfo.getrepr(style="short"))
- except self.fspath.ImportMismatchError:
- e = sys.exc_info()[1]
- raise self.CollectError(
- "import file mismatch:\n"
- "imported module %r has this __file__ attribute:\n"
- " %s\n"
- "which is not the same as the test file we want to collect:\n"
- " %s\n"
- "HINT: remove __pycache__ / .pyc files and/or use a "
- "unique basename for your test file modules"
- % e.args
- )
- #print "imported test module", mod
- self.config.pluginmanager.consider_module(mod)
- return mod
-
- def setup(self):
- if hasattr(self.obj, 'setup_module'):
- #XXX: nose compat hack, move to nose plugin
- # if it takes a positional arg, its probably a pytest style one
- # so we pass the current module object
- if inspect.getargspec(self.obj.setup_module)[0]:
- self.obj.setup_module(self.obj)
- else:
- self.obj.setup_module()
-
- def teardown(self):
- if hasattr(self.obj, 'teardown_module'):
- #XXX: nose compat hack, move to nose plugin
- # if it takes a positional arg, its probably a py.test style one
- # so we pass the current module object
- if inspect.getargspec(self.obj.teardown_module)[0]:
- self.obj.teardown_module(self.obj)
- else:
- self.obj.teardown_module()
-
-class Class(PyCollectorMixin, pytest.Collector):
-
- def collect(self):
- return [self._getcustomclass("Instance")(name="()", parent=self)]
-
- def setup(self):
- setup_class = getattr(self.obj, 'setup_class', None)
- if setup_class is not None:
- setup_class = getattr(setup_class, 'im_func', setup_class)
- setup_class(self.obj)
-
- def teardown(self):
- teardown_class = getattr(self.obj, 'teardown_class', None)
- if teardown_class is not None:
- teardown_class = getattr(teardown_class, 'im_func', teardown_class)
- teardown_class(self.obj)
-
-class Instance(PyCollectorMixin, pytest.Collector):
- def _getobj(self):
- return self.parent.obj()
-
- def newinstance(self):
- self.obj = self._getobj()
- return self.obj
-
-class FunctionMixin(PyobjMixin):
- """ mixin for the code common to Function and Generator.
- """
- def setup(self):
- """ perform setup for this test function. """
- if hasattr(self, '_preservedparent'):
- obj = self._preservedparent
- elif isinstance(self.parent, Instance):
- obj = self.parent.newinstance()
- self.obj = self._getobj()
- else:
- obj = self.parent.obj
- if inspect.ismethod(self.obj):
- name = 'setup_method'
- else:
- name = 'setup_function'
- setup_func_or_method = getattr(obj, name, None)
- if setup_func_or_method is not None:
- setup_func_or_method(self.obj)
-
- def teardown(self):
- """ perform teardown for this test function. """
- if inspect.ismethod(self.obj):
- name = 'teardown_method'
- else:
- name = 'teardown_function'
- obj = self.parent.obj
- teardown_func_or_meth = getattr(obj, name, None)
- if teardown_func_or_meth is not None:
- teardown_func_or_meth(self.obj)
-
- def _prunetraceback(self, excinfo):
- if hasattr(self, '_obj') and not self.config.option.fulltrace:
- code = py.code.Code(self.obj)
- path, firstlineno = code.path, code.firstlineno
- traceback = excinfo.traceback
- ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
- if ntraceback == traceback:
- ntraceback = ntraceback.cut(path=path)
- if ntraceback == traceback:
- ntraceback = ntraceback.cut(excludepath=cutdir)
- excinfo.traceback = ntraceback.filter()
-
- def _repr_failure_py(self, excinfo, style="long"):
- if excinfo.errisinstance(FuncargRequest.LookupError):
- fspath, lineno, msg = self.reportinfo()
- lines, _ = inspect.getsourcelines(self.obj)
- for i, line in enumerate(lines):
- if line.strip().startswith('def'):
- return FuncargLookupErrorRepr(fspath, lineno,
- lines[:i+1], str(excinfo.value))
- if excinfo.errisinstance(pytest.fail.Exception):
- if not excinfo.value.pytrace:
- return str(excinfo.value)
- return super(FunctionMixin, self)._repr_failure_py(excinfo,
- style=style)
-
- def repr_failure(self, excinfo, outerr=None):
- assert outerr is None, "XXX outerr usage is deprecated"
- return self._repr_failure_py(excinfo,
- style=self.config.option.tbstyle)
-
-class FuncargLookupErrorRepr(TerminalRepr):
- def __init__(self, filename, firstlineno, deflines, errorstring):
- self.deflines = deflines
- self.errorstring = errorstring
- self.filename = filename
- self.firstlineno = firstlineno
-
- def toterminal(self, tw):
- tw.line()
- for line in self.deflines:
- tw.line(" " + line.strip())
- for line in self.errorstring.split("\n"):
- tw.line(" " + line.strip(), red=True)
- tw.line()
- tw.line("%s:%d" % (self.filename, self.firstlineno+1))
-
-
-class Generator(FunctionMixin, PyCollectorMixin, pytest.Collector):
- def collect(self):
- # test generators are seen as collectors but they also
- # invoke setup/teardown on popular request
- # (induced by the common "test_*" naming shared with normal tests)
- self.session._setupstate.prepare(self)
- # see FunctionMixin.setup and test_setupstate_is_preserved_134
- self._preservedparent = self.parent.obj
- l = []
- seen = {}
- for i, x in enumerate(self.obj()):
- name, call, args = self.getcallargs(x)
- if not py.builtin.callable(call):
- raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
- if name is None:
- name = "[%d]" % i
- else:
- name = "['%s']" % name
- if name in seen:
- raise ValueError("%r generated tests with non-unique name %r" %(self, name))
- seen[name] = True
- l.append(self.Function(name, self, args=args, callobj=call))
- return l
-
- def getcallargs(self, obj):
- if not isinstance(obj, (tuple, list)):
- obj = (obj,)
- # explict naming
- if isinstance(obj[0], py.builtin._basestring):
- name = obj[0]
- obj = obj[1:]
- else:
- name = None
- call, args = obj[0], obj[1:]
- return name, call, args
-
-
-#
-# Test Items
-#
-_dummy = object()
-class Function(FunctionMixin, pytest.Item):
- """ a Function Item is responsible for setting up
- and executing a Python callable test object.
- """
- _genid = None
- def __init__(self, name, parent=None, args=None, config=None,
- callspec=None, callobj=_dummy, keywords=None, session=None):
- super(Function, self).__init__(name, parent,
- config=config, session=session)
- self._args = args
- if self._isyieldedfunction():
- assert not callspec, (
- "yielded functions (deprecated) cannot have funcargs")
- else:
- if callspec is not None:
- self.callspec = callspec
- self.funcargs = callspec.funcargs or {}
- self._genid = callspec.id
- if hasattr(callspec, "param"):
- self._requestparam = callspec.param
- else:
- self.funcargs = {}
- if callobj is not _dummy:
- self._obj = callobj
- self.function = getattr(self.obj, 'im_func', self.obj)
- self.keywords.update(py.builtin._getfuncdict(self.obj) or {})
- if keywords:
- self.keywords.update(keywords)
-
- def _getobj(self):
- name = self.name
- i = name.find("[") # parametrization
- if i != -1:
- name = name[:i]
- return getattr(self.parent.obj, name)
-
- def _isyieldedfunction(self):
- return self._args is not None
-
- def runtest(self):
- """ execute the underlying test function. """
- self.ihook.pytest_pyfunc_call(pyfuncitem=self)
-
- def setup(self):
- super(Function, self).setup()
- if hasattr(self, 'funcargs'):
- fillfuncargs(self)
-
- def __eq__(self, other):
- try:
- return (self.name == other.name and
- self._args == other._args and
- self.parent == other.parent and
- self.obj == other.obj and
- getattr(self, '_genid', None) ==
- getattr(other, '_genid', None)
- )
- except AttributeError:
- pass
- return False
-
- def __ne__(self, other):
- return not self == other
-
- def __hash__(self):
- return hash((self.parent, self.name))
-
-def hasinit(obj):
- init = getattr(obj, '__init__', None)
- if init:
- if init != object.__init__:
- return True
-
-
-def getfuncargnames(function, startindex=None):
- # XXX merge with main.py's varnames
- argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0]
- if startindex is None:
- startindex = py.std.inspect.ismethod(function) and 1 or 0
- defaults = getattr(function, 'func_defaults',
- getattr(function, '__defaults__', None)) or ()
- numdefaults = len(defaults)
- if numdefaults:
- return argnames[startindex:-numdefaults]
- return argnames[startindex:]
-
-def fillfuncargs(function):
- """ fill missing funcargs. """
- request = FuncargRequest(pyfuncitem=function)
- request._fillfuncargs()
-
-_notexists = object()
-
-class CallSpec2(object):
- def __init__(self, metafunc):
- self.metafunc = metafunc
- self.funcargs = {}
- self._idlist = []
- self.params = {}
- self._globalid = _notexists
- self._globalid_args = set()
- self._globalparam = _notexists
-
- def copy(self, metafunc):
- cs = CallSpec2(self.metafunc)
- cs.funcargs.update(self.funcargs)
- cs.params.update(self.params)
- cs._idlist = list(self._idlist)
- cs._globalid = self._globalid
- cs._globalid_args = self._globalid_args
- cs._globalparam = self._globalparam
- return cs
-
- def _checkargnotcontained(self, arg):
- if arg in self.params or arg in self.funcargs:
- raise ValueError("duplicate %r" %(arg,))
-
- def getparam(self, name):
- try:
- return self.params[name]
- except KeyError:
- if self._globalparam is _notexists:
- raise ValueError(name)
- return self._globalparam
-
- @property
- def id(self):
- return "-".join(map(str, filter(None, self._idlist)))
-
- def setmulti(self, valtype, argnames, valset, id):
- for arg,val in zip(argnames, valset):
- self._checkargnotcontained(arg)
- getattr(self, valtype)[arg] = val
- self._idlist.append(id)
-
- def setall(self, funcargs, id, param):
- for x in funcargs:
- self._checkargnotcontained(x)
- self.funcargs.update(funcargs)
- if id is not _notexists:
- self._idlist.append(id)
- if param is not _notexists:
- assert self._globalparam is _notexists
- self._globalparam = param
-
-
-class Metafunc:
- def __init__(self, function, config=None, cls=None, module=None):
- self.config = config
- self.module = module
- self.function = function
- self.funcargnames = getfuncargnames(function,
- startindex=int(cls is not None))
- self.cls = cls
- self.module = module
- self._calls = []
- self._ids = py.builtin.set()
-
- def parametrize(self, argnames, argvalues, indirect=False, ids=None):
- """ Add new invocations to the underlying test function using the list
- of argvalues for the given argnames. Parametrization is performed
- during the collection phase. If you need to setup expensive resources
- you may pass indirect=True and implement a funcarg factory which can
- perform the expensive setup just before a test is actually run.
-
- :arg argnames: an argument name or a list of argument names
-
- :arg argvalues: a list of values for the argname or a list of tuples of
- values for the list of argument names.
-
- :arg indirect: if True each argvalue corresponding to an argument will
- be passed as request.param to its respective funcarg factory so
- that it can perform more expensive setups during the setup phase of
- a test rather than at collection time.
-
- :arg ids: list of string ids each corresponding to the argvalues so
- that they are part of the test id. If no ids are provided they will
- be generated automatically from the argvalues.
- """
- if not isinstance(argnames, (tuple, list)):
- argnames = (argnames,)
- argvalues = [(val,) for val in argvalues]
- if not indirect:
- #XXX should we also check for the opposite case?
- for arg in argnames:
- if arg not in self.funcargnames:
- raise ValueError("%r has no argument %r" %(self.function, arg))
- valtype = indirect and "params" or "funcargs"
- if not ids:
- idmaker = IDMaker()
- ids = list(map(idmaker, argvalues))
- newcalls = []
- for callspec in self._calls or [CallSpec2(self)]:
- for i, valset in enumerate(argvalues):
- assert len(valset) == len(argnames)
- newcallspec = callspec.copy(self)
- newcallspec.setmulti(valtype, argnames, valset, ids[i])
- newcalls.append(newcallspec)
- self._calls = newcalls
-
- def addcall(self, funcargs=None, id=_notexists, param=_notexists):
- """ (deprecated, use parametrize) Add a new call to the underlying
- test function during the collection phase of a test run. Note that
- request.addcall() is called during the test collection phase prior and
- independently to actual test execution. You should only use addcall()
- if you need to specify multiple arguments of a test function.
-
- :arg funcargs: argument keyword dictionary used when invoking
- the test function.
-
- :arg id: used for reporting and identification purposes. If you
- don't supply an `id` an automatic unique id will be generated.
-
- :arg param: a parameter which will be exposed to a later funcarg factory
- invocation through the ``request.param`` attribute.
- """
- assert funcargs is None or isinstance(funcargs, dict)
- if funcargs is not None:
- for name in funcargs:
- if name not in self.funcargnames:
- pytest.fail("funcarg %r not used in this function." % name)
- else:
- funcargs = {}
- if id is None:
- raise ValueError("id=None not allowed")
- if id is _notexists:
- id = len(self._calls)
- id = str(id)
- if id in self._ids:
- raise ValueError("duplicate id %r" % id)
- self._ids.add(id)
-
- cs = CallSpec2(self)
- cs.setall(funcargs, id, param)
- self._calls.append(cs)
-
-class IDMaker:
- def __init__(self):
- self.counter = 0
- def __call__(self, valset):
- l = []
- for val in valset:
- if not isinstance(val, (int, str)):
- val = "."+str(self.counter)
- self.counter += 1
- l.append(str(val))
- return "-".join(l)
-
-class FuncargRequest:
- """ A request for function arguments from a test function.
-
- Note that there is an optional ``param`` attribute in case
- there was an invocation to metafunc.addcall(param=...).
- If no such call was done in a ``pytest_generate_tests``
- hook, the attribute will not be present.
- """
- _argprefix = "pytest_funcarg__"
- _argname = None
-
- class LookupError(LookupError):
- """ error on performing funcarg request. """
-
- def __init__(self, pyfuncitem):
- self._pyfuncitem = pyfuncitem
- if hasattr(pyfuncitem, '_requestparam'):
- self.param = pyfuncitem._requestparam
- extra = [obj for obj in (self.module, self.instance) if obj]
- self._plugins = pyfuncitem.getplugins() + extra
- self._funcargs = self._pyfuncitem.funcargs.copy()
- self._name2factory = {}
- self._currentarg = None
-
- @property
- def function(self):
- """ function object of the test invocation. """
- return self._pyfuncitem.obj
-
- @property
- def keywords(self):
- """ keywords of the test function item.
-
- .. versionadded:: 2.0
- """
- return self._pyfuncitem.keywords
-
- @property
- def module(self):
- """ module where the test function was collected. """
- return self._pyfuncitem.getparent(pytest.Module).obj
-
- @property
- def cls(self):
- """ class (can be None) where the test function was collected. """
- clscol = self._pyfuncitem.getparent(pytest.Class)
- if clscol:
- return clscol.obj
- @property
- def instance(self):
- """ instance (can be None) on which test function was collected. """
- return py.builtin._getimself(self.function)
-
- @property
- def config(self):
- """ the pytest config object associated with this request. """
- return self._pyfuncitem.config
-
- @property
- def fspath(self):
- """ the file system path of the test module which collected this test. """
- return self._pyfuncitem.fspath
-
- def _fillfuncargs(self):
- argnames = getfuncargnames(self.function)
- if argnames:
- assert not getattr(self._pyfuncitem, '_args', None), (
- "yielded functions cannot have funcargs")
- for argname in argnames:
- if argname not in self._pyfuncitem.funcargs:
- self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname)
-
-
- def applymarker(self, marker):
- """ Apply a marker to a single test function invocation.
- This method is useful if you don't want to have a keyword/marker
- on all function invocations.
-
- :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
- created by a call to ``py.test.mark.NAME(...)``.
- """
- if not isinstance(marker, py.test.mark.XYZ.__class__):
- raise ValueError("%r is not a py.test.mark.* object")
- self._pyfuncitem.keywords[marker.markname] = marker
-
- def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
- """ Return a testing resource managed by ``setup`` &
- ``teardown`` calls. ``scope`` and ``extrakey`` determine when the
- ``teardown`` function will be called so that subsequent calls to
- ``setup`` would recreate the resource.
-
- :arg teardown: function receiving a previously setup resource.
- :arg setup: a no-argument function creating a resource.
- :arg scope: a string value out of ``function``, ``class``, ``module``
- or ``session`` indicating the caching lifecycle of the resource.
- :arg extrakey: added to internal caching key of (funcargname, scope).
- """
- if not hasattr(self.config, '_setupcache'):
- self.config._setupcache = {} # XXX weakref?
- cachekey = (self._currentarg, self._getscopeitem(scope), extrakey)
- cache = self.config._setupcache
- try:
- val = cache[cachekey]
- except KeyError:
- val = setup()
- cache[cachekey] = val
- if teardown is not None:
- def finalizer():
- del cache[cachekey]
- teardown(val)
- self._addfinalizer(finalizer, scope=scope)
- return val
-
- def getfuncargvalue(self, argname):
- """ Retrieve a function argument by name for this test
- function invocation. This allows one function argument factory
- to call another function argument factory. If there are two
- funcarg factories for the same test function argument the first
- factory may use ``getfuncargvalue`` to call the second one and
- do something additional with the resource.
- """
- try:
- return self._funcargs[argname]
- except KeyError:
- pass
- if argname not in self._name2factory:
- self._name2factory[argname] = self.config.pluginmanager.listattr(
- plugins=self._plugins,
- attrname=self._argprefix + str(argname)
- )
- #else: we are called recursively
- if not self._name2factory[argname]:
- self._raiselookupfailed(argname)
- funcargfactory = self._name2factory[argname].pop()
- oldarg = self._currentarg
- mp = monkeypatch()
- mp.setattr(self, '_currentarg', argname)
- try:
- param = self._pyfuncitem.callspec.getparam(argname)
- except (AttributeError, ValueError):
- pass
- else:
- mp.setattr(self, 'param', param, raising=False)
- try:
- self._funcargs[argname] = res = funcargfactory(request=self)
- finally:
- mp.undo()
- return res
-
- def _getscopeitem(self, scope):
- if scope == "function":
- return self._pyfuncitem
- elif scope == "session":
- return None
- elif scope == "class":
- x = self._pyfuncitem.getparent(pytest.Class)
- if x is not None:
- return x
- scope = "module"
- if scope == "module":
- return self._pyfuncitem.getparent(pytest.Module)
- raise ValueError("unknown finalization scope %r" %(scope,))
-
- def addfinalizer(self, finalizer):
- """add finalizer function to be called after test function
- finished execution. """
- self._addfinalizer(finalizer, scope="function")
-
- def _addfinalizer(self, finalizer, scope):
- colitem = self._getscopeitem(scope)
- self._pyfuncitem.session._setupstate.addfinalizer(
- finalizer=finalizer, colitem=colitem)
-
- def __repr__(self):
- return "" %(self._pyfuncitem)
-
- def _raiselookupfailed(self, argname):
- available = []
- for plugin in self._plugins:
- for name in vars(plugin):
- if name.startswith(self._argprefix):
- name = name[len(self._argprefix):]
- if name not in available:
- available.append(name)
- fspath, lineno, msg = self._pyfuncitem.reportinfo()
- msg = "LookupError: no factory found for function argument %r" % (argname,)
- msg += "\n available funcargs: %s" %(", ".join(available),)
- msg += "\n use 'py.test --funcargs [testpath]' for help on them."
- raise self.LookupError(msg)
-
-def showfuncargs(config):
- from _pytest.main import wrap_session
- return wrap_session(config, _showfuncargs_main)
-
-def _showfuncargs_main(config, session):
- session.perform_collect()
- if session.items:
- plugins = session.items[0].getplugins()
- else:
- plugins = session.getplugins()
- curdir = py.path.local()
- tw = py.io.TerminalWriter()
- verbose = config.getvalue("verbose")
- for plugin in plugins:
- available = []
- for name, factory in vars(plugin).items():
- if name.startswith(FuncargRequest._argprefix):
- name = name[len(FuncargRequest._argprefix):]
- if name not in available:
- available.append([name, factory])
- if available:
- pluginname = plugin.__name__
- for name, factory in available:
- loc = getlocation(factory, curdir)
- if verbose:
- funcargspec = "%s -- %s" %(name, loc,)
- else:
- funcargspec = name
- tw.line(funcargspec, green=True)
- doc = factory.__doc__ or ""
- if doc:
- for line in doc.split("\n"):
- tw.line(" " + line.strip())
- else:
- tw.line(" %s: no docstring available" %(loc,),
- red=True)
-
-def getlocation(function, curdir):
- import inspect
- fn = py.path.local(inspect.getfile(function))
- lineno = py.builtin._getcode(function).co_firstlineno
- if fn.relto(curdir):
- fn = fn.relto(curdir)
- return "%s:%d" %(fn, lineno+1)
-
-# builtin pytest.raises helper
-
-def raises(ExpectedException, *args, **kwargs):
- """ assert that a code block/function call raises @ExpectedException
- and raise a failure exception otherwise.
-
- If using Python 2.5 or above, you may use this function as a
- context manager::
-
- >>> with raises(ZeroDivisionError):
- ... 1/0
-
- Or you can specify a callable by passing a to-be-called lambda::
-
- >>> raises(ZeroDivisionError, lambda: 1/0)
-
-
- or you can specify an arbitrary callable with arguments::
-
- >>> def f(x): return 1/x
- ...
- >>> raises(ZeroDivisionError, f, 0)
-
- >>> raises(ZeroDivisionError, f, x=0)
-
-
- A third possibility is to use a string to be executed::
-
- >>> raises(ZeroDivisionError, "f(0)")
-
- """
- __tracebackhide__ = True
-
- if not args:
- return RaisesContext(ExpectedException)
- elif isinstance(args[0], str):
- code, = args
- assert isinstance(code, str)
- frame = sys._getframe(1)
- loc = frame.f_locals.copy()
- loc.update(kwargs)
- #print "raises frame scope: %r" % frame.f_locals
- try:
- code = py.code.Source(code).compile()
- py.builtin.exec_(code, frame.f_globals, loc)
- # XXX didn'T mean f_globals == f_locals something special?
- # this is destroyed here ...
- except ExpectedException:
- return py.code.ExceptionInfo()
- else:
- func = args[0]
- try:
- func(*args[1:], **kwargs)
- except ExpectedException:
- return py.code.ExceptionInfo()
- k = ", ".join(["%s=%r" % x for x in kwargs.items()])
- if k:
- k = ', ' + k
- expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k)
- pytest.fail("DID NOT RAISE")
-
-class RaisesContext(object):
- def __init__(self, ExpectedException):
- self.ExpectedException = ExpectedException
- self.excinfo = None
-
- def __enter__(self):
- self.excinfo = object.__new__(py.code.ExceptionInfo)
- return self.excinfo
-
- def __exit__(self, *tp):
- __tracebackhide__ = True
- if tp[0] is None:
- pytest.fail("DID NOT RAISE")
- self.excinfo.__init__(tp)
- return issubclass(self.excinfo.type, self.ExpectedException)
-
diff --git a/_pytest/recwarn.py b/_pytest/recwarn.py
deleted file mode 100644
--- a/_pytest/recwarn.py
+++ /dev/null
@@ -1,99 +0,0 @@
-""" recording warnings during test function execution. """
-
-import py
-import sys, os
-
-def pytest_funcarg__recwarn(request):
- """Return a WarningsRecorder instance that provides these methods:
-
- * ``pop(category=None)``: return last warning matching the category.
- * ``clear()``: clear list of warnings
-
- See http://docs.python.org/library/warnings.html for information
- on warning categories.
- """
- if sys.version_info >= (2,7):
- import warnings
- oldfilters = warnings.filters[:]
- warnings.simplefilter('default')
- def reset_filters():
- warnings.filters[:] = oldfilters
- request.addfinalizer(reset_filters)
- wrec = WarningsRecorder()
- request.addfinalizer(wrec.finalize)
- return wrec
-
-def pytest_namespace():
- return {'deprecated_call': deprecated_call}
-
-def deprecated_call(func, *args, **kwargs):
- """ assert that calling ``func(*args, **kwargs)``
- triggers a DeprecationWarning.
- """
- warningmodule = py.std.warnings
- l = []
- oldwarn_explicit = getattr(warningmodule, 'warn_explicit')
- def warn_explicit(*args, **kwargs):
- l.append(args)
- oldwarn_explicit(*args, **kwargs)
- oldwarn = getattr(warningmodule, 'warn')
- def warn(*args, **kwargs):
- l.append(args)
- oldwarn(*args, **kwargs)
-
- warningmodule.warn_explicit = warn_explicit
- warningmodule.warn = warn
- try:
- ret = func(*args, **kwargs)
- finally:
- warningmodule.warn_explicit = warn_explicit
- warningmodule.warn = warn
- if not l:
- #print warningmodule
- __tracebackhide__ = True
- raise AssertionError("%r did not produce DeprecationWarning" %(func,))
- return ret
-
-
-class RecordedWarning:
- def __init__(self, message, category, filename, lineno, line):
- self.message = message
- self.category = category
- self.filename = filename
- self.lineno = lineno
- self.line = line
-
-class WarningsRecorder:
- def __init__(self):
- warningmodule = py.std.warnings
- self.list = []
- def showwarning(message, category, filename, lineno, line=0):
- self.list.append(RecordedWarning(
- message, category, filename, lineno, line))
- try:
- self.old_showwarning(message, category,
- filename, lineno, line=line)
- except TypeError:
- # < python2.6
- self.old_showwarning(message, category, filename, lineno)
- self.old_showwarning = warningmodule.showwarning
- warningmodule.showwarning = showwarning
-
- def pop(self, cls=Warning):
- """ pop the first recorded warning, raise exception if not exists."""
- for i, w in enumerate(self.list):
- if issubclass(w.category, cls):
- return self.list.pop(i)
- __tracebackhide__ = True
- assert 0, "%r not found in %r" %(cls, self.list)
-
- #def resetregistry(self):
- # import warnings
- # warnings.onceregistry.clear()
- # warnings.__warningregistry__.clear()
-
- def clear(self):
- self.list[:] = []
-
- def finalize(self):
- py.std.warnings.showwarning = self.old_showwarning
diff --git a/_pytest/resultlog.py b/_pytest/resultlog.py
deleted file mode 100644
--- a/_pytest/resultlog.py
+++ /dev/null
@@ -1,98 +0,0 @@
-""" (disabled by default) create result information in a plain text file. """
-
-import py
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "resultlog plugin options")
- group.addoption('--resultlog', action="store", dest="resultlog",
- metavar="path", default=None,
- help="path for machine-readable result log.")
-
-def pytest_configure(config):
- resultlog = config.option.resultlog
- # prevent opening resultlog on slave nodes (xdist)
- if resultlog and not hasattr(config, 'slaveinput'):
- logfile = open(resultlog, 'w', 1) # line buffered
- config._resultlog = ResultLog(config, logfile)
- config.pluginmanager.register(config._resultlog)
-
-def pytest_unconfigure(config):
- resultlog = getattr(config, '_resultlog', None)
- if resultlog:
- resultlog.logfile.close()
- del config._resultlog
- config.pluginmanager.unregister(resultlog)
-
-def generic_path(item):
- chain = item.listchain()
- gpath = [chain[0].name]
- fspath = chain[0].fspath
- fspart = False
- for node in chain[1:]:
- newfspath = node.fspath
- if newfspath == fspath:
- if fspart:
- gpath.append(':')
- fspart = False
- else:
- gpath.append('.')
- else:
- gpath.append('/')
- fspart = True
- name = node.name
- if name[0] in '([':
- gpath.pop()
- gpath.append(name)
- fspath = newfspath
- return ''.join(gpath)
-
-class ResultLog(object):
- def __init__(self, config, logfile):
- self.config = config
- self.logfile = logfile # preferably line buffered
-
- def write_log_entry(self, testpath, lettercode, longrepr):
- py.builtin.print_("%s %s" % (lettercode, testpath), file=self.logfile)
- for line in longrepr.splitlines():
- py.builtin.print_(" %s" % line, file=self.logfile)
-
- def log_outcome(self, report, lettercode, longrepr):
- testpath = getattr(report, 'nodeid', None)
- if testpath is None:
- testpath = report.fspath
- self.write_log_entry(testpath, lettercode, longrepr)
-
- def pytest_runtest_logreport(self, report):
- if report.when != "call" and report.passed:
- return
- res = self.config.hook.pytest_report_teststatus(report=report)
- code = res[1]
- if code == 'x':
- longrepr = str(report.longrepr)
- elif code == 'X':
- longrepr = ''
- elif report.passed:
- longrepr = ""
- elif report.failed:
- longrepr = str(report.longrepr)
- elif report.skipped:
- longrepr = str(report.longrepr[2])
- self.log_outcome(report, code, longrepr)
-
- def pytest_collectreport(self, report):
- if not report.passed:
- if report.failed:
- code = "F"
- longrepr = str(report.longrepr.reprcrash)
- else:
- assert report.skipped
- code = "S"
- longrepr = "%s:%d: %s" % report.longrepr
- self.log_outcome(report, code, longrepr)
-
- def pytest_internalerror(self, excrepr):
- reprcrash = getattr(excrepr, 'reprcrash', None)
- path = getattr(reprcrash, "path", None)
- if path is None:
- path = "cwd:%s" % py.path.local()
- self.write_log_entry(path, '!', str(excrepr))
diff --git a/_pytest/runner.py b/_pytest/runner.py
deleted file mode 100644
--- a/_pytest/runner.py
+++ /dev/null
@@ -1,432 +0,0 @@
-""" basic collect and runtest protocol implementations """
-
-import py, sys, time
-from py._code.code import TerminalRepr
-
-def pytest_namespace():
- return {
- 'fail' : fail,
- 'skip' : skip,
- 'importorskip' : importorskip,
- 'exit' : exit,
- }
-
-#
-# pytest plugin hooks
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "reporting", after="general")
- group.addoption('--durations',
- action="store", type="int", default=None, metavar="N",
- help="show N slowest setup/test durations (N=0 for all)."),
-
-def pytest_terminal_summary(terminalreporter):
- durations = terminalreporter.config.option.durations
- if durations is None:
- return
- tr = terminalreporter
- dlist = []
- for replist in tr.stats.values():
- for rep in replist:
- if hasattr(rep, 'duration'):
- dlist.append(rep)
- if not dlist:
- return
- dlist.sort(key=lambda x: x.duration)
- dlist.reverse()
- if not durations:
- tr.write_sep("=", "slowest test durations")
- else:
- tr.write_sep("=", "slowest %s test durations" % durations)
- dlist = dlist[:durations]
-
- for rep in dlist:
- nodeid = rep.nodeid.replace("::()::", "::")
- tr.write_line("%02.2fs %-8s %s" %
- (rep.duration, rep.when, nodeid))
-
-def pytest_sessionstart(session):
- session._setupstate = SetupState()
-def pytest_sessionfinish(session):
- session._setupstate.teardown_all()
-
-class NodeInfo:
- def __init__(self, location):
- self.location = location
-
-def pytest_runtest_protocol(item, nextitem):
- item.ihook.pytest_runtest_logstart(
- nodeid=item.nodeid, location=item.location,
- )
- runtestprotocol(item, nextitem=nextitem)
- return True
-
-def runtestprotocol(item, log=True, nextitem=None):
- rep = call_and_report(item, "setup", log)
- reports = [rep]
- if rep.passed:
- reports.append(call_and_report(item, "call", log))
- reports.append(call_and_report(item, "teardown", log,
- nextitem=nextitem))
- return reports
-
-def pytest_runtest_setup(item):
- item.session._setupstate.prepare(item)
-
-def pytest_runtest_call(item):
- item.runtest()
-
-def pytest_runtest_teardown(item, nextitem):
- item.session._setupstate.teardown_exact(item, nextitem)
-
-def pytest_report_teststatus(report):
- if report.when in ("setup", "teardown"):
- if report.failed:
- # category, shortletter, verbose-word
- return "error", "E", "ERROR"
- elif report.skipped:
- return "skipped", "s", "SKIPPED"
- else:
- return "", "", ""
-
-
-#
-# Implementation
-
-def call_and_report(item, when, log=True, **kwds):
- call = call_runtest_hook(item, when, **kwds)
- hook = item.ihook
- report = hook.pytest_runtest_makereport(item=item, call=call)
- if log:
- hook.pytest_runtest_logreport(report=report)
- return report
-
-def call_runtest_hook(item, when, **kwds):
- hookname = "pytest_runtest_" + when
- ihook = getattr(item.ihook, hookname)
- return CallInfo(lambda: ihook(item=item, **kwds), when=when)
-
-class CallInfo:
- """ Result/Exception info a function invocation. """
- #: None or ExceptionInfo object.
- excinfo = None
- def __init__(self, func, when):
- #: context of invocation: one of "setup", "call",
- #: "teardown", "memocollect"
- self.when = when
- self.start = time.time()
- try:
- try:
- self.result = func()
- except KeyboardInterrupt:
- raise
- except:
- self.excinfo = py.code.ExceptionInfo()
- finally:
- self.stop = time.time()
-
- def __repr__(self):
- if self.excinfo:
- status = "exception: %s" % str(self.excinfo.value)
- else:
- status = "result: %r" % (self.result,)
- return "" % (self.when, status)
-
-def getslaveinfoline(node):
- try:
- return node._slaveinfocache
- except AttributeError:
- d = node.slaveinfo
- ver = "%s.%s.%s" % d['version_info'][:3]
- node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
- d['id'], d['sysplatform'], ver, d['executable'])
- return s
-
-class BaseReport(object):
-
- def __init__(self, **kw):
- self.__dict__.update(kw)
-
- def toterminal(self, out):
- longrepr = self.longrepr
- if hasattr(self, 'node'):
- out.line(getslaveinfoline(self.node))
- if hasattr(longrepr, 'toterminal'):
- longrepr.toterminal(out)
- else:
- out.line(str(longrepr))
-
- passed = property(lambda x: x.outcome == "passed")
- failed = property(lambda x: x.outcome == "failed")
- skipped = property(lambda x: x.outcome == "skipped")
-
- @property
- def fspath(self):
- return self.nodeid.split("::")[0]
-
-def pytest_runtest_makereport(item, call):
- when = call.when
- duration = call.stop-call.start
- keywords = dict([(x,1) for x in item.keywords])
- excinfo = call.excinfo
- if not call.excinfo:
- outcome = "passed"
- longrepr = None
- else:
- excinfo = call.excinfo
- if not isinstance(excinfo, py.code.ExceptionInfo):
- outcome = "failed"
- longrepr = excinfo
- elif excinfo.errisinstance(py.test.skip.Exception):
- outcome = "skipped"
- r = excinfo._getreprcrash()
- longrepr = (str(r.path), r.lineno, r.message)
- else:
- outcome = "failed"
- if call.when == "call":
- longrepr = item.repr_failure(excinfo)
- else: # exception in setup or teardown
- longrepr = item._repr_failure_py(excinfo)
- return TestReport(item.nodeid, item.location,
- keywords, outcome, longrepr, when,
- duration=duration)
-
-class TestReport(BaseReport):
- """ Basic test report object (also used for setup and teardown calls if
- they fail).
- """
- def __init__(self, nodeid, location,
- keywords, outcome, longrepr, when, sections=(), duration=0, **extra):
- #: normalized collection node id
- self.nodeid = nodeid
-
- #: a (filesystempath, lineno, domaininfo) tuple indicating the
- #: actual location of a test item - it might be different from the
- #: collected one e.g. if a method is inherited from a different module.
- self.location = location
-
- #: a name -> value dictionary containing all keywords and
- #: markers associated with a test invocation.
- self.keywords = keywords
-
- #: test outcome, always one of "passed", "failed", "skipped".
- self.outcome = outcome
-
- #: None or a failure representation.
- self.longrepr = longrepr
-
- #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
- self.when = when
-
- #: list of (secname, data) extra information which needs to
- #: marshallable
- self.sections = list(sections)
-
- #: time it took to run just the test
- self.duration = duration
-
- self.__dict__.update(extra)
-
- def __repr__(self):
- return "" % (
- self.nodeid, self.when, self.outcome)
-
-class TeardownErrorReport(BaseReport):
- outcome = "failed"
- when = "teardown"
- def __init__(self, longrepr, **extra):
- self.longrepr = longrepr
- self.sections = []
- self.__dict__.update(extra)
-
-def pytest_make_collect_report(collector):
- call = CallInfo(collector._memocollect, "memocollect")
- longrepr = None
- if not call.excinfo:
- outcome = "passed"
- else:
- if call.excinfo.errisinstance(py.test.skip.Exception):
- outcome = "skipped"
- r = collector._repr_failure_py(call.excinfo, "line").reprcrash
- longrepr = (str(r.path), r.lineno, r.message)
- else:
- outcome = "failed"
- errorinfo = collector.repr_failure(call.excinfo)
- if not hasattr(errorinfo, "toterminal"):
- errorinfo = CollectErrorRepr(errorinfo)
- longrepr = errorinfo
- return CollectReport(collector.nodeid, outcome, longrepr,
- getattr(call, 'result', None))
-
-class CollectReport(BaseReport):
- def __init__(self, nodeid, outcome, longrepr, result, sections=(), **extra):
- self.nodeid = nodeid
- self.outcome = outcome
- self.longrepr = longrepr
- self.result = result or []
- self.sections = list(sections)
- self.__dict__.update(extra)
-
- @property
- def location(self):
- return (self.fspath, None, self.fspath)
-
- def __repr__(self):
- return "" % (
- self.nodeid, len(self.result), self.outcome)
-
-class CollectErrorRepr(TerminalRepr):
- def __init__(self, msg):
- self.longrepr = msg
- def toterminal(self, out):
- out.line(str(self.longrepr), red=True)
-
-class SetupState(object):
- """ shared state for setting up/tearing down test items or collectors. """
- def __init__(self):
- self.stack = []
- self._finalizers = {}
-
- def addfinalizer(self, finalizer, colitem):
- """ attach a finalizer to the given colitem.
- if colitem is None, this will add a finalizer that
- is called at the end of teardown_all().
- """
- assert hasattr(finalizer, '__call__')
- #assert colitem in self.stack
- self._finalizers.setdefault(colitem, []).append(finalizer)
-
- def _pop_and_teardown(self):
- colitem = self.stack.pop()
- self._teardown_with_finalization(colitem)
-
- def _callfinalizers(self, colitem):
- finalizers = self._finalizers.pop(colitem, None)
- while finalizers:
- fin = finalizers.pop()
- fin()
-
- def _teardown_with_finalization(self, colitem):
- self._callfinalizers(colitem)
- if colitem:
- colitem.teardown()
- for colitem in self._finalizers:
- assert colitem is None or colitem in self.stack
-
- def teardown_all(self):
- while self.stack:
- self._pop_and_teardown()
- self._teardown_with_finalization(None)
- assert not self._finalizers
-
- def teardown_exact(self, item, nextitem):
- needed_collectors = nextitem and nextitem.listchain() or []
- self._teardown_towards(needed_collectors)
-
- def _teardown_towards(self, needed_collectors):
- while self.stack:
- if self.stack == needed_collectors[:len(self.stack)]:
- break
- self._pop_and_teardown()
-
- def prepare(self, colitem):
- """ setup objects along the collector chain to the test-method
- and teardown previously setup objects."""
- needed_collectors = colitem.listchain()
- self._teardown_towards(needed_collectors)
-
- # check if the last collection node has raised an error
- for col in self.stack:
- if hasattr(col, '_prepare_exc'):
- py.builtin._reraise(*col._prepare_exc)
- for col in needed_collectors[len(self.stack):]:
- self.stack.append(col)
- try:
- col.setup()
- except Exception:
- col._prepare_exc = sys.exc_info()
- raise
-
-# =============================================================
-# Test OutcomeExceptions and helpers for creating them.
-
-
-class OutcomeException(Exception):
- """ OutcomeException and its subclass instances indicate and
- contain info about test and collection outcomes.
- """
- def __init__(self, msg=None, pytrace=True):
- self.msg = msg
- self.pytrace = pytrace
-
- def __repr__(self):
- if self.msg:
- return str(self.msg)
- return "<%s instance>" %(self.__class__.__name__,)
- __str__ = __repr__
-
-class Skipped(OutcomeException):
- # XXX hackish: on 3k we fake to live in the builtins
- # in order to have Skipped exception printing shorter/nicer
- __module__ = 'builtins'
-
-class Failed(OutcomeException):
- """ raised from an explicit call to py.test.fail() """
- __module__ = 'builtins'
-
-class Exit(KeyboardInterrupt):
- """ raised for immediate program exits (no tracebacks/summaries)"""
- def __init__(self, msg="unknown reason"):
- self.msg = msg
- KeyboardInterrupt.__init__(self, msg)
-
-# exposed helper methods
-
-def exit(msg):
- """ exit testing process as if KeyboardInterrupt was triggered. """
- __tracebackhide__ = True
- raise Exit(msg)
-
-exit.Exception = Exit
-
-def skip(msg=""):
- """ skip an executing test with the given message. Note: it's usually
- better to use the py.test.mark.skipif marker to declare a test to be
- skipped under certain conditions like mismatching platforms or
- dependencies. See the pytest_skipping plugin for details.
- """
- __tracebackhide__ = True
- raise Skipped(msg=msg)
-skip.Exception = Skipped
-
-def fail(msg="", pytrace=True):
- """ explicitely fail an currently-executing test with the given Message.
- if @pytrace is not True the msg represents the full failure information.
- """
- __tracebackhide__ = True
- raise Failed(msg=msg, pytrace=pytrace)
-fail.Exception = Failed
-
-
-def importorskip(modname, minversion=None):
- """ return imported module if it has a higher __version__ than the
- optionally specified 'minversion' - otherwise call py.test.skip()
- with a message detailing the mismatch.
- """
- __tracebackhide__ = True
- compile(modname, '', 'eval') # to catch syntaxerrors
- try:
- mod = __import__(modname, None, None, ['__doc__'])
- except ImportError:
- py.test.skip("could not import %r" %(modname,))
- if minversion is None:
- return mod
- verattr = getattr(mod, '__version__', None)
- if isinstance(minversion, str):
- minver = minversion.split(".")
- else:
- minver = list(minversion)
- if verattr is None or verattr.split(".") < minver:
- py.test.skip("module %r has __version__ %r, required is: %r" %(
- modname, verattr, minversion))
- return mod
diff --git a/_pytest/skipping.py b/_pytest/skipping.py
deleted file mode 100644
--- a/_pytest/skipping.py
+++ /dev/null
@@ -1,271 +0,0 @@
-""" support for skip/xfail functions and markers. """
-
-import py, pytest
-import sys
-
-def pytest_addoption(parser):
- group = parser.getgroup("general")
- group.addoption('--runxfail',
- action="store_true", dest="runxfail", default=False,
- help="run tests even if they are marked xfail")
-
-def pytest_configure(config):
- config.addinivalue_line("markers",
- "skipif(*conditions): skip the given test function if evaluation "
- "of all conditions has a True value. Evaluation happens within the "
- "module global context. Example: skipif('sys.platform == \"win32\"') "
- "skips the test if we are on the win32 platform. "
- )
- config.addinivalue_line("markers",
- "xfail(*conditions, reason=None, run=True): mark the the test function "
- "as an expected failure. Optionally specify a reason and run=False "
- "if you don't even want to execute the test function. Any positional "
- "condition strings will be evaluated (like with skipif) and if one is "
- "False the marker will not be applied."
- )
-
-def pytest_namespace():
- return dict(xfail=xfail)
-
-class XFailed(pytest.fail.Exception):
- """ raised from an explicit call to py.test.xfail() """
-
-def xfail(reason=""):
- """ xfail an executing test or setup functions with the given reason."""
- __tracebackhide__ = True
- raise XFailed(reason)
-xfail.Exception = XFailed
-
-class MarkEvaluator:
- def __init__(self, item, name):
- self.item = item
- self.name = name
-
- @property
- def holder(self):
- return self.item.keywords.get(self.name, None)
- def __bool__(self):
- return bool(self.holder)
- __nonzero__ = __bool__
-
- def wasvalid(self):
- return not hasattr(self, 'exc')
-
- def istrue(self):
- try:
- return self._istrue()
- except KeyboardInterrupt:
- raise
- except:
- self.exc = sys.exc_info()
- if isinstance(self.exc[1], SyntaxError):
- msg = [" " * (self.exc[1].offset + 4) + "^",]
- msg.append("SyntaxError: invalid syntax")
- else:
- msg = py.std.traceback.format_exception_only(*self.exc[:2])
- pytest.fail("Error evaluating %r expression\n"
- " %s\n"
- "%s"
- %(self.name, self.expr, "\n".join(msg)),
- pytrace=False)
-
- def _getglobals(self):
- d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config}
- func = self.item.obj
- try:
- d.update(func.__globals__)
- except AttributeError:
- d.update(func.func_globals)
- return d
-
- def _istrue(self):
- if self.holder:
- d = self._getglobals()
- if self.holder.args:
- self.result = False
- for expr in self.holder.args:
- self.expr = expr
- if isinstance(expr, str):
- result = cached_eval(self.item.config, expr, d)
- else:
- pytest.fail("expression is not a string")
- if result:
- self.result = True
- self.expr = expr
- break
- else:
- self.result = True
- return getattr(self, 'result', False)
-
- def get(self, attr, default=None):
- return self.holder.kwargs.get(attr, default)
-
- def getexplanation(self):
- expl = self.get('reason', None)
- if not expl:
- if not hasattr(self, 'expr'):
- return ""
- else:
- return "condition: " + str(self.expr)
- return expl
-
-
-def pytest_runtest_setup(item):
- if not isinstance(item, pytest.Function):
- return
- evalskip = MarkEvaluator(item, 'skipif')
- if evalskip.istrue():
- py.test.skip(evalskip.getexplanation())
- item._evalxfail = MarkEvaluator(item, 'xfail')
- check_xfail_no_run(item)
-
-def pytest_pyfunc_call(pyfuncitem):
- check_xfail_no_run(pyfuncitem)
-
-def check_xfail_no_run(item):
- if not item.config.option.runxfail:
- evalxfail = item._evalxfail
- if evalxfail.istrue():
- if not evalxfail.get('run', True):
- py.test.xfail("[NOTRUN] " + evalxfail.getexplanation())
-
-def pytest_runtest_makereport(__multicall__, item, call):
- if not isinstance(item, pytest.Function):
- return
- # unitttest special case, see setting of _unexpectedsuccess
- if hasattr(item, '_unexpectedsuccess'):
- rep = __multicall__.execute()
- if rep.when == "call":
- # we need to translate into how py.test encodes xpass
- rep.keywords['xfail'] = "reason: " + item._unexpectedsuccess
- rep.outcome = "failed"
- return rep
- if not (call.excinfo and
- call.excinfo.errisinstance(py.test.xfail.Exception)):
- evalxfail = getattr(item, '_evalxfail', None)
- if not evalxfail:
- return
- if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception):
- if not item.config.getvalue("runxfail"):
- rep = __multicall__.execute()
- rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg
- rep.outcome = "skipped"
- return rep
- rep = __multicall__.execute()
- evalxfail = item._evalxfail
- if not item.config.option.runxfail:
- if evalxfail.wasvalid() and evalxfail.istrue():
- if call.excinfo:
- rep.outcome = "skipped"
- rep.keywords['xfail'] = evalxfail.getexplanation()
- elif call.when == "call":
- rep.outcome = "failed"
- rep.keywords['xfail'] = evalxfail.getexplanation()
- return rep
- if 'xfail' in rep.keywords:
- del rep.keywords['xfail']
- return rep
-
-# called by terminalreporter progress reporting
-def pytest_report_teststatus(report):
- if 'xfail' in report.keywords:
- if report.skipped:
- return "xfailed", "x", "xfail"
- elif report.failed:
- return "xpassed", "X", "XPASS"
-
-# called by the terminalreporter instance/plugin
-def pytest_terminal_summary(terminalreporter):
- tr = terminalreporter
- if not tr.reportchars:
- #for name in "xfailed skipped failed xpassed":
- # if not tr.stats.get(name, 0):
- # tr.write_line("HINT: use '-r' option to see extra "
- # "summary info about tests")
- # break
- return
-
- lines = []
- for char in tr.reportchars:
- if char == "x":
- show_xfailed(terminalreporter, lines)
- elif char == "X":
- show_xpassed(terminalreporter, lines)
- elif char in "fF":
- show_simple(terminalreporter, lines, 'failed', "FAIL %s")
- elif char in "sS":
- show_skipped(terminalreporter, lines)
- elif char == "E":
- show_simple(terminalreporter, lines, 'error', "ERROR %s")
- if lines:
- tr._tw.sep("=", "short test summary info")
- for line in lines:
- tr._tw.line(line)
-
-def show_simple(terminalreporter, lines, stat, format):
- tw = terminalreporter._tw
- failed = terminalreporter.stats.get(stat)
- if failed:
- for rep in failed:
- pos = rep.nodeid
- lines.append(format %(pos, ))
-
-def show_xfailed(terminalreporter, lines):
- xfailed = terminalreporter.stats.get("xfailed")
- if xfailed:
- for rep in xfailed:
- pos = rep.nodeid
- reason = rep.keywords['xfail']
- lines.append("XFAIL %s" % (pos,))
- if reason:
- lines.append(" " + str(reason))
-
-def show_xpassed(terminalreporter, lines):
- xpassed = terminalreporter.stats.get("xpassed")
- if xpassed:
- for rep in xpassed:
- pos = rep.nodeid
- reason = rep.keywords['xfail']
- lines.append("XPASS %s %s" %(pos, reason))
-
-def cached_eval(config, expr, d):
- if not hasattr(config, '_evalcache'):
- config._evalcache = {}
- try:
- return config._evalcache[expr]
- except KeyError:
- #import sys
- #print >>sys.stderr, ("cache-miss: %r" % expr)
- exprcode = py.code.compile(expr, mode="eval")
- config._evalcache[expr] = x = eval(exprcode, d)
- return x
-
-
-def folded_skips(skipped):
- d = {}
- for event in skipped:
- key = event.longrepr
- assert len(key) == 3, (event, key)
- d.setdefault(key, []).append(event)
- l = []
- for key, events in d.items():
- l.append((len(events),) + key)
- return l
-
-def show_skipped(terminalreporter, lines):
- tr = terminalreporter
- skipped = tr.stats.get('skipped', [])
- if skipped:
- #if not tr.hasopt('skipped'):
- # tr.write_line(
- # "%d skipped tests, specify -rs for more info" %
- # len(skipped))
- # return
- fskips = folded_skips(skipped)
- if fskips:
- #tr.write_sep("_", "skipped test summary")
- for num, fspath, lineno, reason in fskips:
- if reason.startswith("Skipped: "):
- reason = reason[9:]
- lines.append("SKIP [%d] %s:%d: %s" %
- (num, fspath, lineno, reason))
diff --git a/_pytest/standalonetemplate.py b/_pytest/standalonetemplate.py
deleted file mode 100755
--- a/_pytest/standalonetemplate.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#! /usr/bin/env python
-
-sources = """
- at SOURCES@"""
-
-import sys
-import base64
-import zlib
-import imp
-
-class DictImporter(object):
- def __init__(self, sources):
- self.sources = sources
-
- def find_module(self, fullname, path=None):
- if fullname in self.sources:
- return self
- if fullname + '.__init__' in self.sources:
- return self
- return None
-
- def load_module(self, fullname):
- # print "load_module:", fullname
- from types import ModuleType
- try:
- s = self.sources[fullname]
- is_pkg = False
- except KeyError:
- s = self.sources[fullname + '.__init__']
- is_pkg = True
-
- co = compile(s, fullname, 'exec')
- module = sys.modules.setdefault(fullname, ModuleType(fullname))
- module.__file__ = "%s/%s" % (__file__, fullname)
- module.__loader__ = self
- if is_pkg:
- module.__path__ = [fullname]
-
- do_exec(co, module.__dict__)
- return sys.modules[fullname]
-
- def get_source(self, name):
- res = self.sources.get(name)
- if res is None:
- res = self.sources.get(name + '.__init__')
- return res
-
-if __name__ == "__main__":
- if sys.version_info >= (3, 0):
- exec("def do_exec(co, loc): exec(co, loc)\n")
- import pickle
- sources = sources.encode("ascii") # ensure bytes
- sources = pickle.loads(zlib.decompress(base64.decodebytes(sources)))
- else:
- import cPickle as pickle
- exec("def do_exec(co, loc): exec co in loc\n")
- sources = pickle.loads(zlib.decompress(base64.decodestring(sources)))
-
- importer = DictImporter(sources)
- sys.meta_path.append(importer)
-
- entry = "@ENTRY@"
- do_exec(entry, locals())
diff --git a/_pytest/terminal.py b/_pytest/terminal.py
deleted file mode 100644
--- a/_pytest/terminal.py
+++ /dev/null
@@ -1,473 +0,0 @@
-""" terminal reporting of the full testing process.
-
-This is a good source for looking at the various reporting hooks.
-"""
-import pytest, py
-import sys
-import os
-
-def pytest_addoption(parser):
- group = parser.getgroup("terminal reporting", "reporting", after="general")
- group._addoption('-v', '--verbose', action="count",
- dest="verbose", default=0, help="increase verbosity."),
- group._addoption('-q', '--quiet', action="count",
- dest="quiet", default=0, help="decreate verbosity."),
- group._addoption('-r',
- action="store", dest="reportchars", default=None, metavar="chars",
- help="show extra test summary info as specified by chars (f)ailed, "
- "(E)error, (s)skipped, (x)failed, (X)passed.")
- group._addoption('-l', '--showlocals',
- action="store_true", dest="showlocals", default=False,
- help="show locals in tracebacks (disabled by default).")
- group._addoption('--report',
- action="store", dest="report", default=None, metavar="opts",
- help="(deprecated, use -r)")
- group._addoption('--tb', metavar="style",
- action="store", dest="tbstyle", default='long',
- type="choice", choices=['long', 'short', 'no', 'line', 'native'],
- help="traceback print mode (long/short/line/native/no).")
- group._addoption('--fulltrace',
- action="store_true", dest="fulltrace", default=False,
- help="don't cut any tracebacks (default is to cut).")
-
-def pytest_configure(config):
- config.option.verbose -= config.option.quiet
- # we try hard to make printing resilient against
- # later changes on FD level.
- stdout = py.std.sys.stdout
- if hasattr(os, 'dup') and hasattr(stdout, 'fileno'):
- try:
- newfd = os.dup(stdout.fileno())
- #print "got newfd", newfd
- except ValueError:
- pass
- else:
- stdout = os.fdopen(newfd, stdout.mode, 1)
- config._cleanup.append(lambda: stdout.close())
-
- reporter = TerminalReporter(config, stdout)
- config.pluginmanager.register(reporter, 'terminalreporter')
- if config.option.debug or config.option.traceconfig:
- def mywriter(tags, args):
- msg = " ".join(map(str, args))
- reporter.write_line("[traceconfig] " + msg)
- config.trace.root.setprocessor("pytest:config", mywriter)
-
-def getreportopt(config):
- reportopts = ""
- optvalue = config.option.report
- if optvalue:
- py.builtin.print_("DEPRECATED: use -r instead of --report option.",
- file=py.std.sys.stderr)
- if optvalue:
- for setting in optvalue.split(","):
- setting = setting.strip()
- if setting == "skipped":
- reportopts += "s"
- elif setting == "xfailed":
- reportopts += "x"
- reportchars = config.option.reportchars
- if reportchars:
- for char in reportchars:
- if char not in reportopts:
- reportopts += char
- return reportopts
-
-def pytest_report_teststatus(report):
- if report.passed:
- letter = "."
- elif report.skipped:
- letter = "s"
- elif report.failed:
- letter = "F"
- if report.when != "call":
- letter = "f"
- return report.outcome, letter, report.outcome.upper()
-
-class TerminalReporter:
- def __init__(self, config, file=None):
- self.config = config
- self.verbosity = self.config.option.verbose
- self.showheader = self.verbosity >= 0
- self.showfspath = self.verbosity >= 0
- self.showlongtestinfo = self.verbosity > 0
- self._numcollected = 0
-
- self.stats = {}
- self.curdir = py.path.local()
- if file is None:
- file = py.std.sys.stdout
- self._tw = py.io.TerminalWriter(file)
- self.currentfspath = None
- self.reportchars = getreportopt(config)
- self.hasmarkup = self._tw.hasmarkup
-
- def hasopt(self, char):
- char = {'xfailed': 'x', 'skipped': 's'}.get(char,char)
- return char in self.reportchars
-
- def write_fspath_result(self, fspath, res):
- if fspath != self.currentfspath:
- self.currentfspath = fspath
- #fspath = self.curdir.bestrelpath(fspath)
- self._tw.line()
- #relpath = self.curdir.bestrelpath(fspath)
- self._tw.write(fspath + " ")
- self._tw.write(res)
-
- def write_ensure_prefix(self, prefix, extra="", **kwargs):
- if self.currentfspath != prefix:
- self._tw.line()
- self.currentfspath = prefix
- self._tw.write(prefix)
- if extra:
- self._tw.write(extra, **kwargs)
- self.currentfspath = -2
-
- def ensure_newline(self):
- if self.currentfspath:
- self._tw.line()
- self.currentfspath = None
-
- def write(self, content, **markup):
- self._tw.write(content, **markup)
-
- def write_line(self, line, **markup):
- line = str(line)
- self.ensure_newline()
- self._tw.line(line, **markup)
-
- def rewrite(self, line, **markup):
- line = str(line)
- self._tw.write("\r" + line, **markup)
-
- def write_sep(self, sep, title=None, **markup):
- self.ensure_newline()
- self._tw.sep(sep, title, **markup)
-
- def pytest_internalerror(self, excrepr):
- for line in str(excrepr).split("\n"):
- self.write_line("INTERNALERROR> " + line)
- return 1
-
- def pytest_plugin_registered(self, plugin):
- if self.config.option.traceconfig:
- msg = "PLUGIN registered: %s" %(plugin,)
- # XXX this event may happen during setup/teardown time
- # which unfortunately captures our output here
- # which garbles our output if we use self.write_line
- self.write_line(msg)
-
- def pytest_deselected(self, items):
- self.stats.setdefault('deselected', []).extend(items)
-
- def pytest_runtest_logstart(self, nodeid, location):
- # ensure that the path is printed before the
- # 1st test of a module starts running
- fspath = nodeid.split("::")[0]
- if self.showlongtestinfo:
- line = self._locationline(fspath, *location)
- self.write_ensure_prefix(line, "")
- elif self.showfspath:
- self.write_fspath_result(fspath, "")
-
- def pytest_runtest_logreport(self, report):
- rep = report
- res = self.config.hook.pytest_report_teststatus(report=rep)
- cat, letter, word = res
- self.stats.setdefault(cat, []).append(rep)
- if not letter and not word:
- # probably passed setup/teardown
- return
- if self.verbosity <= 0:
- if not hasattr(rep, 'node') and self.showfspath:
- self.write_fspath_result(rep.fspath, letter)
- else:
- self._tw.write(letter)
- else:
- if isinstance(word, tuple):
- word, markup = word
- else:
- if rep.passed:
- markup = {'green':True}
- elif rep.failed:
- markup = {'red':True}
- elif rep.skipped:
- markup = {'yellow':True}
- line = self._locationline(str(rep.fspath), *rep.location)
- if not hasattr(rep, 'node'):
- self.write_ensure_prefix(line, word, **markup)
- #self._tw.write(word, **markup)
- else:
- self.ensure_newline()
- if hasattr(rep, 'node'):
- self._tw.write("[%s] " % rep.node.gateway.id)
- self._tw.write(word, **markup)
- self._tw.write(" " + line)
- self.currentfspath = -2
-
- def pytest_collection(self):
- if not self.hasmarkup:
- self.write("collecting ... ", bold=True)
-
- def pytest_collectreport(self, report):
- if report.failed:
- self.stats.setdefault("error", []).append(report)
- elif report.skipped:
- self.stats.setdefault("skipped", []).append(report)
- items = [x for x in report.result if isinstance(x, pytest.Item)]
- self._numcollected += len(items)
- if self.hasmarkup:
- #self.write_fspath_result(report.fspath, 'E')
- self.report_collect()
-
- def report_collect(self, final=False):
- errors = len(self.stats.get('error', []))
- skipped = len(self.stats.get('skipped', []))
- if final:
- line = "collected "
- else:
- line = "collecting "
- line += str(self._numcollected) + " items"
- if errors:
- line += " / %d errors" % errors
- if skipped:
- line += " / %d skipped" % skipped
- if self.hasmarkup:
- if final:
- line += " \n"
- self.rewrite(line, bold=True)
- else:
- self.write_line(line)
-
- def pytest_collection_modifyitems(self):
- self.report_collect(True)
-
- def pytest_sessionstart(self, session):
- self._sessionstarttime = py.std.time.time()
- if not self.showheader:
- return
- self.write_sep("=", "test session starts", bold=True)
- verinfo = ".".join(map(str, sys.version_info[:3]))
- msg = "platform %s -- Python %s" % (sys.platform, verinfo)
- if hasattr(sys, 'pypy_version_info'):
- verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
- msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
- msg += " -- pytest-%s" % (py.test.__version__)
- if self.verbosity > 0 or self.config.option.debug or \
- getattr(self.config.option, 'pastebin', None):
- msg += " -- " + str(sys.executable)
- self.write_line(msg)
- lines = self.config.hook.pytest_report_header(config=self.config)
- lines.reverse()
- for line in flatten(lines):
- self.write_line(line)
-
- def pytest_collection_finish(self, session):
- if self.config.option.collectonly:
- self._printcollecteditems(session.items)
- if self.stats.get('failed'):
- self._tw.sep("!", "collection failures")
- for rep in self.stats.get('failed'):
- rep.toterminal(self._tw)
- return 1
- return 0
- if not self.showheader:
- return
- #for i, testarg in enumerate(self.config.args):
- # self.write_line("test path %d: %s" %(i+1, testarg))
-
- def _printcollecteditems(self, items):
- # to print out items and their parent collectors
- # we take care to leave out Instances aka ()
- # because later versions are going to get rid of them anyway
- if self.config.option.verbose < 0:
- if self.config.option.verbose < -1:
- counts = {}
- for item in items:
- name = item.nodeid.split('::', 1)[0]
- counts[name] = counts.get(name, 0) + 1
- for name, count in sorted(counts.items()):
- self._tw.line("%s: %d" % (name, count))
- else:
- for item in items:
- nodeid = item.nodeid
- nodeid = nodeid.replace("::()::", "::")
- self._tw.line(nodeid)
- return
- stack = []
- indent = ""
- for item in items:
- needed_collectors = item.listchain()[1:] # strip root node
- while stack:
- if stack == needed_collectors[:len(stack)]:
- break
- stack.pop()
- for col in needed_collectors[len(stack):]:
- stack.append(col)
- #if col.name == "()":
- # continue
- indent = (len(stack)-1) * " "
- self._tw.line("%s%s" %(indent, col))
-
- def pytest_sessionfinish(self, exitstatus, __multicall__):
- __multicall__.execute()
- self._tw.line("")
- if exitstatus in (0, 1, 2):
- self.summary_errors()
- self.summary_failures()
- self.config.hook.pytest_terminal_summary(terminalreporter=self)
- if exitstatus == 2:
- self._report_keyboardinterrupt()
- del self._keyboardinterrupt_memo
- self.summary_deselected()
- self.summary_stats()
-
- def pytest_keyboard_interrupt(self, excinfo):
- self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
-
- def pytest_unconfigure(self):
- if hasattr(self, '_keyboardinterrupt_memo'):
- self._report_keyboardinterrupt()
-
- def _report_keyboardinterrupt(self):
- excrepr = self._keyboardinterrupt_memo
- msg = excrepr.reprcrash.message
- self.write_sep("!", msg)
- if "KeyboardInterrupt" in msg:
- if self.config.option.fulltrace:
- excrepr.toterminal(self._tw)
- else:
- excrepr.reprcrash.toterminal(self._tw)
-
- def _locationline(self, collect_fspath, fspath, lineno, domain):
- # collect_fspath comes from testid which has a "/"-normalized path
- if fspath and fspath.replace("\\", "/") != collect_fspath:
- fspath = "%s <- %s" % (collect_fspath, fspath)
- if fspath:
- line = str(fspath)
- if lineno is not None:
- lineno += 1
- line += ":" + str(lineno)
- if domain:
- line += ": " + str(domain)
- else:
- line = "[location]"
- return line + " "
-
- def _getfailureheadline(self, rep):
- if hasattr(rep, 'location'):
- fspath, lineno, domain = rep.location
- return domain
- else:
- return "test session" # XXX?
-
- def _getcrashline(self, rep):
- try:
- return str(rep.longrepr.reprcrash)
- except AttributeError:
- try:
- return str(rep.longrepr)[:50]
- except AttributeError:
- return ""
-
- #
- # summaries for sessionfinish
- #
- def getreports(self, name):
- l = []
- for x in self.stats.get(name, []):
- if not hasattr(x, '_pdbshown'):
- l.append(x)
- return l
-
- def summary_failures(self):
- if self.config.option.tbstyle != "no":
- reports = self.getreports('failed')
- if not reports:
- return
- self.write_sep("=", "FAILURES")
- for rep in reports:
- if self.config.option.tbstyle == "line":
- line = self._getcrashline(rep)
- self.write_line(line)
- else:
- msg = self._getfailureheadline(rep)
- self.write_sep("_", msg)
- self._outrep_summary(rep)
-
- def summary_errors(self):
- if self.config.option.tbstyle != "no":
- reports = self.getreports('error')
- if not reports:
- return
- self.write_sep("=", "ERRORS")
- for rep in self.stats['error']:
- msg = self._getfailureheadline(rep)
- if not hasattr(rep, 'when'):
- # collect
- msg = "ERROR collecting " + msg
- elif rep.when == "setup":
- msg = "ERROR at setup of " + msg
- elif rep.when == "teardown":
- msg = "ERROR at teardown of " + msg
- self.write_sep("_", msg)
- self._outrep_summary(rep)
-
- def _outrep_summary(self, rep):
- rep.toterminal(self._tw)
- for secname, content in rep.sections:
- self._tw.sep("-", secname)
- if content[-1:] == "\n":
- content = content[:-1]
- self._tw.line(content)
-
- def summary_stats(self):
- session_duration = py.std.time.time() - self._sessionstarttime
-
- keys = "failed passed skipped deselected".split()
- for key in self.stats.keys():
- if key not in keys:
- keys.append(key)
- parts = []
- for key in keys:
- if key: # setup/teardown reports have an empty key, ignore them
- val = self.stats.get(key, None)
- if val:
- parts.append("%d %s" %(len(val), key))
- line = ", ".join(parts)
- # XXX coloring
- msg = "%s in %.2f seconds" %(line, session_duration)
- if self.verbosity >= 0:
- self.write_sep("=", msg, bold=True)
- else:
- self.write_line(msg, bold=True)
-
- def summary_deselected(self):
- if 'deselected' in self.stats:
- l = []
- k = self.config.option.keyword
- if k:
- l.append("-k%s" % k)
- m = self.config.option.markexpr
- if m:
- l.append("-m %r" % m)
- self.write_sep("=", "%d tests deselected by %r" %(
- len(self.stats['deselected']), " ".join(l)), bold=True)
-
-def repr_pythonversion(v=None):
- if v is None:
- v = sys.version_info
- try:
- return "%s.%s.%s-%s-%s" % v
- except (TypeError, ValueError):
- return str(v)
-
-def flatten(l):
- for x in l:
- if isinstance(x, (list, tuple)):
- for y in flatten(x):
- yield y
- else:
- yield x
-
diff --git a/_pytest/tmpdir.py b/_pytest/tmpdir.py
deleted file mode 100644
--- a/_pytest/tmpdir.py
+++ /dev/null
@@ -1,68 +0,0 @@
-""" support for providing temporary directories to test functions. """
-import pytest, py
-from _pytest.monkeypatch import monkeypatch
-
-class TempdirHandler:
- def __init__(self, config):
- self.config = config
- self.trace = config.trace.get("tmpdir")
-
- def ensuretemp(self, string, dir=1):
- """ (deprecated) return temporary directory path with
- the given string as the trailing part. It is usually
- better to use the 'tmpdir' function argument which
- provides an empty unique-per-test-invocation directory
- and is guaranteed to be empty.
- """
- #py.log._apiwarn(">1.1", "use tmpdir function argument")
- return self.getbasetemp().ensure(string, dir=dir)
-
- def mktemp(self, basename, numbered=True):
- basetemp = self.getbasetemp()
- if not numbered:
- p = basetemp.mkdir(basename)
- else:
- p = py.path.local.make_numbered_dir(prefix=basename,
- keep=0, rootdir=basetemp, lock_timeout=None)
- self.trace("mktemp", p)
- return p
-
- def getbasetemp(self):
- """ return base temporary directory. """
- try:
- return self._basetemp
- except AttributeError:
- basetemp = self.config.option.basetemp
- if basetemp:
- basetemp = py.path.local(basetemp)
- if basetemp.check():
- basetemp.remove()
- basetemp.mkdir()
- else:
- basetemp = py.path.local.make_numbered_dir(prefix='pytest-')
- self._basetemp = t = basetemp
- self.trace("new basetemp", t)
- return t
-
- def finish(self):
- self.trace("finish")
-
-def pytest_configure(config):
- mp = monkeypatch()
- t = TempdirHandler(config)
- config._cleanup.extend([mp.undo, t.finish])
- mp.setattr(config, '_tmpdirhandler', t, raising=False)
- mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False)
-
-def pytest_funcarg__tmpdir(request):
- """return a temporary directory path object
- which is unique to each test function invocation,
- created as a sub directory of the base temporary
- directory. The returned object is a `py.path.local`_
- path object.
- """
- name = request._pyfuncitem.name
- name = py.std.re.sub("[\W]", "_", name)
- x = request.config._tmpdirhandler.mktemp(name, numbered=True)
- return x
-
diff --git a/_pytest/unittest.py b/_pytest/unittest.py
deleted file mode 100644
--- a/_pytest/unittest.py
+++ /dev/null
@@ -1,165 +0,0 @@
-""" discovery and running of std-library "unittest" style tests. """
-import pytest, py
-import sys, pdb
-
-# for transfering markers
-from _pytest.python import transfer_markers
-
-def pytest_pycollect_makeitem(collector, name, obj):
- unittest = sys.modules.get('unittest')
- if unittest is None:
- return # nobody can have derived unittest.TestCase
- try:
- isunit = issubclass(obj, unittest.TestCase)
- except KeyboardInterrupt:
- raise
- except Exception:
- pass
- else:
- if isunit:
- return UnitTestCase(name, parent=collector)
-
-class UnitTestCase(pytest.Class):
- def collect(self):
- loader = py.std.unittest.TestLoader()
- module = self.getparent(pytest.Module).obj
- cls = self.obj
- for name in loader.getTestCaseNames(self.obj):
- x = getattr(self.obj, name)
- funcobj = getattr(x, 'im_func', x)
- transfer_markers(funcobj, cls, module)
- if hasattr(funcobj, 'todo'):
- pytest.mark.xfail(reason=str(funcobj.todo))(funcobj)
- yield TestCaseFunction(name, parent=self)
-
- def setup(self):
- meth = getattr(self.obj, 'setUpClass', None)
- if meth is not None:
- meth()
- super(UnitTestCase, self).setup()
-
- def teardown(self):
- meth = getattr(self.obj, 'tearDownClass', None)
- if meth is not None:
- meth()
- super(UnitTestCase, self).teardown()
-
-class TestCaseFunction(pytest.Function):
- _excinfo = None
-
- def setup(self):
- self._testcase = self.parent.obj(self.name)
- self._obj = getattr(self._testcase, self.name)
- if hasattr(self._testcase, 'skip'):
- pytest.skip(self._testcase.skip)
- if hasattr(self._obj, 'skip'):
- pytest.skip(self._obj.skip)
- if hasattr(self._testcase, 'setup_method'):
- self._testcase.setup_method(self._obj)
-
- def teardown(self):
- if hasattr(self._testcase, 'teardown_method'):
- self._testcase.teardown_method(self._obj)
-
- def startTest(self, testcase):
- pass
-
- def _addexcinfo(self, rawexcinfo):
- # unwrap potential exception info (see twisted trial support below)
- rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo)
- try:
- excinfo = py.code.ExceptionInfo(rawexcinfo)
- except TypeError:
- try:
- try:
- l = py.std.traceback.format_exception(*rawexcinfo)
- l.insert(0, "NOTE: Incompatible Exception Representation, "
- "displaying natively:\n\n")
- pytest.fail("".join(l), pytrace=False)
- except (pytest.fail.Exception, KeyboardInterrupt):
- raise
- except:
- pytest.fail("ERROR: Unknown Incompatible Exception "
- "representation:\n%r" %(rawexcinfo,), pytrace=False)
- except KeyboardInterrupt:
- raise
- except pytest.fail.Exception:
- excinfo = py.code.ExceptionInfo()
- self.__dict__.setdefault('_excinfo', []).append(excinfo)
-
- def addError(self, testcase, rawexcinfo):
- self._addexcinfo(rawexcinfo)
- def addFailure(self, testcase, rawexcinfo):
- self._addexcinfo(rawexcinfo)
-
- def addSkip(self, testcase, reason):
- try:
- pytest.skip(reason)
- except pytest.skip.Exception:
- self._addexcinfo(sys.exc_info())
-
- def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
- try:
- pytest.xfail(str(reason))
- except pytest.xfail.Exception:
- self._addexcinfo(sys.exc_info())
-
- def addUnexpectedSuccess(self, testcase, reason=""):
- self._unexpectedsuccess = reason
-
- def addSuccess(self, testcase):
- pass
-
- def stopTest(self, testcase):
- pass
-
- def runtest(self):
- self._testcase(result=self)
-
- def _prunetraceback(self, excinfo):
- pytest.Function._prunetraceback(self, excinfo)
- traceback = excinfo.traceback.filter(
- lambda x:not x.frame.f_globals.get('__unittest'))
- if traceback:
- excinfo.traceback = traceback
-
-@pytest.mark.tryfirst
-def pytest_runtest_makereport(item, call):
- if isinstance(item, TestCaseFunction):
- if item._excinfo:
- call.excinfo = item._excinfo.pop(0)
- del call.result
-
-# twisted trial support
-def pytest_runtest_protocol(item, __multicall__):
- if isinstance(item, TestCaseFunction):
- if 'twisted.trial.unittest' in sys.modules:
- ut = sys.modules['twisted.python.failure']
- Failure__init__ = ut.Failure.__init__.im_func
- check_testcase_implements_trial_reporter()
- def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
- captureVars=None):
- if exc_value is None:
- self._rawexcinfo = sys.exc_info()
- else:
- if exc_type is None:
- exc_type = type(exc_value)
- self._rawexcinfo = (exc_type, exc_value, exc_tb)
- try:
- Failure__init__(self, exc_value, exc_type, exc_tb,
- captureVars=captureVars)
- except TypeError:
- Failure__init__(self, exc_value, exc_type, exc_tb)
- ut.Failure.__init__ = excstore
- try:
- return __multicall__.execute()
- finally:
- ut.Failure.__init__ = Failure__init__
-
-def check_testcase_implements_trial_reporter(done=[]):
- if done:
- return
- from zope.interface import classImplements
- from twisted.trial.itrial import IReporter
- classImplements(TestCaseFunction, IReporter)
- done.append(1)
diff --git a/ctypes_configure/__init__.py b/ctypes_configure/__init__.py
deleted file mode 100644
diff --git a/ctypes_configure/cbuild.py b/ctypes_configure/cbuild.py
deleted file mode 100644
--- a/ctypes_configure/cbuild.py
+++ /dev/null
@@ -1,456 +0,0 @@
-
-import os, sys, inspect, re, imp, py
-from ctypes_configure import stdoutcapture
-import distutils
-
-debug = 0
-
-configdir = py.path.local.make_numbered_dir(prefix='ctypes_configure-')
-
-class ExternalCompilationInfo(object):
-
- _ATTRIBUTES = ['pre_include_lines', 'includes', 'include_dirs',
- 'post_include_lines', 'libraries', 'library_dirs',
- 'separate_module_sources', 'separate_module_files']
- _AVOID_DUPLICATES = ['separate_module_files', 'libraries', 'includes',
- 'include_dirs', 'library_dirs', 'separate_module_sources']
-
- def __init__(self,
- pre_include_lines = [],
- includes = [],
- include_dirs = [],
- post_include_lines = [],
- libraries = [],
- library_dirs = [],
- separate_module_sources = [],
- separate_module_files = []):
- """
- pre_include_lines: list of lines that should be put at the top
- of the generated .c files, before any #include. They shouldn't
- contain an #include themselves.
-
- includes: list of .h file names to be #include'd from the
- generated .c files.
-
- include_dirs: list of dir names that is passed to the C compiler
-
- post_include_lines: list of lines that should be put at the top
- of the generated .c files, after the #includes.
-
- libraries: list of library names that is passed to the linker
-
- library_dirs: list of dir names that is passed to the linker
-
- separate_module_sources: list of multiline strings that are
- each written to a .c file and compiled separately and linked
- later on. (If function prototypes are needed for other .c files
- to access this, they can be put in post_include_lines.)
-
- separate_module_files: list of .c file names that are compiled
- separately and linked later on. (If an .h file is needed for
- other .c files to access this, it can be put in includes.)
- """
- for name in self._ATTRIBUTES:
- value = locals()[name]
- assert isinstance(value, (list, tuple))
- setattr(self, name, tuple(value))
-
- def _value(self):
- return tuple([getattr(self, x) for x in self._ATTRIBUTES])
-
- def __hash__(self):
- return hash(self._value())
-
- def __eq__(self, other):
- return self.__class__ is other.__class__ and \
- self._value() == other._value()
-
- def __ne__(self, other):
- return not self == other
-
- def __repr__(self):
- info = []
- for attr in self._ATTRIBUTES:
- val = getattr(self, attr)
- info.append("%s=%s" % (attr, repr(val)))
- return "" % ", ".join(info)
-
- def merge(self, *others):
- others = list(others)
- attrs = {}
- for name in self._ATTRIBUTES:
- if name not in self._AVOID_DUPLICATES:
- s = []
- for i in [self] + others:
- s += getattr(i, name)
- attrs[name] = s
- else:
- s = set()
- attr = []
- for one in [self] + others:
- for elem in getattr(one, name):
- if elem not in s:
- s.add(elem)
- attr.append(elem)
- attrs[name] = attr
- return ExternalCompilationInfo(**attrs)
-
- def write_c_header(self, fileobj):
- for line in self.pre_include_lines:
- print >> fileobj, line
- for path in self.includes:
- print >> fileobj, '#include <%s>' % (path,)
- for line in self.post_include_lines:
- print >> fileobj, line
-
- def _copy_attributes(self):
- d = {}
- for attr in self._ATTRIBUTES:
- d[attr] = getattr(self, attr)
- return d
-
- def convert_sources_to_files(self, cache_dir=None, being_main=False):
- if not self.separate_module_sources:
- return self
- if cache_dir is None:
- cache_dir = configdir.join('module_cache').ensure(dir=1)
- num = 0
- files = []
- for source in self.separate_module_sources:
- while 1:
- filename = cache_dir.join('module_%d.c' % num)
- num += 1
- if not filename.check():
- break
- f = filename.open("w")
- if being_main:
- f.write("#define PYPY_NOT_MAIN_FILE\n")
- self.write_c_header(f)
- source = str(source)
- f.write(source)
- if not source.endswith('\n'):
- f.write('\n')
- f.close()
- files.append(str(filename))
- d = self._copy_attributes()
- d['separate_module_sources'] = ()
- d['separate_module_files'] += tuple(files)
- return ExternalCompilationInfo(**d)
-
- def compile_shared_lib(self):
- self = self.convert_sources_to_files()
- if not self.separate_module_files:
- return self
- lib = compile_c_module([], 'externmod', self)
- d = self._copy_attributes()
- d['libraries'] += (lib,)
- d['separate_module_files'] = ()
- d['separate_module_sources'] = ()
- return ExternalCompilationInfo(**d)
-
-if sys.platform == 'win32':
- so_ext = '.dll'
-else:
- so_ext = '.so'
-
-def compiler_command():
- # e.g. for tcc, you might set this to
- # "tcc -shared -o %s.so %s.c"
- return os.getenv('PYPY_CC')
-
-def enable_fast_compilation():
- if sys.platform == 'win32':
- dash = '/'
- else:
- dash = '-'
- from distutils import sysconfig
- gcv = sysconfig.get_config_vars()
- opt = gcv.get('OPT') # not always existent
- if opt:
- opt = re.sub('%sO\d+' % dash, '%sO0' % dash, opt)
- else:
- opt = '%sO0' % dash
- gcv['OPT'] = opt
-
-def ensure_correct_math():
- if sys.platform != 'win32':
- return # so far
- from distutils import sysconfig
- gcv = sysconfig.get_config_vars()
- opt = gcv.get('OPT') # not always existent
- if opt and '/Op' not in opt:
- opt += '/Op'
- gcv['OPT'] = opt
-
-
-def try_compile(c_files, eci):
- try:
- build_executable(c_files, eci)
- result = True
- except (distutils.errors.CompileError,
- distutils.errors.LinkError):
- result = False
- return result
-
-def compile_c_module(cfiles, modbasename, eci, tmpdir=None):
- #try:
- # from distutils.log import set_threshold
- # set_threshold(10000)
- #except ImportError:
- # print "ERROR IMPORTING"
- # pass
- cfiles = [py.path.local(f) for f in cfiles]
- if tmpdir is None:
- tmpdir = configdir.join("module_cache").ensure(dir=1)
- num = 0
- cfiles += eci.separate_module_files
- include_dirs = list(eci.include_dirs)
- library_dirs = list(eci.library_dirs)
- if (sys.platform == 'darwin' or # support Fink & Darwinports
- sys.platform.startswith('freebsd')):
- for s in ('/sw/', '/opt/local/', '/usr/local/'):
- if s + 'include' not in include_dirs and \
- os.path.exists(s + 'include'):
- include_dirs.append(s + 'include')
- if s + 'lib' not in library_dirs and \
- os.path.exists(s + 'lib'):
- library_dirs.append(s + 'lib')
-
- num = 0
- modname = modbasename
- while 1:
- if not tmpdir.join(modname + so_ext).check():
- break
- num += 1
- modname = '%s_%d' % (modbasename, num)
-
- lastdir = tmpdir.chdir()
- libraries = eci.libraries
- ensure_correct_math()
- try:
- if debug: print "modname", modname
- c = stdoutcapture.Capture(mixed_out_err = True)
- try:
- try:
- if compiler_command():
- # GCC-ish options only
- from distutils import sysconfig
- gcv = sysconfig.get_config_vars()
- cmd = compiler_command().replace('%s',
- str(tmpdir.join(modname)))
- for dir in [gcv['INCLUDEPY']] + list(include_dirs):
- cmd += ' -I%s' % dir
- for dir in library_dirs:
- cmd += ' -L%s' % dir
- os.system(cmd)
- else:
- from distutils.dist import Distribution
- from distutils.extension import Extension
- from distutils.ccompiler import get_default_compiler
- saved_environ = os.environ.items()
- try:
- # distutils.core.setup() is really meant for end-user
- # interactive usage, because it eats most exceptions and
- # turn them into SystemExits. Instead, we directly
- # instantiate a Distribution, which also allows us to
- # ignore unwanted features like config files.
- extra_compile_args = []
- # ensure correct math on windows
- if sys.platform == 'win32':
- extra_compile_args.append('/Op') # get extra precision
- if get_default_compiler() == 'unix':
- old_version = False
- try:
- g = os.popen('gcc --version', 'r')
- verinfo = g.read()
- g.close()
- except (OSError, IOError):
- pass
- else:
- old_version = verinfo.startswith('2')
- if not old_version:
- extra_compile_args.extend(["-Wno-unused-label",
- "-Wno-unused-variable"])
- attrs = {
- 'name': "testmodule",
- 'ext_modules': [
- Extension(modname, [str(cfile) for cfile in cfiles],
- include_dirs=include_dirs,
- library_dirs=library_dirs,
- extra_compile_args=extra_compile_args,
- libraries=list(libraries),)
- ],
- 'script_name': 'setup.py',
- 'script_args': ['-q', 'build_ext', '--inplace', '--force'],
- }
- dist = Distribution(attrs)
- if not dist.parse_command_line():
- raise ValueError, "distutils cmdline parse error"
- dist.run_commands()
- finally:
- for key, value in saved_environ:
- if os.environ.get(key) != value:
- os.environ[key] = value
- finally:
- foutput, foutput = c.done()
- data = foutput.read()
- if data:
- fdump = open("%s.errors" % modname, "w")
- fdump.write(data)
- fdump.close()
- # XXX do we need to do some check on fout/ferr?
- # XXX not a nice way to import a module
- except:
- print >>sys.stderr, data
- raise
- finally:
- lastdir.chdir()
- return str(tmpdir.join(modname) + so_ext)
-
-def make_module_from_c(cfile, eci):
- cfile = py.path.local(cfile)
- modname = cfile.purebasename
- compile_c_module([cfile], modname, eci)
- return import_module_from_directory(cfile.dirpath(), modname)
-
-def import_module_from_directory(dir, modname):
- file, pathname, description = imp.find_module(modname, [str(dir)])
- try:
- mod = imp.load_module(modname, file, pathname, description)
- finally:
- if file:
- file.close()
- return mod
-
-
-def log_spawned_cmd(spawn):
- def spawn_and_log(cmd, *args, **kwds):
- if debug:
- print ' '.join(cmd)
- return spawn(cmd, *args, **kwds)
- return spawn_and_log
-
-
-class ProfOpt(object):
- #XXX assuming gcc style flags for now
- name = "profopt"
-
- def __init__(self, compiler):
- self.compiler = compiler
-
- def first(self):
- self.build('-fprofile-generate')
-
- def probe(self, exe, args):
- # 'args' is a single string typically containing spaces
- # and quotes, which represents several arguments.
- os.system("'%s' %s" % (exe, args))
-
- def after(self):
- self.build('-fprofile-use')
-
- def build(self, option):
- compiler = self.compiler
- compiler.compile_extra.append(option)
- compiler.link_extra.append(option)
- try:
- compiler._build()
- finally:
- compiler.compile_extra.pop()
- compiler.link_extra.pop()
-
-class CCompiler:
-
- def __init__(self, cfilenames, eci, outputfilename=None,
- compiler_exe=None, profbased=None):
- self.cfilenames = cfilenames
- ext = ''
- self.compile_extra = []
- self.link_extra = []
- self.libraries = list(eci.libraries)
- self.include_dirs = list(eci.include_dirs)
- self.library_dirs = list(eci.library_dirs)
- self.compiler_exe = compiler_exe
- self.profbased = profbased
- if not sys.platform in ('win32', 'darwin'): # xxx
- if 'm' not in self.libraries:
- self.libraries.append('m')
- if 'pthread' not in self.libraries:
- self.libraries.append('pthread')
- self.compile_extra += ['-O3', '-fomit-frame-pointer', '-pthread']
- self.link_extra += ['-pthread']
- if sys.platform == 'win32':
- self.link_extra += ['/DEBUG'] # generate .pdb file
- if (sys.platform == 'darwin' or # support Fink & Darwinports
- sys.platform.startswith('freebsd')):
- for s in ('/sw/', '/opt/local/', '/usr/local/'):
- if s + 'include' not in self.include_dirs and \
- os.path.exists(s + 'include'):
- self.include_dirs.append(s + 'include')
- if s + 'lib' not in self.library_dirs and \
- os.path.exists(s + 'lib'):
- self.library_dirs.append(s + 'lib')
- self.compile_extra += ['-O3', '-fomit-frame-pointer']
-
- if outputfilename is None:
- self.outputfilename = py.path.local(cfilenames[0]).new(ext=ext)
- else:
- self.outputfilename = py.path.local(outputfilename)
-
- def build(self, noerr=False):
- basename = self.outputfilename.new(ext='')
- data = ''
- try:
- saved_environ = os.environ.copy()
- c = stdoutcapture.Capture(mixed_out_err = True)
- try:
- self._build()
- finally:
- # workaround for a distutils bugs where some env vars can
- # become longer and longer every time it is used
- for key, value in saved_environ.items():
- if os.environ.get(key) != value:
- os.environ[key] = value
- foutput, foutput = c.done()
- data = foutput.read()
- if data:
- fdump = basename.new(ext='errors').open("w")
- fdump.write(data)
- fdump.close()
- except:
- if not noerr:
- print >>sys.stderr, data
- raise
-
- def _build(self):
- from distutils.ccompiler import new_compiler
- compiler = new_compiler(force=1)
- if self.compiler_exe is not None:
- for c in '''compiler compiler_so compiler_cxx
- linker_exe linker_so'''.split():
- compiler.executables[c][0] = self.compiler_exe
- compiler.spawn = log_spawned_cmd(compiler.spawn)
- objects = []
- for cfile in self.cfilenames:
- cfile = py.path.local(cfile)
- old = cfile.dirpath().chdir()
- try:
- res = compiler.compile([cfile.basename],
- include_dirs=self.include_dirs,
- extra_preargs=self.compile_extra)
- assert len(res) == 1
- cobjfile = py.path.local(res[0])
- assert cobjfile.check()
- objects.append(str(cobjfile))
- finally:
- old.chdir()
- compiler.link_executable(objects, str(self.outputfilename),
- libraries=self.libraries,
- extra_preargs=self.link_extra,
- library_dirs=self.library_dirs)
-
-def build_executable(*args, **kwds):
- noerr = kwds.pop('noerr', False)
- compiler = CCompiler(*args, **kwds)
- compiler.build(noerr=noerr)
- return str(compiler.outputfilename)
diff --git a/ctypes_configure/configure.py b/ctypes_configure/configure.py
deleted file mode 100755
--- a/ctypes_configure/configure.py
+++ /dev/null
@@ -1,621 +0,0 @@
-#! /usr/bin/env python
-
-import os, py, sys
-import ctypes
-from ctypes_configure.cbuild import build_executable, configdir, try_compile
-from ctypes_configure.cbuild import ExternalCompilationInfo
-import distutils
-
-# ____________________________________________________________
-#
-# Helpers for simple cases
-
-def eci_from_header(c_header_source):
- return ExternalCompilationInfo(
- pre_include_lines=c_header_source.split("\n")
- )
-
-
-def getstruct(name, c_header_source, interesting_fields):
- class CConfig:
- _compilation_info_ = eci_from_header(c_header_source)
- STRUCT = Struct(name, interesting_fields)
- return configure(CConfig)['STRUCT']
-
-def getsimpletype(name, c_header_source, ctype_hint=ctypes.c_int):
- class CConfig:
- _compilation_info_ = eci_from_header(c_header_source)
- TYPE = SimpleType(name, ctype_hint)
- return configure(CConfig)['TYPE']
-
-def getconstantinteger(name, c_header_source):
- class CConfig:
- _compilation_info_ = eci_from_header(c_header_source)
- CONST = ConstantInteger(name)
- return configure(CConfig)['CONST']
-
-def getdefined(macro, c_header_source):
- class CConfig:
- _compilation_info_ = eci_from_header(c_header_source)
- DEFINED = Defined(macro)
- return configure(CConfig)['DEFINED']
-
-def has(name, c_header_source):
- class CConfig:
- _compilation_info_ = eci_from_header(c_header_source)
- HAS = Has(name)
- return configure(CConfig)['HAS']
-
-def check_eci(eci):
- """Check if a given ExternalCompilationInfo compiles and links."""
- class CConfig:
- _compilation_info_ = eci
- WORKS = Works()
- return configure(CConfig)['WORKS']
-
-def sizeof(name, eci, **kwds):
- class CConfig:
- _compilation_info_ = eci
- SIZE = SizeOf(name)
- for k, v in kwds.items():
- setattr(CConfig, k, v)
- return configure(CConfig)['SIZE']
-
-def memory_alignment():
- """Return the alignment (in bytes) of memory allocations.
- This is enough to make sure a structure with pointers and 'double'
- fields is properly aligned."""
- global _memory_alignment
- if _memory_alignment is None:
- S = getstruct('struct memory_alignment_test', """
- struct memory_alignment_test {
- double d;
- void* p;
- };
- """, [])
- result = ctypes.alignment(S)
- assert result & (result-1) == 0, "not a power of two??"
- _memory_alignment = result
- return _memory_alignment
-_memory_alignment = None
-
-# ____________________________________________________________
-#
-# General interface
-
-class ConfigResult:
- def __init__(self, CConfig, info, entries):
- self.CConfig = CConfig
- self.result = {}
- self.info = info
- self.entries = entries
-
- def get_entry_result(self, entry):
- try:
- return self.result[entry]
- except KeyError:
- pass
- name = self.entries[entry]
- info = self.info[name]
- self.result[entry] = entry.build_result(info, self)
-
- def get_result(self):
- return dict([(name, self.result[entry])
- for entry, name in self.entries.iteritems()])
-
-
-class _CWriter(object):
- """ A simple class which aggregates config parts
- """
- def __init__(self, CConfig):
- self.path = uniquefilepath()
- self.f = self.path.open("w")
- self.config = CConfig
-
- def write_header(self):
- f = self.f
- CConfig = self.config
- CConfig._compilation_info_.write_c_header(f)
- print >> f, C_HEADER
- print >> f
-
- def write_entry(self, key, entry):
- f = self.f
- print >> f, 'void dump_section_%s(void) {' % (key,)
- for line in entry.prepare_code():
- if line and line[0] != '#':
- line = '\t' + line
- print >> f, line
- print >> f, '}'
- print >> f
-
- def write_entry_main(self, key):
- print >> self.f, '\tprintf("-+- %s\\n");' % (key,)
- print >> self.f, '\tdump_section_%s();' % (key,)
- print >> self.f, '\tprintf("---\\n");'
-
- def start_main(self):
- print >> self.f, 'int main(int argc, char *argv[]) {'
-
- def close(self):
- f = self.f
- print >> f, '\treturn 0;'
- print >> f, '}'
- f.close()
-
- def ask_gcc(self, question):
- self.start_main()
- self.f.write(question + "\n")
- self.close()
- eci = self.config._compilation_info_
- return try_compile([self.path], eci)
-
-
-def configure(CConfig, noerr=False):
- """Examine the local system by running the C compiler.
- The CConfig class contains CConfigEntry attribues that describe
- what should be inspected; configure() returns a dict mapping
- names to the results.
- """
- for attr in ['_includes_', '_libraries_', '_sources_', '_library_dirs_',
- '_include_dirs_', '_header_']:
- assert not hasattr(CConfig, attr), "Found legacy attribut %s on CConfig" % (attr,)
- entries = []
- for key in dir(CConfig):
- value = getattr(CConfig, key)
- if isinstance(value, CConfigEntry):
- entries.append((key, value))
-
- if entries: # can be empty if there are only CConfigSingleEntries
- writer = _CWriter(CConfig)
- writer.write_header()
- for key, entry in entries:
- writer.write_entry(key, entry)
-
- f = writer.f
- writer.start_main()
- for key, entry in entries:
- writer.write_entry_main(key)
- writer.close()
-
- eci = CConfig._compilation_info_
- infolist = list(run_example_code(writer.path, eci, noerr=noerr))
- assert len(infolist) == len(entries)
-
- resultinfo = {}
- resultentries = {}
- for info, (key, entry) in zip(infolist, entries):
- resultinfo[key] = info
- resultentries[entry] = key
-
- result = ConfigResult(CConfig, resultinfo, resultentries)
- for name, entry in entries:
- result.get_entry_result(entry)
- res = result.get_result()
- else:
- res = {}
-
- for key in dir(CConfig):
- value = getattr(CConfig, key)
- if isinstance(value, CConfigSingleEntry):
- writer = _CWriter(CConfig)
- writer.write_header()
- res[key] = value.question(writer.ask_gcc)
- return res
-
-# ____________________________________________________________
-
-
-class CConfigEntry(object):
- "Abstract base class."
-
-class Struct(CConfigEntry):
- """An entry in a CConfig class that stands for an externally
- defined structure.
- """
- def __init__(self, name, interesting_fields, ifdef=None):
- self.name = name
- self.interesting_fields = interesting_fields
- self.ifdef = ifdef
-
- def prepare_code(self):
- if self.ifdef is not None:
- yield '#ifdef %s' % (self.ifdef,)
- yield 'typedef %s ctypesplatcheck_t;' % (self.name,)
- yield 'typedef struct {'
- yield ' char c;'
- yield ' ctypesplatcheck_t s;'
- yield '} ctypesplatcheck2_t;'
- yield ''
- yield 'ctypesplatcheck_t s;'
- if self.ifdef is not None:
- yield 'dump("defined", 1);'
- yield 'dump("align", offsetof(ctypesplatcheck2_t, s));'
- yield 'dump("size", sizeof(ctypesplatcheck_t));'
- for fieldname, fieldtype in self.interesting_fields:
- yield 'dump("fldofs %s", offsetof(ctypesplatcheck_t, %s));'%(
- fieldname, fieldname)
- yield 'dump("fldsize %s", sizeof(s.%s));' % (
- fieldname, fieldname)
- if fieldtype in integer_class:
- yield 's.%s = 0; s.%s = ~s.%s;' % (fieldname,
- fieldname,
- fieldname)
- yield 'dump("fldunsigned %s", s.%s > 0);' % (fieldname,
- fieldname)
- if self.ifdef is not None:
- yield '#else'
- yield 'dump("defined", 0);'
- yield '#endif'
-
- def build_result(self, info, config_result):
- if self.ifdef is not None:
- if not info['defined']:
- return None
- alignment = 1
- layout = [None] * info['size']
- for fieldname, fieldtype in self.interesting_fields:
- if isinstance(fieldtype, Struct):
- offset = info['fldofs ' + fieldname]
- size = info['fldsize ' + fieldname]
- c_fieldtype = config_result.get_entry_result(fieldtype)
- layout_addfield(layout, offset, c_fieldtype, fieldname)
- alignment = max(alignment, ctype_alignment(c_fieldtype))
- else:
- offset = info['fldofs ' + fieldname]
- size = info['fldsize ' + fieldname]
- sign = info.get('fldunsigned ' + fieldname, False)
- if (size, sign) != size_and_sign(fieldtype):
- fieldtype = fixup_ctype(fieldtype, fieldname, (size, sign))
- layout_addfield(layout, offset, fieldtype, fieldname)
- alignment = max(alignment, ctype_alignment(fieldtype))
-
- # try to enforce the same alignment as the one of the original
- # structure
- if alignment < info['align']:
- choices = [ctype for ctype in alignment_types
- if ctype_alignment(ctype) == info['align']]
- assert choices, "unsupported alignment %d" % (info['align'],)
- choices = [(ctypes.sizeof(ctype), i, ctype)
- for i, ctype in enumerate(choices)]
- csize, _, ctype = min(choices)
- for i in range(0, info['size'] - csize + 1, info['align']):
- if layout[i:i+csize] == [None] * csize:
- layout_addfield(layout, i, ctype, '_alignment')
- break
- else:
- raise AssertionError("unenforceable alignment %d" % (
- info['align'],))
-
- n = 0
- for i, cell in enumerate(layout):
- if cell is not None:
- continue
- layout_addfield(layout, i, ctypes.c_char, '_pad%d' % (n,))
- n += 1
-
- # build the ctypes Structure
- seen = {}
- fields = []
- for cell in layout:
- if cell in seen:
- continue
- fields.append((cell.name, cell.ctype))
- seen[cell] = True
-
- class S(ctypes.Structure):
- _fields_ = fields
- name = self.name
- if name.startswith('struct '):
- name = name[7:]
- S.__name__ = name
- return S
-
-class SimpleType(CConfigEntry):
- """An entry in a CConfig class that stands for an externally
- defined simple numeric type.
- """
- def __init__(self, name, ctype_hint=ctypes.c_int, ifdef=None):
- self.name = name
- self.ctype_hint = ctype_hint
- self.ifdef = ifdef
-
- def prepare_code(self):
- if self.ifdef is not None:
- yield '#ifdef %s' % (self.ifdef,)
- yield 'typedef %s ctypesplatcheck_t;' % (self.name,)
- yield ''
- yield 'ctypesplatcheck_t x;'
- if self.ifdef is not None:
- yield 'dump("defined", 1);'
- yield 'dump("size", sizeof(ctypesplatcheck_t));'
- if self.ctype_hint in integer_class:
- yield 'x = 0; x = ~x;'
- yield 'dump("unsigned", x > 0);'
- if self.ifdef is not None:
- yield '#else'
- yield 'dump("defined", 0);'
- yield '#endif'
-
- def build_result(self, info, config_result):
- if self.ifdef is not None and not info['defined']:
- return None
- size = info['size']
- sign = info.get('unsigned', False)
- ctype = self.ctype_hint
- if (size, sign) != size_and_sign(ctype):
- ctype = fixup_ctype(ctype, self.name, (size, sign))
- return ctype
-
-class ConstantInteger(CConfigEntry):
- """An entry in a CConfig class that stands for an externally
- defined integer constant.
- """
- def __init__(self, name):
- self.name = name
-
- def prepare_code(self):
- yield 'if ((%s) < 0) {' % (self.name,)
- yield ' long long x = (long long)(%s);' % (self.name,)
- yield ' printf("value: %lld\\n", x);'
- yield '} else {'
- yield ' unsigned long long x = (unsigned long long)(%s);' % (
- self.name,)
- yield ' printf("value: %llu\\n", x);'
- yield '}'
-
- def build_result(self, info, config_result):
- return info['value']
-
-class DefinedConstantInteger(CConfigEntry):
- """An entry in a CConfig class that stands for an externally
- defined integer constant. If not #defined the value will be None.
- """
- def __init__(self, macro):
- self.name = self.macro = macro
-
- def prepare_code(self):
- yield '#ifdef %s' % self.macro
- yield 'dump("defined", 1);'
- yield 'if ((%s) < 0) {' % (self.macro,)
- yield ' long long x = (long long)(%s);' % (self.macro,)
- yield ' printf("value: %lld\\n", x);'
- yield '} else {'
- yield ' unsigned long long x = (unsigned long long)(%s);' % (
- self.macro,)
- yield ' printf("value: %llu\\n", x);'
- yield '}'
- yield '#else'
- yield 'dump("defined", 0);'
- yield '#endif'
-
- def build_result(self, info, config_result):
- if info["defined"]:
- return info['value']
- return None
-
-
-class DefinedConstantString(CConfigEntry):
- """
- """
- def __init__(self, macro):
- self.macro = macro
- self.name = macro
-
- def prepare_code(self):
- yield '#ifdef %s' % self.macro
- yield 'int i;'
- yield 'char *p = %s;' % self.macro
- yield 'dump("defined", 1);'
- yield 'for (i = 0; p[i] != 0; i++ ) {'
- yield ' printf("value_%d: %d\\n", i, (int)(unsigned char)p[i]);'
- yield '}'
- yield '#else'
- yield 'dump("defined", 0);'
- yield '#endif'
-
- def build_result(self, info, config_result):
- if info["defined"]:
- string = ''
- d = 0
- while info.has_key('value_%d' % d):
- string += chr(info['value_%d' % d])
- d += 1
- return string
- return None
-
-
-class Defined(CConfigEntry):
- """A boolean, corresponding to an #ifdef.
- """
- def __init__(self, macro):
- self.macro = macro
- self.name = macro
-
- def prepare_code(self):
- yield '#ifdef %s' % (self.macro,)
- yield 'dump("defined", 1);'
- yield '#else'
- yield 'dump("defined", 0);'
- yield '#endif'
-
- def build_result(self, info, config_result):
- return bool(info['defined'])
-
-class CConfigSingleEntry(object):
- """ An abstract class of type which requires
- gcc succeeding/failing instead of only asking
- """
- pass
-
-class Has(CConfigSingleEntry):
- def __init__(self, name):
- self.name = name
-
- def question(self, ask_gcc):
- return ask_gcc(self.name + ';')
-
-class Works(CConfigSingleEntry):
- def question(self, ask_gcc):
- return ask_gcc("")
-
-class SizeOf(CConfigEntry):
- """An entry in a CConfig class that stands for
- some external opaque type
- """
- def __init__(self, name):
- self.name = name
-
- def prepare_code(self):
- yield 'dump("size", sizeof(%s));' % self.name
-
- def build_result(self, info, config_result):
- return info['size']
-
-# ____________________________________________________________
-#
-# internal helpers
-
-def ctype_alignment(c_type):
- if issubclass(c_type, ctypes.Structure):
- return max([ctype_alignment(fld_type)
- for fld_name, fld_type in c_type._fields_])
-
- return ctypes.alignment(c_type)
-
-def uniquefilepath(LAST=[0]):
- i = LAST[0]
- LAST[0] += 1
- return configdir.join('ctypesplatcheck_%d.c' % i)
-
-alignment_types = [
- ctypes.c_short,
- ctypes.c_int,
- ctypes.c_long,
- ctypes.c_float,
- ctypes.c_double,
- ctypes.c_char_p,
- ctypes.c_void_p,
- ctypes.c_longlong,
- ctypes.c_wchar,
- ctypes.c_wchar_p,
- ]
-
-integer_class = [ctypes.c_byte, ctypes.c_ubyte,
- ctypes.c_short, ctypes.c_ushort,
- ctypes.c_int, ctypes.c_uint,
- ctypes.c_long, ctypes.c_ulong,
- ctypes.c_longlong, ctypes.c_ulonglong,
- ]
-float_class = [ctypes.c_float, ctypes.c_double]
-
-class Field(object):
- def __init__(self, name, ctype):
- self.name = name
- self.ctype = ctype
- def __repr__(self):
- return '' % (self.name, self.ctype)
-
-def layout_addfield(layout, offset, ctype, prefix):
- size = ctypes.sizeof(ctype)
- name = prefix
- i = 0
- while name in layout:
- i += 1
- name = '%s_%d' % (prefix, i)
- field = Field(name, ctype)
- for i in range(offset, offset+size):
- assert layout[i] is None, "%s overlaps %r" % (fieldname, layout[i])
- layout[i] = field
- return field
-
-def size_and_sign(ctype):
- return (ctypes.sizeof(ctype),
- ctype in integer_class and ctype(-1).value > 0)
-
-def fixup_ctype(fieldtype, fieldname, expected_size_and_sign):
- for typeclass in [integer_class, float_class]:
- if fieldtype in typeclass:
- for ctype in typeclass:
- if size_and_sign(ctype) == expected_size_and_sign:
- return ctype
- if (hasattr(fieldtype, '_length_')
- and getattr(fieldtype, '_type_', None) == ctypes.c_char):
- # for now, assume it is an array of chars; otherwise we'd also
- # have to check the exact integer type of the elements of the array
- size, sign = expected_size_and_sign
- return ctypes.c_char * size
- if (hasattr(fieldtype, '_length_')
- and getattr(fieldtype, '_type_', None) == ctypes.c_ubyte):
- # grumble, fields of type 'c_char array' have automatic cast-to-
- # Python-string behavior in ctypes, which may not be what you
- # want, so here is the same with c_ubytes instead...
- size, sign = expected_size_and_sign
- return ctypes.c_ubyte * size
- raise TypeError("conflicting field type %r for %r" % (fieldtype,
- fieldname))
-
-
-C_HEADER = """
-#include
-#include /* for offsetof() */
-#ifndef _WIN32
-# include /* FreeBSD: for uint64_t */
-#endif
-
-void dump(char* key, int value) {
- printf("%s: %d\\n", key, value);
-}
-"""
-
-def run_example_code(filepath, eci, noerr=False):
- executable = build_executable([filepath], eci, noerr=noerr)
- output = py.process.cmdexec(executable)
- section = None
- for line in output.splitlines():
- line = line.strip()
- if line.startswith('-+- '): # start of a new section
- section = {}
- elif line == '---': # section end
- assert section is not None
- yield section
- section = None
- elif line:
- assert section is not None
- key, value = line.split(': ')
- section[key] = int(value)
-
-# ____________________________________________________________
-
-def get_python_include_dir():
- from distutils import sysconfig
- gcv = sysconfig.get_config_vars()
- return gcv['INCLUDEPY']
-
-if __name__ == '__main__':
- doc = """Example:
-
- ctypes_platform.py -h sys/types.h -h netinet/in.h
- 'struct sockaddr_in'
- sin_port c_int
- """
- import sys, getopt
- opts, args = getopt.gnu_getopt(sys.argv[1:], 'h:')
- if not args:
- print >> sys.stderr, doc
- else:
- assert len(args) % 2 == 1
- headers = []
- for opt, value in opts:
- if opt == '-h':
- headers.append('#include <%s>' % (value,))
- name = args[0]
- fields = []
- for i in range(1, len(args), 2):
- ctype = getattr(ctypes, args[i+1])
- fields.append((args[i], ctype))
-
- S = getstruct(name, '\n'.join(headers), fields)
-
- for key, value in S._fields_:
- print key, value
diff --git a/ctypes_configure/doc/configure.html b/ctypes_configure/doc/configure.html
deleted file mode 100644
--- a/ctypes_configure/doc/configure.html
+++ /dev/null
@@ -1,30 +0,0 @@
-
-
-
-
-
-
-ctypes configure
-
-
-
-
ctypes configure
-
-
-
One of ctypes problems is that ctypes programs are usually not very
-platform-independent. We created ctypes_configure, which invokes gcc
From noreply at buildbot.pypy.org Sat Apr 14 10:01:30 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 10:01:30 +0200 (CEST)
Subject: [pypy-commit] pypy vendor/stdlib: revert hgignore on vendor branch
Message-ID: <20120414080130.302B982F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: vendor/stdlib
Changeset: r54353:4aa4e1f8e19a
Date: 2012-04-14 09:58 +0200
http://bitbucket.org/pypy/pypy/changeset/4aa4e1f8e19a/
Log: revert hgignore on vendor branch
diff --git a/.hgignore b/.hgignore
new file mode 100644
--- /dev/null
+++ b/.hgignore
@@ -0,0 +1,76 @@
+syntax: glob
+*.py[co]
+*~
+.*.swp
+.idea
+.project
+.pydevproject
+
+syntax: regexp
+^testresult$
+^site-packages$
+^site-packages/.*$
+^site-packages/.*$
+^bin$
+^pypy/bin/pypy-c
+^pypy/module/cpyext/src/.+\.o$
+^pypy/module/cpyext/src/.+\.obj$
+^pypy/module/cpyext/test/.+\.errors$
+^pypy/module/cpyext/test/.+\.o$
+^pypy/module/cpyext/test/.+\.obj$
+^pypy/module/cpyext/test/.+\.manifest$
+^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$
+^pypy/doc/.+\.html$
+^pypy/doc/config/.+\.rst$
+^pypy/doc/basicblock\.asc$
+^pypy/doc/.+\.svninfo$
+^pypy/translator/c/src/libffi_msvc/.+\.obj$
+^pypy/translator/c/src/libffi_msvc/.+\.dll$
+^pypy/translator/c/src/libffi_msvc/.+\.lib$
+^pypy/translator/c/src/libffi_msvc/.+\.exp$
+^pypy/translator/c/src/cjkcodecs/.+\.o$
+^pypy/translator/c/src/cjkcodecs/.+\.obj$
+^pypy/translator/jvm/\.project$
+^pypy/translator/jvm/\.classpath$
+^pypy/translator/jvm/eclipse-bin$
+^pypy/translator/jvm/src/pypy/.+\.class$
+^pypy/translator/benchmark/docutils$
+^pypy/translator/benchmark/templess$
+^pypy/translator/benchmark/gadfly$
+^pypy/translator/benchmark/mako$
+^pypy/translator/benchmark/bench-custom\.benchmark_result$
+^pypy/translator/benchmark/shootout_benchmarks$
+^pypy/translator/goal/pypy-translation-snapshot$
+^pypy/translator/goal/pypy-c
+^pypy/translator/goal/pypy-jvm
+^pypy/translator/goal/pypy-jvm.jar
+^pypy/translator/goal/.+\.exe$
+^pypy/translator/goal/.+\.dll$
+^pypy/translator/goal/target.+-c$
+^pypy/_cache$
+^pypy/doc/statistic/.+\.html$
+^pypy/doc/statistic/.+\.eps$
+^pypy/doc/statistic/.+\.pdf$
+^pypy/translator/cli/src/pypylib\.dll$
+^pypy/translator/cli/src/query\.exe$
+^pypy/translator/cli/src/main\.exe$
+^lib_pypy/ctypes_config_cache/_.+_cache\.py$
+^lib_pypy/ctypes_config_cache/_.+_.+_\.py$
+^pypy/translator/cli/query-descriptions$
+^pypy/doc/discussion/.+\.html$
+^include/.+\.h$
+^include/.+\.inl$
+^pypy/doc/_build/.*$
+^pypy/doc/config/.+\.html$
+^pypy/doc/config/style\.css$
+^pypy/doc/jit/.+\.html$
+^pypy/doc/jit/style\.css$
+^pypy/doc/image/lattice1\.png$
+^pypy/doc/image/lattice2\.png$
+^pypy/doc/image/lattice3\.png$
+^pypy/doc/image/stackless_informal\.png$
+^pypy/doc/image/parsing_example.+\.png$
+^pypy/module/test_lib_pypy/ctypes_tests/_ctypes_test\.o$
+^compiled
+^.git/
+^release/
From noreply at buildbot.pypy.org Sat Apr 14 10:01:31 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 10:01:31 +0200 (CEST)
Subject: [pypy-commit] pypy stdlib-unification: merge from vendor/stdlib
Message-ID: <20120414080131.6F36382F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: stdlib-unification
Changeset: r54354:169138063d63
Date: 2012-04-14 09:59 +0200
http://bitbucket.org/pypy/pypy/changeset/169138063d63/
Log: merge from vendor/stdlib
From noreply at buildbot.pypy.org Sat Apr 14 11:12:50 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 11:12:50 +0200 (CEST)
Subject: [pypy-commit] pypy default: tool.clean_old_branches: dont
update/purge for every head
Message-ID: <20120414091250.D1B6582F4F@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch:
Changeset: r54356:ffcf6c713e0b
Date: 2012-04-14 11:12 +0200
http://bitbucket.org/pypy/pypy/changeset/ffcf6c713e0b/
Log: tool.clean_old_branches: dont update/purge for every head
diff --git a/pypy/tool/clean_old_branches.py b/pypy/tool/clean_old_branches.py
--- a/pypy/tool/clean_old_branches.py
+++ b/pypy/tool/clean_old_branches.py
@@ -54,11 +54,13 @@
print '*** error %r' % (err,)
sys.exit(1)
+print '*** switching to closed branches *** '
+do("hg up --clean closed-branches")
+do("hg --config extensions.purge= purge --all")
+
for head, branch in closed_heads:
print
print '***** %s ***** %s *****' % (branch, head)
- do("hg up --clean closed-branches")
- do("hg --config extensions.purge= purge --all")
do("hg debugsetparents closed-branches %s" % head)
do("hg ci -m'Merge closed head %s on branch %s'" % (head, branch))
From noreply at buildbot.pypy.org Sat Apr 14 11:12:49 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 11:12:49 +0200 (CEST)
Subject: [pypy-commit] pypy default: tool.clean_old_branches: use
debugsetparents to avoid merge+rm
Message-ID: <20120414091249.8C55782F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch:
Changeset: r54355:7d2a37b843fc
Date: 2012-04-14 11:12 +0200
http://bitbucket.org/pypy/pypy/changeset/7d2a37b843fc/
Log: tool.clean_old_branches: use debugsetparents to avoid merge+rm
diff --git a/pypy/tool/clean_old_branches.py b/pypy/tool/clean_old_branches.py
--- a/pypy/tool/clean_old_branches.py
+++ b/pypy/tool/clean_old_branches.py
@@ -4,30 +4,28 @@
called 'closed-branch'. It reduces the number of heads.
"""
-import os, sys
+import os
+import sys
+import commands
-if not os.listdir('.hg'):
+if not os.path.isdir('.hg'):
print 'Must run this script from the top-level directory.'
sys.exit(1)
-def heads(args):
- g = os.popen(r"hg heads --topo %s --template '{node|short}:{branches}\n'"
- % args, 'r')
- result = g.read()
- g.close()
+def heads():
+ result = commands.getoutput(
+ "hg heads --topo --closed --template '{node|short}:{branches}:{extras}\n'")
result = result.splitlines(False)
+ result = [s.split(':', 2) for s in result]
for line in result:
- if len(line.split(':', 1)) != 2:
+ if len(line) != 3:
raise ValueError("'result' contains: %r" % line)
- result = [s.split(':', 1) for s in result]
- result = [(head, branch) for (head, branch) in result
- if branch not in ['', 'closed-branches']]
+ result = [(head, branch) for (head, branch, extra) in result
+ if branch not in ['', 'closed-branches'] and 'close' in extra]
return result
-all_heads = heads("--closed")
-opened_heads = heads("")
-closed_heads = [s for s in all_heads if s not in opened_heads]
+closed_heads = heads()
if not closed_heads:
print >> sys.stderr, 'no dangling closed heads.'
@@ -61,11 +59,7 @@
print '***** %s ***** %s *****' % (branch, head)
do("hg up --clean closed-branches")
do("hg --config extensions.purge= purge --all")
- do("hg merge -y %s" % head)
- for fn in os.listdir('.'):
- if fn.lower() != '.hg':
- do("rm -fr -- '%s'" % fn)
- do("hg rm --after -- '%s' || true" % fn)
+ do("hg debugsetparents closed-branches %s" % head)
do("hg ci -m'Merge closed head %s on branch %s'" % (head, branch))
print
From noreply at buildbot.pypy.org Sat Apr 14 11:26:33 2012
From: noreply at buildbot.pypy.org (RonnyPfannschmidt)
Date: Sat, 14 Apr 2012 11:26:33 +0200 (CEST)
Subject: [pypy-commit] pypy vendor/stdlib: add the 3.2.3 version of the
stdlib in lib-python/3.2
Message-ID: <20120414092633.B40BB82F4E@wyvern.cs.uni-duesseldorf.de>
Author: Ronny Pfannschmidt
Branch: vendor/stdlib
Changeset: r54357:359343b9ac0e
Date: 2012-04-14 11:24 +0200
http://bitbucket.org/pypy/pypy/changeset/359343b9ac0e/
Log: add the 3.2.3 version of the stdlib in lib-python/3.2
diff too long, truncating to 10000 out of 558051 lines
diff --git a/lib-python/3.2/__future__.py b/lib-python/3.2/__future__.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/__future__.py
@@ -0,0 +1,134 @@
+"""Record of phased-in incompatible language changes.
+
+Each line is of the form:
+
+ FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease ","
+ CompilerFlag ")"
+
+where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples
+of the same form as sys.version_info:
+
+ (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int
+ PY_MINOR_VERSION, # the 1; an int
+ PY_MICRO_VERSION, # the 0; an int
+ PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string
+ PY_RELEASE_SERIAL # the 3; an int
+ )
+
+OptionalRelease records the first release in which
+
+ from __future__ import FeatureName
+
+was accepted.
+
+In the case of MandatoryReleases that have not yet occurred,
+MandatoryRelease predicts the release in which the feature will become part
+of the language.
+
+Else MandatoryRelease records when the feature became part of the language;
+in releases at or after that, modules no longer need
+
+ from __future__ import FeatureName
+
+to use the feature in question, but may continue to use such imports.
+
+MandatoryRelease may also be None, meaning that a planned feature got
+dropped.
+
+Instances of class _Feature have two corresponding methods,
+.getOptionalRelease() and .getMandatoryRelease().
+
+CompilerFlag is the (bitfield) flag that should be passed in the fourth
+argument to the builtin function compile() to enable the feature in
+dynamically compiled code. This flag is stored in the .compiler_flag
+attribute on _Future instances. These values must match the appropriate
+#defines of CO_xxx flags in Include/compile.h.
+
+No feature line is ever to be deleted from this file.
+"""
+
+all_feature_names = [
+ "nested_scopes",
+ "generators",
+ "division",
+ "absolute_import",
+ "with_statement",
+ "print_function",
+ "unicode_literals",
+ "barry_as_FLUFL",
+]
+
+__all__ = ["all_feature_names"] + all_feature_names
+
+# The CO_xxx symbols are defined here under the same names used by
+# compile.h, so that an editor search will find them here. However,
+# they're not exported in __all__, because they don't really belong to
+# this module.
+CO_NESTED = 0x0010 # nested_scopes
+CO_GENERATOR_ALLOWED = 0 # generators (obsolete, was 0x1000)
+CO_FUTURE_DIVISION = 0x2000 # division
+CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default
+CO_FUTURE_WITH_STATEMENT = 0x8000 # with statement
+CO_FUTURE_PRINT_FUNCTION = 0x10000 # print function
+CO_FUTURE_UNICODE_LITERALS = 0x20000 # unicode string literals
+CO_FUTURE_BARRY_AS_BDFL = 0x40000
+
+class _Feature:
+ def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
+ self.optional = optionalRelease
+ self.mandatory = mandatoryRelease
+ self.compiler_flag = compiler_flag
+
+ def getOptionalRelease(self):
+ """Return first release in which this feature was recognized.
+
+ This is a 5-tuple, of the same form as sys.version_info.
+ """
+
+ return self.optional
+
+ def getMandatoryRelease(self):
+ """Return release in which this feature will become mandatory.
+
+ This is a 5-tuple, of the same form as sys.version_info, or, if
+ the feature was dropped, is None.
+ """
+
+ return self.mandatory
+
+ def __repr__(self):
+ return "_Feature" + repr((self.optional,
+ self.mandatory,
+ self.compiler_flag))
+
+nested_scopes = _Feature((2, 1, 0, "beta", 1),
+ (2, 2, 0, "alpha", 0),
+ CO_NESTED)
+
+generators = _Feature((2, 2, 0, "alpha", 1),
+ (2, 3, 0, "final", 0),
+ CO_GENERATOR_ALLOWED)
+
+division = _Feature((2, 2, 0, "alpha", 2),
+ (3, 0, 0, "alpha", 0),
+ CO_FUTURE_DIVISION)
+
+absolute_import = _Feature((2, 5, 0, "alpha", 1),
+ (2, 7, 0, "alpha", 0),
+ CO_FUTURE_ABSOLUTE_IMPORT)
+
+with_statement = _Feature((2, 5, 0, "alpha", 1),
+ (2, 6, 0, "alpha", 0),
+ CO_FUTURE_WITH_STATEMENT)
+
+print_function = _Feature((2, 6, 0, "alpha", 2),
+ (3, 0, 0, "alpha", 0),
+ CO_FUTURE_PRINT_FUNCTION)
+
+unicode_literals = _Feature((2, 6, 0, "alpha", 2),
+ (3, 0, 0, "alpha", 0),
+ CO_FUTURE_UNICODE_LITERALS)
+
+barry_as_FLUFL = _Feature((3, 1, 0, "alpha", 2),
+ (3, 9, 0, "alpha", 0),
+ CO_FUTURE_BARRY_AS_BDFL)
diff --git a/lib-python/3.2/__phello__.foo.py b/lib-python/3.2/__phello__.foo.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/__phello__.foo.py
@@ -0,0 +1,1 @@
+# This file exists as a helper for the test.test_frozen module.
diff --git a/lib-python/3.2/_abcoll.py b/lib-python/3.2/_abcoll.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/_abcoll.py
@@ -0,0 +1,623 @@
+# Copyright 2007 Google, Inc. All Rights Reserved.
+# Licensed to PSF under a Contributor Agreement.
+
+"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
+
+DON'T USE THIS MODULE DIRECTLY! The classes here should be imported
+via collections; they are defined here only to alleviate certain
+bootstrapping issues. Unit tests are in test_collections.
+"""
+
+from abc import ABCMeta, abstractmethod
+import sys
+
+__all__ = ["Hashable", "Iterable", "Iterator",
+ "Sized", "Container", "Callable",
+ "Set", "MutableSet",
+ "Mapping", "MutableMapping",
+ "MappingView", "KeysView", "ItemsView", "ValuesView",
+ "Sequence", "MutableSequence",
+ "ByteString",
+ ]
+
+
+### collection related types which are not exposed through builtin ###
+## iterators ##
+bytes_iterator = type(iter(b''))
+bytearray_iterator = type(iter(bytearray()))
+#callable_iterator = ???
+dict_keyiterator = type(iter({}.keys()))
+dict_valueiterator = type(iter({}.values()))
+dict_itemiterator = type(iter({}.items()))
+list_iterator = type(iter([]))
+list_reverseiterator = type(iter(reversed([])))
+range_iterator = type(iter(range(0)))
+set_iterator = type(iter(set()))
+str_iterator = type(iter(""))
+tuple_iterator = type(iter(()))
+zip_iterator = type(iter(zip()))
+## views ##
+dict_keys = type({}.keys())
+dict_values = type({}.values())
+dict_items = type({}.items())
+## misc ##
+dict_proxy = type(type.__dict__)
+
+
+### ONE-TRICK PONIES ###
+
+class Hashable(metaclass=ABCMeta):
+
+ @abstractmethod
+ def __hash__(self):
+ return 0
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Hashable:
+ for B in C.__mro__:
+ if "__hash__" in B.__dict__:
+ if B.__dict__["__hash__"]:
+ return True
+ break
+ return NotImplemented
+
+
+class Iterable(metaclass=ABCMeta):
+
+ @abstractmethod
+ def __iter__(self):
+ while False:
+ yield None
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Iterable:
+ if any("__iter__" in B.__dict__ for B in C.__mro__):
+ return True
+ return NotImplemented
+
+
+class Iterator(Iterable):
+
+ @abstractmethod
+ def __next__(self):
+ raise StopIteration
+
+ def __iter__(self):
+ return self
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Iterator:
+ if (any("__next__" in B.__dict__ for B in C.__mro__) and
+ any("__iter__" in B.__dict__ for B in C.__mro__)):
+ return True
+ return NotImplemented
+
+Iterator.register(bytes_iterator)
+Iterator.register(bytearray_iterator)
+#Iterator.register(callable_iterator)
+Iterator.register(dict_keyiterator)
+Iterator.register(dict_valueiterator)
+Iterator.register(dict_itemiterator)
+Iterator.register(list_iterator)
+Iterator.register(list_reverseiterator)
+Iterator.register(range_iterator)
+Iterator.register(set_iterator)
+Iterator.register(str_iterator)
+Iterator.register(tuple_iterator)
+Iterator.register(zip_iterator)
+
+class Sized(metaclass=ABCMeta):
+
+ @abstractmethod
+ def __len__(self):
+ return 0
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Sized:
+ if any("__len__" in B.__dict__ for B in C.__mro__):
+ return True
+ return NotImplemented
+
+
+class Container(metaclass=ABCMeta):
+
+ @abstractmethod
+ def __contains__(self, x):
+ return False
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Container:
+ if any("__contains__" in B.__dict__ for B in C.__mro__):
+ return True
+ return NotImplemented
+
+
+class Callable(metaclass=ABCMeta):
+
+ @abstractmethod
+ def __call__(self, *args, **kwds):
+ return False
+
+ @classmethod
+ def __subclasshook__(cls, C):
+ if cls is Callable:
+ if any("__call__" in B.__dict__ for B in C.__mro__):
+ return True
+ return NotImplemented
+
+
+### SETS ###
+
+
+class Set(Sized, Iterable, Container):
+
+ """A set is a finite, iterable container.
+
+ This class provides concrete generic implementations of all
+ methods except for __contains__, __iter__ and __len__.
+
+ To override the comparisons (presumably for speed, as the
+ semantics are fixed), all you have to do is redefine __le__ and
+ then the other operations will automatically follow suit.
+ """
+
+ def __le__(self, other):
+ if not isinstance(other, Set):
+ return NotImplemented
+ if len(self) > len(other):
+ return False
+ for elem in self:
+ if elem not in other:
+ return False
+ return True
+
+ def __lt__(self, other):
+ if not isinstance(other, Set):
+ return NotImplemented
+ return len(self) < len(other) and self.__le__(other)
+
+ def __gt__(self, other):
+ if not isinstance(other, Set):
+ return NotImplemented
+ return other < self
+
+ def __ge__(self, other):
+ if not isinstance(other, Set):
+ return NotImplemented
+ return other <= self
+
+ def __eq__(self, other):
+ if not isinstance(other, Set):
+ return NotImplemented
+ return len(self) == len(other) and self.__le__(other)
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ @classmethod
+ def _from_iterable(cls, it):
+ '''Construct an instance of the class from any iterable input.
+
+ Must override this method if the class constructor signature
+ does not accept an iterable for an input.
+ '''
+ return cls(it)
+
+ def __and__(self, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ return self._from_iterable(value for value in other if value in self)
+
+ def isdisjoint(self, other):
+ for value in other:
+ if value in self:
+ return False
+ return True
+
+ def __or__(self, other):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ chain = (e for s in (self, other) for e in s)
+ return self._from_iterable(chain)
+
+ def __sub__(self, other):
+ if not isinstance(other, Set):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ other = self._from_iterable(other)
+ return self._from_iterable(value for value in self
+ if value not in other)
+
+ def __xor__(self, other):
+ if not isinstance(other, Set):
+ if not isinstance(other, Iterable):
+ return NotImplemented
+ other = self._from_iterable(other)
+ return (self - other) | (other - self)
+
+ def _hash(self):
+ """Compute the hash value of a set.
+
+ Note that we don't define __hash__: not all sets are hashable.
+ But if you define a hashable set type, its __hash__ should
+ call this function.
+
+ This must be compatible __eq__.
+
+ All sets ought to compare equal if they contain the same
+ elements, regardless of how they are implemented, and
+ regardless of the order of the elements; so there's not much
+ freedom for __eq__ or __hash__. We match the algorithm used
+ by the built-in frozenset type.
+ """
+ MAX = sys.maxsize
+ MASK = 2 * MAX + 1
+ n = len(self)
+ h = 1927868237 * (n + 1)
+ h &= MASK
+ for x in self:
+ hx = hash(x)
+ h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
+ h &= MASK
+ h = h * 69069 + 907133923
+ h &= MASK
+ if h > MAX:
+ h -= MASK + 1
+ if h == -1:
+ h = 590923713
+ return h
+
+Set.register(frozenset)
+
+
+class MutableSet(Set):
+
+ @abstractmethod
+ def add(self, value):
+ """Add an element."""
+ raise NotImplementedError
+
+ @abstractmethod
+ def discard(self, value):
+ """Remove an element. Do not raise an exception if absent."""
+ raise NotImplementedError
+
+ def remove(self, value):
+ """Remove an element. If not a member, raise a KeyError."""
+ if value not in self:
+ raise KeyError(value)
+ self.discard(value)
+
+ def pop(self):
+ """Return the popped value. Raise KeyError if empty."""
+ it = iter(self)
+ try:
+ value = next(it)
+ except StopIteration:
+ raise KeyError
+ self.discard(value)
+ return value
+
+ def clear(self):
+ """This is slow (creates N new iterators!) but effective."""
+ try:
+ while True:
+ self.pop()
+ except KeyError:
+ pass
+
+ def __ior__(self, it):
+ for value in it:
+ self.add(value)
+ return self
+
+ def __iand__(self, it):
+ for value in (self - it):
+ self.discard(value)
+ return self
+
+ def __ixor__(self, it):
+ if it is self:
+ self.clear()
+ else:
+ if not isinstance(it, Set):
+ it = self._from_iterable(it)
+ for value in it:
+ if value in self:
+ self.discard(value)
+ else:
+ self.add(value)
+ return self
+
+ def __isub__(self, it):
+ if it is self:
+ self.clear()
+ else:
+ for value in it:
+ self.discard(value)
+ return self
+
+MutableSet.register(set)
+
+
+### MAPPINGS ###
+
+
+class Mapping(Sized, Iterable, Container):
+
+ @abstractmethod
+ def __getitem__(self, key):
+ raise KeyError
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
+
+ def __contains__(self, key):
+ try:
+ self[key]
+ except KeyError:
+ return False
+ else:
+ return True
+
+ def keys(self):
+ return KeysView(self)
+
+ def items(self):
+ return ItemsView(self)
+
+ def values(self):
+ return ValuesView(self)
+
+ def __eq__(self, other):
+ if not isinstance(other, Mapping):
+ return NotImplemented
+ return dict(self.items()) == dict(other.items())
+
+ def __ne__(self, other):
+ return not (self == other)
+
+
+class MappingView(Sized):
+
+ def __init__(self, mapping):
+ self._mapping = mapping
+
+ def __len__(self):
+ return len(self._mapping)
+
+ def __repr__(self):
+ return '{0.__class__.__name__}({0._mapping!r})'.format(self)
+
+
+class KeysView(MappingView, Set):
+
+ @classmethod
+ def _from_iterable(self, it):
+ return set(it)
+
+ def __contains__(self, key):
+ return key in self._mapping
+
+ def __iter__(self):
+ for key in self._mapping:
+ yield key
+
+KeysView.register(dict_keys)
+
+
+class ItemsView(MappingView, Set):
+
+ @classmethod
+ def _from_iterable(self, it):
+ return set(it)
+
+ def __contains__(self, item):
+ key, value = item
+ try:
+ v = self._mapping[key]
+ except KeyError:
+ return False
+ else:
+ return v == value
+
+ def __iter__(self):
+ for key in self._mapping:
+ yield (key, self._mapping[key])
+
+ItemsView.register(dict_items)
+
+
+class ValuesView(MappingView):
+
+ def __contains__(self, value):
+ for key in self._mapping:
+ if value == self._mapping[key]:
+ return True
+ return False
+
+ def __iter__(self):
+ for key in self._mapping:
+ yield self._mapping[key]
+
+ValuesView.register(dict_values)
+
+
+class MutableMapping(Mapping):
+
+ @abstractmethod
+ def __setitem__(self, key, value):
+ raise KeyError
+
+ @abstractmethod
+ def __delitem__(self, key):
+ raise KeyError
+
+ __marker = object()
+
+ def pop(self, key, default=__marker):
+ try:
+ value = self[key]
+ except KeyError:
+ if default is self.__marker:
+ raise
+ return default
+ else:
+ del self[key]
+ return value
+
+ def popitem(self):
+ try:
+ key = next(iter(self))
+ except StopIteration:
+ raise KeyError
+ value = self[key]
+ del self[key]
+ return key, value
+
+ def clear(self):
+ try:
+ while True:
+ self.popitem()
+ except KeyError:
+ pass
+
+ def update(*args, **kwds):
+ if len(args) > 2:
+ raise TypeError("update() takes at most 2 positional "
+ "arguments ({} given)".format(len(args)))
+ elif not args:
+ raise TypeError("update() takes at least 1 argument (0 given)")
+ self = args[0]
+ other = args[1] if len(args) >= 2 else ()
+
+ if isinstance(other, Mapping):
+ for key in other:
+ self[key] = other[key]
+ elif hasattr(other, "keys"):
+ for key in other.keys():
+ self[key] = other[key]
+ else:
+ for key, value in other:
+ self[key] = value
+ for key, value in kwds.items():
+ self[key] = value
+
+ def setdefault(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ self[key] = default
+ return default
+
+MutableMapping.register(dict)
+
+
+### SEQUENCES ###
+
+
+class Sequence(Sized, Iterable, Container):
+
+ """All the operations on a read-only sequence.
+
+ Concrete subclasses must override __new__ or __init__,
+ __getitem__, and __len__.
+ """
+
+ @abstractmethod
+ def __getitem__(self, index):
+ raise IndexError
+
+ def __iter__(self):
+ i = 0
+ try:
+ while True:
+ v = self[i]
+ yield v
+ i += 1
+ except IndexError:
+ return
+
+ def __contains__(self, value):
+ for v in self:
+ if v == value:
+ return True
+ return False
+
+ def __reversed__(self):
+ for i in reversed(range(len(self))):
+ yield self[i]
+
+ def index(self, value):
+ for i, v in enumerate(self):
+ if v == value:
+ return i
+ raise ValueError
+
+ def count(self, value):
+ return sum(1 for v in self if v == value)
+
+Sequence.register(tuple)
+Sequence.register(str)
+Sequence.register(range)
+
+
+class ByteString(Sequence):
+
+ """This unifies bytes and bytearray.
+
+ XXX Should add all their methods.
+ """
+
+ByteString.register(bytes)
+ByteString.register(bytearray)
+
+
+class MutableSequence(Sequence):
+
+ @abstractmethod
+ def __setitem__(self, index, value):
+ raise IndexError
+
+ @abstractmethod
+ def __delitem__(self, index):
+ raise IndexError
+
+ @abstractmethod
+ def insert(self, index, value):
+ raise IndexError
+
+ def append(self, value):
+ self.insert(len(self), value)
+
+ def reverse(self):
+ n = len(self)
+ for i in range(n//2):
+ self[i], self[n-i-1] = self[n-i-1], self[i]
+
+ def extend(self, values):
+ for v in values:
+ self.append(v)
+
+ def pop(self, index=-1):
+ v = self[index]
+ del self[index]
+ return v
+
+ def remove(self, value):
+ del self[self.index(value)]
+
+ def __iadd__(self, values):
+ self.extend(values)
+ return self
+
+MutableSequence.register(list)
+MutableSequence.register(bytearray) # Multiply inheriting, see ByteString
diff --git a/lib-python/3.2/_compat_pickle.py b/lib-python/3.2/_compat_pickle.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/_compat_pickle.py
@@ -0,0 +1,81 @@
+# This module is used to map the old Python 2 names to the new names used in
+# Python 3 for the pickle module. This needed to make pickle streams
+# generated with Python 2 loadable by Python 3.
+
+# This is a copy of lib2to3.fixes.fix_imports.MAPPING. We cannot import
+# lib2to3 and use the mapping defined there, because lib2to3 uses pickle.
+# Thus, this could cause the module to be imported recursively.
+IMPORT_MAPPING = {
+ 'StringIO': 'io',
+ 'cStringIO': 'io',
+ 'cPickle': 'pickle',
+ '__builtin__' : 'builtins',
+ 'copy_reg': 'copyreg',
+ 'Queue': 'queue',
+ 'SocketServer': 'socketserver',
+ 'ConfigParser': 'configparser',
+ 'repr': 'reprlib',
+ 'FileDialog': 'tkinter.filedialog',
+ 'tkFileDialog': 'tkinter.filedialog',
+ 'SimpleDialog': 'tkinter.simpledialog',
+ 'tkSimpleDialog': 'tkinter.simpledialog',
+ 'tkColorChooser': 'tkinter.colorchooser',
+ 'tkCommonDialog': 'tkinter.commondialog',
+ 'Dialog': 'tkinter.dialog',
+ 'Tkdnd': 'tkinter.dnd',
+ 'tkFont': 'tkinter.font',
+ 'tkMessageBox': 'tkinter.messagebox',
+ 'ScrolledText': 'tkinter.scrolledtext',
+ 'Tkconstants': 'tkinter.constants',
+ 'Tix': 'tkinter.tix',
+ 'ttk': 'tkinter.ttk',
+ 'Tkinter': 'tkinter',
+ 'markupbase': '_markupbase',
+ '_winreg': 'winreg',
+ 'thread': '_thread',
+ 'dummy_thread': '_dummy_thread',
+ 'dbhash': 'dbm.bsd',
+ 'dumbdbm': 'dbm.dumb',
+ 'dbm': 'dbm.ndbm',
+ 'gdbm': 'dbm.gnu',
+ 'xmlrpclib': 'xmlrpc.client',
+ 'DocXMLRPCServer': 'xmlrpc.server',
+ 'SimpleXMLRPCServer': 'xmlrpc.server',
+ 'httplib': 'http.client',
+ 'htmlentitydefs' : 'html.entities',
+ 'HTMLParser' : 'html.parser',
+ 'Cookie': 'http.cookies',
+ 'cookielib': 'http.cookiejar',
+ 'BaseHTTPServer': 'http.server',
+ 'SimpleHTTPServer': 'http.server',
+ 'CGIHTTPServer': 'http.server',
+ 'test.test_support': 'test.support',
+ 'commands': 'subprocess',
+ 'UserString' : 'collections',
+ 'UserList' : 'collections',
+ 'urlparse' : 'urllib.parse',
+ 'robotparser' : 'urllib.robotparser',
+ 'whichdb': 'dbm',
+ 'anydbm': 'dbm'
+}
+
+
+# This contains rename rules that are easy to handle. We ignore the more
+# complex stuff (e.g. mapping the names in the urllib and types modules).
+# These rules should be run before import names are fixed.
+NAME_MAPPING = {
+ ('__builtin__', 'xrange'): ('builtins', 'range'),
+ ('__builtin__', 'reduce'): ('functools', 'reduce'),
+ ('__builtin__', 'intern'): ('sys', 'intern'),
+ ('__builtin__', 'unichr'): ('builtins', 'chr'),
+ ('__builtin__', 'basestring'): ('builtins', 'str'),
+ ('__builtin__', 'long'): ('builtins', 'int'),
+ ('itertools', 'izip'): ('builtins', 'zip'),
+ ('itertools', 'imap'): ('builtins', 'map'),
+ ('itertools', 'ifilter'): ('builtins', 'filter'),
+ ('itertools', 'ifilterfalse'): ('itertools', 'filterfalse'),
+}
+
+# Same, but for 3.x to 2.x
+REVERSE_IMPORT_MAPPING = dict((v, k) for (k, v) in IMPORT_MAPPING.items())
+REVERSE_NAME_MAPPING = dict((v, k) for (k, v) in NAME_MAPPING.items())
diff --git a/lib-python/3.2/_dummy_thread.py b/lib-python/3.2/_dummy_thread.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/_dummy_thread.py
@@ -0,0 +1,155 @@
+"""Drop-in replacement for the thread module.
+
+Meant to be used as a brain-dead substitute so that threaded code does
+not need to be rewritten for when the thread module is not present.
+
+Suggested usage is::
+
+ try:
+ import _thread
+ except ImportError:
+ import _dummy_thread as _thread
+
+"""
+# Exports only things specified by thread documentation;
+# skipping obsolete synonyms allocate(), start_new(), exit_thread().
+__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
+ 'interrupt_main', 'LockType']
+
+# A dummy value
+TIMEOUT_MAX = 2**31
+
+# NOTE: this module can be imported early in the extension building process,
+# and so top level imports of other modules should be avoided. Instead, all
+# imports are done when needed on a function-by-function basis. Since threads
+# are disabled, the import lock should not be an issue anyway (??).
+
+class error(Exception):
+ """Dummy implementation of _thread.error."""
+
+ def __init__(self, *args):
+ self.args = args
+
+def start_new_thread(function, args, kwargs={}):
+ """Dummy implementation of _thread.start_new_thread().
+
+ Compatibility is maintained by making sure that ``args`` is a
+ tuple and ``kwargs`` is a dictionary. If an exception is raised
+ and it is SystemExit (which can be done by _thread.exit()) it is
+ caught and nothing is done; all other exceptions are printed out
+ by using traceback.print_exc().
+
+ If the executed function calls interrupt_main the KeyboardInterrupt will be
+ raised when the function returns.
+
+ """
+ if type(args) != type(tuple()):
+ raise TypeError("2nd arg must be a tuple")
+ if type(kwargs) != type(dict()):
+ raise TypeError("3rd arg must be a dict")
+ global _main
+ _main = False
+ try:
+ function(*args, **kwargs)
+ except SystemExit:
+ pass
+ except:
+ import traceback
+ traceback.print_exc()
+ _main = True
+ global _interrupt
+ if _interrupt:
+ _interrupt = False
+ raise KeyboardInterrupt
+
+def exit():
+ """Dummy implementation of _thread.exit()."""
+ raise SystemExit
+
+def get_ident():
+ """Dummy implementation of _thread.get_ident().
+
+ Since this module should only be used when _threadmodule is not
+ available, it is safe to assume that the current process is the
+ only thread. Thus a constant can be safely returned.
+ """
+ return -1
+
+def allocate_lock():
+ """Dummy implementation of _thread.allocate_lock()."""
+ return LockType()
+
+def stack_size(size=None):
+ """Dummy implementation of _thread.stack_size()."""
+ if size is not None:
+ raise error("setting thread stack size not supported")
+ return 0
+
+class LockType(object):
+ """Class implementing dummy implementation of _thread.LockType.
+
+ Compatibility is maintained by maintaining self.locked_status
+ which is a boolean that stores the state of the lock. Pickling of
+ the lock, though, should not be done since if the _thread module is
+ then used with an unpickled ``lock()`` from here problems could
+ occur from this class not having atomic methods.
+
+ """
+
+ def __init__(self):
+ self.locked_status = False
+
+ def acquire(self, waitflag=None, timeout=-1):
+ """Dummy implementation of acquire().
+
+ For blocking calls, self.locked_status is automatically set to
+ True and returned appropriately based on value of
+ ``waitflag``. If it is non-blocking, then the value is
+ actually checked and not set if it is already acquired. This
+ is all done so that threading.Condition's assert statements
+ aren't triggered and throw a little fit.
+
+ """
+ if waitflag is None or waitflag:
+ self.locked_status = True
+ return True
+ else:
+ if not self.locked_status:
+ self.locked_status = True
+ return True
+ else:
+ if timeout > 0:
+ import time
+ time.sleep(timeout)
+ return False
+
+ __enter__ = acquire
+
+ def __exit__(self, typ, val, tb):
+ self.release()
+
+ def release(self):
+ """Release the dummy lock."""
+ # XXX Perhaps shouldn't actually bother to test? Could lead
+ # to problems for complex, threaded code.
+ if not self.locked_status:
+ raise error
+ self.locked_status = False
+ return True
+
+ def locked(self):
+ return self.locked_status
+
+# Used to signal that interrupt_main was called in a "thread"
+_interrupt = False
+# True when not executing in a "thread"
+_main = True
+
+def interrupt_main():
+ """Set _interrupt flag to True to have start_new_thread raise
+ KeyboardInterrupt upon exiting."""
+ if _main:
+ raise KeyboardInterrupt
+ else:
+ global _interrupt
+ _interrupt = True
diff --git a/lib-python/3.2/_markupbase.py b/lib-python/3.2/_markupbase.py
new file mode 100644
--- /dev/null
+++ b/lib-python/3.2/_markupbase.py
@@ -0,0 +1,395 @@
+"""Shared support for scanning document type declarations in HTML and XHTML.
+
+This module is used as a foundation for the html.parser module. It has no
+documented public API and should not be used directly.
+
+"""
+
+import re
+
+_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
+_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
+_commentclose = re.compile(r'--\s*>')
+_markedsectionclose = re.compile(r']\s*]\s*>')
+
+# An analysis of the MS-Word extensions is available at
+# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
+
+_msmarkedsectionclose = re.compile(r']\s*>')
+
+del re
+
+
+class ParserBase:
+ """Parser base class which provides some common support methods used
+ by the SGML/HTML and XHTML parsers."""
+
+ def __init__(self):
+ if self.__class__ is ParserBase:
+ raise RuntimeError(
+ "_markupbase.ParserBase must be subclassed")
+
+ def error(self, message):
+ raise NotImplementedError(
+ "subclasses of ParserBase must override error()")
+
+ def reset(self):
+ self.lineno = 1
+ self.offset = 0
+
+ def getpos(self):
+ """Return current line number and offset."""
+ return self.lineno, self.offset
+
+ # Internal -- update line number and offset. This should be
+ # called for each piece of data exactly once, in order -- in other
+ # words the concatenation of all the input strings to this
+ # function should be exactly the entire input.
+ def updatepos(self, i, j):
+ if i >= j:
+ return j
+ rawdata = self.rawdata
+ nlines = rawdata.count("\n", i, j)
+ if nlines:
+ self.lineno = self.lineno + nlines
+ pos = rawdata.rindex("\n", i, j) # Should not fail
+ self.offset = j-(pos+1)
+ else:
+ self.offset = self.offset + j-i
+ return j
+
+ _decl_otherchars = ''
+
+ # Internal -- parse declaration (for use by subclasses).
+ def parse_declaration(self, i):
+ # This is some sort of declaration; in "HTML as
+ # deployed," this should only be the document type
+ # declaration ("").
+ # ISO 8879:1986, however, has more complex
+ # declaration syntax for elements in , including:
+ # --comment--
+ # [marked section]
+ # name in the following list: ENTITY, DOCTYPE, ELEMENT,
+ # ATTLIST, NOTATION, SHORTREF, USEMAP,
+ # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
+ rawdata = self.rawdata
+ j = i + 2
+ assert rawdata[i:j] == "":
+ # the empty comment
+ return j + 1
+ if rawdata[j:j+1] in ("-", ""):
+ # Start of comment followed by buffer boundary,
+ # or just a buffer boundary.
+ return -1
+ # A simple, practical version could look like: ((name|stringlit) S*) + '>'
+ n = len(rawdata)
+ if rawdata[j:j+2] == '--': #comment
+ # Locate --.*-- as the body of the comment
+ return self.parse_comment(i)
+ elif rawdata[j] == '[': #marked section
+ # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
+ # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
+ # Note that this is extended by Microsoft Office "Save as Web" function
+ # to include [if...] and [endif].
+ return self.parse_marked_section(i)
+ else: #all other declaration elements
+ decltype, j = self._scan_name(j, i)
+ if j < 0:
+ return j
+ if decltype == "doctype":
+ self._decl_otherchars = ''
+ while j < n:
+ c = rawdata[j]
+ if c == ">":
+ # end of declaration syntax
+ data = rawdata[i+2:j]
+ if decltype == "doctype":
+ self.handle_decl(data)
+ else:
+ # According to the HTML5 specs sections "8.2.4.44 Bogus
+ # comment state" and "8.2.4.45 Markup declaration open
+ # state", a comment token should be emitted.
+ # Calling unknown_decl provides more flexibility though.
+ self.unknown_decl(data)
+ return j + 1
+ if c in "\"'":
+ m = _declstringlit_match(rawdata, j)
+ if not m:
+ return -1 # incomplete
+ j = m.end()
+ elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
+ name, j = self._scan_name(j, i)
+ elif c in self._decl_otherchars:
+ j = j + 1
+ elif c == "[":
+ # this could be handled in a separate doctype parser
+ if decltype == "doctype":
+ j = self._parse_doctype_subset(j + 1, i)
+ elif decltype in {"attlist", "linktype", "link", "element"}:
+ # must tolerate []'d groups in a content model in an element declaration
+ # also in data attribute specifications of attlist declaration
+ # also link type declaration subsets in linktype declarations
+ # also link attribute specification lists in link declarations
+ self.error("unsupported '[' char in %s declaration" % decltype)
+ else:
+ self.error("unexpected '[' char in declaration")
+ else:
+ self.error(
+ "unexpected %r char in declaration" % rawdata[j])
+ if j < 0:
+ return j
+ return -1 # incomplete
+
+ # Internal -- parse a marked section
+ # Override this to handle MS-word extension syntax content
+ def parse_marked_section(self, i, report=1):
+ rawdata= self.rawdata
+ assert rawdata[i:i+3] == ' ending
+ match= _markedsectionclose.search(rawdata, i+3)
+ elif sectName in {"if", "else", "endif"}:
+ # look for MS Office ]> ending
+ match= _msmarkedsectionclose.search(rawdata, i+3)
+ else:
+ self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
+ if not match:
+ return -1
+ if report:
+ j = match.start(0)
+ self.unknown_decl(rawdata[i+3: j])
+ return match.end(0)
+
+ # Internal -- parse comment, return length or -1 if not terminated
+ def parse_comment(self, i, report=1):
+ rawdata = self.rawdata
+ if rawdata[i:i+4] != ' LOAD_CONST None
def f(x):
- None
+ y = None
return x
asm = disassemble(f)
for elem in ('LOAD_GLOBAL',):
@@ -67,10 +67,13 @@
self.assertIn(elem, asm)
def test_pack_unpack(self):
+ # On PyPy, "a, b = ..." is even more optimized, by removing
+ # the ROT_TWO. But the ROT_TWO is not removed if assigning
+ # to more complex expressions, so check that.
for line, elem in (
('a, = a,', 'LOAD_CONST',),
- ('a, b = a, b', 'ROT_TWO',),
- ('a, b, c = a, b, c', 'ROT_THREE',),
+ ('a[1], b = a, b', 'ROT_TWO',),
+ ('a, b[2], c = a, b, c', 'ROT_THREE',),
):
asm = dis_single(line)
self.assertIn(elem, asm)
@@ -78,6 +81,8 @@
self.assertNotIn('UNPACK_TUPLE', asm)
def test_folding_of_tuples_of_constants(self):
+ # On CPython, "a,b,c=1,2,3" turns into "a,b,c="
+ # but on PyPy, it turns into "a=1;b=2;c=3".
for line, elem in (
('a = 1,2,3', '((1, 2, 3))'),
('("a","b","c")', "(('a', 'b', 'c'))"),
@@ -86,7 +91,8 @@
('((1, 2), 3, 4)', '(((1, 2), 3, 4))'),
):
asm = dis_single(line)
- self.assertIn(elem, asm)
+ self.assert_(elem in asm or (
+ line == 'a,b,c = 1,2,3' and 'UNPACK_TUPLE' not in asm))
self.assertNotIn('BUILD_TUPLE', asm)
# Bug 1053819: Tuple of constants misidentified when presented with:
diff --git a/lib-python/2.7/test/test_pprint.py b/lib-python/2.7/test/test_pprint.py
--- a/lib-python/2.7/test/test_pprint.py
+++ b/lib-python/2.7/test/test_pprint.py
@@ -233,7 +233,16 @@
frozenset([0, 2]),
frozenset([0, 1])])}"""
cube = test.test_set.cube(3)
- self.assertEqual(pprint.pformat(cube), cube_repr_tgt)
+ # XXX issues of dictionary order, and for the case below,
+ # order of items in the frozenset([...]) representation.
+ # Whether we get precisely cube_repr_tgt or not is open
+ # to implementation-dependent choices (this test probably
+ # fails horribly in CPython if we tweak the dict order too).
+ got = pprint.pformat(cube)
+ if test.test_support.check_impl_detail(cpython=True):
+ self.assertEqual(got, cube_repr_tgt)
+ else:
+ self.assertEqual(eval(got), cube)
cubo_repr_tgt = """\
{frozenset([frozenset([0, 2]), frozenset([0])]): frozenset([frozenset([frozenset([0,
2]),
@@ -393,7 +402,11 @@
2])])])}"""
cubo = test.test_set.linegraph(cube)
- self.assertEqual(pprint.pformat(cubo), cubo_repr_tgt)
+ got = pprint.pformat(cubo)
+ if test.test_support.check_impl_detail(cpython=True):
+ self.assertEqual(got, cubo_repr_tgt)
+ else:
+ self.assertEqual(eval(got), cubo)
def test_depth(self):
nested_tuple = (1, (2, (3, (4, (5, 6)))))
diff --git a/lib-python/2.7/test/test_pydoc.py b/lib-python/2.7/test/test_pydoc.py
--- a/lib-python/2.7/test/test_pydoc.py
+++ b/lib-python/2.7/test/test_pydoc.py
@@ -267,8 +267,8 @@
testpairs = (
('i_am_not_here', 'i_am_not_here'),
('test.i_am_not_here_either', 'i_am_not_here_either'),
- ('test.i_am_not_here.neither_am_i', 'i_am_not_here.neither_am_i'),
- ('i_am_not_here.{}'.format(modname), 'i_am_not_here.{}'.format(modname)),
+ ('test.i_am_not_here.neither_am_i', 'i_am_not_here'),
+ ('i_am_not_here.{}'.format(modname), 'i_am_not_here'),
('test.{}'.format(modname), modname),
)
@@ -292,8 +292,8 @@
result = run_pydoc(modname)
finally:
forget(modname)
- expected = badimport_pattern % (modname, expectedinmsg)
- self.assertEqual(expected, result)
+ expected = badimport_pattern % (modname, '(.+\\.)?' + expectedinmsg + '(\\..+)?$')
+ self.assertTrue(re.match(expected, result))
def test_input_strip(self):
missing_module = " test.i_am_not_here "
diff --git a/lib-python/2.7/test/test_pyexpat.py b/lib-python/2.7/test/test_pyexpat.py
--- a/lib-python/2.7/test/test_pyexpat.py
+++ b/lib-python/2.7/test/test_pyexpat.py
@@ -570,6 +570,9 @@
self.assertEqual(self.n, 4)
class MalformedInputText(unittest.TestCase):
+ # CPython seems to ship its own version of expat, they fixed it on this commit :
+ # http://svn.python.org/view?revision=74429&view=revision
+ @unittest.skipIf(sys.platform == "darwin", "Expat is broken on Mac OS X 10.6.6")
def test1(self):
xml = "\0\r\n"
parser = expat.ParserCreate()
@@ -579,6 +582,7 @@
except expat.ExpatError as e:
self.assertEqual(str(e), 'unclosed token: line 2, column 0')
+ @unittest.skipIf(sys.platform == "darwin", "Expat is broken on Mac OS X 10.6.6")
def test2(self):
xml = "\r\n"
parser = expat.ParserCreate()
diff --git a/lib-python/2.7/test/test_repr.py b/lib-python/2.7/test/test_repr.py
--- a/lib-python/2.7/test/test_repr.py
+++ b/lib-python/2.7/test/test_repr.py
@@ -9,6 +9,7 @@
import unittest
from test.test_support import run_unittest, check_py3k_warnings
+from test.test_support import check_impl_detail
from repr import repr as r # Don't shadow builtin repr
from repr import Repr
@@ -145,8 +146,11 @@
# Functions
eq(repr(hash), '')
# Methods
- self.assertTrue(repr(''.split).startswith(
- '")
def test_xrange(self):
eq = self.assertEqual
@@ -185,7 +189,10 @@
def test_descriptors(self):
eq = self.assertEqual
# method descriptors
- eq(repr(dict.items), "")
+ if check_impl_detail(cpython=True):
+ eq(repr(dict.items), "")
+ elif check_impl_detail(pypy=True):
+ eq(repr(dict.items), "")
# XXX member descriptors
# XXX attribute descriptors
# XXX slot descriptors
@@ -247,8 +254,14 @@
eq = self.assertEqual
touch(os.path.join(self.subpkgname, self.pkgname + os.extsep + 'py'))
from areallylongpackageandmodulenametotestreprtruncation.areallylongpackageandmodulenametotestreprtruncation import areallylongpackageandmodulenametotestreprtruncation
- eq(repr(areallylongpackageandmodulenametotestreprtruncation),
- "" % (areallylongpackageandmodulenametotestreprtruncation.__name__, areallylongpackageandmodulenametotestreprtruncation.__file__))
+ # On PyPy, we use %r to format the file name; on CPython it is done
+ # with '%s'. It seems to me that %r is safer .
+ if '__pypy__' in sys.builtin_module_names:
+ eq(repr(areallylongpackageandmodulenametotestreprtruncation),
+ "