> === modified file 'lib/lp/archivepublisher/publishing.py' > --- lib/lp/archivepublisher/publishing.py 2010-02-09 00:17:40 +0000 > +++ lib/lp/archivepublisher/publishing.py 2010-03-25 20:23:14 +0000 > @@ -12,9 +12,11 @@ > import hashlib > import logging > import os > +import shutil > > from datetime import datetime > > +from storm.store import Store > from zope.component import getUtility > > from lp.archivepublisher import HARDCODED_COMPONENT_ORDER > @@ -29,13 +31,15 @@ > from canonical.database.sqlbase import sqlvalues > from lp.registry.interfaces.pocket import ( > PackagePublishingPocket, pocketsuffix) > -from lp.soyuz.interfaces.archive import ArchivePurpose > +from lp.soyuz.interfaces.archive import ArchivePurpose, ArchiveStatus > from lp.soyuz.interfaces.binarypackagerelease import ( > BinaryPackageFormat) > from lp.soyuz.interfaces.component import IComponentSet > from lp.soyuz.interfaces.publishing import PackagePublishingStatus > > from canonical.librarian.client import LibrarianClient > +from canonical.launchpad.webapp.errorlog import ( > + ErrorReportingUtility, ScriptRequest) > > suffixpocket = dict((v, k) for (k, v) in pocketsuffix.items()) > > @@ -596,3 +600,37 @@ > in_file.close() > > out_file.write(" %s % 16d %s\n" % (checksum, length, file_name)) > + > + def deleteArchive(self): > + """Delete the archive. > + > + Physically remove the entire archive from disk and set the archive's > + status to DELETED. > + > + Any errors encountered while removing the archive from disk will > + be caught and an OOPS report generated. > + """ > + > + self.log.info( > + "Attempting to delete archive '%s/%s' at '%s'." % ( > + self.archive.owner.name, self.archive.name, > + self._config.archiveroot)) > + > + # Attempt to rmdir if the path to the root of the archive exists. 
> + if os.path.exists(self._config.archiveroot): > + try: > + shutil.rmtree(self._config.archiveroot) > + except: > + message = 'Exception while deleting archive root %s' % ( > + self._config.archiveroot) > + request = ScriptRequest([('error-explanation', message)]) > + ErrorReportingUtility().raising(sys.exc_info(), request) > + self.log.error('%s (%s)' % (message, request.oopsid)) > + else: > + self.log.warning( > + "Root directory '%s' for archive '%s/%s' does not exist." % ( > + self._config.archiveroot, self.archive.owner.name, > + self.archive.name)) > + > + # Set archive's status to DELETED. > + self.archive.status = ArchiveStatus.DELETED This is all good. I was trying to think of failure scenarios with it but I think you have them all covered! One thing to double-check, though: the except clause calls sys.exc_info(), but I don't see an "import sys" added in this diff — worth confirming it's already imported in this module. > > === modified file 'lib/lp/archivepublisher/tests/test_publisher.py' > --- lib/lp/archivepublisher/tests/test_publisher.py 2010-02-10 00:25:55 +0000 > +++ lib/lp/archivepublisher/tests/test_publisher.py 2010-03-25 20:23:14 +0000 > @@ -26,7 +26,7 @@ > from canonical.database.constants import UTC_NOW > from canonical.launchpad.ftests.keys_for_tests import gpgkeysdir > from lp.soyuz.interfaces.archive import ( > - ArchivePurpose, IArchiveSet) > + ArchivePurpose, ArchiveStatus, IArchiveSet) > from lp.soyuz.interfaces.binarypackagerelease import ( > BinaryPackageFormat) > from lp.registry.interfaces.distribution import IDistributionSet > @@ -95,6 +95,31 @@ > foo_path = "%s/main/f/foo/foo_666.dsc" % self.pool_dir > self.assertEqual(open(foo_path).read().strip(), 'Hello world') > > + def testDeletingPPA(self): > + """Test deleting a PPA""" > + ubuntu_team = getUtility(IPersonSet).getByName('ubuntu-team') > + test_archive = getUtility(IArchiveSet).new( > + distribution=self.ubuntutest, owner=ubuntu_team, > + purpose=ArchivePurpose.PPA) > + > + publisher = getPublisher(test_archive, None, self.logger) > + > + pub_source = self.getPubSource( > + sourcename="foo", filename="foo_1.dsc", > + filecontent='I am a file.', > + 
status=PackagePublishingStatus.PENDING, archive=test_archive) > + > + publisher.A_publish(False) > + self.layer.txn.commit() > + publisher.C_writeIndexes(False) > + publisher.D_writeReleaseFiles(False) > + pub_source.sync() You don't need to do these last 5 lines, nor the getPubSource. Doing more than necessary makes for a fragile test if other parts of the system change. A better test would be to throw some random files into the root directory of the archive and then assert that they get deleted when you call deleteArchive, along with the status change. > + > + self.assertTrue(os.path.exists(publisher._config.archiveroot)) > + publisher.deleteArchive() > + self.assertTrue(not os.path.exists(publisher._config.archiveroot)) How about assertFalse? > + self.assertEqual(test_archive.status, ArchiveStatus.DELETED) These are good tests. > + > def testPublishPartner(self): > """Test that a partner package is published to the right place.""" > archive = self.ubuntutest.getArchiveByComponent('partner') > > === modified file 'lib/lp/soyuz/scripts/publishdistro.py' > --- lib/lp/soyuz/scripts/publishdistro.py 2010-02-24 11:53:22 +0000 > +++ lib/lp/soyuz/scripts/publishdistro.py 2010-03-25 20:23:14 +0000 > @@ -16,7 +16,7 @@ > from canonical.database.sqlbase import ( > clear_current_connection_cache, flush_database_updates) > from lp.soyuz.interfaces.archive import ( > - ArchivePurpose, IArchiveSet, MAIN_ARCHIVE_PURPOSES) > + ArchivePurpose, ArchiveStatus, IArchiveSet, MAIN_ARCHIVE_PURPOSES) > from lp.registry.interfaces.distribution import IDistributionSet > from canonical.launchpad.scripts import logger, logger_options > from lp.services.scripts.base import LaunchpadScriptFailure > @@ -191,24 +191,34 @@ > else: > log.info("Processing %s" % archive.archive_url) > publisher = getPublisher(archive, allowed_suites, log) > - > - try_and_commit("publishing", publisher.A_publish, > - options.careful or options.careful_publishing) > - # Flag dirty pockets for any outstanding 
deletions. > - publisher.A2_markPocketsWithDeletionsDirty() > - try_and_commit("dominating", publisher.B_dominate, > - options.careful or options.careful_domination) > - > - # The primary and copy archives use apt-ftparchive to generate the > - # indexes, everything else uses the newer internal LP code. > - if archive.purpose in (ArchivePurpose.PRIMARY, ArchivePurpose.COPY): > - try_and_commit("doing apt-ftparchive", publisher.C_doFTPArchive, > - options.careful or options.careful_apt) > + > + # Do we need to delete the archive or publish it? > + if archive.status == ArchiveStatus.DELETING: > + if archive.purpose == ArchivePurpose.PPA: > + # Delete the archive. > + try_and_commit("publishing", publisher.deleteArchive, > + options.careful or options.careful_publishing) > + else: > + # Other types of archives do not currently support deletion. > + pass > else: > - try_and_commit("building indexes", publisher.C_writeIndexes, > + try_and_commit("publishing", publisher.A_publish, > + options.careful or options.careful_publishing) > + # Flag dirty pockets for any outstanding deletions. > + publisher.A2_markPocketsWithDeletionsDirty() > + try_and_commit("dominating", publisher.B_dominate, > + options.careful or options.careful_domination) > + > + # The primary and copy archives use apt-ftparchive to generate the > + # indexes, everything else uses the newer internal LP code. 
> + if archive.purpose in (ArchivePurpose.PRIMARY, ArchivePurpose.COPY): > + try_and_commit("doing apt-ftparchive", publisher.C_doFTPArchive, > + options.careful or options.careful_apt) > + else: > + try_and_commit("building indexes", publisher.C_writeIndexes, > + options.careful or options.careful_apt) > + > + try_and_commit("doing release files", publisher.D_writeReleaseFiles, > options.careful or options.careful_apt) > > - try_and_commit("doing release files", publisher.D_writeReleaseFiles, > - options.careful or options.careful_apt) > - > log.debug("Ciao") This code has a subtle bug: it will only process archives marked with publishing enabled, because of the filter just above the context of the diff. I also think in retrospect it's slightly the wrong approach to take here. What do you think of adding IArchiveSet.getByStatus()? Then at the end of the publisher script we can grab a list of archives that are in the DELETING state and simply call publisher.deleteArchive(). The new method on IArchiveSet would require a simple new test. Otherwise, we're nearly there! I have a UI branch that's very close to done. Cheers J