Merge lp://staging/~sinzui/launchpad/needs-linking-bug-507937 into lp://staging/launchpad

Proposed by Curtis Hovey
Status: Merged
Merge reported by: Curtis Hovey
Merged at revision: not available
Proposed branch: lp://staging/~sinzui/launchpad/needs-linking-bug-507937
Merge into: lp://staging/launchpad
Diff against target: 10117 lines (+5201/-1341)
136 files modified
.bzrignore (+4/-0)
Makefile (+1/-1)
cronscripts/scan_branches.py (+25/-0)
cronscripts/upgrade_branches.py (+1/-1)
database/replication/Makefile (+7/-4)
database/replication/authdb_create.sql (+48/-0)
database/replication/authdb_sequences.sql (+22/-0)
database/replication/helpers.py (+20/-1)
database/replication/initialize.py (+2/-1)
database/replication/new-slave.py (+13/-4)
database/replication/sync.py (+26/-0)
database/sampledata/current-dev.sql (+51/-51)
database/sampledata/current.sql (+51/-51)
database/schema/README (+1/-131)
database/schema/comments.sql (+74/-3)
database/schema/patch-2207-19-1.sql (+35/-0)
database/schema/patch-2207-20-0.sql (+13/-0)
database/schema/patch-2207-21-0.sql (+8/-0)
database/schema/patch-2207-24-0.sql (+30/-0)
database/schema/patch-2207-25-0.sql (+149/-0)
database/schema/patch-2207-26-0.sql (+28/-0)
database/schema/patch-2207-27-0.sql (+13/-0)
database/schema/patch-2207-28-0.sql (+11/-0)
database/schema/security.cfg (+17/-0)
database/schema/security.py (+3/-0)
lib/canonical/config/schema-lazr.conf (+10/-0)
lib/canonical/launchpad/blocked.html (+26/-0)
lib/canonical/launchpad/scripts/garbo.py (+63/-0)
lib/canonical/launchpad/webapp/dbpolicy.py (+22/-2)
lib/lp/archiveuploader/permission.py (+25/-13)
lib/lp/bugs/configure.zcml (+4/-2)
lib/lp/bugs/doc/bug-heat.txt (+54/-0)
lib/lp/bugs/interfaces/bug.py (+20/-0)
lib/lp/bugs/model/bug.py (+12/-1)
lib/lp/bugs/scripts/bugheat.py (+75/-0)
lib/lp/bugs/scripts/tests/test_bugheat.py (+183/-0)
lib/lp/bugs/tests/test_doc.py (+6/-0)
lib/lp/buildmaster/buildergroup.py (+7/-20)
lib/lp/buildmaster/interfaces/buildbase.py (+51/-0)
lib/lp/buildmaster/interfaces/builder.py (+8/-2)
lib/lp/buildmaster/interfaces/buildfarmjob.py (+46/-1)
lib/lp/buildmaster/interfaces/buildfarmjobbehavior.py (+8/-0)
lib/lp/buildmaster/master.py (+1/-1)
lib/lp/buildmaster/model/buildbase.py (+213/-4)
lib/lp/buildmaster/model/builder.py (+70/-141)
lib/lp/buildmaster/model/buildfarmjob.py (+14/-4)
lib/lp/buildmaster/model/buildfarmjobbehavior.py (+26/-1)
lib/lp/buildmaster/model/packagebuildfarmjob.py (+30/-0)
lib/lp/buildmaster/tests/test_builder.py (+66/-6)
lib/lp/buildmaster/tests/test_manager.py (+2/-0)
lib/lp/code/browser/branch.py (+2/-2)
lib/lp/code/browser/codeimport.py (+107/-81)
lib/lp/code/browser/codereviewvote.py (+11/-7)
lib/lp/code/configure.zcml (+10/-2)
lib/lp/code/doc/branch.txt (+2/-0)
lib/lp/code/doc/codeimport-event.txt (+31/-77)
lib/lp/code/doc/codeimport.txt (+64/-21)
lib/lp/code/enums.py (+19/-0)
lib/lp/code/errors.py (+5/-0)
lib/lp/code/interfaces/branchjob.py (+17/-12)
lib/lp/code/interfaces/codeimport.py (+7/-24)
lib/lp/code/interfaces/codereviewvote.py (+35/-1)
lib/lp/code/mail/codeimport.py (+7/-11)
lib/lp/code/mail/tests/test_codehandler.py (+1/-1)
lib/lp/code/model/branch.py (+3/-0)
lib/lp/code/model/branchjob.py (+42/-4)
lib/lp/code/model/codeimport.py (+24/-32)
lib/lp/code/model/codeimportevent.py (+4/-4)
lib/lp/code/model/codereviewvote.py (+36/-13)
lib/lp/code/model/tests/test_branch.py (+12/-0)
lib/lp/code/model/tests/test_branchjob.py (+52/-6)
lib/lp/code/model/tests/test_codeimport.py (+34/-36)
lib/lp/code/model/tests/test_codereviewvote.py (+81/-6)
lib/lp/code/scripts/tests/test_scan_branches.py (+59/-0)
lib/lp/code/stories/branches/xx-branchmergeproposals.txt (+15/-10)
lib/lp/code/stories/codeimport/xx-admin-codeimport.txt (+27/-6)
lib/lp/code/stories/codeimport/xx-codeimport-list.txt (+7/-2)
lib/lp/code/stories/codeimport/xx-create-codeimport.txt (+29/-11)
lib/lp/code/templates/branch-import-details.pt (+11/-5)
lib/lp/code/templates/codeimport-new.pt (+16/-0)
lib/lp/codehosting/codeimport/tests/servers.py (+21/-0)
lib/lp/codehosting/codeimport/tests/test_worker.py (+61/-14)
lib/lp/codehosting/codeimport/tests/test_workermonitor.py (+27/-8)
lib/lp/codehosting/codeimport/worker.py (+66/-56)
lib/lp/registry/configure.zcml (+2/-1)
lib/lp/registry/doc/distroseries.txt (+31/-1)
lib/lp/registry/interfaces/distroseries.py (+18/-0)
lib/lp/registry/interfaces/product.py (+8/-0)
lib/lp/registry/model/distroseries.py (+139/-2)
lib/lp/registry/model/person.py (+6/-0)
lib/lp/registry/model/product.py (+10/-4)
lib/lp/registry/stories/webservice/xx-project-registry.txt (+2/-0)
lib/lp/registry/tests/test_distroseries.py (+140/-4)
lib/lp/services/job/interfaces/job.py (+10/-0)
lib/lp/soyuz/adapters/archivedependencies.py (+15/-12)
lib/lp/soyuz/browser/tests/builder-views.txt (+2/-1)
lib/lp/soyuz/configure.zcml (+57/-0)
lib/lp/soyuz/doc/archive-dependencies.txt (+4/-2)
lib/lp/soyuz/doc/build-estimated-dispatch-time.txt (+4/-4)
lib/lp/soyuz/doc/build.txt (+4/-4)
lib/lp/soyuz/doc/buildd-dispatching.txt (+3/-3)
lib/lp/soyuz/doc/buildd-scoring.txt (+1/-1)
lib/lp/soyuz/doc/buildd-slavescanner.txt (+30/-29)
lib/lp/soyuz/doc/buildqueue.txt (+4/-4)
lib/lp/soyuz/interfaces/build.py (+1/-44)
lib/lp/soyuz/interfaces/buildpackagejob.py (+2/-1)
lib/lp/soyuz/interfaces/buildqueue.py (+18/-9)
lib/lp/soyuz/interfaces/sourcepackagerecipe.py (+85/-0)
lib/lp/soyuz/interfaces/sourcepackagerecipebuild.py (+96/-0)
lib/lp/soyuz/model/binarypackagebuildbehavior.py (+14/-15)
lib/lp/soyuz/model/build.py (+24/-221)
lib/lp/soyuz/model/buildpackagejob.py (+113/-26)
lib/lp/soyuz/model/buildqueue.py (+21/-18)
lib/lp/soyuz/model/publishing.py (+1/-1)
lib/lp/soyuz/model/recipebuilder.py (+169/-0)
lib/lp/soyuz/model/sourcepackagerecipe.py (+84/-0)
lib/lp/soyuz/model/sourcepackagerecipebuild.py (+215/-0)
lib/lp/soyuz/model/sourcepackagerecipedata.py (+230/-0)
lib/lp/soyuz/tests/soyuzbuilddhelpers.py (+12/-1)
lib/lp/soyuz/tests/test_binarypackagebuildbehavior.py (+66/-0)
lib/lp/soyuz/tests/test_buildpackagejob.py (+5/-3)
lib/lp/soyuz/tests/test_buildqueue.py (+54/-5)
lib/lp/soyuz/tests/test_recipebuilder.py (+178/-0)
lib/lp/soyuz/tests/test_sourcepackagerecipe.py (+348/-0)
lib/lp/soyuz/tests/test_sourcepackagerecipebuild.py (+92/-0)
lib/lp/testing/__init__.py (+4/-1)
lib/lp/testing/factory.py (+104/-20)
lib/lp/translations/browser/product.py (+5/-1)
lib/lp/translations/stories/translationfocus/xx-product-translationfocus.txt (+87/-0)
lib/lp/translations/stories/webservice/xx-translationfocus.txt (+34/-0)
scripts/code-import-worker.py (+4/-1)
scripts/librarian-report.py (+1/-1)
setup.py (+1/-0)
standard_template.py (+1/-1)
standard_test_template.py (+1/-2)
versions.cfg (+1/-0)
To merge this branch: bzr merge lp://staging/~sinzui/launchpad/needs-linking-bug-507937
Reviewer: Edwin Grubbs (community)
Review type: code
Status: Approve
Review via email: mp+17690@code.staging.launchpad.net
Revision history for this message
Curtis Hovey (sinzui) wrote :

This is my branch to add the model changes for distroseries +needs-packaging.
This work has taken too long to land, so the scope was narrowed to get the
model changes landed first. The three view changes can land quickly this week,
and some of the work can be done in parallel.

    lp:~sinzui/launchpad/needs-linking-bug-507937
    Diff size: 356
    Launchpad bug: https://bugs.launchpad.net/bugs/507937
    Test command: ./bin/test -vv \
        -t reg.*doc/distroseries \
        -t TestDistroSeriesPackaging
    Pre-implementation: bac, EdwinGrubbs
    Target release: 10.01

= Add model changes for distroseries +needs-packaging =

Per the ubuntu-link-to-upstream blueprint, contributors need a page with a
prioritised list of source packages that need packaging.

The work for this started in bug 487793, but this branch will land first
because it can be tested on staging and edge before letting users test it.
== Rules ==

    * Add two new methods to model.distroseries that provide a prioritised
      listing of source packages that need a packaging link, and of packaging
      links whose upstream projects are missing information (sketched below).
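
A minimal sketch of the two interface additions (assuming the usual
zope.interface style; the method names come from the diff, but the exact
declarations in lib/lp/registry/interfaces/distroseries.py may differ):

    from zope.interface import Interface


    class IDistroSeriesNeedsPackaging(Interface):
        """Hypothetical fragment of the IDistroSeries additions."""

        def getPriorizedUnlinkedSourcePackages():
            """Return a prioritized list of `ISourcePackage` objects that
            need a packaging link to an `IProductSeries`."""

        def getPriorizedlPackagings():
            """Return a prioritized list of `IPackaging` objects whose
            upstream projects are missing information."""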

== QA ==

The model changes cannot be QAed on their own. The view changes in my next
branch will introduce something we can QA, and disable if we do not want to
release the changes to production next week.

== Lint ==

    Linting changed files:
      lib/lp/registry/doc/distroseries.txt
      lib/lp/registry/interfaces/distroseries.py
      lib/lp/registry/model/distroseries.py
      lib/lp/registry/tests/test_distroseries.py

== Test ==

    * lib/lp/registry/doc/distroseries.txt
    * lib/lp/registry/tests/test_distroseries.py

== Implementation ==

    * lib/lp/registry/interfaces/distroseries.py
    * lib/lp/registry/model/distroseries.py
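
The docstrings in the diff describe the prioritisation as a heuristic over
bug hotness, translatable messages, and the source package release's
component. A purely illustrative sketch of that ordering (plain Python with
invented weights; the real methods in lib/lp/registry/model/distroseries.py
express it as a single SQL query):

    def priority_score(bug_hotness, translatable_messages, component_weight):
        """Hypothetical combined sort key: higher means more in need of
        a packaging link or upstream details."""
        return bug_hotness + translatable_messages + component_weight

    # Package names taken from the sample-data doctest; numbers are made up.
    candidates = [
        ('pmount', 64, 120, 3),
        ('cnews', 8, 40, 1),
    ]
    candidates.sort(key=lambda c: priority_score(*c[1:]), reverse=True)
    print([name for name, _, _, _ in candidates])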

Revision history for this message
Edwin Grubbs (edwin-grubbs) wrote :

Hi Curtis,

This branch makes a nice coherent chunk. My suggestions are fairly minor.

merge-conditional

-Edwin

>=== modified file 'lib/lp/registry/doc/distroseries.txt'
>--- lib/lp/registry/doc/distroseries.txt 2009-12-10 20:28:03 +0000
>+++ lib/lp/registry/doc/distroseries.txt 2010-01-20 16:13:26 +0000
>@@ -500,7 +500,35 @@
> netapplet are translatable in Hoary.
>
>
>-== DistroSeries can build meta objects for packages ==
>+Packages that need linking and packagings that need upstream information
>+-----------------------------------------------------------------------
>+
>+A distroseries a getPriorizedUnlinkedSourcePackages() method that returns

The beginning of this sentence is confusing.

>+a prioritized list of `ISourcePackage` that need a packaging link to an

Since `ISourcePackage` is not pluralized, it feels odd when I get
to the verb "need" which implies that the noun should be plural.

>+`IProductSeries` to provide the the upstream information to share bugs,
>+translations, and code.
>+
>+ >>> for source_package in hoary.getPriorizedUnlinkedSourcePackages():
>+ ... print source_package.name
>+ pmount
>+ alsa-utils
>+ cnews
>+ libstdc++
>+ linux-source-2.6.15
>+
>+
>+A distroseries a getPriorizedlPackagings() method that returns a prioritized

Beginning of this sentence also.

>+list of `IPackaging` that need more information about the upstream project to
>+share bugs, translations, and code.
>+
>+ >>> for packaging in hoary.getPriorizedlPackagings():
>+ ... print packaging.sourcepackagename.name
>+ netapplet
>+ evolution
>+
>+
>+DistroSeries can build meta objects for packages
>+------------------------------------------------
>
> >>> from canonical.launchpad.interfaces import (
> ... ISourcePackage,
>
>=== modified file 'lib/lp/registry/model/distroseries.py'
>--- lib/lp/registry/model/distroseries.py 2009-12-22 17:41:46 +0000
>+++ lib/lp/registry/model/distroseries.py 2010-01-20 16:13:26 +0000
>@@ -48,6 +48,7 @@
> get_bug_tags, get_bug_tags_open_count)
> from lp.bugs.model.bugtarget import BugTargetBase
> from lp.bugs.model.bugtask import BugTask
>+from lp.bugs.interfaces.bugtask import UNRESOLVED_BUGTASK_STATUSES
> from lp.soyuz.model.component import Component
> from lp.soyuz.model.distroarchseries import (
> DistroArchSeries, DistroArchSeriesSet, PocketChroot)
>@@ -306,7 +307,7 @@
> """See `IDistroSeries`."""
> # Avoid circular import failures.
> # We join to SourcePackageName, ProductSeries, and Product to cache
>- # the objects that are implcitly needed to work with a
>+ # the objects that are implicitly needed to work with a
> # Packaging object.
> from lp.registry.model.product import Product
> from lp.registry.model.productseries import ProductSeries
>@@ -329,6 +330,118 @@
> packaging
> for (packaging, spn, product_series, product) in results]
>
>+ def getPriorizedUnlinkedSourcePackages(self):
>+ """See `IDistroSeries`.
>+
>+ The prioritization is a heuristic rule using bug hotness,
>+ translatable messages, and the source package release's co...

review: Approve (code)
Revision history for this message
Curtis Hovey (sinzui) wrote :

Thanks for the review Edwin.

I have made your requested changes, and I identified some additional
changes needed by the view in my next branch. I think you should review
this.

On Wed, 2010-01-20 at 18:06 +0000, Edwin Grubbs wrote:
> Review: Approve code
> Hi Curtis,
>
> This branch makes a nice coherent chunk. My suggestions are fairly minor.
>
> merge-conditional

> >=== modified file 'lib/lp/registry/doc/distroseries.txt'
> >--- lib/lp/registry/doc/distroseries.txt 2009-12-10 20:28:03 +0000
> >+++ lib/lp/registry/doc/distroseries.txt 2010-01-20 16:13:26 +0000
> >@@ -500,7 +500,35 @@
> > netapplet are translatable in Hoary.
> >
> >
> >-== DistroSeries can build meta objects for packages ==
> >+Packages that need linking and packagings that need upstream information
> >+-----------------------------------------------------------------------
> >+
> >+A distroseries a getPriorizedUnlinkedSourcePackages() method that returns
>
> The beginning of this sentence is confusing.

    The distroseries getPriorizedUnlinkedSourcePackages()

> >+a prioritized list of `ISourcePackage` that need a packaging link to an
>
> Since `ISourcePackage` is not pluralized, it feels odd when I get
> to the verb "need" which implies that the noun should be plural.

    a prioritized list of `ISourcePackage` objects

> >+`IProductSeries` to provide the the upstream information to share bugs,
> >+translations, and code.
> >+
> >+ >>> for source_package in hoary.getPriorizedUnlinkedSourcePackages():
> >+ ... print source_package.name
> >+ pmount
> >+ alsa-utils
> >+ cnews
> >+ libstdc++
> >+ linux-source-2.6.15

This test changed because the view will need a dict; see the diff.

> >+A distroseries a getPriorizedlPackagings() method that returns a prioritized
>
> Beginning of this sentence also.

    The distroseries getPriorizedlPackagings() method

> >=== modified file 'lib/lp/registry/model/distroseries.py'
> >--- lib/lp/registry/model/distroseries.py 2009-12-22 17:41:46 +0000
> >+++ lib/lp/registry/model/distroseries.py 2010-01-20 16:13:26 +0000
...
> >+ def getPriorizedlPackagings(self):
> >+ """See `IDistroSeries`.
> >+
> >+ The prioritization is a heuristic rule using the branch, bug hotness,
>
> s/using the/using the/

Fixed.

> >+ translatable messages, and the source package release's component.
> >+ """
> >+ # Avoid circular import failures.
> >+ # We join to SourcePackageName, ProductSeries, and Product to cache
> >+ # the objects that are implcitly needed to work with a
> >+ # Packaging object.
> >+ from lp.registry.model.product import Product
> >+ from lp.registry.model.productseries import ProductSeries
>
> It seems like it would be clearer to move these imports directly
> below the comment line about "Avoid circular import failures."

Fixed. I tried to remove this since Adi's branch addressed part of the
problem, but I abandoned the effort when the branch grew by 250 lines with
no clue as to what else needed to change.

> >+ find_spec = (
> >+ Packaging, SourcePackageName, ProductSeries, Product,
> >+ SQL("""
> >+ ...

Revision history for this message
Edwin Grubbs (edwin-grubbs) wrote :

Hi Curtis,

All the changes look good.

merge-approved

-Edwin

Preview Diff

1=== modified file '.bzrignore'
2--- .bzrignore 2010-01-10 22:23:32 +0000
3+++ .bzrignore 2010-01-21 17:51:26 +0000
4@@ -53,4 +53,8 @@
5 bzr.dev
6 _trial_temp
7 lazr-js
8+.bazaar
9+.cache
10+.subversion
11 lib/canonical/buildd/launchpad-files
12+.testrepository
13
14=== modified file 'Makefile'
15--- Makefile 2010-01-11 20:57:16 +0000
16+++ Makefile 2010-01-21 17:51:27 +0000
17@@ -218,7 +218,7 @@
18
19 scan_branches:
20 # Scan branches from the filesystem into the database.
21- $(PY) cronscripts/branch-scanner.py
22+ $(PY) cronscripts/scan_branches.py
23
24
25 sync_branches: pull_branches scan_branches mpcreationjobs
26
27=== added symlink 'bzrplugins/builder'
28=== target is u'../sourcecode/bzr-builder/'
29=== added file 'cronscripts/scan_branches.py'
30--- cronscripts/scan_branches.py 1970-01-01 00:00:00 +0000
31+++ cronscripts/scan_branches.py 2010-01-21 17:51:26 +0000
32@@ -0,0 +1,25 @@
33+#!/usr/bin/python2.5
34+#
35+# Copyright 2010 Canonical Ltd. This software is licensed under the
36+# GNU Affero General Public License version 3 (see the file LICENSE).
37+
38+"""Scan branches for new revisions."""
39+
40+__metaclass__ = type
41+
42+import _pythonpath
43+
44+from lp.services.job.runner import JobCronScript
45+from lp.code.interfaces.branchjob import IBranchScanJobSource
46+
47+
48+class RunScanBranches(JobCronScript):
49+ """Run BranchScanJob jobs."""
50+
51+ config_name = 'branchscanner'
52+ source_interface = IBranchScanJobSource
53+
54+
55+if __name__ == '__main__':
56+ script = RunScanBranches()
57+ script.lock_and_run()
58
59=== modified file 'cronscripts/upgrade_branches.py'
60--- cronscripts/upgrade_branches.py 2009-12-10 18:46:14 +0000
61+++ cronscripts/upgrade_branches.py 2010-01-21 17:51:26 +0000
62@@ -14,7 +14,7 @@
63
64
65 class RunUpgradeBranches(JobCronScript):
66- """Run UpdatePreviewDiff jobs."""
67+ """Run UpgradeBranchJob jobs."""
68
69 config_name = 'upgrade_branches'
70 source_interface = IBranchUpgradeJobSource
71
72=== modified file 'database/replication/Makefile'
73--- database/replication/Makefile 2009-12-14 13:00:03 +0000
74+++ database/replication/Makefile 2010-01-21 17:51:26 +0000
75@@ -96,11 +96,11 @@
76 # Create the DB with the desired default tablespace.
77 createdb --encoding UTF8 --tablespace ${STAGING_TABLESPACE} \
78 lpmain_staging_new
79- # Restore the DB schema. Don't restore permissions - it will blow
80- # up when roles don't exist in this cluster, and we rebuild it later
81- # with security.py anyway.
82+ # Restore the DB schema. We need to restore permissions, despite
83+ # later running security.py, to pull in permissions granted on
84+ # production to users not maintained by security.py.
85 pg_restore --dbname=lpmain_staging_new \
86- --no-acl --no-owner --exit-on-error ${STAGING_DUMP}
87+ --no-owner --exit-on-error ${STAGING_DUMP}
88 psql -q -d lpmain_staging_new -f authdb_drop.sql
89 psql -q -d lpmain_staging_new -f authdb_create.sql \
90 2>&1 | grep -v _sl || true
91@@ -110,6 +110,7 @@
92 --no-acl --no-owner --disable-triggers --data-only \
93 --table=$$table ${STAGING_DUMP}; \
94 done
95+ psql -q -d lpmain_staging_new -f authdb_sequences.sql
96 # Uninstall Slony-I if it is installed - a pg_dump of a DB with
97 # Slony-I installed isn't usable without this step.
98 LPCONFIG=${NEW_STAGING_CONFIG} ./repair-restored-db.py
99@@ -187,7 +188,9 @@
100 @echo Running fti.py `date`
101 ${SHHH} ../schema/fti.py
102 @echo Running security.py `date`
103+ ./slon_ctl.py stop # security.py can deadlock with slony
104 ${SHHH} ../schema/security.py --cluster -U slony
105+ ./slon_ctl.py --lag="0 seconds" start
106 # Migrate tables to the authdb replication set, creating the set
107 # and subscribing nodes to it as necessary.
108 ./populate_auth_replication_set.py -U slony
109
110=== modified file 'database/replication/authdb_create.sql'
111--- database/replication/authdb_create.sql 2009-11-17 11:13:25 +0000
112+++ database/replication/authdb_create.sql 2010-01-21 17:51:27 +0000
113@@ -833,3 +833,51 @@
114 CREATE INDEX emailaddress__account__status__idx
115 ON EmailAddress(account, status);
116
117+
118+-- Permissions for Ubuntu SSO server testing on staging.
119+
120+-- Mirrored from sso_auth user 2010-01-12.
121+-- These tables will eventually not be available.
122+--
123+GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE account TO ubuntu_sso;
124+GRANT USAGE ON SEQUENCE account_id_seq TO ubuntu_sso;
125+GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE accountpassword TO ubuntu_sso;
126+GRANT USAGE ON SEQUENCE accountpassword_id_seq TO ubuntu_sso;
127+GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE authtoken TO ubuntu_sso;
128+GRANT USAGE ON SEQUENCE authtoken_id_seq TO ubuntu_sso;
129+GRANT SELECT ON TABLE person TO ubuntu_sso;
130+GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE emailaddress TO ubuntu_sso;
131+GRANT USAGE ON SEQUENCE emailaddress_id_seq TO ubuntu_sso;
132+GRANT SELECT,INSERT,DELETE ON TABLE openidassociation TO ubuntu_sso;
133+GRANT SELECT,INSERT,DELETE,UPDATE ON TABLE openidauthorization TO ubuntu_sso;
134+GRANT USAGE ON SEQUENCE openidauthorization_id_seq TO ubuntu_sso;
135+GRANT SELECT,INSERT,DELETE ON TABLE openidnonce TO ubuntu_sso;
136+GRANT SELECT,INSERT,UPDATE ON TABLE openidrpsummary TO ubuntu_sso;
137+GRANT USAGE ON SEQUENCE openidrpsummary_id_seq TO ubuntu_sso;
138+GRANT SELECT ON SEQUENCE person_id_seq TO ubuntu_sso;
139+GRANT SELECT ON TABLE personlocation TO ubuntu_sso;
140+GRANT SELECT ON SEQUENCE personlocation_id_seq TO ubuntu_sso;
141+GRANT SELECT ON TABLE teamparticipation TO ubuntu_sso;
142+GRANT SELECT ON SEQUENCE teamparticipation_id_seq TO ubuntu_sso;
143+
144+-- Permissions on the Ubuntu SSO tables.
145+--
146+GRANT SELECT, INSERT, DELETE, UPDATE ON TABLE auth_permission,
147+auth_group_permissions, auth_group, auth_user, auth_user_groups,
148+auth_user_user_permissions, auth_message, django_content_type,
149+django_session, django_site, django_admin_log,
150+ssoopenidrpconfig TO ubuntu_sso;
151+
152+GRANT USAGE ON SEQUENCE auth_group_id_seq,
153+auth_group_permissions_id_seq, auth_message_id_seq,
154+auth_permission_id_seq, auth_user_groups_id_seq, auth_user_id_seq,
155+auth_user_user_permissions_id_seq, django_admin_log_id_seq,
156+django_content_type_id_seq, django_site_id_seq,
157+ssoopenidrpconfig_id_seq TO ubuntu_sso;
158+
159+-- Permissions on the lpmirror tables (mirrors of relevant Launchpad
160+-- information, available even when Launchpad database upgrades are in
161+-- progress).
162+GRANT SELECT ON TABLE lp_person, lp_personlocation, lp_teamparticipation
163+TO ubuntu_sso;
164+
165
166=== added file 'database/replication/authdb_sequences.sql'
167--- database/replication/authdb_sequences.sql 1970-01-01 00:00:00 +0000
168+++ database/replication/authdb_sequences.sql 2010-01-21 17:51:27 +0000
169@@ -0,0 +1,22 @@
170+-- Repair sequences in the authdb replication set. We need to do this because
171+-- we cannot restore the sequence values from the dump when restoring the
172+-- data using pg_restore --data-only.
173+
174+SELECT setval('account_id_seq', max(id)) AS Account
175+FROM Account;
176+
177+SELECT setval('accountpassword_id_seq', max(id)) AS AccountPassword
178+FROM AccountPassword;
179+
180+SELECT setval('authtoken_id_seq', max(id)) AS AuthToken
181+FROM AuthToken;
182+
183+SELECT setval('emailaddress_id_seq', max(id)) AS EmailAddress
184+FROM EmailAddress;
185+
186+SELECT setval('openidauthorization_id_seq', max(id)) AS OpenIDAuthorization
187+FROM OpenIDAuthorization;
188+
189+SELECT setval('openidrpsummary_id_seq', max(id)) AS OpenIDRPSummary
190+FROM OpenIDRPSummary;
191+
192
193=== modified file 'database/replication/helpers.py'
194--- database/replication/helpers.py 2009-11-30 11:35:04 +0000
195+++ database/replication/helpers.py 2010-01-21 17:51:26 +0000
196@@ -71,8 +71,27 @@
197 'public.lp_person',
198 'public.lp_personlocation',
199 'public.lp_teamparticipation',
200+ # Ubuntu SSO database. These tables where created manually by ISD
201+ # and the Launchpad scripts should not mess with them. Eventually
202+ # these tables will be in a totally separate database.
203+ 'public.auth_permission',
204+ 'public.auth_group',
205+ 'public.auth_user',
206+ 'public.auth_message',
207+ 'public.django_content_type',
208+ 'public.auth_permission',
209+ 'public.django_session',
210+ 'public.django_site',
211+ 'public.django_admin_log',
212+ 'public.ssoopenidrpconfig',
213+ 'public.auth_group_permissions',
214+ 'public.auth_user_groups',
215+ 'public.auth_user_user_permissions',
216 ])
217
218+# Calculate IGNORED_SEQUENCES
219+IGNORED_SEQUENCES = set('%s_id_seq' % table for table in IGNORED_TABLES)
220+
221
222 def slony_installed(con):
223 """Return True if the connected database is part of a Launchpad Slony-I
224@@ -447,7 +466,7 @@
225
226 return (
227 all_tables - replicated_tables - IGNORED_TABLES,
228- all_sequences - replicated_sequences)
229+ all_sequences - replicated_sequences - IGNORED_SEQUENCES)
230
231
232 class ReplicationConfigError(Exception):
233
234=== modified file 'database/replication/initialize.py'
235--- database/replication/initialize.py 2009-10-17 14:06:03 +0000
236+++ database/replication/initialize.py 2010-01-21 17:51:26 +0000
237@@ -224,7 +224,8 @@
238 fails += 1
239 for sequence in all_sequences_in_schema(cur, 'public'):
240 times_seen = 0
241- for sequence_set in [authdb_sequences, lpmain_sequences]:
242+ for sequence_set in [
243+ authdb_sequences, lpmain_sequences, helpers.IGNORED_SEQUENCES]:
244 if sequence in sequence_set:
245 times_seen += 1
246 if times_seen == 0:
247
248=== modified file 'database/replication/new-slave.py'
249--- database/replication/new-slave.py 2009-12-03 09:47:24 +0000
250+++ database/replication/new-slave.py 2010-01-21 17:51:26 +0000
251@@ -162,13 +162,22 @@
252 target_con.commit()
253 del target_con
254
255- # Get a list of existing set ids.
256+ # Get a list of existing set ids that can be subscribed too. This
257+ # is all sets where the origin is the master_node, and set 2 if
258+ # the master happens to be configured as a forwarding slave. We
259+ # don't allow other sets where the master is configured as a
260+ # forwarding slave as we have to special case rebuilding the database
261+ # schema (such as we do for the authdb replication set 2).
262 source_connection.rollback()
263 master_node = replication.helpers.get_master_node(source_connection)
264 cur = source_connection.cursor()
265- cur.execute(
266- "SELECT set_id FROM _sl.sl_set WHERE set_origin=%d"
267- % master_node.node_id)
268+ cur.execute("""
269+ SELECT set_id FROM _sl.sl_set WHERE set_origin=%d
270+ UNION
271+ SELECT sub_set AS set_id FROM _sl.sl_subscribe
272+ WHERE sub_receiver=%d AND sub_forward IS TRUE AND sub_active IS TRUE
273+ AND sub_set=2
274+ """ % (master_node.node_id, master_node.node_id))
275 set_ids = [set_id for set_id, in cur.fetchall()]
276 log.debug("Discovered set ids %s" % repr(list(set_ids)))
277
278
279=== added file 'database/replication/sync.py'
280--- database/replication/sync.py 1970-01-01 00:00:00 +0000
281+++ database/replication/sync.py 2010-01-21 17:51:26 +0000
282@@ -0,0 +1,26 @@
283+#!/usr/bin/python2.5
284+#
285+# Copyright 2010 Canonical Ltd. This software is licensed under the
286+# GNU Affero General Public License version 3 (see the file LICENSE).
287+
288+"""Block until the replication cluster synchronizes."""
289+
290+__metaclass__ = type
291+__all__ = []
292+
293+import _pythonpath
294+
295+from optparse import OptionParser
296+
297+from canonical.launchpad.scripts import logger_options, db_options
298+from replication.helpers import sync
299+
300+if __name__ == '__main__':
301+ parser = OptionParser()
302+ parser.add_option(
303+ "-t", "--timeout", dest="timeout", metavar="SECS", type="int",
304+ help="Abort if no sync after SECS seconds.", default=0)
305+ logger_options(parser)
306+ db_options(parser)
307+ options, args = parser.parse_args()
308+ sync(options.timeout)
309
310=== modified file 'database/sampledata/current-dev.sql'
311--- database/sampledata/current-dev.sql 2009-12-14 13:49:03 +0000
312+++ database/sampledata/current-dev.sql 2010-01-21 17:51:26 +0000
313@@ -1289,19 +1289,19 @@
314
315 ALTER TABLE bug DISABLE TRIGGER ALL;
316
317-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (1, '2004-01-01 20:58:04.553583', NULL, 'Firefox does not support SVG', 'Firefox needs to support embedded SVG images, now that the standard has been finalised.
318+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (1, '2004-01-01 20:58:04.553583', NULL, 'Firefox does not support SVG', 'Firefox needs to support embedded SVG images, now that the standard has been finalised.
319
320-The SVG standard 1.0 is complete, and draft implementations for Firefox exist. One of these implementations needs to be integrated with the base install of Firefox. Ideally, the implementation needs to include support for the manipulation of SVG objects from JavaScript to enable interactive and dynamic SVG drawings.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.344941', NULL, NULL, NULL, 0, 2, 0, 0, 0);
321-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (2, '2004-01-02 20:58:04.572546', 'blackhole', 'Blackhole Trash folder', 'The Trash folder seems to have significant problems! At the moment, dragging an item to the trash results in immediate deletion. The item does not appear in the Trash, it is just deleted from my hard disk. There is no undo or ability to recover the deleted file. Help!', 12, NULL, NULL, false, false, '2006-07-14 08:47:36.124403', NULL, NULL, NULL, 0, 3, 0, 0, 0);
322-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (3, '2004-01-03 00:00:00', NULL, 'Bug Title Test', 'Shirtpkdf user@domain.org lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
323+The SVG standard 1.0 is complete, and draft implementations for Firefox exist. One of these implementations needs to be integrated with the base install of Firefox. Ideally, the implementation needs to include support for the manipulation of SVG objects from JavaScript to enable interactive and dynamic SVG drawings.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.344941', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
324+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (2, '2004-01-02 20:58:04.572546', 'blackhole', 'Blackhole Trash folder', 'The Trash folder seems to have significant problems! At the moment, dragging an item to the trash results in immediate deletion. The item does not appear in the Trash, it is just deleted from my hard disk. There is no undo or ability to recover the deleted file. Help!', 12, NULL, NULL, false, false, '2006-07-14 08:47:36.124403', NULL, NULL, NULL, 0, 3, 0, 0, 0, NULL);
325+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (3, '2004-01-03 00:00:00', NULL, 'Bug Title Test', 'Shirtpkdf user@domain.org lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
326
327 y idu yifdxhfgffxShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
328 Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
329
330-Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo', 16, NULL, NULL, false, false, '2006-07-14 08:48:52.922462', NULL, NULL, NULL, 0, 1, 0, 0, 0);
331-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (4, '2004-01-04 00:00:00', NULL, 'Reflow problems with complex page layouts', 'Malone pages that use more complex layouts with portlets and fancy CSS are sometimes not getting properly reflowed after rendering.', 12, NULL, NULL, false, false, '2006-07-14 08:49:17.124885', NULL, NULL, NULL, 0, 1, 0, 0, 0);
332-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (5, '2004-01-05 00:00:00', NULL, 'Firefox install instructions should be complete', 'All ways of downloading firefox should provide complete install instructions. At present, they are only visible on the Release Notes page.', 12, NULL, NULL, false, false, '2006-07-14 08:49:40.553212', NULL, NULL, NULL, 0, 1, 0, 0, 0);
333-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (6, '2004-01-06 00:00:00', NULL, 'Firefox crashes when Save As dialog for a nonexistent window is closed', 'User-Agent:
334+Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo', 16, NULL, NULL, false, false, '2006-07-14 08:48:52.922462', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
335+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (4, '2004-01-04 00:00:00', NULL, 'Reflow problems with complex page layouts', 'Malone pages that use more complex layouts with portlets and fancy CSS are sometimes not getting properly reflowed after rendering.', 12, NULL, NULL, false, false, '2006-07-14 08:49:17.124885', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
336+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (5, '2004-01-05 00:00:00', NULL, 'Firefox install instructions should be complete', 'All ways of downloading firefox should provide complete install instructions. At present, they are only visible on the Release Notes page.', 12, NULL, NULL, false, false, '2006-07-14 08:49:40.553212', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
337+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (6, '2004-01-06 00:00:00', NULL, 'Firefox crashes when Save As dialog for a nonexistent window is closed', 'User-Agent:
338 Build Identifier: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040207 Firefox/0.8
339
340 If a Save As dialog for a nonexistent window exists, when the dialog is closed Firefox will crash. It''s possible to cause this to happen using the "Open With/Save As" dialog.
341@@ -1320,12 +1320,12 @@
342 Crash!
343
344 Expected Results:
345-Not crashed.', 12, 5, NULL, false, false, '2006-05-19 06:37:40.389193', NULL, NULL, NULL, 0, 1, 0, 0, 0);
346-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (7, '2004-01-07 15:12:29.602117', NULL, 'A test bug', 'A test description', 16, NULL, NULL, false, false, '2006-05-19 06:37:40.391822', NULL, NULL, NULL, 0, 1, 0, 0, 0);
347-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (8, '2004-01-08 09:10:17.13237', NULL, 'Printing doesn''t work', 'When I press print in Firefox, nothing happens, not even a print dialog comes up.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.393303', NULL, NULL, NULL, 0, 1, 0, 0, 0);
348-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (9, '2004-01-09 16:42:14.080227', NULL, 'Thunderbird crashes', 'Every time I start Thunderbird it crashes.', 16, NULL, NULL, false, false, '2006-07-14 08:45:10.600579', NULL, NULL, NULL, 0, 1, 0, 0, 0);
349-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (10, '2004-01-10 19:42:21.890299', NULL, 'another test bug', 'test bug', 16, NULL, NULL, false, false, '2006-07-14 08:54:19.453881', NULL, NULL, NULL, 0, 2, 0, 0, 0);
350-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (11, '2007-03-15 20:33:56.67893', NULL, 'Make Jokosher use autoaudiosink', 'I''ve had problems when switching from Jokosher to Totem to play an Ogg.
351+Not crashed.', 12, 5, NULL, false, false, '2006-05-19 06:37:40.389193', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
352+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (7, '2004-01-07 15:12:29.602117', NULL, 'A test bug', 'A test description', 16, NULL, NULL, false, false, '2006-05-19 06:37:40.391822', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
353+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (8, '2004-01-08 09:10:17.13237', NULL, 'Printing doesn''t work', 'When I press print in Firefox, nothing happens, not even a print dialog comes up.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.393303', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
354+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (9, '2004-01-09 16:42:14.080227', NULL, 'Thunderbird crashes', 'Every time I start Thunderbird it crashes.', 16, NULL, NULL, false, false, '2006-07-14 08:45:10.600579', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
355+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (10, '2004-01-10 19:42:21.890299', NULL, 'another test bug', 'test bug', 16, NULL, NULL, false, false, '2006-07-14 08:54:19.453881', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
356+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (11, '2007-03-15 20:33:56.67893', NULL, 'Make Jokosher use autoaudiosink', 'I''ve had problems when switching from Jokosher to Totem to play an Ogg.
357
358 Totem appears to be playing normally but does not produce any sound.
359 If I close Jokosher then you can hear totem.
360@@ -1351,11 +1351,11 @@
361 Is there some way to reset alsa or the device driver - with out having
362 to restart?
363
364-Running on Ubuntu - with Jokosher 0.2 runscript.', 16, NULL, NULL, false, false, '2007-03-15 20:37:51.603369', NULL, NULL, NULL, 0, 7, 0, 0, 0);
365-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (12, '2007-03-15 20:41:18.635493', NULL, 'Copy, Cut and Delete operations should work on selections', 'When trying to copy, cut or delete just a selected portion of an event, the operations affect the event completely. That is, you can''t select a portion and cut that piece. The whole event will be cut. Same goes for the other 2 operations.', 16, NULL, NULL, false, false, '2007-03-15 20:46:49.909153', NULL, NULL, NULL, 0, 5, 0, 0, 0);
366-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (13, '2007-07-27 20:00:58.299796', NULL, 'Launchpad CSS and JS is not testible', 'The messages placed on this bug are for eyeball viewing of JS and CSS behaviour.', 12, NULL, NULL, false, false, '2007-07-27 20:29:46.336737', NULL, NULL, NULL, 0, 2, 0, 0, 0);
367-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (14, '2007-08-09 11:39:16.836856', NULL, 'jokosher exposes personal details in its actions portlet', 'Jokosher discloses to any passerby the fact that I am single and unwed in its actions portlet. Please fix this blatant violacion of privacy now!!', 63, NULL, NULL, true, true, '2007-08-09 11:39:16.836856', NULL, NULL, NULL, 0, 1, 0, 0, 0);
368-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (15, '2007-12-18 16:30:19.103679', NULL, 'Nonsensical bugs are useless', 'Like this one, natch.', 16, NULL, NULL, false, false, '2007-12-18 16:31:34.972893', NULL, NULL, '2007-12-18 16:31:34.790641', 0, 7, 0, 0, 0);
369+Running on Ubuntu - with Jokosher 0.2 runscript.', 16, NULL, NULL, false, false, '2007-03-15 20:37:51.603369', NULL, NULL, NULL, 0, 7, 0, 0, 0, NULL);
370+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (12, '2007-03-15 20:41:18.635493', NULL, 'Copy, Cut and Delete operations should work on selections', 'When trying to copy, cut or delete just a selected portion of an event, the operations affect the event completely. That is, you can''t select a portion and cut that piece. The whole event will be cut. Same goes for the other 2 operations.', 16, NULL, NULL, false, false, '2007-03-15 20:46:49.909153', NULL, NULL, NULL, 0, 5, 0, 0, 0, NULL);
371+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (13, '2007-07-27 20:00:58.299796', NULL, 'Launchpad CSS and JS is not testible', 'The messages placed on this bug are for eyeball viewing of JS and CSS behaviour.', 12, NULL, NULL, false, false, '2007-07-27 20:29:46.336737', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
372+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (14, '2007-08-09 11:39:16.836856', NULL, 'jokosher exposes personal details in its actions portlet', 'Jokosher discloses to any passerby the fact that I am single and unwed in its actions portlet. Please fix this blatant violacion of privacy now!!', 63, NULL, NULL, true, true, '2007-08-09 11:39:16.836856', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
373+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (15, '2007-12-18 16:30:19.103679', NULL, 'Nonsensical bugs are useless', 'Like this one, natch.', 16, NULL, NULL, false, false, '2007-12-18 16:31:34.972893', NULL, NULL, '2007-12-18 16:31:34.790641', 0, 7, 0, 0, 0, NULL);
374
375
376 ALTER TABLE bug ENABLE TRIGGER ALL;
377@@ -1579,34 +1579,34 @@
378
379 ALTER TABLE bugtask DISABLE TRIGGER ALL;
380
381-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (2, 1, 4, NULL, NULL, NULL, NULL, 10, 10, 20, 1, '2005-01-02 11:07:20.330975', '2004-01-02 03:49:22.910878', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
382-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (3, 2, 1, NULL, NULL, NULL, NULL, 10, 20, 20, NULL, '2005-01-03 11:07:20.330975', '2004-01-03 03:49:22.908491', 12, NULL, NULL, NULL, NULL, 'Tomcat', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
383-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (4, 1, NULL, 3, NULL, 1, NULL, 20, 40, 20, NULL, '2005-01-04 11:07:20.584746', '2004-01-04 03:49:22.79024', 12, NULL, 8, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
384-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (5, 2, NULL, 3, NULL, 1, NULL, 20, 40, 20, 12, '2005-01-05 11:07:20.584746', '2004-01-05 03:49:22.824591', 12, NULL, NULL, 'Upstream said that they won''t bother fixing it.', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
385-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (6, 3, NULL, 3, NULL, 1, NULL, 10, 20, 999, NULL, '2005-01-06 11:07:20.584746', '2004-01-06 03:49:22.825533', 16, NULL, NULL, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
386-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (13, 4, 4, NULL, NULL, NULL, NULL, 10, 30, 30, NULL, '2005-01-13 17:20:12.820778', '2004-01-13 17:20:12.820778', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
387-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (14, 5, 4, NULL, NULL, NULL, NULL, 10, 30, 50, 12, '2005-01-14 17:27:03.702622', '2004-01-14 17:27:03.702622', 12, NULL, NULL, 'The status explanation is useful to provide task specific information.', NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
388-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (15, 6, 4, NULL, NULL, NULL, NULL, 10, 30, 40, NULL, '2005-01-15 17:35:39.548665', '2004-01-15 17:35:39.548665', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
389-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (16, 5, NULL, NULL, 1, 1, NULL, 10, 30, 30, NULL, '2005-01-16 14:43:02.452716', '2004-01-16 14:43:02.452716', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu Warty)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
390-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (17, 1, NULL, 1, NULL, 1, NULL, 10, 30, 30, NULL, '2005-01-17 01:15:48.241836', '2004-01-17 01:15:48.241836', 16, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
391-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (18, 3, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-18 16:30:32.295049', '2004-01-18 16:30:32.295049', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
392-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (19, 3, NULL, NULL, 7, 1, NULL, 10, 30, 30, NULL, '2005-01-19 16:30:47.448639', '2004-01-19 16:30:47.448639', 12, 2, 10, '', NULL, 'mozilla-firefox (Debian Sarge)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
393-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (20, 2, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-20 13:54:57.840344', '2004-01-20 13:54:57.840344', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
394-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (21, 7, 5, NULL, NULL, NULL, NULL, 10, NULL, 30, 16, '2005-01-21 15:12:29.602117', '2004-01-21 15:12:29.602117', 16, NULL, NULL, NULL, NULL, 'Evolution', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
395-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (22, 8, NULL, 3, NULL, 1, NULL, 30, NULL, 30, 16, '2005-01-22 09:10:17.13237', '2004-01-22 09:10:17.13237', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian)', NULL, NULL, '2005-01-25 09:10:17.13237', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
396-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (23, 9, NULL, 1, NULL, 18, NULL, 20, NULL, 30, NULL, '2005-01-23 16:42:14.080227', '2004-01-23 16:42:14.080227', 16, NULL, NULL, '', NULL, 'thunderbird (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
397-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (24, 9, 8, NULL, NULL, NULL, NULL, 999, 999, 999, NULL, '2005-01-24 16:43:25.744534', '2004-01-24 16:43:25.744534', 12, NULL, 6, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
398-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (25, 10, NULL, 1, NULL, 22, NULL, 10, NULL, 30, NULL, '2005-01-25 19:42:21.890299', '2004-01-25 19:42:21.890299', 16, NULL, NULL, NULL, NULL, 'linux-source-2.6.15 (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
399-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (26, 2, NULL, 1, NULL, NULL, NULL, 10, NULL, 30, NULL, '2005-01-26 10:11:02.729077', '2004-01-26 10:11:02.729077', 16, NULL, NULL, NULL, NULL, 'Ubuntu', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
400-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (27, 7, NULL, 3, NULL, 9, NULL, 999, 999, 999, NULL, '2005-01-27 16:45:45.054836', '2004-01-27 16:45:45.054836', 12, NULL, 7, NULL, NULL, 'evolution (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
401-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (28, 2, NULL, NULL, 3, NULL, NULL, 10, NULL, 5, NULL, NULL, '2006-08-21 22:23:08.163936', 1, NULL, NULL, NULL, NULL, 'Ubuntu Hoary', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
402-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (29, 5, NULL, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-01-04 15:00:50.431687', 16, NULL, NULL, NULL, NULL, 'Mozilla Firefox 1.0', NULL, NULL, NULL, 2, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
403-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (30, 11, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-03-15 20:33:56.67893', 16, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
404-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (31, 12, 20, NULL, NULL, NULL, NULL, 20, NULL, 50, NULL, NULL, '2007-03-15 20:41:18.635493', 16, NULL, NULL, '', NULL, 'Jokosher', '2007-03-15 20:41:42.256326', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
405-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (32, 13, 17, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-07-27 20:00:58.299796', 12, NULL, NULL, NULL, NULL, 'Launchpad', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
406-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (33, 14, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-08-09 11:39:16.836856', 63, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
407-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (34, 15, 22, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-12-18 16:30:19.103679', 16, NULL, NULL, NULL, NULL, 'Redfish', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
408-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (35, 15, 8, NULL, NULL, NULL, NULL, 10, NULL, 999, NULL, NULL, '2007-12-18 16:30:47.889614', 16, NULL, 11, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
409+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (2, 1, 4, NULL, NULL, NULL, NULL, 10, 10, 20, 1, '2005-01-02 11:07:20.330975', '2004-01-02 03:49:22.910878', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
410+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (3, 2, 1, NULL, NULL, NULL, NULL, 10, 20, 20, NULL, '2005-01-03 11:07:20.330975', '2004-01-03 03:49:22.908491', 12, NULL, NULL, NULL, NULL, 'Tomcat', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
411+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (4, 1, NULL, 3, NULL, 1, NULL, 20, 40, 20, NULL, '2005-01-04 11:07:20.584746', '2004-01-04 03:49:22.79024', 12, NULL, 8, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
412+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (5, 2, NULL, 3, NULL, 1, NULL, 20, 40, 20, 12, '2005-01-05 11:07:20.584746', '2004-01-05 03:49:22.824591', 12, NULL, NULL, 'Upstream said that they won''t bother fixing it.', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
413+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (6, 3, NULL, 3, NULL, 1, NULL, 10, 20, 999, NULL, '2005-01-06 11:07:20.584746', '2004-01-06 03:49:22.825533', 16, NULL, NULL, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
414+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (13, 4, 4, NULL, NULL, NULL, NULL, 10, 30, 30, NULL, '2005-01-13 17:20:12.820778', '2004-01-13 17:20:12.820778', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
415+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (14, 5, 4, NULL, NULL, NULL, NULL, 10, 30, 50, 12, '2005-01-14 17:27:03.702622', '2004-01-14 17:27:03.702622', 12, NULL, NULL, 'The status explanation is useful to provide task specific information.', NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
416+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (15, 6, 4, NULL, NULL, NULL, NULL, 10, 30, 40, NULL, '2005-01-15 17:35:39.548665', '2004-01-15 17:35:39.548665', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
417+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (16, 5, NULL, NULL, 1, 1, NULL, 10, 30, 30, NULL, '2005-01-16 14:43:02.452716', '2004-01-16 14:43:02.452716', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu Warty)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
418+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (17, 1, NULL, 1, NULL, 1, NULL, 10, 30, 30, NULL, '2005-01-17 01:15:48.241836', '2004-01-17 01:15:48.241836', 16, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
419+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (18, 3, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-18 16:30:32.295049', '2004-01-18 16:30:32.295049', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
420+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (19, 3, NULL, NULL, 7, 1, NULL, 10, 30, 30, NULL, '2005-01-19 16:30:47.448639', '2004-01-19 16:30:47.448639', 12, 2, 10, '', NULL, 'mozilla-firefox (Debian Sarge)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
421+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (20, 2, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-20 13:54:57.840344', '2004-01-20 13:54:57.840344', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
422+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (21, 7, 5, NULL, NULL, NULL, NULL, 10, NULL, 30, 16, '2005-01-21 15:12:29.602117', '2004-01-21 15:12:29.602117', 16, NULL, NULL, NULL, NULL, 'Evolution', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
423+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (22, 8, NULL, 3, NULL, 1, NULL, 30, NULL, 30, 16, '2005-01-22 09:10:17.13237', '2004-01-22 09:10:17.13237', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian)', NULL, NULL, '2005-01-25 09:10:17.13237', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
424+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (23, 9, NULL, 1, NULL, 18, NULL, 20, NULL, 30, NULL, '2005-01-23 16:42:14.080227', '2004-01-23 16:42:14.080227', 16, NULL, NULL, '', NULL, 'thunderbird (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
425+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (24, 9, 8, NULL, NULL, NULL, NULL, 999, 999, 999, NULL, '2005-01-24 16:43:25.744534', '2004-01-24 16:43:25.744534', 12, NULL, 6, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
426+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (25, 10, NULL, 1, NULL, 22, NULL, 10, NULL, 30, NULL, '2005-01-25 19:42:21.890299', '2004-01-25 19:42:21.890299', 16, NULL, NULL, NULL, NULL, 'linux-source-2.6.15 (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
427+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (26, 2, NULL, 1, NULL, NULL, NULL, 10, NULL, 30, NULL, '2005-01-26 10:11:02.729077', '2004-01-26 10:11:02.729077', 16, NULL, NULL, NULL, NULL, 'Ubuntu', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
428+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (27, 7, NULL, 3, NULL, 9, NULL, 999, 999, 999, NULL, '2005-01-27 16:45:45.054836', '2004-01-27 16:45:45.054836', 12, NULL, 7, NULL, NULL, 'evolution (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
429+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (28, 2, NULL, NULL, 3, NULL, NULL, 10, NULL, 5, NULL, NULL, '2006-08-21 22:23:08.163936', 1, NULL, NULL, NULL, NULL, 'Ubuntu Hoary', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
430+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (29, 5, NULL, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-01-04 15:00:50.431687', 16, NULL, NULL, NULL, NULL, 'Mozilla Firefox 1.0', NULL, NULL, NULL, 2, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
431+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (30, 11, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-03-15 20:33:56.67893', 16, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
432+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (31, 12, 20, NULL, NULL, NULL, NULL, 20, NULL, 50, NULL, NULL, '2007-03-15 20:41:18.635493', 16, NULL, NULL, '', NULL, 'Jokosher', '2007-03-15 20:41:42.256326', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
433+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (32, 13, 17, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-07-27 20:00:58.299796', 12, NULL, NULL, NULL, NULL, 'Launchpad', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
434+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (33, 14, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-08-09 11:39:16.836856', 63, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
435+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (34, 15, 22, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-12-18 16:30:19.103679', 16, NULL, NULL, NULL, NULL, 'Redfish', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
436+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (35, 15, 8, NULL, NULL, NULL, NULL, 10, NULL, 999, NULL, NULL, '2007-12-18 16:30:47.889614', 16, NULL, 11, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
437
438
439 ALTER TABLE bugtask ENABLE TRIGGER ALL;
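-- The bugtask sample data above swaps the old "hotness_rank" column for
-- "heat_rank" (the values themselves are unchanged). A minimal sketch of the
-- schema change this implies, assuming a plain column rename; the branch's
-- actual schema patch is not reproduced in this excerpt:
-- ALTER TABLE bugtask RENAME COLUMN hotness_rank TO heat_rank;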
440@@ -1715,8 +1715,8 @@
441
442 ALTER TABLE buildqueue DISABLE TRIGGER ALL;
443
444-INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration) VALUES (1, 1, 'Dummy sampledata entry, not processing', 1, false, 1, 1, '00:00:00');
445-INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration) VALUES (2, NULL, NULL, 10, false, 2, 1, '00:01:00');
446+INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration, processor, virtualized) VALUES (1, 1, 'Dummy sampledata entry, not processing', 1, false, 1, 1, '00:00:00', 1, FALSE);
447+INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration, processor, virtualized) VALUES (2, NULL, NULL, 10, false, 2, 1, '00:01:00', 1, FALSE);
448
449
450 ALTER TABLE buildqueue ENABLE TRIGGER ALL;
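-- The buildqueue rows above gain two new columns, "processor" and
-- "virtualized", populated with 1 and FALSE in the sample data. A minimal
-- sketch of the implied schema change; the column types and any constraints
-- are assumptions, and the actual patch is not shown in this excerpt:
-- ALTER TABLE buildqueue ADD COLUMN processor integer;
-- ALTER TABLE buildqueue ADD COLUMN virtualized boolean;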
451@@ -1724,8 +1724,8 @@
452
453 ALTER TABLE codeimport DISABLE TRIGGER ALL;
454
455-INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, svn_branch_url, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, git_repo_url) VALUES (1, 75, '2007-06-25 20:04:04.226605', 52, 2, 'http://svn.example.org/svnroot/gnome-terminal/trunk', NULL, NULL, 20, NULL, 52, NULL, NULL, NULL);
456-INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, svn_branch_url, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, git_repo_url) VALUES (2, 76, '2007-06-25 20:04:04.379285', 52, 1, NULL, ':pserver:anonymous@anoncvs.example.org:/cvs/gnome', 'evolution', 1, NULL, 52, NULL, NULL, NULL);
457+INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, url) VALUES (1, 75, '2007-06-25 20:04:04.226605', 52, 2, NULL, NULL, 20, NULL, 52, NULL, NULL, 'http://svn.example.org/svnroot/gnome-terminal/trunk');
458+INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, url) VALUES (2, 76, '2007-06-25 20:04:04.379285', 52, 1, ':pserver:anonymous@anoncvs.example.org:/cvs/gnome', 'evolution', 1, NULL, 52, NULL, NULL, NULL);
459
460
461 ALTER TABLE codeimport ENABLE TRIGGER ALL;
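-- In the codeimport rows above, the separate "svn_branch_url" and
-- "git_repo_url" columns are replaced by a single "url" column, and the
-- existing Subversion URL is carried over into it. A minimal sketch of an
-- equivalent migration, offered only as an illustration inferred from the
-- sample data; the branch's actual schema patch is not reproduced here:
-- ALTER TABLE codeimport ADD COLUMN url text;
-- UPDATE codeimport SET url = COALESCE(svn_branch_url, git_repo_url);
-- ALTER TABLE codeimport DROP COLUMN svn_branch_url;
-- ALTER TABLE codeimport DROP COLUMN git_repo_url;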
462
463=== modified file 'database/sampledata/current.sql'
464--- database/sampledata/current.sql 2010-01-14 03:36:45 +0000
465+++ database/sampledata/current.sql 2010-01-21 17:51:26 +0000
466@@ -1271,19 +1271,19 @@
467
468 ALTER TABLE bug DISABLE TRIGGER ALL;
469
470-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (1, '2004-01-01 20:58:04.553583', NULL, 'Firefox does not support SVG', 'Firefox needs to support embedded SVG images, now that the standard has been finalised.
471+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (1, '2004-01-01 20:58:04.553583', NULL, 'Firefox does not support SVG', 'Firefox needs to support embedded SVG images, now that the standard has been finalised.
472
473-The SVG standard 1.0 is complete, and draft implementations for Firefox exist. One of these implementations needs to be integrated with the base install of Firefox. Ideally, the implementation needs to include support for the manipulation of SVG objects from JavaScript to enable interactive and dynamic SVG drawings.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.344941', NULL, NULL, NULL, 0, 2, 0, 0, 0);
474-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (2, '2004-01-02 20:58:04.572546', 'blackhole', 'Blackhole Trash folder', 'The Trash folder seems to have significant problems! At the moment, dragging an item to the trash results in immediate deletion. The item does not appear in the Trash, it is just deleted from my hard disk. There is no undo or ability to recover the deleted file. Help!', 12, NULL, NULL, false, false, '2006-07-14 08:47:36.124403', NULL, NULL, NULL, 0, 3, 0, 0, 0);
475-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (3, '2004-01-03 00:00:00', NULL, 'Bug Title Test', 'Shirtpkdf user@domain.org lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
476+The SVG standard 1.0 is complete, and draft implementations for Firefox exist. One of these implementations needs to be integrated with the base install of Firefox. Ideally, the implementation needs to include support for the manipulation of SVG objects from JavaScript to enable interactive and dynamic SVG drawings.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.344941', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
477+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (2, '2004-01-02 20:58:04.572546', 'blackhole', 'Blackhole Trash folder', 'The Trash folder seems to have significant problems! At the moment, dragging an item to the trash results in immediate deletion. The item does not appear in the Trash, it is just deleted from my hard disk. There is no undo or ability to recover the deleted file. Help!', 12, NULL, NULL, false, false, '2006-07-14 08:47:36.124403', NULL, NULL, NULL, 0, 3, 0, 0, 0, NULL);
478+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (3, '2004-01-03 00:00:00', NULL, 'Bug Title Test', 'Shirtpkdf user@domain.org lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
479
480 y idu yifdxhfgffxShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
481 Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo
482
483-Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo', 16, NULL, NULL, false, false, '2006-07-14 08:48:52.922462', NULL, NULL, NULL, 0, 1, 0, 0, 0);
484-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (4, '2004-01-04 00:00:00', NULL, 'Reflow problems with complex page layouts', 'Malone pages that use more complex layouts with portlets and fancy CSS are sometimes not getting properly reflowed after rendering.', 12, NULL, NULL, false, false, '2006-07-14 08:49:17.124885', NULL, NULL, NULL, 0, 1, 0, 0, 0);
485-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (5, '2004-01-05 00:00:00', NULL, 'Firefox install instructions should be complete', 'All ways of downloading firefox should provide complete install instructions. At present, they are only visible on the Release Notes page.', 12, NULL, NULL, false, false, '2006-07-14 08:49:40.553212', NULL, NULL, NULL, 0, 1, 0, 0, 0);
486-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (6, '2004-01-06 00:00:00', NULL, 'Firefox crashes when Save As dialog for a nonexistent window is closed', 'User-Agent:
487+Shirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xoShirtpkdf jlkdsj;lkd lkjd hlkjfds gkfdsg kfd glkfd gifdsytoxdiytxoiufdytoidxf yxoigfyoigfxuyfxoiug yxoiuy oiugf hyoifxugyoixgfuy xoiuyxoiyxoifuy xo', 16, NULL, NULL, false, false, '2006-07-14 08:48:52.922462', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
488+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (4, '2004-01-04 00:00:00', NULL, 'Reflow problems with complex page layouts', 'Malone pages that use more complex layouts with portlets and fancy CSS are sometimes not getting properly reflowed after rendering.', 12, NULL, NULL, false, false, '2006-07-14 08:49:17.124885', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
489+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (5, '2004-01-05 00:00:00', NULL, 'Firefox install instructions should be complete', 'All ways of downloading firefox should provide complete install instructions. At present, they are only visible on the Release Notes page.', 12, NULL, NULL, false, false, '2006-07-14 08:49:40.553212', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
490+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (6, '2004-01-06 00:00:00', NULL, 'Firefox crashes when Save As dialog for a nonexistent window is closed', 'User-Agent:
491 Build Identifier: Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.6) Gecko/20040207 Firefox/0.8
492
493 If a Save As dialog for a nonexistent window exists, when the dialog is closed Firefox will crash. It''s possible to cause this to happen using the "Open With/Save As" dialog.
494@@ -1302,12 +1302,12 @@
495 Crash!
496
497 Expected Results:
498-Not crashed.', 12, 5, NULL, false, false, '2006-05-19 06:37:40.389193', NULL, NULL, NULL, 0, 1, 0, 0, 0);
499-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (7, '2004-01-07 15:12:29.602117', NULL, 'A test bug', 'A test description', 16, NULL, NULL, false, false, '2006-05-19 06:37:40.391822', NULL, NULL, NULL, 0, 1, 0, 0, 0);
500-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (8, '2004-01-08 09:10:17.13237', NULL, 'Printing doesn''t work', 'When I press print in Firefox, nothing happens, not even a print dialog comes up.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.393303', NULL, NULL, NULL, 0, 1, 0, 0, 0);
501-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (9, '2004-01-09 16:42:14.080227', NULL, 'Thunderbird crashes', 'Every time I start Thunderbird it crashes.', 16, NULL, NULL, false, false, '2006-07-14 08:45:10.600579', NULL, NULL, NULL, 0, 1, 0, 0, 0);
502-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (10, '2004-01-10 19:42:21.890299', NULL, 'another test bug', 'test bug', 16, NULL, NULL, false, false, '2006-07-14 08:54:19.453881', NULL, NULL, NULL, 0, 2, 0, 0, 0);
503-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (11, '2007-03-15 20:33:56.67893', NULL, 'Make Jokosher use autoaudiosink', 'I''ve had problems when switching from Jokosher to Totem to play an Ogg.
504+Not crashed.', 12, 5, NULL, false, false, '2006-05-19 06:37:40.389193', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
505+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (7, '2004-01-07 15:12:29.602117', NULL, 'A test bug', 'A test description', 16, NULL, NULL, false, false, '2006-05-19 06:37:40.391822', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
506+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (8, '2004-01-08 09:10:17.13237', NULL, 'Printing doesn''t work', 'When I press print in Firefox, nothing happens, not even a print dialog comes up.', 12, NULL, NULL, false, false, '2006-05-19 06:37:40.393303', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
507+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (9, '2004-01-09 16:42:14.080227', NULL, 'Thunderbird crashes', 'Every time I start Thunderbird it crashes.', 16, NULL, NULL, false, false, '2006-07-14 08:45:10.600579', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
508+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (10, '2004-01-10 19:42:21.890299', NULL, 'another test bug', 'test bug', 16, NULL, NULL, false, false, '2006-07-14 08:54:19.453881', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
509+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (11, '2007-03-15 20:33:56.67893', NULL, 'Make Jokosher use autoaudiosink', 'I''ve had problems when switching from Jokosher to Totem to play an Ogg.
510
511 Totem appears to be playing normally but does not produce any sound.
512 If I close Jokosher then you can hear totem.
513@@ -1333,11 +1333,11 @@
514 Is there some way to reset alsa or the device driver - with out having
515 to restart?
516
517-Running on Ubuntu - with Jokosher 0.2 runscript.', 16, NULL, NULL, false, false, '2007-03-15 20:37:51.603369', NULL, NULL, NULL, 0, 7, 0, 0, 0);
518-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (12, '2007-03-15 20:41:18.635493', NULL, 'Copy, Cut and Delete operations should work on selections', 'When trying to copy, cut or delete just a selected portion of an event, the operations affect the event completely. That is, you can''t select a portion and cut that piece. The whole event will be cut. Same goes for the other 2 operations.', 16, NULL, NULL, false, false, '2007-03-15 20:46:49.909153', NULL, NULL, NULL, 0, 5, 0, 0, 0);
519-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (13, '2007-07-27 20:00:58.299796', NULL, 'Launchpad CSS and JS is not testible', 'The messages placed on this bug are for eyeball viewing of JS and CSS behaviour.', 12, NULL, NULL, false, false, '2007-07-27 20:29:46.336737', NULL, NULL, NULL, 0, 2, 0, 0, 0);
520-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (14, '2007-08-09 11:39:16.836856', NULL, 'jokosher exposes personal details in its actions portlet', 'Jokosher discloses to any passerby the fact that I am single and unwed in its actions portlet. Please fix this blatant violacion of privacy now!!', 63, NULL, NULL, true, true, '2007-08-09 11:39:16.836856', NULL, NULL, NULL, 0, 1, 0, 0, 0);
521-INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, hotness) VALUES (15, '2007-12-18 16:30:19.103679', NULL, 'Nonsensical bugs are useless', 'Like this one, natch.', 16, NULL, NULL, false, false, '2007-12-18 16:31:34.972893', NULL, NULL, '2007-12-18 16:31:34.790641', 0, 7, 0, 0, 0);
522+Running on Ubuntu - with Jokosher 0.2 runscript.', 16, NULL, NULL, false, false, '2007-03-15 20:37:51.603369', NULL, NULL, NULL, 0, 7, 0, 0, 0, NULL);
523+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (12, '2007-03-15 20:41:18.635493', NULL, 'Copy, Cut and Delete operations should work on selections', 'When trying to copy, cut or delete just a selected portion of an event, the operations affect the event completely. That is, you can''t select a portion and cut that piece. The whole event will be cut. Same goes for the other 2 operations.', 16, NULL, NULL, false, false, '2007-03-15 20:46:49.909153', NULL, NULL, NULL, 0, 5, 0, 0, 0, NULL);
524+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (13, '2007-07-27 20:00:58.299796', NULL, 'Launchpad CSS and JS is not testible', 'The messages placed on this bug are for eyeball viewing of JS and CSS behaviour.', 12, NULL, NULL, false, false, '2007-07-27 20:29:46.336737', NULL, NULL, NULL, 0, 2, 0, 0, 0, NULL);
525+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (14, '2007-08-09 11:39:16.836856', NULL, 'jokosher exposes personal details in its actions portlet', 'Jokosher discloses to any passerby the fact that I am single and unwed in its actions portlet. Please fix this blatant violacion of privacy now!!', 63, NULL, NULL, true, true, '2007-08-09 11:39:16.836856', NULL, NULL, NULL, 0, 1, 0, 0, 0, NULL);
526+INSERT INTO bug (id, datecreated, name, title, description, owner, duplicateof, fti, private, security_related, date_last_updated, date_made_private, who_made_private, date_last_message, number_of_duplicates, message_count, users_affected_count, users_unaffected_count, heat, heat_last_updated) VALUES (15, '2007-12-18 16:30:19.103679', NULL, 'Nonsensical bugs are useless', 'Like this one, natch.', 16, NULL, NULL, false, false, '2007-12-18 16:31:34.972893', NULL, NULL, '2007-12-18 16:31:34.790641', 0, 7, 0, 0, 0, NULL);
527
528
529 ALTER TABLE bug ENABLE TRIGGER ALL;
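-- The bug rows above replace the "hotness" column with "heat" and append a
-- nullable "heat_last_updated" timestamp (NULL in all sample rows). A minimal
-- sketch of the implied schema change, assuming a rename plus a new column;
-- the exact types and the actual patch are not shown in this excerpt:
-- ALTER TABLE bug RENAME COLUMN hotness TO heat;
-- ALTER TABLE bug ADD COLUMN heat_last_updated timestamp without time zone;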
530@@ -1561,34 +1561,34 @@
531
532 ALTER TABLE bugtask DISABLE TRIGGER ALL;
533
534-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (2, 1, 4, NULL, NULL, NULL, NULL, 10, 10, 20, 1, '2005-01-02 11:07:20.330975', '2004-01-02 03:49:22.910878', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
535-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (3, 2, 1, NULL, NULL, NULL, NULL, 10, 20, 20, NULL, '2005-01-03 11:07:20.330975', '2004-01-03 03:49:22.908491', 12, NULL, NULL, NULL, NULL, 'Tomcat', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
536-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (4, 1, NULL, 3, NULL, 1, NULL, 20, 40, 20, NULL, '2005-01-04 11:07:20.584746', '2004-01-04 03:49:22.79024', 12, NULL, 8, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
537-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (5, 2, NULL, 3, NULL, 1, NULL, 20, 40, 20, 12, '2005-01-05 11:07:20.584746', '2004-01-05 03:49:22.824591', 12, NULL, NULL, 'Upstream said that they won''t bother fixing it.', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
538-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (6, 3, NULL, 3, NULL, 1, NULL, 10, 20, 999, NULL, '2005-01-06 11:07:20.584746', '2004-01-06 03:49:22.825533', 16, NULL, NULL, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
539-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (13, 4, 4, NULL, NULL, NULL, NULL, 10, 30, 30, NULL, '2005-01-13 17:20:12.820778', '2004-01-13 17:20:12.820778', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
540-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (14, 5, 4, NULL, NULL, NULL, NULL, 10, 30, 50, 12, '2005-01-14 17:27:03.702622', '2004-01-14 17:27:03.702622', 12, NULL, NULL, 'The status explanation is useful to provide task specific information.', NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
541-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (15, 6, 4, NULL, NULL, NULL, NULL, 10, 30, 40, NULL, '2005-01-15 17:35:39.548665', '2004-01-15 17:35:39.548665', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
542-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (16, 5, NULL, NULL, 1, 1, NULL, 10, 30, 30, NULL, '2005-01-16 14:43:02.452716', '2004-01-16 14:43:02.452716', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu Warty)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
543-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (17, 1, NULL, 1, NULL, 1, NULL, 10, 30, 30, NULL, '2005-01-17 01:15:48.241836', '2004-01-17 01:15:48.241836', 16, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
544-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (18, 3, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-18 16:30:32.295049', '2004-01-18 16:30:32.295049', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
545-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (19, 3, NULL, NULL, 7, 1, NULL, 10, 30, 30, NULL, '2005-01-19 16:30:47.448639', '2004-01-19 16:30:47.448639', 12, 2, 10, '', NULL, 'mozilla-firefox (Debian Sarge)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
546-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (20, 2, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-20 13:54:57.840344', '2004-01-20 13:54:57.840344', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
547-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (21, 7, 5, NULL, NULL, NULL, NULL, 10, NULL, 30, 16, '2005-01-21 15:12:29.602117', '2004-01-21 15:12:29.602117', 16, NULL, NULL, NULL, NULL, 'Evolution', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
548-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (22, 8, NULL, 3, NULL, 1, NULL, 30, NULL, 30, 16, '2005-01-22 09:10:17.13237', '2004-01-22 09:10:17.13237', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian)', NULL, NULL, '2005-01-25 09:10:17.13237', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
549-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (23, 9, NULL, 1, NULL, 18, NULL, 20, NULL, 30, NULL, '2005-01-23 16:42:14.080227', '2004-01-23 16:42:14.080227', 16, NULL, NULL, '', NULL, 'thunderbird (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
550-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (24, 9, 8, NULL, NULL, NULL, NULL, 999, 999, 999, NULL, '2005-01-24 16:43:25.744534', '2004-01-24 16:43:25.744534', 12, NULL, 6, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
551-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (25, 10, NULL, 1, NULL, 22, NULL, 10, NULL, 30, NULL, '2005-01-25 19:42:21.890299', '2004-01-25 19:42:21.890299', 16, NULL, NULL, NULL, NULL, 'linux-source-2.6.15 (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
552-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (26, 2, NULL, 1, NULL, NULL, NULL, 10, NULL, 30, NULL, '2005-01-26 10:11:02.729077', '2004-01-26 10:11:02.729077', 16, NULL, NULL, NULL, NULL, 'Ubuntu', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
553-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (27, 7, NULL, 3, NULL, 9, NULL, 999, 999, 999, NULL, '2005-01-27 16:45:45.054836', '2004-01-27 16:45:45.054836', 12, NULL, 7, NULL, NULL, 'evolution (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
554-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (28, 2, NULL, NULL, 3, NULL, NULL, 10, NULL, 5, NULL, NULL, '2006-08-21 22:23:08.163936', 1, NULL, NULL, NULL, NULL, 'Ubuntu Hoary', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
555-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (29, 5, NULL, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-01-04 15:00:50.431687', 16, NULL, NULL, NULL, NULL, 'Mozilla Firefox 1.0', NULL, NULL, NULL, 2, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
556-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (30, 11, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-03-15 20:33:56.67893', 16, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
557-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (31, 12, 20, NULL, NULL, NULL, NULL, 20, NULL, 50, NULL, NULL, '2007-03-15 20:41:18.635493', 16, NULL, NULL, '', NULL, 'Jokosher', '2007-03-15 20:41:42.256326', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
558-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (32, 13, 17, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-07-27 20:00:58.299796', 12, NULL, NULL, NULL, NULL, 'Launchpad', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
559-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (33, 14, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-08-09 11:39:16.836856', 63, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
560-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (34, 15, 22, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-12-18 16:30:19.103679', 16, NULL, NULL, NULL, NULL, 'Redfish', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
561-INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, hotness_rank, date_milestone_set) VALUES (35, 15, 8, NULL, NULL, NULL, NULL, 10, NULL, 999, NULL, NULL, '2007-12-18 16:30:47.889614', 16, NULL, 11, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
562+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (2, 1, 4, NULL, NULL, NULL, NULL, 10, 10, 20, 1, '2005-01-02 11:07:20.330975', '2004-01-02 03:49:22.910878', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
563+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (3, 2, 1, NULL, NULL, NULL, NULL, 10, 20, 20, NULL, '2005-01-03 11:07:20.330975', '2004-01-03 03:49:22.908491', 12, NULL, NULL, NULL, NULL, 'Tomcat', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
564+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (4, 1, NULL, 3, NULL, 1, NULL, 20, 40, 20, NULL, '2005-01-04 11:07:20.584746', '2004-01-04 03:49:22.79024', 12, NULL, 8, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
565+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (5, 2, NULL, 3, NULL, 1, NULL, 20, 40, 20, 12, '2005-01-05 11:07:20.584746', '2004-01-05 03:49:22.824591', 12, NULL, NULL, 'Upstream said that they won''t bother fixing it.', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
566+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (6, 3, NULL, 3, NULL, 1, NULL, 10, 20, 999, NULL, '2005-01-06 11:07:20.584746', '2004-01-06 03:49:22.825533', 16, NULL, NULL, '', NULL, 'mozilla-firefox (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
567+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (13, 4, 4, NULL, NULL, NULL, NULL, 10, 30, 30, NULL, '2005-01-13 17:20:12.820778', '2004-01-13 17:20:12.820778', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
568+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (14, 5, 4, NULL, NULL, NULL, NULL, 10, 30, 50, 12, '2005-01-14 17:27:03.702622', '2004-01-14 17:27:03.702622', 12, NULL, NULL, 'The status explanation is useful to provide task specific information.', NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
569+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (15, 6, 4, NULL, NULL, NULL, NULL, 10, 30, 40, NULL, '2005-01-15 17:35:39.548665', '2004-01-15 17:35:39.548665', 12, NULL, NULL, NULL, NULL, 'Mozilla Firefox', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
570+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (16, 5, NULL, NULL, 1, 1, NULL, 10, 30, 30, NULL, '2005-01-16 14:43:02.452716', '2004-01-16 14:43:02.452716', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu Warty)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
571+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (17, 1, NULL, 1, NULL, 1, NULL, 10, 30, 30, NULL, '2005-01-17 01:15:48.241836', '2004-01-17 01:15:48.241836', 16, NULL, NULL, NULL, NULL, 'mozilla-firefox (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
572+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (18, 3, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-18 16:30:32.295049', '2004-01-18 16:30:32.295049', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
573+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (19, 3, NULL, NULL, 7, 1, NULL, 10, 30, 30, NULL, '2005-01-19 16:30:47.448639', '2004-01-19 16:30:47.448639', 12, 2, 10, '', NULL, 'mozilla-firefox (Debian Sarge)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
574+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (20, 2, NULL, NULL, 6, 1, NULL, 10, 30, 30, NULL, '2005-01-20 13:54:57.840344', '2004-01-20 13:54:57.840344', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian Woody)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
575+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (21, 7, 5, NULL, NULL, NULL, NULL, 10, NULL, 30, 16, '2005-01-21 15:12:29.602117', '2004-01-21 15:12:29.602117', 16, NULL, NULL, NULL, NULL, 'Evolution', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
576+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (22, 8, NULL, 3, NULL, 1, NULL, 30, NULL, 30, 16, '2005-01-22 09:10:17.13237', '2004-01-22 09:10:17.13237', 12, NULL, NULL, NULL, NULL, 'mozilla-firefox (Debian)', NULL, NULL, '2005-01-25 09:10:17.13237', NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
577+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (23, 9, NULL, 1, NULL, 18, NULL, 20, NULL, 30, NULL, '2005-01-23 16:42:14.080227', '2004-01-23 16:42:14.080227', 16, NULL, NULL, '', NULL, 'thunderbird (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
578+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (24, 9, 8, NULL, NULL, NULL, NULL, 999, 999, 999, NULL, '2005-01-24 16:43:25.744534', '2004-01-24 16:43:25.744534', 12, NULL, 6, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
579+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (25, 10, NULL, 1, NULL, 22, NULL, 10, NULL, 30, NULL, '2005-01-25 19:42:21.890299', '2004-01-25 19:42:21.890299', 16, NULL, NULL, NULL, NULL, 'linux-source-2.6.15 (Ubuntu)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
580+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (26, 2, NULL, 1, NULL, NULL, NULL, 10, NULL, 30, NULL, '2005-01-26 10:11:02.729077', '2004-01-26 10:11:02.729077', 16, NULL, NULL, NULL, NULL, 'Ubuntu', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
581+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (27, 7, NULL, 3, NULL, 9, NULL, 999, 999, 999, NULL, '2005-01-27 16:45:45.054836', '2004-01-27 16:45:45.054836', 12, NULL, 7, NULL, NULL, 'evolution (Debian)', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
582+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (28, 2, NULL, NULL, 3, NULL, NULL, 10, NULL, 5, NULL, NULL, '2006-08-21 22:23:08.163936', 1, NULL, NULL, NULL, NULL, 'Ubuntu Hoary', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
583+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (29, 5, NULL, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-01-04 15:00:50.431687', 16, NULL, NULL, NULL, NULL, 'Mozilla Firefox 1.0', NULL, NULL, NULL, 2, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
584+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (30, 11, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-03-15 20:33:56.67893', 16, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
585+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (31, 12, 20, NULL, NULL, NULL, NULL, 20, NULL, 50, NULL, NULL, '2007-03-15 20:41:18.635493', 16, NULL, NULL, '', NULL, 'Jokosher', '2007-03-15 20:41:42.256326', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
586+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (32, 13, 17, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-07-27 20:00:58.299796', 12, NULL, NULL, NULL, NULL, 'Launchpad', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
587+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (33, 14, 20, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-08-09 11:39:16.836856', 63, NULL, NULL, NULL, NULL, 'Jokosher', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
588+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (34, 15, 22, NULL, NULL, NULL, NULL, 10, NULL, 5, NULL, NULL, '2007-12-18 16:30:19.103679', 16, NULL, NULL, NULL, NULL, 'Redfish', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
589+INSERT INTO bugtask (id, bug, product, distribution, distroseries, sourcepackagename, binarypackagename, status, priority, importance, assignee, date_assigned, datecreated, owner, milestone, bugwatch, statusexplanation, fti, targetnamecache, date_confirmed, date_inprogress, date_closed, productseries, date_incomplete, date_left_new, date_triaged, date_fix_committed, date_fix_released, date_left_closed, heat_rank, date_milestone_set) VALUES (35, 15, 8, NULL, NULL, NULL, NULL, 10, NULL, 999, NULL, NULL, '2007-12-18 16:30:47.889614', 16, NULL, 11, NULL, NULL, 'Mozilla Thunderbird', NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, 0, NULL);
590
591
592 ALTER TABLE bugtask ENABLE TRIGGER ALL;
593@@ -1697,8 +1697,8 @@
594
595 ALTER TABLE buildqueue DISABLE TRIGGER ALL;
596
597-INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration) VALUES (1, 1, 'Dummy sampledata entry, not processing', 1, false, 1, 1, '00:00:00');
598-INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration) VALUES (2, NULL, NULL, 10, false, 2, 1, '00:01:00');
599+INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration, processor, virtualized) VALUES (1, 1, 'Dummy sampledata entry, not processing', 1, false, 1, 1, '00:00:00', 1, FALSE);
600+INSERT INTO buildqueue (id, builder, logtail, lastscore, manual, job, job_type, estimated_duration, processor, virtualized) VALUES (2, NULL, NULL, 10, false, 2, 1, '00:01:00', 1, FALSE);
601
602
603 ALTER TABLE buildqueue ENABLE TRIGGER ALL;
604@@ -1706,8 +1706,8 @@
605
606 ALTER TABLE codeimport DISABLE TRIGGER ALL;
607
608-INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, svn_branch_url, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, git_repo_url) VALUES (1, 75, '2007-06-25 20:04:04.226605', 52, 2, 'http://svn.example.org/svnroot/gnome-terminal/trunk', NULL, NULL, 20, NULL, 52, NULL, NULL, NULL);
609-INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, svn_branch_url, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, git_repo_url) VALUES (2, 76, '2007-06-25 20:04:04.379285', 52, 1, NULL, ':pserver:anonymous@anoncvs.example.org:/cvs/gnome', 'evolution', 1, NULL, 52, NULL, NULL, NULL);
610+INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, url) VALUES (1, 75, '2007-06-25 20:04:04.226605', 52, 2, NULL, NULL, 20, NULL, 52, NULL, NULL, 'http://svn.example.org/svnroot/gnome-terminal/trunk');
611+INSERT INTO codeimport (id, branch, date_created, registrant, rcs_type, cvs_root, cvs_module, review_status, date_last_successful, owner, assignee, update_interval, url) VALUES (2, 76, '2007-06-25 20:04:04.379285', 52, 1, ':pserver:anonymous@anoncvs.example.org:/cvs/gnome', 'evolution', 1, NULL, 52, NULL, NULL, NULL);
612
613
614 ALTER TABLE codeimport ENABLE TRIGGER ALL;
615
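For illustration only (not part of this diff): the sampledata above reflects the consolidation of svn_branch_url into a single url column, with the CVS import keeping cvs_root/cvs_module and leaving url NULL. Under that assumption, a report of each import's source location can now be written against the one column:

    -- Illustrative sketch only; not part of this branch.
    -- Reports the import location regardless of VCS type.
    SELECT id,
           COALESCE(url, cvs_root || ' ' || cvs_module) AS import_location
    FROM CodeImport
    ORDER BY id;
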
616=== modified file 'database/schema/README'
617--- database/schema/README 2006-10-27 01:02:38 +0000
618+++ database/schema/README 2010-01-21 17:51:26 +0000
619@@ -1,131 +1,1 @@
620-= How to make database schema changes =
621-
622-Important: This documentation is mirrored on https://launchpad.canonical.com/DatabaseSchemaChanges
623-
624-So, make sure to copy any changes you make here to the wiki page!
625-
626-== Making schema changes ==
627-
628- 1. Make an SQL file in `database/schema/pending/` containing the changes you want, excluding any changes to default values.
629- 2. Run `make schema` to get a pristine database of sampledata.
630- 3. Run the SQL on the database (`psql launchpad_dev -f your-patch.sql`) to ensure it works.
631- 4. In `database/schema/`, `make newsampledata`.
632- 5. Replace `current.sql` with `newsampledata.sql`.
633- 6. Copy your SQL into `database/schema/` with a name like `patch-xx-99-0.sql` (where ''xx'' matches the existing patches), and ending with the line "INSERT INTO Launchpad``Database``Revision VALUES (xx, 99, 0);". Both the name and this last line should be renamed as directed when the patch is approved.
634- At this point `make schema` should work again.
635- 7. Make any necessary changes to `lib/canonical/lp/dbschema.py`, `database/schema/fti.py`, and to the relevant `lib/canonical/launchpad/database/` classes.
636- 8. Make any changes to the SQL patch to reflect new default values.
637-
638-== Proposing database schema changes ==
639-
640-For any tables and fields that you change with an SQL script via Stuart
641-(stub on IRC), please make sure you include comments.
642-
643-The process now looks like this:
644-
645- 1. If you think the proposed changes may be controversial, or you are just unsure, it is worth discussing the changes on the launchpad mailing list first to avoid wasting your time.
646- 2. Work on the patch in a branch as documented above.
647- 3. Add the details of your branch to StuartBishop's review queue on PendingReviews.
648- 4. Work on it in revision control till StuartBishop is happy. He will give you an official patch number.
649- 5. Rename your patch to match the official patch number.
650- 6. Once code is also ready and reviewed, commit as normal.
651-
652-== Resolving schema conflicts ==
653-
654-Resolving conflicts in `current.sql` manually is usually more trouble than it's worth. Instead, first resolve any conflicts in `comments.sql`, then: {{{
655-
656- cd database/schema/
657- mv {patch-in-question}-0.sql comments.sql pending/
658- cp {parent branch, e.g. rocketfuel}/database/schema/comments.sql ./
659- cp ../sampledata/current.sql.OTHER ../sampledata/current.sql
660- make
661- psql launchpad_dev -f pending/patch-xx-99-0.sql
662- make newsampledata
663- mv ../sampledata/newsampledata.sql ../sampledata/current.sql
664- mv pending/{patch-in-question}-0.sql pending/comments.sql ./
665- make # Just to make sure everything works
666- cd ../..
667- bzr resolve database/sampledata/current.sql
668-
669-}}}
670-
671-= Production Database Upgrades =
672-
673-First get a copy of the Launchpad source built and ready on emperor, readable
674-by the postgres user.
675-
676-Then, before you do anything else, inform #canonical, #launchpad and
677-#ubuntu-devel that Launchpad and the Wiki authentication systems will be
678-offline for 30 minutes (or longer if there is data migration to do).
679-
680-Stop PostgreSQL:
681-
682- % pg_ctl -m fast stop
683-
684-Start PostgreSQL without external connections
685-
686- % pg_ctl start -o '--tcpip-socket=false' -o '--ssl=false' \
687- -l /var/log/postgresql/postgres.log
688-
689-As user postgres, run the upgrade.py, fti.py and security.py scripts.
690-fti.py can be skipped if you are sure no changes need to be made to the
691-full text indexes (i.e. fti.py has not been modified and no patches affect
692-the tables being indexed). This process should work without issues, as any
693-problems (such as DB patches not working on production data) will have been
694-picked up from the daily updates to the staging environment. Do not run
695-upgrade.py with the --partial option; this ensures that changes will be rolled
696-back on failure.
697-
698- % cd dists/launchpad/database/schema
699- % env LPCONFIG=production \
700- python upgrade.py -d launchpad_prod -U postgres -H ''
701- % env LPCONFIG=production \
702- python fti.py -d launchpad_prod -U postgres -H ''
703- % env LPCONFIG=production \
704- python security.py -d launchpad_prod -U postgres -H ''
705-
706-Restart PostgreSQL with external connections
707-
708- % pg_ctl -m fast stop
709- % pg_ctl start -l /var/log/postgresql/postgres.log
710-
711-At this point, restart any services that don't automatically reconnect,
712-such as the Launchpad web application servers and the Librarian.
713-
714-== Create a new development baseline ==
715-
716-After a production update, you should occasionally copy the live schema
717-back into the development tree. This ensures that any differences that have
718-crept in between the development database and reality are fixed.
719-The new baseline dump (launchpad-XX-0-0.sql in this directory) can
720-be generated on production using the following:
721-
722- pg_dump -Fc --schema-only --no-owner --no-acl --schema=public \
723- launchpad_prod > launchpad.dump
724- pg_restore -l launchpad.dump | \
725- grep -v PROCEDURAL | grep -v COMMENT | \
726- grep -v FUNCTION | grep -v VIEW > launchpad.list
727- pg_restore -l launchpad.dump | grep VIEW >> launchpad.list
728- echo "-- Generated `date`" > launchpad.sql
729- echo 'SET client_min_messages=ERROR;' >> launchpad.sql
730- pg_restore --no-owner --no-acl -L launchpad.list launchpad.dump | \
731- grep -v '^--' >> launchpad.sql
732-
733-Move all the existing patches and the old baseline to the archive directory.
734-Add the new baseline using the next revision number (should be in sync
735-with the production release version). Create a patch-XX-0-0.sql patch
736-to populate the LaunchpadDatabaseRevision table with the correct value
737-so the tests pass.
738-
739-
740-= Notes =
741-
742-There is a Makefile in launchpad/database/schema that will
743-create the launchpad_test database (if it doesn't already exist),
744-drop all your tables and create the current schema with all patches
745-applied.
746-
747-If you want to check anything into the launchpad/database/schema
748-directory, please do not give it a .sql extension or you might
749-confuse the simple Makefile.
750-
751+See https://dev.launchpad.net/PolicyAndProcess/DatabaseSchemaChangesProcess
752
753=== modified file 'database/schema/comments.sql'
754--- database/schema/comments.sql 2009-12-01 13:45:58 +0000
755+++ database/schema/comments.sql 2010-01-21 17:51:26 +0000
756@@ -153,7 +153,8 @@
757 COMMENT ON COLUMN Bug.number_of_duplicates IS 'The number of bugs marked as duplicates of this bug, populated by a trigger after setting the duplicateof of bugs.';
757 COMMENT ON COLUMN Bug.message_count IS 'The number of messages (currently just comments) on this bug, maintained by the set_bug_message_count_t trigger.';
759 COMMENT ON COLUMN Bug.users_affected_count IS 'The number of users affected by this bug, maintained by the set_bug_users_affected_count_t trigger.';
760-COMMENT ON COLUMN Bug.hotness IS 'The relevance of this bug. This value is computed periodically using bug_affects_person and other bug values.';
761+COMMENT ON COLUMN Bug.heat IS 'The relevance of this bug. This value is computed periodically using bug_affects_person and other bug values.';
762+COMMENT ON COLUMN Bug.heat_last_updated IS 'The time this bug''s heat was last updated, or NULL if the heat has never yet been updated.';
763
764 -- BugBranch
765 COMMENT ON TABLE BugBranch IS 'A branch related to a bug, most likely a branch for fixing the bug.';
766@@ -163,6 +164,12 @@
767 COMMENT ON COLUMN BugBranch.whiteboard IS 'Additional information about the status of the bugfix in this branch.';
768 COMMENT ON COLUMN BugBranch.registrant IS 'The person who linked the bug to the branch.';
769
770+-- BugJob
771+COMMENT ON TABLE BugJob IS 'Contains references to jobs to be run against Bugs.';
772+COMMENT ON COLUMN BugJob.bug IS 'The bug on which the job is to be run.';
773+COMMENT ON COLUMN BugJob.job_type IS 'The type of job (enumeration value). Allows us to query the database for a given subset of BugJobs.';
774+COMMENT ON COLUMN BugJob.json_data IS 'A JSON struct containing data for the job.';
775+
776 -- BugNomination
777 COMMENT ON TABLE BugNomination IS 'A bug nominated for fixing in a distroseries or productseries';
778 COMMENT ON COLUMN BugNomination.bug IS 'The bug being nominated.';
779@@ -213,7 +220,7 @@
780 COMMENT ON COLUMN BugTask.date_fix_released IS 'The date when this bug transitioned to a FIXRELEASED status.';
781 COMMENT ON COLUMN BugTask.date_left_closed IS 'The date when this bug last transitioned out of a CLOSED status.';
782 COMMENT ON COLUMN BugTask.date_milestone_set IS 'The date when this bug was targeted to the milestone that is currently set.';
783-COMMENT ON COLUMN BugTask.hotness_rank IS 'The hotness bin in which this bugtask appears, as a value from the BugTaskHotnessRank enumeration.';
784+COMMENT ON COLUMN BugTask.heat_rank IS 'The heat bin in which this bugtask appears, as a value from the BugTaskHeatRank enumeration.';
785
786
787 -- BugNotification
788@@ -321,7 +328,7 @@
789 COMMENT ON COLUMN CodeImport.owner IS 'The person who is currently responsible for keeping the import details up to date, initially set to the registrant. This person can edit some of the details of the code import branch.';
790 COMMENT ON COLUMN CodeImport.review_status IS 'Whether this code import request has been reviewed, and whether it was accepted.';
791 COMMENT ON COLUMN CodeImport.rcs_type IS 'The revision control system used by the import source. The value is defined in dbschema.RevisionControlSystems.';
792-COMMENT ON COLUMN CodeImport.svn_branch_url IS 'The URL of the Subversion branch for this import.';
793+COMMENT ON COLUMN CodeImport.url IS 'The URL of the foreign VCS branch for this import.';
794 COMMENT ON COLUMN CodeImport.cvs_root IS 'The $CVSROOT details, probably of the form :pserver:user@host:/path.';
795 COMMENT ON COLUMN CodeImport.cvs_module IS 'The module in cvs_root to import, often the name of the project.';
796 COMMENT ON COLUMN CodeImport.date_last_successful IS 'When this code import last succeeded. NULL if this import has never succeeded.';
797@@ -1288,7 +1295,69 @@
798
799 COMMENT ON TABLE SourcePackageName IS 'SourcePackageName: A soyuz source package name.';
800
801+-- SourcePackageRecipeData
802+
803+COMMENT ON TABLE SourcePackageRecipeData IS 'The database representation of a BaseRecipeBranch from bzr-builder. Exactly one of sourcepackage_recipe or sourcepackage_recipe_build will be non-NULL.';
804+COMMENT ON COLUMN SourcePackageRecipeData.base_branch IS 'The branch the recipe is based on.';
805+COMMENT ON COLUMN SourcePackageRecipeData.recipe_format IS 'The format version of the recipe.';
806+COMMENT ON COLUMN SourcePackageRecipeData.deb_version_template IS 'The template for the revision number of the build.';
807+COMMENT ON COLUMN SourcePackageRecipeData.revspec IS 'The revision from base_branch to use.';
808+COMMENT ON COLUMN SourcePackageRecipeData.sourcepackage_recipe IS 'The recipe that this data is for.';
809+COMMENT ON COLUMN SourcePackageRecipeData.sourcepackage_recipe_build IS 'The build that resulted in this manifest.';
810+
811+-- SourcePackageRecipeDataInstruction
812+
813+COMMENT ON TABLE SourcePackageRecipeDataInstruction IS 'A line from the recipe, specifying a branch to nest or merge.';
814+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.name IS 'The name of the instruction.';
815+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.type IS 'The type of the instruction (MERGE == 1, NEST == 2).';
816+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.comment IS 'The comment from the recipe about this instruction.';
817+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.line_number IS 'The line number of the instruction in the recipe.';
818+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.branch IS 'The branch being merged or nested.';
819+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.revspec IS 'The revision of the branch to use.';
820+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.directory IS 'The location to nest at, if this is a nest instruction.';
821+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.recipe_data IS 'The SourcePackageRecipeData this instruction is part of.';
822+COMMENT ON COLUMN SourcePackageRecipeDataInstruction.parent_instruction IS 'The nested branch this instruction applies to, or NULL for a top-level instruction.';
823+
824+-- SourcePackageRecipe
825+
826+COMMENT ON TABLE SourcePackageRecipe IS 'A recipe for assembling a source package from branches.';
827+COMMENT ON COLUMN SourcePackageRecipe.registrant IS 'The person who created this recipe.';
828+COMMENT ON COLUMN SourcePackageRecipe.owner IS 'The person or team who can edit this recipe.';
829+COMMENT ON COLUMN SourcePackageRecipe.distroseries IS 'The distroseries this recipe builds a package for.';
830+COMMENT ON COLUMN SourcePackageRecipe.sourcepackagename IS 'The name of the source package this recipe builds.';
831+COMMENT ON COLUMN SourcePackageRecipe.name IS 'The name of the recipe in the web/URL.';
832+
833+-- SourcePackageRecipeBuild
834+
835+COMMENT ON TABLE SourcePackageRecipeBuild IS 'The build record for the process of building a source package as described by a recipe.';
836+COMMENT ON COLUMN SourcePackageRecipeBuild.distroseries IS 'The distroseries the build was for.';
837+COMMENT ON COLUMN SourcePackageRecipeBuild.sourcepackagename IS 'The name of the source package that was built.';
838+COMMENT ON COLUMN SourcePackageRecipeBuild.build_state IS 'The state of the build.';
839+COMMENT ON COLUMN SourcePackageRecipeBuild.date_built IS 'When the build record was processed.';
840+COMMENT ON COLUMN SourcePackageRecipeBuild.build_duration IS 'How long this build took to be processed.';
841+COMMENT ON COLUMN SourcePackageRecipeBuild.build_log IS 'Points to the build_log file stored in librarian.';
842+COMMENT ON COLUMN SourcePackageRecipeBuild.builder IS 'Points to the builder that processed this build.';
843+COMMENT ON COLUMN SourcePackageRecipeBuild.date_first_dispatched IS 'The instant the build was dispatched the first time. This value will not get overridden if the build is retried.';
844+COMMENT ON COLUMN SourcePackageRecipeBuild.requester IS 'Who requested the build.';
845+COMMENT ON COLUMN SourcePackageRecipeBuild.recipe IS 'The recipe being processed.';
846+COMMENT ON COLUMN SourcePackageRecipeBuild.archive IS 'The archive the source package will be uploaded to.';
847+
848+-- SourcePackageRecipeBuildUpload
849+
850+COMMENT ON TABLE SourcePackageRecipeBuildUpload IS 'The record of uploading the source package built by a SourcePackageRecipeBuild to an archive.';
851+COMMENT ON COLUMN SourcePackageRecipeBuildUpload.registrant IS 'Who requested the upload.';
852+COMMENT ON COLUMN SourcePackageRecipeBuildUpload.sourcepackage_recipe_build IS 'Upload the output of this build.';
853+COMMENT ON COLUMN SourcePackageRecipeBuildUpload.archive IS 'The archive to upload to.';
854+COMMENT ON COLUMN SourcePackageRecipeBuildUpload.upload_log IS 'The output from uploading the source package to the archive.';
855+COMMENT ON COLUMN SourcePackageRecipeBuildUpload.state IS 'The state of the upload.';
856+
857+-- SourcePackageRecipeBuildJob
858+
859+COMMENT ON TABLE SourcePackageRecipeBuildJob IS 'The link between a SourcePackageRecipeBuild row and a Job row to schedule a build of a source package recipe.';
860+COMMENT ON COLUMN SourcePackageRecipeBuildJob.sourcepackage_recipe_build IS 'The build record describing the package being built.';
861+
862 -- Specification
863+
864 COMMENT ON TABLE Specification IS 'A feature specification. At the moment we do not store the actual specification, we store a URL for the spec, which is managed in a wiki somewhere else. We store the overall state of the spec, as well as queueing information about who needs to review the spec, and why.';
865 COMMENT ON COLUMN Specification.assignee IS 'The person who has been assigned to implement this specification.';
866 COMMENT ON COLUMN Specification.drafter IS 'The person who has been asked to draft this specification. They are responsible for getting the spec to "approved" state.';
867@@ -1537,6 +1606,8 @@
868 COMMENT ON COLUMN BuildQueue.job IS 'Foreign key to the `Job` table row with the generic job data.';
869 COMMENT ON COLUMN BuildQueue.job_type IS 'Type of job (enumeration value), enables us to find/query the correct table with the data specific to this type of job.';
870 COMMENT ON COLUMN BuildQueue.estimated_duration IS 'Estimated job duration, based on previous running times of comparable jobs.';
871+COMMENT ON COLUMN BuildQueue.processor IS 'The processor required by the associated build farm job.';
872+COMMENT ON COLUMN BuildQueue.virtualized IS 'The virtualization setting required by the associated build farm job.';
873
874 -- Mirrors
875
876
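The new Bug.heat and Bug.heat_last_updated columns documented above imply a periodic recalculation pass. Purely as a sketch (the query below is not part of this branch, and the one-day staleness threshold is an invented example value), such a pass might select bugs whose heat has never been set or has gone stale:

    -- Illustrative only: pick bugs whose heat needs (re)calculation.
    SELECT Bug.id
    FROM Bug
    WHERE Bug.heat_last_updated IS NULL
       OR Bug.heat_last_updated < CURRENT_TIMESTAMP AT TIME ZONE 'UTC'
                                  - interval '1 day'
    ORDER BY Bug.heat_last_updated NULLS FIRST
    LIMIT 100;
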
877=== added file 'database/schema/patch-2207-19-1.sql'
878--- database/schema/patch-2207-19-1.sql 1970-01-01 00:00:00 +0000
879+++ database/schema/patch-2207-19-1.sql 2010-01-21 17:51:26 +0000
880@@ -0,0 +1,35 @@
881+SET client_min_messages=ERROR;
882+
883+CREATE INDEX bugtask__bugwatch__idx
884+ON BugTask(bugwatch) WHERE bugwatch IS NOT NULL;
885+
886+CREATE INDEX translationimportqueueentry__productseries__idx
887+ON TranslationImportQueueEntry(productseries)
888+WHERE productseries IS NOT NULL;
889+
890+CREATE INDEX translationimportqueueentry__sourcepackagename__idx
891+ON TranslationImportQueueEntry(sourcepackagename)
892+WHERE sourcepackagename IS NOT NULL;
893+
894+CREATE INDEX translationimportqueueentry__path__idx
895+ON TranslationImportQueueEntry(path);
896+
897+CREATE INDEX translationimportqueueentry__pofile__idx
898+ON TranslationImportQueueEntry(pofile)
899+WHERE pofile IS NOT NULL;
900+
901+CREATE INDEX translationimportqueueentry__potemplate__idx
902+ON TranslationImportQueueEntry(potemplate)
903+WHERE potemplate IS NOT NULL;
904+
905+CREATE INDEX pofile__from_sourcepackagename__idx
906+ON POFile(from_sourcepackagename)
907+WHERE from_sourcepackagename IS NOT NULL;
908+
909+CREATE INDEX bugwatch__lastchecked__idx ON BugWatch(lastchecked);
910+CREATE INDEX bugwatch__remotebug__idx ON BugWatch(remotebug);
911+CREATE INDEX bugwatch__remote_lp_bug_id__idx ON BugWatch(remote_lp_bug_id)
912+WHERE remote_lp_bug_id IS NOT NULL;
913+
914+
915+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 19, 1);
916
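For context rather than as part of the patch: the partial indexes above only cover rows where the indexed column is non-NULL, which matches the shape of the lookups they serve. A query such as the following (the id 42 is an arbitrary example) can use bugtask__bugwatch__idx, because its equality predicate implies bugwatch IS NOT NULL:

    -- Illustrative only: a lookup the partial index can satisfy.
    SELECT BugTask.id
    FROM BugTask
    WHERE BugTask.bugwatch = 42;
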
917=== added file 'database/schema/patch-2207-20-0.sql'
918--- database/schema/patch-2207-20-0.sql 1970-01-01 00:00:00 +0000
919+++ database/schema/patch-2207-20-0.sql 2010-01-21 17:51:26 +0000
920@@ -0,0 +1,13 @@
921+SET client_min_messages=ERROR;
922+
923+-- Drop the old view.
924+DROP VIEW validpersonorteamcache;
925+
926+-- Create the new view that excludes merged teams.
927+CREATE VIEW validpersonorteamcache AS
928+ SELECT person.id FROM
929+ ((person LEFT JOIN emailaddress ON ((person.id = emailaddress.person))) LEFT JOIN account ON ((emailaddress.account = account.id)))
930+ WHERE (((person.teamowner IS NOT NULL) AND (person.merged IS NULL)) OR
931+ (person.teamowner IS NULL AND (account.status = 20) AND (emailaddress.status = 4)));
932+
933+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 20, 0);
934
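A quick sanity check of the new view definition, offered only as a sketch (not part of the patch): a team whose merged column is set should no longer appear in the cache.

    -- Illustrative only: expected to return zero rows with the new view.
    SELECT Person.id
    FROM Person
    WHERE Person.teamowner IS NOT NULL   -- teams
      AND Person.merged IS NOT NULL      -- that have been merged
      AND Person.id IN (SELECT id FROM ValidPersonOrTeamCache);
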
935=== added file 'database/schema/patch-2207-21-0.sql'
936--- database/schema/patch-2207-21-0.sql 1970-01-01 00:00:00 +0000
937+++ database/schema/patch-2207-21-0.sql 2010-01-21 17:51:27 +0000
938@@ -0,0 +1,8 @@
939+SET client_min_messages=ERROR;
940+
941+ALTER TABLE Language ALTER englishname SET NOT NULL;
942+
943+ALTER TABLE LibraryFileContent ALTER filesize TYPE bigint;
944+CLUSTER LibraryFileContent USING libraryfilecontent_pkey; -- repack
945+
946+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 21, 0);
947
948=== added file 'database/schema/patch-2207-24-0.sql'
949--- database/schema/patch-2207-24-0.sql 1970-01-01 00:00:00 +0000
950+++ database/schema/patch-2207-24-0.sql 2010-01-21 17:51:27 +0000
951@@ -0,0 +1,30 @@
952+-- Copyright 2009 Canonical Ltd. This software is licensed under the
953+-- GNU Affero General Public License version 3 (see the file LICENSE).
954+
955+SET client_min_messages=ERROR;
956+
957+-- Another schema patch required for the Soyuz buildd generalisation, see
958+-- https://dev.launchpad.net/Soyuz/Specs/BuilddGeneralisation for details.
959+-- Bug #505725.
960+
961+-- Changes needed to the `BuildQueue` table.
962+
963+-- The 'processor' and the 'virtualized' columns will enable us to formulate
964+-- more straightforward queries for finding candidate jobs when builders
965+-- become idle.
966+ALTER TABLE ONLY buildqueue ADD COLUMN processor integer;
967+ALTER TABLE ONLY buildqueue ADD COLUMN virtualized boolean;
968+
969+-- Replicate the processor/require_virtualized values of the (binary) builds
970+-- associated with the existing BuildQueue records.
971+UPDATE BuildQueue
972+SET processor = Build.processor, virtualized = Archive.require_virtualized
973+FROM Archive, Build, BuildPackageJob
974+WHERE
975+ BuildPackageJob.job = BuildQueue.job
976+ AND BuildPackageJob.build = Build.id
977+ AND Build.archive = Archive.id;
978+
979+CREATE INDEX buildqueue__processor__virtualized__idx ON buildqueue USING btree (processor, virtualized) WHERE (processor IS NOT NULL);
980+
981+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 24, 0);
982
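To illustrate the "more straightforward queries" mentioned in the comment above, an idle builder could be matched to pending jobs directly on the new columns. This is only a sketch: it assumes the Builder table exposes processor and virtualized columns and that undispatched jobs have builder set to NULL, neither of which is shown in this patch.

    -- Illustrative only: next candidate job for an idle builder (id = 1).
    SELECT BuildQueue.id, BuildQueue.lastscore
    FROM BuildQueue, Builder
    WHERE Builder.id = 1
      AND BuildQueue.builder IS NULL
      AND BuildQueue.processor = Builder.processor
      AND BuildQueue.virtualized = Builder.virtualized
    ORDER BY BuildQueue.lastscore DESC
    LIMIT 1;
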
983=== added file 'database/schema/patch-2207-25-0.sql'
984--- database/schema/patch-2207-25-0.sql 1970-01-01 00:00:00 +0000
985+++ database/schema/patch-2207-25-0.sql 2010-01-21 17:51:26 +0000
986@@ -0,0 +1,149 @@
987+SET client_min_messages=ERROR;
988+
989+CREATE TABLE SourcePackageRecipe (
990+ id serial PRIMARY KEY,
991+ date_created timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
992+ date_last_modified timestamp without time zone DEFAULT timezone('UTC'::text, now()) NOT NULL,
993+ registrant integer NOT NULL REFERENCES Person,
994+ owner integer NOT NULL REFERENCES Person,
995+ distroseries integer NOT NULL REFERENCES DistroSeries,
996+ sourcepackagename integer NOT NULL REFERENCES SourcePackageName,
997+ name text NOT NULL
998+);
999+
1000+ALTER TABLE SourcePackageRecipe ADD CONSTRAINT sourcepackagerecipe__owner__distroseries__sourcepackagename__name__key
1001+ UNIQUE (owner, distroseries, sourcepackagename, name);
1002+
1003+CREATE TABLE SourcePackageRecipeBuild (
1004+ id serial PRIMARY KEY,
1005+ -- most of this is just copied from Build
1006+
1007+ -- I've dropped: processor, sourcepackagerelease, pocket, dependencies
1008+ -- changed: distroarchseries to distroseries
1009+ -- added: recipe and manifest
1010+ date_created timestamp without time zone DEFAULT timezone('UTC'::text, ('now'::text)::timestamp(6) with time zone) NOT NULL,
1011+ distroseries integer NOT NULL REFERENCES distroseries,
1012+ sourcepackagename integer NOT NULL REFERENCES SourcePackageName,
1013+ build_state integer NOT NULL,
1014+ date_built timestamp without time zone,
1015+ build_duration interval,
1016+ build_log integer REFERENCES libraryfilealias,
1017+ builder integer REFERENCES builder,
1018+ date_first_dispatched timestamp without time zone,
1019+ requester integer NOT NULL REFERENCES Person,
1020+ recipe integer REFERENCES SourcePackageRecipe NOT NULL,
1021+ archive integer NOT NULL REFERENCES Archive
1022+);
1023+
1024+CREATE TABLE SourcePackageRecipeBuildUpload (
1025+ id serial PRIMARY KEY,
1026+ date_created timestamp without time zone DEFAULT timezone('UTC'::text, ('now'::text)::timestamp(6) with time zone) NOT NULL,
1027+ registrant integer NOT NULL REFERENCES Person,
1028+ sourcepackage_recipe_build integer NOT NULL REFERENCES SourcePackageRecipeBuild,
1029+ archive integer NOT NULL REFERENCES Archive,
1030+ upload_log integer REFERENCES LibraryFileAlias,
1031+ state integer NOT NULL -- an enum, WAITING/UPLOADED/FAILED or something like that.
1032+);
1033+
1034+-- Indexes for SourcePackageRecipeBuildUpload are created further below.
1035+
1036+ALTER TABLE SourcePackageRelease
1037+ ADD COLUMN sourcepackage_recipe_build integer REFERENCES SourcePackageRecipeBuild;
1038+
1039+CREATE TABLE SourcePackageRecipeBuildJob (
1040+ id serial PRIMARY KEY,
1041+ job integer NOT NULL REFERENCES Job,
1042+ sourcepackage_recipe_build integer REFERENCES SourcePackageRecipeBuild
1043+);
1044+
1045+ALTER TABLE SourcePackageRecipeBuildJob ADD CONSTRAINT sourcepackagerecipebuildjob__sourcepackage_recipe_build__key
1046+ UNIQUE (sourcepackage_recipe_build);
1047+
1048+ALTER TABLE SourcePackageRecipeBuildJob ADD CONSTRAINT sourcepackagerecipebuildjob__job__key
1049+ UNIQUE (job);
1050+
1051+CREATE TABLE SourcePackageRecipeData (
1052+ id serial PRIMARY KEY,
1053+ base_branch integer NOT NULL REFERENCES Branch,
1054+ recipe_format text NOT NULL,
1055+ deb_version_template text NOT NULL,
1056+ revspec text,
1057+ sourcepackage_recipe integer REFERENCES SourcePackageRecipe,
1058+ sourcepackage_recipe_build integer REFERENCES SourcePackageRecipeBuild
1059+);
1060+
1061+ALTER TABLE SourcePackageRecipeData ADD CONSTRAINT sourcepackagerecipedata__recipe_or_build_is_not_null
1062+ CHECK (sourcepackage_recipe IS NULL != sourcepackage_recipe_build IS NULL);
1063+CREATE UNIQUE INDEX sourcepackagerecipedata__sourcepackage_recipe__key
1064+ ON SourcepackageRecipeData(sourcepackage_recipe)
1065+ WHERE sourcepackage_recipe IS NOT NULL;
1066+CREATE UNIQUE INDEX sourcepackagerecipedata__sourcepackage_recipe_build__key
1067+ ON SourcepackageRecipeData(sourcepackage_recipe_build)
1068+ WHERE sourcepackage_recipe_build IS NOT NULL;
1069+
1070+CREATE TABLE SourcePackageRecipeDataInstruction (
1071+ id serial PRIMARY KEY,
1072+ name text NOT NULL,
1073+ type integer NOT NULL, -- MERGE == 1, NEST == 2
1074+ comment text,
1075+ line_number integer NOT NULL,
1076+ branch integer NOT NULL REFERENCES Branch,
1077+ revspec text,
1078+ directory text,
1079+ recipe_data integer NOT NULL REFERENCES SourcePackageRecipeData,
1080+ parent_instruction integer REFERENCES SourcePackageRecipeDataInstruction
1081+);
1082+
1083+ALTER TABLE SourcePackageRecipeDataInstruction ADD CONSTRAINT sourcepackagerecipedatainstruction__name__recipe_data__key
1084+ UNIQUE (name, recipe_data);
1085+ALTER TABLE SourcePackageRecipeDataInstruction ADD CONSTRAINT sourcepackagerecipedatainstruction__recipe_data__line_number__key
1086+ UNIQUE (recipe_data, line_number);
1087+ALTER TABLE SourcePackageRecipeDataInstruction ADD CONSTRAINT sourcepackagerecipedatainstruction__directory_not_null
1088+ CHECK ((type = 1 AND directory IS NULL) OR (type = 2 AND directory IS NOT NULL));
1089+
1090+CREATE INDEX sourcepackagerecipedata__base_branch__idx
1091+ON SourcepackageRecipeData(base_branch);
1092+
1093+CREATE INDEX sourcepackagerecipedatainstruction__branch__idx
1094+ON SourcepackageRecipeDataInstruction(branch);
1095+
1096+CREATE INDEX sourcepackagerecipe__registrant__idx
1097+ON SourcepackageRecipe(registrant);
1098+
1099+--CREATE INDEX sourcepackagerecipe__owner__idx
1100+--ON SourcepackageRecipe(owner);
1101+
1102+CREATE INDEX sourcepackagerecipebuild__distroseries__idx
1103+ON SourcepackageRecipeBuild(distroseries);
1104+
1105+CREATE INDEX sourcepackagerecipebuild__sourcepackagename__idx
1106+ON SourcepackageRecipeBuild(sourcepackagename);
1107+
1108+CREATE INDEX sourcepackagerecipebuild__build_log__idx
1109+ON SourcepackageRecipeBuild(build_log) WHERE build_log IS NOT NULL;
1110+
1111+CREATE INDEX sourcepackagerecipebuild__builder__idx
1112+ON SourcepackageRecipeBuild(builder);
1113+
1114+CREATE INDEX sourcepackagerecipebuild__requester__idx
1115+ON SourcepackageRecipeBuild(requester);
1116+
1117+CREATE INDEX sourcepackagerecipebuild__recipe__idx
1118+ON SourcepackageRecipeBuild(recipe);
1119+
1120+CREATE INDEX sourcepackagerecipebuild__archive__idx
1121+ON SourcepackageRecipeBuild(archive);
1122+
1123+CREATE INDEX sourcepackagebuildupload__registrant__idx
1124+ON SourcepackageRecipeBuildUpload(registrant);
1125+
1126+CREATE INDEX sourcepackagerecipebuildupload__archive__idx
1127+ON SourcepackageRecipeBuildUpload(archive);
1128+
1129+CREATE INDEX sourcepackagerecipebuildupload__upload_log__idx
1130+ON SourcepackageRecipeBuildUpload(upload_log) WHERE upload_log IS NOT NULL;
1131+
1132+CREATE INDEX sourcepackagerelease__sourcepackage_recipe_build__idx
1133+ON SourcepackageRelease(sourcepackage_recipe_build);
1134+
1135+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 25, 0);
1136
1137=== added file 'database/schema/patch-2207-26-0.sql'
1138--- database/schema/patch-2207-26-0.sql 1970-01-01 00:00:00 +0000
1139+++ database/schema/patch-2207-26-0.sql 2010-01-21 17:51:26 +0000
1140@@ -0,0 +1,28 @@
1141+SET client_min_messages=ERROR;
1142+
1143+ALTER TABLE CodeImport ADD COLUMN url text;
1144+UPDATE CodeImport SET url = git_repo_url WHERE rcs_type = 4;
1145+UPDATE CodeImport SET url = svn_branch_url WHERE rcs_type IN (2, 3);
1146+DROP INDEX codeimport__svn_branch_url__idx;
1147+DROP INDEX codeimport__git_repo_url__idx;
1148+ALTER TABLE CodeImport DROP CONSTRAINT valid_vcs_details;
1149+ALTER TABLE CodeImport ADD CONSTRAINT "valid_vcs_details" CHECK (
1150+CASE
1151+ WHEN rcs_type = 1
1152+ THEN cvs_root IS NOT NULL AND cvs_root <> ''::text AND cvs_module IS NOT NULL AND cvs_module <> ''::text
1153+ AND url IS NULL
1154+ WHEN rcs_type IN (2, 3)
1155+ THEN cvs_root IS NULL AND cvs_module IS NULL
1156+ AND url IS NOT NULL AND valid_absolute_url(url)
1157+ WHEN rcs_type IN (4, 5)
1158+ -- Git and mercurial imports are not checked for valid urls right now,
1159+ -- this is a bug - 506146
1160+ THEN cvs_root IS NULL AND cvs_module IS NULL AND url IS NOT NULL
1161+ ELSE false
1162+END);
1163+ALTER TABLE CodeImport DROP COLUMN git_repo_url;
1164+ALTER TABLE CodeImport DROP COLUMN svn_branch_url;
1165+
1166+CREATE UNIQUE INDEX codeimport__url__idx ON CodeImport USING btree (url) WHERE (url is NOT NULL);
1167+
1168+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 26, 0);
1169
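The consolidated valid_vcs_details constraint above encodes one rule per rcs_type. As an illustrative mirror only (plain Python using the same integer rcs_type values as the SQL; this is not code from the branch):

    def valid_vcs_details(rcs_type, cvs_root, cvs_module, url):
        # CVS (1): needs cvs_root and cvs_module, and no URL.
        if rcs_type == 1:
            return bool(cvs_root) and bool(cvs_module) and url is None
        # Subversion (2, 3): needs a URL (the SQL also requires
        # valid_absolute_url(url)) and no CVS details.
        if rcs_type in (2, 3):
            return cvs_root is None and cvs_module is None and url is not None
        # Git (4) and Mercurial (5): need a URL; it is not yet checked for
        # well-formedness (bug 506146).
        if rcs_type in (4, 5):
            return cvs_root is None and cvs_module is None and url is not None
        return False
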
1170=== added file 'database/schema/patch-2207-27-0.sql'
1171--- database/schema/patch-2207-27-0.sql 1970-01-01 00:00:00 +0000
1172+++ database/schema/patch-2207-27-0.sql 2010-01-21 17:51:26 +0000
1173@@ -0,0 +1,13 @@
1174+-- Copyright 2009 Canonical Ltd. This software is licensed under the
1175+-- GNU Affero General Public License version 3 (see the file LICENSE).
1176+
1177+SET client_min_messages TO ERROR;
1178+
1179+DROP INDEX bug__hotness__idx;
1180+ALTER table bug ADD COLUMN heat_last_updated timestamp;
1181+ALTER table bug RENAME COLUMN hotness to heat;
1182+ALTER table bugtask RENAME COLUMN hotness_rank to heat_rank;
1183+CREATE INDEX bug__heat_last_updated__idx ON bug USING btree (heat_last_updated);
1184+CREATE INDEX bug__heat__idx ON bug USING btree (heat);
1185+
1186+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 27, 0);
1187
1188=== added file 'database/schema/patch-2207-28-0.sql'
1189--- database/schema/patch-2207-28-0.sql 1970-01-01 00:00:00 +0000
1190+++ database/schema/patch-2207-28-0.sql 2010-01-21 17:51:26 +0000
1191@@ -0,0 +1,11 @@
1192+SET client_min_messages=ERROR;
1193+
1194+CREATE TABLE BugJob(
1195+ id serial NOT NULL PRIMARY KEY,
1196+ job integer NOT NULL REFERENCES Job(id),
1197+ bug integer NOT NULL REFERENCES Bug(id),
1198+ job_type integer NOT NULL,
1199+ json_data text
1200+);
1201+
1202+INSERT INTO LaunchpadDatabaseRevision VALUES (2207, 28, 0);
1203
1204=== modified file 'database/schema/security.cfg'
1205--- database/schema/security.cfg 2010-01-17 09:15:43 +0000
1206+++ database/schema/security.cfg 2010-01-21 17:51:26 +0000
1207@@ -275,6 +275,12 @@
1208 public.sourcepackagepublishinghistory = SELECT
1209 public.seriessourcepackagebranch = SELECT, INSERT, UPDATE, DELETE
1210 public.sourcepackageformatselection = SELECT
1211+public.sourcepackagerecipe = SELECT, INSERT, UPDATE, DELETE
1212+public.sourcepackagerecipebuild = SELECT, INSERT, UPDATE, DELETE
1213+public.sourcepackagerecipebuildjob = SELECT, INSERT, UPDATE
1214+public.sourcepackagerecipebuildupload = SELECT, INSERT, UPDATE, DELETE
1215+public.sourcepackagerecipedata = SELECT, INSERT, UPDATE, DELETE
1216+public.sourcepackagerecipedatainstruction = SELECT, INSERT, UPDATE, DELETE
1217 public.specificationbranch = SELECT, INSERT, UPDATE, DELETE
1218 public.specificationbug = SELECT, INSERT, DELETE
1219 public.specificationdependency = SELECT, INSERT, DELETE
1220@@ -403,6 +409,8 @@
1221 public.shippingrun = SELECT
1222 public.sprint = SELECT
1223 public.sourcepackagereleasefile = SELECT
1224+public.sourcepackagerecipebuild = SELECT
1225+public.sourcepackagerecipebuildupload = SELECT
1226 public.temporaryblobstorage = SELECT, DELETE
1227 public.translationimportqueueentry = SELECT
1228
1229@@ -1205,6 +1213,7 @@
1230 public.binarypackagefilepublishing = SELECT
1231 public.securesourcepackagepublishinghistory = SELECT, INSERT, UPDATE
1232 public.securebinarypackagepublishinghistory = SELECT, INSERT, UPDATE
1233+public.sourcepackagerecipebuildjob = SELECT, INSERT, UPDATE
1234 public.component = SELECT
1235 public.section = SELECT
1236 public.componentselection = SELECT
1237@@ -1796,6 +1805,9 @@
1238 # changing DB permissions.
1239 type=user
1240 groups=script,read
1241+public.bug = SELECT, UPDATE
1242+public.bugsubscription = SELECT
1243+public.bugaffectsperson = SELECT
1244 public.bugnotification = SELECT, DELETE
1245 public.bugnotificationrecipientarchive = SELECT
1246 public.codeimportresult = SELECT, DELETE
1247@@ -1843,6 +1855,11 @@
1248
1249 [nagios]
1250 type=user
1251+public.archive = SELECT
1252+public.build = SELECT
1253+public.buildqueue = SELECT
1254+public.buildpackagejob = SELECT
1255+public.job = SELECT
1256 public.libraryfilecontent = SELECT
1257 public.openidrpconfig = SELECT
1258 public.branch = SELECT
1259
1260=== modified file 'database/schema/security.py'
1261--- database/schema/security.py 2009-11-18 08:09:58 +0000
1262+++ database/schema/security.py 2010-01-21 17:51:26 +0000
1263@@ -28,6 +28,9 @@
1264 # sensitive information that interactive sessions don't need.
1265 SECURE_TABLES = [
1266 'public.accountpassword',
1267+ 'public.oauthnonce',
1268+ 'public.openidnonce',
1269+ 'public.openidconsumernonce',
1270 ]
1271
1272
1273
1274=== modified file 'lib/canonical/config/schema-lazr.conf'
1275--- lib/canonical/config/schema-lazr.conf 2010-01-19 12:49:35 +0000
1276+++ lib/canonical/config/schema-lazr.conf 2010-01-21 17:51:26 +0000
1277@@ -402,6 +402,11 @@
1278 default_interval_git: 21600
1279
1280 # The default value of the update interval of a code import from
1281+# Mercurial, in seconds.
1282+# datatype: integer
1283+default_interval_hg: 21600
1284+
1285+# The default value of the update interval of a code import from
1286 # CVS, in seconds.
1287 # datatype: integer
1288 default_interval_cvs: 43200
1289@@ -870,6 +875,11 @@
1290 storm_cache: generational
1291 storm_cache_size: 10000
1292
1293+# Assume the slave database is lagged if it takes more than this many
1294+# milliseconds to calculate this information from the Slony-I tables.
1295+# datatype: integer
1296+lag_check_timeout: 250
1297+
1298 # If False, do not launch the appserver.
1299 # datatype: boolean
1300 launch: True
1301
1302=== added file 'lib/canonical/launchpad/blocked.html'
1303--- lib/canonical/launchpad/blocked.html 1970-01-01 00:00:00 +0000
1304+++ lib/canonical/launchpad/blocked.html 2010-01-21 17:51:27 +0000
1305@@ -0,0 +1,26 @@
1306+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
1307+<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
1308+ <head>
1309+ <title>You have been blocked</title>
1310+ <style type="text/css" media="screen, print">
1311+ @import url(https://launchpad.net/+icing/rev5/+style-slimmer.css);
1312+ </style>
1313+ </head>
1314+ <body>
1315+ <div id="topline"></div>
1316+ <div id="locationbar"></div>
1317+ <div class="offline">
1318+
1319+ <h1>You have been blocked</h1>
1320+ <p>
1321+ Due to what we suspect to be inappropriate usage
1322+ of Launchpad, your access has been blocked.
1323+ </p>
1324+ <p>
1325+ If you believe this to be in error, please contact
1326+ us at feedback@launchpad.net.
1327+ </p>
1328+
1329+ </div>
1330+ </body>
1331+</html>
1332
1333=== modified file 'lib/canonical/launchpad/scripts/garbo.py'
1334--- lib/canonical/launchpad/scripts/garbo.py 2009-12-17 02:44:39 +0000
1335+++ lib/canonical/launchpad/scripts/garbo.py 2010-01-21 17:51:26 +0000
1336@@ -30,7 +30,9 @@
1337 from canonical.launchpad.utilities.looptuner import DBLoopTuner
1338 from canonical.launchpad.webapp.interfaces import (
1339 IStoreSelector, AUTH_STORE, MAIN_STORE, MASTER_FLAVOR)
1340+from lp.bugs.interfaces.bug import IBugSet
1341 from lp.bugs.model.bugnotification import BugNotification
1342+from lp.bugs.scripts.bugheat import BugHeatCalculator
1343 from lp.code.interfaces.revision import IRevisionSet
1344 from lp.code.model.branchjob import BranchJob
1345 from lp.code.model.codeimportresult import CodeImportResult
1346@@ -691,6 +693,66 @@
1347 transaction.commit()
1348
1349
1350+class BugHeatUpdater(TunableLoop):
1351+ """A `TunableLoop` for bug heat calculations."""
1352+
1353+ maximum_chunk_size = 1000
1354+
1355+ def __init__(self, log, abort_time=None):
1356+ super(BugHeatUpdater, self).__init__(log, abort_time)
1357+ self.transaction = transaction
1358+ self.offset = 0
1359+ self.total_updated = 0
1360+
1361+ def isDone(self):
1362+ """See `ITunableLoop`."""
1363+ # When the main loop has no more Bugs to process it sets
1364+ # offset to None. Until then, it always has a numerical
1365+ # value.
1366+ return self.offset is None
1367+
1368+ def __call__(self, chunk_size):
1369+ """Retrieve a batch of Bugs and update their heat.
1370+
1371+ See `ITunableLoop`.
1372+ """
1373+ # XXX 2010-01-08 gmb bug=198767:
1374+ # We cast chunk_size to an integer to ensure that we're not
1375+ # trying to slice using floats or anything similarly
1376+ # foolish. We shouldn't have to do this.
1377+ chunk_size = int(chunk_size)
1378+
1379+ start = self.offset
1380+ end = self.offset + chunk_size
1381+
1382+ transaction.begin()
1383+ # XXX 2010-01-08 gmb bug=505850:
1384+ # This method call should be taken out and shot as soon as
1385+ # we have a proper permissions system for scripts.
1386+ bugs = getUtility(IBugSet).dangerousGetAllBugs()[start:end]
1387+
1388+ self.offset = None
1389+ bug_count = bugs.count()
1390+ if bug_count > 0:
1391+ starting_id = bugs.first().id
1392+ self.log.debug("Updating %i Bugs (starting id: %i)" %
1393+ (bug_count, starting_id))
1394+
1395+ for bug in bugs:
1396+ # We set the starting point of the next batch to the Bug
1397+ # We set the starting point of the next batch to the index
1398+ # after the one we're looking at now. If there aren't any
1399+ # bugs this loop will run for 0 iterations and self.offset
1400+ # will remain set to None.
1401+ self.offset = start
1402+ self.log.debug("Updating heat for bug %s" % bug.id)
1403+ bug_heat_calculator = BugHeatCalculator(bug)
1404+ heat = bug_heat_calculator.getBugHeat()
1405+ bug.setHeat(heat)
1406+ self.total_updated += 1
1407+ transaction.commit()
1408+
1409+
1410 class BaseDatabaseGarbageCollector(LaunchpadCronScript):
1411 """Abstract base class to run a collection of TunableLoops."""
1412 script_name = None # Script name for locking and database user. Override.
1413@@ -795,6 +857,7 @@
1414 PersonEmailAddressLinkChecker,
1415 BugNotificationPruner,
1416 BranchJobPruner,
1417+ BugHeatUpdater,
1418 ]
1419 experimental_tunable_loops = [
1420 PersonPruner,
1421
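A minimal sketch of driving the new loop by hand, mirroring what the bug-heat doctest later in this diff does (the garbo-daily script normally runs it through the tunable-loop machinery; FakeLogger is the same test logger the doctest imports):

    from canonical.launchpad.scripts import FakeLogger
    from canonical.launchpad.scripts.garbo import BugHeatUpdater

    updater = BugHeatUpdater(FakeLogger())
    while not updater.isDone():
        # Each call recalculates and stores heat for at most 100 bugs
        # and commits the transaction.
        updater(chunk_size=100)
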
1422=== modified file 'lib/canonical/launchpad/webapp/dbpolicy.py'
1423--- lib/canonical/launchpad/webapp/dbpolicy.py 2010-01-19 04:25:38 +0000
1424+++ lib/canonical/launchpad/webapp/dbpolicy.py 2010-01-21 17:51:27 +0000
1425@@ -15,9 +15,11 @@
1426 ]
1427
1428 from datetime import datetime, timedelta
1429+import logging
1430 from textwrap import dedent
1431
1432 from storm.cache import Cache, GenerationalCache
1433+from storm.exceptions import TimeoutError
1434 from storm.zope.interfaces import IZStorm
1435 from zope.session.interfaces import ISession, IClientIdManager
1436 from zope.component import getUtility
1437@@ -291,8 +293,26 @@
1438
1439 # sl_status gives meaningful results only on the origin node.
1440 master_store = self.getStore(MAIN_STORE, MASTER_FLAVOR)
1441- return master_store.execute(
1442- "SELECT replication_lag(%d)" % slave_node_id).get_one()[0]
1443+ # If it takes more than (by default) 0.25 seconds to query the
1444+ # replication lag, assume we are lagged. Normally the query
1445+ # takes <20ms. This can happen during heavy updates, as the
1446+ # Slony-I tables can get slow with lots of events. We use a
1447+ # SAVEPOINT to conveniently reset the statement timeout.
1448+ master_store.execute("""
1449+ SAVEPOINT lag_check; SET LOCAL statement_timeout TO %d
1450+ """ % config.launchpad.lag_check_timeout)
1451+ try:
1452+ try:
1453+ return master_store.execute(
1454+ "SELECT replication_lag(%d)" % slave_node_id).get_one()[0]
1455+ except TimeoutError:
1456+ logging.warn(
1457+ 'Gave up querying slave lag after %d ms',
1458+ (config.launchpad.lag_check_timeout))
1459+ return timedelta(days=999) # A long, long time.
1460+ finally:
1461+ master_store.execute("ROLLBACK TO lag_check")
1462+
1463
1464
1465 def WebServiceDatabasePolicyFactory(request):
1466
1467=== modified file 'lib/lp/archiveuploader/permission.py'
1468--- lib/lp/archiveuploader/permission.py 2009-11-20 22:22:53 +0000
1469+++ lib/lp/archiveuploader/permission.py 2010-01-21 17:51:26 +0000
1470@@ -9,6 +9,7 @@
1471 'CannotUploadToPPA',
1472 'can_upload_to_archive',
1473 'check_upload_to_archive',
1474+ 'check_upload_to_pocket',
1475 'components_valid_for',
1476 'verify_upload',
1477 ]
1478@@ -125,20 +126,12 @@
1479 return reason is None
1480
1481
1482-def check_upload_to_archive(person, distroseries, sourcepackagename, archive,
1483- component, pocket, strict_component=True):
1484- """Check if 'person' upload 'suitesourcepackage' to 'archive'.
1485+def check_upload_to_pocket(archive, distroseries, pocket):
1486+ """Check if uploading to a particular pocket in an archive is possible.
1487
1488- :param person: An `IPerson` who might be uploading.
1489- :param distroseries: The `IDistroSeries` being uploaded to.
1490- :param sourcepackagename: The `ISourcePackageName` being uploaded.
1491- :param archive: The `IArchive` to upload to. If not provided, defaults
1492- to the default archive for the source package. (See
1493- `ISourcePackage.get_default_archive`).
1494- :param component: The `Component` being uploaded to.
1495- :param pocket: The `PackagePublishingPocket` of 'distroseries' being
1496- uploaded to.
1497- :return: The reason for not being able to upload, None otherwise.
1498+ :param archive: A `IArchive`
1499+ :param distroseries: A `IDistroSeries`
1500+ :param pocket: A `PackagePublishingPocket`
1501 """
1502 if archive.purpose == ArchivePurpose.PARTNER:
1503 if pocket not in (
1504@@ -156,6 +149,25 @@
1505 if not distroseries.canUploadToPocket(pocket):
1506 return CannotUploadToPocket(distroseries, pocket)
1507
1508+
1509+def check_upload_to_archive(person, distroseries, sourcepackagename, archive,
1510+ component, pocket, strict_component=True):
1511+ """Check if 'person' can upload 'suitesourcepackage' to 'archive'.
1512+
1513+ :param person: An `IPerson` who might be uploading.
1514+ :param distroseries: The `IDistroSeries` being uploaded to.
1515+ :param sourcepackagename: The `ISourcePackageName` being uploaded.
1516+ :param archive: The `IArchive` to upload to. If not provided, defaults
1517+ to the default archive for the source package. (See
1518+ `ISourcePackage.get_default_archive`).
1519+ :param component: The `Component` being uploaded to.
1520+ :param pocket: The `PackagePublishingPocket` of 'distroseries' being
1521+ uploaded to.
1522+ :return: The reason for not being able to upload, None otherwise.
1523+ """
1524+ reason = check_upload_to_pocket(archive, distroseries, pocket)
1525+ if reason is not None:
1526+ return reason
1527 return verify_upload(
1528 person, sourcepackagename, archive, component, distroseries,
1529 strict_component)
1530
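Both checkers follow the same convention: they return None when the upload is allowed and a reason object describing the rejection otherwise. A hedged usage sketch (archive, distroseries and pocket are assumed to be existing IArchive, IDistroSeries and PackagePublishingPocket values):

    from lp.archiveuploader.permission import check_upload_to_pocket

    reason = check_upload_to_pocket(archive, distroseries, pocket)
    if reason is None:
        pass  # The pocket accepts uploads for this archive and series.
    else:
        # e.g. a CannotUploadToPocket instance explaining the rejection.
        print reason
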
1531=== modified file 'lib/lp/bugs/configure.zcml'
1532--- lib/lp/bugs/configure.zcml 2009-12-09 11:14:16 +0000
1533+++ lib/lp/bugs/configure.zcml 2010-01-21 17:51:26 +0000
1534@@ -572,7 +572,8 @@
1535 userCanView
1536 personIsDirectSubscriber
1537 personIsAlsoNotifiedSubscriber
1538- personIsSubscribedToDuplicate"/>
1539+ personIsSubscribedToDuplicate
1540+ heat"/>
1541 <require
1542 permission="launchpad.View"
1543 attributes="
1544@@ -668,7 +669,8 @@
1545 <require
1546 permission="launchpad.Admin"
1547 attributes="
1548- setCommentVisibility"/>
1549+ setCommentVisibility
1550+ setHeat"/>
1551 </class>
1552 <adapter
1553 for="lp.bugs.interfaces.bug.IBug"
1554
1555=== added file 'lib/lp/bugs/doc/bug-heat.txt'
1556--- lib/lp/bugs/doc/bug-heat.txt 1970-01-01 00:00:00 +0000
1557+++ lib/lp/bugs/doc/bug-heat.txt 2010-01-21 17:51:27 +0000
1558@@ -0,0 +1,54 @@
1559+Calculating bug heat
1560+====================
1561+
1562+Launchpad bugs each have a 'heat' rating. This is an indicator of how
1563+problematic a given bug is to the community and can be used to determine
1564+which bugs should be tackled first.
1565+
1566+A new bug will have a heat of zero.
1567+
1568+ >>> bug_owner = factory.makePerson()
1569+ >>> bug = factory.makeBug(owner=bug_owner)
1570+ >>> bug.heat
1571+ 0
1572+
1573+The bug's heat can be set by calling its setHeat() method.
1574+
1575+ >>> bug.setHeat(42)
1576+ >>> bug.heat
1577+ 42
1578+
1579+
1580+The BugHeatUpdater class
1581+---------------------------
1582+
1583+In order to calculate bug heat we need to use the BugHeatUpdater
1584+class, which is designed precisely for that task. It's part of the garbo
1585+module and runs as part of the garbo-daily cronjob.
1586+
1587+ >>> from canonical.launchpad.scripts.garbo import BugHeatUpdater
1588+ >>> from canonical.launchpad.scripts import FakeLogger
1589+
1590+ >>> update_bug_heat = BugHeatUpdater(FakeLogger())
1591+
1592+BugHeatUpdater implements ITunableLoop and as such is callable. Each call
1593+updates the heat of a chunk of bugs; driven to completion it covers all the
1594+bugs currently held in Launchpad.
1595+
1596+Before update_bug_heat is called, bug 1 will have no heat.
1597+
1598+ >>> from zope.component import getUtility
1599+ >>> from lp.bugs.interfaces.bug import IBugSet
1600+ >>> bug_1 = getUtility(IBugSet).get(1)
1601+
1602+ >>> bug_1.heat
1603+ 0
1604+
1605+ >>> update_bug_heat(chunk_size=1)
1606+ DEBUG Updating 1 Bugs (starting id: ...)
1607+ ...
1608+
1609+Bug 1's heat will now be greater than 0.
1610+
1611+ >>> bug_1.heat > 0
1612+ True
1613
1614=== modified file 'lib/lp/bugs/interfaces/bug.py'
1615--- lib/lp/bugs/interfaces/bug.py 2009-12-09 20:48:01 +0000
1616+++ lib/lp/bugs/interfaces/bug.py 2010-01-21 17:51:27 +0000
1617@@ -296,6 +296,10 @@
1618 value_type=Reference(schema=IPerson),
1619 readonly=True))
1620
1621+ heat = Int(
1622+ title=_("The 'heat' of the bug"),
1623+ required=False, readonly=True)
1624+
1625 # Adding related BugMessages provides a hook for getting at
1626 # BugMessage.visible when building bug comments.
1627 bug_messages = Attribute('The bug messages related to this object.')
1628@@ -732,6 +736,9 @@
1629 if the user is the owner or an admin.
1630 """
1631
1632+ def setHeat(heat):
1633+ """Set the heat for the bug."""
1634+
1635 class InvalidDuplicateValue(Exception):
1636 """A bug cannot be set as the duplicate of another."""
1637 webservice_error(417)
1638@@ -970,6 +977,19 @@
1639 Otherwise, return False.
1640 """
1641
1642+ def dangerousGetAllBugs():
1643+ """DO NOT CALL THIS METHOD.
1644+
1645+ This method exists solely to allow the bug heat script to grab
1646+ all the bugs in the database - including private ones - and
1647+ iterate over them. DO NOT USE IT UNLESS YOU KNOW WHAT YOU'RE
1648+ DOING. AND IF YOU KNOW WHAT YOU'RE DOING YOU KNOW BETTER THAN TO
1649+ USE THIS ANYWAY.
1650+ """
1651+ # XXX 2010-01-08 gmb bug=505850:
1652+ # Note, this method should go away when we have a proper
1653+ # permissions system for scripts.
1654+
1655
1656 class InvalidBugTargetType(Exception):
1657 """Bug target's type is not valid."""
1658
1659=== modified file 'lib/lp/bugs/model/bug.py'
1660--- lib/lp/bugs/model/bug.py 2009-12-21 18:05:27 +0000
1661+++ lib/lp/bugs/model/bug.py 2010-01-21 17:51:26 +0000
1662@@ -254,6 +254,7 @@
1663 message_count = IntCol(notNull=True, default=0)
1664 users_affected_count = IntCol(notNull=True, default=0)
1665 users_unaffected_count = IntCol(notNull=True, default=0)
1666+ heat = IntCol(notNull=True, default=0)
1667
1668 @property
1669 def comment_count(self):
1670@@ -1425,6 +1426,10 @@
1671
1672 return not subscriptions_from_dupes.is_empty()
1673
1674+ def setHeat(self, heat):
1675+ """See `IBug`."""
1676+ self.heat = heat
1677+
1678
1679 class BugSet:
1680 """See BugSet."""
1681@@ -1665,13 +1670,19 @@
1682 return bugs
1683
1684 def getByNumbers(self, bug_numbers):
1685- """see `IBugSet`."""
1686+ """See `IBugSet`."""
1687 if bug_numbers is None or len(bug_numbers) < 1:
1688 return EmptyResultSet()
1689 store = IStore(Bug)
1690 result_set = store.find(Bug, In(Bug.id, bug_numbers))
1691 return result_set.order_by('id')
1692
1693+ def dangerousGetAllBugs(self):
1694+ """See `IBugSet`."""
1695+ store = IStore(Bug)
1696+ result_set = store.find(Bug)
1697+ return result_set.order_by('id')
1698+
1699
1700 class BugAffectsPerson(SQLBase):
1701 """A bug is marked as affecting a user."""
1702
1703=== added file 'lib/lp/bugs/scripts/bugheat.py'
1704--- lib/lp/bugs/scripts/bugheat.py 1970-01-01 00:00:00 +0000
1705+++ lib/lp/bugs/scripts/bugheat.py 2010-01-21 17:51:26 +0000
1706@@ -0,0 +1,75 @@
1707+# Copyright 2010 Canonical Ltd. This software is licensed under the
1708+# GNU Affero General Public License version 3 (see the file LICENSE).
1709+
1710+"""The innards of the Bug Heat cronscript."""
1711+
1712+__metaclass__ = type
1713+__all__ = ['BugHeatCalculator', 'BugHeatConstants']
1714+
1715+
1716+from zope.component import getUtility
1717+from zope.interface import implements
1718+
1719+from canonical.launchpad.interfaces.looptuner import ITunableLoop
1720+from canonical.launchpad.utilities.looptuner import DBLoopTuner
1721+
1722+
1723+class BugHeatConstants:
1724+
1725+ PRIVACY = 150
1726+ SECURITY = 250
1727+ DUPLICATE = 6
1728+ AFFECTED_USER = 4
1729+ SUBSCRIBER = 2
1730+
1731+
1732+class BugHeatCalculator:
1733+ """A class to calculate the heat for a bug."""
1734+
1735+ def __init__(self, bug):
1736+ self.bug = bug
1737+
1738+ def _getHeatFromPrivacy(self):
1739+ """Return the heat generated by the bug's `private` attribute."""
1740+ if self.bug.private:
1741+ return BugHeatConstants.PRIVACY
1742+ else:
1743+ return 0
1744+
1745+ def _getHeatFromSecurity(self):
1746+ """Return the heat generated if the bug is security related."""
1747+ if self.bug.security_related:
1748+ return BugHeatConstants.SECURITY
1749+ else:
1750+ return 0
1751+
1752+ def _getHeatFromDuplicates(self):
1753+ """Return the heat generated by the bug's duplicates."""
1754+ return self.bug.duplicates.count() * BugHeatConstants.DUPLICATE
1755+
1756+ def _getHeatFromAffectedUsers(self):
1757+ """Return the heat generated by the bug's affected users."""
1758+ return (
1759+ self.bug.users_affected.count() * BugHeatConstants.AFFECTED_USER)
1760+
1761+ def _getHeatFromSubscribers(self):
1762+ """Return the heat generated by the bug's subscribers."""
1763+ direct_subscribers = self.bug.getDirectSubscribers()
1764+ subscribers_from_dupes = self.bug.getSubscribersFromDuplicates()
1765+
1766+ subscriber_count = (
1767+ len(direct_subscribers) + len(subscribers_from_dupes))
1768+ return subscriber_count * BugHeatConstants.SUBSCRIBER
1769+
1770+ def getBugHeat(self):
1771+ """Return the total heat for the current bug."""
1772+ total_heat = sum([
1773+ self._getHeatFromAffectedUsers(),
1774+ self._getHeatFromDuplicates(),
1775+ self._getHeatFromPrivacy(),
1776+ self._getHeatFromSecurity(),
1777+ self._getHeatFromSubscribers(),
1778+ ])
1779+
1780+ return total_heat
1781+
1782
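A worked example using the constants above: a public, security-related bug with two duplicates, three affected users and four subscribers (direct plus duplicate subscribers) scores 250 + 2*6 + 3*4 + 4*2 = 282. The same arithmetic as a quick sanity check:

    from lp.bugs.scripts.bugheat import BugHeatConstants

    heat = (
        BugHeatConstants.SECURITY
        + 2 * BugHeatConstants.DUPLICATE
        + 3 * BugHeatConstants.AFFECTED_USER
        + 4 * BugHeatConstants.SUBSCRIBER)
    assert heat == 282
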
1783=== added file 'lib/lp/bugs/scripts/tests/test_bugheat.py'
1784--- lib/lp/bugs/scripts/tests/test_bugheat.py 1970-01-01 00:00:00 +0000
1785+++ lib/lp/bugs/scripts/tests/test_bugheat.py 2010-01-21 17:51:26 +0000
1786@@ -0,0 +1,183 @@
1787+
1788+# Copyright 2010 Canonical Ltd. This software is licensed under the
1789+# GNU Affero General Public License version 3 (see the file LICENSE).
1790+
1791+"""Module docstring goes here."""
1792+
1793+__metaclass__ = type
1794+
1795+import unittest
1796+
1797+from canonical.testing import LaunchpadZopelessLayer
1798+
1799+from lp.bugs.scripts.bugheat import BugHeatCalculator, BugHeatConstants
1800+from lp.testing import TestCaseWithFactory
1801+
1802+
1803+class TestBugHeatCalculator(TestCaseWithFactory):
1804+ """Tests for the BugHeatCalculator class."""
1805+
1806+ layer = LaunchpadZopelessLayer
1807+
1808+ def setUp(self):
1809+ super(TestBugHeatCalculator, self).setUp()
1810+ self.bug = self.factory.makeBug()
1811+ self.calculator = BugHeatCalculator(self.bug)
1812+
1813+ def test__getHeatFromDuplicates(self):
1814+ # BugHeatCalculator._getHeatFromDuplicates() returns the bug
1815+ # heat generated by duplicates of a bug.
1816+ # By default, the bug has no heat from dupes
1817+ self.assertEqual(0, self.calculator._getHeatFromDuplicates())
1818+
1819+ # If adding duplicates, the heat generated by them will be n *
1820+ # BugHeatConstants.DUPLICATE, where n is the number of
1821+ # duplicates.
1822+ for i in range(5):
1823+ dupe = self.factory.makeBug()
1824+ dupe.duplicateof = self.bug
1825+
1826+ expected_heat = BugHeatConstants.DUPLICATE * 5
1827+ actual_heat = self.calculator._getHeatFromDuplicates()
1828+ self.assertEqual(
1829+ expected_heat, actual_heat,
1830+ "Heat from duplicates does not match expected heat. "
1831+ "Expected %s, got %s" % (expected_heat, actual_heat))
1832+
1833+ def test__getHeatFromAffectedUsers(self):
1834+ # BugHeatCalculator._getHeatFromAffectedUsers() returns the bug
1835+ # heat generated by users affected by the bug.
1836+ # By default, the heat will be BugHeatConstants.AFFECTED_USER, since
1837+ # there will be one affected user (the user who filed the bug).
1838+ self.assertEqual(
1839+ BugHeatConstants.AFFECTED_USER,
1840+ self.calculator._getHeatFromAffectedUsers())
1841+
1842+ # As the number of affected users increases, the heat generated
1843+ # will be n * BugHeatConstants.AFFECTED_USER, where n is the number
1844+ # of affected users.
1845+ for i in range(5):
1846+ person = self.factory.makePerson()
1847+ self.bug.markUserAffected(person)
1848+
1849+ expected_heat = BugHeatConstants.AFFECTED_USER * 6
1850+ actual_heat = self.calculator._getHeatFromAffectedUsers()
1851+ self.assertEqual(
1852+ expected_heat, actual_heat,
1853+ "Heat from affected users does not match expected heat. "
1854+ "Expected %s, got %s" % (expected_heat, actual_heat))
1855+
1856+ def test__getHeatFromSubscribers(self):
1857+ # BugHeatCalculator._getHeatFromSubscribers() returns the bug
1858+ # heat generated by users subscribed to the bug.
1859+ # By default, the heat will be BugHeatConstants.SUBSCRIBER,
1860+ # since there will be one direct subscriber (the user who filed
1861+ # the bug).
1862+ self.assertEqual(
1863+ BugHeatConstants.SUBSCRIBER,
1864+ self.calculator._getHeatFromSubscribers())
1865+
1866+ # As the number of subscribers increases, the heat generated
1867+ # will be n * BugHeatConstants.SUBSCRIBER, where n is the number
1868+ # of subscribers.
1869+ for i in range(5):
1870+ person = self.factory.makePerson()
1871+ self.bug.subscribe(person, person)
1872+
1873+ expected_heat = BugHeatConstants.SUBSCRIBER * 6
1874+ actual_heat = self.calculator._getHeatFromSubscribers()
1875+ self.assertEqual(
1876+ expected_heat, actual_heat,
1877+ "Heat from subscribers does not match expected heat. "
1878+ "Expected %s, got %s" % (expected_heat, actual_heat))
1879+
1880+ # Subscribers from duplicates are included in the heat returned
1881+ # by _getHeatFromSubscribers()
1882+ dupe = self.factory.makeBug()
1883+ dupe.duplicateof = self.bug
1884+ expected_heat = BugHeatConstants.SUBSCRIBER * 7
1885+ actual_heat = self.calculator._getHeatFromSubscribers()
1886+ self.assertEqual(
1887+ expected_heat, actual_heat,
1888+ "Heat from subscribers (including duplicate-subscribers) "
1889+ "does not match expected heat. Expected %s, got %s" %
1890+ (expected_heat, actual_heat))
1891+
1892+ # Setting the bug to private will increase its heat from
1893+ # subscribers by 1 * BugHeatConstants.SUBSCRIBER, as the project
1894+ # owner will now be directly subscribed to it.
1895+ self.bug.setPrivate(True, self.bug.owner)
1896+ expected_heat = BugHeatConstants.SUBSCRIBER * 8
1897+ actual_heat = self.calculator._getHeatFromSubscribers()
1898+ self.assertEqual(
1899+ expected_heat, actual_heat,
1900+ "Heat from subscribers to private bug does not match expected "
1901+ "heat. Expected %s, got %s" % (expected_heat, actual_heat))
1902+
1903+ def test__getHeatFromPrivacy(self):
1904+ # BugHeatCalculator._getHeatFromPrivacy() returns the heat
1905+ # generated by the bug's private attribute. If the bug is
1906+ # public, this will be 0.
1907+ self.assertEqual(0, self.calculator._getHeatFromPrivacy())
1908+
1909+ # However, if the bug is private, _getHeatFromPrivacy() will
1910+ # return BugHeatConstants.PRIVACY.
1911+ self.bug.setPrivate(True, self.bug.owner)
1912+ self.assertEqual(
1913+ BugHeatConstants.PRIVACY, self.calculator._getHeatFromPrivacy())
1914+
1915+ def test__getHeatFromSecurity(self):
1916+ # BugHeatCalculator._getHeatFromSecurity() returns the heat
1917+ # generated by the bug's security_related attribute. If the bug
1918+ # is not security related, _getHeatFromSecurity() will return 0.
1919+ self.assertEqual(0, self.calculator._getHeatFromSecurity())
1920+
1921+
1922+ # If, on the other hand, the bug is security_related,
1923+ # _getHeatFromSecurity() will return BugHeatConstants.SECURITY
1924+ self.bug.security_related = True
1925+ self.assertEqual(
1926+ BugHeatConstants.SECURITY, self.calculator._getHeatFromSecurity())
1927+
1928+ def test_getBugHeat(self):
1929+ # BugHeatCalculator.getBugHeat() returns the total heat for a
1930+ # given bug as the sum of the results of all _getHeatFrom*()
1931+ # methods.
1932+ # By default this will be (BugHeatConstants.AFFECTED_USER +
1933+ # BugHeatConstants.SUBSCRIBER) since there will be one
1934+ # subscriber and one affected user only.
1935+ expected_heat = (
1936+ BugHeatConstants.AFFECTED_USER + BugHeatConstants.SUBSCRIBER)
1937+ actual_heat = self.calculator.getBugHeat()
1938+ self.assertEqual(
1939+ expected_heat, actual_heat,
1940+ "Expected bug heat did not match actual bug heat. "
1941+ "Expected %s, got %s" % (expected_heat, actual_heat))
1942+
1943+ # Adding a duplicate and making the bug private and security
1944+ # related will increase its heat.
1945+ dupe = self.factory.makeBug()
1946+ dupe.duplicateof = self.bug
1947+ self.bug.setPrivate(True, self.bug.owner)
1948+ self.bug.security_related = True
1949+
1950+ expected_heat += (
1951+ BugHeatConstants.DUPLICATE +
1952+ BugHeatConstants.PRIVACY +
1953+ BugHeatConstants.SECURITY
1954+ )
1955+
1956+ # Adding the duplicate and making the bug private means it gets
1957+ # two new subscribers, the project owner and the duplicate's
1958+ # direct subscriber.
1959+ expected_heat += BugHeatConstants.SUBSCRIBER * 2
1960+ actual_heat = self.calculator.getBugHeat()
1961+ self.assertEqual(
1962+ expected_heat, actual_heat,
1963+ "Expected bug heat did not match actual bug heat. "
1964+ "Expected %s, got %s" % (expected_heat, actual_heat))
1965+
1966+
1967+def test_suite():
1968+ return unittest.TestLoader().loadTestsFromName(__name__)
1969+
1970
1971=== modified file 'lib/lp/bugs/tests/test_doc.py'
1972--- lib/lp/bugs/tests/test_doc.py 2009-10-02 11:29:22 +0000
1973+++ lib/lp/bugs/tests/test_doc.py 2010-01-21 17:51:26 +0000
1974@@ -89,6 +89,12 @@
1975 '../doc/cve-update.txt',
1976 setUp=cveSetUp, tearDown=tearDown, layer=LaunchpadZopelessLayer
1977 ),
1978+ 'bug-heat.txt': LayeredDocFileSuite(
1979+ '../doc/bug-heat.txt',
1980+ setUp=setUp,
1981+ tearDown=tearDown,
1982+ layer=LaunchpadZopelessLayer
1983+ ),
1984 'bugnotificationrecipients.txt-uploader': LayeredDocFileSuite(
1985 '../doc/bugnotificationrecipients.txt',
1986 setUp=uploaderBugsSetUp,
1987
1988=== modified file 'lib/lp/buildmaster/buildergroup.py'
1989--- lib/lp/buildmaster/buildergroup.py 2010-01-14 03:40:33 +0000
1990+++ lib/lp/buildmaster/buildergroup.py 2010-01-21 17:51:26 +0000
1991@@ -11,15 +11,11 @@
1992 import socket
1993 import xmlrpclib
1994
1995-from sqlobject import SQLObjectNotFound
1996-
1997 from zope.component import getUtility
1998
1999-from lp.soyuz.interfaces.build import IBuildSet
2000 from lp.buildmaster.interfaces.builder import (
2001- BuildDaemonError, BuildJobMismatch, IBuilderSet)
2002-from lp.soyuz.interfaces.buildqueue import IBuildQueueSet
2003-from canonical.launchpad.interfaces import NotFoundError
2004+ BuildDaemonError, IBuilderSet)
2005+from lp.buildmaster.interfaces.builder import CorruptBuildID
2006
2007
2008 class BuilderGroup:
2009@@ -109,26 +105,17 @@
2010 if status not in ident_position.keys():
2011 return
2012
2013- # Extract information from the identifier.
2014- build_id, queue_item_id = status_sentence[
2015- ident_position[status]].split('-')
2016+ slave_build_id = status_sentence[ident_position[status]]
2017
2018- # Check if build_id and queue_item_id exist.
2019 try:
2020- build = getUtility(IBuildSet).getByBuildID(int(build_id))
2021- queue_item = getUtility(IBuildQueueSet).get(int(queue_item_id))
2022- queued_build = getUtility(IBuildSet).getByQueueEntry(queue_item)
2023- # Also check whether build and buildqueue are properly related.
2024- if queued_build.id != build.id:
2025- raise BuildJobMismatch('Job build entry mismatch')
2026-
2027- except (SQLObjectNotFound, NotFoundError, BuildJobMismatch), reason:
2028+ builder.verifySlaveBuildID(slave_build_id)
2029+ except CorruptBuildID, reason:
2030 if status == 'BuilderStatus.WAITING':
2031 builder.cleanSlave()
2032 else:
2033 builder.requestAbort()
2034- self.logger.warn("Builder '%s' rescued from '%s-%s: %s'" % (
2035- builder.name, build_id, queue_item_id, reason))
2036+ self.logger.warn("Builder '%s' rescued from '%s': '%s'" % (
2037+ builder.name, slave_build_id, reason))
2038
2039 def updateBuild(self, queueItem):
2040 """Verify the current build job status.
2041
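rescueIfLost now delegates identifier checking to the builder, which in turn asks its current build behavior. A hedged sketch of what a behavior's verifySlaveBuildID could look like for the binary-package case, where the slave build ID keeps the historical '<build_id>-<queue_item_id>' form (illustrative only; the real checks live in the behavior classes and also verify that both records exist and are properly related):

    from lp.buildmaster.interfaces.builder import CorruptBuildID

    def verifySlaveBuildID(self, slave_build_id):
        """Raise CorruptBuildID if the slave's identifier looks wrong."""
        try:
            build_id, queue_item_id = slave_build_id.split('-')
            int(build_id), int(queue_item_id)
        except ValueError, reason:
            raise CorruptBuildID(str(reason))
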
2042=== modified file 'lib/lp/buildmaster/interfaces/buildbase.py'
2043--- lib/lp/buildmaster/interfaces/buildbase.py 2010-01-14 03:39:27 +0000
2044+++ lib/lp/buildmaster/interfaces/buildbase.py 2010-01-21 17:51:27 +0000
2045@@ -13,12 +13,19 @@
2046 from zope.schema import Choice, Datetime, Object, TextLine, Timedelta
2047 from lazr.enum import DBEnumeratedType
2048 from lazr.restful.declarations import exported
2049+from lazr.restful.fields import Reference
2050
2051 from lp.buildmaster.interfaces.builder import IBuilder
2052+from lp.registry.interfaces.distribution import IDistribution
2053+from lp.registry.interfaces.pocket import PackagePublishingPocket
2054+from lp.soyuz.interfaces.archive import IArchive
2055 from canonical.launchpad.interfaces.librarian import ILibraryFileAlias
2056 from canonical.launchpad import _
2057
2058 class IBuildBase(Interface):
2059+ """Common interface shared by farm jobs that build a package."""
2060+ # XXX: wgrant 2010-01-20 bug=507712: Most of these attribute names
2061+ # are bad.
2062 datecreated = exported(
2063 Datetime(
2064 title=_('Date created'), required=True, readonly=True,
2065@@ -65,6 +72,34 @@
2066
2067 is_private = Attribute("Whether the build should be treated as private.")
2068
2069+ archive = exported(
2070+ Reference(
2071+ title=_("Archive"), schema=IArchive,
2072+ required=True, readonly=True,
2073+ description=_("The Archive context for this build.")))
2074+
2075+ current_component = Attribute(
2076+ "Component where the source related to this build was last "
2077+ "published.")
2078+
2079+ pocket = exported(
2080+ Choice(
2081+ title=_('Pocket'), required=True,
2082+ vocabulary=PackagePublishingPocket,
2083+ description=_("The build targeted pocket.")))
2084+
2085+ dependencies = exported(
2086+ TextLine(
2087+ title=_("Dependencies"), required=False,
2088+ description=_("Debian-like dependency line that must be satisfied"
2089+ " before attempting to build this request.")))
2090+
2091+ distribution = exported(
2092+ Reference(
2093+ schema=IDistribution,
2094+ title=_("Distribution"), required=True,
2095+ description=_("Shortcut for its distribution.")))
2096+
2097 def handleStatus(status, queueItem, librarian, slave_status):
2098 """Handle a finished build status from a slave.
2099
2100@@ -78,6 +113,12 @@
2101 Invoke getFileFromSlave method with 'buildlog' identifier.
2102 """
2103
2104+ def queueBuild():
2105+ """Create a BuildQueue entry for this build."""
2106+
2107+ def estimateDuration():
2108+ """Estimate the build duration."""
2109+
2110 def storeBuildInfo(librarian, slave_status):
2111 """Store available information for the build job.
2112
2113@@ -85,5 +126,15 @@
2114 handlers, but it should not be called externally.
2115 """
2116
2117+ def storeUploadLog(content):
2118+ """Store the given content as the build upload_log.
2119+
2120+ :param content: string containing the upload-processor log output for
2121+ the binaries created in this build.
2122+ """
2123+
2124 def notify(extra_info=None):
2125 """Notify current build state to related people via email."""
2126+
2127+ def makeJob():
2128+ """Construct and return an `IBuildFarmJob` for this build."""
2129
2130=== modified file 'lib/lp/buildmaster/interfaces/builder.py'
2131--- lib/lp/buildmaster/interfaces/builder.py 2010-01-14 03:40:33 +0000
2132+++ lib/lp/buildmaster/interfaces/builder.py 2010-01-21 17:51:27 +0000
2133@@ -9,7 +9,7 @@
2134
2135 __all__ = [
2136 'BuildDaemonError',
2137- 'BuildJobMismatch',
2138+ 'CorruptBuildID',
2139 'BuildSlaveFailure',
2140 'CannotBuild',
2141 'CannotFetchFile',
2142@@ -46,7 +46,7 @@
2143 """The build slave had a protocol version. This is a serious error."""
2144
2145
2146-class BuildJobMismatch(BuildDaemonError):
2147+class CorruptBuildID(BuildDaemonError):
2148 """The build slave is working with mismatched information.
2149
2150 It needs to be rescued.
2151@@ -216,6 +216,12 @@
2152 the status.
2153 """
2154
2155+ def verifySlaveBuildID(slave_build_id):
2156+ """Verify that a slave's build ID is consistent.
2157+
2158+ This should delegate to the current `IBuildFarmJobBehavior`.
2159+ """
2160+
2161 def updateBuild(queueItem):
2162 """Verify the current build job status.
2163
2164
2165=== modified file 'lib/lp/buildmaster/interfaces/buildfarmjob.py'
2166--- lib/lp/buildmaster/interfaces/buildfarmjob.py 2010-01-15 01:20:20 +0000
2167+++ lib/lp/buildmaster/interfaces/buildfarmjob.py 2010-01-21 17:51:26 +0000
2168@@ -9,6 +9,7 @@
2169
2170 __all__ = [
2171 'IBuildFarmJob',
2172+ 'IBuildFarmCandidateJobSelection',
2173 'IBuildFarmJobDispatchEstimation',
2174 'ISpecificBuildFarmJobClass',
2175 'BuildFarmJobType',
2176@@ -67,7 +68,7 @@
2177 """An appropriate name for this job."""
2178
2179 def getTitle():
2180- """A string to identify and describe the job to users return None."""
2181+ """A string to identify and describe the job to users."""
2182
2183 def jobStarted():
2184 """'Job started' life cycle event, handle as appropriate."""
2185@@ -154,3 +155,47 @@
2186 the pending jobs of the appropriate type.
2187 """
2188
2189+
2190+class IBuildFarmCandidateJobSelection(Interface):
2191+ """Operations for refining candidate job selection (optional).
2192+
2193+ Job type classes that do *not* need to refine candidate job selection may
2194+ be derived from `BuildFarmJob` which provides a base implementation of
2195+ this interface.
2196+ """
2197+
2198+ def addCandidateSelectionCriteria(processor, virtualized):
2199+ """Provide a sub-query to refine the candidate job selection.
2200+
2201+ Return a sub-query to narrow down the list of candidate jobs.
2202+ The sub-query will become part of an "outer query" and is free to
2203+ refer to the `BuildQueue` and `Job` tables already utilized in the
2204+ latter.
2205+
2206+ Example (please see the `BuildPackageJob` implementation for a
2207+ complete example):
2208+
2209+ SELECT TRUE
2210+ FROM Archive, Build, BuildPackageJob, DistroArchSeries
2211+ WHERE
2212+ BuildPackageJob.job = Job.id AND
2213+ ..
2214+
2215+ :param processor: the type of processor that the candidate jobs are
2216+ expected to run on.
2217+ :param virtualized: whether the candidate jobs are expected to run on
2218+ the `processor` natively or inside a virtual machine.
2219+ :return: a string containing a sub-query that narrows down the list of
2220+ candidate jobs.
2221+ """
2222+
2223+ def postprocessCandidate(job, logger):
2224+ """True if the candidate job is fine and should be dispatched
2225+ to a builder, False otherwise.
2226+
2227+ :param job: The `BuildQueue` instance to be scrutinized.
2228+ :param logger: The logger to use.
2229+
2230+ :return: True if the candidate job should be dispatched
2231+ to a builder, False otherwise.
2232+ """
2233
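A hedged sketch of the pass-through base implementation that job-type classes deriving from BuildFarmJob would inherit, per the note above (the exact signatures in the branch may differ; this only mirrors the interface contract):

    class BuildFarmJob:
        """Base class for build farm jobs: no extra selection criteria."""

        @staticmethod
        def addCandidateSelectionCriteria(processor, virtualized):
            # No additional WHERE clause: do not narrow the candidate list.
            return ''

        @staticmethod
        def postprocessCandidate(job, logger):
            # Accept every candidate job.
            return True
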
2234=== modified file 'lib/lp/buildmaster/interfaces/buildfarmjobbehavior.py'
2235--- lib/lp/buildmaster/interfaces/buildfarmjobbehavior.py 2010-01-14 03:39:27 +0000
2236+++ lib/lp/buildmaster/interfaces/buildfarmjobbehavior.py 2010-01-21 17:51:27 +0000
2237@@ -57,6 +57,14 @@
2238 of IBuilder.slaveStatus().
2239 """
2240
2241+ def verifySlaveBuildID(slave_build_id):
2242+ """Verify that a slave's build ID shows no signs of corruption.
2243+
2244+ :param slave_build_id: The slave's build ID, as specified in
2245+ dispatchBuildToSlave.
2246+ :raises CorruptBuildID: if the build ID is determined to be corrupt.
2247+ """
2248+
2249 def updateBuild(queueItem):
2250 """Verify the current build job status.
2251
2252
2253=== modified file 'lib/lp/buildmaster/master.py'
2254--- lib/lp/buildmaster/master.py 2010-01-13 22:07:39 +0000
2255+++ lib/lp/buildmaster/master.py 2010-01-21 17:51:27 +0000
2256@@ -267,7 +267,7 @@
2257 self._logger.debug(
2258 "Creating buildqueue record for %s (%s) on %s"
2259 % (name, version, tag))
2260- build.createBuildQueueEntry()
2261+ build.queueBuild()
2262
2263 self.commit()
2264
2265
2266=== modified file 'lib/lp/buildmaster/model/buildbase.py'
2267--- lib/lp/buildmaster/model/buildbase.py 2010-01-13 21:04:49 +0000
2268+++ lib/lp/buildmaster/model/buildbase.py 2010-01-21 17:51:26 +0000
2269@@ -11,12 +11,41 @@
2270
2271 import datetime
2272 import logging
2273+import os
2274 import pytz
2275-
2276+import subprocess
2277+import time
2278+
2279+from storm.store import Store
2280+from zope.security.proxy import removeSecurityProxy
2281+
2282+from canonical.config import config
2283+from canonical.database.constants import UTC_NOW
2284+from canonical.database.sqlbase import (
2285+ clear_current_connection_cache, cursor, flush_database_updates)
2286+from canonical.librarian.utils import copy_and_close
2287+from lp.registry.interfaces.pocket import pocketsuffix
2288 from lp.soyuz.interfaces.build import BuildStatus
2289-from canonical.database.constants import UTC_NOW
2290+from lp.soyuz.model.buildqueue import BuildQueue
2291+
2292
2293 class BuildBase:
2294+ def _getProxiedFileURL(self, library_file):
2295+ """Return the 'http_url' of a `ProxiedLibraryFileAlias`."""
2296+ # Avoiding circular imports.
2297+ from canonical.launchpad.browser.librarian import (
2298+ ProxiedLibraryFileAlias)
2299+
2300+ proxied_file = ProxiedLibraryFileAlias(library_file, self)
2301+ return proxied_file.http_url
2302+
2303+ @property
2304+ def build_log_url(self):
2305+ """See `IBuildBase`."""
2306+ if self.buildlog is None:
2307+ return None
2308+ return self._getProxiedFileURL(self.buildlog)
2309+
2310 def handleStatus(self, status, librarian, slave_status):
2311 """See `IBuildBase`."""
2312 logger = logging.getLogger()
2313@@ -31,8 +60,174 @@
2314 method(librarian, slave_status, logger)
2315
2316 def _handleStatus_OK(self, librarian, slave_status, logger):
2317- """Handle a package that built successfully."""
2318- raise NotImplementedError()
2319+ """Handle a package that built successfully.
2320+
2321+ Once built successfully, we pull the files, store them in a
2322+ directory, store build information and push them through the
2323+ uploader.
2324+ """
2325+ # XXX cprov 2007-07-11 bug=129487: untested code path.
2326+ buildid = slave_status['build_id']
2327+ filemap = slave_status['filemap']
2328+
2329+ logger.debug("Processing successful build %s" % buildid)
2330+ # Explode before collect a binary that is denied in this
2331+ # distroseries/pocket
2332+ if not self.archive.allowUpdatesToReleasePocket():
2333+ assert self.distroseries.canUploadToPocket(self.pocket), (
2334+ "%s (%s) can not be built for pocket %s: illegal status"
2335+ % (self.title, self.id, self.pocket.name))
2336+
2337+ # ensure we have the correct build root as:
2338+ # <BUILDMASTER_ROOT>/incoming/<UPLOAD_LEAF>/<TARGET_PATH>/[FILES]
2339+ root = os.path.abspath(config.builddmaster.root)
2340+ incoming = os.path.join(root, 'incoming')
2341+
2342+ # create a single directory to store build result files
2343+ # UPLOAD_LEAF: <TIMESTAMP>-<BUILD_ID>-<BUILDQUEUE_ID>
2344+ upload_leaf = "%s-%s" % (time.strftime("%Y%m%d-%H%M%S"), buildid)
2345+ upload_dir = os.path.join(incoming, upload_leaf)
2346+ logger.debug("Storing build result at '%s'" % upload_dir)
2347+
2348+ # Build the right UPLOAD_PATH so the distribution and archive
2349+ # can be correctly found during the upload:
2350+ # <archive_id>/distribution_name
2351+ # for all destination archive types.
2352+ archive = self.archive
2353+ distribution_name = self.distribution.name
2354+ target_path = '%s/%s' % (archive.id, distribution_name)
2355+ upload_path = os.path.join(upload_dir, target_path)
2356+ os.makedirs(upload_path)
2357+
2358+ slave = removeSecurityProxy(self.buildqueue_record.builder.slave)
2359+ for filename in filemap:
2360+ slave_file = slave.getFile(filemap[filename])
2361+ out_file_name = os.path.join(upload_path, filename)
2362+ out_file = open(out_file_name, "wb")
2363+ copy_and_close(slave_file, out_file)
2364+
2365+ uploader_argv = list(config.builddmaster.uploader.split())
2366+ uploader_logfilename = os.path.join(upload_dir, 'uploader.log')
2367+ logger.debug("Saving uploader log at '%s'"
2368+ % uploader_logfilename)
2369+
2370+ # add extra arguments for processing a binary upload
2371+ extra_args = [
2372+ "--log-file", "%s" % uploader_logfilename,
2373+ "-d", "%s" % self.distribution.name,
2374+ "-s", "%s" % (self.distroseries.name +
2375+ pocketsuffix[self.pocket]),
2376+ "-b", "%s" % self.id,
2377+ "-J", "%s" % upload_leaf,
2378+ "%s" % root,
2379+ ]
2380+
2381+ uploader_argv.extend(extra_args)
2382+
2383+ logger.debug("Invoking uploader on %s" % root)
2384+ logger.debug("%s" % uploader_argv)
2385+
2386+ uploader_process = subprocess.Popen(
2387+ uploader_argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
2388+
2389+ # Nothing should be written to the stdout/stderr.
2390+ upload_stdout, upload_stderr = uploader_process.communicate()
2391+
2392+ # XXX cprov 2007-04-17: we do not check uploader_result_code
2393+ # anywhere. We need to find out what the best strategy will be
2394+ # when it fails HARD (there is a huge effort in process-upload
2395+ # not to return an error; it only happens when the code is broken).
2396+ uploader_result_code = uploader_process.returncode
2397+ logger.debug("Uploader returned %d" % uploader_result_code)
2398+
2399+ # Quick and dirty hack to carry on in the face of process-upload failures.
2400+ if os.path.exists(upload_dir):
2401+ logger.debug("The upload directory did not get moved.")
2402+ failed_dir = os.path.join(root, "failed-to-move")
2403+ if not os.path.exists(failed_dir):
2404+ os.mkdir(failed_dir)
2405+ os.rename(upload_dir, os.path.join(failed_dir, upload_leaf))
2406+
2407+ # The famous 'flush_updates + clear_cache' will make visible
2408+ # the DB changes done in process-upload, considering that the
2409+ # transaction was set with ISOLATION_LEVEL_READ_COMMITTED
2410+ # isolation level.
2411+ cur = cursor()
2412+ cur.execute('SHOW transaction_isolation')
2413+ isolation_str = cur.fetchone()[0]
2414+ assert isolation_str == 'read committed', (
2415+ 'BuildMaster/BuilderGroup transaction isolation should be '
2416+ 'ISOLATION_LEVEL_READ_COMMITTED (not "%s")' % isolation_str)
2417+
2418+ original_slave = self.buildqueue_record.builder.slave
2419+
2420+ # XXX Robert Collins, Celso Providelo 2007-05-26 bug=506256:
2421+ # 'Refreshing' objects procedure is forced on us by using a
2422+ # different process to do the upload, but as that process runs
2423+ # in the same unix account, it is simply double handling and we
2424+ # would be better off to do it within this process.
2425+ flush_database_updates()
2426+ clear_current_connection_cache()
2427+
2428+ # XXX cprov 2007-06-15: Re-issuing removeSecurityProxy is forced on
2429+ # us by sqlobject refreshing the builder object during the
2430+ # transaction cache clearing. Once we sort the previous problem
2431+ # this step should probably not be required anymore.
2432+ self.buildqueue_record.builder.setSlaveForTesting(
2433+ removeSecurityProxy(original_slave))
2434+
2435+ # Store build information; the build record was already updated
2436+ # during the binary upload.
2437+ self.storeBuildInfo(librarian, slave_status)
2438+
2439+ # Retrieve the up-to-date build record and perform consistency
2440+ # checks. The build record should be updated during the binary
2441+ # upload processing; if it wasn't, something is broken and needs
2442+ # admins' attention. Even when we have a FULLYBUILT build record,
2443+ # if it is not related to at least one binary, there is also
2444+ # a problem.
2445+ # For both situations we will mark the build as FAILEDTOUPLOAD
2446+ # and update the build details (datebuilt, duration,
2447+ # buildlog, builder) in LP. A build-failure-notification will be
2448+ # sent to the lp-build-admin celebrity and to the sourcepackagerelease
2449+ # uploader about this occurrence. The failure notification will
2450+ # also contain the information required to manually reprocess the
2451+ # binary upload when it was the case.
2452+ if (self.buildstate != BuildStatus.FULLYBUILT or
2453+ self.binarypackages.count() == 0):
2454+ logger.debug("Build %s upload failed." % self.id)
2455+ self.buildstate = BuildStatus.FAILEDTOUPLOAD
2456+ # Retrieve log file content.
2457+ possible_locations = (
2458+ 'failed', 'failed-to-move', 'rejected', 'accepted')
2459+ for location_dir in possible_locations:
2460+ upload_final_location = os.path.join(
2461+ root, location_dir, upload_leaf)
2462+ if os.path.exists(upload_final_location):
2463+ log_filepath = os.path.join(
2464+ upload_final_location, 'uploader.log')
2465+ uploader_log_file = open(log_filepath)
2466+ try:
2467+ uploader_log_content = uploader_log_file.read()
2468+ finally:
2469+ uploader_log_file.close()
2470+ break
2471+ else:
2472+ uploader_log_content = 'Could not find upload log file'
2473+ # Store the upload_log_contents in librarian so it can be
2474+ # accessed by anyone with permission to see the build.
2475+ self.storeUploadLog(uploader_log_content)
2476+ # Notify the build failure.
2477+ self.notify(extra_info=uploader_log_content)
2478+ else:
2479+ logger.debug(
2480+ "Gathered build %s completely" %
2481+ self.sourcepackagerelease.name)
2482+
2483+ # Release the builder for another job.
2484+ self.buildqueue_record.builder.cleanSlave()
2485+ # Remove BuildQueue record.
2486+ self.buildqueue_record.destroySelf()
2487
2488 def _handleStatus_PACKAGEFAIL(self, librarian, slave_status, logger):
2489 """Handle a package that had failed to build.
2490@@ -128,3 +323,17 @@
2491 # the time operations for duration.
2492 RIGHT_NOW = datetime.datetime.now(pytz.timezone('UTC'))
2493 self.buildduration = RIGHT_NOW - self.buildqueue_record.date_started
2494+ self.dependencies = slave_status.get('dependencies')
2495+
2496+ def queueBuild(self):
2497+ """See `IBuildBase`"""
2498+ specific_job = self.makeJob()
2499+ duration_estimate = self.estimateDuration()
2500+ queue_entry = BuildQueue(
2501+ estimated_duration=duration_estimate,
2502+ job_type=self.build_farm_job_type,
2503+ job=specific_job.job, processor=specific_job.processor,
2504+ virtualized=specific_job.virtualized)
2505+ Store.of(self).add(queue_entry)
2506+ return queue_entry
2507+
2508
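The new IBuildBase.queueBuild above relies on the concrete build class
supplying makeJob(), estimateDuration() and build_farm_job_type, and copies
the returned job wrapper's job, processor and virtualized attributes onto the
BuildQueue row. A minimal sketch of that contract, with MyJob and MyBuild as
purely hypothetical stand-ins rather than code in this branch:

    from datetime import timedelta

    class MyJob:
        """Hypothetical job-type-specific wrapper returned by makeJob()."""
        def __init__(self, job, processor, virtualized):
            self.job = job                  # the generic Job row
            self.processor = processor      # None means "any processor"
            self.virtualized = virtualized  # None means "don't care"

    class MyBuild:
        # A BuildFarmJobType value in real code.
        build_farm_job_type = None

        def makeJob(self):
            # queueBuild() reads .job/.processor/.virtualized from here.
            return MyJob(job=object(), processor=None, virtualized=True)

        def estimateDuration(self):
            # Feeds BuildQueue.estimated_duration.
            return timedelta(minutes=5)
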
2509=== modified file 'lib/lp/buildmaster/model/builder.py'
2510--- lib/lp/buildmaster/model/builder.py 2010-01-19 18:01:00 +0000
2511+++ lib/lp/buildmaster/model/builder.py 2010-01-21 17:51:26 +0000
2512@@ -41,24 +41,23 @@
2513 # These dependencies on soyuz will be removed when getBuildRecords()
2514 # is moved, as well as when the generalisation of findBuildCandidate()
2515 # is completed.
2516-from lp.soyuz.model.buildqueue import BuildQueue
2517+from lp.soyuz.model.buildqueue import BuildQueue, specific_job_classes
2518 from lp.registry.interfaces.person import validate_public_person
2519-from lp.registry.interfaces.pocket import PackagePublishingPocket
2520 from canonical.launchpad.helpers import filenameToContentType
2521+from lp.services.job.interfaces.job import JobStatus
2522 from lp.soyuz.interfaces.buildrecords import IHasBuildRecords
2523 from lp.soyuz.interfaces.distroarchseries import IDistroArchSeriesSet
2524 from canonical.launchpad.interfaces.librarian import ILibraryFileAliasSet
2525 from canonical.launchpad.webapp.interfaces import NotFoundError
2526-from lp.soyuz.interfaces.archive import ArchivePurpose
2527 from lp.soyuz.interfaces.build import BuildStatus, IBuildSet
2528 from lp.buildmaster.interfaces.builder import (
2529 BuildDaemonError, BuildSlaveFailure, CannotBuild, CannotFetchFile,
2530 CannotResumeHost, IBuilder, IBuilderSet, ProtocolVersionMismatch)
2531 from lp.soyuz.interfaces.buildqueue import IBuildQueueSet
2532-from lp.soyuz.interfaces.publishing import (
2533- PackagePublishingStatus)
2534 from lp.soyuz.model.buildpackagejob import BuildPackageJob
2535 from canonical.launchpad.webapp import urlappend
2536+from canonical.launchpad.webapp.interfaces import (
2537+ IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR)
2538 from canonical.lazr.utils import safe_hasattr
2539 from canonical.librarian.utils import copy_and_close
2540
2541@@ -366,6 +365,10 @@
2542 """See IBuilder."""
2543 return self.slave.status()
2544
2545+ def verifySlaveBuildID(self, slave_build_id):
2546+ """See `IBuilder`."""
2547+ return self.current_build_behavior.verifySlaveBuildID(slave_build_id)
2548+
2549 def updateBuild(self, queueItem):
2550 """See `IBuilder`."""
2551 self.current_build_behavior.updateBuild(queueItem)
2552@@ -418,99 +421,6 @@
2553 return False
2554 return True
2555
2556- # XXX cprov 20071116: It should become part of the public
2557- # _findBuildCandidate once we start to detect superseded builds
2558- # at build creation time.
2559- def _findBinaryBuildCandidate(self):
2560- """Return the highest priority build candidate for this builder.
2561-
2562- Returns a pending IBuildQueue record queued for this builder
2563- processorfamily with the highest lastscore or None if there
2564- is no one available.
2565- """
2566- # If a private build does not yet have its source published then
2567- # we temporarily skip it because we want to wait for the publisher
2568- # to place the source in the archive, which is where builders
2569- # download the source from in the case of private builds (because
2570- # it's a secure location).
2571- private_statuses = (
2572- PackagePublishingStatus.PUBLISHED,
2573- PackagePublishingStatus.SUPERSEDED,
2574- PackagePublishingStatus.DELETED,
2575- )
2576- clauses = ["""
2577- ((archive.private IS TRUE AND
2578- EXISTS (
2579- SELECT SourcePackagePublishingHistory.id
2580- FROM SourcePackagePublishingHistory
2581- WHERE
2582- SourcePackagePublishingHistory.distroseries =
2583- DistroArchSeries.distroseries AND
2584- SourcePackagePublishingHistory.sourcepackagerelease =
2585- Build.sourcepackagerelease AND
2586- SourcePackagePublishingHistory.archive = Archive.id AND
2587- SourcePackagePublishingHistory.status IN %s))
2588- OR
2589- archive.private IS FALSE) AND
2590- buildqueue.job = buildpackagejob.job AND
2591- buildpackagejob.build = build.id AND
2592- build.distroarchseries = distroarchseries.id AND
2593- build.archive = archive.id AND
2594- archive.enabled = TRUE AND
2595- build.buildstate = %s AND
2596- distroarchseries.processorfamily = %s AND
2597- buildqueue.builder IS NULL
2598- """ % sqlvalues(
2599- private_statuses, BuildStatus.NEEDSBUILD, self.processor.family)]
2600-
2601- clauseTables = [
2602- 'Build', 'BuildPackageJob', 'DistroArchSeries', 'Archive']
2603-
2604- clauses.append("""
2605- archive.require_virtualized = %s
2606- """ % sqlvalues(self.virtualized))
2607-
2608- # Ensure that if BUILDING builds exist for the same
2609- # public ppa archive and architecture and another would not
2610- # leave at least 20% of them free, then we don't consider
2611- # another as a candidate.
2612- #
2613- # This clause selects the count of currently building builds on
2614- # the arch in question, then adds one to that total before
2615- # deriving a percentage of the total available builders on that
2616- # arch. It then makes sure that percentage is under 80.
2617- #
2618- # The extra clause is only used if the number of available
2619- # builders is greater than one, or nothing would get dispatched
2620- # at all.
2621- num_arch_builders = Builder.selectBy(
2622- processor=self.processor, manual=False, builderok=True).count()
2623- if num_arch_builders > 1:
2624- clauses.append("""
2625- EXISTS (SELECT true
2626- WHERE ((
2627- SELECT COUNT(build2.id)
2628- FROM Build build2, DistroArchSeries distroarchseries2
2629- WHERE
2630- build2.archive = build.archive AND
2631- archive.purpose = %s AND
2632- archive.private IS FALSE AND
2633- build2.distroarchseries = distroarchseries2.id AND
2634- distroarchseries2.processorfamily = %s AND
2635- build2.buildstate = %s) + 1::numeric)
2636- *100 / %s
2637- < 80)
2638- """ % sqlvalues(
2639- ArchivePurpose.PPA, self.processor.family,
2640- BuildStatus.BUILDING, num_arch_builders))
2641-
2642- query = " AND ".join(clauses)
2643- candidate = BuildQueue.selectFirst(
2644- query, clauseTables=clauseTables,
2645- orderBy=['-buildqueue.lastscore', 'build.id'])
2646-
2647- return candidate
2648-
2649 def _getSlaveScannerLogger(self):
2650 """Return the logger instance from buildd-slave-scanner.py."""
2651 # XXX cprov 20071120: Ideally the Launchpad logging system
2652@@ -524,52 +434,71 @@
2653 """Find a candidate job for dispatch to an idle buildd slave.
2654
2655 The pending BuildQueue item with the highest score for this builder
2656- ProcessorFamily or None if no candidate is available.
2657-
2658- For public PPA builds, subsequent builds for a given ppa and
2659- architecture will not be returned until the current build for
2660- the ppa and architecture is finished.
2661-
2662- :return: A binary build candidate job.
2663+ or None if no candidate is available.
2664+
2665+ :return: A candidate job.
2666 """
2667+ def qualify_subquery(job_type, sub_query):
2668+ """Put the sub-query into a job type context."""
2669+ qualified_query = """
2670+ ((BuildQueue.job_type != %s) OR EXISTS(%%s))
2671+ """ % sqlvalues(job_type)
2672+ qualified_query %= sub_query
2673+ return qualified_query
2674
2675 logger = self._getSlaveScannerLogger()
2676- candidate = self._findBinaryBuildCandidate()
2677-
2678- # Mark build records targeted to old source versions as SUPERSEDED
2679- # and build records target to SECURITY pocket as FAILEDTOBUILD.
2680- # Builds in those situation should not be built because they will
2681- # be wasting build-time, the former case already has a newer source
2682- # and the latter could not be built in DAK.
2683- build_set = getUtility(IBuildSet)
2684- while candidate is not None:
2685- build = build_set.getByQueueEntry(candidate)
2686- if build.pocket == PackagePublishingPocket.SECURITY:
2687- # We never build anything in the security pocket.
2688- logger.debug(
2689- "Build %s FAILEDTOBUILD, queue item %s REMOVED"
2690- % (build.id, candidate.id))
2691- build.buildstate = BuildStatus.FAILEDTOBUILD
2692- candidate.destroySelf()
2693- candidate = self._findBinaryBuildCandidate()
2694- continue
2695-
2696- publication = build.current_source_publication
2697-
2698- if publication is None:
2699- # The build should be superseded if it no longer has a
2700- # current publishing record.
2701- logger.debug(
2702- "Build %s SUPERSEDED, queue item %s REMOVED"
2703- % (build.id, candidate.id))
2704- build.buildstate = BuildStatus.SUPERSEDED
2705- candidate.destroySelf()
2706- candidate = self._findBinaryBuildCandidate()
2707- continue
2708-
2709- return candidate
2710-
2711- # No candidate was found.
2712+ candidate = None
2713+
2714+ general_query = """
2715+ SELECT buildqueue.id FROM buildqueue, job
2716+ WHERE
2717+ buildqueue.job = job.id
2718+ AND job.status = %s
2719+ AND (
2720+ -- The processor values either match or the candidate
2721+ -- job is processor-independent.
2722+ buildqueue.processor = %s OR
2723+ buildqueue.processor IS NULL)
2724+ AND (
2725+ -- The virtualized values either match or the candidate
2726+ -- job does not care about virtualization and the idle
2727+ -- builder *is* virtualized (the latter is a security
2728+ -- precaution preventing the execution of untrusted code
2729+ -- on native builders).
2730+ buildqueue.virtualized = %s OR
2731+ (buildqueue.virtualized IS NULL AND %s = TRUE))
2732+ AND buildqueue.builder IS NULL
2733+ """ % sqlvalues(
2734+ JobStatus.WAITING, self.processor, self.virtualized,
2735+ self.virtualized)
2736+ order_clause = " ORDER BY buildqueue.lastscore DESC, buildqueue.id"
2737+
2738+ extra_tables = set()
2739+ extra_queries = []
2740+ job_classes = specific_job_classes()
2741+ for job_type, job_class in job_classes.iteritems():
2742+ query = job_class.addCandidateSelectionCriteria(
2743+ self.processor, self.virtualized)
2744+ if query == '':
2745+ # This job class does not need to refine candidate jobs
2746+ # further.
2747+ continue
2748+
2749+ # The sub-query should only apply to jobs of the right type.
2750+ extra_queries.append(qualify_subquery(job_type, query))
2751+ query = ' AND '.join([general_query] + extra_queries) + order_clause
2752+
2753+ store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
2754+ candidate_jobs = store.execute(query).get_all()
2755+
2756+ for (candidate_id,) in candidate_jobs:
2757+ candidate = getUtility(IBuildQueueSet).get(candidate_id)
2758+ job_class = job_classes[candidate.job_type]
2759+ candidate_approved = job_class.postprocessCandidate(
2760+ candidate, logger)
2761+ if candidate_approved:
2762+ return candidate
2763+
2764 return None
2765
2766 def _dispatchBuildCandidate(self, candidate):
2767
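The generalised _findBuildCandidate above builds one general candidate query,
ANDs in optional per-job-type refinements from addCandidateSelectionCriteria(),
and gives each job class a final veto via postprocessCandidate(). Each
refinement is wrapped by qualify_subquery() so it only constrains rows of its
own job type. A standalone illustration of that wrapping step (the job-type
value and sub-query text are invented, and plain %s substitution stands in
for sqlvalues()):

    def qualify_subquery(job_type, sub_query):
        # Rows of other job types satisfy the first disjunct, so they are
        # unaffected by this job type's extra criteria.
        return "((BuildQueue.job_type != %s) OR EXISTS(%s))" % (
            job_type, sub_query)

    extra = qualify_subquery(
        2, "SELECT TRUE FROM ExampleJob WHERE ExampleJob.job = BuildQueue.job")
    # The result is then ANDed onto the general query:
    #   ... AND ((BuildQueue.job_type != 2) OR EXISTS(SELECT TRUE ...))
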
2768=== modified file 'lib/lp/buildmaster/model/buildfarmjob.py'
2769--- lib/lp/buildmaster/model/buildfarmjob.py 2010-01-15 09:49:29 +0000
2770+++ lib/lp/buildmaster/model/buildfarmjob.py 2010-01-21 17:51:27 +0000
2771@@ -12,13 +12,14 @@
2772 DEFAULT_FLAVOR, IStoreSelector, MAIN_STORE)
2773
2774 from lp.buildmaster.interfaces.buildfarmjob import (
2775- IBuildFarmJob, ISpecificBuildFarmJobClass)
2776+ IBuildFarmJob, IBuildFarmCandidateJobSelection,
2777+ ISpecificBuildFarmJobClass)
2778
2779
2780 class BuildFarmJob:
2781 """Mix-in class for `IBuildFarmJob` implementations."""
2782 implements(IBuildFarmJob)
2783- classProvides(ISpecificBuildFarmJobClass)
2784+ classProvides(IBuildFarmCandidateJobSelection, ISpecificBuildFarmJobClass)
2785
2786 def score(self):
2787 """See `IBuildFarmJob`."""
2788@@ -26,7 +27,7 @@
2789
2790 def getLogFileName(self):
2791 """See `IBuildFarmJob`."""
2792- raise NotImplementedError
2793+ return 'buildlog.txt'
2794
2795 def getName(self):
2796 """See `IBuildFarmJob`."""
2797@@ -58,12 +59,21 @@
2798 """See `IBuildFarmJob`."""
2799 return None
2800
2801+ @staticmethod
2802+ def addCandidateSelectionCriteria(processor, virtualized):
2803+ """See `IBuildFarmCandidateJobSelection`."""
2804+ return ('')
2805+
2806 @classmethod
2807 def getByJob(cls, job):
2808 """See `ISpecificBuildFarmJobClass`.
2809-
2810 This base implementation should work for most build farm job
2811 types, but some need to override it.
2812 """
2813 store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
2814 return store.find(cls, cls.job == job).one()
2815+
2816+ @staticmethod
2817+ def postprocessCandidate(job, logger):
2818+ """See `IBuildFarmCandidateJobSelection`."""
2819+ return True
2820
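BuildFarmJob now also carries default implementations of the candidate
selection hooks: no extra SQL and approve every candidate. A hypothetical
subclass showing how a specific job type would override them; the class name
and SQL fragment are illustrative only:

    from lp.buildmaster.model.buildfarmjob import BuildFarmJob

    class ExampleBuildFarmJob(BuildFarmJob):
        """Hypothetical job type with its own candidate-selection rules."""

        @staticmethod
        def addCandidateSelectionCriteria(processor, virtualized):
            # Extra SQL that Builder._findBuildCandidate() ANDs into its
            # general query (wrapped so it only applies to this job type).
            return """
                SELECT TRUE FROM ExampleJob
                WHERE ExampleJob.job = BuildQueue.job AND ExampleJob.ready
            """

        @staticmethod
        def postprocessCandidate(job, logger):
            # Final check once the BuildQueue row is loaded; returning
            # False makes the builder move on to the next candidate.
            return True
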
2821=== modified file 'lib/lp/buildmaster/model/buildfarmjobbehavior.py'
2822--- lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-01-14 03:36:00 +0000
2823+++ lib/lp/buildmaster/model/buildfarmjobbehavior.py 2010-01-21 17:51:26 +0000
2824@@ -9,22 +9,25 @@
2825
2826 __all__ = [
2827 'BuildFarmJobBehaviorBase',
2828- 'IdleBuildBehavior'
2829+ 'IdleBuildBehavior',
2830 ]
2831
2832 import logging
2833 import socket
2834 import xmlrpclib
2835
2836+from sqlobject import SQLObjectNotFound
2837 from zope.component import getUtility
2838 from zope.interface import implements
2839 from zope.security.proxy import removeSecurityProxy
2840
2841 from canonical import encoding
2842 from canonical.librarian.interfaces import ILibrarianClient
2843+from lp.buildmaster.interfaces.builder import CorruptBuildID
2844 from lp.buildmaster.interfaces.buildfarmjobbehavior import (
2845 BuildBehaviorMismatch, IBuildFarmJobBehavior)
2846 from lp.services.job.interfaces.job import JobStatus
2847+from lp.soyuz.interfaces.buildqueue import IBuildQueueSet
2848
2849
2850 class BuildFarmJobBehaviorBase:
2851@@ -56,6 +59,24 @@
2852 The default behavior is that we don't add any extra values."""
2853 return {}
2854
2855+ def verifySlaveBuildID(self, slave_build_id):
2856+ """See `IBuildFarmJobBehavior`."""
2857+ # Extract information from the identifier.
2858+ try:
2859+ build_id, queue_item_id = slave_build_id.split('-')
2860+ build_id = int(build_id)
2861+ queue_item_id = int(queue_item_id)
2862+ except ValueError:
2863+ raise CorruptBuildID('Malformed build ID')
2864+
2865+ try:
2866+ queue_item = getUtility(IBuildQueueSet).get(queue_item_id)
2867+ # Check whether build and buildqueue are properly related.
2868+ except SQLObjectNotFound, reason:
2869+ raise CorruptBuildID(str(reason))
2870+ if queue_item.specific_job.build.id != build_id:
2871+ raise CorruptBuildID('Job build entry mismatch')
2872+
2873 def updateBuild(self, queueItem):
2874 """See `IBuildFarmJobBehavior`."""
2875 logger = logging.getLogger('slave-scanner')
2876@@ -193,3 +214,7 @@
2877 def status(self):
2878 """See `IBuildFarmJobBehavior`."""
2879 return "Idle"
2880+
2881+ def verifySlaveBuildID(self, slave_build_id):
2882+ """See `IBuildFarmJobBehavior`."""
2883+ raise AssertionError('Should not be called for an idle builder.')
2884
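The base verifySlaveBuildID above expects slave build identifiers of the form
'<build id>-<buildqueue id>' and raises CorruptBuildID otherwise. A rough
standalone sketch of just the parsing step, with a plain ValueError standing
in for CorruptBuildID and invented identifiers:

    def parse_slave_build_id(slave_build_id):
        """Split '<build id>-<buildqueue id>' into two ints."""
        try:
            build_id, queue_item_id = slave_build_id.split('-')
            return int(build_id), int(queue_item_id)
        except ValueError:
            # The real code raises CorruptBuildID here.
            raise ValueError('Malformed build ID')

    print parse_slave_build_id('8-1')    # (8, 1)
    # parse_slave_build_id('8')          # raises: Malformed build ID
    # parse_slave_build_id('8-x')        # raises: Malformed build ID
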
2885=== added file 'lib/lp/buildmaster/model/packagebuildfarmjob.py'
2886--- lib/lp/buildmaster/model/packagebuildfarmjob.py 1970-01-01 00:00:00 +0000
2887+++ lib/lp/buildmaster/model/packagebuildfarmjob.py 2010-01-21 17:51:26 +0000
2888@@ -0,0 +1,30 @@
2889+# Copyright 2010 Canonical Ltd. This software is licensed under the
2890+# GNU Affero General Public License version 3 (see the file LICENSE).
2891+
2892+__metaclass__ = type
2893+__all__ = ['PackageBuildFarmJob']
2894+
2895+
2896+from canonical.database.constants import UTC_NOW
2897+
2898+from lp.buildmaster.model.buildfarmjob import BuildFarmJob
2899+from lp.soyuz.interfaces.build import BuildStatus
2900+
2901+
2902+class PackageBuildFarmJob(BuildFarmJob):
2903+ """Mix-in class for `IBuildFarmJob` implementations for package builds."""
2904+
2905+ def jobStarted(self):
2906+ """See `IBuildFarmJob`."""
2907+ self.build.buildstate = BuildStatus.BUILDING
2908+ # The build started, set the start time if not set already.
2909+ if self.build.date_first_dispatched is None:
2910+ self.build.date_first_dispatched = UTC_NOW
2911+
2912+ def jobReset(self):
2913+ """See `IBuildFarmJob`."""
2914+ self.build.buildstate = BuildStatus.NEEDSBUILD
2915+
2916+ def jobAborted(self):
2917+ """See `IBuildFarmJob`."""
2918+ self.build.buildstate = BuildStatus.BUILDING
2919
2920=== modified file 'lib/lp/buildmaster/tests/test_builder.py'
2921--- lib/lp/buildmaster/tests/test_builder.py 2010-01-12 20:26:34 +0000
2922+++ lib/lp/buildmaster/tests/test_builder.py 2010-01-21 17:51:27 +0000
2923@@ -8,16 +8,22 @@
2924 from zope.component import getUtility
2925 from zope.security.proxy import removeSecurityProxy
2926
2927+from canonical.launchpad.webapp.interfaces import (
2928+ IStoreSelector, MAIN_STORE, DEFAULT_FLAVOR)
2929 from canonical.testing import LaunchpadZopelessLayer
2930+from lp.buildmaster.interfaces.builder import IBuilderSet
2931+from lp.buildmaster.interfaces.buildfarmjob import BuildFarmJobType
2932 from lp.buildmaster.interfaces.buildfarmjobbehavior import (
2933 IBuildFarmJobBehavior)
2934 from lp.buildmaster.model.buildfarmjobbehavior import IdleBuildBehavior
2935 from lp.soyuz.interfaces.archive import ArchivePurpose
2936 from lp.soyuz.interfaces.build import BuildStatus, IBuildSet
2937-from lp.buildmaster.interfaces.builder import IBuilderSet
2938 from lp.soyuz.interfaces.publishing import PackagePublishingStatus
2939+from lp.soyuz.interfaces.sourcepackagerecipebuild import (
2940+ ISourcePackageRecipeBuildSource)
2941 from lp.soyuz.model.binarypackagebuildbehavior import (
2942 BinaryPackageBuildBehavior)
2943+from lp.soyuz.model.buildqueue import BuildQueue
2944 from lp.soyuz.tests.test_publishing import SoyuzTestPublisher
2945 from lp.testing import TestCaseWithFactory
2946
2947@@ -194,15 +200,15 @@
2948 super(TestFindBuildCandidateDistroArchive, self).setUp()
2949 # Create a primary archive and publish some builds for the
2950 # queue.
2951- non_ppa = self.factory.makeArchive(
2952+ self.non_ppa = self.factory.makeArchive(
2953 name="primary", purpose=ArchivePurpose.PRIMARY)
2954
2955- gedit_build = self.publisher.getPubSource(
2956+ self.gedit_build = self.publisher.getPubSource(
2957 sourcename="gedit", status=PackagePublishingStatus.PUBLISHED,
2958- archive=non_ppa).createMissingBuilds()[0]
2959- firefox_build = self.publisher.getPubSource(
2960+ archive=self.non_ppa).createMissingBuilds()[0]
2961+ self.firefox_build = self.publisher.getPubSource(
2962 sourcename="firefox", status=PackagePublishingStatus.PUBLISHED,
2963- archive=non_ppa).createMissingBuilds()[0]
2964+ archive=self.non_ppa).createMissingBuilds()[0]
2965
2966 def test_findBuildCandidate_for_non_ppa(self):
2967 # Normal archives are not restricted to serial builds per
2968@@ -224,6 +230,60 @@
2969 self.failUnlessEqual('primary', build.archive.name)
2970 self.failUnlessEqual('firefox', build.sourcepackagerelease.name)
2971
2972+ def test_findBuildCandidate_for_recipe_build(self):
2973+ # Recipe builds with a higher score are selected first.
2974+ # This test is run in a context with mixed recipe and binary builds.
2975+
2976+ self.assertIsNot(self.frog_builder.processor, None)
2977+ self.assertEqual(self.frog_builder.virtualized, True)
2978+
2979+ self.assertEqual(self.gedit_build.buildqueue_record.lastscore, 2505)
2980+ self.assertEqual(self.firefox_build.buildqueue_record.lastscore, 2505)
2981+
2982+ recipe_build_job = self.factory.makeSourcePackageRecipeBuildJob(9999)
2983+
2984+ self.assertEqual(recipe_build_job.lastscore, 9999)
2985+
2986+ next_job = removeSecurityProxy(
2987+ self.frog_builder)._findBuildCandidate()
2988+
2989+ self.failUnlessEqual(recipe_build_job, next_job)
2990+
2991+
2992+class TestFindRecipeBuildCandidates(TestFindBuildCandidateBase):
2993+ # These tests operate in a "recipe builds only" setting.
2994+ # Please see also bug #507782.
2995+
2996+ def clearBuildQueue(self):
2997+ """Delete all `BuildQueue`, XXXJob and `Job` instances."""
2998+ store = getUtility(IStoreSelector).get(MAIN_STORE, DEFAULT_FLAVOR)
2999+ for bq in store.find(BuildQueue):
3000+ bq.destroySelf()
3001+
3002+ def setUp(self):
3003+ """Publish some builds for the test archive."""
3004+ super(TestFindRecipeBuildCandidates, self).setUp()
3005+ # Create a primary archive and publish some builds for the
3006+ # queue.
3007+ self.non_ppa = self.factory.makeArchive(
3008+ name="primary", purpose=ArchivePurpose.PRIMARY)
3009+
3010+ self.clearBuildQueue()
3011+ self.bq1 = self.factory.makeSourcePackageRecipeBuildJob(3333)
3012+ self.bq2 = self.factory.makeSourcePackageRecipeBuildJob(4333)
3013+
3014+ def test_findBuildCandidate_with_highest_score(self):
3015+ # The recipe build with the highest score is selected first.
3016+ # This test is run in a "recipe builds only" context.
3017+
3018+ self.assertIsNot(self.frog_builder.processor, None)
3019+ self.assertEqual(self.frog_builder.virtualized, True)
3020+
3021+ next_job = removeSecurityProxy(
3022+ self.frog_builder)._findBuildCandidate()
3023+
3024+ self.failUnlessEqual(self.bq2, next_job)
3025+
3026
3027 class TestCurrentBuildBehavior(TestCaseWithFactory):
3028 """This test ensures the get/set behavior of IBuilder's
3029
3030=== modified file 'lib/lp/buildmaster/tests/test_manager.py'
3031--- lib/lp/buildmaster/tests/test_manager.py 2010-01-19 17:36:34 +0000
3032+++ lib/lp/buildmaster/tests/test_manager.py 2010-01-21 17:51:26 +0000
3033@@ -505,12 +505,14 @@
3034
3035 def assertBuildingJob(self, job, builder, logtail=None):
3036 """Assert the given job is building on the given builder."""
3037+ from lp.services.job.interfaces.job import JobStatus
3038 if logtail is None:
3039 logtail = 'Dummy sampledata entry, not processing'
3040
3041 self.assertTrue(job is not None)
3042 self.assertEqual(job.builder, builder)
3043 self.assertTrue(job.date_started is not None)
3044+ self.assertEqual(job.job.status, JobStatus.RUNNING)
3045 build = getUtility(IBuildSet).getByQueueEntry(job)
3046 self.assertEqual(build.buildstate, BuildStatus.BUILDING)
3047 self.assertEqual(job.logtail, logtail)
3048
3049=== modified file 'lib/lp/code/browser/branch.py'
3050--- lib/lp/code/browser/branch.py 2010-01-21 09:04:52 +0000
3051+++ lib/lp/code/browser/branch.py 2010-01-21 17:51:26 +0000
3052@@ -513,8 +513,8 @@
3053 """True if an imported branch's SVN URL is HTTP or HTTPS."""
3054 # You should only be calling this if it's an SVN code import
3055 assert self.context.code_import
3056- assert self.context.code_import.svn_branch_url
3057- url = self.context.code_import.svn_branch_url
3058+ url = self.context.code_import.url
3059+ assert url
3060 # https starts with http too!
3061 return url.startswith("http")
3062
3063
3064=== modified file 'lib/lp/code/browser/codeimport.py'
3065--- lib/lp/code/browser/codeimport.py 2010-01-14 01:48:19 +0000
3066+++ lib/lp/code/browser/codeimport.py 2010-01-21 17:51:26 +0000
3067@@ -15,7 +15,6 @@
3068 'CodeImportView',
3069 ]
3070
3071-from cgi import escape
3072
3073 from BeautifulSoup import BeautifulSoup
3074 from zope.app.form import CustomWidgetFactory
3075@@ -24,16 +23,16 @@
3076 from zope.component import getUtility
3077 from zope.formlib import form
3078 from zope.interface import Interface
3079-from zope.schema import Choice, TextLine
3080+from zope.schema import Choice
3081
3082 from canonical.cachedproperty import cachedproperty
3083 from canonical.launchpad import _
3084+from canonical.launchpad.fields import URIField
3085 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
3086 from lp.code.enums import (
3087 BranchSubscriptionDiffSize, BranchSubscriptionNotificationLevel,
3088 CodeImportReviewStatus, CodeReviewNotificationLevel,
3089 RevisionControlSystems)
3090-from lp.code.interfaces.branch import branch_name_validator
3091 from lp.code.interfaces.branchnamespace import (
3092 get_branch_namespace, IBranchNamespacePolicy)
3093 from lp.code.interfaces.codeimport import (
3094@@ -129,8 +128,7 @@
3095
3096 custom_widget('cvs_root', StrippedTextWidget, displayWidth=50)
3097 custom_widget('cvs_module', StrippedTextWidget, displayWidth=20)
3098- custom_widget('svn_branch_url', URIWidget, displayWidth=50)
3099- custom_widget('git_repo_url', URIWidget, displayWidth=50)
3100+ custom_widget('url', URIWidget, displayWidth=50)
3101
3102 @cachedproperty
3103 def _super_user(self):
3104@@ -174,55 +172,84 @@
3105 canonical_url(code_import.branch),
3106 code_import.branch.unique_name))
3107
3108- def _validateSVN(self, svn_branch_url, existing_import=None):
3109- """If the user has specified a subversion url, we need
3110- to make sure that there isn't already an import with
3111- that url."""
3112- if svn_branch_url is None:
3113+ def _validateURL(self, url, existing_import=None, field_name='url'):
3114+ """If the user has specified a url, we need to make sure that there
3115+ isn't already an import with that url."""
3116+ if url is None:
3117 self.setSecondaryFieldError(
3118- 'svn_branch_url', 'Enter the URL of a Subversion branch.')
3119+ field_name, 'Enter the URL of a foreign VCS branch.')
3120 else:
3121- code_import = getUtility(ICodeImportSet).getBySVNDetails(
3122- svn_branch_url)
3123+ code_import = getUtility(ICodeImportSet).getByURL(url)
3124 if (code_import is not None and
3125 code_import != existing_import):
3126 self.setFieldError(
3127- 'svn_branch_url',
3128+ field_name,
3129 structured("""
3130- This Subversion branch URL is already specified for
3131+ This foreign branch URL is already specified for
3132 the imported branch <a href="%s">%s</a>.""",
3133 canonical_url(code_import.branch),
3134 code_import.branch.unique_name))
3135
3136- def _validateGit(self, git_repo_url, existing_import=None):
3137- """If the user has specified a git repo url, we need
3138- to make sure that there isn't already an import with
3139- that url."""
3140- if git_repo_url is None:
3141- self.setSecondaryFieldError(
3142- 'git_repo_url', 'Enter the URL of a Git repo.')
3143- else:
3144- code_import = getUtility(ICodeImportSet).getByGitDetails(
3145- git_repo_url)
3146- if (code_import is not None and
3147- code_import != existing_import):
3148- self.setFieldError(
3149- 'git_repo_url',
3150- structured("""
3151- This Git repository URL is already specified for
3152- the imported branch <a href="%s">%s</a>.""",
3153- escape(canonical_url(code_import.branch)),
3154- escape(code_import.branch.unique_name)))
3155+
3156+
3157+class NewCodeImportForm(Interface):
3158+ """The fields presented on the form for editing a code import."""
3159+
3160+ use_template(
3161+ ICodeImport,
3162+ ['product', 'rcs_type', 'cvs_root', 'cvs_module'])
3163+
3164+ svn_branch_url = URIField(
3165+ title=_("Branch URL"), required=False,
3166+ description=_(
3167+ "The URL of a Subversion branch, starting with svn:// or"
3168+ " http(s)://. Only trunk branches are imported."),
3169+ allowed_schemes=["http", "https", "svn"],
3170+ allow_userinfo=False,
3171+ allow_port=True,
3172+ allow_query=False,
3173+ allow_fragment=False,
3174+ trailing_slash=False)
3175+
3176+ git_repo_url = URIField(
3177+ title=_("Repo URL"), required=False,
3178+ description=_(
3179+ "The URL of the git repository. The HEAD branch will be "
3180+ "imported."),
3181+ allowed_schemes=["git"],
3182+ allow_userinfo=False, # Only anonymous access is supported.
3183+ allow_port=True,
3184+ allow_query=False,
3185+ allow_fragment=False,
3186+ trailing_slash=False)
3187+
3188+ hg_repo_url = URIField(
3189+ title=_("Repo URL"), required=False,
3190+ description=_(
3191+ "The URL of the Mercurial repository. The tip branch will be "
3192+ "imported."),
3193+ allowed_schemes=["http", "https"],
3194+ allow_userinfo=False, # Only anonymous access is supported.
3195+ allow_port=True,
3196+ allow_query=False, # Query makes no sense in Mercurial
3197+ allow_fragment=False, # Fragment makes no sense in Mercurial
3198+ trailing_slash=False) # See http://launchpad.net/bugs/56357.
3199+
3200+ branch_name = copy_field(
3201+ IBranch['name'],
3202+ __name__='branch_name',
3203+ title=_('Branch Name'),
3204+ description=_(
3205+ "This will be used in the branch URL to identify the "
3206+ "imported branch. Examples: main, trunk."),
3207+ )
3208
3209
3210 class CodeImportNewView(CodeImportBaseView):
3211 """The view to request a new code import."""
3212
3213+ schema = NewCodeImportForm
3214 for_input = True
3215- field_names = [
3216- 'product', 'rcs_type', 'svn_branch_url', 'cvs_root', 'cvs_module',
3217- 'git_repo_url',
3218- ]
3219
3220 custom_widget('rcs_type', LaunchpadRadioWidget)
3221
3222@@ -252,17 +279,6 @@
3223 if self.context_is_product:
3224 self.form_fields = self.form_fields.omit('product')
3225
3226- # Add in the field for the branch name.
3227- name_field = form.Fields(
3228- TextLine(
3229- __name__='branch_name',
3230- title=_('Branch Name'), required=True, description=_(
3231- "This will be used in the branch URL to identify the "
3232- "imported branch. Examples: main, trunk."),
3233- constraint=branch_name_validator),
3234- render_context=self.render_context)
3235- self.form_fields = self.form_fields + name_field
3236-
3237 def setUpWidgets(self):
3238 CodeImportBaseView.setUpWidgets(self)
3239
3240@@ -270,31 +286,48 @@
3241 # display them separately in the form.
3242 soup = BeautifulSoup(self.widgets['rcs_type']())
3243 fields = soup.findAll('input')
3244- [cvs_button, svn_button, git_button, empty_marker] = [
3245+ [cvs_button, svn_button, git_button, hg_button, empty_marker] = [
3246 field for field in fields
3247- if field.get('value') in ['CVS', 'BZR_SVN', 'GIT', '1']]
3248+ if field.get('value') in ['CVS', 'BZR_SVN', 'GIT', 'HG', '1']]
3249 cvs_button['onclick'] = 'updateWidgets()'
3250 svn_button['onclick'] = 'updateWidgets()'
3251 git_button['onclick'] = 'updateWidgets()'
3252+ hg_button['onclick'] = 'updateWidgets()'
3253 # The following attributes are used only in the page template.
3254 self.rcs_type_cvs = str(cvs_button)
3255 self.rcs_type_svn = str(svn_button)
3256 self.rcs_type_git = str(git_button)
3257+ self.rcs_type_hg = str(hg_button)
3258 self.rcs_type_emptymarker = str(empty_marker)
3259
3260+ def _getImportLocation(self, data):
3261+ """Return the import location based on type."""
3262+ rcs_type = data['rcs_type']
3263+ if rcs_type == RevisionControlSystems.CVS:
3264+ return data.get('cvs_root'), data.get('cvs_module'), None
3265+ elif rcs_type == RevisionControlSystems.BZR_SVN:
3266+ return None, None, data.get('svn_branch_url')
3267+ elif rcs_type == RevisionControlSystems.GIT:
3268+ return None, None, data.get('git_repo_url')
3269+ elif rcs_type == RevisionControlSystems.HG:
3270+ return None, None, data.get('hg_repo_url')
3271+ else:
3272+ raise AssertionError(
3273+ 'Unexpected revision control type %r.' % rcs_type)
3274+
3275 def _create_import(self, data, status):
3276 """Create the code import."""
3277 product = self.getProduct(data)
3278+ cvs_root, cvs_module, url = self._getImportLocation(data)
3279 return getUtility(ICodeImportSet).new(
3280 registrant=self.user,
3281 product=product,
3282 branch_name=data['branch_name'],
3283 rcs_type=data['rcs_type'],
3284- svn_branch_url=data['svn_branch_url'],
3285- cvs_root=data['cvs_root'],
3286- cvs_module=data['cvs_module'],
3287- review_status=status,
3288- git_repo_url=data['git_repo_url'])
3289+ url=url,
3290+ cvs_root=cvs_root,
3291+ cvs_module=cvs_module,
3292+ review_status=status)
3293
3294 def _setBranchExists(self, existing_branch):
3295 """Set a field error indicating that the branch already exists."""
3296@@ -379,19 +412,16 @@
3297 # Make sure fields for unselected revision control systems
3298 # are blanked out:
3299 if rcs_type == RevisionControlSystems.CVS:
3300- data['svn_branch_url'] = None
3301- data['git_repo_url'] = None
3302 self._validateCVS(data.get('cvs_root'), data.get('cvs_module'))
3303 elif rcs_type == RevisionControlSystems.BZR_SVN:
3304- data['cvs_root'] = None
3305- data['cvs_module'] = None
3306- data['git_repo_url'] = None
3307- self._validateSVN(data.get('svn_branch_url'))
3308+ self._validateURL(
3309+ data.get('svn_branch_url'), field_name='svn_branch_url')
3310 elif rcs_type == RevisionControlSystems.GIT:
3311- data['cvs_root'] = None
3312- data['cvs_module'] = None
3313- data['svn_branch_url'] = None
3314- self._validateGit(data.get('git_repo_url'))
3315+ self._validateURL(
3316+ data.get('git_repo_url'), field_name='git_repo_url')
3317+ elif rcs_type == RevisionControlSystems.HG:
3318+ self._validateURL(
3319+ data.get('hg_repo_url'), field_name='hg_repo_url')
3320 else:
3321 raise AssertionError(
3322 'Unexpected revision control type %r.' % rcs_type)
3323@@ -402,7 +432,7 @@
3324
3325 use_template(
3326 ICodeImport,
3327- ['svn_branch_url', 'cvs_root', 'cvs_module', 'git_repo_url'])
3328+ ['url', 'cvs_root', 'cvs_module'])
3329 whiteboard = copy_field(IBranch['whiteboard'])
3330
3331
3332@@ -479,15 +509,13 @@
3333 # If the import is a Subversion import, then omit the CVS
3334 # fields, and vice versa.
3335 if self.code_import.rcs_type == RevisionControlSystems.CVS:
3336- self.form_fields = self.form_fields.omit(
3337- 'svn_branch_url', 'git_repo_url')
3338+ self.form_fields = self.form_fields.omit('url')
3339 elif self.code_import.rcs_type in (RevisionControlSystems.SVN,
3340- RevisionControlSystems.BZR_SVN):
3341- self.form_fields = self.form_fields.omit(
3342- 'cvs_root', 'cvs_module', 'git_repo_url')
3343- elif self.code_import.rcs_type == RevisionControlSystems.GIT:
3344- self.form_fields = self.form_fields.omit(
3345- 'cvs_root', 'cvs_module', 'svn_branch_url')
3346+ RevisionControlSystems.BZR_SVN,
3347+ RevisionControlSystems.GIT,
3348+ RevisionControlSystems.HG):
3349+ self.form_fields = self.form_fields.omit(
3350+ 'cvs_root', 'cvs_module')
3351 else:
3352 raise AssertionError('Unknown rcs_type for code import.')
3353
3354@@ -518,12 +546,10 @@
3355 data.get('cvs_root'), data.get('cvs_module'),
3356 self.code_import)
3357 elif self.code_import.rcs_type in (RevisionControlSystems.SVN,
3358- RevisionControlSystems.BZR_SVN):
3359- self._validateSVN(
3360- data.get('svn_branch_url'), self.code_import)
3361- elif self.code_import.rcs_type == RevisionControlSystems.GIT:
3362- self._validateGit(
3363- data.get('git_repo_url'), self.code_import)
3364+ RevisionControlSystems.BZR_SVN,
3365+ RevisionControlSystems.GIT,
3366+ RevisionControlSystems.HG):
3367+ self._validateURL(data.get('url'), self.code_import)
3368 else:
3369 raise AssertionError('Unknown rcs_type for code import.')
3370
3371
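The view changes above fold the per-VCS URL fields (svn_branch_url,
git_repo_url, hg_repo_url) into the single CodeImport.url attribute via
_getImportLocation(). A standalone illustration of that mapping, using plain
strings in place of the RevisionControlSystems enum:

    def get_import_location(rcs_type, data):
        """Return (cvs_root, cvs_module, url) for the chosen system."""
        url_fields = {
            'BZR_SVN': 'svn_branch_url',
            'GIT': 'git_repo_url',
            'HG': 'hg_repo_url',
        }
        if rcs_type == 'CVS':
            return data.get('cvs_root'), data.get('cvs_module'), None
        elif rcs_type in url_fields:
            return None, None, data.get(url_fields[rcs_type])
        else:
            raise AssertionError(
                'Unexpected revision control type %r.' % rcs_type)

    location = get_import_location(
        'HG', {'hg_repo_url': 'http://hg.example.org/main'})
    print location
    # (None, None, 'http://hg.example.org/main')
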
3372=== modified file 'lib/lp/code/browser/codereviewvote.py'
3373--- lib/lp/code/browser/codereviewvote.py 2009-12-10 20:46:32 +0000
3374+++ lib/lp/code/browser/codereviewvote.py 2010-01-21 17:51:26 +0000
3375@@ -1,20 +1,18 @@
3376 # Copyright 2009 Canonical Ltd. This software is licensed under the
3377 # GNU Affero General Public License version 3 (see the file LICENSE).
3378
3379-
3380 """Views, navigation and actions for CodeReviewVotes."""
3381
3382-
3383 __metaclass__ = type
3384
3385
3386 from zope.interface import Interface
3387-from zope.security.proxy import removeSecurityProxy
3388
3389 from canonical.launchpad import _
3390 from canonical.launchpad.fields import PublicPersonChoice
3391 from canonical.launchpad.webapp import (
3392 action, canonical_url, LaunchpadFormView)
3393+from lp.code.errors import ReviewNotPending, UserHasExistingReview
3394
3395
3396 class ReassignSchema(Interface):
3397@@ -35,8 +33,14 @@
3398 @action('Reassign', name='reassign')
3399 def reassign_action(self, action, data):
3400 """Use the form data to change the review request reviewer."""
3401- # XXX TimPenhey 2009-12-11 bug=495201
3402- # This should check for existing reviews by the reviewer, and have
3403- # the logic moved into the model code.
3404- removeSecurityProxy(self.context).reviewer = data['reviewer']
3405+ self.context.reassignReview(data['reviewer'])
3406 self.next_url = canonical_url(self.context.branch_merge_proposal)
3407+
3408+ def validate(self, data):
3409+ """Make sure that the reassignment can happen."""
3410+ reviewer = data.get('reviewer')
3411+ if reviewer is not None:
3412+ try:
3413+ self.context.validateReasignReview(reviewer)
3414+ except (ReviewNotPending, UserHasExistingReview), e:
3415+ self.addError(str(e))
3416
3417=== modified file 'lib/lp/code/configure.zcml'
3418--- lib/lp/code/configure.zcml 2010-01-21 09:00:33 +0000
3419+++ lib/lp/code/configure.zcml 2010-01-21 17:51:26 +0000
3420@@ -750,8 +750,7 @@
3421 rcs_type
3422 cvs_root
3423 cvs_module
3424- svn_branch_url
3425- git_repo_url
3426+ url
3427 date_last_successful
3428 source_product_series
3429 update_interval
3430@@ -903,6 +902,15 @@
3431 <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
3432 </class>
3433
3434+ <securedutility
3435+ component="lp.code.model.branchjob.BranchScanJob"
3436+ provides="lp.code.interfaces.branchjob.IBranchScanJobSource">
3437+ <allow interface="lp.code.interfaces.branchjob.IBranchScanJobSource"/>
3438+ </securedutility>
3439+ <class class="lp.code.model.branchjob.BranchScanJob">
3440+ <allow interface="lp.services.job.interfaces.job.IRunnableJob" />
3441+ </class>
3442+
3443 <!-- Linked branches -->
3444 <adapter factory="lp.code.model.linkedbranch.ProductSeriesLinkedBranch" />
3445 <adapter factory="lp.code.model.linkedbranch.ProductLinkedBranch" />
3446
3447=== modified file 'lib/lp/code/doc/branch.txt'
3448--- lib/lp/code/doc/branch.txt 2009-12-10 04:38:46 +0000
3449+++ lib/lp/code/doc/branch.txt 2010-01-21 17:51:27 +0000
3450@@ -601,6 +601,8 @@
3451 productseries.branch
3452 productseries.translations_branch
3453 seriessourcepackagebranch.branch
3454+ sourcepackagerecipedata.base_branch
3455+ sourcepackagerecipedatainstruction.branch
3456 specificationbranch.branch
3457
3458 (Unfortunately, references can form a cycle-- note that codereviewcomments
3459
3460=== modified file 'lib/lp/code/doc/codeimport-event.txt'
3461--- lib/lp/code/doc/codeimport-event.txt 2009-07-01 13:16:44 +0000
3462+++ lib/lp/code/doc/codeimport-event.txt 2010-01-21 17:51:26 +0000
3463@@ -95,22 +95,11 @@
3464
3465 >>> nopriv = getUtility(IPersonSet).getByName('no-priv')
3466
3467- >>> def new_code_import(name, **kw_details):
3468- ... vcs_imports = getUtility(ILaunchpadCelebrities).vcs_imports
3469- ... product = getUtility(IProductSet).getByName('firefox')
3470- ... namespace = get_branch_namespace(vcs_imports, product=product)
3471- ... import_branch = namespace.createBranch(
3472- ... BranchType.IMPORTED, name, vcs_imports, title='Import branch')
3473- ... return CodeImport(
3474- ... registrant=nopriv, owner=nopriv, branch=import_branch,
3475- ... **kw_details)
3476-
3477 First we create a Subversion import.
3478
3479 >>> from lp.code.enums import RevisionControlSystems
3480- >>> svn_url = 'svn://svn.example.com/trunk'
3481- >>> svn_import = new_code_import('svn-trunk',
3482- ... rcs_type=RevisionControlSystems.SVN, svn_branch_url=svn_url)
3483+ >>> svn_import = factory.makeCodeImport(
3484+ ... svn_branch_url='svn://svn.example.com/trunk')
3485
3486 CodeImportSet.newCreate creates an event from the new CodeImport object
3487 and the person that created it. Here, the creator is the nopriv user.
3488@@ -131,11 +120,11 @@
3489
3490 >>> print_items(svn_create_event)
3491 CODE_IMPORT <muted>
3492- OWNER u'52'
3493+ OWNER ...
3494 REVIEW_STATUS u'NEW'
3495 ASSIGNEE None
3496 UPDATE_INTERVAL None
3497- SVN_BRANCH_URL u'svn://svn.example.com/trunk'
3498+ URL u'svn://svn.example.com/trunk'
3499
3500 The database IDs of the CodeImport is also recorded. It is useful to
3501 collate events associated with deleted CodeImport objects.
3502@@ -149,14 +138,13 @@
3503 import source. For a CVS import, CVS details are recorded instead of the
3504 Subversion URL.
3505
3506- >>> cvs_import = new_code_import('cvs-main',
3507- ... rcs_type=RevisionControlSystems.CVS,
3508+ >>> cvs_import = factory.makeCodeImport(
3509 ... cvs_root=':pserver:anonymous@cvs.example.com:/cvsroot',
3510 ... cvs_module='hello')
3511 >>> cvs_create_event = event_set.newCreate(cvs_import, nopriv)
3512 >>> print_items(cvs_create_event)
3513 CODE_IMPORT <muted>
3514- OWNER u'52'
3515+ OWNER ...
3516 REVIEW_STATUS u'NEW'
3517 ASSIGNEE None
3518 UPDATE_INTERVAL None
3519@@ -165,17 +153,31 @@
3520
3521 And for a Git import, the git details are recorded.
3522
3523- >>> git_import = new_code_import('git-main',
3524- ... rcs_type=RevisionControlSystems.GIT,
3525+ >>> git_import = factory.makeCodeImport(
3526 ... git_repo_url='git://git.example.org/main.git')
3527 >>> git_create_event = event_set.newCreate(git_import, nopriv)
3528 >>> print_items(git_create_event)
3529 CODE_IMPORT <muted>
3530- OWNER u'52'
3531- REVIEW_STATUS u'NEW'
3532- ASSIGNEE None
3533- UPDATE_INTERVAL None
3534- GIT_REPO_URL u'git://git.example.org/main.git'
3535+ OWNER ...
3536+ REVIEW_STATUS u'REVIEWED'
3537+ ASSIGNEE None
3538+ UPDATE_INTERVAL None
3539+ URL u'git://git.example.org/main.git'
3540+
3541+
3542+And for a Mercurial import, the hg details are recorded.
3543+
3544+ >>> hg_import = factory.makeCodeImport(
3545+ ... hg_repo_url='http://hg.example.org/main')
3546+ >>> hg_create_event = event_set.newCreate(hg_import, nopriv)
3547+ >>> print_items(hg_create_event)
3548+ CODE_IMPORT <muted>
3549+ OWNER ...
3550+ REVIEW_STATUS u'REVIEWED'
3551+ ASSIGNEE None
3552+ UPDATE_INTERVAL None
3553+ URL u'http://hg.example.org/main'
3554+
3555
3556 == MODIFY ==
3557
3558@@ -204,7 +206,8 @@
3559 Then changes can be applied.
3560
3561 >>> from lp.code.enums import CodeImportReviewStatus
3562- >>> svn_import.review_status = CodeImportReviewStatus.REVIEWED
3563+ >>> removeSecurityProxy(svn_import).review_status = (
3564+ ... CodeImportReviewStatus.REVIEWED)
3565
3566 After applying changes, the newModify method can create an event that
3567 details the changes that have been applied.
3568@@ -229,12 +232,12 @@
3569
3570 >>> print_items(modify_event)
3571 CODE_IMPORT <muted>
3572- OWNER u'52'
3573+ OWNER ...
3574 REVIEW_STATUS u'REVIEWED'
3575 OLD_REVIEW_STATUS u'NEW'
3576 ASSIGNEE None
3577 UPDATE_INTERVAL None
3578- SVN_BRANCH_URL u'svn://svn.example.com/trunk'
3579+ URL u'svn://svn.example.com/trunk'
3580
3581 If no change of interest is found, no event is created.
3582
3583@@ -245,55 +248,6 @@
3584 >>> len(list(event_set.getAll())) == old_event_set_len
3585 True
3586
3587-In most events, only the source details for the selected version control
3588-system are recorded. If the rcs_type changes, we record old and new
3589-values for all changed attributes to explicitly represent transitions to
3590-None and from None.
3591-
3592- >>> token = event_set.beginModify(cvs_import)
3593- >>> cvs_import.rcs_type = RevisionControlSystems.SVN
3594- >>> cvs_import.svn_branch_url = u'svn://svn.example.com/from-cvs'
3595- >>> cvs_import.cvs_root = None
3596- >>> cvs_import.cvs_module = None
3597- >>> modify_event = event_set.newModify(cvs_import, nopriv, token)
3598- >>> print_items(modify_event)
3599- CODE_IMPORT <muted>
3600- OWNER u'52'
3601- REVIEW_STATUS u'NEW'
3602- ASSIGNEE None
3603- UPDATE_INTERVAL None
3604- CVS_ROOT None
3605- CVS_MODULE None
3606- OLD_CVS_ROOT u':pserver:anonymous@cvs.example.com:/cvsroot'
3607- OLD_CVS_MODULE u'hello'
3608- SVN_BRANCH_URL u'svn://svn.example.com/from-cvs'
3609- OLD_SVN_BRANCH_URL None
3610-
3611-Aside from source details changes, MODIFY events can record changes to
3612-the owner, the review_status, the assignee, and the update_interval of a
3613-code import.
3614-
3615- >>> from datetime import timedelta
3616- >>> sample_owner = getUtility(IPersonSet).getByName('name12')
3617- >>> sample_assignee = getUtility(IPersonSet).getByName('ddaa')
3618- >>> token = event_set.beginModify(svn_import)
3619- >>> svn_import.owner = sample_owner
3620- >>> svn_import.review_status = CodeImportReviewStatus.SUSPENDED
3621- >>> svn_import.assignee = sample_assignee
3622- >>> svn_import.update_interval = timedelta(hours=1)
3623- >>> modify_event = event_set.newModify(svn_import, nopriv, token)
3624- >>> print_items(modify_event)
3625- CODE_IMPORT <muted>
3626- OWNER u'12'
3627- OLD_OWNER u'52'
3628- REVIEW_STATUS u'SUSPENDED'
3629- OLD_REVIEW_STATUS u'REVIEWED'
3630- ASSIGNEE u'23'
3631- OLD_ASSIGNEE None
3632- UPDATE_INTERVAL u'1:00:00'
3633- OLD_UPDATE_INTERVAL None
3634- SVN_BRANCH_URL u'svn://svn.example.com/trunk'
3635-
3636
3637 === REQUEST ===
3638
3639
3640=== modified file 'lib/lp/code/doc/codeimport.txt'
3641--- lib/lp/code/doc/codeimport.txt 2009-12-08 19:21:29 +0000
3642+++ lib/lp/code/doc/codeimport.txt 2010-01-21 17:51:26 +0000
3643@@ -59,6 +59,7 @@
3644 Subversion via CSCVS
3645 Subversion via bzr-svn
3646 Git
3647+ Mercurial
3648
3649
3650 Import from CVS
3651@@ -128,7 +129,7 @@
3652 >>> svn_url = 'svn://svn.example.com/trunk'
3653 >>> svn_import = code_import_set.new(
3654 ... registrant=nopriv, product=product, branch_name='trunk-svn',
3655- ... rcs_type=svn, svn_branch_url=svn_url)
3656+ ... rcs_type=svn, url=svn_url)
3657 >>> verifyObject(ICodeImport, removeSecurityProxy(svn_import))
3658 True
3659
3660@@ -141,8 +142,7 @@
3661 The CodeImportSet is also able to retrieve the code imports with the
3662 specified subversion branch url.
3663
3664- >>> existing_import = code_import_set.getBySVNDetails(
3665- ... svn_branch_url=svn_url)
3666+ >>> existing_import = code_import_set.getByURL(svn_url)
3667 >>> svn_import == existing_import
3668 True
3669
3670@@ -158,15 +158,14 @@
3671 >>> bzr_svn_url = 'svn://svn.example.com/for-bzr-svn/trunk'
3672 >>> bzr_svn_import = code_import_set.new(
3673 ... registrant=nopriv, product=product, branch_name='trunk-bzr-svn',
3674- ... rcs_type=bzr_svn, svn_branch_url=bzr_svn_url)
3675+ ... rcs_type=bzr_svn, url=bzr_svn_url)
3676 >>> verifyObject(ICodeImport, removeSecurityProxy(svn_import))
3677 True
3678
3679 The CodeImportSet.getBySVNDetails is also able to find bzr-svn
3680 imports.
3681
3682- >>> existing_bzr_svn_import = code_import_set.getBySVNDetails(
3683- ... svn_branch_url=bzr_svn_url)
3684+ >>> existing_bzr_svn_import = code_import_set.getByURL(bzr_svn_url)
3685 >>> bzr_svn_import == existing_bzr_svn_import
3686 True
3687
3688@@ -181,7 +180,7 @@
3689 >>> git_url = 'git://git.example.com/hello.git'
3690 >>> git_import = code_import_set.new(
3691 ... registrant=nopriv, product=product, branch_name='trunk-git',
3692- ... rcs_type=git, git_repo_url=git_url)
3693+ ... rcs_type=git, url=git_url)
3694 >>> verifyObject(ICodeImport, removeSecurityProxy(git_import))
3695 True
3696
3697@@ -194,11 +193,37 @@
3698 The CodeImportSet is also able to retrieve the code imports with the
3699 specified git repo url.
3700
3701- >>> existing_import = code_import_set.getByGitDetails(
3702- ... git_repo_url=git_url)
3703+ >>> existing_import = code_import_set.getByURL(git_url)
3704 >>> git_import == existing_import
3705 True
3706
3707+Import from Mercurial
3708++++++++++++++++++++++
3709+
3710+Code imports from Mercurial specify the URL used with "hg clone" to
3711+retrieve the branch to import.
3712+
3713+ >>> hg = RevisionControlSystems.HG
3714+ >>> hg_url = 'http://hg.example.com/metallic'
3715+ >>> hg_import = code_import_set.new(
3716+ ... registrant=nopriv, product=product, branch_name='trunk-hg',
3717+ ... rcs_type=hg, url=hg_url)
3718+ >>> verifyObject(ICodeImport, removeSecurityProxy(hg_import))
3719+ True
3720+
3721+Creating a CodeImport object creates a corresponding CodeImportEvent.
3722+
3723+ >>> hg_events = event_set.getEventsForCodeImport(hg_import)
3724+ >>> [event.event_type.name for event in hg_events]
3725+ ['CREATE']
3726+
3727+The CodeImportSet is also able to retrieve the code imports with the
3728+specified hg repo url.
3729+
3730+ >>> existing_import = code_import_set.getByURL(url=hg_url)
3731+ >>> hg_import == existing_import
3732+ True
3733+
3734
3735 Updating code import details
3736 ----------------------------
3737@@ -237,7 +262,7 @@
3738 >>> ignore_old_emails = pop_notifications()
3739 >>> modify_event = code_import.updateFromData(
3740 ... {'review_status': CodeImportReviewStatus.REVIEWED,
3741- ... 'svn_branch_url': 'http://svn.example.com/project/trunk'},
3742+ ... 'url': 'http://svn.example.com/project/trunk'},
3743 ... nopriv)
3744 >>> print_emails(group_similar=True)
3745 From: No Privileges Person <no-priv@canonical.com>
3746@@ -288,7 +313,7 @@
3747 from a 'noreply' address.
3748
3749 >>> modify_event = code_import.updateFromData(
3750- ... {'svn_branch_url': 'http://svn.example.org/project/trunk'},
3751+ ... {'url': 'http://svn.example.org/project/trunk'},
3752 ... None)
3753 >>> print_emails(group_similar=True)
3754 From: noreply@launchpad.net
3755@@ -318,6 +343,8 @@
3756 ... seconds=config.codeimport.default_interval_subversion)
3757 >>> default_interval_git = timedelta(
3758 ... seconds=config.codeimport.default_interval_git)
3759+ >>> default_interval_hg = timedelta(
3760+ ... seconds=config.codeimport.default_interval_hg)
3761
3762 By default, code imports are created with an unspecified update interval.
3763
3764@@ -347,6 +374,12 @@
3765 >>> git_import.effective_update_interval
3766 datetime.timedelta(0, 21600)
3767
3768+ >>> default_interval_hg
3769+ datetime.timedelta(0, 21600)
3770+ >>> hg_import.effective_update_interval
3771+ datetime.timedelta(0, 21600)
3772+
3773+
3774 If the update interval is set, then it overrides the default value.
3775
3776 As explained in the "Modify CodeImports" section, the interface does not allow
3777@@ -375,7 +408,7 @@
3778 object view for an import and also by branch, which will be used to
3779 present the import's details on the page of the branch.
3780
3781- >>> code_import_set.get(svn_import.id).svn_branch_url
3782+ >>> code_import_set.get(svn_import.id).url
3783 u'svn://svn.example.com/trunk'
3784 >>> code_import_set.getByBranch(cvs_import.branch).cvs_root
3785 u':pserver:anonymous@cvs.example.com:/cvsroot'
3786@@ -432,25 +465,25 @@
3787 on those objects.
3788
3789 >>> login('david.allouche@canonical.com')
3790- >>> svn_import.svn_branch_url
3791+ >>> svn_import.url
3792 u'svn://svn.example.com/trunk'
3793- >>> svn_import.svn_branch_url = 'svn://svn.example.com/branch/1.0'
3794+ >>> svn_import.url = 'svn://svn.example.com/branch/1.0'
3795 Traceback (most recent call last):
3796 ...
3797- ForbiddenAttribute: ('svn_branch_url', <CodeImport ...>)
3798+ ForbiddenAttribute: ('url', <CodeImport ...>)
3799
3800 Modifications can be done using the CodeImport.updateFromData
3801 method. If any change were made, this method creates and returns a
3802 CodeImportEvent describing them. The CodeImportEvent records the user
3803 that made the change, so we need to pass the user as an argument.
3804
3805- >>> svn_import.svn_branch_url
3806+ >>> svn_import.url
3807 u'svn://svn.example.com/trunk'
3808- >>> data = {'svn_branch_url': 'svn://svn.example.com/branch/1.0'}
3809+ >>> data = {'url': 'svn://svn.example.com/branch/1.0'}
3810 >>> modify_event = svn_import.updateFromData(data, nopriv)
3811 >>> modify_event.event_type.name
3812 'MODIFY'
3813- >>> svn_import.svn_branch_url
3814+ >>> svn_import.url
3815 u'svn://svn.example.com/branch/1.0'
3816 >>> svn_events = event_set.getEventsForCodeImport(svn_import)
3817 >>> [event.event_type.name for event in svn_events]
3818@@ -479,9 +512,9 @@
3819 instead of:
3820 hello from :pserver:anonymous@cvs.example.com:/cvsroot
3821
3822-And for Git.
3823+For Git.
3824
3825- >>> data = {'git_repo_url': 'git://git.example.com/goodbye.git'}
3826+ >>> data = {'url': 'git://git.example.com/goodbye.git'}
3827 >>> modify_event = git_import.updateFromData(data, nopriv)
3828 >>> print make_email_body_for_code_import_update(
3829 ... git_import, modify_event, None)
3830@@ -492,7 +525,7 @@
3831
3832 Imports via bzr-svn are also similar.
3833
3834- >>> data = {'svn_branch_url': 'http://svn.example.com/for-bzr-svn/trunk'}
3835+ >>> data = {'url': 'http://svn.example.com/for-bzr-svn/trunk'}
3836 >>> modify_event = bzr_svn_import.updateFromData(data, nopriv)
3837 >>> print make_email_body_for_code_import_update(
3838 ... bzr_svn_import, modify_event, None)
3839@@ -501,6 +534,16 @@
3840 instead of:
3841 svn://svn.example.com/for-bzr-svn/trunk
3842
3843+And for Mercurial.
3844+
3845+ >>> data = {'url': 'http://metal.example.com/byebye.hg'}
3846+ >>> modify_event = hg_import.updateFromData(data, nopriv)
3847+ >>> print make_email_body_for_code_import_update(
3848+ ... hg_import, modify_event, None)
3849+ ~no-priv/firefox/trunk-hg is now being imported from:
3850+ http://metal.example.com/byebye.hg
3851+ instead of:
3852+ http://hg.example.com/metallic
3853
3854 In addition, updateFromData can be used to set the branch whiteboard,
3855 which is also described in the email that is sent.
3856
3857=== modified file 'lib/lp/code/enums.py'
3858--- lib/lp/code/enums.py 2009-12-08 02:32:03 +0000
3859+++ lib/lp/code/enums.py 2010-01-21 17:51:26 +0000
3860@@ -405,6 +405,12 @@
3861 Imports from Git using bzr-git.
3862 """)
3863
3864+ HG = DBItem(5, """
3865+ Mercurial
3866+
3867+ Imports from Mercurial using bzr-hg.
3868+ """)
3869+
3870
3871 class CodeImportReviewStatus(DBEnumeratedType):
3872 """CodeImport review status.
3873@@ -666,6 +672,19 @@
3874 Previous Git repo URL, when recording on import source change.
3875 """)
3876
3877+ URL = DBItem(240, """
3878+ Foreign VCS branch URL
3879+
3880+ Location of the foreign VCS branch to import.
3881+ """)
3882+
3883+ OLD_URL = DBItem(241, """
3884+ Previous foreign VCS branch URL
3885+
3886+ Previous foreign VCS branch location, when recording an import source
3887+ change.
3888+ """)
3889+
3890 # Data related to machine events
3891
3892 OFFLINE_REASON = DBItem(410, """Offline Reason
3893
3894=== modified file 'lib/lp/code/errors.py'
3895--- lib/lp/code/errors.py 2009-12-07 06:51:42 +0000
3896+++ lib/lp/code/errors.py 2010-01-21 17:51:26 +0000
3897@@ -11,6 +11,7 @@
3898 'ClaimReviewFailed',
3899 'InvalidBranchMergeProposal',
3900 'ReviewNotPending',
3901+ 'UserHasExistingReview',
3902 'UserNotBranchReviewer',
3903 'WrongBranchMergeProposal',
3904 ]
3905@@ -43,6 +44,10 @@
3906 """The requested review is not in a pending state."""
3907
3908
3909+class UserHasExistingReview(Exception):
3910+ """The user has an existing review."""
3911+
3912+
3913 class UserNotBranchReviewer(Exception):
3914 """The user who attempted to review the merge proposal isn't a reviewer.
3915
3916
3917=== modified file 'lib/lp/code/interfaces/branchjob.py'
3918--- lib/lp/code/interfaces/branchjob.py 2010-01-07 04:58:38 +0000
3919+++ lib/lp/code/interfaces/branchjob.py 2010-01-21 17:51:26 +0000
3920@@ -13,6 +13,8 @@
3921 'IBranchJob',
3922 'IBranchDiffJob',
3923 'IBranchDiffJobSource',
3924+ 'IBranchScanJob',
3925+ 'IBranchScanJobSource',
3926 'IBranchUpgradeJob',
3927 'IBranchUpgradeJobSource',
3928 'IReclaimBranchSpaceJob',
3929@@ -31,7 +33,7 @@
3930
3931 from canonical.launchpad import _
3932 from lp.code.interfaces.branch import IBranch
3933-from lp.services.job.interfaces.job import IJob, IRunnableJob
3934+from lp.services.job.interfaces.job import IJob, IRunnableJob, IJobSource
3935
3936
3937 class IBranchJob(Interface):
3938@@ -74,14 +76,23 @@
3939 """
3940
3941
3942+ """A job to scan branches."""
3943+ """ A job to scan branches."""
3944+
3945+
3946+class IBranchScanJobSource(IJobSource):
3947+
3948+ def create(branch):
3949+ """Scan a branch for new revisions.
3950+
3951+ :param branch: The database branch to upgrade.
3952+ """
3953+
3954 class IBranchUpgradeJob(IRunnableJob):
3955 """A job to upgrade branches with out-of-date formats."""
3956
3957- def run():
3958- """Upgrade the branch to the format specified."""
3959-
3960-
3961-class IBranchUpgradeJobSource(Interface):
3962+
3963+class IBranchUpgradeJobSource(IJobSource):
3964
3965 def create(branch):
3966 """Upgrade a branch to a more current format.
3967@@ -89,12 +100,6 @@
3968 :param branch: The database branch to upgrade.
3969 """
3970
3971- def iterReady():
3972- """Iterate through all IBranchUpgradeJobs."""
3973-
3974- def contextManager():
3975- """Get a context for running this kind of job in."""
3976-
3977
3978 class IRevisionMailJob(IRunnableJob):
3979 """A Job to send email a revision change in a branch."""
3980
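
The new IBranchScanJob / IBranchScanJobSource pair follows the job/job-source split used by the other branch jobs: the source knows how to create jobs and hand out the ones that are ready, while a job only knows how to run. A rough plain-Python sketch of that shape (toy classes, not the Launchpad implementation):

class ScanJob(object):
    """A job that only knows how to run against one branch."""

    def __init__(self, branch):
        self.branch = branch

    def run(self):
        # The real job drives the branch scanner; here we just report
        # which branch was visited.
        print('scanned %s' % self.branch)


class ScanJobSource(object):
    """The source creates jobs and enumerates the ready ones."""

    _pending = []

    @classmethod
    def create(cls, branch):
        job = ScanJob(branch)
        cls._pending.append(job)
        return job

    @classmethod
    def iterReady(cls):
        while cls._pending:
            yield cls._pending.pop(0)


ScanJobSource.create('~owner/project/trunk')
for job in ScanJobSource.iterReady():
    job.run()

The point of the split is that a runner script only needs the source; anything that wants work done just calls create() and moves on.
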
3981=== modified file 'lib/lp/code/interfaces/codeimport.py'
3982--- lib/lp/code/interfaces/codeimport.py 2009-10-19 17:28:59 +0000
3983+++ lib/lp/code/interfaces/codeimport.py 2010-01-21 17:51:27 +0000
3984@@ -106,22 +106,9 @@
3985 "The version control system to import from. "
3986 "Can be CVS or Subversion."))
3987
3988- svn_branch_url = URIField(title=_("Branch URL"), required=False,
3989- description=_(
3990- "The URL of a Subversion branch, starting with svn:// or"
3991- " http(s)://. Only trunk branches are imported."),
3992- allowed_schemes=["http", "https", "svn"],
3993- allow_userinfo=False, # Only anonymous access is supported.
3994- allow_port=True,
3995- allow_query=False, # Query makes no sense in Subversion.
3996- allow_fragment=False, # Fragment makes no sense in Subversion.
3997- trailing_slash=False) # See http://launchpad.net/bugs/56357.
3998-
3999- git_repo_url = URIField(title=_("Repo URL"), required=False,
4000- description=_(
4001- "The URL of the git repository. The MASTER branch will be "
4002- "imported."),
4003- allowed_schemes=["git"],
4004+ url = URIField(title=_("URL"), required=False,
4005+ description=_("The URL of the VCS branch."),
4006+ allowed_schemes=["http", "https", "svn", "git"],
4007 allow_userinfo=False, # Only anonymous access is supported.
4008 allow_port=True,
4009 allow_query=False, # Query makes no sense in Subversion.
4010@@ -200,9 +187,8 @@
4011 class ICodeImportSet(Interface):
4012 """Interface representing the set of code imports."""
4013
4014- def new(registrant, product, branch_name, rcs_type, svn_branch_url=None,
4015- cvs_root=None, cvs_module=None, git_repo_url=None,
4016- review_status=None):
4017+ def new(registrant, product, branch_name, rcs_type, url=None,
4018+ cvs_root=None, cvs_module=None, review_status=None):
4019 """Create a new CodeImport."""
4020
4021 def getAll():
4022@@ -230,11 +216,8 @@
4023 def getByCVSDetails(cvs_root, cvs_module):
4024 """Get the CodeImport with the specified CVS details."""
4025
4026- def getByGitDetails(git_repo_url):
4027- """Get the CodeImport with the specified Git details."""
4028-
4029- def getBySVNDetails(svn_branch_url):
4030- """Get the CodeImport with the specified SVN details."""
4031+ def getByURL(url):
4032+ """Get the CodeImport with the url."""
4033
4034 def delete(id):
4035 """Delete a CodeImport given its id."""
4036
4037=== modified file 'lib/lp/code/interfaces/codereviewvote.py'
4038--- lib/lp/code/interfaces/codereviewvote.py 2009-12-07 06:51:42 +0000
4039+++ lib/lp/code/interfaces/codereviewvote.py 2010-01-21 17:51:27 +0000
4040@@ -17,10 +17,11 @@
4041 IBranchMergeProposal)
4042 from lp.code.interfaces.codereviewcomment import (
4043 ICodeReviewComment)
4044+from lp.registry.interfaces.person import IPerson
4045 from lazr.restful.fields import Reference
4046 from lazr.restful.declarations import (
4047 call_with, export_as_webservice_entry, export_destructor_operation,
4048- export_write_operation, exported, REQUEST_USER)
4049+ export_write_operation, exported, operation_parameters, REQUEST_USER)
4050
4051
4052 class ICodeReviewVoteReferencePublic(Interface):
4053@@ -70,6 +71,15 @@
4054 class ICodeReviewVoteReferenceEdit(Interface):
4055 """Method that require edit permissions."""
4056
4057+ def validateClaimReview(claimant):
4058+ """Implements the validation for claiming a review.
4059+
4060+ :raises ClaimReviewFailed: If the claimant already has a
4061+ personal review, if the reviewer is not a team, if the
4062+ claimant is not in the reviewer team, or if the review is
4063+ not pending.
4064+ """
4065+
4066 @call_with(claimant=REQUEST_USER)
4067 @export_write_operation()
4068 def claimReview(claimant):
4069@@ -86,6 +96,30 @@
4070 not pending.
4071 """
4072
4073+ def validateReasignReview(reviewer):
4074+ """Implements the validation for reassignment.
4075+
4076+ :raises ReviewNotPending: If the review is not pending.
4077+ :raises ReassignReviewFailed: If the reviewer is an individual and
4078+ already has a personal review.
4079+ """
4080+
4081+ @operation_parameters(
4082+ reviewer=Reference(
4083+ title=_("The person or team to assign to do the review."),
4084+ schema=IPerson))
4085+ @export_write_operation()
4086+ def reassignReview(reviewer):
4087+ """Reassign a pending review to someone else.
4088+
4089+ Pending reviews can be reassigned to someone else.
4090+
4091+ :param reviewer: The person to assign the pending review to.
4092+ :raises ReviewNotPending: If the review is not pending.
4093+ :raises ReassignReviewFailed: If the reviewer is an individual and
4094+ already has a personal review.
4095+ """
4096+
4097 @export_destructor_operation()
4098 def delete():
4099 """Delete the pending review.
4100
4101=== modified file 'lib/lp/code/mail/codeimport.py'
4102--- lib/lp/code/mail/codeimport.py 2009-11-26 03:35:35 +0000
4103+++ lib/lp/code/mail/codeimport.py 2010-01-21 17:51:26 +0000
4104@@ -96,17 +96,13 @@
4105 details_change_prefix + '\n' + new_details +
4106 "\ninstead of:\n" + old_details)
4107 elif code_import.rcs_type in (RevisionControlSystems.SVN,
4108- RevisionControlSystems.BZR_SVN):
4109- if CodeImportEventDataType.OLD_SVN_BRANCH_URL in event_data:
4110- old_url = event_data[CodeImportEventDataType.OLD_SVN_BRANCH_URL]
4111- body.append(
4112- details_change_prefix + '\n ' +code_import.svn_branch_url +
4113- "\ninstead of:\n " + old_url)
4114- elif code_import.rcs_type == RevisionControlSystems.GIT:
4115- if CodeImportEventDataType.OLD_GIT_REPO_URL in event_data:
4116- old_url = event_data[CodeImportEventDataType.OLD_GIT_REPO_URL]
4117- body.append(
4118- details_change_prefix + '\n ' +code_import.git_repo_url +
4119+ RevisionControlSystems.BZR_SVN,
4120+ RevisionControlSystems.GIT,
4121+ RevisionControlSystems.HG):
4122+ if CodeImportEventDataType.OLD_URL in event_data:
4123+ old_url = event_data[CodeImportEventDataType.OLD_URL]
4124+ body.append(
4125+ details_change_prefix + '\n ' + code_import.url +
4126 "\ninstead of:\n " + old_url)
4127 else:
4128 raise AssertionError(
4129
4130=== modified file 'lib/lp/code/mail/tests/test_codehandler.py'
4131--- lib/lp/code/mail/tests/test_codehandler.py 2010-01-07 06:37:14 +0000
4132+++ lib/lp/code/mail/tests/test_codehandler.py 2010-01-21 17:51:26 +0000
4133@@ -832,7 +832,7 @@
4134 target branch.
4135 """
4136 db_target_branch, target_tree = self.create_branch_and_tree(
4137- format=format)
4138+ tree_location='.', format=format)
4139 target_tree.branch.set_public_branch(db_target_branch.bzr_identity)
4140 target_tree.commit('rev1')
4141 # Make sure that the created branch has been mirrored.
4142
4143=== modified file 'lib/lp/code/model/branch.py'
4144--- lib/lp/code/model/branch.py 2010-01-21 09:00:33 +0000
4145+++ lib/lp/code/model/branch.py 2010-01-21 17:51:26 +0000
4146@@ -613,6 +613,7 @@
4147 series_set = getUtility(IFindOfficialBranchLinks)
4148 alteration_operations.extend(
4149 map(ClearOfficialPackageBranch, series_set.findForBranch(self)))
4150+ # XXX MichaelHudson 2010-01-13: Handle sourcepackagerecipes here.
4151 return (alteration_operations, deletion_operations)
4152
4153 def deletionRequirements(self):
4154@@ -928,6 +929,8 @@
4155 self.next_mirror_time = (
4156 datetime.now(pytz.timezone('UTC')) + increment)
4157 self.last_mirrored_id = last_revision_id
4158+ from lp.code.model.branchjob import BranchScanJob
4159+ BranchScanJob.create(self)
4160
4161 def mirrorFailed(self, reason):
4162 """See `IBranch`."""
4163
4164=== modified file 'lib/lp/code/model/branchjob.py'
4165--- lib/lp/code/model/branchjob.py 2010-01-21 09:00:33 +0000
4166+++ lib/lp/code/model/branchjob.py 2010-01-21 17:51:27 +0000
4167@@ -3,6 +3,7 @@
4168
4169 __all__ = [
4170 'BranchJob',
4171+ 'BranchScanJob',
4172 'BranchJobDerived',
4173 'BranchJobType',
4174 'BranchUpgradeJob',
4175@@ -50,6 +51,7 @@
4176 from lp.code.model.branchmergeproposal import BranchMergeProposal
4177 from lp.code.model.diff import StaticDiff
4178 from lp.code.model.revision import RevisionSet
4179+from lp.codehosting.scanner.bzrsync import BzrSync
4180 from lp.codehosting.vfs import branch_id_to_path, get_multi_server
4181 from lp.services.job.model.job import Job
4182 from lp.services.job.interfaces.job import JobStatus
4183@@ -61,10 +63,11 @@
4184 BranchMergeProposalStatus, BranchSubscriptionDiffSize,
4185 BranchSubscriptionNotificationLevel)
4186 from lp.code.interfaces.branchjob import (
4187- IBranchDiffJob, IBranchDiffJobSource, IBranchJob, IBranchUpgradeJob,
4188- IBranchUpgradeJobSource, IReclaimBranchSpaceJob,
4189- IReclaimBranchSpaceJobSource, IRevisionsAddedJob, IRevisionMailJob,
4190- IRevisionMailJobSource, IRosettaUploadJob, IRosettaUploadJobSource)
4191+ IBranchDiffJob, IBranchDiffJobSource, IBranchJob, IBranchScanJob,
4192+ IBranchScanJobSource, IBranchUpgradeJob, IBranchUpgradeJobSource,
4193+ IReclaimBranchSpaceJob, IReclaimBranchSpaceJobSource, IRevisionsAddedJob,
4194+ IRevisionMailJob, IRevisionMailJobSource, IRosettaUploadJob,
4195+ IRosettaUploadJobSource)
4196 from lp.translations.interfaces.translations import (
4197 TranslationsBranchImportMode)
4198 from lp.translations.interfaces.translationimportqueue import (
4199@@ -127,6 +130,11 @@
4200 This job generates translations templates from a source branch.
4201 """)
4202
4203+ SCAN_BRANCH = DBItem(7, """
4204+ Scan Branch
4205+
4206+ This job scans a branch for new revisions.
4207+ """)
4208
4209 class BranchJob(SQLBase):
4210 """Base class for jobs related to branches."""
4211@@ -251,6 +259,36 @@
4212 return static_diff
4213
4214
4215+class BranchScanJob(BranchJobDerived):
4216+ """A Job that scans a branch for new revisions."""
4217+
4218+ implements(IBranchScanJob)
4219+
4220+ classProvides(IBranchScanJobSource)
4221+ class_job_type = BranchJobType.SCAN_BRANCH
4222+
4223+ @classmethod
4224+ def create(cls, branch):
4225+ """See `IBranchUpgradeJobSource`."""
4226+ branch_job = BranchJob(branch, BranchJobType.SCAN_BRANCH, {})
4227+ return cls(branch_job)
4228+
4229+ def run(self):
4230+ """See `IBranchScanJob`."""
4231+ bzrsync = BzrSync(self.branch)
4232+ bzrsync.syncBranchAndClose()
4233+
4234+ @staticmethod
4235+ @contextlib.contextmanager
4236+ def contextManager():
4237+ """See `IBranchScanJobSource`."""
4238+ errorlog.globalErrorUtility.configure('branchscanner')
4239+ server = get_multi_server()
4240+ server.setUp()
4241+ yield
4242+ server.tearDown()
4243+
4244+
4245 class BranchUpgradeJob(BranchJobDerived):
4246 """A Job that upgrades branches to the current stable format."""
4247
4248
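
BranchScanJob.contextManager() above wraps a batch of scan jobs in codehosting server setup and teardown. As written, the yield is not guarded, so server.tearDown() is skipped when a job inside the block raises. A generic contextlib sketch of the same idea with the cleanup guaranteed; FakeServer is a stand-in for illustration, not the real transport server:

from __future__ import with_statement

import contextlib


class FakeServer(object):
    """Stand-in for the codehosting transport server."""

    def setUp(self):
        print('server up')

    def tearDown(self):
        print('server down')


@contextlib.contextmanager
def job_context():
    server = FakeServer()
    server.setUp()
    try:
        yield
    finally:
        # try/finally guarantees teardown even if a job fails.
        server.tearDown()


with job_context():
    print('running jobs')
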
4249=== modified file 'lib/lp/code/model/codeimport.py'
4250--- lib/lp/code/model/codeimport.py 2010-01-07 05:03:46 +0000
4251+++ lib/lp/code/model/codeimport.py 2010-01-21 17:51:26 +0000
4252@@ -87,9 +87,7 @@
4253
4254 cvs_module = StringCol(default=None)
4255
4256- svn_branch_url = StringCol(default=None)
4257-
4258- git_repo_url = StringCol(default=None)
4259+ url = StringCol(default=None)
4260
4261 date_last_successful = UtcDateTimeCol(default=None)
4262 update_interval = IntervalCol(default=None)
4263@@ -108,6 +106,8 @@
4264 config.codeimport.default_interval_subversion,
4265 RevisionControlSystems.GIT:
4266 config.codeimport.default_interval_git,
4267+ RevisionControlSystems.HG:
4268+ config.codeimport.default_interval_hg,
4269 }
4270 seconds = default_interval_dict[self.rcs_type]
4271 return timedelta(seconds=seconds)
4272@@ -121,12 +121,12 @@
4273 "Only makes sense for series with import details set.")
4274 if self.rcs_type == RevisionControlSystems.CVS:
4275 return '%s %s' % (self.cvs_root, self.cvs_module)
4276- elif self.rcs_type == RevisionControlSystems.SVN:
4277- return self.svn_branch_url
4278- elif self.rcs_type == RevisionControlSystems.GIT:
4279- return self.git_repo_url
4280- elif self.rcs_type == RevisionControlSystems.BZR_SVN:
4281- return self.svn_branch_url
4282+ elif self.rcs_type in (
4283+ RevisionControlSystems.SVN,
4284+ RevisionControlSystems.GIT,
4285+ RevisionControlSystems.BZR_SVN,
4286+ RevisionControlSystems.HG):
4287+ return self.url
4288 else:
4289 raise AssertionError(
4290 'Unknown rcs type: %s'% self.rcs_type.title)
4291@@ -214,29 +214,25 @@
4292 implements(ICodeImportSet)
4293
4294 def new(self, registrant, product, branch_name, rcs_type,
4295- svn_branch_url=None, cvs_root=None, cvs_module=None,
4296- review_status=None, git_repo_url=None):
4297+ url=None, cvs_root=None, cvs_module=None, review_status=None):
4298 """See `ICodeImportSet`."""
4299 if rcs_type == RevisionControlSystems.CVS:
4300 assert cvs_root is not None and cvs_module is not None
4301- assert svn_branch_url is None
4302- assert git_repo_url is None
4303+ assert url is None
4304 elif rcs_type in (RevisionControlSystems.SVN,
4305- RevisionControlSystems.BZR_SVN):
4306- assert cvs_root is None and cvs_module is None
4307- assert svn_branch_url is not None
4308- assert git_repo_url is None
4309- elif rcs_type == RevisionControlSystems.GIT:
4310- assert cvs_root is None and cvs_module is None
4311- assert svn_branch_url is None
4312- assert git_repo_url is not None
4313+ RevisionControlSystems.BZR_SVN,
4314+ RevisionControlSystems.GIT,
4315+ RevisionControlSystems.HG):
4316+ assert cvs_root is None and cvs_module is None
4317+ assert url is not None
4318 else:
4319 raise AssertionError(
4320 "Don't know how to sanity check source details for unknown "
4321 "rcs_type %s"%rcs_type)
4322 if review_status is None:
4323- # Auto approve git imports.
4324- if rcs_type == RevisionControlSystems.GIT:
4325+ # Auto approve git and hg imports.
4326+ if rcs_type in (
4327+ RevisionControlSystems.GIT, RevisionControlSystems.HG):
4328 review_status = CodeImportReviewStatus.REVIEWED
4329 else:
4330 review_status = CodeImportReviewStatus.NEW
4331@@ -248,9 +244,9 @@
4332
4333 code_import = CodeImport(
4334 registrant=registrant, owner=registrant, branch=import_branch,
4335- rcs_type=rcs_type, svn_branch_url=svn_branch_url,
4336+ rcs_type=rcs_type, url=url,
4337 cvs_root=cvs_root, cvs_module=cvs_module,
4338- review_status=review_status, git_repo_url=git_repo_url)
4339+ review_status=review_status)
4340
4341 getUtility(ICodeImportEventSet).newCreate(code_import, registrant)
4342 notify(ObjectCreatedEvent(code_import))
4343@@ -329,13 +325,9 @@
4344 return CodeImport.selectOneBy(
4345 cvs_root=cvs_root, cvs_module=cvs_module)
4346
4347- def getByGitDetails(self, git_repo_url):
4348- """See `ICodeImportSet`."""
4349- return CodeImport.selectOneBy(git_repo_url=git_repo_url)
4350-
4351- def getBySVNDetails(self, svn_branch_url):
4352- """See `ICodeImportSet`."""
4353- return CodeImport.selectOneBy(svn_branch_url=svn_branch_url)
4354+ def getByURL(self, url):
4355+ """See `ICodeImportSet`."""
4356+ return CodeImport.selectOneBy(url=url)
4357
4358 def getByBranch(self, branch):
4359 """See `ICodeImportSet`."""
4360
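
The model change above collapses the per-system columns into one: CVS keeps its root/module pair, while SVN, BZR_SVN, GIT and the new HG all share a single url column. A small standalone sketch of the same dispatch, using plain strings in place of the real enum:

URL_BASED = ('SVN', 'BZR_SVN', 'GIT', 'HG')


def source_details(rcs_type, url=None, cvs_root=None, cvs_module=None):
    """Return a one-line description of an import source."""
    if rcs_type == 'CVS':
        return '%s %s' % (cvs_root, cvs_module)
    elif rcs_type in URL_BASED:
        # Every URL-based system now reads the same attribute.
        return url
    else:
        raise AssertionError('Unknown rcs type: %s' % rcs_type)


print(source_details('HG', url='http://hg.example.com/metallic'))
print(source_details('CVS', cvs_root=':pserver:anon@cvs.example.com:/cvs',
                     cvs_module='metallic'))
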
4361=== modified file 'lib/lp/code/model/codeimportevent.py'
4362--- lib/lp/code/model/codeimportevent.py 2009-11-26 03:35:35 +0000
4363+++ lib/lp/code/model/codeimportevent.py 2010-01-21 17:51:26 +0000
4364@@ -257,13 +257,13 @@
4365 def _iterSourceDetails(self, code_import):
4366 """Yield key-value tuples describing the source of the import."""
4367 if code_import.rcs_type in (RevisionControlSystems.SVN,
4368- RevisionControlSystems.BZR_SVN):
4369- yield 'SVN_BRANCH_URL', code_import.svn_branch_url
4370+ RevisionControlSystems.BZR_SVN,
4371+ RevisionControlSystems.GIT,
4372+ RevisionControlSystems.HG):
4373+ yield 'URL', code_import.url
4374 elif code_import.rcs_type == RevisionControlSystems.CVS:
4375 yield 'CVS_ROOT', code_import.cvs_root
4376 yield 'CVS_MODULE', code_import.cvs_module
4377- elif code_import.rcs_type == RevisionControlSystems.GIT:
4378- yield 'GIT_REPO_URL', code_import.git_repo_url
4379 else:
4380 raise AssertionError(
4381 "Unknown RCS type: %s" % (code_import.rcs_type,))
4382
4383=== modified file 'lib/lp/code/model/codereviewvote.py'
4384--- lib/lp/code/model/codereviewvote.py 2009-12-07 06:51:42 +0000
4385+++ lib/lp/code/model/codereviewvote.py 2010-01-21 17:51:26 +0000
4386@@ -15,7 +15,8 @@
4387 from canonical.database.constants import DEFAULT
4388 from canonical.database.datetimecol import UtcDateTimeCol
4389 from canonical.database.sqlbase import SQLBase
4390-from lp.code.errors import ClaimReviewFailed, ReviewNotPending
4391+from lp.code.errors import (
4392+ ClaimReviewFailed, ReviewNotPending, UserHasExistingReview)
4393 from lp.code.interfaces.codereviewvote import ICodeReviewVoteReference
4394
4395
4396@@ -44,10 +45,25 @@
4397 # Reviews are pending if there is no associated comment.
4398 return self.comment is None
4399
4400- def claimReview(self, claimant):
4401+ def _validatePending(self):
4402+ """Raise if the review is not pending."""
4403+ if not self.is_pending:
4404+ raise ReviewNotPending('The review is not pending.')
4405+
4406+ def _validateNoReviewForUser(self, user):
4407+ """Make sure there isn't an existing review for the user."""
4408+ bmp = self.branch_merge_proposal
4409+ existing_review = bmp.getUsersVoteReference(user)
4410+ if existing_review is not None:
4411+ if existing_review.is_pending:
4412+ error_str = '%s has already been asked to review this'
4413+ else:
4414+ error_str = '%s has already reviewed this'
4415+ raise UserHasExistingReview(error_str % user.unique_displayname)
4416+
4417+ def validateClaimReview(self, claimant):
4418 """See `ICodeReviewVote`"""
4419- if not self.is_pending:
4420- raise ClaimReviewFailed('The review is not pending.')
4421+ self._validatePending()
4422 if not self.reviewer.is_team:
4423 raise ClaimReviewFailed('Cannot claim non-team reviews.')
4424 if not claimant.inTeam(self.reviewer):
4425@@ -55,17 +71,24 @@
4426 '%s is not a member of %s' %
4427 (claimant.unique_displayname,
4428 self.reviewer.unique_displayname))
4429- claimant_review = (
4430- self.branch_merge_proposal.getUsersVoteReference(claimant))
4431- if claimant_review is not None:
4432- if claimant_review.is_pending:
4433- error_str = '%s has an existing pending review'
4434- else:
4435- error_str = '%s has an existing personal review'
4436- raise ClaimReviewFailed(
4437- error_str % claimant.unique_displayname)
4438+ self._validateNoReviewForUser(claimant)
4439+
4440+ def claimReview(self, claimant):
4441+ """See `ICodeReviewVote`"""
4442+ self.validateClaimReview(claimant)
4443 self.reviewer = claimant
4444
4445+ def validateReasignReview(self, reviewer):
4446+ """See `ICodeReviewVote`"""
4447+ self._validatePending()
4448+ if not reviewer.is_team:
4449+ self._validateNoReviewForUser(reviewer)
4450+
4451+ def reassignReview(self, reviewer):
4452+ """See `ICodeReviewVote`"""
4453+ self.validateReasignReview(reviewer)
4454+ self.reviewer = reviewer
4455+
4456 def delete(self):
4457 """See `ICodeReviewVote`"""
4458 if not self.is_pending:
4459
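
The refactoring above pulls the checks out of claimReview() into validate methods so that browser forms can report problems without mutating anything, and the actions simply validate and then assign. A simplified sketch of that split, with toy classes rather than the real model:

class ReviewNotPending(Exception):
    """The requested review is not in a pending state."""


class Vote(object):
    """Toy vote reference showing the validate-then-act split."""

    def __init__(self, reviewer, comment=None):
        self.reviewer = reviewer
        self.comment = comment

    @property
    def is_pending(self):
        # Reviews are pending while there is no associated comment.
        return self.comment is None

    def validateReassignReview(self, reviewer):
        # A form can call this on its own to surface errors early.
        if not self.is_pending:
            raise ReviewNotPending('The review is not pending.')

    def reassignReview(self, reviewer):
        # The action revalidates and only then mutates.
        self.validateReassignReview(reviewer)
        self.reviewer = reviewer


vote = Vote(reviewer='hwdb-team')
vote.reassignReview('no-priv')
print(vote.reviewer)  # no-priv
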
4460=== modified file 'lib/lp/code/model/tests/test_branch.py'
4461--- lib/lp/code/model/tests/test_branch.py 2010-01-21 09:00:33 +0000
4462+++ lib/lp/code/model/tests/test_branch.py 2010-01-21 17:51:27 +0000
4463@@ -1433,6 +1433,18 @@
4464 branch.mirrorComplete(rev_id)
4465 self.assertEqual(True, branch.pending_writes)
4466
4467+ def test_mirrorComplete_creates_scan_job(self):
4468+ # After a branch has been pulled, it should have created a
4469+ # BranchScanJob to complete the process.
4470+ branch = self.factory.makeAnyBranch()
4471+ branch.startMirroring()
4472+ rev_id = self.factory.getUniqueString('rev-id')
4473+ branch.mirrorComplete(rev_id)
4474+
4475+ store = Store.of(branch)
4476+ scan_jobs = store.find(BranchJob, job_type=BranchJobType.SCAN_BRANCH)
4477+ self.assertEqual(scan_jobs.count(), 1)
4478+
4479 def test_pulled_and_scanned(self):
4480 # If a branch has been pulled and scanned, then there are no pending
4481 # writes.
4482
4483=== modified file 'lib/lp/code/model/tests/test_branchjob.py'
4484--- lib/lp/code/model/tests/test_branchjob.py 2010-01-21 09:00:33 +0000
4485+++ lib/lp/code/model/tests/test_branchjob.py 2010-01-21 17:51:26 +0000
4486@@ -8,6 +8,7 @@
4487 import datetime
4488 import os
4489 import shutil
4490+import tempfile
4491 from unittest import TestLoader
4492
4493 from bzrlib import errors as bzr_errors
4494@@ -45,11 +46,12 @@
4495 BranchSubscriptionDiffSize, BranchSubscriptionNotificationLevel,
4496 CodeReviewNotificationLevel)
4497 from lp.code.interfaces.branchjob import (
4498- IBranchDiffJob, IBranchJob, IBranchUpgradeJob, IReclaimBranchSpaceJob,
4499- IReclaimBranchSpaceJobSource, IRevisionMailJob, IRosettaUploadJob)
4500+ IBranchDiffJob, IBranchJob, IBranchScanJob, IBranchUpgradeJob,
4501+ IReclaimBranchSpaceJob, IReclaimBranchSpaceJobSource, IRevisionMailJob,
4502+ IRosettaUploadJob)
4503 from lp.code.model.branchjob import (
4504 BranchDiffJob, BranchJob, BranchJobDerived, BranchJobType,
4505- BranchUpgradeJob, ReclaimBranchSpaceJob, RevisionMailJob,
4506+ BranchScanJob, BranchUpgradeJob, ReclaimBranchSpaceJob, RevisionMailJob,
4507 RevisionsAddedJob, RosettaUploadJob)
4508 from lp.code.model.branchrevision import BranchRevision
4509 from lp.code.model.revision import RevisionSet
4510@@ -111,11 +113,17 @@
4511 def test_run_diff_content(self):
4512 """Ensure that run generates expected diff."""
4513 self.useBzrBranches()
4514- branch, tree = self.create_branch_and_tree()
4515- open('file', 'wb').write('foo\n')
4516+
4517+ tree_location = tempfile.mkdtemp()
4518+ self.addCleanup(lambda: shutil.rmtree(tree_location))
4519+
4520+ branch, tree = self.create_branch_and_tree(
4521+ tree_location=tree_location)
4522+ tree_file = os.path.join(tree_location, 'file')
4523+ open(tree_file, 'wb').write('foo\n')
4524 tree.add('file')
4525 tree.commit('First commit')
4526- open('file', 'wb').write('bar\n')
4527+ open(tree_file, 'wb').write('bar\n')
4528 tree.commit('Next commit')
4529 job = BranchDiffJob.create(branch, '1', '2')
4530 static_diff = job.run()
4531@@ -178,6 +186,44 @@
4532 self.assertIsInstance(diff.diff.text, str)
4533
4534
4535+class TestBranchScanJob(TestCaseWithFactory):
4536+ """Tests for `BranchScanJob`."""
4537+
4538+ layer = LaunchpadZopelessLayer
4539+
4540+ def test_providesInterface(self):
4541+ """Ensure that BranchScanJob implements IBranchScanJob."""
4542+ branch = self.factory.makeAnyBranch()
4543+ job = BranchScanJob.create(branch)
4544+ verifyObject(IBranchScanJob, job)
4545+
4546+ def test_run(self):
4547+ """Ensure the job scans the branch."""
4548+ self.useBzrBranches()
4549+
4550+ db_branch, bzr_tree = self.create_branch_and_tree()
4551+ bzr_tree.commit('First commit', rev_id='rev1')
4552+ bzr_tree.commit('Second commit', rev_id='rev2')
4553+ bzr_tree.commit('Third commit', rev_id='rev3')
4554+ LaunchpadZopelessLayer.commit()
4555+
4556+ job = BranchScanJob.create(db_branch)
4557+ LaunchpadZopelessLayer.switchDbUser(config.branchscanner.dbuser)
4558+ job.run()
4559+ LaunchpadZopelessLayer.switchDbUser(config.launchpad.dbuser)
4560+
4561+ self.assertEqual(db_branch.revision_count, 3)
4562+
4563+ bzr_tree.commit('Fourth commit', rev_id='rev4')
4564+ bzr_tree.commit('Fifth commit', rev_id='rev5')
4565+
4566+ job = BranchScanJob.create(db_branch)
4567+ LaunchpadZopelessLayer.switchDbUser(config.branchscanner.dbuser)
4568+ job.run()
4569+
4570+ self.assertEqual(db_branch.revision_count, 5)
4571+
4572+
4573 class TestBranchUpgradeJob(TestCaseWithFactory):
4574 """Tests for `BranchUpgradeJob`."""
4575
4576
4577=== modified file 'lib/lp/code/model/tests/test_codeimport.py'
4578--- lib/lp/code/model/tests/test_codeimport.py 2010-01-05 21:28:22 +0000
4579+++ lib/lp/code/model/tests/test_codeimport.py 2010-01-21 17:51:26 +0000
4580@@ -24,26 +24,17 @@
4581 from lp.code.interfaces.codeimportjob import ICodeImportJobWorkflow
4582 from lp.testing import (
4583 login, login_person, logout, TestCaseWithFactory, time_counter)
4584-from lp.testing.factory import LaunchpadObjectFactory
4585 from canonical.launchpad.interfaces.launchpad import ILaunchpadCelebrities
4586 from canonical.testing import (
4587 DatabaseFunctionalLayer, LaunchpadFunctionalLayer,
4588 LaunchpadZopelessLayer)
4589
4590
4591-class TestCodeImportCreation(unittest.TestCase):
4592+class TestCodeImportCreation(TestCaseWithFactory):
4593 """Test the creation of CodeImports."""
4594
4595 layer = DatabaseFunctionalLayer
4596
4597- def setUp(self):
4598- unittest.TestCase.setUp(self)
4599- self.factory = LaunchpadObjectFactory()
4600- login('no-priv@canonical.com')
4601-
4602- def tearDown(self):
4603- logout()
4604-
4605 def test_new_svn_import(self):
4606 """A new subversion code import should have NEW status."""
4607 code_import = CodeImportSet().new(
4608@@ -51,12 +42,12 @@
4609 product=self.factory.makeProduct(),
4610 branch_name='imported',
4611 rcs_type=RevisionControlSystems.SVN,
4612- svn_branch_url=self.factory.getUniqueURL())
4613+ url=self.factory.getUniqueURL())
4614 self.assertEqual(
4615 CodeImportReviewStatus.NEW,
4616 code_import.review_status)
4617 # No job is created for the import.
4618- self.assertTrue(code_import.import_job is None)
4619+ self.assertIs(None, code_import.import_job)
4620
4621 def test_reviewed_svn_import(self):
4622 """A specific review status can be set for a new import."""
4623@@ -65,13 +56,13 @@
4624 product=self.factory.makeProduct(),
4625 branch_name='imported',
4626 rcs_type=RevisionControlSystems.SVN,
4627- svn_branch_url=self.factory.getUniqueURL(),
4628+ url=self.factory.getUniqueURL(),
4629 review_status=CodeImportReviewStatus.REVIEWED)
4630 self.assertEqual(
4631 CodeImportReviewStatus.REVIEWED,
4632 code_import.review_status)
4633 # A job is created for the import.
4634- self.assertTrue(code_import.import_job is not None)
4635+ self.assertIsNot(None, code_import.import_job)
4636
4637 def test_new_cvs_import(self):
4638 """A new CVS code import should have NEW status."""
4639@@ -86,7 +77,7 @@
4640 CodeImportReviewStatus.NEW,
4641 code_import.review_status)
4642 # No job is created for the import.
4643- self.assertTrue(code_import.import_job is None)
4644+ self.assertIs(None, code_import.import_job)
4645
4646 def test_reviewed_cvs_import(self):
4647 """A specific review status can be set for a new import."""
4648@@ -102,7 +93,7 @@
4649 CodeImportReviewStatus.REVIEWED,
4650 code_import.review_status)
4651 # A job is created for the import.
4652- self.assertTrue(code_import.import_job is not None)
4653+ self.assertIsNot(None, code_import.import_job)
4654
4655 def test_git_import_reviewed(self):
4656 """A new git import is always reviewed by default."""
4657@@ -111,29 +102,35 @@
4658 product=self.factory.makeProduct(),
4659 branch_name='imported',
4660 rcs_type=RevisionControlSystems.GIT,
4661- git_repo_url=self.factory.getUniqueURL(),
4662+ url=self.factory.getUniqueURL(),
4663 review_status=None)
4664 self.assertEqual(
4665 CodeImportReviewStatus.REVIEWED,
4666 code_import.review_status)
4667 # A job is created for the import.
4668- self.assertTrue(code_import.import_job is not None)
4669-
4670-
4671-class TestCodeImportDeletion(unittest.TestCase):
4672+ self.assertIsNot(None, code_import.import_job)
4673+
4674+ def test_hg_import_reviewed(self):
4675+ """A new hg import is always reviewed by default."""
4676+ code_import = CodeImportSet().new(
4677+ registrant=self.factory.makePerson(),
4678+ product=self.factory.makeProduct(),
4679+ branch_name='imported',
4680+ rcs_type=RevisionControlSystems.HG,
4681+ url=self.factory.getUniqueURL(),
4682+ review_status=None)
4683+ self.assertEqual(
4684+ CodeImportReviewStatus.REVIEWED,
4685+ code_import.review_status)
4686+ # A job is created for the import.
4687+ self.assertIsNot(None, code_import.import_job)
4688+
4689+
4690+class TestCodeImportDeletion(TestCaseWithFactory):
4691 """Test the deletion of CodeImports."""
4692
4693 layer = LaunchpadFunctionalLayer
4694
4695- def setUp(self):
4696- unittest.TestCase.setUp(self)
4697- self.factory = LaunchpadObjectFactory()
4698- # Log in a vcs import member.
4699- login('david.allouche@canonical.com')
4700-
4701- def tearDown(self):
4702- logout()
4703-
4704 def test_delete(self):
4705 """Ensure CodeImport objects can be deleted via CodeImportSet."""
4706 code_import = self.factory.makeCodeImport()
4707@@ -142,6 +139,7 @@
4708 def test_deleteIncludesJob(self):
4709 """Ensure deleting CodeImport objects deletes associated jobs."""
4710 code_import = self.factory.makeCodeImport()
4711+ login_person(getUtility(ILaunchpadCelebrities).vcs_imports.teamowner)
4712 code_import_job = self.factory.makeCodeImportJob(code_import)
4713 job_id = code_import_job.id
4714 CodeImportJobSet().getById(job_id)
4715@@ -302,18 +300,17 @@
4716 CodeImportReviewStatus.FAILING, code_import.review_status)
4717
4718
4719-class TestCodeImportResultsAttribute(unittest.TestCase):
4720+class TestCodeImportResultsAttribute(TestCaseWithFactory):
4721 """Test the results attribute of a CodeImport."""
4722
4723 layer = LaunchpadFunctionalLayer
4724
4725 def setUp(self):
4726- unittest.TestCase.setUp(self)
4727- login('no-priv@canonical.com')
4728- self.factory = LaunchpadObjectFactory()
4729+ TestCaseWithFactory.setUp(self)
4730 self.code_import = self.factory.makeCodeImport()
4731
4732 def tearDown(self):
4733+ super(TestCodeImportResultsAttribute, self).tearDown()
4734 logout()
4735
4736 def test_no_results(self):
4737@@ -534,7 +531,7 @@
4738 def make_active_import(factory, project_name=None, product_name=None,
4739 branch_name=None, svn_branch_url=None,
4740 cvs_root=None, cvs_module=None, git_repo_url=None,
4741- last_update=None, rcs_type=None):
4742+ hg_repo_url=None, last_update=None, rcs_type=None):
4743 """Make a new CodeImport for a new Product, maybe in a new Project.
4744
4745 The import will be 'active' in the sense used by
4746@@ -549,7 +546,8 @@
4747 code_import = factory.makeCodeImport(
4748 product=product, branch_name=branch_name,
4749 svn_branch_url=svn_branch_url, cvs_root=cvs_root,
4750- cvs_module=cvs_module, git_repo_url=git_repo_url, rcs_type=rcs_type)
4751+ cvs_module=cvs_module, git_repo_url=git_repo_url,
4752+ hg_repo_url=hg_repo_url, rcs_type=rcs_type)
4753 make_import_active(factory, code_import, last_update)
4754 return code_import
4755
4756
4757=== modified file 'lib/lp/code/model/tests/test_codereviewvote.py'
4758--- lib/lp/code/model/tests/test_codereviewvote.py 2009-12-07 06:51:42 +0000
4759+++ lib/lp/code/model/tests/test_codereviewvote.py 2010-01-21 17:51:26 +0000
4760@@ -9,7 +9,8 @@
4761 from canonical.testing import DatabaseFunctionalLayer
4762
4763 from lp.code.enums import CodeReviewVote
4764-from lp.code.errors import ClaimReviewFailed, ReviewNotPending
4765+from lp.code.errors import (
4766+ ClaimReviewFailed, ReviewNotPending, UserHasExistingReview)
4767 from lp.code.interfaces.codereviewvote import ICodeReviewVoteReference
4768 from lp.testing import login_person, TestCaseWithFactory
4769
4770@@ -43,7 +44,7 @@
4771 TestCaseWithFactory.setUp(self)
4772 # Setup the proposal, claimant and team reviewer.
4773 self.bmp = self.factory.makeBranchMergeProposal()
4774- self.claimant = self.factory.makePerson()
4775+ self.claimant = self.factory.makePerson(name='eric')
4776 self.review_team = self.factory.makeTeam()
4777
4778 def _addPendingReview(self):
4779@@ -71,8 +72,10 @@
4780 vote=CodeReviewVote.APPROVE)
4781 review = self._addPendingReview()
4782 self._addClaimantToReviewTeam()
4783- self.assertRaises(
4784- ClaimReviewFailed, review.claimReview, self.claimant)
4785+ self.assertRaisesWithContent(
4786+ UserHasExistingReview,
4787+ 'Eric (eric) has already reviewed this',
4788+ review.claimReview, self.claimant)
4789
4790 def test_personal_pending_review(self):
4791 # If the claimant has a pending review already, then they can't claim
4792@@ -83,8 +86,10 @@
4793 self.bmp.nominateReviewer(
4794 reviewer=self.claimant, registrant=self.bmp.registrant)
4795 login_person(self.claimant)
4796- self.assertRaises(
4797- ClaimReviewFailed, review.claimReview, self.claimant)
4798+ self.assertRaisesWithContent(
4799+ UserHasExistingReview,
4800+ 'Eric (eric) has already been asked to review this',
4801+ review.claimReview, self.claimant)
4802
4803 def test_personal_not_in_review_team(self):
4804 # If the claimant is not in the review team, an error is raised.
4805@@ -183,5 +188,75 @@
4806 self.assertRaises(ReviewNotPending, review.delete)
4807
4808
4809+class TestCodeReviewVoteReferenceReassignReview(TestCaseWithFactory):
4810+ """Tests for CodeReviewVoteReference.reassignReview."""
4811+
4812+ layer = DatabaseFunctionalLayer
4813+
4814+ def makeMergeProposalWithReview(self, completed=False):
4815+ """Return a new merge proposal with a review."""
4816+ bmp = self.factory.makeBranchMergeProposal()
4817+ reviewer = self.factory.makePerson()
4818+ if completed:
4819+ login_person(reviewer)
4820+ bmp.createComment(
4821+ reviewer, 'Message subject', 'Message content',
4822+ vote=CodeReviewVote.APPROVE)
4823+ [review] = list(bmp.votes)
4824+ else:
4825+ login_person(bmp.registrant)
4826+ review = bmp.nominateReviewer(
4827+ reviewer=reviewer, registrant=bmp.registrant)
4828+ return bmp, review
4829+
4830+ def test_reassign_pending(self):
4831+ # A pending review can be reassigned to someone else.
4832+ bmp, review = self.makeMergeProposalWithReview()
4833+ new_reviewer = self.factory.makePerson()
4834+ review.reassignReview(new_reviewer)
4835+ self.assertEqual(new_reviewer, review.reviewer)
4836+
4837+ def test_reassign_completed_review(self):
4838+ # A completed review cannot be reassigned.
4839+ bmp, review = self.makeMergeProposalWithReview(completed=True)
4840+ self.assertRaises(
4841+ ReviewNotPending, review.reassignReview, bmp.registrant)
4842+
4843+ def test_reassign_to_user_existing_pending(self):
4844+ # If a user has an existing pending review, they cannot have another
4845+ # pending review assigned to them.
4846+ bmp, review = self.makeMergeProposalWithReview()
4847+ reviewer = self.factory.makePerson(name='eric')
4848+ user_review = bmp.nominateReviewer(
4849+ reviewer=reviewer, registrant=bmp.registrant)
4850+ self.assertRaisesWithContent(
4851+ UserHasExistingReview,
4852+ 'Eric (eric) has already been asked to review this',
4853+ review.reassignReview, reviewer)
4854+
4855+ def test_reassign_to_user_existing_completed(self):
4856+ # If a user has an existing completed review, they cannot have another
4857+ # pending review assigned to them.
4858+ bmp, review = self.makeMergeProposalWithReview()
4859+ reviewer = self.factory.makePerson(name='eric')
4860+ bmp.createComment(
4861+ reviewer, 'Message subject', 'Message content',
4862+ vote=CodeReviewVote.APPROVE)
4863+ self.assertRaisesWithContent(
4864+ UserHasExistingReview,
4865+ 'Eric (eric) has already reviewed this',
4866+ review.reassignReview, reviewer)
4867+
4868+ def test_reassign_to_team_existing(self):
4869+ # If a team has an existing review, they can have another pending
4870+ # review assigned to them.
4871+ bmp, review = self.makeMergeProposalWithReview()
4872+ reviewer_team = self.factory.makeTeam()
4873+ team_review = bmp.nominateReviewer(
4874+ reviewer=reviewer_team, registrant=bmp.registrant)
4875+ review.reassignReview(reviewer_team)
4876+ self.assertEqual(reviewer_team, review.reviewer)
4877+
4878+
4879 def test_suite():
4880 return TestLoader().loadTestsFromName(__name__)
4881
4882=== added file 'lib/lp/code/scripts/tests/test_scan_branches.py'
4883--- lib/lp/code/scripts/tests/test_scan_branches.py 1970-01-01 00:00:00 +0000
4884+++ lib/lp/code/scripts/tests/test_scan_branches.py 2010-01-21 17:51:27 +0000
4885@@ -0,0 +1,59 @@
4886+#! /usr/bin/python2.5
4887+#
4888+# Copyright 2010 Canonical Ltd. This software is licensed under the
4889+# GNU Affero General Public License version 3 (see the file LICENSE).
4890+
4891+"""Test the scan_branches script."""
4892+
4893+
4894+import transaction
4895+
4896+from canonical.testing import ZopelessAppServerLayer
4897+from lp.testing import TestCaseWithFactory
4898+from canonical.launchpad.scripts.tests import run_script
4899+from lp.code.model.branchjob import BranchScanJob
4900+
4901+
4902+class TestScanBranches(TestCaseWithFactory):
4903+ """Test the scan_branches script."""
4904+
4905+ layer = ZopelessAppServerLayer
4906+
4907+ def make_branch_with_commits_and_scan_job(self, db_branch):
4908+ """Create a branch from a db_branch, make commits and a scan job."""
4909+ target, target_tree = self.create_branch_and_tree(
4910+ db_branch=db_branch)
4911+ target_tree.commit('First commit', rev_id='rev1')
4912+ target_tree.commit('Second commit', rev_id='rev2')
4913+ target_tree.commit('Third commit', rev_id='rev3')
4914+ job = BranchScanJob.create(db_branch)
4915+ transaction.commit()
4916+
4917+ def run_script_and_assert_success(self):
4918+ """Run the scan_branches script and assert it ran successfully."""
4919+ retcode, stdout, stderr = run_script(
4920+ 'cronscripts/scan_branches.py', [],
4921+ expect_returncode=0)
4922+ self.assertEqual('', stdout)
4923+ self.assertIn(
4924+ 'INFO Ran 1 IBranchScanJobSource jobs.\n', stderr)
4925+
4926+ def test_scan_branch(self):
4927+ """Test that scan branches adds revisions to the database."""
4928+ self.useBzrBranches(real_server=True)
4929+
4930+ db_branch = self.factory.makeAnyBranch()
4931+ self.make_branch_with_commits_and_scan_job(db_branch)
4932+
4933+ self.run_script_and_assert_success()
4934+ self.assertEqual(db_branch.revision_count, 3)
4935+
4936+ def test_scan_branch_packagebranch(self):
4937+ """Test that scan_branches can scan package branches."""
4938+ self.useBzrBranches(real_server=True)
4939+
4940+ db_branch = self.factory.makePackageBranch()
4941+ self.make_branch_with_commits_and_scan_job(db_branch)
4942+
4943+ self.run_script_and_assert_success()
4944+ self.assertEqual(db_branch.revision_count, 3)
4945
4946=== renamed file 'lib/lp/code/stories/branches/xx-branch-merge-proposals.txt' => 'lib/lp/code/stories/branches/xx-branchmergeproposals.txt'
4947--- lib/lp/code/stories/branches/xx-branch-merge-proposals.txt 2010-01-07 21:02:00 +0000
4948+++ lib/lp/code/stories/branches/xx-branchmergeproposals.txt 2010-01-21 17:51:26 +0000
4949@@ -233,15 +233,23 @@
4950 The claimant can reassign the review to someone else.
4951
4952 >>> foobar_browser.getLink('Reassign').click()
4953+ >>> foobar_browser.getControl('Reviewer').value = 'no-priv'
4954+ >>> foobar_browser.getControl('Reassign').click()
4955+
4956+If the new reviewer already has a review, the user gets an error...
4957+
4958+ >>> print_feedback_messages(foobar_browser.contents)
4959+ There is 1 error.
4960+ No Privileges Person (no-priv) has already reviewed this
4961+
4962+... if not, the review is reassigned.
4963+
4964 >>> foobar_browser.getControl('Reviewer').value = 'hwdb-team'
4965 >>> foobar_browser.getControl('Reassign').click()
4966- >>> foobar_browser.open(klingon_proposal)
4967- >>> pending = find_tag_by_id(
4968- ... foobar_browser.contents, 'code-review-votes')
4969
4970 The review is now reassigned to the HWDB team.
4971
4972- >>> print extract_text(pending)
4973+ >>> print_tag_with_id(foobar_browser.contents, 'code-review-votes')
4974 Reviewer Review Type Date Requested Status...
4975 HWDB Team claimable ... ago Pending ...
4976
4977@@ -294,8 +302,7 @@
4978 >>> sample_browser.getControl('Merged Revision Number').value = '42'
4979 >>> sample_browser.getControl('Mark as Merged').click()
4980
4981- >>> for message in get_feedback_messages(sample_browser.contents):
4982- ... print extract_text(message)
4983+ >>> print_feedback_messages(sample_browser.contents)
4984 The proposal's merged revision has been updated.
4985 >>> print_summary(sample_browser)
4986 Status: Merged
4987@@ -436,8 +443,7 @@
4988 setting it gives an appropriate error.
4989
4990 >>> nopriv_browser.getControl('Propose Merge').click()
4991- >>> for message in get_feedback_messages(nopriv_browser.contents):
4992- ... print extract_text(message)
4993+ >>> print_feedback_messages(nopriv_browser.contents)
4994 There is 1 error.
4995 Required input is missing.
4996
4997@@ -447,8 +453,7 @@
4998 ... name='field.target_branch.target_branch').value = (
4999 ... 'fooix')
5000 >>> nopriv_browser.getControl('Propose Merge').click()
The diff has been truncated for viewing.