Merge lp://staging/~jacseen/keryx/unstable into lp://staging/keryx/unstable
Proposed by: Jack N
| Status: | Merged |
|---|---|
| Approved by: | mac9416 |
| Approved revision: | not available |
| Merged at revision: | not available |
| Proposed branch: | lp://staging/~jacseen/keryx/unstable |
| Merge into: | lp://staging/keryx/unstable |
| Diff against target: | 350 lines (+129/-68), 3 files modified: lib/wxkeryx/download.py (+121/-63), lib/wxkeryx/main.py (+2/-1), plugins/Debian.py (+6/-4) |
| To merge this branch: | bzr merge lp://staging/~jacseen/keryx/unstable |
| Related bugs: | |
| Reviewer | Review Type | Date Requested | Status |
|---|---|---|---|
| Keryx Admins | | | Pending |
Review via email: mp+18053@code.staging.launchpad.net
Commit message

(none specified)
Description of the change

Error corrections, repairs, cleanup, and redoing of code.
Readded the download checksum code :)
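The headline change is the restored checksum verification for downloads. Below is a minimal standalone sketch of the same idea: hash the file in chunks and compare it against whichever Debian-style checksum field (SHA256, SHA1, or MD5sum) is available, using the same 0/1/2 return convention as the branch's `_verify` method. The function and variable names are illustrative only, not part of the Keryx API, and the code is written to run on both the Python 2.6 of the era and modern Python 3.

```python
import hashlib


def verify_file(path, checksums, priority=('SHA256', 'SHA1', 'MD5sum')):
    """Return 1 if the file matches the preferred available checksum,
    0 on a mismatch, and 2 when verification is not possible
    (no usable checksum field, or the file cannot be read)."""
    algorithms = {'SHA256': hashlib.sha256,
                  'SHA1': hashlib.sha1,
                  'MD5sum': hashlib.md5}
    # Pick the strongest hash that was actually supplied, in priority order.
    field = next((name for name in priority if name in checksums), None)
    if field is None:
        return 2
    digest = algorithms[field]()
    try:
        with open(path, 'rb') as fd:
            # Hash in 64 KiB chunks so large .deb files are not read into memory at once.
            for chunk in iter(lambda: fd.read(64 * 1024), b''):
                digest.update(chunk)
    except (IOError, OSError):
        return 2
    return 1 if digest.hexdigest() == checksums[field] else 0
```

In the branch, this style of check is applied twice: before a download, to decide whether an already-present file can be kept, and after a download, where a mismatch deletes the file and triggers a retry (up to `self.retries` attempts).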
Jack N (jacseen) wrote:
mac9416 (mac9416) wrote:
Approved. Note that Launchpad lets you specify a desired commit message when you submit a merge request. Not essential, just handy so I know just what you want your changes remembered as. :-)
Preview Diff
1 | === modified file 'lib/wxkeryx/download.py' |
2 | --- lib/wxkeryx/download.py 2010-01-23 01:38:37 +0000 |
3 | +++ lib/wxkeryx/download.py 2010-01-26 06:09:15 +0000 |
4 | @@ -1,30 +1,32 @@ |
5 | -import lib, os.path, urllib, wx |
6 | +import lib, os.path, urllib, wx, hashlib |
7 | import wx.lib.delayedresult as delayedresult |
8 | from lib import consts, log |
9 | |
10 | class download(wx.Frame): |
11 | """This demos simplistic use of delayedresult module.""" |
12 | - def __init__(self, parent, endfunc, files, extract=False): |
13 | + def __init__(self, parent, endfunc, files, extract=False, overwrite=False): |
14 | wx.Frame.__init__(self, None, title=_("Downloading...")) |
15 | self.parent = parent |
16 | self.files = files |
17 | self.function = endfunc |
18 | - self.extract = extract |
19 | + self.extract = extract |
20 | + self.overwrite = overwrite |
21 | + self.retries = 1 |
22 | |
23 | #TODO: Add overwrite function |
24 | |
25 | - self.SetIcon(wx.Icon(consts.fileIco, wx.BITMAP_TYPE_ICO)) |
26 | + self.SetIcon(wx.Icon(consts.fileIco, wx.BITMAP_TYPE_ICO)) |
27 | panel = wx.Panel(self) |
28 | loading = wx.StaticText(panel, -1, _("This may take a while. Please be patient.")) |
29 | self.gauge = wx.Gauge(panel) |
30 | #cancelBtn = wx.Button(panel, -1, "Cancel") |
31 | - |
32 | + |
33 | self.cur = wx.StaticText(panel, -1, _("Current Transfer:")) |
34 | self.cur.SetFont(wx.Font(8, wx.DEFAULT, wx.NORMAL, wx.BOLD, 0, "")) |
35 | self.current = wx.StaticText(panel, -1, "\n") |
36 | self.download_gauge = wx.Gauge(panel) |
37 | self.status = wx.TextCtrl(panel, style=wx.TE_MULTILINE|wx.TE_READONLY) |
38 | - |
39 | + |
40 | status = wx.BoxSizer() |
41 | status.Add(self.cur, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5) |
42 | status.Add(self.current, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5) |
43 | @@ -49,51 +51,63 @@ |
44 | |
45 | self.timer = wx.Timer(self) |
46 | self.timer.Start(100) |
47 | - |
48 | + |
49 | self.parent.Enable(False) |
50 | self.handleGet(None) |
51 | |
52 | def TimerHandler(self, event): self.gauge.Pulse() |
53 | - |
54 | + |
55 | def OnClose(self, event): |
56 | - """Only needed because in demo, closing the window does not kill the |
57 | + """Only needed because in demo, closing the window does not kill the |
58 | app, so worker thread continues and sends result to dead frame; normally |
59 | your app would exit so this would not happen.""" |
60 | #if self.buttonAbort.IsEnabled(): |
61 | #self.log( "Exiting: Aborting job %s" % self.jobID ) |
62 | #self.handleAbort(None) |
63 | self.Show() |
64 | - |
65 | - def handleGet(self, event): |
66 | + |
67 | + def handleGet(self, event): |
68 | """Compute result in separate thread, doesn't affect GUI response.""" |
69 | #self.buttonGet.Enable(False) |
70 | #self.buttonAbort.Enable(True) |
71 | self.abortEvent.clear() |
72 | self.jobID += 1 |
73 | - |
74 | + |
75 | log.info( "Starting job %s in producer thread: GUI remains responsive" |
76 | % self.jobID ) |
77 | - delayedresult.startWorker(self._resultConsumer, self._resultProducer, |
78 | + delayedresult.startWorker(self._resultConsumer, self._resultProducer, |
79 | wargs=(self.jobID,self.abortEvent), jobID=self.jobID) |
80 | |
81 | - |
82 | + |
83 | def _resultProducer(self, jobID, abortEvent): |
84 | """Downloads the files in self.files""" |
85 | self.numfiles = len(self.files) |
86 | msg = _("Downloading ") + str(self.numfiles) + " " + _("file(s)") + "\n" |
87 | wx.CallAfter(self.LogMessage, msg) |
88 | |
89 | - success = True |
90 | + if consts.proxy_enabled: |
91 | + if consts.http_proxy['http'][0:7] != 'http://': |
92 | + proxy = {'http://':consts.http_proxy['http']} |
93 | + else: |
94 | + proxy = consts.http_proxy |
95 | + downloader = Downloader(proxy) |
96 | + else: |
97 | + downloader = Downloader() |
98 | + |
99 | + failed = self.files[:] |
100 | self.numfile = 0 |
101 | for data in self.files: |
102 | - if abortEvent(): return [1, self.files] |
103 | - |
104 | + if abortEvent(): return [1, failed] |
105 | url = data[0] |
106 | - file = data[1] |
107 | + filepath = data[1] |
108 | end = url.split('/') |
109 | protocol = end[0] + '//' |
110 | site = end[2] |
111 | end = end[len(end) - 1] |
112 | + if len(data) <= 2: |
113 | + checksum = {} |
114 | + else: |
115 | + checksum = data[2] |
116 | |
117 | msg = _("Starting") + " " + end |
118 | wx.CallAfter(self.SetFile, msg) |
119 | @@ -101,60 +115,103 @@ |
120 | |
121 | self.curfile = end |
122 | self.numfile += 1 |
123 | - if (url[-3:] == "deb" and (not os.path.exists(file))) or (url[-3:] != "deb"): # Download only deb files that don't exist but still all gz files |
124 | + if os.path.exists(filepath): |
125 | + if not self.overwrite: |
126 | + if self._verify(filepath, checksum) >= 1: |
127 | + msg = _("Skipped: ") + end + "\n" + _("Reason: ") + _("File already exists.") + "\n" |
128 | + wx.CallAfter(self.LogMessage, msg) |
129 | + failed.remove(data) |
130 | + continue |
131 | + else: |
132 | + msg = _("Deleted: ") + filepath + "\n" + _("Reason: ") + _("Existing file failed checksum verify.") + "\n" |
133 | + wx.CallAfter(self.LogMessage, msg) |
134 | + os.remove(filepath) |
135 | + retries = 0 |
136 | + success = False |
137 | + while retries <= self.retries: |
138 | + |
139 | try: # Attempt to download the file |
140 | - msg = _("Downloading: ") + url + "\n" |
141 | - wx.CallAfter(self.LogMessage, msg) |
142 | - |
143 | - if consts.proxy_enabled: |
144 | - if consts.http_proxy['http'][0:7] != 'http://': |
145 | - proxy = {'http://':consts.http_proxy['http']} |
146 | - else: |
147 | - proxy = consts.http_proxy |
148 | - downloader = Downloader(proxy) |
149 | - else: |
150 | - downloader = Downloader() |
151 | - |
152 | - downloader.retrieve(url, file, self.progress) |
153 | - #TODO: Generate md5hashes for these files |
154 | - |
155 | - msg = _("Extracting") + " " + end |
156 | - wx.CallAfter(self.SetFile, msg) |
157 | - |
158 | + msg = _("Downloading: ") + url + "\n" |
159 | + if retries != 0: msg = _("Retrying: ")+"[%i/%i] " % (retries, self.retries) + url + "\n" |
160 | + wx.CallAfter(self.LogMessage, msg) |
161 | + |
162 | + downloader.retrieve(url, filepath, self.progress) |
163 | + |
164 | + if self._verify(filepath, checksum) == 0: |
165 | + msg = _("Failed verify: ") + filepath + "\n" |
166 | + wx.CallAfter(self.LogMessage, msg) |
167 | + os.remove(filepath) |
168 | + retries += 1 |
169 | + continue |
170 | + |
171 | + success = True |
172 | + failed.remove(data) |
173 | + msg = _("Success: ") + filepath + "\n" |
174 | + wx.CallAfter(self.LogMessage, msg) |
175 | + |
176 | if self.extract: |
177 | + msg = _("Extracting") + " " + end |
178 | + wx.CallAfter(self.SetFile, msg) |
179 | try: |
180 | import gzip |
181 | - infile = gzip.open(data[1], 'rb') |
182 | - outfile = open(data[2], 'wb') |
183 | + infile = gzip.open(filepath, 'rb') |
184 | + outfile = open(filepath[:-3], 'wb') |
185 | outfile.write(infile.read()) |
186 | outfile.close() |
187 | infile.close() |
188 | - os.remove(data[1]) |
189 | - except: |
190 | - msg = _("Unable to extract: ") + data[1] |
191 | - wx.CallAfter(self.LogMessage, msg) |
192 | - |
193 | - msg = _("Success: ") + file + "\n" |
194 | - wx.CallAfter(self.LogMessage, msg) |
195 | - |
196 | + os.remove(filepath) |
197 | + except: # Failed to extract |
198 | + msg = _("Unable to extract: ") + filepath |
199 | + wx.CallAfter(self.LogMessage, msg) |
200 | + break |
201 | + |
202 | except IOError, e: # Failed downloading |
203 | - success = False |
204 | msg = _("Failed: ") + url + "\n" + _("Reason: ") + str(e) + "\n" |
205 | - wx.CallAfter(self.LogMessage, msg) |
206 | - |
207 | - else: |
208 | - msg = _("Skipped: ") + url + "\n" + _("Reason: ") + _("File already exists. Not necessarily a bad thing.") + "\n" |
209 | - wx.CallAfter(self.LogMessage, msg) |
210 | - |
211 | - |
212 | - if success == True: |
213 | + wx.CallAfter(self.LogMessage, msg) |
214 | + retries += 1 |
215 | + |
216 | + |
217 | + msg = _("Downloaded: ") + str(len(self.files) - len(failed)) + ", " + _("Failed: ") + str(len(failed)) + "\n" |
218 | + wx.CallAfter(self.LogMessage, msg) |
219 | + if failed == []: |
220 | wx.CallAfter(self.DisplayMessage,_("All downloads have been completed successfully."), _("Download Complete")) |
221 | - result = [0,self.files] |
222 | + result = [0,failed] |
223 | else: |
224 | wx.CallAfter(self.DisplayMessage, _("Some downloads failed to complete.") + "\n" +_("Please check") + " " + os.path.join(consts.dirLog, "log") + " " + _("for more details."), _("Download Failed")) |
225 | - result = [1,self.files] |
226 | + result = [1,failed] |
227 | return result |
228 | |
229 | + def _verify(self, filename, checksum, cspriority=['SHA256','SHA1','MD5sum']): |
230 | + """ Does a hash check on 'filename' as per hashes in 'checksum' dict. Currently only supports sha256, sha1, md5.""" |
231 | + if cspriority != []: |
232 | + checks = [x for x in cspriority if checksum.has_key(x)] |
233 | + if checks == []: return 2 |
234 | + else: |
235 | + checks = checksum.keys() |
236 | + if checks == []: return 2 |
237 | + |
238 | + if checks[0] == 'SHA256': |
239 | + check = hashlib.sha256() |
240 | + elif checks[0] == 'SHA1': |
241 | + check = hashlib.sha1() |
242 | + elif checks[0] == 'MD5sum': |
243 | + check = hashlib.md5() |
244 | + else: |
245 | + return 2 |
246 | + |
247 | + try: |
248 | + fd = open(filename, 'rb') |
249 | + data = fd.read(1024*64) |
250 | + while data: |
251 | + check.update(data) |
252 | + data=fd.read(1024*64) |
253 | + fd.close() |
254 | + if check.hexdigest() == checksum[checks[0]]: |
255 | + return 1 |
256 | + return 0 |
257 | + except: |
258 | + return 2 |
259 | + |
260 | def progress(self, blocks, size, total): |
261 | if blocks*size > total: fraction = float(total)/float(total) |
262 | else: fraction = float(blocks*size)/float(total) |
263 | @@ -170,14 +227,14 @@ |
264 | def SetGauge(self, val): self.download_gauge.SetValue(val) |
265 | def SetFile(self, val): self.current.SetLabel(val) |
266 | |
267 | - def handleAbort(self, event): |
268 | + def handleAbort(self, event): |
269 | """Abort the result computation.""" |
270 | log.info( "Aborting result for job %s" % self.jobID ) |
271 | #self.buttonGet.Enable(True) |
272 | #self.buttonAbort.Enable(False) |
273 | self.abortEvent.set() |
274 | |
275 | - |
276 | + |
277 | def _resultConsumer(self, delayedResult): |
278 | jobID = delayedResult.getJobID() |
279 | assert jobID == self.jobID |
280 | @@ -186,7 +243,7 @@ |
281 | except Exception, exc: |
282 | log.info( "Result for job %s raised exception: %s" % (jobID, exc) ) |
283 | return |
284 | - |
285 | + |
286 | # output result |
287 | #log.info( "Got result for job %s: %s" % (jobID, result) ) |
288 | self.parent.Enable() |
289 | @@ -195,7 +252,8 @@ |
290 | |
291 | class Downloader(urllib.FancyURLopener): |
292 | def __init__(self, proxy={}): |
293 | - urllib.FancyURLopener.__init__(self, proxy) |
294 | - |
295 | + urllib.FancyURLopener.__init__(self, proxy) |
296 | + |
297 | def prompt_user_passwd(self, host='', realm=''): |
298 | return (consts.proxy_username, consts.proxy_password) |
299 | + |
300 | |
301 | === modified file 'lib/wxkeryx/main.py' |
302 | --- lib/wxkeryx/main.py 2010-01-25 02:59:48 +0000 |
303 | +++ lib/wxkeryx/main.py 2010-01-26 06:09:15 +0000 |
304 | @@ -440,7 +440,7 @@ |
305 | # dlg.Destroy() |
306 | |
307 | def OnUpdateStatus(self, event): |
308 | - dlg = wx.MessageDialog(None, _("This will update the list of packages installed on your computer. Only run this on the computer you created this project on.\n\n" + \ |
309 | + dlg = wx.MessageDialog(None, _("This will update Keryx about which packages are installed on your computer. Only run this on the computer you created this project on.\n\n" + \ |
310 | "Would you like to continue?"), |
311 | _("Update Status"), wx.YES_NO | wx.ICON_QUESTION) |
312 | result = dlg.ShowModal() |
313 | @@ -453,6 +453,7 @@ |
314 | _("Status Update Succeeded")) |
315 | result = dlg.ShowModal() |
316 | dlg.Destroy() |
317 | + self.loadLocal() |
318 | else: |
319 | dlg = wx.MessageDialog(None, _("Status update failed. Try running Keryx as root (using 'sudo')."), |
320 | _("Status Update Failed"), wx.ICON_ERROR) |
321 | |
322 | === modified file 'plugins/Debian.py' |
323 | --- plugins/Debian.py 2010-01-23 15:37:34 +0000 |
324 | +++ plugins/Debian.py 2010-01-26 06:09:15 +0000 |
325 | @@ -111,10 +111,12 @@ |
326 | def loadInternetPackageList(self, dir, arch): |
327 | listDir = os.path.join(dir, "lists") |
328 | debs = self.__parseSources(dir) |
329 | - filenames = self.__filesFromDebs(debs, arch, listDir) |
330 | + #filenames = self.__filesFromDebs(debs, arch, listDir) #extracted locations resolved by downloader |
331 | + #TODO: extract hashes for list files from Release files |
332 | + # and 'return zip(urls, tempnames, checksums)' instead |
333 | tempnames = self.__tempFilesFromDebs(debs, arch, listDir) |
334 | urls = self.__urlsFromDebs(debs, arch) |
335 | - return zip(urls, tempnames, filenames) # Returns urls, gzip file locations, and extracted locations |
336 | + return zip(urls, tempnames) # Returns urls, gzip file locations |
337 | |
338 | def getDependencies(self, dir, allPackages, packageName): |
339 | # Recusive function, takes package name, returns package information for each dependency |
340 | @@ -285,8 +287,8 @@ |
341 | def installPacks(self, projdir, packnames): |
342 | packsdir = os.path.join(projdir, 'packages') |
343 | run = self.__runRoot('xterm', |
344 | - '-e sh -c \"apt-get -y -o dir::cache::archives=\"%s\" ' \ |
345 | - 'install %s; echo \\\"Press any key to exit.\\\"; ' \ |
346 | + '-e sh -c \"apt-get -y -o dir::cache::archives=\\\"%s\\\" ' \ |
347 | + '--allow-unathenticated install %s; echo \\\"Press [ENTER] to exit.\\\"; ' \ |
348 | 'read x\"' % (packsdir, packnames)) |
349 | if run[0] != 0: |
350 | log.error(_('exit code:%i\n%s' % (run[0], run[1]))) |