diff -Nru miro-4.0.4/CREDITS miro-6.0/CREDITS
--- miro-4.0.4/CREDITS	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/CREDITS	2013-04-05 16:02:42.000000000 +0000
@@ -21,154 +21,89 @@
 Contributors
 ============
 
-This is the team of people who built Miro 4.0.  Thank you so much for
+This is the team of people who built Miro 6.0.  Thank you so much for
 your efforts, contributions, and support.
 
-* A. Aziz Yusak
-* AJ Davis
-* Aaron Gingrich
+* Nasser Al-Hilal
 * Aleksby
-* Alex
-* Alex Lancaster
-* Alexander
 * AndreSZS
-* Andreas Poisel
-* Andy
-* Anne Jonas
-* Arvid Norberg
-* Asheesh Laroia
-* Author: Helmut Pozimski
-* Ben
-* Ben Dean-Kawamura
-* Ben Monnahan
+* saidimu apale
+* ascensiontech
+* astyguy
 * Bhavana
-* BugginOut
-* Charles
-* Chimel
-* Chris Lahey
-* Chris Webber
+* nick black
+* Carsten Bormann
+* Matt Bret
+* Orhon Can
+* Edwin Castillo
+* Rob Chekaluk
+* Tiffiny Cheng
+* Chi88Gun
 * Christoph
+* Joel Cohen
+* Reynaldo Cruz
+* Ben Dean-Kawamura
+* Pan ~ dietmar
 * Cory Doctorow
-* Dan Winckler
-* Daniel O'Neill
-* Darren Simpson
-* Dave Glassco
-* David Haberthür
-* David Rosen
-* David Stoll
-* Dean Jansen
-* Derek Petersen
-* Doug Adams
-* Edwin Castillo
-* Elena
-* Eric "betazed" Mitchell
-* Ferd
-* Geoff Schmidt
-* Geoffrey Lee
-* George Young
-* Gregg Nicholas
-* Guillaume Marceau
-* Helen
-* Hendrik
-* Holmes Wilson
-* Ian Dunlop
-* Isaac
-* J.B. Nicholson-Owens
-* James Anastasios
-* James Lynch Jr
-* Jan Vilhuber
-* Janet
 * Janet Dragojevic
-* Jani Uusitalo
-* Jarek
-* Jason Brower
-* Jesse Patel
-* Joel Cohen
-* John Lilly
-* John Skilleter
-* Jon Stødle
-* Jonas
-* Jonas Emanuel Mueller
-* Jonas Emanuel Muller
-* Katerina
-* Kaz
-* Kaz Wesley
-* Khan
+* ebaklan
+* Eric
+* Marcio Faustino
+* William Penn Foundation
 * Knight Foundation
-* Leszek Izdebski
-* Luc Heinrich
-* Markus Golser
-* Matt Bret
-* Matt Kivela
-* Michal Jasinski
-* Michel Salim
 * Mitch Kapor Foundation
-* Mohammed
-* Monika Stepanyuk
-* Morgan Knutson
+* Surdna Foundation
 * Mozilla Foundation
-* Nasser Al-Hilal
-* Nicholas Reville
-* Nick Nassar
-* Nick Reville
-* Olena Alexyeenko
-* Open Society Institute
-* Opencast
-* Orhon Can
-* Osama Khalid
-* Pan ~ dietmar
-* Paul Fisher
-* Paul Swartz
-* Pieter Hartsook
-* Pinguy
-* Rahul Kalluri
 * Rappaport Family Foundation
-* Reynaldo Cruz
-* Ricardo
-* Rick Hess
-* Rob Chekaluk
-* Robbt
-* Roman
-* SSerge Sander
-* Scaythe
-* Scott Bennett
-* Sean Feeney
-* Serena Kim
-* Sergei
-* Shawn Maddock
-* Spencer
-* Stefan Magdlinski
-* Surdna Foundation
-* Svavar Kjarrval
-* Sylvain Picker
-* Tiffiny Cheng
+* Aaron Gingrich
+* Dave Glassco
+* Natalia Golubkov
 * Todd A. Griffith
-* Toni Lähdekorpi
-* Uwe Hermann
-* Valent
-* Will Kahn-Greene
-* William Penn Foundation
 * Yulia Gudkova
-* Zachary Adams
-* astyguy
-* bendk
-* davidpbrown
-* ebaklan
-* fritz
-* hrt
-* jeremy clarke
-* jon von
-* joseph
+* Luc Heinrich
+* Uwe Hermann
+* Rick Hess
+* Open Society Institute
+* Leszek Izdebski
+* Dean Jansen
+* Michal Jasinski
+* Anne Jonas
+* James Lynch Jr
 * julalex
-* lnxme1
+* Will Kahn-Greene
+* Rahul Kalluri
+* Katerina
+* kdc_net
+* Serena Kim
+* Matt Kivela
+* Morgan Knutson
+* Chris Lahey
+* Alex Lancaster
+* Asheesh Laroia
+* Geoffrey Lee
+* John Lilly
 * m.shamraeva
-* mathada.samartha
-* paul irish
-* pj
-* riotluck
-* saidimu apale
-* sg
+* Shawn Maddock
+* Stefan Magdlinski
+* markbechthold.01
+* Mike
+* Eric "betazed" Mitchell
+* Nick Nassar
+* Gregg Nicholas
+* Hans Ulrich Niedermann
+* Arvid Norberg
+* Opencast
+* Nicholas Reville
+* Michel Salim
+* Scaythe
+* Geoff Schmidt
+* Monika Stepanyuk
 * sveta.andriyenko
+* Paul Swartz
+* Jani Uusitalo
 * valeryanka
-* will
-* zeroconf
+* Jan Vilhuber
+* Chris Webber
+* Kaz Wesley
+* Holmes Wilson
+* Dan Winckler
diff -Nru miro-4.0.4/debian/changelog miro-6.0/debian/changelog
--- miro-4.0.4/debian/changelog	2014-09-05 00:12:56.000000000 +0000
+++ miro-6.0/debian/changelog	2014-10-27 18:10:09.000000000 +0000
@@ -1,26 +1,32 @@
-miro (4.0.4-1.1ubuntu3) utopic; urgency=medium
+miro (6.0-1) unstable; urgency=medium
 
-  * Rebuild against libav11.
+  * Acknowledge NMU, thanks Sebastian Ramacher.
+  * New upstream release (Closes: #692482, #754232, #764562).
+    + Drop linux/contrib/enmfp-codegen/* in upstream tarball, apparently
+      there's no source code available for those files.
+    + Now build-depends on libtag1-dev, libsqlite3-dev, and libboost1.55-dev.
+    + Fixes Youtube/Vimeo download/scraping (Closes: #689826).
+  * Standards-Version: 3.9.6 (no changes required).
+  * debian/patches:
+    + 10_movies_dir.patch: Update.
+    + 50_miro_debug_fix.patch: Update.
+    + 100_catch_keyerror_in_update_items.patch: Drop (obsolete).
+    + 130_libav9.patch: Drop (fixed upstream).
+    + 140_use_avconv.patch: Update, drop upstream-merged parts.
+    + 150_codec_id.patch: Add, fixes FTBFS due to libav 10 (Closes: #748861).
+      Thanks Andreas Cadhalpun.
+    + 160_fixyoutubedl.patch: Fix another Youtube downloads/scraping issue.
+  * Drop 'Recommends: python-psyco' (package no longer in the archive).
+  * Move 'libav-tools' from 'Suggests' to 'Depends', miro will not properly
+    start-up without it.
+  * Move gstreamer0.10-ffmpeg from 'Depends' to 'Suggests' for now; the
+    package is uninstallable atm (#766988) and seems to be optional for miro.
+  * Temporary lintian overrides ("use dh_python2 instead"):
+    + miro: depends-on-obsolete-package
+    + miro source: build-depends-on-obsolete-package
+  * Close long-obsolete request for lenny backport (Closes: #532021).
 
- -- Colin Watson  Fri, 05 Sep 2014 01:12:56 +0100
-
-miro (4.0.4-1.1ubuntu2) utopic; urgency=medium
-
-  * Grab patch from Debian's BTS to fix FTBFS against libav10.
-
- -- Logan Rosen  Sat, 31 May 2014 00:04:03 -0400
-
-miro (4.0.4-1.1ubuntu1) trusty; urgency=low
-
-  * Drop gstreamer0.10-ffmpeg to recommends.
-
- -- Dmitrijs Ledkovs  Fri, 22 Nov 2013 01:07:51 +0000
-
-miro (4.0.4-1.1build1) trusty; urgency=low
-
-  * No change rebuild against libav 9.
-
- -- Dmitrijs Ledkovs  Sun, 10 Nov 2013 00:07:00 +0000
+ -- Uwe Hermann  Mon, 27 Oct 2014 19:00:45 +0100
 
 miro (4.0.4-1.1) unstable; urgency=low
diff -Nru miro-4.0.4/debian/control miro-6.0/debian/control
--- miro-4.0.4/debian/control	2013-11-22 01:07:42.000000000 +0000
+++ miro-6.0/debian/control	2014-10-27 17:19:47.000000000 +0000
@@ -1,8 +1,7 @@
 Source: miro
 Section: net
 Priority: extra
-Maintainer: Ubuntu Developers
-XSBC-Original-Maintainer: Uwe Hermann
+Maintainer: Uwe Hermann
 Build-Depends: cdbs (>= 0.4.43),
         debhelper (>= 7.0.50~),
         python-all-dev,
@@ -11,8 +10,11 @@
         python-gtk2-dev,
         libwebkitgtk-dev,
         libavformat-dev,
+        libtag1-dev,
+        libsqlite3-dev,
+        libboost1.55-dev,
         pkg-config,
-Standards-Version: 3.9.2
+Standards-Version: 3.9.6
 Homepage: http://www.getmiro.com
 XS-Python-Version: >= 2.6
 
@@ -35,13 +37,13 @@
         python-webkit,
         python-pycurl,
         python-mutagen,
-        miro-data (>= 4.0.4)
-Recommends: python-psyco [i386 hurd-i386 netbsd-i386 kfreebsd-i386],
         gstreamer0.10-ffmpeg (>= 0.10.0),
+        libav-tools,
+        miro-data (>= 6.0)
 Suggests: ttf-dejavu,
         python-notify,
+        gstreamer0.10-ffmpeg (>= 0.10.0),
         gstreamer0.10-plugins-ugly (>= 0.10.0),
         libavahi-compat-libdnssd1,
-        libav-tools,
         ffmpeg2theora
 Conflicts: miro-data (<< 1.2.1)
 Description: GTK+ based RSS video aggregator
diff -Nru miro-4.0.4/debian/copyright miro-6.0/debian/copyright
--- miro-4.0.4/debian/copyright	2012-01-02 21:58:22.000000000 +0000
+++ miro-6.0/debian/copyright	2014-10-27 18:00:08.000000000 +0000
@@ -6,6 +6,11 @@
 
 http://www.getmiro.com
 
+Repackaged tarball:
+
+ The upstream miro-6.0.tar.gz tarball has been repackaged, the files
+ linux/contrib/enmfp-codegen/* have been removed (no source available).
+
 Upstream Authors:
 
 Nick Nassar
@@ -99,7 +104,7 @@
 ------------------------------------------------------------------------------
 
 Files: debian/*
-Copyright: © 2006 - 2012 Uwe Hermann
+Copyright: © 2006 - 2014 Uwe Hermann
 License: PD
  The packaging done by Uwe Hermann is hereby released as public domain.
diff -Nru miro-4.0.4/debian/miro.lintian-overrides miro-6.0/debian/miro.lintian-overrides
--- miro-4.0.4/debian/miro.lintian-overrides	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/miro.lintian-overrides	2014-10-27 18:02:33.000000000 +0000
@@ -1,2 +1,2 @@
 miro: embedded-feedparser-library usr/share/pyshared/miro/feedparser.py
-
+miro: depends-on-obsolete-package
diff -Nru miro-4.0.4/debian/patches/100_catch_keyerror_in_update_items.patch miro-6.0/debian/patches/100_catch_keyerror_in_update_items.patch
--- miro-4.0.4/debian/patches/100_catch_keyerror_in_update_items.patch	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/patches/100_catch_keyerror_in_update_items.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,19 +0,0 @@
-Fix for crash in update_items() when trying to delete an item that isn't
-in the dict. (LP: #413387)
-
-Index: a/lib/frontends/widgets/itemlist.py
-===================================================================
---- a/lib/frontends/widgets/itemlist.py	2011-06-05 23:42:53.000000000 +0200
-+++ b/lib/frontends/widgets/itemlist.py	2011-06-05 23:43:49.000000000 +0200
-@@ -504,7 +504,10 @@
-                 # Item not already displayed
-                 if should_show:
-                     to_add.append(info)
--                    del self._hidden_items[info.id]
-+                    try:
-+                        del self._hidden_items[info.id]
-+                    except KeyError:
-+                        pass # Item already gone
-                 else:
-                     self._hidden_items[info.id] = info
-             else:
diff -Nru miro-4.0.4/debian/patches/10_movies_dir.patch miro-6.0/debian/patches/10_movies_dir.patch
--- miro-4.0.4/debian/patches/10_movies_dir.patch	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/patches/10_movies_dir.patch	2014-10-26 20:56:48.000000000 +0000
@@ -1,10 +1,8 @@
 Change some default directories.
 
-Index: a/linux/plat/config.py
-===================================================================
---- a/linux/plat/config.py	2011-06-07 23:47:12.000000000 +0200
-+++ b/linux/plat/config.py	2011-06-07 23:47:28.000000000 +0200
-@@ -164,10 +164,10 @@
+--- a/linux/plat/config.py	2014-10-26 21:54:32.200340565 +0100
++++ a/linux/plat/config.py	2014-10-26 21:54:57.196399494 +0100
+@@ -164,10 +164,10 @@ def get(descriptor):
  
      if descriptor == prefs.MOVIES_DIRECTORY:
          value = os.path.expanduser(os.path.join(options.user_home,
diff -Nru miro-4.0.4/debian/patches/130_libav9.patch miro-6.0/debian/patches/130_libav9.patch
--- miro-4.0.4/debian/patches/130_libav9.patch	2013-09-24 00:09:43.000000000 +0000
+++ miro-6.0/debian/patches/130_libav9.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,118 +0,0 @@
-Description: Port to libav 9 API
-Author: Sebastian Ramacher
-Bug-Debian: http://bugs.debian.org/720810
-Last-Update: 2013-09-24
-
---- a/linux/miro-segmenter.c
-+++ b/linux/miro-segmenter.c
-@@ -40,7 +40,11 @@
-     AVCodecContext *output_codec_context;
-     AVStream *output_stream;
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    output_stream = avformat_new_stream(output_format_context, 0);
-+#else
-     output_stream = av_new_stream(output_format_context, 0);
-+#endif
-     if (!output_stream) {
-         fprintf(stderr, "Could not allocate stream\n");
-         exit(1);
-@@ -156,13 +160,21 @@
-         exit(1);
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    ret = avformat_open_input(&ic, input, ifmt, NULL);
-+#else
-     ret = av_open_input_file(&ic, input, ifmt, 0, NULL);
-+#endif
-     if (ret != 0) {
-         fprintf(stderr, "Could not open input file, make sure it is an mpegts file: %d\n", ret);
-         exit(1);
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    if (avformat_find_stream_info(ic, NULL) < 0) {
-+#else
-     if (av_find_stream_info(ic) < 0) {
-+#endif
-         fprintf(stderr, "Could not read stream information\n");
-         exit(1);
-     }
-@@ -215,12 +227,16 @@
-         }
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    av_dump_format(oc, 0, input, 1);
-+#else
-     if (av_set_parameters(oc, NULL) < 0) {
-         fprintf(stderr, "Invalid output format parameters\n");
-         exit(1);
-     }
- 
-     dump_format(oc, 0, input, 1);
-+#endif
- 
-     if (video_st) {
-         codec = avcodec_find_decoder(video_st->codec->codec_id);
-@@ -228,17 +244,29 @@
-             fprintf(stderr, "Could not find video decoder, key frames will not be honored\n");
-         }
- 
-+#if LIBAVCODEC_VERSION_MAJOR >= 54
-+        if (avcodec_open2(video_st->codec, codec, NULL) < 0) {
-+#else
-         if (avcodec_open(video_st->codec, codec) < 0) {
-+#endif
-             fprintf(stderr, "Could not open video decoder, key frames will not be honored\n");
-         }
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    if (avio_open(&oc->pb, output_filename, AVIO_FLAG_WRITE) < 0) {
-+#else
-     if (url_fopen(&oc->pb, output_filename, URL_WRONLY) < 0) {
-+#endif
-         fprintf(stderr, "Could not open '%s'\n", output_filename);
-         exit(1);
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    if (avformat_write_header(oc, NULL)) {
-+#else
-     if (av_write_header(oc)) {
-+#endif
-         fprintf(stderr, "Could not write mpegts header to first output file\n");
- 
-         exit(1);
-@@ -274,10 +302,17 @@
-         }
- 
-         if (segment_time - prev_segment_time >= segment_duration) {
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+            avio_flush(oc->pb);
-+            avio_close(oc->pb);
-+
-+            if (avio_open(&oc->pb, output_filename, AVIO_FLAG_WRITE) < 0) {
-+#else
-             put_flush_packet(oc->pb);
-             url_fclose(oc->pb);
- 
-             if (url_fopen(&oc->pb, output_filename, URL_WRONLY) < 0) {
-+#endif
-                 fprintf(stderr, "Could not open '%s'\n", output_filename);
-                 break;
-             }
-@@ -307,7 +342,11 @@
-         av_freep(&oc->streams[i]);
-     }
- 
-+#if LIBAVFORMAT_VERSION_MAJOR >= 54
-+    avio_close(oc->pb);
-+#else
-     url_fclose(oc->pb);
-+#endif
-     av_free(oc);
- 
-     /* End-of-transcode marker. */
diff -Nru miro-4.0.4/debian/patches/140_use_avconv.patch miro-6.0/debian/patches/140_use_avconv.patch
--- miro-4.0.4/debian/patches/140_use_avconv.patch	2013-09-24 01:51:02.000000000 +0000
+++ miro-6.0/debian/patches/140_use_avconv.patch	2014-10-26 21:08:54.000000000 +0000
@@ -1,6 +1,6 @@
 Description: Use avconv instead of ffmpeg
 Author: Sebastian Ramacher
-Last-Update: 2013-09-24
+Last-Update: 2014-10-26
 
 --- a/linux/plat/options.py
 +++ b/linux/plat/options.py
@@ -13,62 +13,3 @@
              alias="ffmpeg",
              helptext="Absolute path for ffmpeg binary.")
 
---- a/resources/conversions/android.conv
-+++ b/resources/conversions/android.conv
-@@ -2,7 +2,7 @@
- name: Android Devices
- executable: ffmpeg
- extension: mp4
--parameters: -i {input} -y -acodec aac -ab 160k -s {ssize} -vcodec libx264 -vpre slow -vpre ipod640 -f mp4 -threads 0 {output}
-+parameters: -i {input} -y -strict experimental -acodec aac -b:a 160k -s {ssize} -vcodec libx264 -pre:v slow -pre:v ipod640 -f mp4 -threads 0 {output}
- mediatype: video
- 
- [G2]
---- a/resources/conversions/apple.conv
-+++ b/resources/conversions/apple.conv
-@@ -3,7 +3,7 @@
- executable: ffmpeg
- extension: mp4
- ssize: 480x320
--parameters: -i {input} -acodec aac -ab 160k -s {ssize} -vcodec libx264 -vpre slow -vpre ipod640 -b 1200k -f mp4 -threads 0 {output}
-+parameters: -i {input} -strict experimental -acodec aac -b:a 160k -s {ssize} -vcodec libx264 -pre:v slow -pre:v ipod640 -b:v 1200k -f mp4 -threads 0 {output}
- mediatype: video
- 
- [iPhone]
---- a/resources/conversions/others.conv
-+++ b/resources/conversions/others.conv
-@@ -4,7 +4,7 @@
- [Playstation Portable (PSP)]
- executable: ffmpeg
- extension: mp4
--parameters: -i {input} -s 320x240 -b 512000 -ar 24000 -ab 64000 -f psp -r 29.97 {output}
-+parameters: -i {input} -s 320x240 -b:v 512000 -ar 24000 -b:a 64000 -f psp -r 29.97 {output}
- mediatype: video
- 
- [Kindle Fire]
-@@ -12,7 +12,7 @@
- executable: ffmpeg
- extension: mp4
- bitrate: 700000
--parameters: -i {input} -acodec aac -ab 96k -vcodec libx264 -vpre slow -f mp4 -crf 22 {output}
-+parameters: -i {input} -strict experimental -acodec aac -b:a 96k -vcodec libx264 -pre:v slow -f mp4 -crf 22 {output}
- mediatype: video
- ssize: 1024x600
- 
-@@ -20,14 +20,14 @@
- only_on: osx
- executable: ffmpeg
- extension: webm
--parameters: -i {input} -f webm -vcodec libvpx -acodec libvorbis -ab 160000 -sameq {output}
-+parameters: -i {input} -f webm -vcodec libvpx -acodec libvorbis -b:a 160000 -sameq {output}
- mediatype: video
- 
- [MP4]
- extension: mp4
- executable: ffmpeg
- extension: mp4
--parameters: -i {input} -acodec aac -ab 96k -vcodec libx264 -vpre slow -f mp4 -crf 22 {output}
-+parameters: -i {input} -strict experimental -acodec aac -b:a 96k -vcodec libx264 -pre:v slow -f mp4 -crf 22 {output}
- mediatype: video
- 
- [MP3]
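
The renames in 140_use_avconv.patch above all follow one pattern: avconv dropped ffmpeg's legacy option names in favour of per-stream specifiers, so -ab becomes -b:a (audio bitrate), -b becomes -b:v (video bitrate), and -vpre becomes -pre:v (video preset), with '-strict experimental' added wherever the built-in AAC encoder is requested. Substituting sample values for the {input}, {ssize}, and {output} placeholders (the file names below are made up), the patched Apple profile expands to roughly:

    avconv -i input.avi -strict experimental -acodec aac -b:a 160k \
        -s 480x320 -vcodec libx264 -pre:v slow -pre:v ipod640 \
        -b:v 1200k -f mp4 -threads 0 output.mp4
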
diff -Nru miro-4.0.4/debian/patches/150_codec_id.patch miro-6.0/debian/patches/150_codec_id.patch
--- miro-4.0.4/debian/patches/150_codec_id.patch	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/debian/patches/150_codec_id.patch	2014-10-26 20:52:50.000000000 +0000
@@ -0,0 +1,19 @@
+Description: Rename CODEC_ID_* to AV_CODEC_ID_*
+
+Author: Andreas Cadhalpun
+Last-Update: <2014-05-21>
+
+--- a/linux/miro-segmenter.c
++++ b/linux/miro-segmenter.c
+@@ -78,7 +78,11 @@ static AVStream *add_output_stream(AVFor
+     output_codec_context->sample_rate = input_codec_context->sample_rate;
+     output_codec_context->channels = input_codec_context->channels;
+     output_codec_context->frame_size = input_codec_context->frame_size;
++#if LIBAVCODEC_VERSION_MAJOR > 54
++    if ((input_codec_context->block_align == 1 && input_codec_context->codec_id == AV_CODEC_ID_MP3) || input_codec_context->codec_id == AV_CODEC_ID_AC3) {
++#else
+     if ((input_codec_context->block_align == 1 && input_codec_context->codec_id == CODEC_ID_MP3) || input_codec_context->codec_id == CODEC_ID_AC3) {
++#endif
+         output_codec_context->block_align = 0;
+     }
+     else {
diff -Nru miro-4.0.4/debian/patches/160_fixyoutubedl.patch miro-6.0/debian/patches/160_fixyoutubedl.patch
--- miro-4.0.4/debian/patches/160_fixyoutubedl.patch	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/debian/patches/160_fixyoutubedl.patch	2014-10-26 22:17:10.000000000 +0000
@@ -0,0 +1,15 @@
+https://github.com/pculture/miro/commit/b08dcb92bb3cc3d8b969d27e8955ed8d5bd28164
+
+--- a/lib/flashscraper.py	2014-10-26 23:15:52.198874390 +0100
++++ a/lib/flashscraper.py	2014-10-26 23:16:21.690961453 +0100
+@@ -134,9 +134,7 @@ def _youtube_callback_step2(info, video_
+     # strip url= from url=xxxxxx, strip trailer.  Strip duplicate params.
+     for fmt, stream_map_data in zip(fmt_list, stream_map):
+         stream_map = cgi.parse_qs(stream_map_data)
+-        url_base = stream_map['url'][0]
+-        sig_part = '&signature=' + stream_map['sig'][0]
+-        fmt_url_map[fmt] = url_base + sig_part
++        fmt_url_map[fmt] = stream_map['url'][0]
+ 
+     title = params.get("title", ["No title"])[0]
+     try:
diff -Nru miro-4.0.4/debian/patches/170_no_enfmp.patch miro-6.0/debian/patches/170_no_enfmp.patch
--- miro-4.0.4/debian/patches/170_no_enfmp.patch	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/debian/patches/170_no_enfmp.patch	2014-10-27 17:37:15.000000000 +0000
@@ -0,0 +1,13 @@
+Don't build enmfp.
+
+--- a/linux/setup.py
++++ a/linux/setup.py
+@@ -468,7 +468,7 @@ class miro_build(build):
+     def run(self):
+         self.build_segmenter()
+         self.build_echoprint_codegen()
+-        self.build_enmfp_codegen()
++        # self.build_enmfp_codegen()
+         build.run(self)
+ 
+ class test_system(Command):
diff -Nru miro-4.0.4/debian/patches/50_miro_debug_fix.patch miro-6.0/debian/patches/50_miro_debug_fix.patch
--- miro-4.0.4/debian/patches/50_miro_debug_fix.patch	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/patches/50_miro_debug_fix.patch	2014-10-26 20:57:14.000000000 +0000
@@ -1,7 +1,7 @@
 Fix non-working gdb invocation.
 
---- a/linux/miro.orig	2011-07-03 18:55:22.000000000 +0200
-+++ b/linux/miro	2011-07-03 18:56:39.000000000 +0200
+--- a/linux/miro	2014-06-09 00:29:03.271288218 +0200
++++ a/linux/miro	2014-06-09 00:27:51.902548496 +0200
 @@ -38,9 +38,9 @@
      esac
  done
@@ -21,5 +21,6 @@
 -            -ex 'run' --args $PYTHON ./miro.real --sync "$@"
 +            -ex 'run' --args $PYTHON /usr/bin/miro.real --sync "$@"
  else
-     miro.real "$@"
+-    miro.real "$@"
++    /usr/bin/miro.real "$@"
  fi
diff -Nru miro-4.0.4/debian/patches/CodecID.patch miro-6.0/debian/patches/CodecID.patch
--- miro-4.0.4/debian/patches/CodecID.patch	2014-05-31 04:03:55.000000000 +0000
+++ miro-6.0/debian/patches/CodecID.patch	1970-01-01 00:00:00.000000000 +0000
@@ -1,19 +0,0 @@
-Description: Rename CODEC_ID_* to AV_CODEC_ID_*
-
-Author: Andreas Cadhalpun
-Last-Update: <2014-05-21>
-
---- miro-4.0.4.orig/linux/miro-segmenter.c
-+++ miro-4.0.4/linux/miro-segmenter.c
-@@ -78,7 +78,11 @@ static AVStream *add_output_stream(AVFor
-     output_codec_context->sample_rate = input_codec_context->sample_rate;
-     output_codec_context->channels = input_codec_context->channels;
-     output_codec_context->frame_size = input_codec_context->frame_size;
-+#if LIBAVCODEC_VERSION_MAJOR > 54
-+    if ((input_codec_context->block_align == 1 && input_codec_context->codec_id == AV_CODEC_ID_MP3) || input_codec_context->codec_id == AV_CODEC_ID_AC3) {
-+#else
-     if ((input_codec_context->block_align == 1 && input_codec_context->codec_id == CODEC_ID_MP3) || input_codec_context->codec_id == CODEC_ID_AC3) {
-+#endif
-         output_codec_context->block_align = 0;
-     }
-     else {
diff -Nru miro-4.0.4/debian/patches/series miro-6.0/debian/patches/series
--- miro-4.0.4/debian/patches/series	2014-05-31 04:03:55.000000000 +0000
+++ miro-6.0/debian/patches/series	2014-10-27 17:36:07.000000000 +0000
@@ -1,7 +1,7 @@
 10_movies_dir.patch
 50_miro_debug_fix.patch
-100_catch_keyerror_in_update_items.patch
 120_miro.desktop.patch
-130_libav9.patch
 140_use_avconv.patch
-CodecID.patch
+150_codec_id.patch
+160_fixyoutubedl.patch
+170_no_enfmp.patch
diff -Nru miro-4.0.4/debian/README.Debian miro-6.0/debian/README.Debian
--- miro-4.0.4/debian/README.Debian	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/README.Debian	1970-01-01 00:00:00.000000000 +0000
@@ -1,32 +0,0 @@
-Miro for Debian
----------------
-
- * The default Miro movies directory in Debian is ~/.miro/Movies
-   instead of ~/Videos/Miro.
-
- * Also, the non-video directory in Debian is ~/.miro/Nonvideo
-   instead of ~/Desktop.
-
- * In order to enable the 'Miro Sharing' feature (stream and download files
-   to and from other Miros on the local network and to the Miro iPad app)
-   you must install 'libavahi-compat-libdnssd1' (and restart Miro), if it's
-   not already installed:
-
-   $ aptitude install libavahi-compat-libdnssd1
-
- * You can install 'python-psyco' which will likely speed up Miro a bit.
-
- * For even further audio/video support you can also install the following:
-
-   $ aptitude install gstreamer0.10-plugins-ugly
-
- * If you experience Miro crashes/segfaults, please install 'gdb' and then
-   run 'miro --debug' (which runs Miro in gdb) and try to reproduce the
-   bug. If that worked, please report the bug upstream and attach the gdb
-   output (type e.g. "bt" on the gdb prompt after the crash).
-
-   If needed, please also install additional -dbg packages, e.g.
-   python2.6-dbg, python-gtk2-dbg, or libwebkit-1.0-2-dbg.
-
- -- Uwe Hermann, Tue, 07 Jun 2011 21:26:41 +0200
-
diff -Nru miro-4.0.4/debian/rules miro-6.0/debian/rules
--- miro-4.0.4/debian/rules	2012-01-02 21:56:34.000000000 +0000
+++ miro-6.0/debian/rules	2014-10-27 18:08:14.000000000 +0000
@@ -45,6 +45,7 @@
 
 clean::
 	cd linux && make clean
+	cd linux/contrib/echoprint-codegen/src && make clean
 	rm -f lib/frontends/widgets/infolist/infolist.c
 	rm -f lib/frontends/widgets/gtk/webkitgtkhacks.c
 	rm -f lib/frontends/widgets/gtk/pygtkhacks.c
diff -Nru miro-4.0.4/debian/source/lintian-overrides miro-6.0/debian/source/lintian-overrides
--- miro-4.0.4/debian/source/lintian-overrides	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/debian/source/lintian-overrides	2014-10-27 18:02:27.000000000 +0000
@@ -0,0 +1 @@
+miro source: build-depends-on-obsolete-package
diff -Nru miro-4.0.4/extensions/apiexample.miroext miro-6.0/extensions/apiexample.miroext
--- miro-4.0.4/extensions/apiexample.miroext	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/extensions/apiexample.miroext	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,9 @@
+[extension]
+name = Extension API Example
+version = core
+enabled_by_default = False
+module = apiexample
+description = Example extension that shows how to use the API
+[hooks]
+item_list_filters = apiexample:get_item_list_filters
+item_context_menu = apiexample:update_item_context_menu
diff -Nru miro-4.0.4/extensions/apiexample.py miro-6.0/extensions/apiexample.py
--- miro-4.0.4/extensions/apiexample.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/extensions/apiexample.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,71 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+import functools
+import logging
+
+from miro import api
+from miro.frontends.widgets import widgetsapi
+
+class StartsWithVowelItemFilter(widgetsapi.ExtensionItemFilter):
+    """Sample item filter for items that start with vowels."""
+
+    key = u'starts-with-vowel'
+    # FIXME: it would be nice to use gettext for user_label, but we don't
+    # really have support for that in extensions.
+    user_label = 'Vowel'
+
+    def add_to_query(self, query):
+        sql = "LOWER(SUBSTR(title, 1, 1)) IN ('a', 'e', 'i', 'o', 'u')"
+        query.add_complex_condition(['title'], sql)
+
+def get_item_list_filters(type_, id_):
+    # Implement the item_list_filter hook by adding the
+    # StartsWithVowelItemFilter
+    return [StartsWithVowelItemFilter()]
+
+def context_menu_action(selection):
+    logging.info("Example Context menu action clicked: %s", selection)
+
+def update_item_context_menu(selection, menu):
+    # implement the item_context_menu hook by adding an item at the top of
+    # the menu that activates context_menu_action with the current selection
+    action = functools.partial(context_menu_action, selection)
+    menu.insert(0, ('Example Action', action))
+
+def load(context):
+    # only load if we are running the widgets frontend
+    # FIXME: get_frontend() doesn't seem to be working, skip check for now
+    #if api.get_frontend() != 'widgets':
+    #    raise api.FrontendNotSupported('Widgets frontend only')
+    pass
+
+def unload():
+    pass
+
diff -Nru miro-4.0.4/extensions/watchhistory/__init__.py miro-6.0/extensions/watchhistory/__init__.py
--- miro-4.0.4/extensions/watchhistory/__init__.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/extensions/watchhistory/__init__.py	2013-04-05 16:02:42.000000000 +0000
@@ -31,7 +31,7 @@
 
 WATCHER = None
 
-def load():
+def load(context):
     """Loads the watchhistory module.
     """
     global WATCHER
diff -Nru miro-4.0.4/extensions/watchhistory/main.py miro-6.0/extensions/watchhistory/main.py
--- miro-4.0.4/extensions/watchhistory/main.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/extensions/watchhistory/main.py	2013-04-05 16:02:42.000000000 +0000
@@ -74,7 +74,7 @@
         if self.csv_writer and self.item_info:
             row = [
                 time.ctime(),
-                self.item_info.name,
+                self.item_info.title,
                 self.item_info.duration]
             self.csv_writer.writerow(row)
             # we wipe out self.item_info because will-play gets called
diff -Nru miro-4.0.4/lib/amazon.py miro-6.0/lib/amazon.py
--- miro-4.0.4/lib/amazon.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/amazon.py	2013-04-05 16:02:42.000000000 +0000
@@ -39,6 +39,7 @@
 
 from miro import app
 from miro import httpclient
+from miro import fileutil
 try:
     import pyDes as des
 except ImportError:
@@ -56,17 +57,15 @@
 
 def is_amazon_url(url):
     parts = urlparse.urlparse(url)
-    return ((parts.netloc.startswith('amazon.') or
-             parts.netloc.startswith('www.amazon')) and
-            (parts.path.endswith('.amz') or
+    return ((parts.path.endswith('.amz') or
             parts.path.endswith('.m3u')))
 
 def is_amazon_content_type(content_type):
     """
     Returns True if this is a content type from Amazon.
     """
-    return content_type in ('audio/x-amzxml',
-                            'audio/x-mpegurl')
+    return content_type.startswith(('audio/x-amzxml',
+                                    'audio/x-mpegurl'))
 
 def download_file(url, handle_unknown_callback):
     """
@@ -77,6 +76,17 @@
     def callback(data):
         _amazon_callback(data, unknown)
 
+    if url.startswith('file://'):
+        path = url[7:]
+        try:
+            with file(path) as f:
+                if path.endswith('.m3u'):
+                    _m3u_callback(f.read())
+                else:
+                    _amz_callback(f.read())
+        finally:
+            fileutil.remove(path)
+        return
     options = httpclient.TransferOptions(url)
     options.requires_cookies = True
     transfer = httpclient.CurlTransfer(options, callback,
@@ -92,15 +102,31 @@
         unknown()
         return
 
-    if data['content-type'] == 'audio/x-amzxml': # .amz file:
-        _amz_callback(data)
-    elif data['content-type'] == 'audio/x-mpegurl': # .m3u file:
-        _m3u_callback(data)
+    if data['content-type'].startswith('audio/x-amzxml'): # .amz file:
+        _amz_callback(data['body'])
+    elif data['content-type'].startswith('audio/x-mpegurl'): # .m3u file:
+        _m3u_callback(data['body'])
 
 def _amz_callback(data):
-    content = decrypt_amz(base64.b64decode(data['body'])).rstrip('\x00\x08')
-
-    dom = minidom.parseString(content)
+    if data.lstrip().startswith('<?xml'): # plain XML
+        content = data.strip()
+    else:
+        try:
+            content = decrypt_amz(base64.b64decode(data)).rstrip('\x00\x08')
+        except Exception:
+            app.controller.failed_soft(
+                '_amz_callback',
+                'could not b64decde/decrypt:\n%r' % data,
+                with_exception=True)
+            return
+
+    try:
+        dom = minidom.parseString(content)
+    except Exception:
+        app.controller.failed_soft('_amz_callback',
+                                   'could not parse data:\n%r' % content,
+                                   with_exception=True)
+        return
 
     from miro.singleclick import _build_entry, download_video
 
@@ -130,7 +156,7 @@
 def _m3u_callback(data):
     from miro.singleclick import _build_entry, download_video
 
-    for line in data['body'].split('\n'):
+    for line in data.split('\n'):
         line = line.strip()
         if line.startswith('#'): # comment
             continue
diff -Nru miro-4.0.4/lib/api.py miro-6.0/lib/api.py
--- miro-4.0.4/lib/api.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/api.py	2013-04-05 16:02:42.000000000 +0000
@@ -29,6 +29,40 @@
 
 """miro.api -- API for extensions.
 
+Hook functions
+-------------------------
+Hook functions are used to hook into core components in a well-defined way.
+For example, the 'item_list_filter' hook is used to add item filters to the
+top of an item list.
+
+If your extension wants to implement a hook function, add a "hooks" section
+in your config file containing a list of hooks.  For each hook, the key is
+the name of the hook and the value specifies where to find the hook
+function.  The values are in the form of package.module:path.to.obj.
+
+Here's an example [hooks] section::
+
+    [hooks]
+    hook1 = myext:handle_hook
+    hook2 = myext.foo:handle_hook
+    hook3 = myext:hook_obj.handle_hook
+
+In this example, hook1 will be handled by handle_hook() inside the module
+myext, hook2 will be handled by the handle_hook() function inside the module
+myext.foo, and hook3 will be handled by the handle_hook() method of the
+hook_obj object inside the module myext.
+
+Hook functions help keep a stable API for extensions over different releases
+and allow extensions to coexist better.  You can probably achieve the same
+results by importing core modules directly and monkey patching things, but
+this approach will almost certainly break when the core code changes or when
+another extension tries to do the same thing.
+
+Your extension can also define a hook for other extensions to use.  You can
+use the hook_invoke() method to call all functions registered for a hook you
+define.
+
+
 .. Note:: This API is missing a lot of important stuff.
 
 If you're interested in
@@ -46,11 +80,22 @@
     "get_platform",
     "get_frontend",
     "get_support_directory",
+    "hook_invoke",
 ]
 
 # increase this by 1 every time the API changes
 APIVERSION = 0
 
+import logging
+import os
+
+import sqlite3
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
+from miro import app
 from miro import signals
 
 class ExtensionException(StandardError):
@@ -121,3 +166,156 @@
     """
     from miro import app, prefs
     return app.config.get(prefs.SUPPORT_DIRECTORY)
+
+class StorageManager(object):
+    """Manages data for an extension.
+
+    StorageManagers allow two kinds of data storage:
+
+    - Simple Storage: simple key/value pair storage.  Useful for extensions
+      with basic storage needs.  See the set_value(), get_value(), and
+      clear_value() methods.
+
+    - SQLite Storage: Use an SQLite connection.  Use this if you have
+      complex storage needs and want a relational database to handle it.
+      Call get_sqlite_connection() to use this.
+
+    A StorageManager object is passed into each extension's load() function
+    via the context argument.  Extensions must use this object for their
+    storage needs.  Do not create a new StorageManager object.
+
+    Simple and SQLite storage can be used together if needed, however they
+    share the same underlying SQLite connection.  You should avoid using the
+    simple storage API while in the middle of an SQLite transaction.
+    """
+
+    def __init__(self, unique_name):
+        """Create a StorageManager
+
+        :param unique_name: unique string to name the sqlite file with
+        """
+        self._unique_name = unique_name
+        # Sqlite connection/cursor.  We create these lazily because many
+        # extensions won't use their StorageManager
+        self._connection = None
+        self._cursor = None
+        # stores if we've run through _ensure_simple_api_table()
+        self._checked_for_simple_api_table = False
+
+    def _ensure_connection(self):
+        if self._connection is None:
+            self._connection = sqlite3.connect(self._sqlite_path(),
+                                               isolation_level=None)
+            self._cursor = self._connection.cursor()
+
+    def _sqlite_path(self):
+        filename = 'extension-db-%s.sqlite' % self._unique_name
+        return os.path.join(get_support_directory(), filename)
+
+    def get_sqlite_connection(self):
+        self._ensure_connection()
+        return self._connection
+
+    def _ensure_simple_api_table(self):
+        """Ensure the table we need for the simple API has been created.
+        """
+        if self._checked_for_simple_api_table:
+            return
+        self._ensure_connection()
+        self._cursor.execute("SELECT COUNT(*) FROM sqlite_master "
+                             "WHERE type='table' and name = 'simple_data'")
+        if self._cursor.fetchone()[0] == 0:
+            self._cursor.execute("CREATE TABLE simple_data "
+                                 "(key TEXT PRIMARY KEY, value TEXT)")
+        self._checked_for_simple_api_table = True
+
+    def set_value(self, key, value):
+        """Set a value using the simple API
+
+        set_value() stores a value that you can later retrieve with
+        get_value()
+
+        :param key: key to set (unicode or an ASCII bytestring)
+        :param value: value to set
+        """
+        self._ensure_simple_api_table()
+        self._cursor.execute("INSERT OR REPLACE INTO simple_data "
+                             "(key, value) VALUES (?, ?)",
+                             (key, json.dumps(value)))
+        self._connection.commit()
+
+    def get_value(self, key):
+        """Get a value using the simple API
+
+        get_value() retrieves a value that was previously set with
+        set_value().
+
+        :param key: key to retrieve
+        :returns: value set with set_value()
+        :raises KeyError: key not set
+        """
+        self._ensure_simple_api_table()
+        self._cursor.execute("SELECT value FROM simple_data WHERE key=?",
+                             (key,))
+        row = self._cursor.fetchone()
+        if row is None:
+            raise KeyError(key)
+        else:
+            return json.loads(row[0])
+
+    def key_exists(self, key):
+        """Test if a key is stored using the simple API
+
+        :param key: key to retrieve
+        :returns: True if a value set with set_value()
+        """
+        self._ensure_simple_api_table()
+        self._cursor.execute("SELECT value FROM simple_data WHERE key=?",
+                             (key,))
+        return self._cursor.fetchone() is not None
+
+    def clear_value(self, key):
+        """Clear a value using the simple API
+
+        clear_value() unsets a value that was set with set_value().
+
+        Calling clear_value() with a key that has not been set results in a
+        no-op.
+
+        :param key: key to clear
+        """
+        self._ensure_simple_api_table()
+        self._cursor.execute("DELETE FROM simple_data WHERE key=?", (key,))
+        self._connection.commit()
+
+class ExtensionContext(object):
+    """ExtensionContext -- Stores objects specific to an extension
+
+    ExtensionContexts are passed in to the load() method for each extension.
+
+    Attributes:
+
+    - storage_manager: StorageManager for the extension
+
+    New attributes will be added as we add to the extension system
+    """
+    def __init__(self, unique_name):
+        self.storage_manager = StorageManager(unique_name)
+
+def hook_invoke(hook_name, *args, **kwargs):
+    """Call all functions registered for a hook.
+
+    We will call each function registered with hook_register() with
+    hook_name.  args and kwargs are used to call the hook functions.
+
+    We will return a list of return values, one for each registered hook.
+    """
+    results = []
+    for ext in app.extension_manager.extensions_for_hook(hook_name):
+        try:
+            retval = ext.invoke_hook(hook_name, *args, **kwargs)
+        except StandardError:
+            # hook func raised an error.  Log it, then ignore
+            logging.exception("exception calling hook function %s ", ext.name)
+            continue
+        else:
+            results.append(retval)
+    return results
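
The hook and storage APIs introduced above combine into very small extensions. The following sketch is not part of the diff; the module name countext and the hook name my_hook are hypothetical, but every call matches the API as documented: load() receives an ExtensionContext, the .miroext [hooks] section maps hook names to module:function paths, and hook_invoke() runs every registered handler.

    # countext.py -- hypothetical extension; counts hook invocations.
    # Registered in its .miroext file with:
    #   [hooks]
    #   my_hook = countext:handle_hook

    _storage = None

    def load(context):
        # context.storage_manager is the StorageManager described above
        global _storage
        _storage = context.storage_manager
        if not _storage.key_exists(u'calls'):
            _storage.set_value(u'calls', 0)

    def handle_hook(*args, **kwargs):
        # simple-storage round trip: read, bump, write back
        _storage.set_value(u'calls', _storage.get_value(u'calls') + 1)

    def unload():
        pass

Whoever defines the hook (the core or another extension) then calls api.hook_invoke('my_hook') and gets back one return value per extension that registered a handler.
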
diff -Nru miro-4.0.4/lib/appconfig.py miro-6.0/lib/appconfig.py
--- miro-4.0.4/lib/appconfig.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/appconfig.py	2013-04-05 16:02:42.000000000 +0000
@@ -37,7 +37,7 @@
 import logging
 import traceback
 
-from miro import util
+from miro import buildutils
 from miro.plat import resources
 
 class AppConfig(object):
@@ -45,7 +45,7 @@
         self.theme_vars = {}
 
         app_config_path = resources.path('app.config')
-        self.default_vars = util.read_simple_config_file(app_config_path)
+        self.default_vars = buildutils.read_simple_config_file(app_config_path)
 
         self.load_theme(theme)
 
@@ -54,7 +54,8 @@
             logging.info("Using theme %s", theme)
             theme_app_config = resources.theme_path(theme, 'app.config')
             try:
-                self.theme_vars = util.read_simple_config_file(theme_app_config)
+                self.theme_vars = buildutils.read_simple_config_file(
+                    theme_app_config)
             except EnvironmentError:
                 logging.warn("Error loading theme: %s\n%s",
                              theme_app_config, traceback.format_exc())
diff -Nru miro-4.0.4/lib/app.py miro-6.0/lib/app.py
--- miro-4.0.4/lib/app.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/app.py	2013-04-05 16:02:42.000000000 +0000
@@ -39,14 +39,17 @@
 # list of active renderers
 renderers = []
 
+# donation manager singleton object
+donate_manager = None
+
 # database object
 db = None
 
-# stores ItemInfo objects so we can quickly fetch them
-item_info_cache = None
+# BulkSQLManager for the main miro database
+bulk_sql_manager = None
 
-# command line arguments for thumbnailer (linux)
-movie_data_program_info = None
+# DBInfo object for the main miro database
+db_info = None
 
 # configuration data
 config = None
@@ -69,25 +72,62 @@
 # platform/frontend specific directory watcher
 directory_watcher = None
 
+# MetadataManager for local items
+local_metadata_manager = None
+
 # signal emitters for when config data changes
 backend_config_watcher = None
 frontend_config_watcher = None
 downloader_config_watcher = None
 
-# sends MetadataProgressUpdate messages to the frontend
-metadata_progress_updater = None
+# global state managers
+download_state_manager = None
+icon_cache_updater = None
+movie_data_updater = None
 
 # debugmode adds a bunch of computation that's useful for development
 # and debugging.  initialized to None; set to True/False depending on
 # mode
 debugmode = None
 
+#
+# Frontend API class.  Each frontend should define a subclass of this and
+# implement the methods.
+#
+class Frontend(object):
+    def call_on_ui_thread(self, func, *args, **kwargs):
+        """Call a function at a later time on the UI thread."""
+        raise NotImplementedError()
+
+    def run_choice_dialog(self, title, description, buttons):
+        """Show the database error dialog and wait for a choice.
+
+        This method should block until the choice is picked.  Depending on
+        the frontend other events may still be processed or not.
+
+        :returns: button that was chosen or None if the dialog was closed.
+        """
+        raise NotImplementedError()
+
+    def quit(self):
+        """Quit Miro."""
+        raise NotImplementedError()
+
+frontend = Frontend()
+
+# name of the running frontend
+frontend_name = None
+
 # widget frontend adds these
 # --------------------------
 
 # application object
 widgetapp = None
 
+# ConnectionPoolTracker object -- Note: this object could be made thread-safe
+# pretty easily, but right now it only should be used in the frontend thread.
+connection_pools = None
+
 # handles the right-hand display
 display_manager = None
 
@@ -130,6 +170,8 @@
 # gtk/windows item type sniffer
 get_item_type = None
 
+# Tracks ItemLists that are in-use
+item_list_pool = None
 
 # cli frontend adds these
 # -----------------------
diff -Nru miro-4.0.4/lib/autodler.py miro-6.0/lib/autodler.py
--- miro-4.0.4/lib/autodler.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/autodler.py	2013-04-05 16:02:42.000000000 +0000
@@ -39,10 +39,10 @@
     the search downloads feed gets combined with the search feed (ss #11778)
 
     """
-    if feed.origURL == u'dtv:searchDownloads':
+    if feed.orig_url == u'dtv:searchDownloads':
         return u"dtv:search"
 
-    return feed.origURL
+    return feed.orig_url
 
 class Downloader:
     def __init__(self, is_auto):
diff -Nru miro-4.0.4/lib/buildutils.py miro-6.0/lib/buildutils.py
--- miro-4.0.4/lib/buildutils.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/buildutils.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,117 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""``miro.buildutils`` -- Utilities for building miro
+
+This module stores functions that touch on the build system for miro.  This
+includes setup.py files, files that process app.config, etc.
+
+Since this module is used by setup.py on 3 different platforms, it should
+only import from the python standard library.
+"""
+
+import re
+import subprocess
+
+CONFIG_LINE_RE = re.compile(r"^([^ ]+) *= *([^\r\n]*)[\r\n]*$")
+
+def read_simple_config_file(path):
+    """Parse a configuration file in a very simple format and return contents
+    as a dict.
+
+    Each line is either whitespace or "Key = Value".  Whitespace is ignored
+    at the beginning of Value, but the remainder of the line is taken
+    literally, including any whitespace.
+
+    Note: There is no way to put a newline in a value.
+    """
+    ret = {}
+
+    filep = open(path, "rt")
+    for line in filep.readlines():
+        # Skip blank lines
+        if not line.strip():
+            continue
+
+        # Otherwise it'd better be a configuration setting
+        match = CONFIG_LINE_RE.match(line)
+        if not match:
+            print ("WARNING: %s: ignored bad configuration directive '%s'" %
+                   (path, line))
+            continue
+
+        key = match.group(1)
+        value = match.group(2)
+        if key in ret:
+            print "WARNING: %s: ignored duplicate directive '%s'" % (path,
+                                                                     line)
+            continue
+
+        ret[key] = value
+
+    return ret
+
+def write_simple_config_file(path, data):
+    """Given a dict, write a configuration file in the format that
+    read_simple_config_file reads.
+    """
+    filep = open(path, "wt")
+
+    for k, v in data.iteritems():
+        filep.write("%s = %s\n" % (k, v))
+
+    filep.close()
+
+def query_revision():
+    """Called at build-time to ask git for the revision of this
+    checkout.
+
+    Returns the (url, revision) on success and None on failure.
+    """
+    url = "unknown"
+    revision = "unknown"
+    try:
+        proc = subprocess.Popen(["git", "config", "--list"],
+                                stdout=subprocess.PIPE)
+        info = proc.stdout.read().splitlines()
+        proc.stdout.close()
+        origline = "remote.origin.url"
+        info = [m for m in info if m.startswith(origline)]
+        if info:
+            url = info[0][len(origline)+1:].strip()
+
+        proc = subprocess.Popen(["git", "rev-parse", "HEAD"],
+                                stdout=subprocess.PIPE)
+        info = proc.stdout.read()
+        proc.stdout.close()
+        revision = info[0:8]
+        return (url, revision)
+    except StandardError, exc:
+        print "Exception thrown when querying revision: %s" % exc
+    return (url, revision)
+ " path: %s" % (path,)) + app.controller.failed_soft("commandline.add_video", msg) + return None + @eventloop.idle_iterator def add_videos(paths): # filter out non-existent paths paths = [p for p in paths if fileutil.exists(p)] - for path in paths: - app.metadata_progress_updater.will_process_path(path) path_iter = iter(paths) finished = False yield # yield after doing prep work - while not finished: - finished = _add_batch_of_videos(path_iter, 0.5) - yield # yield after each batch + with app.local_metadata_manager.bulk_add(): + while not finished: + finished = _add_batch_of_videos(path_iter, 0.1) + yield # yield after each batch def _add_batch_of_videos(path_iter, max_time): """Add a batch of videos for add_video() @@ -125,9 +128,7 @@ app.bulk_sql_manager.start() try: for path in path_iter: - if not add_video(path, manual_feed=manual_feed): - # video was a duplicate, undo the will_process_path() call - app.metadata_progress_updater.path_processed(path) + add_video(path, manual_feed=manual_feed) if time.time() - start_time > max_time: return False return True @@ -138,7 +139,7 @@ manual_feed = feed.Feed.get_manual_feed() for i in manual_feed.items: if ((i.downloader is not None - and i.downloader.status.get('infohash') == torrent_info_hash)): + and i.downloader.info_hash == torrent_info_hash)): logging.info("not downloading %s, it's already a download for %s", path, i) if i.downloader.get_state() in ('paused', 'stopped'): @@ -271,9 +272,9 @@ # to use Miro to play videos and Miro goes to play a video # externally, then it causes an infinite loop and dies. if added_videos and app.config.get(prefs.PLAY_IN_MIRO): - item_infos = [itemsource.DatabaseItemSource._item_info_for(i) - for i in _command_line_videos] - messages.PlayMovie(item_infos).send_to_frontend() + item_ids = [i.id for i in _command_line_videos] + item_infos = app.db.fetch_item_infos(item_ids) + messages.PlayMovies(item_infos).send_to_frontend() if added_downloads: # FIXME - switch to downloads tab? diff -Nru miro-4.0.4/lib/config.py miro-6.0/lib/config.py --- miro-4.0.4/lib/config.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/config.py 2013-04-05 16:02:42.000000000 +0000 @@ -80,6 +80,18 @@ def get(self, descriptor): return self._data[descriptor.key] + def get_platform_default(self, descriptor): + """Get the platform-specific default value for a preference. + + For platform-specific preferences, we can't set the default attribute + since it will be different for each platform and also could depend on + things like the home directory. + + Instead, use this method to get the value from the miro.plat.config + module. 
+ """ + return platformcfg.get(descriptor) + @_with_lock def set(self, descriptor, value): self.set_key(descriptor.key, value) @@ -121,7 +133,7 @@ else: return value elif descriptor.platformSpecific: - return platformcfg.get(descriptor) + return self.get_platform_default(descriptor) if app.configfile.contains(descriptor.key, use_theme_data): return app.configfile.get(descriptor.key, use_theme_data) else: @@ -152,7 +164,7 @@ if pref.key in self._data: return self._data[pref.key] elif pref.platformSpecific: - return platformcfg.get(pref) + return self.get_platform_default(pref) else: return pref.default diff -Nru miro-4.0.4/lib/controller.py miro-6.0/lib/controller.py --- miro-4.0.4/lib/controller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/controller.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,10 +34,7 @@ import logging import os import threading -import tempfile import locale -from random import randrange -from zipfile import ZipFile from miro import app from miro import crashreport @@ -46,12 +43,11 @@ from miro.gtcache import gettext as _ from miro import httpauth from miro import httpclient -from miro import iconcache from miro import messages -from miro import moviedata from miro import prefs from miro import signals from miro import conversions +from miro import util from miro import workerprocess from miro.plat.utils import exit_miro @@ -66,10 +62,21 @@ @eventloop.as_urgent def shutdown(self): + if app.local_metadata_manager is not None: + logging.info("Sending pending metadata updates") + app.local_metadata_manager.run_updates() + logging.info("Shutting down donation manager") + if app.donate_manager is not None: + app.donate_manager.shutdown() logging.info("Shutting down video conversions manager") conversions.conversion_manager.shutdown() logging.info("Shutting down Downloader...") - downloader.shutdown_downloader(self.downloader_shutdown) + if app.download_state_manager is not None: + app.download_state_manager.shutdown_downloader( + self.downloader_shutdown) + wait_for_downloader = True + else: + wait_for_downloader = False try: logging.info("Shutting down worker process.") workerprocess.shutdown() @@ -85,6 +92,8 @@ except StandardError: signals.system.failed_exn("while shutting down") # don't abort - it's not "fatal" and we can still shutdown + if not wait_for_downloader: + self.downloader_shutdown() def downloader_shutdown(self): logging.info("Shutting down libCURL thread") @@ -97,8 +106,6 @@ logging.info("Saving cached ItemInfo objects") logging.info("Commiting DB changes") app.db.finish_transaction() - if app.item_info_cache is not None: - app.item_info_cache.save() logging.info("Closing Database...") if app.db is not None: app.db.close() @@ -118,10 +125,12 @@ def on_shutdown(self): try: - logging.info("Shutting down icon cache updates") - iconcache.icon_cache_updater.shutdown() + if app.icon_cache_updater is not None: + logging.info("Shutting down icon cache updates") + app.icon_cache_updater.shutdown() logging.info("Shutting down movie data updates") - moviedata.movie_data_updater.shutdown() + if app.movie_data_updater is not None: + app.movie_data_updater.shutdown() logging.info("Joining event loop ...") eventloop.join() @@ -229,7 +238,7 @@ post_files = {"databasebackup": {"filename": "databasebackup.zip", "mimetype": "application/octet-stream", - "handle": open(backupfile, "rb") + "handle": backupfile, }} else: post_files = None @@ -258,44 +267,19 @@ return progress.uploaded, progress.upload_total def _backup_support_dir(self): - # backs up the support 
directories to a zip file - # returns the name of the zip file - logging.info("Attempting to back up support directory") - app.db.close() + """Back up the support directory. - support_dir = app.config.get(prefs.SUPPORT_DIRECTORY) + :returns: handle of a file for the archive + """ + skip_dirs = [ + app.config.get(prefs.ICON_CACHE_DIRECTORY), + app.config.get(prefs.COVER_ART_DIRECTORY), + ] + app.db.close() try: - uniqfn = "%012ddatabasebackup.zip" % randrange(0, 999999999999) - tempfilename = os.path.join(tempfile.gettempdir(), uniqfn) - zipfile = ZipFile(tempfilename, "w") - iconcache_dir = app.config.get(prefs.ICON_CACHE_DIRECTORY) - iconcache_dir = os.path.normpath(iconcache_dir) - - for root, dummy, files in os.walk(support_dir): - if (os.path.normpath(root).startswith(iconcache_dir) - or os.path.islink(root)): - continue - relativeroot = root[len(support_dir):] - while (len(relativeroot) > 0 - and relativeroot[0] in ['/', '\\']): - relativeroot = relativeroot[1:] - for fn in files: - if fn == 'httpauth': - # don't send http passwords over the internet - continue - if fn == 'preferences.bin': - # On windows, don't send the config file. Other - # platforms don't handle config the same way, so we - # don't need to worry about them - continue - path = os.path.join(root, fn) - if not os.path.islink(path): - relpath = os.path.join(relativeroot, fn) - relpath = relpath.encode('ascii', 'replace') - zipfile.write(path, relpath) - zipfile.close() - logging.info("Support directory backed up to %s (%d bytes)", - tempfilename, os.path.getsize(tempfilename)) - return tempfilename + support_dir = app.config.get(prefs.SUPPORT_DIRECTORY) + max_size = 100000000 # 100 MB + backup = util.SupportDirBackup(support_dir, skip_dirs, max_size) + return backup.fileobj() finally: app.db.open_connection() diff -Nru miro-4.0.4/lib/conversions.py miro-6.0/lib/conversions.py --- miro-4.0.4/lib/conversions.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/conversions.py 2013-04-05 16:02:42.000000000 +0000 @@ -55,6 +55,7 @@ from miro.fileobject import FilenameType from miro.plat import utils from miro.plat import resources +from miro.plat.popen import Popen NON_WORD_CHARS = re.compile(r"[^a-zA-Z0-9]+") @@ -67,7 +68,7 @@ root = app.config.get(prefs.MOVIES_DIRECTORY) target_folder = os.path.join(root, "Converted") if not os.path.exists(target_folder): - os.mkdir(target_folder) + os.makedirs(target_folder) return target_folder @@ -255,6 +256,8 @@ name, parser, "extension", {}) self.screen_size = self._get_config_value( name, parser, "ssize", {}) + self.bit_rate = int(self._get_config_value( + name, parser, "bitrate", {'bitrate': 0})) self.platforms = self._get_config_value( name, parser, "only_on", {'only_on': None}) self.displayname = _( @@ -314,6 +317,7 @@ self.converters.append((defaults['name'], group_converters)) finally: definition_file.close() + messages.ConverterList(self.converters).send_to_frontend() def lookup_converter(self, converter_id): """Looks up and returns a ConverterInfo object by id. 
@@ -422,9 +426,6 @@ if converter_info.executable == 'ffmpeg': return FFMpegConversionTask(converter_info, item_info, target_folder, create_item) - elif converter_info.executable == 'ffmpeg2theora': - return FFMpeg2TheoraConversionTask(converter_info, item_info, - target_folder, create_item) return None def _check_task_loop(self): @@ -531,7 +532,14 @@ logging.warn("Couldn't find task for key %s", msg['key']) return source = task.temp_output_path - destination, fp = next_free_filename(task.final_output_path) + try: + destination, fp = next_free_filename(task.final_output_path) + fp.close() + except ValueError: + logging.warn('_process_message_queue: ' + 'next_free_filename failed. Candidate = %r', + task.final_output_path) + return source_info = task.item_info conversion_name = task.get_display_name() if os.path.exists(source): @@ -546,7 +554,6 @@ else: task.error = _("Reason unknown--check log") self._notify_tasks_count() - fp.close() self.emit('task-staged', task) def _move_finished_file(self, source, destination): @@ -667,10 +674,10 @@ target_folder = get_conversions_folder() else: use_temp_dir = False - input_path = item_info.video_path + input_path = item_info.filename basename = os.path.basename(input_path) - title = utils.unicode_to_filename(item_info.name, target_folder).strip() + title = utils.unicode_to_filename(item_info.title, target_folder).strip() if not title: title = basename @@ -694,9 +701,15 @@ def round_even(num): """This takes a number, converts it to an integer, then makes sure it's even. + + Additional rules: this helper always rounds down to avoid stray black + pixels (see bz18122). + + This function makes sure that the value returned is always >= 0. """ num = int(num) - return num + (num % 2) + val = num - (num % 2) + return val if val > 0 else 0 def build_parameters(input_path, output_path, converter_info, media_info): @@ -779,14 +792,11 @@ create_item): self.item_info = item_info self.converter_info = converter_info - self.input_path = item_info.video_path + self.input_path = item_info.filename self.final_output_path, self.temp_output_path = build_output_paths( item_info, target_folder, converter_info) self.create_item = create_item - logging.debug("temp_output_path: [%s] final_output_path: [%s]", - self.temp_output_path, self.final_output_path) - self.key = "%s->%s" % (self.input_path, self.final_output_path) self.thread = None self.duration = None @@ -803,10 +813,18 @@ def get_parameters(self): raise NotImplementedError() + def get_output_size_guess(self): + if self.item_info.duration and self.converter_info.bit_rate: + return self.converter_info.bit_rate * self.item_info.duration / 8 + return self.item_info.size + def get_display_name(self): return self.converter_info.displayname def run(self): + logging.debug("temp_output_path: [%s] final_output_path: [%s]", + self.temp_output_path, self.final_output_path) + self.progress = 0 self.thread = threading.Thread(target=utils.thread_body, args=[self._loop], @@ -859,12 +877,9 @@ "stdout": subprocess.PIPE, "stderr": subprocess.STDOUT, "stdin": subprocess.PIPE, - "startupinfo": util.no_console_startupinfo()} - if os.name != "nt": - kwargs["close_fds"] = True - + "close_fds": True} try: - self.process_handle = subprocess.Popen(args, **kwargs) + self.process_handle = Popen(args, **kwargs) self.process_output(line_reader(self.process_handle.stdout)) self.process_handle.wait() @@ -915,7 +930,7 @@ item_id, self.converter_info.identifier)) self.log_file = file(self.log_path, "w") self._log_progress("STARTING CONVERSION") - 
self._log_progress("-> Item: %s" % util.stringify(self.item_info.name)) + self._log_progress("-> Item: %s" % util.stringify(self.item_info.title)) self._log_progress("-> Converter used: %s" % self.converter_info.name) self._log_progress("-> Executable: %s" % executable) self._log_progress("-> Parameters: %s" % ' '.join(params)) @@ -944,8 +959,8 @@ return logging.warning("killing conversion task %d", self.process_handle.pid) - self.process_handle.kill() try: + self.process_handle.kill() self.process_handle.wait() except OSError: logging.exception('exception while interupting process') @@ -984,6 +999,9 @@ # never actually executed return "copy" + def get_output_size_guess(self): + return self.item_info.size + def get_display_name(self): return _("Copy") @@ -1022,8 +1040,8 @@ default_parameters = build_parameters( self.input_path, self.temp_output_path, self.converter_info, media_info) # insert -strict experimental - default_parameters.insert(0, 'experimental') - default_parameters.insert(0, '-strict') + default_parameters.insert(-1, 'experimental') + default_parameters.insert(-2, '-strict') return utils.customize_ffmpeg_parameters(default_parameters) def check_for_errors(self, line): @@ -1049,69 +1067,16 @@ else: match = FFMpegConversionTask.PROGRESS_RE.match(line) if match is not None: - return float(match.group(1)) / self.duration + t = match.group(1) + if ':' in t: + h, m, s = t.split(':') + t = float(h) * 3600 + float(m) * 60 + float(s) + return float(t) / self.duration match = FFMpegConversionTask.LAST_PROGRESS_RE.match(line) if match is not None: return 1.0 return self.progress - -class FFMpeg2TheoraConversionTask(ConversionTask): - DURATION_RE = re.compile(r'f2t ;duration: ([^;]*);') - - PROGRESS_RE1 = re.compile(r'\{"duration":(.*), "position":(.*), ' - '"audio_kbps":.*, "video_kbps":.*, ' - '"remaining":.*\}') - RESULT_RE1 = re.compile(r'\{"result": "(.*)"\}') - - PROGRESS_RE2 = re.compile(r'f2t ;position: ([^;]*);') - RESULT_RE2 = re.compile(r'f2t ;result: ([^;]*);') - - def __init__(self, converter_info, item_info, target_folder, create_item): - ConversionTask.__init__(self, converter_info, item_info, - target_folder, create_item) - self.platform = app.config.get(prefs.APP_PLATFORM) - - def get_executable(self): - return utils.get_ffmpeg2theora_executable_path() - - def get_parameters(self): - try: - media_info = get_media_info(self.input_path) - except ValueError: - media_info = {} - - default_parameters = build_parameters( - self.input_path, self.temp_output_path, self.converter_info, media_info) - return utils.customize_ffmpeg2theora_parameters(default_parameters) - - def check_for_errors(self, line): - return - - def monitor_progress(self, line): - if line.startswith('f2t'): - if self.duration is None: - match = FFMpeg2TheoraConversionTask.DURATION_RE.match(line) - if match is not None: - self.duration = float(match.group(1)) - match = FFMpeg2TheoraConversionTask.PROGRESS_RE2.match(line) - if match is not None: - return float(match.group(1)) / self.duration - match = FFMpeg2TheoraConversionTask.RESULT_RE2.match(line) - if match is not None: - return 1.0 - else: - match = FFMpeg2TheoraConversionTask.PROGRESS_RE1.match(line) - if match is not None: - if self.duration is None: - self.duration = float(match.group(1)) - return float(match.group(2)) / self.duration - match = FFMpeg2TheoraConversionTask.RESULT_RE1.match(line) - if match is not None: - return 1.0 - return self.progress - - def convert(converter_id, item_info, update_last=False): """Given a converter and an item, this 
starts the conversion for that item. @@ -1129,7 +1094,7 @@ # should only get called in the event loop. name = _('%(original_name)s (Converted to %(format)s)', - {'original_name': source_info.name, 'format': conversion_name}) + {'original_name': source_info.title, 'format': conversion_name}) fp_values = item.fp_values_for_file(filename, name, source_info.description) diff -Nru miro-4.0.4/lib/convert20database.py miro-6.0/lib/convert20database.py --- miro-4.0.4/lib/convert20database.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/convert20database.py 2013-04-05 16:02:42.000000000 +0000 @@ -75,14 +75,14 @@ klass = getattr(mod, name) return klass -def convert(cursor): +def convert(cursor, show_progress): """Convert an old-style database to a new-style one. cursor is an SQLite cursor. """ savable_objects = _get_old_savables(cursor) - _upgrate_old_savables(cursor, savable_objects) + _upgrate_old_savables(cursor, savable_objects, show_progress) _run_databasesanity(savable_objects) _create_db_schema(cursor) _migrate_old_data(cursor, savable_objects) @@ -95,12 +95,13 @@ cursor.execute("SELECT serialized_object FROM dtv_objects") return [_loads(str(r[0])) for r in cursor] -def _upgrate_old_savables(cursor, savables): +def _upgrate_old_savables(cursor, savables, show_progress): cursor.execute("SELECT serialized_value FROM dtv_variables " "WHERE name=?", ("Democracy Version",)) row = cursor.fetchone() version = cPickle.loads(str(row[0])) - databaseupgrade.upgrade(savables, version, schema_mod.VERSION) + databaseupgrade.upgrade(savables, version, schema_mod.VERSION, + show_progress) def _run_databasesanity(objects): try: diff -Nru miro-4.0.4/lib/coverart.py miro-6.0/lib/coverart.py --- miro-4.0.4/lib/coverart.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/coverart.py 2013-04-05 16:02:42.000000000 +0000 @@ -42,11 +42,11 @@ from miro import util from miro import fileutil -class UnknownImageObjectException(Exception): +class UnknownImageObjectException(StandardError): """Image uses this when mutagen gives us something strange. 
""" def __init__(self, object_type, known_types): - Exception.__init__(self) + StandardError.__init__(self) self.object_type = object_type self.known_types = known_types @@ -78,6 +78,8 @@ JPEG_EXTENSION = 'jpg' PNG_EXTENSION = 'png' UNKNOWN_EXTENSION = 'bin' + # XXX when adding a mime type below, be sure its chars are all in MIME_CHARS + # and use lowercase only MIME_EXTENSION_MAP = { 'image/jpeg': JPEG_EXTENSION, 'image/jpg': JPEG_EXTENSION, @@ -115,33 +117,6 @@ """ return isinstance(image_object, cls.PROCESSES_TYPE) - @staticmethod - def _get_destination_path(extension, track_path): - filename = "{0}.{1}.{2}".format(os.path.basename(track_path), - util.random_string(5), extension) - directory = app.config.get(prefs.COVER_ART_DIRECTORY) - # make the directory if necessary: - try: - fileutil.makedirs(directory) - except StandardError: - pass - return os.path.join(directory, filename) - - @staticmethod - def from_file(source, track_path): - """Copy a file to use as cover art.""" - if not fileutil.isfile(source): - raise ValueError('cover_art must be a file') - path = Image._get_destination_path( - os.path.splitext(source)[1], track_path) - try: - shutil.copyfile(source, path) - except IOError: - logging.warn( - "Couldn't write cover art file: {0}".format(path)) - return None - return path - def get_extension(self): """Get the extension appropriate for this file's data.""" return self.extension or Image.UNKNOWN_EXTENSION @@ -152,29 +127,25 @@ """ return self.is_cover - def write_to_file(self, track_path): + def write_to_file(self, path): """Creates a new file containing this image's data. - Returns the file's path. + + :raises EnvironmentError: error writing cover art file """ - path = self._get_destination_path(self.get_extension(), track_path) - try: - file_handle = fileutil.open_file(path, 'wb') - file_handle.write(self.data) - except IOError: - logging.warn( - "Couldn't write cover art file: {0}".format(path)) - return None - return path + file_handle = fileutil.open_file(path, 'wb') + file_handle.write(self.data) - def _set_extension_by_mime(self, mime): + def _set_extension_by_mime(self, raw_mime): """If a subclasss can determine its data's mime type, this function will set the extension appropriately. """ - mime = _text_to_mime_chars(mime) - mime = mime.lower() + mime = _text_to_mime_chars(raw_mime).lower() + dropped_chars = len(raw_mime) - len(mime) if not '/' in mime: # some files arbitrarily drop the 'image/' component mime = "image/{0}".format(mime) + if dropped_chars: + logging.debug("coverart: coerced mime %r to %r", raw_mime, mime) if mime in Image.MIME_EXTENSION_MAP: self.extension = Image.MIME_EXTENSION_MAP[mime] else: diff -Nru miro-4.0.4/lib/data/connectionpool.py miro-6.0/lib/data/connectionpool.py --- miro-4.0.4/lib/data/connectionpool.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/data/connectionpool.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,229 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data.connectionpool -- SQLite connection pool """
+import contextlib
+import logging
+
+import sqlite3
+
+from miro import messages
+from miro.data import dbcollations
+
+class ConnectionLimitError(StandardError):
+    """We've hit our connection limits."""
+
+class Connection(object):
+    """Wraps the sqlite3.Connection object."""
+    def __init__(self, path):
+        self._connection = sqlite3.connect(
+            path, isolation_level=None, detect_types=sqlite3.PARSE_DECLTYPES)
+
+    def execute(self, sql, values=()):
+        return self._connection.execute(sql, values)
+
+    def execute_many(self, sql, values):
+        # sqlite3.Connection spells this executemany()
+        self._connection.executemany(sql, values)
+
+    def commit(self):
+        self._connection.commit()
+
+    def rollback(self):
+        self._connection.rollback()
+
+    def close(self):
+        self._connection.close()
+
+class ConnectionPool(object):
+    """Pool of SQLite database connections
+
+    :attribute wal_mode: Is the database using WAL mode for its journal?
+    """
+    def __init__(self, db_path, min_connections=2, max_connections=7):
+        """Create a new ConnectionPool
+
+        :param db_path: path to the database to connect to
+        :param min_connections: Minimum number of connections to maintain
+        :param max_connections: Maximum number of connections to the database
+        """
+        self.db_path = db_path
+        self.min_connections = min_connections
+        self.max_connections = max_connections
+        self.all_connections = set()
+        self.free_connections = []
+        self._check_wal_mode()
+
+    def _check_wal_mode(self):
+        """Try to set journal_mode=wal and record whether it was successful.
+        """
+        connection = self.get_connection()
+        cursor = connection.execute("PRAGMA journal_mode=wal")
+        self.wal_mode = cursor.fetchone()[0] == u'wal'
+        connection.commit()
+        self.release_connection(connection)
+
+    def _make_new_connection(self):
+        # TODO: should have error handling here, but what should we do?
+        connection = Connection(self.db_path)
+        dbcollations.setup_collations(connection)
+        self.free_connections.append(connection)
+        self.all_connections.add(connection)
+
+    def destroy(self):
+        """Forcibly destroy all connections."""
+        for connection in self.all_connections:
+            connection.close()
+        self.all_connections = set()
+        self.free_connections = []
+
+    def get_connection(self):
+        """Get a new connection to the database
+
+        When you're finished with the connection, call release_connection() to
+        put it back into the pool.
+
+        If there are max_connections checked out and get_connection() is
+        called again, ConnectionLimitError will be raised.
+
+        :returns: Connection object
+        """
+        if not self.free_connections:
+            if len(self.all_connections) < self.max_connections:
+                self._make_new_connection()
+            else:
+                raise ConnectionLimitError()
+        return self.free_connections.pop()
+
+    def release_connection(self, connection):
+        """Put a connection back into the pool."""
+
+        if connection not in self.all_connections:
+            raise ValueError("%s not from this pool" % connection)
+        connection.rollback()
+        if len(self.all_connections) > self.min_connections:
+            connection.close()
+            self.all_connections.remove(connection)
+        else:
+            self.free_connections.append(connection)
+
+    @contextlib.contextmanager
+    def context(self):
+        """ContextManager used to get a connection.
+
+        Usage:
+        with connection_pool.context() as connection:
+            connection.execute("blah blah blah")
+        """
+        connection = self.get_connection()
+        yield connection
+        # Rollback any changes not committed
+        connection.rollback()
+        self.release_connection(connection)
+
+class DeviceConnectionPool(ConnectionPool):
+    """ConnectionPool for a device."""
+    def __init__(self, device_info):
+        # min_connections is 0 since we should normally not have any
+        # connections to the device database.  The max connections is 2 in
+        # case the user is on the video tab and is playing items from the
+        # audio tab (or vice-versa)
+        ConnectionPool.__init__(self, device_info.sqlite_path,
+                                min_connections=0, max_connections=2)
+
+class ShareConnectionPool(ConnectionPool):
+    """ConnectionPool for a DAAP share."""
+    def __init__(self, share_info):
+        # min_connections is 0 since we should normally not have any
+        # connections to the share database.  The max connections is 3 which
+        # handles the following case:
+        # - playing items from tab #1
+        # - switching away from tab #2
+        # - switching to tab #3
+        ConnectionPool.__init__(self, share_info.sqlite_path,
+                                min_connections=0, max_connections=3)
+
+class ConnectionPoolTracker(object):
+    """Manage ConnectionPools for the frontend
+
+    This object stores a connection pool for:
+    - The main connection
+    - each connected device
+    - each share
+    """
+    def __init__(self, main_db_path):
+        self.main_pool = ConnectionPool(main_db_path)
+        self.pool_map = {}
+
+    def reset(self):
+        self.pool_map = {}
+
+    def get_main_pool(self):
+        return self.main_pool
+
+    def get_device_pool(self, tab_id):
+        return self.pool_map[tab_id]
+
+    def get_sharing_pool(self, tab_id):
+        return self.pool_map[tab_id]
+
+    def get_all_pools(self):
+        return [self.main_pool] + self.pool_map.values()
+
+    def _make_connection_pool(self, tab_info):
+        if isinstance(tab_info, messages.DeviceInfo):
+            return DeviceConnectionPool(tab_info)
+        elif isinstance(tab_info, messages.SharingInfo):
+            return ShareConnectionPool(tab_info)
+        else:
+            raise ValueError("Unknown type for tab info: %s" % (tab_info,))
+
+    def _ensure_connection_pool(self, tab_info):
+        if tab_info.id not in self.pool_map:
+            self.pool_map[tab_info.id] = self._make_connection_pool(tab_info)
+
+    def _ensure_no_connection_pool(self, tab_id):
+        if tab_id in self.pool_map:
+            del self.pool_map[tab_id]
+
+    def on_tabs_changed(self, message):
+        if message.type != 'connect':
+            return
+        for info in message.added + message.changed:
+            if isinstance(info, messages.DeviceInfo):
+                # for devices, we should make a connection pool if db_info is
+                # actually set
+                if info.db_info is not None:
+                    self._ensure_connection_pool(info)
+                else:
+                    self._ensure_no_connection_pool(info.id)
+            elif isinstance(info, messages.SharingInfo):
self._ensure_connection_pool(info)
+        for id_ in message.removed:
+            self._ensure_no_connection_pool(id_)
diff -Nru miro-4.0.4/lib/data/dbcollations.py miro-6.0/lib/data/dbcollations.py
--- miro-4.0.4/lib/data/dbcollations.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/dbcollations.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,40 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data.dbcollations -- defines collations for sqlite
+
+Once setup_collations() is called, the following collations will be defined:
+
+- name -- collation to use for names (title, artist, album, etc).
+"""
+from miro.data import namecollation
+
+def setup_collations(connection):
+    """Set up collations on a connection."""
+    namecollation.setup_collation(connection._connection)
diff -Nru miro-4.0.4/lib/data/dberrors.py miro-6.0/lib/data/dberrors.py
--- miro-4.0.4/lib/data/dberrors.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/dberrors.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,147 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data.dberrors -- handle database errors.
+
+This module is responsible for handling database errors on both the frontend
+and backend threads.  This is tricky for a couple of reasons:
+
+- Errors can happen in both threads at the same time, or near one
+  another.  In that case, we don't want to show 2 dialog windows
+- On GTK at least, errors can happen while we're waiting for a user
+  choice in our dialog window
+
+Here's how we handle it:
+- The backend system creates the DatabaseErrorDialog just like before and asks
+  the frontend to show it.  This is pretty similar to the situation before.
+- If we get an error in the frontend, then we show a similar dialog.  The
+  frontend code shouldn't block on the dialog.  Instead it can provide a
+  function to call if the user clicks the RETRY button.
+- We avoid showing multiple dialog windows at once and instead reuse the
+  response from the first dialog.
+"""
+
+import itertools
+import logging
+
+from miro import app
+from miro import dialogs
+
+class DBErrorHandler(object):
+    def __init__(self, frontend=None):
+        if frontend is None:
+            frontend = app.frontend
+        self.frontend = frontend
+        self.running_dialog = False
+        self.retry_callbacks = []
+        self.backend_dialogs = []
+        self.logged_warning = False
+        self.sent_quit = False
+        # The last button clicked on the dialog
+        self.last_response = None
+        # Which threads we sent last_response to.
+        self.last_response_sent_to = set()
+
+    def run_dialog(self, title, description, retry_callback=None):
+        if retry_callback is not None:
+            self.retry_callbacks.append(retry_callback)
+        self.frontend.call_on_ui_thread(self._run_dialog,
+                                        title, description, 'ui thread')
+
+    def run_backend_dialog(self, dialog):
+        self.backend_dialogs.append(dialog)
+        self.frontend.call_on_ui_thread(self._run_dialog,
+                                        dialog.title, dialog.description,
+                                        'eventloop thread')
+
+    def _run_dialog(self, title, description, thread):
+        if self.running_dialog:
+            return
+        self.running_dialog = True
+        try:
+            response = self._get_dialog_response(title, description, thread)
+        finally:
+            self.running_dialog = False
+        if response is None:
+            logging.warn("DB Error dialog closed, assuming QUIT")
+            response = dialogs.BUTTON_QUIT
+        self.last_response = response
+        self.last_response_sent_to.add(thread)
+        self._handle_response(response)
+
+    def _get_dialog_response(self, title, description, thread):
+        if self._should_reuse_last_response(thread):
+            return self.last_response
+        else:
+            self.last_response_sent_to.clear()
+            try:
+                response = self.frontend.run_choice_dialog(
+                    title, description, [dialogs.BUTTON_RETRY,
+                                         dialogs.BUTTON_QUIT])
+            except NotImplementedError:
+                if not self.logged_warning:
+                    logging.warn("Frontend.run_choice_dialog not "
+                                 "implemented; assuming QUIT was chosen")
+                response = dialogs.BUTTON_QUIT
+            return response
+
+    def _should_reuse_last_response(self, thread):
+        """Check if we should reuse the last button response without popping
+        up a new dialog.
+ """ + if self.last_response == dialogs.BUTTON_QUIT: + return True + if (self.last_response == dialogs.BUTTON_RETRY and + thread not in self.last_response_sent_to): + return True + return False + + def _handle_response(self, response): + # copy the callback/dialog lists and reset them before doing anything. + # This handles the case where one of the retry callbacks still sees an + # error and calls run_dialog() again. + backend_dialogs = self.backend_dialogs[:] + retry_callbacks = self.retry_callbacks[:] + self.retry_callbacks = [] + self.backend_dialogs = [] + + if response == dialogs.BUTTON_RETRY: + for callback in retry_callbacks: + try: + callback() + except StandardError: + logging.warn("DBErrorHandler: error calling response " + "callback: %s", callback, exc_info=True) + for dialog in backend_dialogs: + dialog.run_callback(response) + + if response == dialogs.BUTTON_QUIT and not self.sent_quit: + self.frontend.quit() + self.sent_quit = True + diff -Nru miro-4.0.4/lib/data/fulltextsearch.py miro-6.0/lib/data/fulltextsearch.py --- miro-4.0.4/lib/data/fulltextsearch.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/data/fulltextsearch.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,80 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. 
+ +"""miro.data.fulltextsearch -- Set up full text search in our SQLite DB +""" +from miro import app + +def setup_fulltext_search(connection, table='item', path_column='filename', + has_entry_description=True): + """Set up fulltext search on a newly created database.""" + if hasattr(app, 'in_unit_tests') and _no_item_table(connection, table): + # handle unittests not defining the item table in their schemas + return + + columns = ['title', 'description', 'artist', 'album', + 'genre', path_column, 'parent_title', ] + if has_entry_description: + columns.append('entry_description') + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + column_list_with_types = ', '.join('%s text' % c for c in columns) + connection.execute("CREATE VIRTUAL TABLE item_fts USING fts4(%s)" % + column_list_with_types) + connection.execute("INSERT INTO item_fts(docid, %s)" + "SELECT %s.id, %s FROM %s" % + (column_list, table, column_list, table)) + # make triggers to keep item_fts up to date + connection.execute("CREATE TRIGGER item_bu " + "BEFORE UPDATE ON %s BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;" % (table,)) + + connection.execute("CREATE TRIGGER item_bd " + "BEFORE DELETE ON %s BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;" % (table,)) + + connection.execute("CREATE TRIGGER item_au " + "AFTER UPDATE ON %s BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (table, column_list, column_list_for_new)) + + connection.execute("CREATE TRIGGER item_ai " + "AFTER INSERT ON %s BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (table, column_list, column_list_for_new)) + +def _no_item_table(connection, table_name): + cursor = connection.execute("SELECT COUNT(*) FROM sqlite_master " + "WHERE type='table' and name=?", + (table_name,)) + return (cursor.fetchone()[0] == 0) diff -Nru miro-4.0.4/lib/data/__init__.py miro-6.0/lib/data/__init__.py --- miro-4.0.4/lib/data/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/data/__init__.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,46 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. 
If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data -- Data layer for the frontends.
+
+The data package is what the frontend uses to access the database.
+It's general enough that it might be used by more than just the frontend
+someday.
+"""
+
+from miro import app
+from miro import prefs
+from miro.data import connectionpool
+from miro.data import dberrors
+
+def init(db_path=None):
+    if db_path is None:
+        db_path = app.config.get(prefs.SQLITE_PATHNAME)
+    app.connection_pools = connectionpool.ConnectionPoolTracker(db_path)
+    app.db_error_handler = dberrors.DBErrorHandler()
diff -Nru miro-4.0.4/lib/data/item.py miro-6.0/lib/data/item.py
--- miro-4.0.4/lib/data/item.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/item.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,1002 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data.item -- Defines ItemInfo and describes how to create them
+
+ItemInfo is the read-only interface to database items.  To create one you need
+to run a SELECT on the database and pass in the data from one row.
+
+column_info() and join_sql() describe what columns need to be selected and how
+to join the tables together in order to create an ItemInfo.
+"""
+
+import datetime
+import itertools
+import functools
+import logging
+import os
+
+from miro import app
+from miro import displaytext
+from miro.fileobject import FilenameType
+from miro import filetypes
+from miro import fileutil
+from miro import prefs
+from miro import schema
+from miro import util
+from miro.gtcache import gettext as _
+from miro.plat import resources
+from miro.plat.utils import PlatformFilenameType
+
+def _unicode_to_filename(unicode_value):
+    # Convert a unicode value from the database to FilenameType
+    # FIXME: This code is not very good and should be replaced as part of
+    # #13182
+    if unicode_value is not None and PlatformFilenameType != unicode:
+        return unicode_value.encode('utf-8')
+    else:
+        return unicode_value
+
+class SelectColumn(object):
+    """Describes a single column that we select for ItemInfo.
+
+    :attribute table: name of the table that contains the column
+    :attribute column: column name
+    :attribute attr_name: attribute name in ItemInfo
+    """
+
+    # _schema_map maps (table, column) tuples to their SchemaItem objects
+    _schema_map = {}
+    for object_schema in (schema.object_schemas +
+                          schema.device_object_schemas +
+                          schema.sharing_object_schemas):
+        for column_name, schema_item in object_schema.fields:
+            _schema_map[object_schema.table_name, column_name] = schema_item
+
+    def __init__(self, table, column, attr_name=None):
+        if attr_name is None:
+            attr_name = column
+        self.table = table
+        self.column = column
+        self.attr_name = attr_name
+
+    def sqlite_type(self):
+        """Get the sqlite type specification for this column."""
+        schema_item = self._schema_map[self.table, self.column]
+        return app.db.get_sqlite_type(schema_item)
+
+class ItemSelectInfo(object):
+    """Describes the query data needed to build an ItemInfo."""
+
+    # name of the main item table
+    table_name = 'item'
+    # SelectColumn objects for each attribute of ItemInfo
+    select_columns = [
+        SelectColumn('item', 'id'),
+        SelectColumn('item', 'new'),
+        SelectColumn('item', 'title'),
+        SelectColumn('item', 'entry_title'),
+        SelectColumn('item', 'torrent_title'),
+        SelectColumn('item', 'feed_id'),
+        SelectColumn('item', 'parent_id'),
+        SelectColumn('item', 'parent_title'),
+        SelectColumn('item', 'downloader_id'),
+        SelectColumn('item', 'is_file_item'),
+        SelectColumn('item', 'pending_manual_download'),
+        SelectColumn('item', 'pending_reason'),
+        SelectColumn('item', 'expired'),
+        SelectColumn('item', 'keep'),
+        SelectColumn('item', 'creation_time', 'date_added'),
+        SelectColumn('item', 'downloaded_time'),
+        SelectColumn('item', 'watched_time'),
+        SelectColumn('item', 'last_watched'),
+        SelectColumn('item', 'subtitle_encoding'),
+        SelectColumn('item', 'is_container_item'),
+        SelectColumn('item', 'release_date'),
+        SelectColumn('item', 'duration', 'duration_ms'),
+        SelectColumn('item', 'screenshot', 'screenshot_path_unicode'),
+        SelectColumn('item', 'resume_time'),
+        SelectColumn('item', 'license'),
+        SelectColumn('item', 'rss_id'),
+        SelectColumn('item', 'entry_description'),
+        SelectColumn('item', 'enclosure_type', 'mime_type'),
+        SelectColumn('item', 'enclosure_format'),
+        SelectColumn('item', 'enclosure_size'),
+        SelectColumn('item', 'link', 'permalink'),
+        SelectColumn('item', 'payment_link'),
+        SelectColumn('item', 'comments_link'),
+        SelectColumn('item', 'url'),
+        SelectColumn('item', 'was_downloaded'),
+        SelectColumn('item', 'filename', 'filename_unicode'),
+        SelectColumn('item', 'play_count'),
+        SelectColumn('item', 'skip_count'),
+        SelectColumn('item', 'cover_art', 'cover_art_path_unicode'),
+        SelectColumn('item', 'description', 'metadata_description'),
+        SelectColumn('item', 'album'),
+        SelectColumn('item', 'album_artist'),
+        SelectColumn('item', 'artist'),
+        SelectColumn('item', 'track'),
+        SelectColumn('item', 'album_tracks'),
+        SelectColumn('item', 'year'),
+        SelectColumn('item', 'genre'),
+        SelectColumn('item', 'rating'),
+        SelectColumn('item', 'file_type'),
+        SelectColumn('item', 'has_drm'),
+        SelectColumn('item', 'show'),
+        SelectColumn('item', 'size'),
+        SelectColumn('item', 'episode_id'),
+        SelectColumn('item', 'episode_number'),
+        SelectColumn('item', 'season_number'),
+        SelectColumn('item', 'kind'),
+        SelectColumn('item', 'net_lookup_enabled'),
+        SelectColumn('item', 'eligible_for_autodownload'),
+        SelectColumn('item', 'thumbnail_url'),
+        SelectColumn('feed', 'orig_url', 'feed_url'),
+        SelectColumn('feed', 'expire', 'feed_expire'),
+        SelectColumn('feed', 'expire_timedelta', 'feed_expire_timedelta'),
+        SelectColumn('feed', 'autoDownloadable', 'feed_auto_downloadable'),
+        SelectColumn('feed', 'getEverything', 'feed_get_everything'),
+        SelectColumn('feed', 'thumbnail_path', 'feed_thumbnail_path_unicode'),
+        SelectColumn('icon_cache', 'filename', 'icon_cache_path_unicode'),
+        SelectColumn('remote_downloader', 'content_type',
+                     'downloader_content_type'),
+        SelectColumn('remote_downloader', 'state', 'downloader_state'),
+        SelectColumn('remote_downloader', 'reason_failed'),
+        SelectColumn('remote_downloader', 'short_reason_failed'),
+        SelectColumn('remote_downloader', 'type', 'downloader_type'),
+        SelectColumn('remote_downloader', 'retry_time'),
+        SelectColumn('remote_downloader', 'retry_count'),
+        SelectColumn('remote_downloader', 'eta', '_eta'),
+        SelectColumn('remote_downloader', 'rate', '_rate'),
+        SelectColumn('remote_downloader', 'upload_rate', '_upload_rate'),
+        SelectColumn('remote_downloader', 'current_size', 'downloaded_size'),
+        SelectColumn('remote_downloader', 'total_size', 'downloader_size'),
+        SelectColumn('remote_downloader', 'upload_size'),
+        SelectColumn('remote_downloader', 'activity', 'downloader_activity'),
+        SelectColumn('remote_downloader', 'seeders'),
+        SelectColumn('remote_downloader', 'leechers'),
+        SelectColumn('remote_downloader', 'connections'),
+    ]
+    # name of the column that stores video paths
+    path_column = 'filename'
+
+    # how to join the main table to other tables.  Maps table names to
+    # (item_column, other_column) tuples
+    join_info = {
+        'feed': ('feed_id', 'id'),
+        'playlist_item_map': ('id', 'item_id'),
+        'remote_downloader': ('downloader_id', 'id'),
+        'icon_cache': ('icon_cache_id', 'id'),
+        'item_fts': ('id', 'docid'),
+    }
+
+    def __init__(self):
+        self.joined_tables = set(c.table for c in self.select_columns
+                                 if c.table != self.table_name)
+
+    def can_join_to(self, table):
+        """Can we join to a table."""
+        return table in self.join_info
+
+    def join_sql(self, table=None, join_type='LEFT JOIN'):
+        """Get an expression to join the main table to other tables.
+
+        :param table: name of the table to join to, or None to join to all
+        tables used in select_columns
+        """
+        if table is not None:
+            item_column, other_column = self.join_info[table]
+            return '%s %s ON %s.%s=%s.%s' % (join_type, table,
+                                             self.table_name, item_column,
+                                             table, other_column)
+        else:
+            return '\n'.join(self.join_sql(table)
+                             for table in self.joined_tables)
+
+    def item_join_column(self, table):
+        """Get the item table column used to join to another table."""
+        return self.join_info[table][0]
+
+# ItemInfo has a couple of tricky things going on for it:
+# - We need to support both selecting from the main database and the device
+#   database.  So we need a flexible way to map items in the result row to
+#   attributes
+# - We want to create items quickly.  We don't want to do a bunch of work in
+#   the constructor
+#
+# The solution we use is a metaclass that takes an ItemSelectInfo and creates
+# a bunch of class descriptors to implement the attributes by reading from a
+# result row
+class ItemInfoAttributeGetter(object):
+    def __init__(self, index):
+        self.index = index
+
+    def __get__(self, instance, owner):
+        if instance is None:
+            raise AttributeError("class attribute not supported")
+        return instance.row_data[self.index]
+
+class ItemInfoMeta(type):
+    """Metaclass for ItemInfo.
+
+    It depends on ItemInfo and all subclasses doing a couple of things:
+    - defining a class attribute called "select_info" that holds an
+      ItemSelectInfo object.
+    - storing the result row from sqlite in an instance attribute called
+      "row_data"
+    """
+    def __new__(cls, classname, bases, dct):
+        count = itertools.count()
+        select_info = dct.get('select_info')
+        if select_info is not None:
+            for select_column in select_info.select_columns:
+                attribute = ItemInfoAttributeGetter(count.next())
+                dct[select_column.attr_name] = attribute
+        return type.__new__(cls, classname, bases, dct)
+
+class ItemInfoBase(object):
+    """ItemInfo represents a row in one of the item lists.
+
+    This works similarly to the miro.item.Item class, except it's read-only.
+    Subclasses of this handle items from the main database, device database,
+    and sharing database
+    """
+
+    __metaclass__ = ItemInfoMeta
+
+    #: ItemSelectInfo object that describes what to select to create an
+    #: ItemInfo
+    select_info = None
+    html_stripper = util.HTMLStripper()
+
+    # default values for columns from the item table.  For DeviceItemInfo and
+    # SharingItemInfo, we will use these for columns that don't exist in their
+    # item table.
+    date_added = None
+    watched_time = None
+    last_watched = None
+    parent_id = None
+    rating = None
+    album_tracks = None
+    new = False
+    keep = True
+    was_downloaded = False
+    expired = False
+    eligible_for_autodownload = False
+    is_file_item = True
+    is_container_item = False
+    icon_cache_path_unicode = None
+    subtitle_encoding = None
+    release_date = None
+    parent_title = None
+    feed_url = None
+    feed_thumbnail_path_unicode = None
+    license = None
+    rss_id = None
+    entry_title = None
+    entry_description = None
+    torrent_title = None
+    permalink = None
+    payment_link = None
+    comments_link = None
+    thumbnail_url = None
+    url = None
+    size = None
+    enclosure_size = None
+    enclosure_type = None
+    mime_type = None
+    enclosure_format = None
+    auto_sync = None
+    screenshot_path_unicode = None
+    cover_art_path_unicode = None
+    resume_time = 0
+    play_count = 0
+    skip_count = 0
+    net_lookup_enabled = False
+    has_drm = False
+    album = None
+    kind = None
+    duration_ms = None
+    metadata_description = None
+    show = None
+    file_type = None
+    artist = None
+    episode_id = None
+    track = None
+    year = None
+    genre = None
+    episode_number = None
+    season_number = None
+    album_artist = None
+    # default values for columns in the remote_downloader table
+    downloader_size = None
+    downloader_type = None
+    seeders = None
+    upload_size = None
+    downloader_id = None
+    _rate = None
+    connections = None
+    downloaded_time = None
+    downloaded_size = None
+    pending_reason = None
+    retry_time = None
+    retry_count = None
+    short_reason_failed = None
+    reason_failed = None
+    leechers = None
+    _eta = None
+    pending_manual_download = None
+    downloader_state = None
+    _upload_rate = None
+    downloader_activity = None
+    downloader_content_type = None
+    # default values for columns in the feed table
+    feed_id = None
+    feed_get_everything = None
+    feed_auto_downloadable = False
+    feed_expire_timedelta = None
+    feed_expire = u'never'
+
+    def __init__(self, row_data):
+        """Create an ItemInfo object.
+
+        :param row_data: data from sqlite.  There should be a value for each
+        SelectColumn that column_info() returns.
+ """ + self.row_data = row_data + + def __hash__(self): + return hash(self.row_data) + + def __eq__(self, other): + return self.row_data == other.row_data + + # NOTE: The previous ItemInfo API was all attributes, so we use properties + # to try to match that. + + @property + def filename(self): + return _unicode_to_filename(self.filename_unicode) + + @property + def downloaded(self): + return self.has_filename + + @property + def has_filename(self): + return self.filename_unicode is not None + + @property + def icon_cache_path(self): + return _unicode_to_filename(self.icon_cache_path_unicode) + + @property + def cover_art_path(self): + return _unicode_to_filename(self.cover_art_path_unicode) + + @property + def screenshot_path(self): + return _unicode_to_filename(self.screenshot_path_unicode) + + @property + def feed_thumbnail_path(self): + return _unicode_to_filename(self.feed_thumbnail_path_unicode) + + @property + def is_playable(self): + return self.has_filename and self.file_type != u'other' + + @property + def is_torrent(self): + return self.downloader_type == u'BitTorrent' + + @property + def is_torrent_folder(self): + return self.is_torrent and self.is_container_item + + def looks_like_torrent(self): + return self.is_torrent or filetypes.is_torrent_filename(self.url) + + @property + def description(self): + if self.metadata_description: + return self.metadata_description + elif self.entry_description: + return self.entry_description + else: + return None + + @property + def description_stripped(self): + if not hasattr(self, '_description_stripped'): + self._description_stripped = ItemInfo.html_stripper.strip( + self.description) + return self._description_stripped + + @property + def thumbnail(self): + if (self.cover_art_path_unicode is not None + and fileutil.exists(self.cover_art_path)): + return self.cover_art_path + if (self.icon_cache_path_unicode is not None and + fileutil.exists(self.icon_cache_path)): + return self.icon_cache_path + if (self.screenshot_path_unicode is not None + and fileutil.exists(self.screenshot_path)): + return self.screenshot_path + if self.is_container_item: + return resources.path("images/thumb-default-folder.png") + if self.feed_thumbnail_path is not None: + return self.feed_thumbnail_path + # default + if self.file_type == u'audio': + return resources.path("images/thumb-default-audio.png") + else: + return resources.path("images/thumb-default-video.png") + + @property + def is_external(self): + """Is this an externally downloaded item.""" + return False + + @property + def remote(self): + return self.source_type == u'sharing' + + @property + def device(self): + return self.source_type == u'device' + + @property + def has_shareable_url(self): + """Does this item have a URL that the user can share with + others? + + This returns True when the item has a non-file URL. + """ + return self.url is not None and not self.url.startswith(u"file:") + + @property + def file_format(self): + """Returns string with the format of the video. + """ + if self.looks_like_torrent(): + return u'.torrent' + + if self.enclosure_format is not None: + return self.enclosure_format + + return filetypes.calc_file_format(self.filename, + self.downloader_content_type) + + @property + def video_watched(self): + return self.watched_time is not None + + @property + def expiration_date(self): + """When will this item expire? + + :returns: a datetime.datetime object or None if it doesn't expire. 
+ """ + if (self.watched_time is None or self.keep or + not self.has_filename or self.is_file_item): + return None + + if self.feed_expire == u'never': + return None + elif self.feed_expire == u"feed": + if self.feed_expire_timedelta is None: + logging.warn("feed_expire is 'feed', but " + "feed_expire_timedelta is None") + return None + expire_time = self.feed_expire_time_parsed + if expire_time is None: + logging.warn("feed_expire is 'feed', but " + "feed_expire_time_parsed failed") + return None + elif self.feed_expire == u"system": + days = app.config.get(prefs.EXPIRE_AFTER_X_DAYS) + if days <= 0: + return None + expire_time = datetime.timedelta(days=days) + else: + raise AssertionError("Unknown expire value: %s" % self.feed_expire) + return self.watched_time + expire_time + + @property + def feed_expire_time_parsed(self): + if self.feed_expire_timedelta is None: + return None + try: + expire_time_split = self.feed_expire_timedelta.split(":") + return datetime.timedelta(*(int(c) for c in expire_time_split)) + except StandardError: + logging.warn("Error parsing feed_expire_timedelta", exc_info=True) + return None + + @property + def expiration_date_text(self): + return displaytext.expiration_date(self.expiration_date) + + @property + def can_be_saved(self): + return self.has_filename and not self.keep + + @property + def is_download(self): + return (self.downloader_state in ('downloading', 'paused', 'offline') or + self.pending_manual_download) + + @property + def is_paused(self): + return self.downloader_state == 'paused' + + @property + def is_seeding(self): + return self.downloader_state == 'uploading' + + @property + def startup_activity(self): + if self.pending_manual_download: + return self.pending_reason + elif self.downloader_activity: + return self.downloader_activity + elif self.is_retrying: + return self._startup_activity_retry + else: + return _("starting up...") + + @property + def is_retrying(self): + return self.retry_count is not None and self.retry_time is not None + + @property + def _startup_activity_retry(self): + if self.retry_time > datetime.datetime.now(): + retry_delta = self.retry_time - datetime.datetime.now() + time_str = displaytext.time_string(retry_delta.seconds) + return _('no connection - retrying in %(time)s', {"time": time_str}) + else: + return _('no connection - retrying soon') + + @property + def download_progress(self): + """Calculate how for a download has progressed. + + :returns: [0.0, 1.0] depending on how much has been downloaded, or + None if we don't have the info to make this calculation + """ + if self.downloaded_size in (0, None): + # Download hasn't started yet. Give the downloader a little more + # time before deciding that the eta is unknown. 
+            return 0.0
+        if self.downloaded_size is None or self.downloader_size is None:
+            # unknown total size, return None
+            return None
+        return float(self.downloaded_size) / self.downloader_size
+
+    @property
+    def eta(self):
+        if self.is_paused:
+            return None
+        else:
+            return self._eta
+
+    @property
+    def rate(self):
+        if self.is_paused:
+            return None
+        else:
+            return self._rate
+
+    @property
+    def upload_rate(self):
+        if self.is_paused:
+            return None
+        else:
+            return self._upload_rate
+
+    @property
+    def download_rate_text(self):
+        return displaytext.download_rate(self.rate)
+
+    @property
+    def upload_rate_text(self):
+        return displaytext.download_rate(self.upload_rate)
+
+    @property
+    def upload_ratio(self):
+        if self.downloaded_size:
+            return float(self.upload_size) / self.downloaded_size
+        else:
+            return 0.0
+
+    @property
+    def upload_ratio_text(self):
+        return "%0.2f" % self.upload_ratio
+
+    @property
+    def eta_text(self):
+        return displaytext.time_string_0_blank(self.eta)
+
+    @property
+    def downloaded_size_text(self):
+        return displaytext.size_string(self.downloaded_size)
+
+    @property
+    def upload_size_text(self):
+        return displaytext.size_string(self.upload_size)
+
+    @property
+    def is_failed_download(self):
+        return self.downloader_state == 'failed'
+
+    @property
+    def pending_auto_dl(self):
+        return (self.feed_auto_downloadable and
+                not self.was_downloaded and
+                (self.feed_get_everything or self.eligible_for_autodownload))
+
+    @property
+    def title_sort_key(self):
+        return util.name_sort_key(self.title)
+
+    @property
+    def artist_sort_key(self):
+        return util.name_sort_key(self.artist)
+
+    @property
+    def album_sort_key(self):
+        return util.name_sort_key(self.album)
+
+    @property
+    def has_parent(self):
+        return self.parent_id is not None
+
+    @property
+    def parent_title_for_sort(self):
+        """value to use for sorting by parent title.
+
+        This will sort items by their parent title (torrent folder name or
+        feed name), but if 2 torrents have the same name, or a torrent and a
+        feed have the same name, then they will be separated
+        """
+        return (self.parent_title, self.feed_id, self.parent_id)
+
+    @property
+    def album_artist_sort_key(self):
+        if self.album_artist:
+            return util.name_sort_key(self.album_artist)
+        else:
+            return self.artist_sort_key
+
+    @property
+    def description_oneline(self):
+        return self.description_stripped[0].replace('\n', '$')
+
+    @property
+    def auto_rating(self):
+        """Guess at a rating based on the number of times the file has been
+        played vs. skipped and the item's age.
+ """ + # TODO: we may want to take into consideration average ratings for this + # artist and this album, total play count and skip counts, and average + # manual rating + SKIP_FACTOR = 1.5 # rating goes to 1 when user skips 40% of the time + UNSKIPPED_FACTOR = 2 # rating goes to 5 when user plays 3 times without + # skipping + # TODO: should divide by log of item's age + if self.play_count > 0: + if self.skip_count > 0: + return min(5, max(1, int(self.play_count - + SKIP_FACTOR * self.skip_count))) + else: + return min(5, int(UNSKIPPED_FACTOR * self.play_count)) + elif self.skip_count > 0: + return 1 + else: + return None + + @property + def duration(self): + if self.duration_ms is None: + return None + else: + return self.duration_ms // 1000 + + def __repr__(self): + return '<%s: %s>' % (self.__class__.__name__, self.title) + + def __str__(self): + return '<%s: %s>' % (self.__class__.__name__, self.title) + +def _fetch_item_rows(connection, item_ids, select_info): + """Fetch rows for fetch_item_infos and fetch_device_item_infos.""" + + columns = ','.join('%s.%s' % (c.table, c.column) + for c in select_info.select_columns) + item_ids = ','.join(str(item_id) for item_id in item_ids) + sql = ("SELECT %s FROM %s %s WHERE %s.id IN (%s)" % + (columns, select_info.table_name, select_info.join_sql(), + select_info.table_name, item_ids)) + return connection.execute(sql) + +def fetch_item_infos(connection, item_ids): + """Fetch a list of ItemInfos """ + result_set = _fetch_item_rows(connection, item_ids, ItemSelectInfo()) + return [ItemInfo(row) for row in result_set] + +def fetch_device_item_infos(device, item_ids): + """Fetch a list of ItemInfos for a device""" + result_set = _fetch_item_rows(device.db_info.db.connection, + item_ids, DeviceItemSelectInfo()) + return [DeviceItemInfo(device.id, row) for row in result_set] + +class ItemInfo(ItemInfoBase): + source_type = 'database' + select_info = ItemSelectInfo() + + @property + def is_external(self): + if self.is_file_item: + return not self.has_parent + else: + return self.feed_url == 'dtv:manualFeed' + +class DBErrorItemInfo(ItemInfoBase): + """DBErrorItemInfo is used as a placeholder when we get DatabaseErrors + """ + + def __init__(self, id): + ItemInfoBase.__init__(self, row_data=(id,)) + self.id = id + self.title = _('Database Error') + self.filename_unicode = None + self.source_type = 'dberror' + +class DeviceItemSelectInfo(ItemSelectInfo): + """ItemSelectInfo for DeviceItems.""" + + # name of the main item table + table_name = 'device_item' + # SelectColumn objects for each attribute of ItemInfo + select_columns = [ + SelectColumn('device_item', 'id'), + SelectColumn('device_item', 'title'), + SelectColumn('device_item', 'creation_time', 'date_added'), + SelectColumn('device_item', 'watched_time'), + SelectColumn('device_item', 'last_watched'), + SelectColumn('device_item', 'subtitle_encoding'), + SelectColumn('device_item', 'release_date'), + SelectColumn('device_item', 'parent_title'), + SelectColumn('device_item', 'feed_url'), + SelectColumn('device_item', 'license'), + SelectColumn('device_item', 'rss_id'), + SelectColumn('device_item', 'entry_title'), + SelectColumn('device_item', 'torrent_title'), + SelectColumn('device_item', 'entry_description'), + SelectColumn('device_item', 'permalink'), + SelectColumn('device_item', 'payment_link'), + SelectColumn('device_item', 'comments_link'), + SelectColumn('device_item', 'url'), + SelectColumn('device_item', 'size'), + SelectColumn('device_item', 'enclosure_size'), + 
SelectColumn('device_item', 'enclosure_type', 'mime_type'),
+        SelectColumn('device_item', 'enclosure_format'),
+        SelectColumn('device_item', 'filename', 'filename_unicode'),
+        SelectColumn('device_item', 'resume_time'),
+        SelectColumn('device_item', 'play_count'),
+        SelectColumn('device_item', 'skip_count'),
+        SelectColumn('device_item', 'auto_sync'),
+        SelectColumn('device_item', 'screenshot', 'screenshot_path_unicode'),
+        SelectColumn('device_item', 'duration', 'duration_ms'),
+        SelectColumn('device_item', 'cover_art', 'cover_art_path_unicode'),
+        SelectColumn('device_item', 'description', 'metadata_description'),
+        SelectColumn('device_item', 'album'),
+        SelectColumn('device_item', 'album_artist'),
+        SelectColumn('device_item', 'artist'),
+        SelectColumn('device_item', 'track'),
+        SelectColumn('device_item', 'album_tracks'),
+        SelectColumn('device_item', 'year'),
+        SelectColumn('device_item', 'genre'),
+        SelectColumn('device_item', 'rating'),
+        SelectColumn('device_item', 'file_type'),
+        SelectColumn('device_item', 'has_drm'),
+        SelectColumn('device_item', 'show'),
+        SelectColumn('device_item', 'episode_id'),
+        SelectColumn('device_item', 'episode_number'),
+        SelectColumn('device_item', 'season_number'),
+        SelectColumn('device_item', 'kind'),
+        SelectColumn('device_item', 'net_lookup_enabled'),
+    ]
+
+    join_info = {
+        'item_fts': ('id', 'docid'),
+    }
+
+class DeviceItemInfo(ItemInfoBase):
+    """ItemInfo for devices """
+
+    select_info = DeviceItemSelectInfo()
+    source_type = 'device'
+
+    def __init__(self, device_info, row_data):
+        """Create an ItemInfo object.
+
+        :param device_info: DeviceInfo object for the device
+        :param row_data: data from sqlite.  There should be a value for each
+        SelectColumn that column_info() returns.
+        """
+        self.device_info = device_info
+        self.device_id = device_info.id
+        self.mount = device_info.mount
+        self.row_data = row_data
+
+    @property
+    def filename(self):
+        relative_filename = ItemInfo.filename.__get__(self, self.__class__)
+        return os.path.join(self.mount, relative_filename)
+
+class SharingItemSelectInfo(ItemSelectInfo):
+    """ItemSelectInfo for SharingItems."""
+
+    # name of the main item table
+    table_name = 'sharing_item'
+    # SelectColumn objects for each attribute of ItemInfo
+    select_columns = [
+        SelectColumn('sharing_item', 'id'),
+        SelectColumn('sharing_item', 'daap_id'),
+        SelectColumn('sharing_item', 'video_path'),
+        SelectColumn('sharing_item', 'title'),
+        SelectColumn('sharing_item', 'description', 'metadata_description'),
+        SelectColumn('sharing_item', 'file_type'),
+        SelectColumn('sharing_item', 'file_format'),
+        SelectColumn('sharing_item', 'duration', 'duration_ms'),
+        SelectColumn('sharing_item', 'size'),
+        SelectColumn('sharing_item', 'artist'),
+        SelectColumn('sharing_item', 'album_artist'),
+        SelectColumn('sharing_item', 'album'),
+        SelectColumn('sharing_item', 'year'),
+        SelectColumn('sharing_item', 'genre'),
+        SelectColumn('sharing_item', 'track'),
+        SelectColumn('sharing_item', 'kind'),
+        SelectColumn('sharing_item', 'show'),
+        SelectColumn('sharing_item', 'season_number'),
+        SelectColumn('sharing_item', 'episode_id'),
+        SelectColumn('sharing_item', 'episode_number'),
+        SelectColumn('sharing_item', 'host'),
+        SelectColumn('sharing_item', 'port'),
+        SelectColumn('sharing_item', 'address'),
+    ]
+    path_column = 'video_path'
+
+    join_info = {
+        'item_fts': ('id', 'docid'),
+        'sharing_item_playlist_map': ('daap_id', 'item_id'),
+    }
+
+
+class SharingItemInfo(ItemInfoBase):
+    """ItemInfo for items on a DAAP share """
+
+    select_info = SharingItemSelectInfo()
+    source_type = 'sharing'
+
+    def __init__(self, share_info, row_data):
+        """Create an ItemInfo object.
+
+        :param share_info: SharingInfo object for the share
+        :param row_data: data from sqlite.  There should be a value for each
+        SelectColumn that column_info() returns.
+        """
+        self.share_info = share_info
+        self.row_data = row_data
+
+    @property
+    def filename(self):
+        # FIXME: code from the old ItemInfo.  Needs some serious cleanup
+        # For daap, set it to be the same as http as it is basically
+        # http with a different port.
+        def daap_handler(path, host, port):
+            return 'http://%s:%s%s' % (host, port, path)
+        fn = FilenameType(self.video_path)
+        fn.set_urlize_handler(daap_handler,
+                              [self.share_info.host, self.share_info.port])
+        return fn
+
+    @property
+    def has_filename(self):
+        # all sharing items have files on the share
+        return True
+
+class ItemSource(object):
+    """Create ItemInfo objects.
+
+    ItemSource stores info about a database that stores items and contains the
+    logic to build an ItemInfo from a SELECT result.  It tries to abstract
+    away the differences between items on the main database and device
+    databases.
+
+    :attribute select_info: ItemSelectInfo for a database
+    :attribute connection_pool: ConnectionPool for the same database
+    """
+
+    select_info = ItemSelectInfo()
+
+    def __init__(self):
+        self.connection_pool = app.connection_pools.get_main_pool()
+
+    def get_connection(self):
+        """Get a database connection to use.
+
+        A database connection must be created before using any of the query
+        methods.  Call release_connection() once the connection is finished
+        with.
+        """
+        return self.connection_pool.get_connection()
+
+    def release_connection(self, connection):
+        """Release a connection returned by get_connection().
+
+        Once a connection is released it should not be used anymore.
+        """
+        self.connection_pool.release_connection(connection)
+
+    def wal_mode(self):
+        """Is this database using WAL mode for transactions?"""
+        return self.connection_pool.wal_mode
+
+    def make_item_info(self, row_data):
+        """Create an ItemInfo from a result row."""
+        return ItemInfo(row_data)
+
+class DeviceItemSource(ItemSource):
+
+    select_info = DeviceItemSelectInfo()
+
+    def __init__(self, device_info):
+        self.connection_pool = \
+            app.connection_pools.get_device_pool(device_info.id)
+        self.device_info = device_info
+
+    def make_item_info(self, row_data):
+        return DeviceItemInfo(self.device_info, row_data)
+
+class SharingItemSource(ItemSource):
+    select_info = SharingItemSelectInfo()
+
+    def __init__(self, share_info):
+        self.connection_pool = \
+            app.connection_pools.get_sharing_pool(share_info.id)
+        self.share_info = share_info
+
+    def make_item_info(self, row_data):
+        return SharingItemInfo(self.share_info, row_data)
diff -Nru miro-4.0.4/lib/data/itemtrack.py miro-6.0/lib/data/itemtrack.py
--- miro-4.0.4/lib/data/itemtrack.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/itemtrack.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,1065 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""miro.data.itemtrack -- Track Items in the database +""" +import collections +import logging +import string +import sqlite3 +import random +import re +import weakref + +from miro import app +from miro import models +from miro import prefs +from miro import schema +from miro import signals +from miro import util +from miro.data import item +from miro.gtcache import gettext as _ + +ItemTrackerCondition = util.namedtuple( + "ItemTrackerCondition", + "columns sql values", + + """ItemTrackerCondition defines one term for the WHERE clause of a query. + + :attribute columns: list of (table, column) tuples that that this + condition refers to. If any of these change in the DB, then we should + re-run the query. + :attribute sql: sql string for the clause + :attribute values: list of values to use to fill in sql + """) + +ItemTrackerOrderBy = util.namedtuple( + "ItemTrackerOrderBy", + "columns sql", + + """ItemTrackerOrderBy defines one term for the ORDER BY clause of a query. + + :attribute columns: list of (table, column) tuples used in the query + :attribute sql: sql expression + """) + +class ItemTrackerQueryBase(object): + """Query used to select item ids for ItemTracker. """ + + select_info = item.ItemSelectInfo() + + def __init__(self): + self.conditions = [] + self.match_string = None + self.order_by = None + self.limit = None + + def join_sql(self, table, join_type='LEFT JOIN'): + return self.select_info.join_sql(table, join_type=join_type) + + def table_name(self): + return self.select_info.table_name + + def could_list_change(self, message): + """Given a ItemChanges message, could the id list change? + """ + if message.added or message.removed: + return True + if message.changed_columns.intersection(self.get_columns_to_track()): + return True + return False + + def _parse_column(self, column): + """Parse a column specification. + + Returns a (table, column) tuple. If the table is explicitly + specified, we will use that. If not, we will default to item. + """ + if '.' in column: + (table, column) = column.split('.', 1) + if not self.select_info.can_join_to(table): + raise ValueError("Can't join to %s" % table) + return (table, column) + else: + return (self.table_name(), column) + + def add_condition(self, column, operator, value): + """Add a condition to the WHERE clause + + column, operator, and value work together to create the clause. For + example to add "feed_id = ?" 
and have 100 be the value for the "?",
+        then use "feed_id", "=", 100 for column, operator, and value.
+        """
+        table, column = self._parse_column(column)
+        sql = "%s.%s %s ?" % (table, column, operator)
+        cond = ItemTrackerCondition([(table, column)], sql, (value,))
+        self.conditions.append(cond)
+
+    def set_search(self, search_string):
+        """Set the full-text search to use for this item tracker."""
+        if search_string is None:
+            self.match_string = None
+            return
+        # parse search_string and make a string for the sqlite3 match
+        # command.  We do the following:
+        # - lowercase the terms to ensure that they don't contain any
+        #   sqlite3 fts operators
+        # - remove any non-word characters from search_string
+        # - add a prefix search to the last term, since the user can still
+        #   be typing it out.
+        terms = re.findall("\w+", search_string.lower())
+        if 'torrent' in terms:
+            # as a special case, the search string "torrent" matches
+            # torrent items
+            terms.remove('torrent')
+            self.add_condition("remote_downloader.type", '=', 'BitTorrent')
+        if not terms:
+            self.match_string = None
+            return
+        self.match_string = " ".join(terms)
+        if self.match_string and search_string[-1] != ' ':
+            self.match_string += "*"
+
+    def add_complex_condition(self, columns, sql, values=()):
+        """Add a complex condition to the WHERE clause
+
+        This method can be used to add conditions that don't fit into the
+        "<column> <operator> ?" form.
+
+        NOTE: this doesn't support all possible conditions, since some may
+        depend on multiple columns, or None.  But this is good enough for
+        how we use it.
+
+        :param columns: list of columns that this condition depends on
+        :param sql: sql that defines the condition
+        :param values: tuple of values to substitute into sql
+        """
+        columns = [self._parse_column(c) for c in columns]
+        cond = ItemTrackerCondition(columns, sql, values)
+        self.conditions.append(cond)
+
+    def set_order_by(self, columns, collations=None):
+        """Change the ORDER BY clause.
+
+        :param columns: list of column names to sort by.  To do a
+        descending search, prefix the name with "-".
+        :param collations: list of collations to use to sort by.  None
+        specifies the default collation.  Otherwise, there must be 1 value
+        for each column and it should specify the collation to use for that
+        column.
+ """ + if collations is None: + collations = (None,) * len(columns) + elif len(collations) != len(columns): + raise ValueError("sequence length mismatch") + + sql_parts = [] + order_by_columns = [] + for column, collation in zip(columns, collations): + if column[0] == '-': + descending = True + column = column[1:] + else: + descending = False + table, column = self._parse_column(column) + order_by_columns.append((table, column)) + sql_parts.append(self._order_by_expression(table, column, + descending, collation)) + self.order_by = ItemTrackerOrderBy(order_by_columns, + ', '.join(sql_parts)) + + def set_complex_order_by(self, columns, sql): + """Change the ORDER BY clause to a complex SQL expression + + :param columns: list of column names refered to in sql + :param sql: SQL to execute + """ + order_by_columns = [self._parse_column(c) for c in columns] + self.order_by = ItemTrackerOrderBy(order_by_columns, sql) + + def _order_by_expression(self, table, column, descending, collation): + parts = [] + parts.append("%s.%s" % (table, column)) + if collation is not None: + parts.append("collate %s" % collation) + if descending: + parts.append("DESC") + else: + parts.append("ASC") + return " ".join(parts) + + def set_limit(self, limit): + self.limit = limit + + def get_columns_to_track(self): + """Get the columns that affect the results of the query """ + columns = set() + for c in self.conditions: + for table, column in c.columns: + if table == self.table_name(): + columns.add(column) + else: + columns.add(self.select_info.item_join_column(table)) + if self.order_by: + columns.update(column for (table, column) + in self.order_by.columns + if table == self.table_name()) + return columns + + def get_other_tables_to_track(self): + """Get tables other than item that could affect this query.""" + other_tables = set() + for c in self.conditions: + other_tables.update(table for table, column in c.columns) + if self.order_by: + other_tables.update(table for (table, column) + in self.order_by.columns) + other_tables.discard('item') + return other_tables + + def select_ids(self, connection): + """Run the select statement for this query + + :returns: list of item ids + """ + sql_parts = [] + arg_list = [] + sql_parts.append("SELECT %s.id FROM %s" % + (self.table_name(), self.table_name())) + self._add_joins(sql_parts, arg_list) + self._add_conditions(sql_parts, arg_list) + self._add_order_by(sql_parts, arg_list) + self._add_limit(sql_parts, arg_list) + sql = ' '.join(sql_parts) + logging.debug("ItemTracker: running query %s (%s)", sql, arg_list) + item_ids = [row[0] for row in connection.execute(sql, arg_list)] + logging.debug("ItemTracker: done running query") + return item_ids + + def select_item_data(self, connection): + """Run the select statement for this query + + :returns: list of column data for all items in this query. The + columns will match the columns specified in our select_info. 
+ """ + sql_parts = [] + arg_list = [] + + select_columns = ', '.join("%s.%s " % (col.table, col.column) + for col in + self.select_info.select_columns) + sql_parts.append("SELECT %s" % select_columns) + sql_parts.append("FROM %s " % self.table_name()) + self._add_joins(sql_parts, arg_list, include_select_columns=True) + self._add_conditions(sql_parts, arg_list) + self._add_order_by(sql_parts, arg_list) + self._add_limit(sql_parts, arg_list) + sql = ' '.join(sql_parts) + logging.debug("ItemTracker: running query %s (%s)", sql, arg_list) + item_data = list(connection.execute(sql, arg_list)) + logging.debug("ItemTracker: done running query") + return item_data + + def _add_joins(self, sql_parts, arg_list, include_select_columns=False): + join_tables = set() + for c in self.conditions: + join_tables.update(table for table, column in c.columns) + if self.order_by: + join_tables.update(table for (table, column) + in self.order_by.columns) + if include_select_columns: + join_tables.update(col.table + for col in self.select_info.select_columns) + join_tables.discard(self.table_name()) + for table in join_tables: + sql_parts.append(self.join_sql(table)) + if self.match_string: + sql_parts.append(self.join_sql('item_fts', join_type='JOIN')) + + def _add_all_joins(self, sql_parts, arg_list): + joins_for_data = set(col.table + for col in self.select_info.select_columns + if col.table != self.table_name()) + joins_for_conditions = set() + for c in self.conditions: + joins_for_conditions.update(table for table, column in c.columns) + if self.order_by: + joins_for_conditions.update(table for (table, column) + in self.order_by.columns) + for table in joins_for_data: + if table != self.table_name(): + sql_parts.append(self.join_sql(table, 'LEFT JOIN')) + for table in joins_for_conditions: + if table != self.table_name() and table not in joins_for_data: + sql_parts.append(self.join_sql(table)) + if self.match_string: + sql_parts.append(self.join_sql('item_fts')) + + def _add_conditions(self, sql_parts, arg_list): + if not (self.conditions or self.match_string): + return + where_parts = [] + for c in self.conditions: + where_parts.append(c.sql) + arg_list.extend(c.values) + if self.match_string: + where_parts.append("item_fts MATCH ?") + arg_list.append(self.match_string) + sql_parts.append("WHERE %s" % ' AND '.join( + '(%s)' % part for part in where_parts)) + + def _add_order_by(self, sql_parts, arg_list): + if self.order_by: + sql_parts.append("ORDER BY %s" % self.order_by.sql) + + def _add_limit(self, sql_parts, arg_list): + if self.limit is not None: + sql_parts.append("LIMIT %s" % self.limit) + + def copy(self): + retval = self.__class__() + retval.conditions = self.conditions[:] + retval.order_by = self.order_by + retval.match_string = self.match_string + return retval + +class ItemTrackerQuery(ItemTrackerQueryBase): + """ItemTrackerQuery for items in the main db.""" + + def could_list_change(self, message): + """Given a ItemChanges message, could the id list change? 
+ """ + other_tables = self.get_other_tables_to_track() + if message.dlstats_changed and 'remote_downloader' in other_tables: + return True + if message.playlists_changed and 'playlist_item_map' in other_tables: + return True + return ItemTrackerQueryBase.could_list_change(self, message) + +class DeviceItemTrackerQuery(ItemTrackerQueryBase): + """ItemTrackerQuery for DeviceItems.""" + + select_info = item.DeviceItemSelectInfo() + +class SharingItemTrackerQuery(ItemTrackerQueryBase): + """ItemTrackerQuery for SharingItems.""" + + select_info = item.SharingItemSelectInfo() + + def tracking_playlist_map(self): + for c in self.conditions: + for table, column in c.columns: + if table == 'sharing_item_playlist_map': + return True + return False + + def could_list_change(self, message): + if message.changed_playlists and self.tracking_playlist_map(): + return True + else: + return ItemTrackerQueryBase.could_list_change(self, message) + +class ItemTracker(signals.SignalEmitter): + """Track items in the database + + ItemTracker is used by the frontends to implement the model for its + TableViews for lists of items. + + ItemTracker does several things efficently to make it fast for the + frontend: + + - Fetches ids first, then fetches row data when it's requested, or in + idle callbacks. + - Can efficently tell what's changed in an item list when another process + modifies the item data + + Signals: + + - "items-changed" (changed_id_list): some items have been changed, but the + list is the same. + - "list-changed": items have been added, removed, or reorded in the list. + """ + + # how many rows we fetch at one time in _ensure_row_loaded() + FETCH_ROW_CHUNK_SIZE = 25 + + def __init__(self, idle_scheduler, query, item_source): + """Create an ItemTracker + + :param idle_scheduler: function to schedule idle callback functions. + It should input a function and schedule for it to be called during + idletime. + :param query: ItemTrackerQuery to use + :param item_source: ItemSource to use. + """ + signals.SignalEmitter.__init__(self) + self.create_signal("will-change") + self.create_signal("items-changed") + self.create_signal("list-changed") + self.idle_scheduler = idle_scheduler + self.idle_work_scheduled = False + self.item_fetcher = None + self.item_source = item_source + self._db_retry_callback_pending = False + self._set_query(query) + self._fetch_id_list() + if self.item_fetcher is not None: + self._schedule_idle_work() + + def is_valid(self): + """Is this item list valid? + + This will return True until destroy() is called. + """ + return self.id_list is not None + + def destroy(self): + """Call this when you're done with the ItemTracker + + We will release any open connections to the database and reset our + self to an empty list. + """ + self._destroy_item_fetcher() + self.id_list = self.id_to_index = self.row_data = None + + def make_item_fetcher(self, connection, id_list): + """Make an ItemFetcher to use. + + :param connection: sqlite Connection to use. We will return data from + this connection without commiting any pending read transaction. + :param id_list: list of ids that we will have to fetch. 
+ """ + if self.item_source.wal_mode(): + klass = ItemFetcherWAL + else: + klass = ItemFetcherNoWAL + return klass(connection, self.item_source, id_list) + + def _destroy_item_fetcher(self): + if self.item_fetcher: + self.item_fetcher.destroy() + self.item_fetcher = None + + def _run_db_error_dialog(self): + if self._db_retry_callback_pending: + return + gettext_values = { + "appname": app.config.get(prefs.SHORT_APP_NAME) + } + title = _("%(appname)s database query failed", gettext_values) + description = _("%(appname)s was unable to read from its database.", + gettext_values) + app.db_error_handler.run_dialog(title, description, + self._retry_after_db_error) + + def _retry_after_db_error(self): + self._db_retry_callback_pending = False + self._refetch_id_list() + + def _set_query(self, query): + """Change our ItemTrackerQuery object.""" + self.query = query + + def _fetch_id_list(self): + """Fetch the ids for this list. """ + self._destroy_item_fetcher() + try: + connection = self.item_source.get_connection() + connection.execute("BEGIN TRANSACTION") + self.id_list = self.query.select_ids(connection) + self.item_fetcher = self.make_item_fetcher(connection, self.id_list) + except sqlite3.DatabaseError, e: + logging.warn("%s while fetching items", e, exc_info=True) + self._make_empty_list_after_db_error() + self.id_to_index = dict((id_, i) for i, id_ in enumerate(self.id_list)) + self.row_data = {} + + def _make_empty_list_after_db_error(self): + self.id_list = [] + self._run_db_error_dialog() + self.item_fetcher = None + + def _schedule_idle_work(self): + """Schedule do_idle_work to be called some time in the + future using idle_scheduler. + """ + if not self.idle_work_scheduled: + self.idle_scheduler(self.do_idle_work) + self.idle_work_scheduled = True + + def do_idle_work(self): + self.idle_work_scheduled = False + if self.item_fetcher is None: + # destroy() was called while the idle callback was still + # scheduled. Just return. 
+ return + for i in xrange(len(self.id_list)): + if not self._row_loaded(i): + # row data unloaded, call _ensure_row_loaded to load this row + # and adjecent rows then schedule another run later + self._ensure_row_loaded(i) + self._schedule_idle_work() + return + # no rows need loading + self.item_fetcher.done_fetching() + + def _uncache_row_data(self, id_list): + for id_ in id_list: + if id_ in self.row_data: + del self.row_data[id_] + + def _refetch_id_list(self, send_signals=True): + """Refetch a new id list after we already have one.""" + + if send_signals: + self.emit('will-change') + self._fetch_id_list() + if send_signals: + self.emit("list-changed") + + def get_items(self): + """Get a list of all items in sorted order.""" + return [self.get_row(i) for i in xrange(len(self.id_list))] + + def get_playable_ids(self): + """Get a list of ids for items that can be played.""" + # If we have loaded all items, then we can just use that data + if not self.idle_work_scheduled: + return [i.id for i in self.get_items() if i.is_playable] + else: + try: + return self.item_fetcher.select_playable_ids() + except sqlite3.DatabaseError, e: + logging.warn("%s in select_playable_ids()", e, exc_info=True) + self._run_db_error_dialog() + return [] + + def has_playables(self): + """Can we play any items from this item list?""" + if not self.idle_work_scheduled: + return any(i for i in self.get_items() if i.is_playable) + else: + try: + return self.item_fetcher.select_has_playables() + except sqlite3.DatabaseError, e: + logging.warn("%s in select_has_playables()", e, exc_info=True) + self._run_db_error_dialog() + return False + + def __len__(self): + return len(self.id_list) + + def _row_loaded(self, index): + id_ = self.id_list[index] + return id_ in self.row_data + + def _ensure_row_loaded(self, index): + """Ensure that we have an entry in self._item_rows for index.""" + + if self._row_loaded(index): + # we've already loaded the row for index + return + rows_to_load = [index] + # as long as we're reading from disk, load a chunk of rows instead of + # just one. + start_row = max(index - (self.FETCH_ROW_CHUNK_SIZE // 2), 0) + for i in xrange(start_row, index): + if not self._row_loaded(i): + rows_to_load.append(i) + for i in xrange(index+1, len(self.id_list)): + if not self._row_loaded(i): + rows_to_load.append(i) + if len(rows_to_load) >= self.FETCH_ROW_CHUNK_SIZE: + break + self._load_rows(rows_to_load) + + def _load_rows(self, rows_to_load): + """Query the database to fetch a set of items and put the data in + self.row_data + + :param rows_to_load: indexes of the rows to load. + """ + ids_to_load = set(self.id_list[i] for i in rows_to_load) + try: + items = self.item_fetcher.fetch_items(ids_to_load) + except sqlite3.DatabaseError, e: + logging.warn("%s while fetching items", e, exc_info=True) + items = [item.DBErrorItemInfo(item_id) for item_id in ids_to_load] + self._run_db_error_dialog() + returned_ids = set() + for item_info in items: + pos = self.id_to_index[item_info.id] + self.row_data[item_info.id] = item_info + returned_ids.add(item_info.id) + if returned_ids != ids_to_load: + extra = tuple(returned_ids - ids_to_load) + missing = tuple(ids_to_load - returned_ids) + msg = ("ItemFetcher didn't return the correct rows " + "(extra: %s, missing: %s)" % (extra, missing)) + raise AssertionError(msg) + + def item_in_list(self, item_id): + """Test if an item is in the list. + """ + return item_id in self.id_to_index + + def get_item(self, id_): + """Get an ItemRow for a given id. 
+ + :raises KeyError: id_ not in this list + """ + index = self.id_to_index[id_] + return self.get_row(index) + + def get_row(self, index): + """Get an ItemRow for row index. + + :raises IndexError: index out of range + """ + self._ensure_row_loaded(index) + try: + id_ = self.id_list[index] + except IndexError: + # re-raise the error with a bit more information + raise IndexError("%s is out of range" % index) + return self.row_data[id_] + + def get_first_item(self): + return self.get_row(0) + + def get_last_item(self): + return self.get_row(len(self)-1) + + def get_index(self, item_id): + """Get the index of an item in the list.""" + return self.id_to_index[item_id] + + def change_query(self, new_query): + """Change the query for this select + + This will cause the list-change signal to be emitted. + + :param new_query: ItemTrackerQuery object + """ + self._set_query(new_query) + self._refetch_id_list() + + def on_item_changes(self, message): + """Call this when items get changed and the list needs to be + updated. + + If the changes modify this list, either the items-changed or + list-changed signal will be emitted. + + :param message: an ItemChanges message + """ + self.emit('will-change') + changed_ids = [item_id for item_id in message.changed + if self.item_in_list(item_id)] + self._uncache_row_data(changed_ids) + if self._could_list_change(message): + self._refetch_id_list(send_signals=False) + self.emit("list-changed") + else: + if len(self.id_list) == 0: + # special case when the list is empty. This avoids accessing + # item_fetcher after _make_empty_list_after_db_error() is + # called. + self.emit("list-changed") + return + try: + need_refetch = self.item_fetcher.refresh_items(changed_ids) + except sqlite3.DatabaseError, e: + logging.warn("%s while refreshing items", e, exc_info=True) + self._make_empty_list_after_db_error() + self.emit("list-changed") + return + if not need_refetch: + self.emit('items-changed', changed_ids) + else: + self._refetch_id_list(send_signals=False) + self.emit("list-changed") + + def _could_list_change(self, message): + """Calculate if an ItemChanges means the list may have changed.""" + return self.query.could_list_change(message) + +class ItemFetcher(object): + """Create ItemInfo objects for ItemTracker + + ItemFetcher gets constructed with the connection that ItemTracker used to + select the ids for the item list along with those ids. It's responsible + for fetching data and creating ItemInfo objects as they are needed. + + The connection that gets passed to ItemFetcher still has a read + transaction open from the query that selected the item ids. ItemFetcher + should ensure that the data it fetches to create the ItemInfo is from that + same transaction. ItemFetcher should take ownership of the connection and + ensure that it gets released. + + We handle this 2 ways. If we are using the WAL journal mode, then we can + just keep the transaction open, since it won't block writers from + committing data. + + If we aren't using WAL journal mode, then we select the data we need into + a temporary table to freeze it in place. This is slower than the WAL + version, but not much. + + The two strategies are implemented by the 2 subclasses of ItemFetcher: + ItemFetcherWAL and ItemFetcherNoWAL. + + Finally ItemFetcher has 2 methods, select_playable_ids and + select_has_playables() which figure out which items in the list are + playable using an SQL select. This is needed because we want to calculate + this without having to load all the ItemInfos in the list. 
+ """ + + def __init__(self, connection, item_source, id_list): + self.connection = connection + self.item_source = item_source + self.id_list = id_list + + def select_columns(self): + return self.item_source.select_info.select_columns + + def join_sql(self): + return self.item_source.select_info.join_sql() + + def table_name(self): + return self.item_source.select_info.table_name + + def path_column(self): + return self.item_source.select_info.path_column + + def release_connection(self): + if self.connection is not None: + self.item_source.release_connection(self.connection) + self.connection = None + + def destroy(self): + """Called when the ItemFetcher is no longer needed. Release any + resources. + """ + pass + + def done_fetching(self): + """Called when ItemTracker has fetched all the ItemInfos in its list + + ItemFetcher should release resources that are no longer needed, + however it should be ready to fetch items again if refresh_items() is + called. + """ + pass + + def fetch_items(self, item_ids): + """Get a list of ItemInfo + + :param item_ids: list of ids to fetch + :returns: list of ItemInfo objects. This is not necessarily in the + same order as item_ids. + """ + raise NotImplementedError() + + def refresh_items(self, changed_ids): + """Refresh item data. + + Normally ItemFetcher uses data from the read transaction that the + connection it was created with was in. Use this method to force + ItemFetcher to use new data for a list of items. + + :returns True: if we can't refresh the items and we should refetch the + entire list instead. This is a hack to work around #19823 + """ + raise NotImplementedError() + + def select_playable_ids(self): + """Calculate which items are playable using a select statement + + :returns: list of item ids + """ + raise NotImplementedError() + + def select_has_playables(self): + """Calculate if any items are playable using a select statement. + + :returns: True/False + """ + raise NotImplementedError() + +class ItemFetcherWAL(ItemFetcher): + def __init__(self, connection, item_source, id_list): + ItemFetcher.__init__(self, connection, item_source, id_list) + self._prepare_sql() + self.item_count = self.calc_item_count() + self.max_item_id = self.calc_max_item_id() + + def destroy(self): + self.release_connection() + + def done_fetching(self): + # We can safely finish the read transaction here + self.connection.commit() + + def calc_item_count(self): + sql = "SELECT COUNT(1) FROM %s" % self.table_name() + return self.connection.execute(sql).fetchone()[0] + + def calc_max_item_id(self): + sql = "SELECT MAX(id) FROM %s" % self.table_name() + return self.connection.execute(sql).fetchone()[0] + + def _prepare_sql(self): + """Get an SQL statement ready to fire when fetch() is called. + + The statement will be ready to go, except the WHERE clause will not be + present, since we can't know that in advance. + """ + columns = ['%s.%s' % (c.table, c.column) + for c in self.select_columns()] + self._sql = ("SELECT %s FROM %s %s" % + (', '.join(columns), self.table_name(), self.join_sql())) + + def fetch_items(self, id_list): + """Create Item objects.""" + where = ("WHERE %s.id in (%s)" % + (self.table_name(), ', '.join(str(i) for i in id_list))) + sql = ' '.join((self._sql, where)) + cursor = self.connection.execute(sql) + return [self.item_source.make_item_info(row) for row in cursor] + + def refresh_items(self, changed_ids): + # We ignore changed_ids and just start a new transaction which will + # refresh all the data. 
+ self.connection.commit() + self.connection.execute("BEGIN TRANSACTION") + # check if an item has been added/removed from the DB now that we have + # a new transaction. This can happen if the backend changes some + # items sends an ItemsChanged message, then deletes them before we + # process the message (see #19823) + + new_max_id = self.calc_max_item_id() + new_item_count = self.calc_item_count() + # checks for items have been added + if new_max_id != self.max_item_id: + self.max_item_id = new_max_id + # update item_count since that could have changed too + self.item_count = new_item_count + return True + # given that items haven't been added, we can use the total number of + # items to check if any have been deleted + if new_item_count != self.item_count: + self.item_count = new_item_count + return True + # nothing has changed, we can return false + return False + + def select_playable_ids(self): + sql = ("SELECT id FROM %s " + "WHERE %s IS NOT NULL AND " + "file_type != 'other' AND " + "id in (%s)" % + (self.table_name(), self.path_column(), + ','.join(str(id_) for id_ in self.id_list))) + return [row[0] for row in self.connection.execute(sql)] + + def select_has_playables(self): + sql = ("SELECT EXISTS (SELECT 1 FROM %s " + "WHERE %s IS NOT NULL AND " + "file_type != 'other' AND " + "id in (%s))" % + (self.table_name(), self.path_column(), + ','.join(str(id_) for id_ in self.id_list))) + return self.connection.execute(sql).fetchone()[0] == 1 + +class ItemFetcherNoWAL(ItemFetcher): + def __init__(self, connection, item_source, id_list): + ItemFetcher.__init__(self, connection, item_source, id_list) + self._make_temp_table() + self._select_into_temp_table(id_list) + self.connection.commit() + + def _make_temp_table(self): + randstr = ''.join(random.choice(string.letters) for i in xrange(10)) + self.temp_table_name = 'itemtmp_' + randstr + col_specs = ["%s %s" % (ci.attr_name, ci.sqlite_type()) + for ci in self.select_columns()] + create_sql = ("CREATE TABLE temp.%s(%s)" % + (self.temp_table_name, ', '.join(col_specs))) + index_sql = ("CREATE UNIQUE INDEX temp.%s_id ON %s (id)" % + (self.temp_table_name, self.temp_table_name)) + self.connection.execute(create_sql) + self.connection.execute(index_sql) + + def _select_into_temp_table(self, id_list): + template = string.Template("""\ +INSERT OR REPLACE INTO $temp_table_name($dest_columns) +SELECT $source_columns +FROM $table_name +$join_sql +WHERE $table_name.id in ($id_list)""") + d = { + 'temp_table_name': self.temp_table_name, + 'table_name': self.table_name(), + 'join_sql': self.join_sql(), + 'id_list': ', '.join(str(id_) for id_ in id_list), + 'dest_columns': ','.join(ci.attr_name + for ci in self.select_columns()), + 'source_columns': ','.join('%s.%s' % (ci.table, ci.column) + for ci in self.select_columns()), + } + sql = template.substitute(d) + self.connection.execute(sql) + + def destroy(self): + if self.connection is not None: + self.connection.execute("DROP TABLE %s" % self.temp_table_name) + self.connection.commit() + self.release_connection() + self.temp_table_name = None + + def fetch_items(self, id_list): + """Create Item objects.""" + # We can use SELECT * here because we know that we defined the columns + # in the same order as select_columns() returned them. 
+        id_list_str = ', '.join(str(i) for i in id_list)
+        sql = "SELECT * FROM %s WHERE id IN (%s)" % (self.temp_table_name,
+                                                     id_list_str)
+        return [self.item_source.make_item_info(row)
+                for row in self.connection.execute(sql)]
+
+    def refresh_items(self, changed_ids):
+        self._select_into_temp_table(changed_ids)
+        return False
+
+    def select_playable_ids(self):
+        sql = ("SELECT id FROM %s "
+               "WHERE %s IS NOT NULL AND "
+               "file_type != 'other' AND "
+               "id in (%s)" %
+               (self.table_name(), self.path_column(),
+                ','.join(str(id_) for id_ in self.id_list)))
+        return [row[0] for row in self.connection.execute(sql)]
+
+    def select_has_playables(self):
+        sql = ("SELECT EXISTS (SELECT 1 FROM %s "
+               "WHERE %s IS NOT NULL AND "
+               "file_type != 'other' AND "
+               "id in (%s))" %
+               (self.table_name(), self.path_column(),
+                ','.join(str(id_) for id_ in self.id_list)))
+        return self.connection.execute(sql).fetchone()[0] == 1
+
+class BackendItemTracker(signals.SignalEmitter):
+    """Item tracker used by the backend
+
+    BackendItemTracker works similarly to ItemTracker but with a couple of
+    changes that make it work better with the rest of the backend
+    components.  Specifically it:
+    - Uses the connection in app.db rather than any connection pools
+    - Fetches all ItemInfo objects up-front rather than using idle
+      callbacks
+    - Emits slightly different signals.  The main difference is that
+      BackendItemTracker calculates exactly which items have been
+      added/changed/removed rather than just emitting "list-changed" with
+      no extra info.
+
+    Signals:
+
+    - "items-changed" (added, changed, removed): some items have been
+      added/changed/removed from the list.  added/changed are lists of
+      ItemInfos.  removed is a list of item ids.
+    """
+    def __init__(self, query):
+        signals.SignalEmitter.__init__(self)
+        self.create_signal('items-changed')
+        self.item_changes_callback = None
+        self.query = query
+        self.fetch_items()
+        self.connect_to_item_changes()
+
+    def change_query(self, query):
+        self.query = query
+        self.refetch_items()
+
+    def fetch_items(self):
+        self.item_map = {}
+        for item_data in self.query.select_item_data(app.db.connection):
+            item_info = item.ItemInfo(item_data)
+            self.item_map[item_info.id] = item_info
+        self.item_ids = set(self.item_map.keys())
+
+    def get_items(self):
+        return self.item_map.values()
+
+    def connect_to_item_changes(self):
+        self.item_changes_callback = models.Item.change_tracker.connect(
+            'item-changes', self.on_item_changes)
+
+    def destroy(self):
+        if self.item_changes_callback is not None:
+            models.Item.change_tracker.disconnect(self.item_changes_callback)
+            self.item_changes_callback = None
+
+    def on_item_changes(self, change_tracker, msg):
+        if app.db.is_closed():
+            return
+
+        if self.query.could_list_change(msg):
+            # items may have been added/removed from the list.  We need to
+            # re-fetch the items and calculate changes
+            self.refetch_items(msg.changed)
+        else:
+            # items changed, but the list is the same.  Just refetch the
+            # changed items.
+            changed_ids = msg.changed.intersection(self.item_ids)
+            changed_items = item.fetch_item_infos(app.db.connection,
+                                                  changed_ids)
+            for item_info in changed_items:
+                self.item_map[item_info.id] = item_info
+            self.emit('items-changed', [], changed_items, [])
+
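A minimal sketch of how backend code could consume this class (illustrative
only, not part of the patch; it assumes a query built with the
ItemTrackerQuery API above, and the usual SignalEmitter convention of
passing the emitter as the handler's first argument):

    tracker = BackendItemTracker(query)

    def on_items_changed(tracker, added, changed, removed):
        # added/changed are lists of ItemInfo; removed is a list of ids
        for info in added:
            logging.info('new item: %s', info.title)

    tracker.connect('items-changed', on_items_changed)
    # ... when tracking is no longer needed:
    tracker.destroy()
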
+    def refetch_items(self, changed_ids=None):
+        # items may have been added/removed from the list.  We need to
+        # re-fetch the items and calculate changes
+        old_item_ids = self.item_ids
+        self.fetch_items()
+
+        added_ids = self.item_ids - old_item_ids
+        removed_ids = old_item_ids - self.item_ids
+        if changed_ids:
+            # remove ids from changed that aren't on the list
+            changed_ids = changed_ids.intersection(self.item_ids)
+            # remove ids from changed that were just added
+            changed_ids = changed_ids.difference(added_ids)
+        else:
+            changed_ids = []
+        self.emit('items-changed',
+                  [self.item_map[id_] for id_ in added_ids],
+                  [self.item_map[id_] for id_ in changed_ids],
+                  list(removed_ids))
diff -Nru miro-4.0.4/lib/data/mappings.py miro-6.0/lib/data/mappings.py
--- miro-4.0.4/lib/data/mappings.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/mappings.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,75 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""miro.data.mappings -- Handle map tables for the database.
+"""
+
+def get_playlist_items(connection, playlist_id):
+    """Get the order of items in a playlist.
+
+    :returns: list of item ids
+    """
+    cursor = connection.execute("SELECT item_id "
+                                "FROM playlist_item_map "
+                                "WHERE playlist_id=? "
+                                "ORDER BY position",
+                                (playlist_id,))
+    return [row[0] for row in cursor.fetchall()]
+
+class SharingItemPlaylistMap(object):
+    """Map playlist ids to the items in them. """
+    def __init__(self, connection):
+        self.connection = connection
+
+    def get_map(self):
+        """Get a map of playlist ids to item ids.
+
+        :returns: dict mapping playlist ids to sets of item ids
+        """
+        rv = {}
+        cursor = self.connection.execute("SELECT playlist_id, item_id "
+                                         "FROM sharing_item_playlist_map")
+        for playlist_id, item_id in cursor:
+            if playlist_id not in rv:
+                rv[playlist_id] = set()
+            rv[playlist_id].add(item_id)
+        return rv
+
+    def remove_playlist(self, playlist_id):
+        """Remove all entries for a playlist """
+        self.connection.execute("DELETE FROM sharing_item_playlist_map "
+                                "WHERE playlist_id=?", (playlist_id,))
+
+    def set_playlist_items(self, playlist_id, item_ids):
+        """Set the items in a playlist."""
+        self.remove_playlist(playlist_id)
+        self.connection.executemany(
+            "INSERT INTO sharing_item_playlist_map(playlist_id, item_id) "
+            "VALUES (?, ?)",
+            [(playlist_id, item_id) for item_id in item_ids])
diff -Nru miro-4.0.4/lib/data/namecollation.cpp miro-6.0/lib/data/namecollation.cpp
--- miro-4.0.4/lib/data/namecollation.cpp	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/data/namecollation.cpp	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,342 @@
+/*
+# Miro - an RSS based video player application
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+*/
+
+#include <Python.h>
+
+/*
+ * miro.data.namecollation -- implement the collation for names.
+ *
+ * When ordering by name, we have a few special rules:
+ *   - We ignore case
+ *   - A leading "The" or "a" is ignored
+ *   - Names with numbers in them are sorted using natural sort ("Episode
+ *     10" comes after "Episode 9")
+ *
+ * The setup_collation() function creates the name collation on an sqlite3
+ * Connection object.  After that's called you can use "collate name" to
+ * order using the above rules
+ *
+ * This module is implemented in C because doing it in python resulted in a
+ * serious slow down.
+ *
+ */
+
+#include <sqlite3.h>
+#include <wctype.h>
+#include <wchar.h>
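From Python, the collation described above would be used roughly like this
(a minimal sketch, not part of the patch; it assumes the extension is
importable as miro.data.namecollation once built, and that the table being
sorted has a title column, as the item tables in this patch do):

    import sqlite3
    from miro.data import namecollation

    connection = sqlite3.connect('miro.db')
    namecollation.setup_collation(connection)
    # titles now sort ignoring case, ignoring a leading "the "/"a ",
    # and with natural ordering for embedded numbers
    rows = connection.execute("SELECT title FROM item "
                              "ORDER BY title COLLATE name").fetchall()
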
+/*
+ * As far as I can tell there's no header file that defines the struct for a
+ * sqlite3.Connection object.  We need to try to define it ourselves.  The
+ * following is definitely incomplete, but all we care about is the pointer
+ * to the sqlite3 struct.  As long as the actual struct has the sqlite3
+ * pointer as its first member this should work.
+*/
+
+typedef struct {
+    PyObject_HEAD
+    sqlite3* db;
+} PySqliteConnectionStruct;
+
+static PyTypeObject* PySqliteConnectionType;
+
+/*
+ * Define a list of strings that we want to ignore if the name starts with
+ * them.  These are stored in utf8-encoded format (fortunately plain ASCII
+ * works fine).
+ */
+
+static const char* prefixes_to_ignore[] = {
+    "the ",
+    "a ",
+};
+#define PREFIXES_TO_IGNORE_SIZE 2
+
+/*
+ * get_utf8_char
+ *    Given a UTF string, get the first wide character in it
+ */
+wchar_t get_utf8_char(const char* string, int* char_length)
+{
+
+    int char_len;
+    int i;
+    char first_byte_mask;
+    wint_t rv;
+    char first_char;
+
+    first_char = string[0];
+    if ((first_char & 0x80) == 0) {
+        char_len = 1;
+        first_byte_mask = 0x7F;
+    } else if (first_char >> 5 == 0x06) {
+        char_len = 2;
+        first_byte_mask = 0x1F;
+    } else if (first_char >> 4 == 0x0E) {
+        char_len = 3;
+        first_byte_mask = 0x0F;
+    } else if (first_char >> 3 == 0x1E) {
+        char_len = 4;
+        first_byte_mask = 0x07;
+    } else if (first_char >> 2 == 0x3E) {
+        char_len = 5;
+        first_byte_mask = 0x03;
+    } else {
+        char_len = 6;
+        first_byte_mask = 0x01;
+    }
+
+    /* Process first byte */
+    rv = string[0] & first_byte_mask;
+    /* Process additional bytes */
+    for(i = 1; i < char_len; i++) {
+        rv = (rv << 6) | (string[i] & 0x3F);
+    }
+    /* return results */
+    *char_length = char_len;
+    return rv;
+}
+
+/*
+ * python-like cmp() function
+ */
+
+template <class T>
+int cmp(T left, T right) {
+    if (left < right) {
+        return -1;
+    } else if (left > right) {
+        return 1;
+    } else {
+        return 0;
+    }
+}
+
+/*
+ * Helper class to get wide characters out of a utf8 string
+ */
+
+class UTF8Stream {
+    public:
+        UTF8Stream(const char* string_start, int string_length) {
+            this->pos = string_start;
+            this->end = string_start + string_length;
+            this->move_start_pos();
+            this->calc_next_char();
+        }
+
+        wint_t peek() {
+            return this->next_char;
+        }
+
+        // Get the next numeric value in the stream
+        long peeknumber() {
+            wchar_t digits[128];
+            const char* digitpos = this->pos;
+            int char_len;
+            wchar_t next_char;
+
+            for(int i=0; i < 127; i++) {
+                if(digitpos >= this->end) {
+                    next_char = '\0';
+                } else {
+                    next_char = get_utf8_char(digitpos, &char_len);
+                    digitpos += char_len;
+                }
+
+                if(!iswdigit(next_char)) {
+                    digits[i] = '\0';
+                    break;
+                } else {
+                    digits[i] = next_char;
+                }
+            }
+            // Ensure the digits string is NUL terminated if we never break
+            // from the for loop.
+            digits[127] = '\0';
+            // Check if we didn't find any digits at all.  If so return -1 to
+            // sort non-numbers above numbers.
+            if(digits[0] == '\0') {
+                return -1;
+            }
+            // Finally use wcstol to sort numbers.
+            return wcstol(digits, NULL, 10);
+        }
+
+        void move_forward() {
+            this->pos += this->next_char_length;
+            this->calc_next_char();
+        }
+
+        int at_end() {
+            return this->pos >= this->end;
+        }
+
+        int length_left() {
+            return this->end - this->pos;
+        }
+
+    protected:
+        const char* pos;
+        const char* end;
+        wint_t next_char;
+        int next_char_length;
+
+        void calc_next_char() {
+            if(!this->at_end()) {
+                this->next_char = get_utf8_char(this->pos,
+                                                &(this->next_char_length));
+                this->next_char = towlower(this->next_char);
+            } else {
+                this->next_char = WEOF;
+                this->next_char_length = 0;
+            }
+        }
+
+        void move_start_pos() {
+            for(int i = 0; i < PREFIXES_TO_IGNORE_SIZE; i++) {
+                if(this->move_past_prefix(prefixes_to_ignore[i])) {
+                    return;
+                }
+            }
+        }
+
+        // Try to move past a prefix string and return 1 if we did
+        int move_past_prefix(const char* prefix) {
+            const char* search_pos = this->pos;
+            const char* prefix_pos = prefix;
+            while(search_pos < this->end) {
+                wint_t prefix_char, search_char;
+                int prefix_char_len, search_char_len;
+                prefix_char = get_utf8_char(prefix_pos, &prefix_char_len);
+                if(prefix_char == '\0') {
+                    // Reached the end of the prefix string.  This is a
+                    // match.
+                    this->pos = search_pos;
+                    return 1;
+                }
+                search_char = get_utf8_char(search_pos, &search_char_len);
+                if(towlower(prefix_char) != towlower(search_char)) {
+                    // No match
+                    return 0;
+                }
+                // Move to the next character
+                prefix_pos += prefix_char_len;
+                search_pos += search_char_len;
+            }
+            // Reached the end of our string.  No match
+            return 0;
+        }
+};
+
+static int name_collation(void* arg1,
+                          int str1_len, const void* v_str1,
+                          int str2_len, const void* v_str2)
+{
+    UTF8Stream string1(static_cast<const char*>(v_str1), str1_len);
+    UTF8Stream string2(static_cast<const char*>(v_str2), str2_len);
+
+    while(!string1.at_end() && !string2.at_end()) {
+        if(string1.peek() == string2.peek()) {
+            string1.move_forward();
+            string2.move_forward();
+        } else {
+            if(iswdigit(string1.peek()) || iswdigit(string2.peek())) {
+                // One of the strings is at a numeric value.  Compare the
+                // numeric values rather than the string values to achieve
+                // a natural sort.
+                return cmp(string1.peeknumber(), string2.peeknumber());
+            } else {
+                // Neither string is a number; use a character comparison.
+                //
+                // FIXME: We should use something like strcoll here that
+                // takes into account accents and things like that.
+                return cmp(string1.peek(), string2.peek());
+            }
+        }
+    }
+
+    // Both strings were the same until one ended.  Order the longer string
+    // after the shorter one.
+    return cmp(string1.length_left(), string2.length_left());
+}
+
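The ordering rules implemented above are easy to prototype in Python; here
is a rough equivalent of the sort key (an illustrative sketch, not part of
the patch; it approximates the natural sort by splitting digit runs and
ignores the same leading articles):

    import re

    def name_sort_key(name):
        # lowercase and drop a leading "the " or "a ", like
        # move_start_pos() does
        s = name.lower()
        for prefix in ('the ', 'a '):
            if s.startswith(prefix):
                s = s[len(prefix):]
                break
        # split into digit/non-digit runs so "Episode 10" sorts after
        # "Episode 9"
        return [int(part) if part.isdigit() else part
                for part in re.split(r'(\d+)', s)]

    sorted(['Episode 10', 'The Episode 9'], key=name_sort_key)
    # -> ['The Episode 9', 'Episode 10']
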
Got %s", + arg->ob_type->tp_name); + return NULL; + } + sqlite3_create_collation(((PySqliteConnectionStruct*)arg)->db, + "name", + SQLITE_UTF8, + NULL, + name_collation); + Py_RETURN_NONE; +} + +static PyMethodDef DBCollationsMethods[] = +{ + {"setup_collation", (PyCFunction)setup_collation, METH_O, + "Setup collations on an sqlite database" + }, + { NULL, NULL, 0, NULL } +}; + +PyMODINIT_FUNC initnamecollation(void) +{ + PyObject* sqlite3_mod; + PyObject* connection_obj; + sqlite3_mod = PyImport_ImportModule("sqlite3"); + if (!sqlite3_mod) { + PyErr_SetString(PyExc_ImportError, "can't import sqlite3 module"); + return; + } + connection_obj = PyObject_GetAttrString(sqlite3_mod, "Connection"); + if(!connection_obj) { + PyErr_SetString(PyExc_ImportError, "Error importing sqlite3.Connection"); + Py_XDECREF(sqlite3_mod); + return; + } + if(!PyType_Check(connection_obj)) { + PyErr_SetString(PyExc_ImportError, "sqlite3.Connection is not a type"); + Py_XDECREF(sqlite3_mod); + return; + } + PySqliteConnectionType = reinterpret_cast(connection_obj); + Py_XDECREF(sqlite3_mod); + Py_InitModule("miro.data.namecollation", DBCollationsMethods); +} + +} /* extern "C" */ diff -Nru miro-4.0.4/lib/database.py miro-6.0/lib/database.py --- miro-4.0.4/lib/database.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/database.py 2013-04-05 16:02:42.000000000 +0000 @@ -27,17 +27,17 @@ # this exception statement from your version. If you delete this exception # statement from all source files in the program, then also delete it here. +import itertools import logging import traceback import threading from miro import app from miro import signals +from miro import threadcheck -class DatabaseException(Exception): - """Superclass for classes that subclass Exception and are all - Database related. - """ +class DatabaseException(StandardError): + """Superclass database errors.""" pass class DatabaseConstraintError(DatabaseException): @@ -52,12 +52,6 @@ """ pass -class DatabaseThreadError(DatabaseException): - """Raised when the database encounters an internal consistency - issue. - """ - pass - class DatabaseStandardError(StandardError): pass @@ -91,26 +85,6 @@ """ pass -# begin* and end* no longer actually lock the database. Instead -# confirm_db_thread prints a warning if it's run from any thread that -# isn't the main thread. This can be removed from releases for speed -# purposes. - -event_thread = None -def set_thread(thread): - global event_thread - if event_thread is None: - event_thread = thread - -def confirm_db_thread(): - if event_thread is None or event_thread != threading.currentThread(): - if event_thread is None: - error_string = "Database event thread not set" - else: - error_string = "Database called from %s" % threading.currentThread() - traceback.print_stack() - raise DatabaseThreadError, error_string - class ViewObjectFetcher(object): """Interface for classes that handle retrieving objects for Views. 
@@ -140,23 +114,26 @@ pass class DDBObjectFetcher(ViewObjectFetcher): - def __init__(self, klass): + def __init__(self, klass, db_info): self.klass = klass + self.db_info = db_info def fetch_obj(self, id_): - return app.db.get_obj_by_id(id_) + return self.db_info.db.get_obj_by_id(id_, self.klass) def fetch_obj_for_ddb_object(self, ddb_object): return ddb_object def table_name(self): - return app.db.table_name(self.klass) + return self.db_info.db.table_name(self.klass) def prepare_objects(self, id_list): - if app.db.ensure_objects_loaded(self.klass, id_list): + if self.db_info.db.ensure_objects_loaded(self.klass, id_list, + self.db_info): # sometimes objects will call remove() in setup_restored(). # We need to filter those out. - new_id_list = [id_ for id_ in id_list if app.db.id_alive(id_)] + new_id_list = [i for i in id_list + if self.db_info.db.id_alive(i, self.klass)] if len(new_id_list) < id_list: id_list[:] = new_id_list # update id_list in-place @@ -176,7 +153,7 @@ return item.id class View(object): - def __init__(self, fetcher, where, values, order_by, joins, limit): + def __init__(self, fetcher, where, values, order_by, joins, limit, db_info): self.fetcher = fetcher self.table_name = fetcher.table_name() self.where = where @@ -184,6 +161,7 @@ self.order_by = order_by self.joins = joins self.limit = limit + self.db_info = db_info def _query(self): id_list = self._query_ids() @@ -192,12 +170,14 @@ yield self.fetcher.fetch_obj(id_) def _query_ids(self): - return list(app.db.query_ids(self.table_name, self.where, self.values, - self.order_by, self.joins, self.limit)) + return list(self.db_info.db.query_ids(self.table_name, self.where, + self.values, self.order_by, + self.joins, self.limit)) def _query_count(self): - return app.db.query_count(self.table_name, self.where, self.values, - self.joins, self.limit) + return self.db_info.db.query_count(self.table_name, self.where, + self.values, self.joins, + self.limit) def __iter__(self): return self._query() @@ -220,10 +200,12 @@ def make_tracker(self): if self.limit is not None: raise ValueError("tracking views with limits not supported") - return ViewTracker(self.fetcher, self.where, self.values, self.joins) + return ViewTracker(self.fetcher, self.where, self.values, self.joins, + self.db_info) class ViewTrackerManager(object): - def __init__(self): + def __init__(self, db): + self.db = db # maps table_name to trackers self.table_to_tracker = {} # maps joined tables to trackers @@ -237,13 +219,13 @@ return self.table_to_tracker[table_name] def trackers_for_ddb_class(self, klass): - return self.trackers_for_table(app.db.table_name(klass)) + return self.trackers_for_table(self.db.table_name(klass)) - def update_view_trackers(self, obj): + def update_view_trackers(self, obj, can_change_views=True): """Update view trackers based on an object change.""" for tracker in self.trackers_for_ddb_class(obj.__class__): - tracker.object_changed(obj) + tracker.object_changed(obj, can_change_views) def bulk_update_view_trackers(self, table_name): for tracker in self.trackers_for_table(table_name): @@ -260,7 +242,7 @@ tracker.remove_object(obj) class ViewTracker(signals.SignalEmitter): - def __init__(self, fetcher, where, values, joins): + def __init__(self, fetcher, where, values, joins, db_info): signals.SignalEmitter.__init__(self, 'added', 'removed', 'changed', 'bulk-added', 'bulk-removed', 'bulk-changed') self.fetcher = fetcher @@ -270,13 +252,14 @@ raise TypeError("values must be a tuple") self.values = values self.joins = joins + self.db_info = 
db_info self.bulk_mode = False self.current_ids = self._view_object_ids() - vt_manager = app.view_tracker_manager + vt_manager = self.db_info.view_tracker_manager vt_manager.trackers_for_table(self.table_name).add(self) def unlink(self): - vt_manager = app.view_tracker_manager + vt_manager = self.db_info.view_tracker_manager vt_manager.trackers_for_table(self.table_name).discard(self) def set_bulk_mode(self, bulk_mode): @@ -295,16 +278,20 @@ where += ' AND (%s)' % (self.where,) values = (obj.id,) + self.values - return app.db.query_count(self.table_name, where, values, + return self.db_info.db.query_count(self.table_name, where, values, self.joins) > 0 def _view_object_ids(self): """Get all object ids in our view.""" - return set(app.db.query_ids(self.table_name, - self.where, self.values, joins=self.joins)) - - def object_changed(self, obj): - self.check_object(obj) + return set(self.db_info.db.query_ids(self.table_name, + self.where, self.values, + joins=self.joins)) + + def object_changed(self, obj, can_change_views): + if can_change_views: + self.check_object(obj) + elif obj.id in self.current_ids: + self.emit('changed', self.fetcher.fetch_obj_for_ddb_object(obj)) def remove_object(self, obj): if obj.id in self.current_ids: @@ -365,7 +352,9 @@ class BulkSQLManager(object): - def __init__(self): + def __init__(self, db, view_tracker_manager): + self.db = db + self.view_tracker_manager = view_tracker_manager self.active = False self.to_insert = {} self.to_remove = {} @@ -397,7 +386,7 @@ # Normally if we throw an exception, we want to rollback. However, if # we are in the middle of bulk inserting/removing, then we should try # to commit the objects that didn't throw anything see (#16341) - app.db.finish_transaction() + self.db.finish_transaction() def commit(self): for x in range(100): @@ -423,13 +412,13 @@ def _commit_sql(self, to_insert, to_remove): for table_name, objects in to_insert.items(): logging.debug('bulk insert: %s %s', table_name, len(objects)) - app.db.bulk_insert(objects) + self.db.bulk_insert(objects) for obj in objects: obj.inserted_into_db() for table_name, objects in to_remove.items(): logging.debug('bulk remove: %s %s', table_name, len(objects)) - app.db.bulk_remove(objects) + self.db.bulk_remove(objects) for obj in objects: obj.removed_from_db() @@ -453,8 +442,7 @@ This method is the fastest when there are not a lot of changed objects """ for obj in changed_objs: - app.view_tracker_manager.update_view_trackers(obj) - + self.view_tracker_manager.update_view_trackers(obj) def _update_view_trackers_by_table(self, to_insert, to_remove): """Update view trackers by checking each table @@ -462,17 +450,17 @@ This method is fastest when there are many changed objects """ for table_name in to_insert: - app.view_tracker_manager.bulk_update_view_trackers(table_name) + self.view_tracker_manager.bulk_update_view_trackers(table_name) for table_name, objects in to_remove.items(): if table_name in to_insert: # already updated the view above continue - app.view_tracker_manager.bulk_remove_from_view_trackers( + self.view_tracker_manager.bulk_remove_from_view_trackers( table_name, objects) def add_insert(self, obj): - table_name = app.db.table_name(obj.__class__) + table_name = self.db.table_name(obj.__class__) try: inserts_for_table = self.to_insert[table_name] except KeyError: @@ -488,11 +476,11 @@ return id_ in self.pending_removes def add_remove(self, obj): - table_name = app.db.table_name(obj.__class__) + table_name = self.db.table_name(obj.__class__) if self.will_insert(obj.id): 
self.to_insert[table_name].remove(obj) self.pending_inserts.remove(obj.id) - app.db.forget_object(obj) + self.db.forget_object(obj) return try: removes_for_table = self.to_remove[table_name] @@ -531,14 +519,18 @@ class DDBObject(signals.SignalEmitter): """Dynamic Database object """ - #The last ID used in this class - lastID = 0 def __init__(self, *args, **kwargs): + self.confirm_db_thread() self.in_db_init = True signals.SignalEmitter.__init__(self, 'removed') self.changed_attributes = set() + if 'db_info' in kwargs: + self.db_info = kwargs.pop('db_info') + else: + self.db_info = app.db_info + if len(args) == 0 and kwargs.keys() == ['restored_data']: restoring = True else: @@ -546,16 +538,16 @@ if restoring: self.__dict__.update(kwargs['restored_data']) - app.db.remember_object(self) + self.db_info.db.remember_object(self) self.setup_restored() # handle setup_restored() calling remove() if not self.id_exists(): return else: - self.id = DDBObject.lastID = DDBObject.lastID + 1 + self.id = self.db_info.make_new_id() # call remember_object so that id_exists will return True # when setup_new() is being run - app.db.remember_object(self) + self.db_info.db.remember_object(self) self.setup_new(*args, **kwargs) self.after_setup_new() if not self.id_exists(): @@ -570,44 +562,67 @@ self._insert_into_db() def _insert_into_db(self): - if not app.bulk_sql_manager.active: - app.db.insert_obj(self) + if not self.db_info.bulk_sql_manager.active: + try: + self.db_info.db.insert_obj(self) + except StandardError: + self.insert_into_db_failed() + raise self.inserted_into_db() - app.view_tracker_manager.update_view_trackers(self) + self.db_info.view_tracker_manager.update_view_trackers(self) else: - app.bulk_sql_manager.add_insert(self) + self.db_info.bulk_sql_manager.add_insert(self) def inserted_into_db(self): self.check_constraints() self.on_db_insert() + def insert_into_db_failed(self): + pass + @classmethod def make_view(cls, where=None, values=None, order_by=None, joins=None, - limit=None): + limit=None, db_info=None): if values is None: values = () - fetcher = DDBObjectFetcher(cls) - return View(fetcher, where, values, order_by, joins, limit) + if db_info is None: + db_info = app.db_info + fetcher = DDBObjectFetcher(cls, db_info) + return View(fetcher, where, values, order_by, joins, limit, db_info) @classmethod - def get_by_id(cls, id_): + def get_by_id(cls, id_, db_info=None): + if db_info is None: + db = app.db + else: + db = db_info.db try: # try memory first before going to sqlite. 
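        # (assumption: get_obj_by_id() checks LiveStorage's in-memory
        # object map and raises KeyError on a miss; the except clause
        # below falls back to loading the row through a one-object view)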
- obj = app.db.get_obj_by_id(id_) - if app.db.object_from_class_table(obj, cls): + obj = db.get_obj_by_id(id_, cls) + if db.object_from_class_table(obj, cls): return obj else: raise ObjectNotFoundError(id_) except KeyError: - return cls.make_view('id=?', (id_,)).get_singleton() + view = cls.make_view('id=?', (id_,), db_info=db_info) + return view.get_singleton() @classmethod - def delete(cls, where, values=None): - return app.db.delete(cls, where, values) + def delete(cls, where=None, values=None, db_info=None): + if db_info is None: + db = app.db + else: + db = db_info.db + return db.delete(cls, where, values) @classmethod - def select(cls, columns, where=None, values=None, convert=True): - return app.db.select(cls, columns, where, values, convert=convert) + def select(cls, columns, where=None, values=None, convert=True, + db_info=None): + if db_info is None: + db = app.db + else: + db = db_info.db + return db.select(cls, columns, where, values, convert=convert) def setup_new(self): """Initialize a newly created object.""" @@ -653,6 +668,21 @@ def reset_changed_attributes(self): self.changed_attributes = set() + def _bulk_update_db_values(self, dct): + """Safely update many DB values at a time. + + _bulk_update_db_values is needed because if you just call + self.__dict__.update(), then changed_attributes won't be updated so we + won't save the new values to the DB in signal_change() + + _bulk_update_db_values can only be used on attributes that map to + database columns. + + :param dct: dict of new values for our database attributes + """ + self.__dict__.update(dct) + self.changed_attributes.update(dct.keys()) + def get_id(self): """Returns unique integer associated with this object """ @@ -660,7 +690,7 @@ def id_exists(self): try: - self.get_by_id(self.id) + self.get_by_id(self.id, self.db_info) except ObjectNotFoundError: return False else: @@ -669,12 +699,12 @@ def remove(self): """Call this after you've removed all references to the object """ - if not app.bulk_sql_manager.active: - app.db.remove_obj(self) + if not self.db_info.bulk_sql_manager.active: + self.db_info.db.remove_obj(self) self.removed_from_db() - app.view_tracker_manager.remove_from_view_trackers(self) + self.db_info.view_tracker_manager.remove_from_view_trackers(self) else: - app.bulk_sql_manager.add_remove(self) + self.db_info.bulk_sql_manager.add_remove(self) def removed_from_db(self): self.emit('removed') @@ -687,7 +717,7 @@ view.confirm_db_thread() ... """ - confirm_db_thread() + threadcheck.confirm_eventloop_thread() def check_constraints(self): """Subclasses can override this method to do constraint @@ -696,7 +726,7 @@ """ pass - def signal_change(self, needs_save=True): + def signal_change(self, needs_save=True, can_change_views=True): """Call this after you change the object """ if self.in_db_init: @@ -709,25 +739,46 @@ raise DatabaseConstraintError, msg self.on_signal_change() self.check_constraints() - if app.bulk_sql_manager.will_insert(self.id): + if self.db_info.bulk_sql_manager.will_insert(self.id): # Don't need to send an UPDATE SQL command, or check the # view trackers in this case. Both will be done when the # BulkSQLManager.finish() is called. 
return if needs_save: - app.db.update_obj(self) - app.view_tracker_manager.update_view_trackers(self) + self.db_info.db.update_obj(self) + self.db_info.view_tracker_manager.update_view_trackers( + self, can_change_views) def on_signal_change(self): pass -def update_last_id(): - DDBObject.lastID = app.db.get_last_id() +class DBInfo(object): + """Stores per-database info for DDBObject and friends. -def setup_managers(): - app.view_tracker_manager = ViewTrackerManager() - app.bulk_sql_manager = BulkSQLManager() + Attributes: + - db -- LiveStorage object + - view_tracker_manager -- ViewTrackerManager + - bulk_sql_manager -- BulkSQLManager + """ + def __init__(self, db): + self.db = db + self.view_tracker_manager = ViewTrackerManager(db) + self.bulk_sql_manager = BulkSQLManager(db, self.view_tracker_manager) + self.update_last_id() + + def update_last_id(self): + last_id = self.db.get_last_id() + self.id_counter = itertools.count(last_id + 1) + + def make_new_id(self): + return self.id_counter.next() + +class DeviceDBInfo(DBInfo): + """DeviceDBInfo -- DBInfo for devices.""" + def __init__(self, db, device_id): + DBInfo.__init__(self, db) + self.device_id = device_id def initialize(): - update_last_id() - setup_managers() + app.db_info = DBInfo(app.db) + app.bulk_sql_manager = app.db_info.bulk_sql_manager diff -Nru miro-4.0.4/lib/databasesanity.py miro-6.0/lib/databasesanity.py --- miro-4.0.4/lib/databasesanity.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/databasesanity.py 2013-04-05 16:02:42.000000000 +0000 @@ -44,7 +44,7 @@ from miro import signals from miro import guide -class DatabaseInsaneError(Exception): +class DatabaseInsaneError(StandardError): pass class SanityTest(object): @@ -87,7 +87,7 @@ self.feeds_in_items.add(obj.feed_id) if obj.parent_id is not None: self.parents_in_items.add(obj.parent_id) - if obj.isContainerItem in (None, True): + if obj.is_container_item in (None, True): self.top_level_parents.add(obj.id) elif isinstance(obj, feed.Feed): self.top_level_feeds.add(obj.id) diff -Nru miro-4.0.4/lib/databaseupgrade.py miro-6.0/lib/databaseupgrade.py --- miro-4.0.4/lib/databaseupgrade.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/databaseupgrade.py 2013-04-05 16:02:42.000000000 +0000 @@ -42,9 +42,12 @@ import os import re import logging +import shutil import time import urllib +import sqlite3 + from miro.gtcache import gettext as _ from miro import schema from miro import util @@ -56,7 +59,7 @@ # looks nicer as a return value NO_CHANGES = set() -class DatabaseTooNewError(Exception): +class DatabaseTooNewError(StandardError): """Error that we raise when we see a database that is newer than the version that we can update too. """ @@ -69,37 +72,7 @@ :param table: the table to remove the columns from :param column_names: list of columns to remove """ - cursor.execute("PRAGMA table_info('%s')" % table) - columns = [] - columns_with_type = [] - for column_info in cursor.fetchall(): - column = column_info[1] - col_type = column_info[2] - if column in column_names: - continue - columns.append(column) - if column == 'id': - col_type += ' PRIMARY KEY' - columns_with_type.append("%s %s" % (column, col_type)) - - cursor.execute("PRAGMA index_list('%s')" % table) - index_sql = [] - for index_info in cursor.fetchall(): - name = index_info[1] - if name in column_names: - continue - cursor.execute("SELECT sql FROM sqlite_master " - "WHERE name=? 
and type='index'", (name,)) - index_sql.append(cursor.fetchone()[0]) - - cursor.execute("ALTER TABLE %s RENAME TO old_%s" % (table, table)) - cursor.execute("CREATE TABLE %s (%s)" % - (table, ', '.join(columns_with_type))) - cursor.execute("INSERT INTO %s SELECT %s FROM old_%s" % - (table, ', '.join(columns), table)) - cursor.execute("DROP TABLE old_%s" % table) - for sql in index_sql: - cursor.execute(sql) + alter_table_columns(cursor, table, column_names, {}) def rename_column(cursor, table, from_column, to_column, new_type=None): """Renames a column in a SQLITE table. @@ -120,6 +93,34 @@ :param to_column: the new name :param new_type: new type for the column (or None to keep the old one) """ + if new_type is None: + new_types = {} + else: + new_types = {to_column: new_type} + alter_table_columns(cursor, table, [], {from_column: to_column}, new_types) + +def alter_table_columns(cursor, table, delete_columns, rename_columns, + new_types=None): + """Rename/drop multiple columns at once + + .. Note:: + + This does **NOT** handle renaming the column in an index. + + If you're going to rename columns that are involved in indexes, + you'll need to add that feature. + + .. Note:: + + Don't rename the id column--that would be bad. + + :param table: the table to remove the columns from + :param delete_columns: list of columns to delete + :param rename_columns: dict mapping old column names to new column names + :param new_types: dict mapping new column names to their new types + """ + if new_types is None: + new_types = {} cursor.execute("PRAGMA table_info('%s')" % table) old_columns = [] new_columns = [] @@ -127,11 +128,13 @@ for column_info in cursor.fetchall(): column = column_info[1] col_type = column_info[2] + if column in delete_columns: + continue old_columns.append(column) - if column == from_column: - column = to_column - if new_type is not None: - col_type = new_type + if column in rename_columns: + column = rename_columns[column] + if column in new_types: + col_type = new_types[column] new_columns.append(column) if column == 'id': col_type += ' PRIMARY KEY' @@ -171,7 +174,14 @@ """ max_id = 0 for table in get_object_tables(cursor): - cursor.execute("SELECT MAX(id) from %s" % table) + # skip tables that don't store DDBObjects + if table.startswith('item_fts'): + continue + try: + cursor.execute("SELECT MAX(id) from %s" % table) + except sqlite3.OperationalError: + raise sqlite3.OperationalError("error selecting id from %s" % + table) max_id = max(max_id, cursor.fetchone()[0]) return max_id + 1 @@ -182,7 +192,54 @@ else: return globals()['upgrade%d' % version] -def new_style_upgrade(cursor, saved_version, upgrade_to): +def run_on_devices(func): + """decorator to run an upgrade function only for device databases. + + Note: when upgrading device databases, we need to account for the fact + that older versions of miro will still open the database with the newer + schema. This means we should go by the following rules: + + For new columns we need to either: + - Make NULL a valid value and deal with the fact that older versions + will always set the column to NULL + - Write code in devicedatabaseupgrade that checks for NULL values and + sets the default for them. (Writing a database upgrade won't work, + the issue is older versions of miro opening the database *after* the + upgrade). + - Don't delete columns. 
+ - Don't change the data format for columns + """ + func._contexts = set(['device']) + return func + +def run_on_both(func): + """decorator to run an upgrade function for both device databases and the + main database + """ + func._contexts = set(['device', 'main']) + return func + +def contexts_for_upgrade_func(func): + """Get the contexts where we should run an upgrade function. + + By default we only run upgrade functions in the main database. To change + this, a function can be decorated with one of the run_on_* wrappers. + """ + try: + return func._contexts # function has been decorated + except AttributeError: + return set(['main']) # default case + +def is_device_db(cursor): + """Test if a cursor is for a device database.""" + cursor.execute("SELECT COUNT(*) FROM sqlite_master " + "WHERE type='table' AND name='device_item'") + return cursor.fetchall()[0][0] > 0 + + + +def new_style_upgrade(cursor, saved_version, upgrade_to, context, + show_progress): """Upgrade a database using new-style upgrade functions. This method replaces the upgrade() method. However, we still need @@ -204,55 +261,58 @@ "(db version is %s)" % saved_version) raise DatabaseTooNewError(msg) - dbupgradeprogress.new_style_progress(saved_version, saved_version, - upgrade_to) + if show_progress: + dbupgradeprogress.new_style_progress(saved_version, saved_version, + upgrade_to) for version in xrange(saved_version + 1, upgrade_to + 1): if util.chatter: logging.info("upgrading database to version %s", version) - cursor.execute("BEGIN TRANSACTION") - get_upgrade_func(version)(cursor) - cursor.execute("COMMIT TRANSACTION") - dbupgradeprogress.new_style_progress(saved_version, version, - upgrade_to) + upgrade_func = get_upgrade_func(version) + if context in contexts_for_upgrade_func(upgrade_func): + cursor.execute("BEGIN TRANSACTION") + upgrade_func(cursor) + cursor.execute("COMMIT TRANSACTION") + if show_progress: + dbupgradeprogress.new_style_progress(saved_version, version, + upgrade_to) -def upgrade(savedObjects, saveVersion, upgradeTo=None): +def upgrade(savedObjects, save_version, upgrade_to, show_progress): """Upgrade a list of SavableObjects that were saved using an old version of the database schema. This method will call upgradeX for each number X between - saveVersion and upgradeTo. For example, if saveVersion is 2 and - upgradeTo is 4, this method is equivelant to:: + save_version and upgrade_to. For example, if save_version is 2 and + upgrade_to is 4, this method is equivalent to:: upgrade3(savedObjects) upgrade4(savedObjects) - By default, upgradeTo will be the VERSION variable in schema. + In practice, upgrade_to is the VERSION variable in schema.
""" changed = set() - if upgradeTo is None: - upgradeTo = schema.VERSION - - if saveVersion > upgradeTo: + if save_version > upgrade_to: msg = ("Database was created by a newer version of Miro " - "(db version is %s)" % saveVersion) + "(db version is %s)" % save_version) raise DatabaseTooNewError(msg) - startSaveVersion = saveVersion - dbupgradeprogress.old_style_progress(startSaveVersion, startSaveVersion, - upgradeTo) - while saveVersion < upgradeTo: + startSaveVersion = save_version + if show_progress: + dbupgradeprogress.old_style_progress(startSaveVersion, + startSaveVersion, upgrade_to) + while save_version < upgrade_to: if util.chatter: - print "upgrading database to version %s" % (saveVersion + 1) - upgradeFunc = get_upgrade_func(saveVersion + 1) + print "upgrading database to version %s" % (save_version + 1) + upgradeFunc = get_upgrade_func(save_version + 1) thisChanged = upgradeFunc(savedObjects) if thisChanged is None or changed is None: changed = None else: changed.update (thisChanged) - saveVersion += 1 - dbupgradeprogress.old_style_progress(startSaveVersion, saveVersion, - upgradeTo) + save_version += 1 + if show_progress: + dbupgradeprogress.old_style_progress(startSaveVersion, + save_version, upgrade_to) return changed def upgrade2(objectList): @@ -1831,7 +1891,7 @@ rv = self.first_video_enclosure["text"] elif hasattr(self.entry, "description"): rv = self.entry.description - except Exception: + except StandardError: logging.exception("_calc_raw_description threw exception:") if rv is None: return u'' @@ -3312,3 +3372,1048 @@ cursor.execute("ALTER TABLE global_state ADD COLUMN tabs_width integer") cursor.execute("UPDATE global_state SET tabs_width=200") +def upgrade161(cursor): + """Set the album view data to widget state tables .""" + # update item_details_expanded + ALBUM_VIEW = 3 + + cursor.execute("SELECT item_details_expanded FROM global_state") + row = cursor.fetchone() + if row is None: + # defaults not set yet, just ignore + return + item_details_expanded = eval(row[0]) + item_details_expanded[ALBUM_VIEW] = False + cursor.execute("UPDATE global_state set item_details_expanded=?", + (repr(item_details_expanded),)) + +def upgrade162(cursor): + """Convert the active_filters column to string values.""" + + FILTER_VIEW_ALL = 0 + FILTER_UNWATCHED = 1 + FILTER_NONFEED = 2 + FILTER_DOWNLOADED = 4 + FILTER_VIEW_VIDEO = 8 + FILTER_VIEW_AUDIO = 16 + FILTER_VIEW_MOVIES = 32 + FILTER_VIEW_SHOWS = 64 + FILTER_VIEW_CLIPS = 128 + FILTER_VIEW_PODCASTS = 256 + + # map the old integer constants to strings. Rename "unwatched" to + # "unplayed" since we've done it other places so we might as well do it + # here during the upgrade. 
+ value_map = { + FILTER_VIEW_ALL: 'all', + FILTER_UNWATCHED: 'unplayed', + FILTER_NONFEED: 'nonfeed', + FILTER_DOWNLOADED: 'downloaded', + FILTER_VIEW_VIDEO: 'video', + FILTER_VIEW_AUDIO: 'audio', + FILTER_VIEW_MOVIES: 'movies', + FILTER_VIEW_SHOWS: 'shows', + FILTER_VIEW_CLIPS: 'clips', + FILTER_VIEW_PODCASTS: 'podcasts', + } + + # convert old int values to strings + converted_values = [] + cursor.execute("SELECT type, id_, active_filters FROM display_state") + for row in cursor.fetchall(): + type, id_, active_filters = row + if active_filters is None: + continue + filters = [] + for int_value, string_value in value_map.iteritems(): + if active_filters & int_value: + filters.append(string_value) + new_active_filters = ":".join(filters) + converted_values.append((type, id_, new_active_filters)) + # drop old integer column + remove_column(cursor, 'display_state', ['active_filters']) + # add new text column + cursor.execute("ALTER TABLE display_state ADD COLUMN active_filters text") + # fill in the new values + for (type, id_, new_active_filters) in converted_values: + cursor.execute("UPDATE display_state " + "SET active_filters=? " + "WHERE type = ? AND id_ = ?", + (new_active_filters, type, id_)) + +def upgrade163(cursor): + """Add eMusic as a store.""" + # if the user is using a theme, we don't do anything + if not app.config.get(prefs.THEME_NAME) == prefs.THEME_NAME.default: + return + + store_url = u'http://www.kqzyfj.com/click-5294129-10364534' + favicon_url = u'http://www.emusic.com/favicon.ico' + cursor.execute("SELECT count(*) FROM channel_guide WHERE url=?", + (store_url,)) + count = cursor.fetchone()[0] + if count > 0: + return + + next_id = get_next_id(cursor) + + cursor.execute("INSERT INTO channel_guide " + "(id, url, allowedURLs, updated_url, favicon, firstTime, " + "store, userTitle) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?)", + (next_id, store_url, "[]", store_url, + favicon_url, True, 1, u"eMusic")) # 1 is a visible store + + cursor.execute('SELECT tab_ids FROM taborder_order WHERE type=?', + ('site',)) + row = cursor.fetchone() + if row is not None: + try: + tab_ids = eval_container(row[0]) + except StandardError: + tab_ids = [] + tab_ids.append(next_id) + cursor.execute('UPDATE taborder_order SET tab_ids=? WHERE type=?', + (repr(tab_ids), 'site')) + else: + # no site taborder (#11985). We will create the TabOrder + # object on startup, so no need to do anything here + pass + +def upgrade164(cursor): + """Move column info from DisplayState to ViewState. + + This makes it easier to store different data for list, album, and standard + view. 
+ """ + + # view type constants + STANDARD_VIEW = 0 + LIST_VIEW = 1 + ALBUM_VIEW = 3 + + # make new columns in view state + cursor.execute("ALTER TABLE view_state " + "ADD COLUMN columns_enabled pythonrepr") + cursor.execute("ALTER TABLE view_state " + "ADD COLUMN column_widths pythonrepr") + + # copy data from display_state + cursor.execute("UPDATE view_state " + "SET columns_enabled = " + "(SELECT list_view_columns FROM display_state " + "WHERE display_state.type=view_state.display_type AND " + "display_state.id_ = view_state.display_id) " + "WHERE view_type in (?, ?, ?)", + (STANDARD_VIEW, LIST_VIEW, ALBUM_VIEW)) + cursor.execute("UPDATE view_state " + "SET column_widths = " + "(SELECT list_view_widths FROM display_state " + "WHERE display_state.type=view_state.display_type AND " + "display_state.id_ = view_state.display_id) " + "WHERE view_type in (?, ?)", (LIST_VIEW, ALBUM_VIEW)) + # drop old columns + remove_column(cursor, 'display_state', ['list_view_columns']) + remove_column(cursor, 'display_state', ['list_view_widths']) + +def upgrade165(cursor): + """Add lots of indexes.""" + indices = [ + ('playlist_item_map_item_id', 'playlist_item_map', 'item_id'), + ('playlist_folder_item_map_item_id', 'playlist_folder_item_map', + 'item_id'), + ('feed_impl_key', 'feed', 'feed_impl_id') + ] + for n, t, c in indices: + cursor.execute("CREATE INDEX %s ON %s (%s)" % (n, t, c)) + +def upgrade166(cursor): + """Create the metadata table and migrate data from item to it.""" + + # create new tables + cursor.execute(""" + CREATE TABLE metadata_status (id integer PRIMARY KEY, path text, + mutagen_status text, moviedata_status text, + mutagen_thinks_drm integer, + max_entry_priority integer) + """) + cursor.execute("""\ + CREATE TABLE metadata (id integer PRIMARY KEY, path text, source text, + priority integer, file_type text, duration integer, + album text, album_artist text, album_tracks + integer, artist text, cover_art_path text, + screenshot_path text, drm integer, genre text, + title text, track integer, year integer, + description text, rating integer, show text, + episode_id text, episode_number integer, + season_number integer, kind text) + """) + cursor.execute("CREATE INDEX metadata_mutagen ON metadata_status " + "(mutagen_status)") + cursor.execute("CREATE INDEX metadata_moviedata ON metadata_status " + "(moviedata_status)") + cursor.execute("CREATE UNIQUE INDEX metadata_path ON metadata_status " + "(path)") + cursor.execute("CREATE INDEX metadata_entry_path ON metadata (path)") + cursor.execute("CREATE UNIQUE INDEX metadata_entry_path_and_source " + "ON metadata (path, source)") + # add new index to item + cursor.execute("CREATE INDEX item_filename ON item (filename)") + + + # map old MDP states to their new values + mdp_state_map = { + None : 'N', + 0 : 'S', + 1 : 'C', + 2 : 'F', + } + + # currently get_title() returns the contents of title and falls back on + # title_tag. Set the title column to be that value for the conversion. + # As a side-effect this makes the title column the same as what + # get_metadata() would return. 
+ cursor.execute("UPDATE item SET title=title_tag " + "WHERE title IS NULL OR title == ''") + + # map columns in metadata table to their old name in item + column_map = { + 'path': 'filename', + 'file_type': 'file_type', + 'duration': 'duration', + 'album': 'album', + 'album_artist': 'album_artist', + 'album_tracks': 'album_tracks', + 'artist':'artist', + 'cover_art_path': 'cover_art', + 'screenshot_path': 'screenshot', + 'drm': 'has_drm', + 'genre': 'genre', + 'title ': 'title', + 'track': 'track', + 'year': 'year', + 'description': 'description', + 'rating': 'rating', + 'show': 'show', + 'episode_id': 'episode_id', + 'episode_number': 'episode_number', + 'season_number': 'season_number', + 'kind': 'kind', + } + + insert_columns = ['id', 'source', 'priority'] + select_columns = ['mdp_state'] + + filenames_seen = set() + + for key, value in column_map.items(): + insert_columns.append(key) + select_columns.append(value) + sql = "SELECT %s FROM item WHERE filename NOT NULL ORDER BY id ASC" % ( + ', '.join(select_columns)) + next_id = get_next_id(cursor) + filename_index = select_columns.index('filename') + has_drm_index = select_columns.index('has_drm') + for row in list(cursor.execute(sql)): + path = row[filename_index] + has_drm = row[has_drm_index] + if path in filenames_seen: + # duplicate filename, just skip this data + continue + else: + filenames_seen.add(path) + # Make an entry in the metadata table with the metadata that was + # stored. We don't know if it came from mutagen, movie data, torrent + # data or wherever, so we use "old-item" as the source and give it a + # low priority. + values = [next_id, 'old-item', 10] + for old_value in row[1:]: + if old_value != '': + values.append(old_value) + else: + values.append(None) + mdp_state = row[0] + sql = "INSERT INTO metadata (%s) VALUES (%s)" % ( + ', '.join(insert_columns), + ', '.join('?' for i in xrange(len(insert_columns)))) + cursor.execute(sql, values) + next_id += 1 + + OLD_ITEM_PRIORITY = 10 + # Make an entry in the metadata_status table. We're not sure if + # mutagen completed successfully or not, so we just call its status + # SKIPPED. 
moviedata_status is based on the old mdp_state column + sql = ("INSERT INTO metadata_status " + "(id, path, mutagen_status, moviedata_status, " + "mutagen_thinks_drm, max_entry_priority) " + "VALUES (?, ?, ?, ?, ?, ?)") + cursor.execute(sql, (next_id, path, 'S', mdp_state_map[mdp_state], + has_drm, OLD_ITEM_PRIORITY)) + next_id += 1 + + # We need to alter the item table to: + # - drop the columns now handled by metadata_status + # - make column names match the keys in MetadataManager.get_metadata() + # - add a new column for torrent titles + + rename_columns={ + 'cover_art': 'cover_art_path', + 'screenshot': 'screenshot_path', + } + delete_columns=['mdp_state', 'metadata_version', 'title_tag'] + alter_table_columns(cursor, 'item', delete_columns, rename_columns) + + cursor.execute("ALTER TABLE item ADD torrent_title TEXT") + +def upgrade167(cursor): + """Drop the cover_art_path column from the metadata table.""" + + cover_art_dir = app.config.get(prefs.COVER_ART_DIRECTORY) + + # Move all current cover art so that it's at the path + # <cover art dir>/<quoted album name> + already_moved = set() + cursor.execute("SELECT path, album, cover_art_path from metadata " + "WHERE cover_art_path IS NOT NULL AND " + "album IS NOT NULL") + for (path, album, cover_art_path) in cursor.fetchall(): + # quote the filename using the same logic as + # filetags.calc_cover_art_filename() + dest_filename = urllib.quote(album.encode('utf-8'), safe=' ,.') + dest_path = os.path.join(cover_art_dir, dest_filename) + + if album in already_moved: + cursor.execute("UPDATE item SET cover_art_path=? " + "WHERE filename=?", (dest_path, path)) + try: + os.remove(cover_art_path) + except StandardError: + logging.warn("upgrade167: Error deleting %s", cover_art_path) + continue + if not os.path.exists(cover_art_path): + logging.warn("upgrade167: Error moving cover art. Source path " + "doesn't exist: %s", cover_art_path) + continue + try: + shutil.move(cover_art_path, dest_path) + except StandardError: + logging.warn("upgrade167: Error moving %s -> %s", cover_art_path, + dest_path) + # update item table + cursor.execute("UPDATE item SET cover_art_path=NULL " + "WHERE filename=?", (path,)) + else: + # update item table + cursor.execute("UPDATE item SET cover_art_path=? " + "WHERE filename=?", (dest_path, path)) + + already_moved.add(album) + + # Now that the cover art is in the correct place, we don't need to store + # it in the database anymore.
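+    # (recap of the quoting above, with a hypothetical album name:
+    # u'AC/DC Live' becomes 'AC%2FDC Live', so path separators in album
+    # names cannot escape cover_art_dir)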
+ remove_column(cursor, 'metadata', ['cover_art_path']) + +def upgrade168(cursor): + """Add echonest_status and echonest_id.""" + # make the columns + cursor.execute("ALTER TABLE metadata_status " + "ADD COLUMN echonest_status text") + cursor.execute("ALTER TABLE metadata_status " + "ADD COLUMN echonest_id text") + # Set status to SKIPPED since the user didn't opt-in to internet + # lookups + cursor.execute("UPDATE metadata_status " + "SET echonest_status='S'") + +def upgrade169(cursor): + """Add disabled to metadata.""" + cursor.execute("ALTER TABLE metadata " + "ADD COLUMN disabled integer") + cursor.execute("UPDATE metadata SET disabled=0") + +def upgrade170(cursor): + """Add net_lookup_enabled.""" + cursor.execute("ALTER TABLE metadata_status " + "ADD COLUMN net_lookup_enabled integer") + cursor.execute("ALTER TABLE item " + "ADD COLUMN net_lookup_enabled integer") + cursor.execute("UPDATE item SET net_lookup_enabled=0") + +def upgrade171(cursor): + """Add current_processor and calculate its value.""" + + cursor.execute("ALTER TABLE metadata_status " + "ADD COLUMN current_processor TEXT") + STATUS_NOT_RUN = 'N' + + cursor.execute("UPDATE metadata_status SET current_processor=? " + "WHERE mutagen_status == ?", (u'mutagen', STATUS_NOT_RUN)) + + cursor.execute("UPDATE metadata_status SET current_processor=? " + "WHERE mutagen_status != ? AND moviedata_status == ?", + (u'movie-data', STATUS_NOT_RUN, STATUS_NOT_RUN)) + + cursor.execute("UPDATE metadata_status SET current_processor=? " + "WHERE mutagen_status != ? AND moviedata_status != ? AND " + "echonest_status == ?", (u'echonest', STATUS_NOT_RUN, + STATUS_NOT_RUN, STATUS_NOT_RUN)) + + cursor.execute("DROP INDEX metadata_mutagen") + cursor.execute("DROP INDEX metadata_moviedata") + cursor.execute("CREATE INDEX metadata_processor " + "ON metadata_status (current_processor)") + +def upgrade172(cursor): + """Remove the path column from the metadata table.""" + # Create new column and set it based on the old path column + cursor.execute("ALTER TABLE metadata ADD COLUMN status_id integer") + cursor.execute("UPDATE metadata SET status_id = " + "(SELECT metadata_status.id FROM metadata_status " + "WHERE metadata_status.path=metadata.path)") + # Delete any rows that don't have an associated metadata_status. There + # shouldn't be any, but delete just in case + cursor.execute("DELETE FROM metadata " + "WHERE NOT EXISTS " + "(SELECT metadata_status.id FROM metadata_status " + "WHERE metadata_status.path=metadata.path)") + # Fix indexes + cursor.execute("DROP INDEX metadata_entry_path") + cursor.execute("DROP INDEX metadata_entry_path_and_source") + cursor.execute("CREATE INDEX metadata_entry_status " + "ON metadata (status_id)") + cursor.execute("CREATE UNIQUE INDEX metadata_entry_status_and_source " + "ON metadata (status_id, source)") + # drop old column + remove_column(cursor, 'metadata', ['path']) + +def upgrade173(cursor): + """Make sure net_lookup_enabled is always non-null.""" + # This code should have been in upgrade170, but it's okay to run it now + + cursor.execute("UPDATE metadata_status SET net_lookup_enabled=0 " + "WHERE net_lookup_enabled IS NULL") + +def upgrade174(cursor): + """Set some echonest_status to STATUS_SKIP_FROM_PREF instead of skipped.""" + + # for audio files, echonest_status should be STATUS_SKIP_FROM_PREF so that + # if the user enables echonest for that file it will run. We keep + # video/other items as STATUS_SKIP, so that echonest will never run.
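+    # status letters, for reference: 'N' = not run, 'S' = skipped, and
+    # 'P' = STATUS_SKIP_FROM_PREF; 'C' and 'F' (see mdp_state_map in
+    # upgrade166) presumably stand for complete and failure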
+ cursor.execute("UPDATE metadata_status SET echonest_status='P' " + "WHERE id IN " + "(SELECT status_id FROM metadata " + "WHERE source = " + "(SELECT source FROM metadata " + "WHERE status_id=status_id AND " + "file_type IS NOT NULL " + "ORDER BY priority DESC LIMIT 1) AND " + "file_type = 'audio')") + +def upgrade175(cursor): + """Rename screenshot_path and cover_art_path back to their old names.""" + + rename_column(cursor, 'metadata', 'screenshot_path', 'screenshot') + alter_table_columns(cursor, 'item', [], rename_columns={ + 'screenshot_path': 'screenshot', + 'cover_art_path': 'cover_art', + }) + +def upgrade176(cursor): + """Add file_type to metadata_status.""" + # Add file_type to metadata_status and set it to the file_type from the + # metadata table + cursor.execute("ALTER TABLE metadata_status ADD file_type TEXT") + cursor.execute("UPDATE metadata_status " + "SET file_type=(" + "SELECT file_type FROM metadata " + "WHERE status_id=metadata_status.id AND " + "file_type IS NOT NULL " + "ORDER BY priority DESC LIMIT 1)") + # Set file_type to other for items that the subquery returned 0 rows for + cursor.execute("UPDATE metadata_status SET file_type='other' " + "WHERE file_type IS NULL") + +def upgrade177(cursor): + """Add finished_status, remove current_processor from metadata_status.""" + cursor.execute("ALTER TABLE metadata_status ADD finished_status INTEGER") + # set finished_status to 1 (the current versionas of 5.0) if + # current_processor was None + cursor.execute("UPDATE metadata_status " + "SET finished_status=1 " + "WHERE current_processor IS NULL") + # set finished_status to 0 (unfinished) for all other rows) + cursor.execute("UPDATE metadata_status " + "SET finished_status=0 " + "WHERE current_processor IS NOT NULL") + cursor.execute("CREATE INDEX metadata_finished ON " + "metadata_status (finished_status)") + # drop the current_processor column + cursor.execute("DROP INDEX metadata_processor") + remove_column(cursor, 'metadata_status', ['current_processor']) + +def upgrade178(cursor): + """Remove metadata for items flaged as deleted.""" + cursor.execute("DELETE FROM metadata WHERE status_id IN " + "(SELECT ms.id FROM metadata_status ms " + "JOIN item on ms.path = item.filename " + "WHERE item.deleted)") + cursor.execute("DELETE FROM metadata_status WHERE path IN " + "(SELECT filename FROM item WHERE item.deleted)") + +def upgrade179(cursor): + # Rename title -> metadata_title and add a title column that stores the + # computed title (AKA what get_title() returned) + + # translated from Item.get_title() circa 5ed4c4a6 + def get_title(metadata_title, torrent_title, entry_title, filename): + if metadata_title: + return metadata_title + elif torrent_title is not None: + return torrent_title + elif entry_title is not None: + return entry_title + elif filename: + return filename_to_unicode(os.path.basename(filename)) + else: + return _('no title') + cursor.execute("ALTER TABLE item ADD COLUMN metadata_title text") + cursor.execute("UPDATE item SET metadata_title=title") + cursor.execute("SELECT id, metadata_title, torrent_title, entry_title, " + "filename FROM item") + rows = cursor.fetchall() + for id_, metadata_title, torrent_title, entry_title, filename in rows: + title = get_title(metadata_title, torrent_title, entry_title, + filename) + if title != metadata_title: + cursor.execute("UPDATE item SET title=? 
WHERE id=?", (title, id_)) + +def upgrade180(cursor): + # Rename columns in the item table + rename_columns = { + 'autoDownloaded': 'auto_downloaded', + 'pendingManualDL': 'pending_manual_download', + 'pendingReason': 'pending_reason', + 'creationTime': 'creation_time', + 'linkNumber': 'link_number', + 'downloadedTime': 'downloaded_time', + 'watchedTime': 'watched_time', + 'lastWatched': 'last_watched', + 'isContainerItem': 'is_container_item', + 'releaseDateObj': 'release_date', + 'eligibleForAutoDownload': 'eligible_for_autodownload', + 'resumeTime': 'resume_time', + 'channelTitle': 'channel_title', + 'shortFilename': 'short_filename', + 'offsetPath': 'offset_path', + } + + alter_table_columns(cursor, 'item', [], rename_columns) + +def upgrade181(cursor): + """Drop the feed.last_viewed column and add item.new. + + This means we can tell an item's state without data from the feed table. + """ + cursor.execute("ALTER TABLE item ADD COLUMN new integer") + # These next lines set new=1 for all items that would have matched the + # feed_available_view() before. + # Make a subquery for items that were created after we last viewed a feed + subquery = ("SELECT item.id " + "FROM item " + "JOIN feed " + "ON feed.id = item.feed_id " + "WHERE feed.last_viewed <= item.creation_time") + cursor.execute("UPDATE item SET new=1 " + "WHERE NOT auto_downloaded AND " + "downloaded_time IS NULL AND " + "NOT is_file_item AND " + "id in (%s)" % subquery) + # remove the last_viewed column + remove_column(cursor, 'feed', ['last_viewed']) + +def upgrade182(cursor): + """Unroll the remote_downloader.status column """ + + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN total_size integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN current_size integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN start_time integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN end_time integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN short_filename text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN filename text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN reason_failed text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN short_reason_failed text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN type text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN retry_time timestamp") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN retry_count integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN upload_size integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN info_hash text") + columns = [ 'total_size', 'current_size', 'start_time', 'end_time', + 'short_filename', 'filename', 'retry_time', 'retry_count', + 'upload_size', 'info_hash', 'reason_failed', + 'short_reason_failed', 'type', + ] + # map new column names to their old keys in the status dict + rename_map = { + 'short_filename': 'shortFilename', + 'short_reason_failed': 'shortReasonFailed', + 'reason_failed': 'reasonFailed', + 'type': 'dlerType', + 'start_time': 'startTime', + 'end_time': 'endTime', + 'retry_time': 'retryTime', + 'retry_count': 'retryCount', + 'channel_name': 'channelName', + 'current_size': 'currentSize', + 'total_size': 'totalSize', + 'upload_size': 'uploadSize', + } + + cursor.execute("SELECT id, status from remote_downloader") + update_sql = ("UPDATE remote_downloader SET %s WHERE id=?" % + ", ".join("%s=? 
" % name for name in columns)) + for id_, status_repr in cursor.fetchall(): + try: + status = eval(status_repr, {}, {'datetime': datetime}) + except StandardError: + logging.warn("Error evaluating status repr: %r" % status_repr) + continue + values = [] + for column in columns: + status_key = rename_map.get(column, column) + value = status.get(status_key) + # Most of the time we can just use the value from status column, + # but for some special cases we need to tweak it. + if (column == 'end_time' and + value == status.get('startTime')): + value = None + elif column in ('current_size', 'upload_size') and value is None: + value = 0 + elif column in ('retry_count', 'total_size') and value == -1: + value = None + elif (column in ['start_time', 'end_time'] and value is not None): + value = int(value) + values.append(value) + values.append(id_) + cursor.execute(update_sql, values) + + remove_column(cursor, 'remote_downloader', ['status']) + +def upgrade183(cursor): + """Rename downloader columns to use PEP 8.""" + rename_columns = { + 'contentType': 'content_type', + 'origURL': 'orig_url', + 'channelName': 'channel_name', + } + alter_table_columns(cursor, 'remote_downloader', [], rename_columns) + # as long as we're changing origURL, change if for feed too + rename_column(cursor, 'feed', 'origURL', 'orig_url') + +def upgrade184(cursor): + """Drop the seen column from item.""" + remove_column(cursor, 'item', ['seen']) + +def upgrade185(cursor): + """Use NULL for empty item descriptions.""" + cursor.execute("UPDATE item SET description=NULL WHERE description=''") + +def upgrade186(cursor): + """Add columns no the remote_downloader table to track stats.""" + + cursor.execute("ALTER TABLE remote_downloader ADD COLUMN eta integer") + cursor.execute("ALTER TABLE remote_downloader ADD COLUMN rate integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN upload_rate integer") + cursor.execute("ALTER TABLE remote_downloader ADD COLUMN activity text") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN seeders integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN leechers integer") + cursor.execute("ALTER TABLE remote_downloader " + "ADD COLUMN connections integer") + +def upgrade187(cursor): + """Add the item_fts table""" + + columns = ['title', 'description', 'artist', 'album', 'genre', + 'filename', ] + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + column_list_with_types = ', '.join('%s text' % c for c in columns) + cursor.execute("CREATE VIRTUAL TABLE item_fts USING fts4(%s)" % + column_list_with_types) + cursor.execute("INSERT INTO item_fts(docid, %s)" + "SELECT item.id, %s FROM item" % + (column_list, column_list)) + # make triggers to keep item_fts up to date + cursor.execute("CREATE TRIGGER item_bu " + "BEFORE UPDATE ON item BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;") + + cursor.execute("CREATE TRIGGER item_bd " + "BEFORE DELETE ON item BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;") + + cursor.execute("CREATE TRIGGER item_au " + "AFTER UPDATE ON item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + + cursor.execute("CREATE TRIGGER item_ai " + "AFTER INSERT ON item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + +def upgrade188(cursor): + """Fix the title for FeedImpl tables.""" + # ensure that if a title is 
blank, then we set it as NULL + feed_impl_table_names = [ + 'feed_impl', 'rss_feed_impl', 'saved_search_feed_impl', + 'scraper_feed_impl', 'search_feed_impl', 'directory_watch_feed_impl', + 'directory_feed_impl', 'search_downloads_feed_impl', + 'manual_feed_impl', + ] + for table_name in feed_impl_table_names: + cursor.execute("UPDATE %s SET title=NULL " + "WHERE TRIM(title)=''" % table_name) + # Before, some FeedImpl subclasses overrode the title attribute and just + # returned a special-case name. Update title to have those special-case + # names. NOTE: some of these special titles are blank -- that's just how + # the old code worked. + special_case_feed_impl_titles = [ + ('search_feed_impl', _('Search')), + ('search_downloads_feed_impl', _('Search Downloads')), + ('directory_feed_impl', ''), + ('manual_feed_impl', ''), + ] + for table_name, title in special_case_feed_impl_titles: + cursor.execute("UPDATE %s SET title=?" % table_name, + (title,)) + +def upgrade189(cursor): + """Add the parent_title column.""" + cursor.execute("ALTER TABLE item ADD COLUMN parent_title text") + + # Calculate parent_title for feed items + feed_impl_table_names = [ + 'feed_impl', 'rss_feed_impl', 'saved_search_feed_impl', + 'scraper_feed_impl', 'search_feed_impl', 'directory_watch_feed_impl', + 'directory_feed_impl', 'search_downloads_feed_impl', + 'manual_feed_impl', + ] + def get_feed_impl_info(feed_impl_id): + for table_name in feed_impl_table_names: + sql = "SELECT title, url FROM %s WHERE id=?" % table_name + cursor.execute(sql, (feed_impl_id,)) + results = cursor.fetchall() + if results: + return results[0] + logging.warn("Can't find FeedImpl: %s", feed_impl_id) + return (_("Unknown Title"), _("Unknown URL")) + + def get_feed_title(user_title, base_title, search_term, feed_impl_id): + if user_title is not None: + return user_title + feed_impl_title, feed_impl_url = get_feed_impl_info(feed_impl_id) + if feed_impl_title is not None: + title = feed_impl_title + elif base_title is not None: + title = base_title + else: + title = feed_impl_url + if search_term is not None: + title = u"%s for '%s'" % (title, search_term) + return title + + cursor.execute("SELECT id, userTitle, baseTitle, searchTerm, " + "feed_impl_id " + "FROM feed") + for row in cursor.fetchall(): + feed_id, user_title, base_title, search_term, feed_impl_id = row + feed_title = get_feed_title(user_title, base_title, search_term, + feed_impl_id) + cursor.execute("UPDATE item SET parent_title=? WHERE feed_id=?", + (feed_title, feed_id)) + # set parent_title for items not in feeds + cursor.execute("UPDATE item SET parent_title=" + "(SELECT i2.title FROM item i2 WHERE item.parent_id=i2.id) " + "WHERE parent_title IS NULL and parent_id IS NOT NULL") + + # Update the item_fts table.
Unfortunately, we can't alter a virtual + # table, so we basically need to re-do upgrade 187 + cursor.execute("DROP TABLE item_fts") + # for some reason we need to start a new transaction, or we get a segfault + # on Ubuntu oneiric + cursor.execute("COMMIT TRANSACTION") + cursor.execute("BEGIN TRANSACTION") + + columns = ['title', 'description', 'artist', 'album', 'genre', + 'filename', 'parent_title', ] + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + column_list_with_types = ', '.join('%s text' % c for c in columns) + cursor.execute("CREATE VIRTUAL TABLE item_fts USING fts4(%s)" % + column_list_with_types) + cursor.execute("INSERT INTO item_fts(docid, %s)" + "SELECT item.id, %s FROM item" % + (column_list, column_list)) + # The triggers are still present even though we dropped item_fts, no need + # to re-make them. + +def upgrade190(cursor): + """Fix item triggers after upgrade189""" + # upgrade189 incorrectly assumed that the item triggers from before were + # still correct. We need to redo a couple of them since the columns in + # item_fts changed + + columns = ['title', 'description', 'artist', 'album', 'genre', + 'filename', 'parent_title', ] + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + cursor.execute("DROP TRIGGER item_au") + cursor.execute("CREATE TRIGGER item_au " + "AFTER UPDATE ON item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + + cursor.execute("DROP TRIGGER item_ai") + cursor.execute("CREATE TRIGGER item_ai " + "AFTER INSERT ON item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + +@run_on_devices +def upgrade191(cursor): + cursor.execute( + 'CREATE TABLE device_item(id integer PRIMARY KEY, ' + 'title text, creation_time timestamp, watched_time timestamp, ' + 'last_watched timestamp, subtitle_encoding text, ' + 'release_date timestamp, parent_title text, feed_url text, ' + 'license text, rss_id text, entry_title text, ' + 'torrent_title text, entry_description text, permalink text, ' + 'payment_link text, comments_link text, url text, ' + 'size integer, enclosure_size integer, enclosure_type text, ' + 'enclosure_format text, filename text, resume_time integer, ' + 'play_count integer, skip_count integer, auto_sync integer, ' + 'screenshot text, duration integer, cover_art text, ' + 'description text, album text, album_artist text, ' + 'artist text, track integer, album_tracks integer, ' + 'year integer, genre text, rating integer, ' + 'file_type text, has_drm integer, show text, ' + 'episode_id text, episode_number integer, season_number integer, ' + 'kind text, net_lookup_enabled integer, metadata_title text)') + +def upgrade192(cursor): + # change URL to be NULL instead of an empty string + cursor.execute("UPDATE item SET url=NULL WHERE url=''") + cursor.execute("UPDATE item SET link=NULL WHERE link=''") + cursor.execute("UPDATE item SET payment_link=NULL WHERE payment_link=''") + cursor.execute("UPDATE item SET comments_link=NULL WHERE comments_link=''") + +@run_on_devices +def upgrade193(cursor): + """Add the item_fts table""" + + columns = ['title', 'description', 'artist', 'album', 'genre', + 'filename', 'parent_title', ] + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + column_list_with_types = ', '.join('%s text' % c for c in columns) +
cursor.execute("CREATE VIRTUAL TABLE item_fts USING fts4(%s)" % + column_list_with_types) + cursor.execute("INSERT INTO item_fts(docid, %s)" + "SELECT device_item.id, %s FROM device_item" % + (column_list, column_list)) + # make triggers to keep item_fts up to date + cursor.execute("CREATE TRIGGER item_bu " + "BEFORE UPDATE ON device_item BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;") + + cursor.execute("CREATE TRIGGER item_bd " + "BEFORE DELETE ON device_item BEGIN " + "DELETE FROM item_fts WHERE docid=old.id; " + "END;") + + cursor.execute("CREATE TRIGGER item_au " + "AFTER UPDATE ON device_item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + + cursor.execute("CREATE TRIGGER item_ai " + "AFTER INSERT ON device_item BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (column_list, column_list_for_new)) + +@run_on_both +def upgrade194(cursor): + """Remove the item_info_cache table.""" + cursor.execute("DROP TABLE item_info_cache") + +def upgrade195(cursor): + """Add the size column.""" + # importing from miro is bad, but there's no other way to check for the + # platform filename type. + from miro.plat.utils import PlatformFilenameType + def _unicode_to_filename(value): + # reverses filename_to_unicode(). We can't use the platform + # unicode_to_filename() because that also cleans out the filename. + # This code is not very good and should be replaces as part of #13182 + if value is not None and PlatformFilenameType != unicode: + return value.encode('utf-8') + else: + return value + + cursor.execute("ALTER TABLE item ADD size INTEGER") + cursor.execute("SELECT item.id, item.filename, item.enclosure_size, " + "rd.total_size " + "FROM item " + "LEFT JOIN remote_downloader rd " + "ON rd.id=item.downloader_id ") + for row in cursor.fetchall(): + (item_id, filename, enclosure_size, dl_total_size) = row + + if filename is not None: + try: + size = os.path.getsize(_unicode_to_filename(filename)) + except EnvironmentError: + size = None + elif dl_total_size is not None: + size = dl_total_size + elif enclosure_size is not None: + size = enclosure_size + else: + size = None + cursor.execute("UPDATE item SET size=? WHERE id=?", (size, item_id)) + +@run_on_both +def upgrade196(cursor): + """Add cover_art to the metadata table. + + cover_art will store the path to cover art that is specific to a + file, not an album. We use it when the user manually assigns cover art to + an item (#19766) + """ + cursor.execute("ALTER TABLE metadata ADD COLUMN cover_art TEXT") + +def upgrade197(cursor): + """Add the thumbnail_path to the feed table """ + cursor.execute("ALTER TABLE feed ADD COLUMN thumbnail_path TEXT") + cursor.execute("SELECT feed.id, icon_cache.filename " + "FROM feed " + "JOIN icon_cache ON icon_cache.id = feed.icon_cache_id " + "WHERE filename IS NOT NULL") + values = [] + for (feed_id, filename) in cursor.fetchall(): + if os.path.exists(filename): + values.append((filename, feed_id)) + cursor.executemany("UPDATE feed SET thumbnail_path=? WHERE id=?", values) + +@run_on_both +def upgrade198(cursor): + """Add entry_description to the item_fts table.""" + if is_device_db(cursor): + item_table = 'device_item' + else: + item_table = 'item' + # Update the item_fts table. 
Unfortunately, we can't alter a virtual + # table, so we basically need to re-do upgrade 187 + cursor.execute("DROP TABLE item_fts") + # for some reason we need to start a new transaction, or we get a segfault + # on Ubuntu oneiric + cursor.execute("COMMIT TRANSACTION") + cursor.execute("BEGIN TRANSACTION") + + columns = ['title', 'description', 'artist', 'album', 'genre', 'filename', + 'parent_title', 'entry_description', ] + column_list = ', '.join(c for c in columns) + column_list_for_new = ', '.join("new.%s" % c for c in columns) + column_list_with_types = ', '.join('%s text' % c for c in columns) + cursor.execute("CREATE VIRTUAL TABLE item_fts USING fts4(%s)" % + column_list_with_types) + cursor.execute("INSERT INTO item_fts(docid, %s)" + "SELECT %s.id, %s FROM %s" % + (column_list, item_table, column_list, item_table)) + # remake triggers that need it + cursor.execute("DROP TRIGGER item_au") + cursor.execute("CREATE TRIGGER item_au " + "AFTER UPDATE ON %s BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (item_table, column_list, column_list_for_new)) + + cursor.execute("DROP TRIGGER item_ai") + cursor.execute("CREATE TRIGGER item_ai " + "AFTER INSERT ON %s BEGIN " + "INSERT INTO item_fts(docid, %s) " + "VALUES(new.id, %s); " + "END;" % (item_table, column_list, column_list_for_new)) + +def upgrade199(cursor): + """Don't use NULL for item.deleted.""" + cursor.execute("UPDATE item SET deleted=0 WHERE deleted IS NULL") + +def upgrade200(cursor): + """Change format and name of expireTime.""" + rename_column(cursor, 'feed', 'expireTime', 'expire_timedelta', 'TEXT') + cursor.execute("SELECT id, expire_timedelta FROM feed " + "WHERE expire_timedelta IS NOT NULL") + update_values = [] + timedelta_re = re.compile(r'datetime\.timedelta\(\d+( *, *\d+){0,2}\)') + for (feed_id, expire_timedelta) in cursor.fetchall(): + # do a check that expire_timedelta is in the right format. This + # hopefully keeps data in the database from executing malicious code. + if timedelta_re.match(expire_timedelta) is None: + logging.warn("upgrade200: expireTime doesn't match our RE: %s", + expire_timedelta) + continue + try: + value = eval(expire_timedelta, {'datetime': datetime}, {}) + except StandardError: + logging.warn("upgrade200: error calling eval(): %s", + expire_timedelta) + continue + new_value = ':'.join((str(value.days), str(value.seconds), + str(value.microseconds))) + update_values.append((new_value, feed_id)) + cursor.executemany("UPDATE feed SET expire_timedelta=? WHERE id=?", + update_values) + +def upgrade201(cursor): + """Set invalid expire_timedelta values to NULL.""" + + # we should have done this in the last upgrade, but since the beta was + # released with that code, let's fix the problem with a new upgrade + # function. + cursor.execute("SELECT id, expire_timedelta FROM feed " + "WHERE expire_timedelta IS NOT NULL") + where_values = [] + valid_timedelta_re = re.compile(r'\d+:\d+:\d+') + for (feed_id, expire_timedelta) in cursor.fetchall(): + # do a check that expire_timedelta is in the right format. This + # hopefully keeps data in the database from executing malicious code.
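+        # e.g. upgrade200 stored datetime.timedelta(days=6) as the string
+        # '6:0:0' (days:seconds:microseconds), which this RE accepts; any
+        # row that doesn't match is reset to NULL below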
+ if valid_timedelta_re.match(expire_timedelta) is None: + where_values.append((feed_id,)) + cursor.executemany("UPDATE feed SET expire_timedelta=NULL " + "WHERE id=?", where_values) diff -Nru miro-4.0.4/lib/datastructures.py miro-6.0/lib/datastructures.py --- miro-4.0.4/lib/datastructures.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/datastructures.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,66 +0,0 @@ -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. - -"""datastructures.py -- Datastructures used by Miro. -""" - -class Fifo(object): - """FIFO queue. - - Fast implentation of a first-in-first-out queue. - - Based off the code from Jeremy Fincher - (http://code.activestate.com/recipes/68436/) - """ - def __init__(self): - self.back = [] - self.front = [] - self.frontpos = 0 - - def enqueue(self, item): - self.back.append(item) - - def dequeue(self): - try: - rv = self.front[self.frontpos] - except IndexError: - pass - else: - self.frontpos += 1 - return rv - if self.back: - self.front = self.back - self.back = [] - self.frontpos = 1 - return self.front[0] - else: - raise ValueError("Queue Empty") - - def __len__(self): - return len(self.front) - self.frontpos + len(self.back) diff -Nru miro-4.0.4/lib/devicedatabaseupgrade.py miro-6.0/lib/devicedatabaseupgrade.py --- miro-4.0.4/lib/devicedatabaseupgrade.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/devicedatabaseupgrade.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,339 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""Upgrade old device databases """ + +import datetime +import itertools +import logging +import os.path +import shutil +import urllib + +from miro import app +from miro import databaseupgrade +from miro import item +from miro import metadata +from miro import prefs +from miro import schema +from miro import storedatabase +from miro.plat.utils import filename_to_unicode + +class OldItemImporter(object): + """Import items from the database of old versions. + + This class is made to import items from previous versions of Miro. There + are basically 2 cases: + + Upgrading from Miro 4.x -- In this case we get all data from the JSON db. + + Upgrading from Miro 5.x -- In this case we get the data from both the JSON + db and also the metadata tables in the sqlite db. + + Importing data requires some careful ordering of how we do things. The + steps are: + - import_metadata() imports data into the metadata tables + - the calling code should then create a MetadataManager for the device + - the MetadataManager is then passed to import_device_items() + """ + + # FIXME: this code is tied to the 5.0 release and may not work for future + # versions + + # map old MDP states to their new values + mdp_state_map = { + None : 'N', + 0 : 'S', + 1 : 'C', + 2 : 'F', + } + + # list that contains tuples in the form of + # (metadata_column_name, device_item_key) + column_map = [ + ('duration', 'duration'), + ('album', 'album'), + ('album_artist', 'album_artist'), + ('album_tracks', 'album_tracks'), + ('artist', 'artist'), + ('screenshot', 'screenshot'), + ('drm', 'has_drm'), + ('genre', 'genre'), + ('title', 'title'), + ('track', 'track'), + ('year', 'year'), + ('description', 'description'), + ('rating', 'rating'), + ('show', 'show'), + ('episode_id', 'episode_id'), + ('episode_number', 'episode_number'), + ('season_number', 'season_number'), + ('kind', 'kind'), + ] + + insert_columns = ['id', 'status_id', 'file_type', 'source', 'priority', + 'disabled'] + for new_name, old_name in column_map: + insert_columns.append(new_name) + + # SQL to insert a row into the metadata table + metadata_insert_sql = ( + "INSERT INTO metadata (%s) VALUES (%s)" % + (', '.join(insert_columns), + ', '.join('?' for i in xrange(len(insert_columns))))) + + # SQL to insert a row into the device_item table + device_item_insert_sql = ( + "INSERT INTO device_item (%s) VALUES (%s)" % + (', '.join(name for name, field in schema.DeviceItemSchema.fields), + ', '.join('?'
for i in xrange(len(schema.DeviceItemSchema.fields))))) + + def __init__(self, live_storage, mount, json_db): + self.cover_art_dir = os.path.join(mount, '.miro', 'cover-art') + self.net_lookup_enabled = app.config.get(prefs.NET_LOOKUP_BY_DEFAULT) + self.mount = mount + self.cursor = live_storage.cursor + # track the next id that we should create in the database + next_id = databaseupgrade.get_next_id(self.cursor) + self.id_counter = itertools.count(next_id) + + self.select_paths_from_db() + self.init_device_items(json_db) + + def import_metadata(self): + """Import data into the metadata tables.""" + if not self.old_device_items: + # nothing new to import + return + + logging.info("Importing metadata for %d old device items", + len(self.old_device_items)) + + self.cursor.execute("BEGIN TRANSACTION") + try: + for file_type, path, old_item in self.old_device_items: + try: + if path not in self.paths_in_metadata_table: + self.add_metadata_to_db(file_type, path, old_item) + self.fix_json_data(old_item) + except StandardError, e: + logging.warn("error converting device item for %r ", path, + exc_info=True) + finally: + self.cursor.execute("COMMIT TRANSACTION") + + def import_device_items(self, metadata_manager): + if not self.old_device_items: + # nothing new to import + return + + logging.info("Importing device items for %d old device items", + len(self.old_device_items)) + + self.cursor.execute("BEGIN TRANSACTION") + try: + for file_type, path, old_item in self.old_device_items: + if isinstance(path, unicode): + # when we use the json module instead of the simplejson + # module, paths are unicode rather than bytestrings. + # There's no great solution here, but the best practice is + # probably to force them to be utf-8. (see #19507) + path = path.encode('utf-8') + try: + self.add_device_item(file_type, path, old_item, + metadata_manager) + except StandardError, e: + logging.warn("error converting device item for %r ", path, + exc_info=True) + finally: + self.cursor.execute("COMMIT TRANSACTION") + + def select_paths_from_db(self): + """Setup paths_in_metadata_table and paths_in_device_items_table + + These variables track which paths are in our sqlite database for the + metadata_status and device_item tables. + """ + self.cursor.execute("SELECT path FROM metadata_status") + self.paths_in_metadata_table = set(row[0] for row in self.cursor) + + self.cursor.execute("SELECT filename FROM device_item") + self.paths_in_device_items_table = set(row[0] for row in self.cursor) + + def init_device_items(self, json_db): + """Initialize device_items + + device_items stores a (file_type, path, item_data) tuple for each + device item that we should convert + """ + # get info about each item on the device + paths_added = set() + self.old_device_items = [] + for file_type in (u'audio', u'video', u'other'): + if file_type not in json_db: + continue + for path, data in json_db[file_type].iteritems(): + if path in paths_added: + # duplicate filename, just skip this data + continue + paths_added.add(path) + if (path not in self.paths_in_metadata_table or + path not in self.paths_in_device_items_table): + self.old_device_items.append((file_type, path, data)) + + def add_metadata_to_db(self, file_type, path, old_item): + """Add rows to the metadata and metadata_status tables for old items. + """ + + # title and title_tag were pretty confusing before 5.0. We would set + # title_tag based on the ID3 tags, and if that didn't work, then set + # title based on the filename. 
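import_metadata() and import_device_items() above share one shape: a single explicit transaction around the whole batch, with a per-item try/except so that one malformed item is logged and skipped rather than aborting (and rolling back) everything already converted. A minimal sketch of that pattern, with illustrative names:

    import logging

    def import_batch(cursor, rows, convert_row):
        cursor.execute("BEGIN TRANSACTION")
        try:
            for row in rows:
                try:
                    convert_row(row)
                except StandardError:
                    # one bad row should not abort the whole batch
                    logging.warn("error converting %r", row, exc_info=True)
        finally:
            # commit whatever succeeded
            cursor.execute("COMMIT TRANSACTION")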
get_title() would try the title + # attribute first, then fall back to title_tag. After 5.0, we just + # use title. + # + # This code should make it so that titles work correctly for upgraded + # items, both in 5.0 and if you go back to a pre-5.0 version. + if 'title_tag' in old_item: + if not old_item['title']: + old_item['title'] = old_item['title_tag'] + old_item['title_tag'] = None + + has_drm = old_item.get('has_drm') # other doesn't have DRM + OLD_ITEM_PRIORITY = 10 + # Make an entry in the metadata_status table. We're not sure if + # mutagen completed successfully or not, so we just call its status + # SKIPPED. moviedata_status is based on the old mdp_state column + moviedata_status = self.mdp_state_map[old_item.get('mdp_state')] + finished_status = bool(moviedata_status != + metadata.MetadataStatus.STATUS_NOT_RUN) + + if self.net_lookup_enabled: + echonest_status = metadata.MetadataStatus.STATUS_NOT_RUN + else: + echonest_status = metadata.MetadataStatus.STATUS_SKIP + status_id = self.insert_into_metadata_status( + path, file_type, finished_status, 'S', moviedata_status, + echonest_status, has_drm, OLD_ITEM_PRIORITY) + # Make an entry in the metadata table with the metadata that was + # stored. We don't know if it came from mutagen, movie data, torrent + # data or wherever, so we use "old-item" as the source and give it a + # low priority. + values = [self.id_counter.next(), status_id, file_type, 'old-item', + 10, False] + for new_name, old_name in self.column_map: + value = old_item.get(old_name) + if value == '': + value = None + values.append(value) + + self.cursor.execute(self.metadata_insert_sql, values) + + def insert_into_metadata_status(self, path, file_type, finished_status, + mutagen_status, moviedata_status, + echonest_status, has_drm, + max_entry_priority): + status_id = self.id_counter.next() + sql = ("INSERT INTO metadata_status " + "(id, path, file_type, finished_status, mutagen_status, " + "moviedata_status, echonest_status, net_lookup_enabled, " + "mutagen_thinks_drm, max_entry_priority) " + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)") + + self.cursor.execute(sql, (status_id, path, file_type, finished_status, + mutagen_status, moviedata_status, + echonest_status, self.net_lookup_enabled, + has_drm, max_entry_priority)) + return status_id + + def add_device_item(self, file_type, path, old_item, metadata_manager): + """Insert a device_item row for an old item.""" + values = [] + # copy data from old_item so that we don't modify it + old_data = dict(old_item) + if path in self.paths_in_metadata_table: + # This item comes from a 5.x database, so there's data in the + # metadata tables for it.
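To make the state mapping concrete: an old item that the movie data program had completed (mdp_state == 1) gets moviedata_status 'C' from mdp_state_map, so finished_status is True; an item MDP never looked at (mdp_state missing or None) maps to 'N' and stays unfinished. This assumes, as the map suggests, that MetadataStatus.STATUS_NOT_RUN is the same 'N' used in mdp_state_map:

    mdp_state_map = {None: 'N', 0: 'S', 1: 'C', 2: 'F'}
    STATUS_NOT_RUN = 'N'   # assumed value of MetadataStatus.STATUS_NOT_RUN

    assert mdp_state_map[1] != STATUS_NOT_RUN      # completed -> finished
    assert mdp_state_map[None] == STATUS_NOT_RUN   # never ran -> unfinished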
+ metadata_dict = metadata_manager.get_metadata(path) + for key, value in metadata_dict.items(): + old_data[key] = value + for name, field in schema.DeviceItemSchema.fields: + # get value from the old item + if name == 'id': + value = self.id_counter.next() + elif name == 'filename': + value = filename_to_unicode(path) + elif name == 'file_type': + value = file_type + elif name == 'net_lookup_enabled': + value = False + else: + value = old_data.get(name) + # convert value + if value is not None: + if isinstance(field, schema.SchemaDateTime): + value = datetime.datetime.fromtimestamp(value) + values.append(value) + self.cursor.execute(self.device_item_insert_sql, values) + + def fix_json_data(self, old_item): + """Update the data in the JSON db after upgrading an old item.""" + if 'cover_art' in old_item: + self.upgrade_cover_art(old_item) + # Use the RAN state for all old items. This will prevent old miro + # versions from running the movie data program on them. This seems + # the safest option and old versions should still pick up new metadata + # when newer versions run MDP. + old_item['mdp_state'] = item.MDP_STATE_RAN + + def upgrade_cover_art(self, device_item): + """Drop the cover_art field and move cover art to a filename based on + the album + """ + + if 'album' not in device_item or 'cover_art' not in device_item: + return + cover_art = device_item.pop('cover_art') + device_item['cover_art'] = None # default in case the upgrade fails. + # quote the filename using the same logic as + # filetags.calc_cover_art_filename() + dest_filename = urllib.quote(device_item['album'].encode('utf-8'), + safe=' ,.') + dest_path = os.path.join(self.cover_art_dir, dest_filename) + if not os.path.exists(dest_path): + if not os.path.exists(cover_art): + logging.warn("upgrade_cover_art: Error moving cover art, " + "source path doesn't exist: %s", cover_art) + return + try: + shutil.move(cover_art, dest_path) + except StandardError: + logging.warn("upgrade_cover_art: Error moving %s -> %s", + cover_art, dest_path) + return + device_item['cover_art'] = dest_path diff -Nru miro-4.0.4/lib/devices.py miro-6.0/lib/devices.py --- miro-4.0.4/lib/devices.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/devices.py 2013-04-05 16:02:42.000000000 +0000 @@ -27,11 +27,7 @@ # this exception statement from your version. If you delete this exception # statement from all source files in the program, then also delete it here. 
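The quoting in upgrade_cover_art() turns an album name into a safe cover-art filename while keeping it readable: everything outside letters, digits, '_.-' and the whitelisted ' ,.' characters is percent-escaped, including '/' (which urllib.quote would normally leave alone). For example:

    import urllib

    urllib.quote(u'Abbey Road'.encode('utf-8'), safe=' ,.')
    # -> 'Abbey Road'
    urllib.quote(u'AC/DC Live: 1977'.encode('utf-8'), safe=' ,.')
    # -> 'AC%2FDC Live%3A 1977'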
-from datetime import datetime from glob import glob -from fnmatch import fnmatch -import os, os.path -import shutil import fnmatch try: import simplejson as json @@ -39,28 +35,52 @@ import json import codecs import logging +import os, os.path import re import time +import bisect +import tempfile +try: + from collections import Counter +except ImportError: + from collections import defaultdict + class Counter(defaultdict): + def __init__(self, *args, **kwargs): + super(Counter, self).__init__(int, *args, **kwargs) from miro import app -from miro.database import confirm_db_thread +from miro import database +from miro import devicedatabaseupgrade from miro import eventloop +from miro import item +from miro import itemsource +from miro import feed from miro import fileutil from miro import filetypes -from miro import filetags +from miro import metadata from miro import prefs +from miro import playlist from miro.gtcache import gettext as _ from miro import messages +from miro import schema from miro import signals +from miro import storedatabase +from miro import threadcheck from miro import conversions -from miro import moviedata -from miro import metadata -from miro.util import returns_filename, check_u +from miro.util import check_u + +from miro.download_utils import next_free_filename from miro.plat import resources from miro.plat.utils import (filename_to_unicode, unicode_to_filename, utf8_to_filename) + +# how much slower converting a file is, compared to copying +CONVERSION_SCALE = 500 +# schema version for device databases +DB_VERSION = 198 + def unicode_to_path(path): """ Convert a Unicode string into a file path. We don't do any of the string @@ -110,6 +130,8 @@ other = set(other) if self.frozenset & other: return True + if not self.regex: + return False return any(v for v in other if self.regex.match(v)) class BaseDeviceInfo(object): @@ -179,8 +201,8 @@ return "<DeviceInfo %r %r %x %x>" % ( getattr(self, "name", None), getattr(self, "device_name", None), - getattr(self, "vendor_id", 0), - getattr(self, "product_id", 0)) + getattr(self, "vendor_id", None) or 0, + getattr(self, "product_id", None) or 0) class MultipleDeviceInfo(BaseDeviceInfo): """ @@ -227,10 +249,12 @@ 'generic': True, 'mount_instructions': _("Your drive must be mounted."), 'audio_conversion': 'copy', - 'audio_types': '', + 'audio_types': frozenset(), 'audio_path': u'Miro', + 'video_types': frozenset(), 'video_conversion': 'copy', 'video_path': u'Miro', + 'container_types': frozenset(), }) class DeviceManager(object): @@ -298,6 +322,13 @@ # FIXME - need this pass + def force_db_save_error(self, device_info): + if device_info.db_info is None: + logging.warn("force_db_save_error: db_info is None, " + "is the device connected?") + return + device_info.db_info.db.simulate_db_save_error() + def startup(self): # load devices self.load_devices(resources.path('devices/*.py')) @@ -306,13 +337,17 @@ def shutdown(self): self.running = False for device in self.connected.values(): - if device.mount and not self._is_hidden(device): - write_database(device.database, device.mount) + if (device.mount and not device.read_only): + device.metadata_manager.run_updates() + device.database.shutdown() def load_devices(self, path): devices = glob(path) for device_desc in devices: global_dict = {} + # XXX bz:17989 execfile() can't handle unicode paths!
+ if isinstance(device_desc, unicode): + device_desc = device_desc.encode('utf-8') execfile(device_desc, global_dict) if 'devices' in global_dict: for info in global_dict['devices']: @@ -365,9 +400,15 @@ if self.show_unknown == show: return # no change unknown_devices = [info for info in self.connected.values() - if self._is_unknown(info)] + if self._is_unknown(info) and not info.read_only] if show: # now we're showing them for info in unknown_devices: + if (info.db_info is not None and + info.db_info.db.temp_mode): + info.db_info.db.force_directory_creation = True + eventloop.add_idle( + info.db_info.db._try_save_temp_to_disk, + 'writing device SQLite DB on show: %r' % info.mount) self._send_connect(info) else: # now we're hiding them for info in unknown_devices: @@ -375,17 +416,70 @@ self.show_unknown = show app.config.set(prefs.SHOW_UNKNOWN_DEVICES, show) + def change_setting(self, device, setting, value): + """Change the value + """ + device.database.setdefault(u'settings', {}) + device.database[u'settings'][setting] = value + if setting == 'name': + device.name = value + # need to send a changed message + message = messages.TabsChanged('connect', [], [device], []) + message.send_to_frontend() + message = messages.DeviceChanged(device) + message.send_to_frontend() + elif setting == 'always_show' and not self.show_unknown: + if value: + self._send_connect(device) + else: + self._send_disconnect(device) + + def eject_device(self, device): + worker_task_count = device.metadata_manager.worker_task_count() + device.metadata_manager.close() + if worker_task_count > 0: + self._call_eject_later(device, 5) + return + + write_database(device.database, device.mount) + if device.db_info is not None: + device.db_info.db.close() + app.device_tracker.eject(device) + + def _call_eject_later(self, device, timeout): + """Call eject_device after a short delay. 
""" + eventloop.add_timeout(timeout, self.eject_device, + 'ejecting device', + args=(device,)) + @staticmethod - def _is_unknown(info): - if not getattr(info.info, 'generic', False): + def _is_unknown(info_or_tuple): + if isinstance(info_or_tuple, messages.DeviceInfo): + mount, info, db = (info_or_tuple.mount, info_or_tuple.info, + info_or_tuple.database) + else: + mount, info, db = info_or_tuple + if not getattr(info, 'generic', False): # not a generic device return False - if info.mount and info.database.get('settings', {}).get( + if mount and db.get(u'settings', {}).get( 'always_show', False): # we want to show this device all the time return False return True + @staticmethod + def _is_read_only(mount): + if not mount: + return True + try: + f = tempfile.TemporaryFile(dir=mount) + except EnvironmentError: + return True + else: + f.close() + return False + def _is_hidden(self, info): # like _is_unknown(), but takes the self.show_unknown flag into account if self.show_unknown: @@ -394,13 +488,13 @@ return self._is_unknown(info) def _set_connected(self, id_, kwargs): - if kwargs.get('mount'): - database = load_database(kwargs['mount']) - device_name = database.get(u'device_name', - kwargs.get('device_name')) + mount = kwargs.get('mount') + if mount: + db = load_database(mount) + device_name = db.get(u'device_name', kwargs.get('device_name')) else: device_name = None - database = DeviceDatabase() + db = DeviceDatabase() if 'name' in kwargs: try: info = self.get_device(kwargs['name'], @@ -420,16 +514,52 @@ 'name or vendor/product IDs') kwargs.update({ - 'database': database, + 'database': db, 'device_name': device_name, 'info': info}) + if mount: + is_hidden = self._is_hidden((mount, info, db)) + read_only = self._is_read_only(mount) + if (id_ in self.connected and + self.connected[id_].db_info is not None): + # reuse old database + db_info = self.connected[id_].db_info + metadata_manager = self.connected[id_].metadata_manager + if is_hidden: + # device became hidden, close the existing objects + db_info.db.close() + metadata_manager.close() + db_info = metadata_manager = None + elif not read_only: + sqlite_db = load_sqlite_database(mount, kwargs.get('size'), + is_hidden=is_hidden) + db_info = database.DeviceDBInfo(sqlite_db, id_) + importer = devicedatabaseupgrade.OldItemImporter(sqlite_db, + mount, + db) + importer.import_metadata() + metadata_manager = make_metadata_manager(mount, db_info, id_) + importer.import_device_items(metadata_manager) + db.check_old_key_usage = True + else: + db_info = metadata_manager = None + else: + db_info = None + metadata_manager = None + read_only = False + if db_info is not None: + sqlite_path = sqlite_database_path(mount) + else: + sqlite_path = None info = self.connected[id_] = messages.DeviceInfo( - id_, info, kwargs.get('mount'), database, - kwargs.get('size'), kwargs.get('remaining')) + id_, info, mount, sqlite_path, db, db_info, + metadata_manager, kwargs.get('size'), kwargs.get('remaining'), + read_only) return info + @eventloop.as_idle def device_connected(self, id_, **kwargs): if id_ in self.connected: # backend got confused @@ -438,7 +568,7 @@ info = self._set_connected(id_, kwargs) - if not self._is_hidden(info): + if not self._is_hidden(info) and not info.read_only: self._send_connect(info) else: logging.debug('ignoring %r', info) @@ -446,9 +576,10 @@ def _send_connect(self, info): if info.mount: self.info_cache.setdefault(info.mount, {}) - scan_device_for_files(info) + on_mount(info) messages.TabsChanged('connect', [info], [], 
[]).send_to_frontend() + @eventloop.as_idle def device_changed(self, id_, **kwargs): if id_ not in self.connected: # backend didn't send a connected message @@ -465,13 +596,13 @@ info = self._set_connected(id_, kwargs) - if self._is_hidden(info): + if self._is_hidden(info) or info.read_only: # don't bother with change message on devices we're not showing return if info.mount: self.info_cache.setdefault(info.mount, {}) - scan_device_for_files(info) + on_mount(info) else: sync_manager = app.device_manager.get_sync_for_device(info, create=False) @@ -480,18 +611,24 @@ messages.TabsChanged('connect', [], [info], []).send_to_frontend() messages.DeviceChanged(info).send_to_frontend() + @eventloop.as_idle def device_disconnected(self, id_): if id_ not in self.connected: return # don't bother with sending messages info = self.connected.pop(id_) - if not self._is_hidden(info): + if not self._is_hidden(info) and not info.read_only: self._send_disconnect(info) def _send_disconnect(self, info): sync_manager = app.device_manager.get_sync_for_device(info, create=False) - if sync_manager: + if sync_manager and not sync_manager.is_finished(): + messages.ShowWarning( + _('Device removed during sync'), + _('%(name)s was removed while a sync was in progress. ' + 'Not all items may have been copied.', + {'name': info.name})).send_to_frontend() sync_manager.cancel() if info.mount: @@ -518,27 +655,245 @@ class DeviceSyncManager(object): """ - Represents a sync in progress to a given device. + Represents a sync to a given device. """ def __init__(self, device): self.device = device + self.device_info = self.device.info + self.device_settings = self.device.database.setdefault(u'settings', + {}) self.start_time = time.time() - self.etas = {} self.signal_handles = [] self.finished = 0 self.total = 0 - self.copying = set() + self.progress_size = Counter() + self.total_size = Counter() + self.copying = {} self.waiting = set() + self.auto_syncs = set() self.stopping = False + self._change_timeout = None + self._copy_iter_running = False + self._info_to_conversion = {} + self.started = False + + def get_sync_items(self, max_size=None): + """Calculate information for syncing + + :returns: (sync_info, expired_items) where sync_info is ItemInfos that + we need to sync and expired_items is DeviceItems for expired items. + """ + # sync settings for the database + sync = self.device.database.get(u'sync', {}) + # list of views with items to sync + views = [] + # maps feed_urls -> set of URLs for items in that feed + item_urls = {} + # ItemInfos that we can sync + infos = set() + # DeviceItems whose original item is expired + expired = set() + + # Iterate through synced podcasts + if sync.setdefault(u'podcasts', {}).get(u'enabled', False): + for url in sync[u'podcasts'].setdefault(u'items', []): + feed_ = feed.lookup_feed(url) + if feed_ is not None: + if sync[u'podcasts'].get(u'all', True): + view = feed_.downloaded_items + else: + view = feed_.unwatched_items + views.append(view) + item_urls[url] = set(i.url for i in view) + + # Iterate through synced playlist + if sync.setdefault(u'playlists', {}).get(u'enabled', False): + for name in sync[u'playlists'].setdefault(u'items', []): + try: + playlist_ = playlist.SavedPlaylist.get_by_title(name) + except database.ObjectNotFoundError: + continue + views.append(item.Item.playlist_view(playlist_.id)) + + + # For each podcast/playlist view, check if there are new items. 
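For reference, the bookkeeping built above has roughly this shape (the URLs here are purely illustrative). item_urls is what lets the expiry pass below treat a DeviceItem as expired once its URL has dropped out of its feed's current item set:

    item_urls = {
        u'http://example.com/podcast.rss': set([
            u'http://example.com/episode-41.mp3',
            u'http://example.com/episode-42.mp3',
        ]),
    }
    # expired iff: device_item.feed_url in item_urls and
    #              device_item.url not in item_urls[device_item.feed_url]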
+ for view in views: + item_infos = app.db.fetch_item_infos(view.id_list()) + infos.update(info for info in item_infos + if not self._item_exists(info)) + + # check for expired items + if sync[u'podcasts'].get(u'expire', True): + for device_item in item.DeviceItem.make_view( + db_info=self.device.db_info): + if (device_item.feed_url in item_urls and + device_item.url not in item_urls[device_item.feed_url]): + expired.add(device_item) + + # check if our size will overflow max_size. If so, remove items from + # infos until they fit + if max_size is not None and infos: + max_size += sum(i.size for i in expired) + sync_size = self.get_sync_size(infos)[1] + if sync_size > max_size: + sizes_and_items = [ + (self.get_sync_size([i])[1], i) for i in infos] + for i in self.yield_items_to_get_to(sync_size - max_size, + sizes_and_items): + sync_size -= i.size + infos.remove(i) + return infos, expired + + def get_auto_items(self, size): + """ + Returns a list of ItemInfos to be automatically synced to the device. + The items should be roughly 'size' bytes. + """ + sync = self.device.database.get(u'sync', {}) + if not sync.get(u'auto_fill', False): + return set() + + name_to_view = { + u'recent_music': item.Item.watchable_audio_view(), + u'random_music': item.Item.watchable_audio_view(), + u'most_played_music': item.Item.watchable_audio_view(), + u'new_playlists': playlist.SavedPlaylist.make_view(), + u'recent_podcasts': item.Item.toplevel_view() + } + + name_to_view[u'recent_music'].order_by = 'item.id DESC' + name_to_view[u'random_music'].order_by = 'RANDOM()' + name_to_view[u'most_played_music'].order_by = 'item.play_count DESC' + name_to_view[u'new_playlists'].order_by = 'playlist.id DESC' + name_to_view[u'recent_podcasts'].where = ( + '%s AND item.filename IS NOT NULL' % ( + name_to_view[u'recent_podcasts'].where,)) + name_to_view[u'recent_podcasts'].order_by = 'item.id DESC' + + scores = sync.get(u'auto_fill_settings', {}) + total = float(sum(scores.setdefault(name, 0.5) + for name in name_to_view)) + + sizes = dict((name, int(size * scores[name] / total)) + for name in name_to_view) + + auto_syncs = {} + for name, view in name_to_view.items(): + auto_syncs[name] = syncs = set() + remaining = sizes[name] + if name == u'new_playlists': + for playlist_ in view: + # FIXME: need to make sure this works now that ItemInfo + # has changed a bit + playlist_view = item.Item.playlist_view(playlist_.id) + playlist_ids = [i.id for i in playlist_view] + infos = app.db.fetch_item_infos(playlist_ids) + size = self.get_sync_size(infos)[1] + if size and size <= remaining: + syncs.update(infos) + remaining -= size + else: + # FIXME: need to make sure this works now that ItemInfo + # has changed a bit + for info in app.db.fetch_item_infos(i.id for i in view): + size = self.get_sync_size([info])[1] + if size and size <= remaining: + syncs.add(info) + remaining -= size + + return set().union(*auto_syncs.values()) + + def get_sync_size(self, items, expired=None): + """ + Returns the number of items that will be synced, and the size that sync + will take. + The count includes items that will be removed, but their size counts + against the total sync size.
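The auto-fill budget in get_auto_items() is split proportionally to each category's score, which defaults to 0.5. A worked example of the arithmetic: with all five categories at the default, total is 2.5 and every category gets size * 0.5 / 2.5, one fifth of the budget; bumping one score to 1.0 raises total to 3.0 and gives that category a third:

    size = 1000 * 2 ** 20    # a 1000 MB auto-fill budget
    scores = {u'recent_music': 1.0, u'random_music': 0.5,
              u'most_played_music': 0.5, u'new_playlists': 0.5,
              u'recent_podcasts': 0.5}
    total = float(sum(scores.values()))    # 3.0
    sizes = dict((name, int(size * scores[name] / total))
                 for name in scores)
    # recent_music gets ~333 MB; each of the other four gets ~166 MB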
+ """ + if not items and not expired: + return 0, 0 + count = size = 0 + items_for_converter = {} + for info in items: + converter = self.conversion_for_info(info) + if converter is not None: + items_for_converter.setdefault(converter, set()).add(info) + if 'copy' in items_for_converter: + items = items_for_converter.pop('copy') + count += len(items) + size += sum(info.size for info in items) + for converter, items in items_for_converter.items(): + for info in items: + task = conversions.conversion_manager._make_conversion_task( + converter, info, + target_folder=None, + create_item=False) + if task: + count += 1 + size += task.get_output_size_guess() + if expired: + count += len(expired) + size -= sum(i.size for i in expired) + return count, size + + def max_sync_size(self, include_auto=True): + """ + Returns the largest sync (in bytes) that we can perform on this device. + """ + if not self.device.mount: + return 0 + sync = self.device.database.get(u'sync', {}) + if include_auto: + auto_fill_size = self._calc_auto_fill_size() + else: + auto_fill_size = 0 + if not sync.get(u'max_fill', False): + return self.device.remaining + auto_fill_size + else: + try: + percent = int(sync.get(u'max_fill_percent', 90)) * 0.01 + except ValueError: + return self.device.remaining + auto_fill_size + else: + min_remaining = self.device.size * (1 - percent) + return self.device.remaining - min_remaining + auto_fill_size + + def _calc_auto_fill_size(self): + """ + Returns the total size of auto-filled files. + """ + sync = self.device.database.get(u'sync') + if not sync: + return 0 + if not sync.get(u'auto_fill', False): + return 0 + return sum(i.size for i in + item.DeviceItem.auto_sync_view(self.device.db_info)) + + def query_sync_information(self): + if self.device.db_info.db.is_closed(): + logging.warn("query_sync_information: device closed") + return + infos, expired = self.get_sync_items(self.max_sync_size()) + count, size = self.get_sync_size(infos, expired) + self.last_sync_info = (infos, expired, count, size) + message = messages.CurrentSyncInformation(self.device, count, size) + message.send_to_frontend() + + def start(self): + if self.started: + return + self.started = True self.audio_target_folder = os.path.join( - device.mount, + self.device.mount, self._get_path_from_setting('audio_path')) if not os.path.exists(self.audio_target_folder): os.makedirs(self.audio_target_folder) self.video_target_folder = os.path.join( - device.mount, + self.device.mount, self._get_path_from_setting('video_path')) if not os.path.exists(self.video_target_folder): os.makedirs(self.video_target_folder) @@ -558,52 +913,196 @@ def set_device(self, device): self.device = device - def add_items(self, item_infos): - device_settings = self.device.database[u'settings'] - device_info = self.device.info - audio_conversion = (device_settings.get(u'audio_conversion') or - device_info.audio_conversion) - video_conversion = (device_settings.get(u'video_conversion') or - device_info.video_conversion) - self.total += len(item_infos) - self._send_sync_changed() + def expire_items(self, item_infos): + for info in item_infos: + try: + device_item = item.DeviceItem.get_by_id( + info.id, db_info=self.device.db_info) + except database.ObjectNotFoundError: + logging.warn("expire_items: Got ObjectNotFoundError for %s", + info.id) + else: + self._expire_item(device_item) + self._check_finished() + + def _item_exists(self, item_info): + return item.DeviceItem.item_exists(item_info, + db_info=self.device.db_info) + + @staticmethod + def 
yield_items_to_get_to(size, sizes_and_items): + """ + This algorithm lets us filter a set of items to get to a given size. + ``size`` is the total size we're trying to get below. + ``sizes_and_items`` is a list of (size, item) tuples. This function + yields the items that need to be removed. + + The algorithm we use is: + * Sort all the auto items by their size + * While we need more space: + * bisect the sorted sizes with the size we've got left + * If there's an exact match, remove it and we're done + * Otherwise, remove the item around the insertion point which is + closest and try again with the new remaining size + """ + sizes_and_items.sort() + keys = [i[0] for i in sizes_and_items] + def remove_(index): + keys.pop(index) + return sizes_and_items.pop(index) + + while size >= 0 and keys: + left = bisect.bisect_left(keys, size) + if left == size: # perfect fit! + s, i = remove_(left) + yield i + break + right = bisect.bisect_right(keys, size) + if left == right == len(keys): + s, i = remove_(len(keys) - 1) + size -= s + yield i + continue + if (abs(left - size) < abs(right - size)): # left is closer + s, i = remove_(left) + size -= s + yield i + else: + s, i = remove_(right) + size -= s + yield i + + def expire_auto_items(self, size): + """ + Expires automatically synced items. + """ + sizes_and_items = [(i.size, i) for i in + item.DeviceItem.auto_sync_view(self.device.db_info)] + for size, device_item in self.yield_items_to_get_to(size, + sizes_and_items): + self._expire_item(device_item) + + def _expire_item(self, device_item): + device_item.delete_and_remove(self.device) + self.device.remaining += device_item.size + + def add_items(self, item_infos, auto_sync=False): for info in item_infos: if self.stopping: self._check_finished() return - if self.device.database.item_exists(info): + if self._item_exists(info): continue # don't recopy stuff + conversion = self.conversion_for_info(info) + if not conversion: + continue if info.file_type == 'audio': - if (audio_conversion == 'copy' or (info.file_format and - info.file_format.split()[0] in device_info.audio_types)): - final_path = os.path.join(self.audio_target_folder, - os.path.basename( - info.video_path)) - self.copy_file(info, final_path) - else: - logging.debug('unable to detect format of %r: %s', - info.video_path, info.file_format) - self.start_conversion(audio_conversion, - info, - self.audio_target_folder) + target_folder = self.audio_target_folder elif info.file_type == 'video': - if video_conversion == 'copy': - final_path = os.path.join(self.video_target_folder, - os.path.basename( - info.video_path)) - self.copy_file(info, final_path) - else: - self.start_conversion(video_conversion, - info, - self.video_target_folder) + target_folder = self.video_target_folder + else: + continue + if auto_sync: + self.auto_syncs.add(info.id) + self.total += 1 + if conversion == 'copy': + final_path = os.path.join(target_folder, + os.path.basename( + info.filename)) + if os.path.exists(final_path): + logging.debug('%r exists, getting a new one precopy', + final_path) + try: + final_path, fp = next_free_filename(final_path) + # XXX we should be passing in the file handle not + # path. + fp.close() + except ValueError: + logging.warn('add_items: next_free_filename failed. 
' + 'candidate = %r', final_path) + continue + self.copy_file(info, final_path) + else: + self.start_conversion(conversion, + info, + target_folder) self._check_finished() + + def cache_conversion(meth): + def wrapper(self, info): + if info not in self._info_to_conversion: + self._info_to_conversion[info] = meth(self, info) + return self._info_to_conversion[info] + return wrapper + + @cache_conversion + def conversion_for_info(self, info): + if not info.filename: + app.controller.failed_soft("device conversion", + "got video %r without filename" % ( + info.title,)) + return None + + if info.file_type not in ('audio', 'video'): + logging.debug("got item %r that's not audio or video", info.title) + return None + + # shortcut, if we're just going to copy the file + if self.device_settings.get( + u'%s_conversion' % info.file_type, + getattr(self.device_info, + '%s_conversion' % info.file_type)) == u'copy': + return 'copy' + + try: + media_info = conversions.get_media_info(info.filename) + except ValueError: + logging.exception('error getting media info for %r', + info.filename) + return 'copy' + + requires_conversion = False + def ensure_set(v): + if isinstance(v, basestring): + return set([v]) + else: + return set(v) + if 'container' in media_info: + info_containers = ensure_set(media_info['container']) + if not (self.device_info.container_types & info_containers): + requires_conversion = True # container doesn't match + else: + requires_conversion = True + if 'audio_codec' in media_info: + info_audio_codecs = ensure_set(media_info['audio_codec']) + if not (self.device_info.audio_types & info_audio_codecs): + requires_conversion = True # audio codec doesn't match + else: + requires_conversion = True + if info.file_type == 'video': + if (self.device_settings.get(u'always_sync_videos') or + 'video_codec' not in media_info): + requires_conversion = True + else: + info_video_codecs = ensure_set(media_info['video_codec']) + if not (self.device_info.video_types & info_video_codecs): + requires_conversion = True # video codec doesn't match + if not requires_conversion: + return 'copy' # so easy! 
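conversion_for_info() is asked about the same ItemInfo repeatedly (while sizing a sync in get_sync_size(), and again in add_items()), and a cache miss costs a conversions.get_media_info() probe of the file, so the cache_conversion decorator above memoizes the answer per sync manager in _info_to_conversion. Schematically (sync_manager stands for a DeviceSyncManager instance):

    conversion = sync_manager.conversion_for_info(info)  # probes media info
    conversion = sync_manager.conversion_for_info(info)  # cached dict lookup
    # 'copy' means the container and codecs already suit the device;
    # any other value names the converter to run.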
+ elif info.file_type == 'audio': + return (self.device_settings.get(u'audio_conversion') or + self.device_info.audio_conversion) + elif info.file_type == 'video': + return (self.device_settings.get(u'video_conversion') or + self.device_info.video_conversion) + def start_conversion(self, conversion, info, target): conversion_manager = conversions.conversion_manager start_conversion = conversion_manager.start_conversion - if not self.waiting: + if not self.signal_handles: for signal, callback in ( ('task-changed', self._conversion_changed_callback), ('task-staged', self._conversion_staged_callback), @@ -615,58 +1114,79 @@ task = start_conversion(conversion, info, target, create_item=False) + self.total_size[task.key] = (task.get_output_size_guess() * + CONVERSION_SCALE) self.waiting.add(task.key) def copy_file(self, info, final_path): - if info.id in self.copying: + if (info, final_path) in self.copying: logging.warn('tried to copy %r twice', info) return - self.copying.add(info.id) - eventloop.call_in_thread(lambda x: self._copy_file_callback(x), - lambda x: None, - self._copy_in_thread, - self, info, final_path) - - def _copy_in_thread(self, info, final_path): - if self.stopping: - return None, info - try: - shutil.copy(info.video_path, final_path) - except IOError: - return None, info - else: - return final_path, info - - def _copy_file_callback(self, (final_path, info)): - if info.id not in self.copying: - app.controller.failed_soft( - '_copy_file_callback', - '%r not in self.copying (final path: %r)' % ( - info, final_path)) + file(final_path, 'w').close() # create the file so that future tries + # will see it + self.copying[final_path] = info + self.total_size[info.id] = info.size + if not self._copy_iter_running: + self._copy_iter_running = True + eventloop.idle_iterate(self._copy_as_iter, + 'copying files to device') + + def _copy_as_iter(self): + while self.copying: + final_path, info = self.copying.popitem() + iterable = fileutil.copy_with_progress(info.filename, final_path, + block_size=128 * 1024) + try: + for count in iterable: + self.progress_size[info.id] += count + if self.stopping: + iterable.close() + eventloop.add_idle(fileutil.delete, + "deleting canceled sync", + args=(final_path,)) + final_path = None + break + # let other stuff run + self._schedule_sync_changed() + yield + except IOError: + final_path = None + if final_path: + self._add_item(final_path, info) + # don't throw off the progress bar; we're done so pretend we got + # all the bytes + self.progress_size[info.id] = self.total_size[info.id] self.finished += 1 self._check_finished() - return - if final_path: - self._add_item(final_path, info) - self.copying.remove(info.id) - self.finished += 1 - self._check_finished() + if self.stopping: + break # no more copies + yield + for final_path in self.copying: + # canceled the sync, so remove the non-synced files + eventloop.add_idle(fileutil.delete, + "deleting canceled sync", + args=(final_path,)) + self._copy_iter_running = False def _conversion_changed_callback(self, conversion_manager, task): - self.etas[task.key] = task.get_eta() - self._send_sync_changed() + total = self.total_size[task.key] + self.progress_size[task.key] = task.progress * total + self._schedule_sync_changed() def _conversion_removed_callback(self, conversion_manager, task=None): if task is not None: self.finished += 1 try: self.waiting.remove(task.key) - del self.etas[task.key] + # don't throw off the progress bar; we're done so pretend we + # got all the bytes + self.progress_size[task.key] = 
self.total_size[task.key] except KeyError: pass else: # remove all tasks self.finished += len(self.waiting) - self.etas = {} + for key in self.waiting: + self.progress_size[key] = self.total_size[key] self.waiting = set() self._check_finished() @@ -674,7 +1194,9 @@ self.finished += 1 try: self.waiting.remove(task.key) - del self.etas[task.key] + # don't throw off the progress bar; we're done so pretend we got + # all the bytes + self.progress_size[task.key] = self.total_size[task.key] except KeyError: pass # missing for some reason else: @@ -686,52 +1208,26 @@ def _add_item(self, final_path, item_info): dirname, basename = os.path.split(final_path) _, extension = os.path.splitext(basename) - new_basename = "%s%s" % (unicode_to_filename(item_info.name, + new_basename = "%s%s" % (unicode_to_filename(item_info.title, self.device.mount), extension) new_path = os.path.join(dirname, new_basename) + if os.path.exists(new_path): + logging.debug('final destination %r exists, making a new one', + new_path) + new_path, fp = next_free_filename(new_path) def callback(): if not os.path.exists(new_path): return # copy failed, just give up + if _device_not_valid(self.device): + return # Device has been ejected, give up. - device_item = DeviceItem( - device=self.device, - file_type=item_info.file_type, - video_path=new_path[len(self.device.mount):], - title=item_info.name, - feed_name=item_info.feed_name, - feed_url=item_info.feed_url, - description=item_info.description, - release_date=time.mktime(item_info.release_date.timetuple()), - duration=(item_info.duration and item_info.duration * 1000 or - None), - permalink=item_info.permalink, - commentslink=item_info.commentslink, - payment_link=item_info.payment_link, - screenshot=item_info.thumbnail, - thumbnail_url=item_info.thumbnail_url, - file_format=item_info.file_format, - license=item_info.license, - url=item_info.file_url, - media_type_checked=item_info.media_type_checked, - mime_type=item_info.mime_type, - creation_time=time.mktime(item_info.date_added.timetuple()), - title_tag=item_info.title_tag, - artist=item_info.artist, - album=item_info.album, - track=item_info.track, - year=item_info.year, - genre=item_info.genre, - metadata_version=item_info.metadata_version, - mdp_state=item_info.mdp_state, - ) - device_item._migrate_thumbnail() - database = self.device.database - database.setdefault(device_item.file_type, {}) - database[device_item.file_type][device_item.id] = \ - device_item.to_dict() - database.emit('item-added', device_item) + relpath = os.path.relpath(new_path, self.device.mount) + auto_sync = item_info.id in self.auto_syncs + device_item = item.DeviceItem(self.device, relpath, item_info, + auto_sync=auto_sync) + self.device.remaining -= device_item.size fileutil.migrate_file(final_path, new_path, callback) @@ -740,16 +1236,24 @@ # finished! 
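Progress here is tracked in bytes: total_size records what each task is expected to produce (scaled by CONVERSION_SCALE for conversions, since converting is much slower than copying) and progress_size what has been processed so far, so overall progress is just the ratio of the two sums. get_eta() below then extrapolates from elapsed time; a worked example:

    # assume get_progress() returns 0.25 after 60 seconds of syncing
    progress = 0.25 * 100                            # 25 (percent)
    duration = 60.0                                  # seconds elapsed
    time_per_percent = duration / progress           # 2.4
    eta = int(time_per_percent * (100 - progress))   # 180 seconds left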
if not self.stopping: self._send_sync_finished() - self._send_sync_changed() + self._schedule_sync_changed() + + def _schedule_sync_changed(self): + if not self._change_timeout: + self._change_timeout = eventloop.add_timeout( + 1.0, + self._send_sync_changed, + 'sync changed update') def _send_sync_changed(self): message = messages.DeviceSyncChanged(self) message.send_to_frontend() + self._change_timeout = None def _send_sync_finished(self): for handle in self.signal_handles: conversions.conversion_manager.disconnect(handle) - self.signal_handles = None + self.signal_handles = [] self.device.is_updating = False # stop the spinner messages.TabsChanged('connect', [], [self.device], []).send_to_frontend() @@ -757,242 +1261,59 @@ def is_finished(self): - if self.stopping: + if self.stopping or not self.started: return True if self.waiting or self.copying: return False return self.device.id not in app.device_manager.syncs_in_progress def get_eta(self): - etas = [eta for eta in self.etas.values() if eta is not None] - if not etas: - return - longest_eta = max(etas) - return longest_eta + progress = self.get_progress() * 100 + if not progress: + return None + + duration = time.time() - self.start_time + time_per_percent = duration / progress + return int(time_per_percent * (100 - progress)) def get_progress(self): - eta = self.get_eta() - if eta is None: - return float(self.finished) / self.total - total_time = time.time() - self.start_time - total_eta = total_time + eta - return total_time / total_eta + total = sum(self.total_size.itervalues()) + if not total: + return 0.0 + progress = float(sum(self.progress_size.itervalues())) + return min(progress / total, 1.0) def cancel(self): + if not self.started: + return for key in self.waiting: conversions.conversion_manager.cancel(key) self.stopping = True # kill in-progress copies self._send_sync_changed() self._send_sync_finished() -class DeviceItem(metadata.Store): - """ - An item which lives on a device. There's a separate, per-device JSON - database, so this implements the necessary Item logic for those files. 
- """ - def __init__(self, **kwargs): - self.__initialized = False - for required in ('video_path', 'file_type', 'device'): - if required not in kwargs: - raise TypeError('DeviceItem must be given a "%s" argument' - % required) - self.file_format = self.size = None - self.release_date = self.feed_name = self.feed_id = None - self.keep = True - self.isContainerItem = False - self.url = self.payment_link = None - self.comments_link = self.permalink = self.file_url = None - self.license = self.downloader = None - self.duration = self.screenshot = self.thumbnail_url = None - self.resumeTime = 0 - self.subtitle_encoding = self.enclosure_type = None - self.file_type = None - self.creation_time = None - self.is_playing = False - metadata.Store.setup_new(self) - self.__dict__.update(kwargs) - - if isinstance(self.video_path, unicode): - # make sure video path is a filename and ID is Unicode - self.id = self.video_path - self.video_path = utf8_to_filename(self.video_path.encode('utf8')) - else: - self.id = filename_to_unicode(self.video_path) - if isinstance(self.screenshot, unicode): - self.screenshot = utf8_to_filename(self.screenshot.encode('utf8')) - if isinstance(self.cover_art, unicode): - self.cover_art = utf8_to_filename(self.cover_art.encode('utf8')) - if self.file_format is None: - self.file_format = filename_to_unicode( - os.path.splitext(self.video_path)[1]) - if self.file_type == 'audio': - self.file_format = self.file_format + ' audio' - - try: # filesystem operations - if self.size is None: - self.size = os.path.getsize(self.get_filename()) - if self.release_date is None or self.creation_time is None: - ctime = fileutil.getctime(self.get_filename()) - if self.release_date is None: - self.release_date = ctime - if self.creation_time is None: - self.creation_time = ctime - if not self.metadata_version: - # haven't run read_metadata yet. We don't check the actual - # version because upgrading metadata isn't supported. 
- self.read_metadata() - if not self.get_title(): - self.title = filename_to_unicode( - os.path.basename(self.video_path)) - - except (OSError, IOError): - # if there was an error reading the data from the filesystem, don't - # bother continuing with other FS operations or starting moviedata - logging.debug('error reading %s', self.id, exc_info=True) - else: - if self.mdp_state is None: # haven't run MDP yet - moviedata.movie_data_updater.request_update(self) - self.__initialized = True - - @staticmethod - def id_exists(): - return True - - def get_release_date(self): - try: - return datetime.fromtimestamp(self.release_date) - except (ValueError, TypeError): - logging.warn('DeviceItem: release date %s invalid', - self.release_date) - return datetime.now() - - - def get_creation_time(self): - try: - return datetime.fromtimestamp(self.creation_time) - except (ValueError, TypeError): - logging.warn('DeviceItem: creation time %s invalid', - self.creation_time) - return datetime.now() - - @returns_filename - def get_filename(self): - return os.path.join(self.device.mount, self.video_path) - - def get_url(self): - return self.url or u'' - - @returns_filename - def get_thumbnail(self): - if self.cover_art: - return os.path.join(self.device.mount, - self.cover_art) - elif self.screenshot: - return os.path.join(self.device.mount, - self.screenshot) - elif self.file_type == 'audio': - return resources.path("images/thumb-default-audio.png") - else: - return resources.path("images/thumb-default-video.png") - - def _migrate_image_field(self, field_name): - value = getattr(self, field_name) - icon_cache_directory = app.config.get(prefs.ICON_CACHE_DIRECTORY) - cover_art_directory = app.config.get(prefs.COVER_ART_DIRECTORY) - if value is not None: - if (value.startswith(icon_cache_directory) or - value.startswith(cover_art_directory)): - # migrate the screenshot onto the device - basename = os.path.basename(value) - try: - new_path = os.path.join(self.device.mount, '.miro', - basename) - shutil.copyfile(value, new_path) - except (IOError, OSError): - # error copying the thumbnail, just erase it - setattr(self, field_name, None) - else: - extracted = os.path.join(icon_cache_directory, 'extracted') - if (value.startswith(extracted) or - value.startswith(cover_art_directory)): - # moviedata extracted this for us, so we can remove it - try: - os.unlink(value) - except OSError: - pass - setattr(self, field_name, - os.path.join('.miro', basename)) - elif value.startswith(resources.root()): - setattr(self, field_name, None) # don't save a default - # thumbnail - - def _migrate_thumbnail(self): - self._migrate_image_field('screenshot') - self._migrate_image_field('cover_art') - - def remove(self, save=True): - for file_type in [u'video', u'audio', u'other']: - if self.video_path in self.device.database[file_type]: - del self.device.database[file_type][self.id] - if save: - self.device.database.emit('item-removed', self) - - def signal_change(self): - if not self.__initialized: - return - - if not os.path.exists( - os.path.join(self.device.mount, self.video_path)): - # file was removed from the filesystem - self.remove() - return - - if (not isinstance(self.file_type, unicode) and - self.file_type is not None): - self.file_type = unicode(self.file_type) - - was_removed = False - for type_ in set((u'video', u'audio', u'other')) - set( - (self.file_type,)): - if self.id in self.device.database[type_]: - # clean up old types, if necessary - self.remove(save=False) - was_removed = True - break - - 
self._migrate_thumbnail() - if self.file_type: - db = self.device.database - db[self.file_type][self.id] = self.to_dict() - - if self.file_type != u'other' or was_removed: - db.emit('item-changed', self) - - def to_dict(self): - data = {} - for k, v in self.__dict__.items(): - if v is not None and k not in (u'device', u'file_type', u'id', - u'video_path', u'_deferred_update'): - if ((k == u'screenshot' or k == u'cover_art')): - v = filename_to_unicode(v) - data[k] = v - return data - class DeviceDatabase(dict, signals.SignalEmitter): def __init__(self, data=None, parent=None): if data: dict.__init__(self, data) + self.created_new = False else: dict.__init__(self) + self.created_new = True signals.SignalEmitter.__init__(self, 'changed', 'item-added', 'item-changed', 'item-removed') self.parent = parent self.changing = False self.bulk_mode = False self.did_change = False + self.check_old_key_usage = False + self.write_manager = None def __getitem__(self, key): check_u(key) + if self.check_old_key_usage: + if key in (u'audio', u'video', u'other'): + raise AssertionError() value = super(DeviceDatabase, self).__getitem__(key) if isinstance(value, dict) and not isinstance(value, DeviceDatabase): value = DeviceDatabase(value, self.parent or self) @@ -1014,6 +1335,8 @@ self.changing = True try: self.emit('changed') + if self.write_manager: + self.write_manager.schedule_write(self) finally: self.changing = False self.did_change = False @@ -1023,28 +1346,20 @@ if not bulk and self.did_change: self.notify_changed() - # XXX does this belong here? - def item_exists(self, item_info): - """Checks if the given ItemInfo exists in our database. Should only be - called on the parent database. + def _find_item_data(self, path): + """Find the data for an item in the database + + Returns (item_data, file_type) tuple """ - if self.parent: - raise RuntimeError('item_exists() called on sub-dictionary') - if item_info.file_type not in self: - return False - for existing in self[item_info.file_type].values(): - if (item_info.file_url and - existing.get('url') == item_info.file_url): - return True - if ((item_info.name, item_info.description, item_info.size, - item_info.duration * 1000 if item_info.duration - else None) == - (existing.get('title'), existing.get('description'), - existing.get('size'), existing.get('duration'))): - # if a bunch of qualities are the same, we'll call it close - # enough - return True - return False + + for file_type in (u'audio', u'video', u'other'): + if file_type in self and path in self[file_type]: + return (self[file_type][path], file_type) + raise KeyError(path) + + def shutdown(self): + if self.write_manager and self.write_manager.is_dirty(): + self.write_manager.write() class DatabaseWriteManager(object): """ @@ -1057,16 +1372,21 @@ self.scheduled_write = None self.database = None - def __call__(self, database): + def schedule_write(self, database): self.database = database - if self.scheduled_write: + if self.is_dirty(): return self.scheduled_write = eventloop.add_timeout(self.SAVE_INTERVAL, self.write, 'writing device database') + + def is_dirty(self): + return self.scheduled_write is not None + def write(self): - write_database(self.database, self.mount) - self.database = self.scheduled_write = None + if self.is_dirty(): + write_database(self.database, self.mount) + self.database = self.scheduled_write = None def load_database(mount, countdown=0): """ @@ -1084,7 +1404,7 @@ except ValueError: logging.exception('JSON decode error on %s', mount) db = {} - except (IOError, 
OSError): + except EnvironmentError: if countdown == 5: logging.exception('file error with JSON on %s', mount) db = {} @@ -1093,16 +1413,124 @@ time.sleep(0.20 * 1.2 ** countdown) return load_database(mount, countdown + 1) ddb = DeviceDatabase(db) - ddb.connect('changed', DatabaseWriteManager(mount)) + ddb.write_manager = DatabaseWriteManager(mount) return ddb -def write_database(database, mount): +def sqlite_database_path(mount): + return os.path.join(mount, '.miro', 'sqlite') + +def load_sqlite_database(mount, device_size, countdown=0, is_hidden=False): + """ + Returns a LiveStorage object for an sqlite database on the device + + The database lives at [MOUNT]/.miro/sqlite + """ + threadcheck.confirm_eventloop_thread() + if mount == ':memory:': # special case for the unittests + path = ':memory:' + preallocate = None + start_in_temp_mode = False + else: + directory = os.path.join(mount, '.miro') + start_in_temp_mode = False + if is_hidden and not os.path.exists(directory): + # don't write to the disk initially. This works because we set + # `force_directory_creation` to False further down, which prevents + # LiveStorage from creating the .miro directory itself + start_in_temp_mode = True + path = os.path.join(directory, 'sqlite') + preallocate = calc_sqlite_preallocate_size(device_size) + logging.info('loading SQLite db on device %r: %r', mount, path) + error_handler = storedatabase.DeviceLiveStorageErrorHandler(mount) + try: + live_storage = storedatabase.DeviceLiveStorage( + path, error_handler, + preallocate=preallocate, + object_schemas=schema.device_object_schemas, + schema_version=DB_VERSION, + start_in_temp_mode=start_in_temp_mode) + except EnvironmentError: + if countdown == 5: + logging.exception('file error with sqlite database on %s', mount) + return load_sqlite_database(':memory:', 0, countdown) + else: + # wait a little while; total time is ~1.5s + time.sleep(0.20 * 1.2 ** countdown) + return load_sqlite_database(mount, device_size, countdown + 1) + if live_storage.created_new: + # force the version to match the current schema. This is a hack to + # make databases from the nightlies match the ones from users starting + # with 5.0 + live_storage.set_version(DB_VERSION) + if start_in_temp_mode: + # We won't create an SQLite database until something else writes to + # the .miro directory on the device. + live_storage.force_directory_creation = False + else: + device_db_version = live_storage.get_version() + if device_db_version < DB_VERSION: + logging.info("upgrading device database: %r", mount) + live_storage.upgrade_database(context='device') + elif device_db_version > DB_VERSION: + # Newer versions of miro should store their device databases in a + # way that's compatible with previous ones. We just have to hope + # that's true in this case. + logging.warn("database from newer miro version: %r (version=%s)", + mount, device_db_version) + + return live_storage + +def calc_sqlite_preallocate_size(device_size): + """Calculate the size we should preallocate for our sqlite database. """ + # Estimate that the device can store 1 item per megabyte and each item + # takes 400 bytes in the database. + max_items_estimate = device_size / (2 ** 20) + size = max_items_estimate * 400 + # force the size to be between 512K and 10M + size = max(size, 512 * (2 ** 10)) + size = min(size, 10 * (2 ** 20)) + return size + +def make_metadata_manager(mount, db_info, device_id): + """ + Get a MetadataManager for a device. + """
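calc_sqlite_preallocate_size() above works out to about 400 bytes of preallocated database per megabyte of device capacity, clamped to the range [512 KB, 10 MB]. A worked example for an 8 GB device:

    device_size = 8 * 2 ** 30                      # 8 GB
    max_items_estimate = device_size / (2 ** 20)   # 8192 items
    size = max_items_estimate * 400                # 3276800 bytes, ~3.1 MB
    size = max(size, 512 * (2 ** 10))              # unchanged
    size = min(size, 10 * (2 ** 20))               # unchanged
    # devices under ~1.3 GB hit the 512 KB floor;
    # devices over ~26 GB hit the 10 MB ceiling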
+ """ + manager = metadata.DeviceMetadataManager(db_info, device_id, mount) + manager.connect("new-metadata", on_new_metadata, device_id) + return manager + +def on_new_metadata(metadata_manager, new_metadata, device_id): + try: + device = app.device_manager.connected[device_id] + except KeyError: + # bz18893: don't crash if the device isn't around any more. + logging.warn("devices.py - on_new_metadata: KeyError getting %r", + device_id) + return + + path_map = item.DeviceItem.items_for_paths(new_metadata.keys(), + device.db_info) + device.db_info.bulk_sql_manager.start() + try: + for path, metadata in new_metadata.iteritems(): + try: + device_item = path_map[path.lower()] + except KeyError: + logging.warn("devices.py - on_new_metadata: Got metadata " + "but can't find item for %r", path) + else: + device_item.update_from_metadata(metadata) + device_item.signal_change() + finally: + device.db_info.bulk_sql_manager.finish() + +def write_database(db, mount): """ Writes the given dictionary to the device. The database lives at [MOUNT]/.miro/json """ - confirm_db_thread() + threadcheck.confirm_eventloop_thread() if not os.path.exists(mount): # device disappeared, so we can't write to it return @@ -1111,104 +1539,133 @@ except OSError: pass try: - json.dump(database, file(os.path.join(mount, '.miro', 'json'), 'wb')) + with file(os.path.join(mount, '.miro', 'json'), 'wb') as output: + iterable = json._default_encoder.iterencode(db) + output.writelines(iterable) except IOError: # couldn't write to the device # XXX throw up an error? pass def clean_database(device): - def _exists(item_path): - return os.path.exists(os.path.join(device.mount, - item_path)) + """Go through a device and remove any items that have been deleted. + + :returns: list of paths that are still valid + """ + metadata.remove_invalid_device_metadata(device) + known_files = set() to_remove = [] - for item_type in (u'video', u'audio', u'other'): - device.database.setdefault(item_type, {}) - if isinstance(device.database[item_type], list): - # 17554: we could accidentally set this to a list - device.database[item_type] = {} - for item_path_unicode in device.database[item_type]: - item_path = utf8_to_filename(item_path_unicode.encode('utf8')) - if _exists(item_path): - known_files.add(os.path.normcase(item_path)) - else: - to_remove.append((item_type, item_path_unicode)) + # Use select_paths() since it avoids constructing DeviceItem objects + for row in item.DeviceItem.select_paths(device.db_info): + relpath = row[0] + full_path = os.path.join(device.mount, relpath) + if os.path.exists(full_path): + known_files.add(relpath.lower()) + else: + to_remove.append(relpath) if to_remove: - device.database.set_bulk_mode(True) - for item_type, item_path in to_remove: - del device.database[item_type][item_path] - device.database.set_bulk_mode(False) + device.db_info.bulk_sql_manager.start() + try: + for relpath in to_remove: + device_item = item.DeviceItem.get_by_path(relpath, + device.db_info) + device_item.remove(device) + finally: + device.db_info.bulk_sql_manager.finish() return known_files +def on_mount(info): + """Stuff that we need to do when the device is first mounted. 
+ """ + if info.database.get(u'sync', {}).get(u'auto', False): + message = messages.DeviceSyncFeeds(info) + message.send_to_backend() + scan_device_for_files(info) + +def _device_not_valid(device): + if not app.device_manager.running: # user quit, so we will too + logging.debug('stopping scan on %r: user quit', device.mount) + return True + if device.metadata_manager is None or device.metadata_manager.closed: # device was ejected + return True + if not os.path.exists(device.mount): # device disappeared + logging.debug('stopping scan on %r: disappeared', device.mount) + return True + if app.device_manager._is_hidden(device): # device no longer being + # shown + logging.debug('stopping scan on %r: hidden', device.mount) + return True + return False + @eventloop.idle_iterator def scan_device_for_files(device): # XXX is this as_idle() safe? # prepare paths to add - logging.debug('starting scan on %s', device.mount) + if device.read_only: + logging.debug('skipping scan on read-only device %r', device.mount) + return + logging.debug('starting scan on %r', device.mount) known_files = clean_database(device) - item_data = [] + found_files = [] start = time.time() - def _continue(): - if not app.device_manager.running: # user quit, so we will too - logging.debug('stopping scan on %s: user quit', device.mount) - return False - if not os.path.exists(device.mount): # device disappeared - logging.debug('stopping scan on %s: disappeared', device.mount) - return False - if app.device_manager._is_hidden(device): # device no longer being - # shown - logging.debug('stopping scan on %s: hidden', device.mount) - return False - return True - for filename in fileutil.miro_allfiles(device.mount): - short_filename = filename[len(device.mount):] - ufilename = filename_to_unicode(short_filename) - item_type = None - if os.path.normcase(short_filename) in known_files: - continue - if filetypes.is_video_filename(ufilename): - item_type = u'video' - elif filetypes.is_audio_filename(ufilename): - item_type = u'audio' - if item_type is not None: - item_data.append((ufilename, item_type)) - app.metadata_progress_updater.will_process_path(filename, - device) - if time.time() - start > 0.4: + for path in fileutil.miro_allfiles(device.mount): + relpath = os.path.relpath(path, device.mount) + if ((filetypes.is_video_filename(path) or + filetypes.is_audio_filename(path)) and + relpath.lower() not in known_files): + found_files.append(relpath) + if time.time() - start > 0.3: yield # let other stuff run - if not _continue(): + if _device_not_valid(device): break start = time.time() - if app.device_manager.running and os.path.exists(device.mount): - # we don't re-check if the device is hidden because we still want to - # save the items we found in that case - yield # yield after prep work - - device.database.setdefault(u'sync', {}) - logging.debug('scanned %s, found %i files (%i total)', - device.mount, len(item_data), - len(known_files) + len(item_data)) - - device.database.set_bulk_mode(True) - start = time.time() - for ufilename, item_type in item_data: - i = DeviceItem(video_path=ufilename, - file_type=item_type, - device=device) - device.database[item_type][ufilename] = i.to_dict() - device.database.emit('item-added', i) - if time.time() - start > 0.4: - device.database.set_bulk_mode(False) # save the database - yield # let other idle functions run - if not _continue(): - break - device.database.set_bulk_mode(True) - start = time.time() + yield # yield after prep work + if _device_not_valid(device): + return - 
device.database.set_bulk_mode(False) + device.database.setdefault(u'sync', {}) + logging.debug('scanned %r, found %i files (%i total)', + device.mount, len(found_files), + len(known_files) + len(found_files)) + + found_files_iter = iter(found_files) + while not _create_items_for_files(device, found_files_iter, 0.4): + # _create_items_for_files hit our timeout. let other idle + # functions run for a bit + yield + if _device_not_valid(device): + break + +def _create_items_for_files(device, path_iter, timeout): + """Create a batch of DeviceItems + + :param device: DeviceInfo to create the items for + :param path_iter: iterator that yields paths to create (must be relative + to the device mount) + :param timeout: stop after this many seconds + :returns: True if we exhausted the iterator, False if we stopped because we + hit the timeout + """ + start = time.time() + device.db_info.bulk_sql_manager.start() + try: + while time.time() - start < timeout: + try: + path = path_iter.next() + except StopIteration: + # path_iter has been exhausted, return True + return True + try: + item.DeviceItem(device, path) + except StandardError: + logging.exception("Error adding DeviceItem: %r", path) + # we timed out, return False + return False + finally: + device.db_info.bulk_sql_manager.finish() diff -Nru miro-4.0.4/lib/dialogs.py miro-6.0/lib/dialogs.py --- miro-4.0.4/lib/dialogs.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/dialogs.py 2013-04-05 16:02:42.000000000 +0000 @@ -81,57 +81,63 @@ from miro import eventloop from miro import signals from miro import util -from miro.gtcache import gettext as _ +from miro.gtcache import gettext_lazy, gettext as _ class DialogButton(object): def __init__(self, text): - self.text = text + self._text = text def __eq__(self, other): return isinstance(other, DialogButton) and self.text == other.text def __str__(self): return "DialogButton(%r)" % util.stringify(self.text) - -BUTTON_OK = DialogButton(_("OK")) -BUTTON_APPLY = DialogButton(_("Apply")) -BUTTON_CLOSE = DialogButton(_("Close")) -BUTTON_CANCEL = DialogButton(_("Cancel")) -BUTTON_DONE = DialogButton(_("Done")) -BUTTON_YES = DialogButton(_("Yes")) -BUTTON_NO = DialogButton(_("No")) -BUTTON_QUIT = DialogButton(_("Quit")) -BUTTON_CONTINUE = DialogButton(_("Continue")) -BUTTON_IGNORE = DialogButton(_("Ignore")) -BUTTON_IMPORT_FILES = DialogButton(_("Import Files")) -BUTTON_SUBMIT_REPORT = DialogButton(_("Submit Crash Report")) -BUTTON_MIGRATE = DialogButton(_("Migrate")) -BUTTON_DONT_MIGRATE = DialogButton(_("Don't Migrate")) -BUTTON_DOWNLOAD = DialogButton(_("Download")) -BUTTON_REMOVE_ENTRY = DialogButton(_("Remove Entry")) -BUTTON_DELETE_FILE = DialogButton(_("Delete File")) -BUTTON_DELETE_FILES = DialogButton(_("Delete Files")) -BUTTON_KEEP_VIDEOS = DialogButton(_("Keep Videos")) -BUTTON_DELETE_VIDEOS = DialogButton(_("Delete Videos")) -BUTTON_CREATE = DialogButton(_("Create")) -BUTTON_CREATE_FEED = DialogButton(_("Create Podcast")) -BUTTON_CREATE_FOLDER = DialogButton(_("Create Folder")) -BUTTON_ADD_FOLDER = DialogButton(_("Add Folder")) -BUTTON_ADD = DialogButton(_("Add")) -BUTTON_ADD_INTO_NEW_FOLDER = DialogButton(_("Add Into New Folder")) -BUTTON_KEEP = DialogButton(_("Keep")) -BUTTON_DELETE = DialogButton(_("Delete")) -BUTTON_REMOVE = DialogButton(_("Remove")) -BUTTON_NOT_NOW = DialogButton(_("Not Now")) -BUTTON_CLOSE_TO_TRAY = DialogButton(_("Close to Tray")) -BUTTON_LAUNCH_MIRO = DialogButton(_("Launch Miro")) -BUTTON_DOWNLOAD_ANYWAY = DialogButton(_("Download Anyway")) -BUTTON_OPEN_IN_EXTERNAL_BROWSER 
= DialogButton(_("Open in External Browser")) -BUTTON_DONT_INSTALL = DialogButton(_("Don't Install")) -BUTTON_SUBSCRIBE = DialogButton(_("Subscribe")) -BUTTON_STOP_WATCHING = DialogButton(_("Stop Watching")) -BUTTON_RETRY = DialogButton(_("Retry")) -BUTTON_START_FRESH = DialogButton(_("Start Fresh")) -BUTTON_INCLUDE_DATABASE = DialogButton(_("Include Database")) -BUTTON_DONT_INCLUDE_DATABASE = DialogButton(_("Don't Include Database")) + @property + def text(self): + return unicode(self._text) + +BUTTON_OK = DialogButton(gettext_lazy("OK")) +BUTTON_APPLY = DialogButton(gettext_lazy("Apply")) +BUTTON_CLOSE = DialogButton(gettext_lazy("Close")) +BUTTON_CANCEL = DialogButton(gettext_lazy("Cancel")) +BUTTON_DONE = DialogButton(gettext_lazy("Done")) +BUTTON_YES = DialogButton(gettext_lazy("Yes")) +BUTTON_NO = DialogButton(gettext_lazy("No")) +BUTTON_QUIT = DialogButton(gettext_lazy("Quit")) +BUTTON_CONTINUE = DialogButton(gettext_lazy("Continue")) +BUTTON_IGNORE = DialogButton(gettext_lazy("Ignore")) +BUTTON_IMPORT_FILES = DialogButton(gettext_lazy("Import Files")) +BUTTON_SUBMIT_REPORT = DialogButton(gettext_lazy("Submit Crash Report")) +BUTTON_MIGRATE = DialogButton(gettext_lazy("Migrate")) +BUTTON_DONT_MIGRATE = DialogButton(gettext_lazy("Don't Migrate")) +BUTTON_DOWNLOAD = DialogButton(gettext_lazy("Download")) +BUTTON_REMOVE_ENTRY = DialogButton(gettext_lazy("Remove Entry")) +BUTTON_DELETE_FILE = DialogButton(gettext_lazy("Delete File")) +BUTTON_DELETE_FILES = DialogButton(gettext_lazy("Delete Files")) +BUTTON_KEEP_VIDEOS = DialogButton(gettext_lazy("Keep Videos")) +BUTTON_DELETE_VIDEOS = DialogButton(gettext_lazy("Delete Videos")) +BUTTON_CREATE = DialogButton(gettext_lazy("Create")) +BUTTON_CREATE_FEED = DialogButton(gettext_lazy("Create Podcast")) +BUTTON_CREATE_FOLDER = DialogButton(gettext_lazy("Create Folder")) +BUTTON_CHOOSE_NEW_FOLDER = DialogButton(gettext_lazy("Choose New Folder")) +BUTTON_ADD_FOLDER = DialogButton(gettext_lazy("Add Folder")) +BUTTON_ADD = DialogButton(gettext_lazy("Add")) +BUTTON_ADD_INTO_NEW_FOLDER = DialogButton(gettext_lazy("Add Into New Folder")) +BUTTON_KEEP = DialogButton(gettext_lazy("Keep")) +BUTTON_DELETE = DialogButton(gettext_lazy("Delete")) +BUTTON_REMOVE = DialogButton(gettext_lazy("Remove")) +BUTTON_NOT_NOW = DialogButton(gettext_lazy("Not Now")) +BUTTON_CLOSE_TO_TRAY = DialogButton(gettext_lazy("Close to Tray")) +BUTTON_LAUNCH_MIRO = DialogButton(gettext_lazy("Launch Miro")) +BUTTON_DOWNLOAD_ANYWAY = DialogButton(gettext_lazy("Download Anyway")) +BUTTON_OPEN_IN_EXTERNAL_BROWSER = DialogButton(gettext_lazy( + "Open in External Browser")) +BUTTON_DONT_INSTALL = DialogButton(gettext_lazy("Don't Install")) +BUTTON_SUBSCRIBE = DialogButton(gettext_lazy("Subscribe")) +BUTTON_STOP_WATCHING = DialogButton(gettext_lazy("Stop Watching")) +BUTTON_RETRY = DialogButton(gettext_lazy("Retry")) +BUTTON_START_FRESH = DialogButton(gettext_lazy("Start Fresh")) +BUTTON_INCLUDE_DATABASE = DialogButton(gettext_lazy("Include Database")) +BUTTON_DONT_INCLUDE_DATABASE = DialogButton(gettext_lazy( + "Don't Include Database")) class Dialog(object): """Abstract base class for dialogs. @@ -168,6 +174,12 @@ "%s callback" % self.__class__, args=(self,)) + def __str__(self): + button_text = '/'.join(b.text for b in self.buttons) + return "%s (text: %s, buttons: %s)" % (self.__class__, + self.title, + button_text) + class MessageBoxDialog(Dialog): """Show the user some info in a dialog box. The only button is Okay. The callback is optional for a message box dialog. 
@@ -185,6 +197,15 @@ def __init__(self, title, description, default_button, other_button): super(ChoiceDialog, self).__init__(title, description, [default_button, other_button]) +class DatabaseErrorDialog(ChoiceDialog): + """ChoiceDialog that we show when we see a database error. + + Frontends should call app.db_error_handler.run_backend_dialog() instead + of running the normal code. + """ + def __init__(self, title, description): + super(DatabaseErrorDialog, self).__init__(title, description, + BUTTON_RETRY, BUTTON_QUIT) class ThreeChoiceDialog(Dialog): """Give the user a choice of 3 options (e.g. Remove entry/ diff -Nru miro-4.0.4/lib/displaytext.py miro-6.0/lib/displaytext.py --- miro-4.0.4/lib/displaytext.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/displaytext.py 2013-04-05 16:02:42.000000000 +0000 @@ -57,6 +57,8 @@ return nbytes.decode('utf-8', 'replace') def download_rate(rate): + if rate is None: + return "" if rate >= (1 << 30): value = "%1.1f" % (rate / float(1 << 30)) return _("%(size)s GB/s", {"size": value}) @@ -115,11 +117,7 @@ return time_string(secs) def size_string(nbytes): - # when switching from the enclosure reported size to the - # downloader reported size, it takes a while to get the new size - # and the downloader returns -1. the user sees the size go to -1B - # which is weird.... better to return an empty string. - if nbytes == -1 or nbytes == 0: + if nbytes is None: return "" # FIXME this is a repeat of util.format_size_for_user ... should diff -Nru miro-4.0.4/lib/dl_daemon/command.py miro-6.0/lib/dl_daemon/command.py --- miro-4.0.4/lib/dl_daemon/command.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/dl_daemon/command.py 2013-04-05 16:02:42.000000000 +0000 @@ -109,17 +109,17 @@ from miro import httpauth httpauth.remove_by_url_and_realm(*self.args) -class UpdateDownloadStatus(Command): - def action(self): - from miro.downloader import RemoteDownloader - return RemoteDownloader.update_status(*self.args, **self.kws) - class BatchUpdateDownloadStatus(Command): spammy = True def action(self): from miro.downloader import RemoteDownloader - for status in self.args[0]: - RemoteDownloader.update_status(status) + from miro.messages import DownloaderSyncCommandComplete + + cmd_done = self.args[1] + fresh = all(RemoteDownloader.update_status(status, cmd_done=cmd_done) + for status in self.args[0]) + if cmd_done and fresh: + DownloaderSyncCommandComplete().send_to_frontend() class DownloaderErrorCommand(Command): def action(self): @@ -154,6 +154,9 @@ ############################################################################# # App to Downloader commands # ############################################################################# + +# XXX why do we have so much junk here when we can multiplex this stuff with +# a cmd parameter? 
-gl class InitialConfigCommand(Command): def action(self): app.config.set_dictionary(*self.args, **self.kws) @@ -169,46 +172,53 @@ from miro.dl_daemon.private import httpauth httpauth.update_passwords(*self.args) -class StartNewDownloadCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.start_new_download(*self.args, **self.kws) - -class StartDownloadCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.start_download(*self.args, **self.kws) - -class PauseDownloadCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.pause_download(*self.args, **self.kws) - -class StopDownloadCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.stop_download(*self.args, **self.kws) +# Downloader Daemon start/stop/resume demux. +# +# This is the class that contains the action handler for commands which may +# be marshalled and sent to us via the DownloadStateManager(). +# +# Here, they are demuxed, and then dispatched to the downloader. +class DownloaderBatchCommand(Command): + STOP = 0 + RESUME = 1 + PAUSE = 2 + RESTORE = 4 -class StopUploadCommand(Command): def action(self): from miro.dl_daemon import download - return download.stop_upload(*self.args, **self.kws) - -class PauseUploadCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.pause_upload(*self.args, **self.kws) - -class GetDownloadStatusCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.get_download_status(*self.args, **self.kws) - -class RestoreDownloaderCommand(Command): - def action(self): - from miro.dl_daemon import download - return download.restore_downloader(*self.args, **self.kws) - + mark_reply = True + for dlid, (cmd, args) in self.args[0].iteritems(): + if cmd == self.PAUSE: + upload = args['upload'] + if upload: + download.pause_upload(dlid) + else: + download.pause_download(dlid) + elif cmd == self.STOP: + upload = args['upload'] + if upload: + download.stop_upload(dlid) + else: + download.stop_download(dlid, args['delete']) + elif cmd == self.RESUME: + channel_name = args['channel_name'] + url = args['url'] + content_type = args['content_type'] + download.start_download(url, dlid, content_type, channel_name) + elif cmd == self.RESTORE: + # Restoring a downloader doesn't actually change any state + # so don't reply. + mark_reply = False + downloader = args['downloader'] + download.restore_downloader(downloader) + else: + raise ValueError('unknown downloader batch command %s' % cmd) + # Mark this so that the next time we run through the periodic update + # which will be after all the above have been processed because we + # are in the same thread. + if mark_reply: + download.DOWNLOAD_UPDATER.set_cmds_done() + class MigrateDownloadCommand(Command): def action(self): from miro.dl_daemon import download diff -Nru miro-4.0.4/lib/dl_daemon/daemon.py miro-6.0/lib/dl_daemon/daemon.py --- miro-4.0.4/lib/dl_daemon/daemon.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/dl_daemon/daemon.py 2013-04-05 16:02:42.000000000 +0000 @@ -45,9 +45,9 @@ from miro.net import ConnectionHandler from miro import util -SIZE_OF_INT = calcsize("I") +SIZEOF_LONG = calcsize("Q") -class DaemonError(Exception): +class DaemonError(StandardError): """Exception while communicating to a daemon (either controller or downloader). 
""" @@ -163,8 +163,8 @@ self.queued_commands = [] def on_size(self): - if self.buffer.length >= SIZE_OF_INT: - (self.size,) = unpack("I", self.buffer.read(SIZE_OF_INT)) + if self.buffer.length >= SIZEOF_LONG: + (self.size,) = unpack("Q", self.buffer.read(SIZEOF_LONG)) self.change_state('command') def on_command(self): @@ -193,7 +193,7 @@ self.queued_commands.append((comm, callback)) else: raw = cPickle.dumps(comm, cPickle.HIGHEST_PROTOCOL) - self.send_data(pack("I", len(raw)) + raw, callback) + self.send_data(pack("Q", len(raw)) + raw, callback) class DownloaderDaemon(Daemon): def __init__(self, host, port, short_app_name): diff -Nru miro-4.0.4/lib/dl_daemon/download.py miro-6.0/lib/dl_daemon/download.py --- miro-4.0.4/lib/dl_daemon/download.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/dl_daemon/download.py 2013-04-05 16:02:42.000000000 +0000 @@ -79,18 +79,7 @@ elif content_type == u'application/x-magnet': return BTDownloader(None, dlid, magnet=url) else: - return HTTPDownloader(url, dlid, expectedContentType=content_type) - -def start_new_download(url, dlid, content_type, channel_name): - """Creates a new downloader object. - """ - check_u(url) - check_u(content_type) - if channel_name: - check_f(channel_name) - dl = create_downloader(url, content_type, dlid) - dl.channelName = channel_name - _downloads[dlid] = dl + return HTTPDownloader(url, dlid, expected_content_type=content_type) def pause_download(dlid): """Pauses a download by download id. @@ -113,16 +102,19 @@ """ return long(str(info_hash), 16) -def start_download(dlid): +def start_download(url, dlid, content_type, channel_name): try: download = _downloads[dlid] + download.start() except KeyError: - # There is no download with this id - err = u"in start_download(): no downloader with id %s" % dlid - c = command.DownloaderErrorCommand(daemon.LAST_DAEMON, err) - c.send() - return True - return download.start() + # There is no download with this id. This is a new download. + check_u(url) + check_u(content_type) + if channel_name: + check_f(channel_name) + dl = create_downloader(url, content_type, dlid) + dl.channel_name = channel_name + _downloads[dlid] = dl def stop_download(dlid, delete): _lock.acquire() @@ -134,7 +126,6 @@ return True finally: _lock.release() - return download.stop(delete) def stop_upload(dlid): @@ -197,6 +188,9 @@ _downloads[dlid].shutdown() logging.info("Shutting down torrent session...") TORRENT_SESSION.shutdown() + # Flush the status updates. 
+ logging.info('flushing status updates...') + DOWNLOAD_UPDATER.flush_update() logging.info("shutdown() finished") def restore_downloader(downloader): @@ -206,13 +200,13 @@ return downloader = copy(downloader) - dler_type = downloader.get('dlerType') - if dler_type == u'HTTP': + type_ = downloader.get('type') + if type_ == u'HTTP': dl = HTTPDownloader(restore=downloader) - elif dler_type == u'BitTorrent': + elif type_ == u'BitTorrent': dl = BTDownloader(restore=downloader) else: - err = u"in restore_downloader(): unknown dlerType: %s" % dler_type + err = u"in restore_downloader(): unknown type: %s" % type_ c = command.DownloaderErrorCommand(daemon.LAST_DAEMON, err) c.send() return @@ -253,6 +247,7 @@ self.set_dht() self.set_upload_limit() self.set_download_limit() + self.set_connection_limit() self.set_encryption() self.callback_handle = app.downloader_config_watcher.connect('changed', self.on_config_changed) @@ -421,24 +416,35 @@ def __init__(self): self.to_update = set() + self.cmds_done = False def start_updates(self): eventloop.add_timeout(self.UPDATE_CLIENT_INTERVAL, self.do_update, "Download status update") - def do_update(self): + def flush_update(self): + self.do_update(periodic=False) + + def do_update(self, periodic=True): try: TORRENT_SESSION.update_torrents() statuses = [] for downloader in self.to_update: statuses.append(downloader.get_status()) self.to_update = set() - if statuses: + if statuses or self.cmds_done: command.BatchUpdateDownloadStatus(daemon.LAST_DAEMON, - statuses).send() + statuses, + self.cmds_done).send() + self.cmds_done = False finally: - eventloop.add_timeout(self.UPDATE_CLIENT_INTERVAL, self.do_update, - "Download status update") + if periodic: + eventloop.add_timeout(self.UPDATE_CLIENT_INTERVAL, + self.do_update, + "Download status update") + + def set_cmds_done(self): + self.cmds_done = True def queue_update(self, downloader): self.to_update.add(downloader) @@ -461,16 +467,16 @@ def __init__(self, url, dlid): self.dlid = dlid self.url = url - self.startTime = clock() - self.endTime = self.startTime - self.shortFilename = filename_from_url(url) + self.start_time = int(clock()) + self.end_time = None + self.short_filename = filename_from_url(url) self.pick_initial_filename() self.state = u"downloading" - self.currentSize = 0 - self.totalSize = -1 - self.shortReasonFailed = self.reasonFailed = u"No Error" - self.retryTime = None - self.retryCount = -1 + self.current_size = 0 + self.total_size = None + self.short_reason_failed = self.reason_failed = u"No Error" + self.retry_time = None + self.retry_count = None def get_url(self): return self.url @@ -479,29 +485,31 @@ return {'dlid': self.dlid, 'url': self.url, 'state': self.state, - 'totalSize': self.totalSize, - 'currentSize': self.currentSize, + 'total_size': self.total_size, + 'current_size': self.current_size, 'eta': self.get_eta(), 'rate': self.get_rate(), - 'uploaded': 0, + 'upload_size': 0, 'filename': self.filename, - 'startTime': self.startTime, - 'endTime': self.endTime, - 'shortFilename': self.shortFilename, - 'reasonFailed': self.reasonFailed, - 'shortReasonFailed': self.shortReasonFailed, - 'dlerType': None, - 'retryTime': self.retryTime, - 'retryCount': self.retryCount, - 'channelName': self.channelName} - - def update_client(self): - x = command.UpdateDownloadStatus(daemon.LAST_DAEMON, self.get_status()) - return x.send() + 'start_time': self.start_time, + 'end_time': self.end_time, + 'short_filename': self.short_filename, + 'reason_failed': self.reason_failed, + 'short_reason_failed': 
self.short_reason_failed, + 'type': None, + 'retry_time': self.retry_time, + 'retry_count': self.retry_count} + + def update_client(self, now=False): + if not now: + DOWNLOAD_UPDATER.queue_update(self) + else: + command.BatchUpdateDownloadStatus(daemon.LAST_DAEMON, + [self.get_status()]).send() def pick_initial_filename(self, suffix=".part", torrent=False, - is_directory=False): - """Pick a path to download to based on self.shortFilename. + is_directory=False, exists=False): + """Pick a path to download to based on self.short_filename. This method sets self.filename, as well as creates any leading paths needed to start downloading there. @@ -512,26 +520,33 @@ ascii and needs to be transformed into something sane. :param is_directory: If True, we're really creating a directory--not a file. + :param exists: If True, libtorrent has already created the + file/directory """ download_dir = os.path.join(app.config.get(prefs.MOVIES_DIRECTORY), 'Incomplete Downloads') # Create the download directory if it doesn't already exist. if not os.path.exists(download_dir): fileutil.makedirs(download_dir) - filename = self.shortFilename + suffix + filename = self.short_filename + suffix if not torrent: # this is an ascii filename and needs to be fixed filename = clean_filename(filename) - - full_path = os.path.join(download_dir, filename) - if is_directory: - # if this is a torrent and it's a directory of files, then - # we create a temp directory to put the directory of files - # in. - new_filename = next_free_directory(full_path) - else: - new_filename, fp = next_free_filename(full_path) - fp.close() + new_filename = os.path.join(download_dir, filename) + if not exists or not os.path.exists(new_filename): + if exists: + # if the file/directory is supposed to exist and doesn't, just + # create it + logging.warn("f/d was supposed to exist, but does not: %s", + new_filename) + if is_directory: + # if this is a torrent and it's a directory of files, then + # we create a temp directory to put the directory of files + # in. + new_filename = next_free_directory(new_filename) + else: + new_filename, fp = next_free_filename(new_filename) + fp.close() self.filename = new_filename def move_to_movies_directory(self): @@ -573,8 +588,8 @@ def move_to_directory(self, directory): check_f(directory) - if self.channelName: - channel_name = filter_directory_name(self.channelName) + if self.channel_name: + channel_name = filter_directory_name(self.channel_name) # bug 10769: shutil and windows has problems with long # filenames, so we clip the directory name. if len(channel_name) > 80: @@ -583,15 +598,22 @@ directory = self._ensure_directory_exists(directory) src = self.filename - dest = os.path.join(directory, self.shortFilename) + dest = os.path.join(directory, self.short_filename) if src == dest: return - if os.path.isdir(src): - dest = next_free_directory(dest) - else: - dest, fp = next_free_filename(dest) - fp.close() + try: + is_dir = os.path.isdir(src) + if is_dir: + dest = next_free_directory(dest) + else: + dest, fp = next_free_filename(dest) + fp.close() + except ValueError: + func = 'next_free_directory' if is_dir else 'next_free_filename' + logging.warn('move_to_directory: %s failed. candidate = %r', + func, dest) + return def callback(): # for torrent of a directory of files, we want to remove @@ -608,64 +630,66 @@ fileutil.migrate_file(src, dest, callback) def get_eta(self): - """Returns a float with the estimated number of seconds left. + """Returns a int with the estimated number of seconds left. 
""" - if self.totalSize == -1: - return -1 + if self.total_size is None: + return None rate = self.get_rate() if rate > 0: - return (self.totalSize - self.currentSize) / rate + return (self.total_size - self.current_size) // rate else: return 0 def get_rate(self): - """Returns a float with the download rate in bytes per second + """Returns a int with the download rate in bytes per second """ - if self.endTime != self.startTime: - rate = self.currentSize / (self.endTime - self.startTime) - else: - rate = self.rate - return rate + return self.rate def retry_download(self): - self.retryDC = None + self.retry_dc = None self.start(resume=False) def handle_temporary_error(self, short_reason, reason): self.state = u"offline" - self.endTime = self.startTime = 0 - self.rate = 0 - self.reasonFailed = reason - self.shortReasonFailed = short_reason - self.retryCount = self.retryCount + 1 - if self.retryCount >= len(RETRY_TIMES): - self.retryCount = len(RETRY_TIMES) - 1 - self.retryDC = eventloop.add_timeout( - RETRY_TIMES[self.retryCount], self.retry_download, + self.start_time = 0 + self.end_time = None + self.rate = None + self.reason_failed = reason + self.short_reason_failed = short_reason + if self.retry_count is None: + self.retry_count = 0 + else: + self.retry_count = self.retry_count + 1 + if self.retry_count >= len(RETRY_TIMES): + self.retry_count = len(RETRY_TIMES) - 1 + self.retry_dc = eventloop.add_timeout( + RETRY_TIMES[self.retry_count], self.retry_download, "Logarithmic retry") now = datetime.datetime.now() - self.retryTime = now + datetime.timedelta( - seconds=RETRY_TIMES[self.retryCount]) + self.retry_time = now + datetime.timedelta( + seconds=RETRY_TIMES[self.retry_count]) logging.warning("Temporary error: '%s' '%s'. retrying at %s %s", - short_reason, reason, self.retryTime, self.retryCount) + short_reason, reason, self.retry_time, + self.retry_count) self.update_client() def handle_error(self, short_reason, reason): self.state = u"failed" - self.reasonFailed = reason - self.shortReasonFailed = short_reason + self.reason_failed = reason + self.short_reason_failed = short_reason self.update_client() def handle_network_error(self, error): if isinstance(error, httpclient.NetworkError): if (isinstance(error, (httpclient.MalformedURL, - httpclient.UnknownHostError, httpclient.AuthorizationFailed, httpclient.ProxyAuthorizationFailed, + httpclient.TooManyRedirects, + httpclient.InvalidRedirect, httpclient.UnexpectedStatusCode))): self.handle_error(error.getFriendlyDescription(), error.getLongDescription()) - self.retryCount = -1 # reset retryCount + self.retry_count = None # reset retry_count else: self.handle_temporary_error(error.getFriendlyDescription(), error.getLongDescription()) @@ -687,7 +711,14 @@ size = 0 preserved = (app.config.get(prefs.PRESERVE_X_GB_FREE) * 1024 * 1024 * 1024) - available = get_available_bytes_for_movies() - preserved + # below code gets the additional space we expect to be taken by + # in-progress downloads. total_size is None when the download + # doesn't know how big it is. 
+ additional_space = sum(dl.total_size - dl.current_size + for dl in _downloads.values() + if dl.total_size is not None) + available = (get_available_bytes_for_movies() - preserved - + additional_space) accept = (size <= available) return accept @@ -695,22 +726,18 @@ CHECK_STATS_TIMEOUT = 1.0 def __init__(self, url=None, dlid=None, restore=None, - expectedContentType=None): - self.retryDC = None - self.channelName = None - self.expectedContentType = expectedContentType + expected_content_type=None): + self.retry_dc = None + self.channel_name = None + self.expected_content_type = expected_content_type if restore is not None: - if not isinstance(restore.get('totalSize', 0), int): - # Sometimes restoring old downloaders caused errors - # because their totalSize wasn't an int. (see #3965) - restore['totalSize'] = int(restore['totalSize']) self.__dict__.update(restore) self.restartOnError = True else: BGDownloader.__init__(self, url, dlid) self.restartOnError = False self.client = None - self.rate = 0 + self.rate = None if self.state == u'downloading': self.start_download() elif self.state == u'offline': @@ -720,14 +747,14 @@ def start_new_download(self): """Start a download, discarding any existing data""" - self.currentSize = 0 - self.totalSize = -1 + self.current_size = 0 + self.total_size = None self.start_download(resume=False) def start_download(self, resume=True): - if self.retryDC: - self.retryDC.cancel() - self.retryDC = None + if self.retry_dc: + self.retry_dc.cancel() + self.retry_dc = None if resume: resume = self._resume_sanity_check() @@ -737,9 +764,7 @@ self.url, self.on_download_finished, self.on_download_error, header_callback=self.on_headers, write_file=self.filename, resume=resume) - self.update_client() - eventloop.add_timeout(self.CHECK_STATS_TIMEOUT, self.update_stats, - 'update http downloader stats') + self.update_stats() def _resume_sanity_check(self): """Do sanity checks to test if we should try HTTP Resume. @@ -752,15 +777,15 @@ # size. In particular, before the libcurl change, we would # preallocate the entire file, so we need to undo this. file_size = os.stat(self.filename)[stat.ST_SIZE] - if file_size > self.currentSize: + if file_size > self.current_size: # use logging.info rather than warn, since this is the # usual case from upgrading from 3.0.x to 3.1 - logging.info("File larger than currentSize: truncating. " + logging.info("File larger than current_size: truncating. " "url: %s, path: %s.", self.url, self.filename) f = open(self.filename, "ab") - f.truncate(self.currentSize) + f.truncate(self.current_size) f.close() - elif file_size < self.currentSize: + elif file_size < self.current_size: # Data got deleted somehow. Let's start over. logging.warn("File doesn't contain enough data to resume. 
" "url: %s, path: %s.", self.url, self.filename) @@ -778,9 +803,9 @@ self.client.cancel(remove_file=remove_file) self.destroy_client() # if it's in a retrying state, we want to nix that, too - if self.retryDC: - self.retryDC.cancel() - self.retryDC = None + if self.retry_dc: + self.retry_dc.cancel() + self.retry_dc = None def handle_error(self, short_reason, reason): BGDownloader.handle_error(self, short_reason, reason) @@ -790,47 +815,47 @@ fileutil.remove(self.filename) except OSError: pass - self.currentSize = 0 - self.totalSize = -1 + self.current_size = 0 + self.total_size = None def handle_temporary_error(self, short_reason, reason): self.cancel_request() BGDownloader.handle_temporary_error(self, short_reason, reason) def handle_move_error(self, error): - logging.exception("Error moving to movies directory") - logging.warn("filename: %s, shortFilename: %s, movies directory: %s", - self.filename, self.shortFilename, + logging.exception("Error moving to movies directory\n" + "filename: %s, short_filename: %s, movies directory: %s", + self.filename, self.short_filename, app.config.get(prefs.MOVIES_DIRECTORY)) text = _("Error moving to movies directory") self.handle_generic_error(text) def on_headers(self, info): if 'total-size' in info: - self.totalSize = info['total-size'] - if not self.accept_download_size(self.totalSize): + self.total_size = info['total-size'] + if not self.accept_download_size(self.total_size): self.handle_error(_("Not enough disk space"), _("%(amount)s MB required to store this video", - {"amount": self.totalSize / (2 ** 20)})) + {"amount": self.total_size / (2 ** 20)})) return - # We should successfully download the file. Reset retryCount + # We should successfully download the file. Reset retry_count # and accept defeat if we see an error. self.restartOnError = False - # update shortFilename based on the headers. This will affect + # update short_filename based on the headers. 
This will affect # how we move the file once the download is finished - self.shortFilename = clean_filename(info['filename']) - if self.expectedContentType is not None: - ext_content_type = self.expectedContentType + self.short_filename = clean_filename(info['filename']) + if self.expected_content_type is not None: + ext_content_type = self.expected_content_type else: ext_content_type = info.get('content-type') - self.shortFilename = check_filename_extension(self.shortFilename, + self.short_filename = check_filename_extension(self.short_filename, ext_content_type) def on_download_error(self, error): if isinstance(error, httpclient.ResumeFailed): # try starting from scratch - self.currentSize = 0 - self.totalSize = -1 + self.current_size = 0 + self.total_size = None self.start_new_download() elif isinstance(error, httpclient.AuthorizationCanceled): self.destroy_client() @@ -845,16 +870,17 @@ def on_download_finished(self, response): self.destroy_client() self.state = u"finished" - self.endTime = clock() + self.end_time = int(clock()) + self.rate = None # bug 14131 -- if there's nothing here, treat it like a temporary # error - if self.currentSize == 0: + if self.current_size == 0: self.handle_network_error(httpclient.PossiblyTemporaryError( _("no content"))) else: - if self.totalSize == -1: - self.totalSize = self.currentSize + if self.total_size is None: + self.total_size = self.current_size try: self.move_to_movies_directory() except (OSError, IOError), e: @@ -863,7 +889,7 @@ def get_status(self): data = BGDownloader.get_status(self) - data['dlerType'] = 'HTTP' + data['type'] = 'HTTP' return data def update_stats(self): @@ -874,14 +900,14 @@ return stats = self.client.get_stats() if stats.status_code in (200, 206): - # Only upload currentSize/rate if we are currently + # Only upload current_size/rate if we are currently # downloading something. Don't change them before the # transfer starts, while we are handling redirects, etc. - self.currentSize = stats.downloaded + stats.initial_size + self.current_size = stats.downloaded + stats.initial_size self.rate = stats.download_rate eventloop.add_timeout(self.CHECK_STATS_TIMEOUT, self.update_stats, 'update http downloader stats') - DOWNLOAD_UPDATER.queue_update(self) + self.update_client() def pause(self): """Pauses the download. 
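The update_stats change above is part of a broader shift from per-downloader status messages to batching: update_client() now just queues the downloader with DOWNLOAD_UPDATER, and a periodic timer flushes one BatchUpdateDownloadStatus command for everything that changed. A minimal sketch of that coalescing pattern (illustrative only: StatusBatcher and send_batch are made-up names; the add_timeout parameter stands in for a scheduler like eventloop.add_timeout):

    # Coalesce frequent per-object status changes into one periodic batch.
    class StatusBatcher(object):
        INTERVAL = 1.0  # seconds between flushes (illustrative value)

        def __init__(self, send_batch, add_timeout):
            self.send_batch = send_batch    # callable taking a list of statuses
            self.add_timeout = add_timeout  # schedules a callback, eventloop-style
            self.pending = set()

        def start(self):
            self.add_timeout(self.INTERVAL, self.flush, 'status flush')

        def queue_update(self, downloader):
            # cheap: just remember who changed; a set dedupes repeats
            self.pending.add(downloader)

        def flush(self):
            try:
                statuses = [dl.get_status() for dl in self.pending]
                self.pending = set()
                if statuses:
                    self.send_batch(statuses)
            finally:
                # always re-arm the timer, even if a get_status() call raised
                self.add_timeout(self.INTERVAL, self.flush, 'status flush')

This mirrors the structure of the daemon's DownloadUpdater.do_update(), which likewise re-schedules itself in a finally block so one bad status dump cannot stop future updates.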
@@ -908,7 +934,7 @@ # Cancel the request, don't keep around partially # downloaded data self.cancel_request(remove_file=True) - self.currentSize = 0 + self.current_size = 0 self.state = u"stopped" self.update_client() @@ -959,7 +985,7 @@ return try: - with open(fast_resume_file, "wb") as f: + with open(fast_resume_file, 'wb') as f: f.write(fast_resume_data) except (OSError, IOError): logging.exception("Error occurred trying to write fast_resume_data") @@ -1015,17 +1041,18 @@ def __init__(self, url=None, item=None, restore=None, magnet=None): self.metainfo = None self.torrent = None - self.rate = self.eta = 0 - self.upRate = self.uploaded = 0 + self.rate = self.eta = None + self.upload_rate = None + self.upload_size = 0 self.activity = None self.fast_resume_data = None - self.retryDC = None - self.channelName = None - self.uploadedStart = 0 + self.retry_dc = None + self.channel_name = None + self.uploaded_start = 0 self.restarting = False - self.seeders = -1 - self.leechers = -1 - self.connections = -1 + self.seeders = None + self.leechers = None + self.connections = None self.metainfo_updated = False self.info_hash = None self.magnet = magnet @@ -1051,22 +1078,22 @@ else: torrent_info = lt.torrent_info(lt.bdecode(self.metainfo)) params["ti"] = torrent_info - self.totalSize = torrent_info.total_size() + self.total_size = torrent_info.total_size() duplicate = TORRENT_SESSION.find_duplicate_torrent( params["ti"]) if duplicate is not None: c = command.DuplicateTorrent(daemon.LAST_DAEMON, - duplicate.dlid, self.dlid) + duplicate.dlid, self.dlid) c.send() return if self.firstTime and not self.accept_download_size( - self.totalSize): + self.total_size): self.handle_error( _("Not enough disk space"), _("%(amount)s MB required to store this video", - {"amount": self.totalSize / (2 ** 20)}) + {"amount": self.total_size / (2 ** 20)}) ) return @@ -1077,7 +1104,26 @@ params["auto_managed"] = False params["paused"] = False params["duplicate_is_error"] = True - params["storage_mode"] = lt.storage_mode_t.storage_mode_allocate + + # About file allocation: the default scheme that would work is + # using sparse file allocation. With sparse files, physical disk + # space is consumed on an as-needed basis and no shuffling of the + # individual file chunks is required (missing ones are zero-filled + # on the fly and don't take up disk space). Unfortunately not + # all filesystems support sparse files and on those that don't + # the results are less than ideal. In particular, pausing + # immediately after a large file is started can take a long time + # because the close() of the torrent file handle must wait till + # all bytes are zero-filled. Using compact mode has the + # disadvantage of extra disk i/o because pieces are moved + # on the fly rather than being placed into their logical location + # within the file, but it makes everything else usable again. + if self.magnet: + # unfortunately, compact allocation doesn't work for Magnet + # links, so we revert to the allocate mode + params["storage_mode"] = lt.storage_mode_t.storage_mode_allocate + else: + params["storage_mode"] = lt.storage_mode_t.storage_mode_compact if self.info_hash: self.fast_resume_data = load_fast_resume_data(self.info_hash) @@ -1205,36 +1251,36 @@ self.rate, self.seeders, self.leechers, - self.currentSize) + self.current_size) def update_status(self): """ activity -- string specifying what's currently happening or None for normal operations. 
- upRate -- upload rate in B/s - downRate -- download rate in B/s - upTotal -- total MB uploaded + upload_rate -- upload rate in B/s + rate -- download rate in B/s + upload_size -- total MB uploaded downTotal -- total MB downloaded fractionDone -- what portion of the download is completed. timeEst -- estimated completion time, in seconds. - totalSize -- total size of the torrent in bytes + total_size -- total size of the torrent in bytes seeders -- number of seeders for this torrent leechers -- number of leechers for this torrent connections -- number of peers we're connected to """ status = self.torrent.status() - self.totalSize = status.total_wanted - self.rate = status.download_payload_rate - self.upRate = status.upload_payload_rate - self.uploaded = status.total_payload_upload + self.uploadedStart + self.total_size = status.total_wanted + self.rate = int(status.download_payload_rate) + self.upload_rate = int(status.upload_payload_rate) + self.upload_size = status.total_payload_upload + self.uploaded_start self.seeders = status.num_complete self.leechers = status.num_incomplete self.connections = status.num_connections try: - self.eta = ((status.total_wanted - status.total_wanted_done) / - float(status.download_payload_rate)) + self.eta = ((status.total_wanted - status.total_wanted_done) // + status.download_payload_rate) except ZeroDivisionError: - self.eta = 0 + self.eta = None # FIXME - this needs some more examination before it's # enabled. @@ -1250,7 +1296,7 @@ else: self.activity = None - self.currentSize = status.total_wanted_done + self.current_size = status.total_wanted_done # these are useful for debugging torrent issues # self._debug_print_status() @@ -1260,14 +1306,14 @@ and status.state == lt.torrent_status.states.seeding)): self.move_to_movies_directory() self.state = u"uploading" - self.endTime = clock() - self.update_client() - else: - DOWNLOAD_UPDATER.queue_update(self) + self.end_time = int(clock()) + self.rate = 0 + + self.update_client() if app.config.get(prefs.LIMIT_UPLOAD_RATIO): if status.state == lt.torrent_status.states.seeding: - if ((float(self.upload_size) / self.totalSize > + if ((float(self.upload_size) / self.total_size > app.config.get(prefs.UPLOAD_RATIO))): self.stop_upload() @@ -1336,9 +1382,9 @@ self.magnet = data['url'] data['url'] = None self.__dict__.update(data) - self.rate = self.eta = 0 - self.upRate = 0 - self.uploadedStart = self.uploaded + self.rate = self.eta = None + self.upload_rate = 0 + self.uploaded_start = self.upload_size if self.state in (u'downloading', u'uploading'): self.run_downloader(done=True) elif self.state == u'offline': @@ -1346,13 +1392,13 @@ def get_status(self): data = BGDownloader.get_status(self) - data['upRate'] = self.upRate - data['uploaded'] = self.uploaded + data['upload_rate'] = self.upload_rate + data['upload_size'] = self.upload_size if self.metainfo_updated: data['metainfo'] = self.metainfo self.metainfo_updated = False data['activity'] = self.activity - data['dlerType'] = 'BitTorrent' + data['type'] = 'BitTorrent' data['seeders'] = self.seeders data['leechers'] = self.leechers data['connections'] = self.connections @@ -1404,9 +1450,9 @@ return self.state = u"downloading" - if self.retryDC: - self.retryDC.cancel() - self.retryDC = None + if self.retry_dc: + self.retry_dc.cancel() + self.retry_dc = None self.update_client() self.get_metainfo() @@ -1438,7 +1484,7 @@ except (KeyError, RuntimeError): self.handle_corrupt_torrent() return - self.shortFilename = utf8_to_filename(name) + self.short_filename = 
utf8_to_filename(name) try: self.pick_initial_filename( suffix="", torrent=True, is_directory=is_directory) @@ -1447,8 +1493,9 @@ # exceptions. # # Not sure if this is correct but if we throw a runtime - # error like above it can't hurt anyone. - except (OSError, IOError): + # error like above it can't hurt anyone. ValueError to catch + # next_free_filename(). + except (ValueError, OSError, IOError): raise RuntimeError self.update_client() self._resume_torrent() @@ -1461,21 +1508,26 @@ """ if not self.torrent.has_metadata(): return - self.shortFilename = utf8_to_filename( + self.short_filename = utf8_to_filename( self.torrent.get_torrent_info().name()) # FIXME: we should determine whether it is a directory # in the same way in got_metainfo and got_delayed_metainfo is_directory = False + multiple_files = False for file_ in self.torrent.get_torrent_info().files(): - if os.sep in file_.path: + # if there is >1 file, we'll go back through the loop and + # multiple_files will be True + if os.sep in file_.path or multiple_files: is_directory = True + break + else: + multiple_files = True - is_directory = (is_directory or - len(self.torrent.get_torrent_info().files()) > 1) try: self.pick_initial_filename( - suffix="", torrent=True, is_directory=is_directory) + suffix="", torrent=True, is_directory=is_directory, + exists=True) # Somewhere deep it calls makedirs() which can throw # exceptions. # @@ -1485,8 +1537,25 @@ raise RuntimeError save_path = self.calc_save_path() self.torrent.move_storage(save_path) + self.metainfo = self.calc_metainfo() + self.metainfo_updated = True + self.update_client() + def calc_metainfo(self): + torrent_info = self.torrent.get_torrent_info() + metainfo = { + 'info': lt.bdecode(torrent_info.metadata()) + } + trackers = list(torrent_info.trackers()) + if trackers: + # which tracker URL should we use? For now, we just use the first + # one in the list. + metainfo['announce'] = trackers[0].url + else: + logging.warn("calc_metainfo(): no announce URL") + return lt.bencode(metainfo) + def handle_corrupt_torrent(self): self.handle_error( _("Corrupt Torrent"), diff -Nru miro-4.0.4/lib/dl_daemon/MiroDownloader.py miro-6.0/lib/dl_daemon/MiroDownloader.py --- miro-4.0.4/lib/dl_daemon/MiroDownloader.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/dl_daemon/MiroDownloader.py 2013-04-05 16:02:42.000000000 +0000 @@ -57,7 +57,7 @@ override_modules() from miro.plat.utils import setup_logging, initialize_locale - setup_logging(in_downloader=True) + setup_logging(os.environ.get("DEMOCRACY_DOWNLOADER_LOG")) util.setup_logging() initialize_locale() diff -Nru miro-4.0.4/lib/donate.py miro-6.0/lib/donate.py --- miro-4.0.4/lib/donate.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/donate.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,250 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""``miro.donatemanager`` -- functions for handling donations +""" + +import logging +import time + +from miro import app +from miro import eventloop +from miro import prefs +from miro import signals + +from miro.frontends.widgets import donate + +from miro.plat.frontends.widgets.threads import call_on_ui_thread + +class DonateManager(object): + """DonateManager: anchor point for donation framework implementation. + + There is frontend stuff and backend stuff both anchored here, by + necessity. Most of the stuff here is backend except: + + (1) when you create the UI components + (2) when you run the UI components + (3) when the UI components issue callbacks in response to user input + + UI components can include the actual donate window as well as the debug + powertoys. + + There are some preferences to do various housekeeping. + + DONATE_PAYMENT_URL_TEMPLATE - keeps the payment URL if user says yes + DONATE_URL_TEMPLATE - a template url used to ask the user for a donation. + The template is transformed into an actual URL + DONATE_ASK{1,2,3} - number of downloads completed before we show + DONATE_URL_TEMPLATE + DONATE_NOTHANKS - number of times user said no thanks to our request + DONATE_COUNTER - countdown timer. When zero, the dialog will be shown. + When re-armed, it should be populated with values + from DONATE_ASK{1,2,3} + LAST_DONATE_TIME - the last time a donate request was accepted. Starts off + as 0 (the epoch), which will make it fail the 6 month + nag test. Used to keep track of whether the user has + accepted a request for donation, or of the case where + we cannot retrieve a rearm count from + donate_ask_thresholds, at which point we don't bother + them again. This is reset every 6 months. + """ + def __init__(self): + self.donate_ask_thresholds = [app.config.get(prefs.DONATE_ASK1), + app.config.get(prefs.DONATE_ASK2), + app.config.get(prefs.DONATE_ASK3)] + self.donate_url_template = app.config.get(prefs.DONATE_URL_TEMPLATE) + self.payment_url_template = app.config.get( + prefs.DONATE_PAYMENT_URL_TEMPLATE) + self.donate_nothanks = app.config.get(prefs.DONATE_NOTHANKS) + self.donate_counter = app.config.get(prefs.DONATE_COUNTER) + self.last_donate_time = app.config.get(prefs.LAST_DONATE_TIME) + app.backend_config_watcher.connect('changed', self.on_config_changed) + signals.system.connect('download-complete', self.on_download_complete) + self.donate_window = self.powertoys = None + self.donate_ratelimit = False + self.ratelimit_dc = None + # Tri-state: None/False/True: the close callback gets called + # anyway even if the window's not shown! + self.donate_response = None + call_on_ui_thread(self.create_windows) + + # Reset counters if not shown for more than 1/2 year. 
Doing this only on + # startup is fine. We have already waited half a year, we can wait + # some more. + # + # The other part of this is in shutdown(): if the last_donate_time + # is still zero at that point, set it to the current time. + # + # At reset, if the timer is zero, it will fail the half year nag + # test. So anyone who is upgrading or somehow had a screwed + # last donate time will get the dialog reshown. + HALF_YEAR = 60 * 60 * 24 * 180 + if time.time() - self.last_donate_time > HALF_YEAR: + self.reset() + + def create_windows(self): + self.donate_window = donate.DonateWindow() + self.powertoys = donate.DonatePowerToys() + self.donate_window.connect('donate-clicked', self.on_donate_clicked) + self.donate_window.connect('hide', self.on_window_close) + + def run_powertoys(self): + if self.powertoys: + self.powertoys.run_dialog() + + def on_config_changed(self, obj, key, value): + if key == prefs.DONATE_NOTHANKS.key: + self.donate_nothanks = value + elif key == prefs.DONATE_COUNTER.key: + self.donate_counter = value + elif key == prefs.LAST_DONATE_TIME.key: + self.last_donate_time = value + + def on_download_complete(self, obj, item): + try: + # Re-arm count is for the next threshold, not the current one, + # so add 1. + rearm_count = self.donate_ask_thresholds[self.donate_nothanks + 1] + except IndexError: + rearm_count = self.donate_ask_thresholds[-1] + + self.donate_counter -= 1 + + # In case the donate counters are borked, reset them + if self.donate_counter < 0: + self.donate_counter = 0 + if self.last_donate_time < 0: + self.last_donate_time = 0 + + # If the donate window has been shown recently, don't show it again + # even if the timer is about to fire. Defuse the timer and then + # continue. + if self.donate_ratelimit: + logging.debug('donate: rate limiting donate window popup.') + return + + logging.debug('donate: on_download_complete %s %s %s', + self.donate_nothanks, self.donate_counter, + self.last_donate_time) + + # Show it if the donate counter has reached zero and we have asked + # less than 3 times, but not if the user's already accepted in the + # past 6 months + HALF_YEAR = 60 * 60 * 24 * 180 + show_donate = (self.donate_counter == 0 and + self.donate_nothanks < 3 and + time.time() - self.last_donate_time > HALF_YEAR) + + logging.debug('donate: show_donate = %s', show_donate) + + if show_donate: + # re-arm the countdown + self.donate_counter = rearm_count + self.set_ratelimit() + # 5 days + self.ratelimit_dc = eventloop.add_timeout(3600 * 24 * 5, + self.reset_ratelimit, + 'donate ratelimiter') + self.show_donate() + + # Set the new value of donate_counter. + app.config.set(prefs.DONATE_COUNTER, self.donate_counter) + + + # ratelimit set/reset can be called from frontend but in this case it + # should be okay + def reset_ratelimit(self): + logging.debug('donate: ratelimit flag reset') + self.donate_ratelimit = False + + def set_ratelimit(self): + logging.debug('donate: ratelimit flag set') + self.donate_ratelimit = True + + def on_window_close(self, obj): + # Yes, I know they are the same. But make it explicit: + # None means that the window either was never shown to begin with + # (but the close callback can get called anyway - assume no) or + # the user clicked no, in which case, no really means no. 
+        if self.donate_response is None or not self.donate_response:
+            # user clicked no
+            self.donate_nothanks += 1
+            app.config.set(prefs.DONATE_NOTHANKS, self.donate_nothanks)
+        else:
+            # user clicked yes: set the time of last acceptance
+            app.config.set(prefs.LAST_DONATE_TIME, time.time())
+        # Reset flag
+        self.donate_response = None
+
+    def on_donate_clicked(self, obj, donate, payment_url):
+        # Save the response, then close.  Do it in the close callback because
+        # we want to run common code for the no case for people who
+        # simply close the window without responding.  But we do the yes
+        # case in-line to open the payment_url as provided by the callback.
+        self.donate_response = donate
+        if donate:
+            app.widgetapp.open_url(payment_url)
+        self.donate_window.close()
+
+    def shutdown(self):
+        # OK: shutdown() is executed on frontend
+        if self.donate_window:
+            self.donate_window.close()
+            self.donate_window = None
+        self.reset_ratelimit()
+        # Don't forget to save the donate counter on shutdown!
+        app.config.set(prefs.DONATE_COUNTER, self.donate_counter)
+        # If the last donate time is 0, set it to the current time so the
+        # preferences don't get reset on startup.
+        if self.last_donate_time == 0:
+            app.config.set(prefs.LAST_DONATE_TIME, time.time())
+
+    def reset(self):
+        for pref in [prefs.DONATE_NOTHANKS, prefs.LAST_DONATE_TIME,
+                     prefs.DONATE_COUNTER]:
+            app.config.set(pref, pref.default)
+
+    def show_donate(self, url=None, payment_url=None):
+        if not url:
+            args = [1, 2, 3]
+            try:
+                url = self.donate_url_template % args[self.donate_nothanks]
+            except IndexError:
+                url = self.donate_url_template % args[-1]
+        if not payment_url:
+            args = [7, 8, 9]
+            try:
+                payment_url = (self.payment_url_template %
+                               args[self.donate_nothanks])
+            except IndexError:
+                payment_url = self.payment_url_template % args[-1]
+        if self.donate_window:
+            logging.debug('donate window: callout to frontend')
+            call_on_ui_thread(lambda: self.donate_window.show(url,
+                                                              payment_url))
diff -Nru miro-4.0.4/lib/downloader.py miro-6.0/lib/downloader.py
--- miro-4.0.4/lib/downloader.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/downloader.py	2013-04-05 16:02:42.000000000 +0000
@@ -31,6 +31,7 @@
 import os
 import random
 import logging
+import time
 
 from miro.gtcache import gettext as _
 from miro.database import DDBObject, ObjectNotFoundError
@@ -38,7 +39,8 @@
 from miro.download_utils import (next_free_filename, get_file_url_path,
                                  next_free_directory, filter_directory_name)
 from miro.util import (get_torrent_info_hash, returns_unicode, check_u,
-                       returns_filename, unicodify, check_f, to_uni, is_magnet_uri)
+                       returns_filename, unicodify, check_f, to_uni,
+                       is_magnet_uri, title_from_magnet)
 from miro import app
 from miro import dialogs
 from miro import displaytext
@@ -49,15 +51,157 @@
 from miro.plat.utils import samefile, unicode_to_filename
 from miro import flashscraper
 from miro import fileutil
+from miro import util
 from miro.fileobject import FilenameType
 
-daemon_starter = None
+class DownloadStateManager(object):
+    """DownloadStateManager: class to store state information about the
+    downloader.
+
+    Commands to the downloader are batched and sent every second.  This is
+    based on the premise that commands for a particular download id can
+    be completely superseded by a subsequent command, with the exception
+    of a pause/resume pair.  For example, a stop command will completely
+    supersede a pause command, so if the two are sent in quick succession
+    only the stop command will be sent to the downloader.
 The exception
+    to this rule is the pause/resume pair, which acts like matter and
+    anti-matter: the two commands annihilate each other when they come
+    into contact (though with not even a whimper instead of a gorgeous
+    display).
+    """
+    STOP = command.DownloaderBatchCommand.STOP
+    RESUME = command.DownloaderBatchCommand.RESUME
+    PAUSE = command.DownloaderBatchCommand.PAUSE
+    RESTORE = command.DownloaderBatchCommand.RESTORE
+
+    UPDATE_INTERVAL = 1
+
+    def __init__(self):
+        self.total_up_rate = 0
+        self.total_down_rate = 0
+        # a hash of download ids that the server knows about.
+        self.downloads = {}
+        self.daemon_starter = None
+        self.startup_commands = dict()
+        self.commands = dict()
+        self.bulk_mode = False
+
+    def set_bulk_mode(self):
+        self.bulk_mode = True
+
+    def send_initial_updates(self):
+        commands = self.startup_commands
+        self.startup_commands = None
+        if commands:
+            c = command.DownloaderBatchCommand(RemoteDownloader.dldaemon,
+                                               commands)
+            c.send()
+
+    def send_updates(self):
+        commands = self.commands
+        self.commands = dict()
+        if commands:
+            c = command.DownloaderBatchCommand(RemoteDownloader.dldaemon,
+                                               commands)
+            c.send()
+        elif self.bulk_mode:
+            from miro.messages import DownloaderSyncCommandComplete
+            # If we did a pause/resume/cancel all, and there weren't any
+            # items in the list to send, nobody would re-enable the
+            # auto-sort.  So we do it here.
+            DownloaderSyncCommandComplete().send_to_frontend()
+            # Reset the bulk mode notification.
+            self.bulk_mode = False
+        self.start_updates()
+
+    def start_updates(self):
+        eventloop.add_timeout(self.UPDATE_INTERVAL,
+                              self.send_updates,
+                              "Send Download Command Updates")
+
+    def get_download(self, dlid):
+        try:
+            return self.downloads[dlid]
+        except KeyError:
+            return None
+
+    def add_download(self, dlid, downloader):
+        self.downloads[dlid] = downloader
+
+    def delete_download(self, dlid):
+        try:
+            del self.downloads[dlid]
+        except KeyError:
+            return False
+        else:
+            return True
+
+    def daemon_started(self):
+        return self.daemon_starter and self.daemon_starter.started
+
+    def queue(self, identifier, cmd, args):
+        if not self.downloads.has_key(identifier):
+            raise ValueError('add_download() not called before queue()')
+
+        # Catch restores first; we will flush them when the downloader's
+        # started.
+        if cmd == self.RESTORE and not self.daemon_started():
+            self.startup_commands[identifier] = (cmd, args)
+            return
+
+        exists = self.commands.has_key(identifier)
 
-# a hash of download ids that the server knows about.
-_downloads = {}
+        # Make sure that a pause/resume pair cancel each other out.  For
+        # others, assume that a subsequent command can completely supersede
+        # the previous command.
+        if exists:
+            old_cmd, unused = self.commands[identifier]
+            if (old_cmd == self.RESUME and cmd == self.PAUSE or
+                old_cmd == self.PAUSE and cmd == self.RESUME):
+                # Make sure that we unfreeze it
+                self.downloads[identifier].status_updates_frozen = False
+                del self.commands[identifier]
+                return
+            # HACK: When we pause and resume we currently send a download
+            # command, then a restore downloader command which doesn't
+            # do anything.  This also breaks our general assumption that a
+            # current command can completely supersede any previous queued
+            # command, so if we see it, disable it.  I'm not actually
+            # sure why we'd want to send a restore command in this case.
+            if cmd == self.RESTORE:
+                logging.info('not restoring active download')
+                return
+
+        # Freeze the status updates, but don't freeze if it is a restore.
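+        # (The freeze makes update_status() discard status messages that
+        # arrive between queueing a command and the daemon acknowledging
+        # it; a restore doesn't change the download's state, so there is
+        # nothing to guard against.)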
+        if cmd != self.RESTORE:
+            self.downloads[identifier].status_updates_frozen = True
+        self.commands[identifier] = (cmd, args)
+
+    def init_controller(self):
+        """Initializes the download daemon controller.
+
+        This doesn't actually start up the downloader daemon, that's done
+        in startup_downloader.  Commands will be queued until then.
+        """
+        self.daemon_starter = DownloadDaemonStarter()
 
-total_up_rate = 0
-total_down_rate = 0
+    def startup_downloader(self):
+        """Initialize the downloaders.
+
+        This method currently does 2 things.  It deletes any stale files
+        left in Incomplete Downloads, then it restarts downloads that have
+        been restored from the database.  It must be called before any
+        RemoteDownloader objects get created.
+        """
+        self.daemon_starter.startup()
+        # Now that the daemon has started, we can process updates.
+        self.send_initial_updates()
+        self.start_updates()
+
+    def shutdown_downloader(self, callback=None):
+        if self.daemon_starter:
+            self.daemon_starter.shutdown(callback)
+        elif callback:
+            callback()
 
 def get_downloader_by_dlid(dlid):
     try:
@@ -74,41 +218,133 @@
 
 class RemoteDownloader(DDBObject):
     """Download a file using the downloader daemon."""
-    def setup_new(self, url, item, contentType=None, channelName=None):
+
+    # attributes that get set from the BatchUpdateDownloadStatus command
+    status_attributes = [
+        'state',
+        'total_size',
+        'current_size',
+        'eta',
+        'rate',
+        'start_time',
+        'end_time',
+        'short_filename',
+        'filename',
+        'reason_failed',
+        'short_reason_failed',
+        'type',
+        'retry_time',
+        'retry_count',
+        'upload_rate',
+        'upload_size',
+        'activity',
+        'seeders',
+        'leechers',
+        'connections',
+        'metainfo',
+        'info_hash',
+    ]
+    # default values for attributes in status_attributes
+    status_attribute_defaults = {
+        'current_size': 0,
+        'upload_size': 0,
+        'state': u'downloading',
+    }
+    # status attributes that don't get saved to disk
+    temp_status_attributes = [
+        'eta',
+        'rate',
+        'upload_rate',
+        'activity',
+        'seeders',
+        'leechers',
+        'connections'
+    ]
+    # status attributes that we can wait a little while to save to disk
+    status_attributes_to_defer = set([
+        'current_size',
+        'upload_size',
+    ])
+
+    def setup_new(self, url, item, content_type=None, channel_name=None):
         check_u(url)
-        if contentType:
-            check_u(contentType)
-        self.origURL = self.url = url
+        if content_type:
+            check_u(content_type)
+        self.orig_url = self.url = url
         self.item_list = []
         self.child_deleted = False
         self.main_item_id = None
         self.dlid = generate_dlid()
-        self.status = {}
-        self.metainfo = None
-        self.state = u'downloading'
-        if contentType is None:
+        if content_type is None:
             # HACK: Some servers report the wrong content-type for
             # torrent files.  We try to work around that by assuming
             # if the enclosure states that something is a torrent,
             # it's a torrent.  Thanks to j@v2v.cc.
             if item.enclosure_type == u'application/x-bittorrent':
-                contentType = item.enclosure_type
-        self.contentType = u""
+                content_type = item.enclosure_type
+        self.content_type = u""
         self.delete_files = True
-        self.channelName = channelName
+        self.channel_name = channel_name
         self.manualUpload = False
-        self._save_later_dc = None
-        self._update_retry_time_dc = None
-        if contentType is None:
-            self.contentType = u""
+        self.status_updates_frozen = False
+        self.last_update = time.time()
+        self.reset_status_attributes()
+        if content_type is None:
+            self.content_type = u""
         else:
-            self.contentType = contentType
+            self.content_type = content_type
 
-        if self.contentType == u'':
+        if self.content_type == u'':
             self.get_content_type()
         else:
             self.run_downloader()
 
+    def setup_restored(self):
+        self.status_updates_frozen = False
+        self.last_update = time.time()
+        self.delete_files = True
+        self.item_list = []
+        if self.dlid == 'noid':
+            # this won't happen nowadays, but it can for old databases
+            self.dlid = generate_dlid()
+
+    def reset_status_attributes(self):
+        """Reset the attributes that track downloading info."""
+        for attr_name in self.status_attributes:
+            default = self.status_attribute_defaults.get(attr_name)
+            setattr(self, attr_name, default)
+
+    def update_status_attributes(self, status_dict):
+        """Update the attributes that track downloading info from a
+        status dict."""
+        for attr_name in self.status_attributes:
+            if attr_name in status_dict:
+                value = status_dict[attr_name]
+            else:
+                value = self.status_attribute_defaults.get(attr_name)
+            # only set attributes if something's changed.  This makes our
+            # UPDATE statements contain less data
+            if getattr(self, attr_name) != value:
+                setattr(self, attr_name, value)
+
+    def get_status_for_downloader(self):
+        status = dict((name, getattr(self, name))
+                      for name in self.status_attributes)
+        # status_attributes only tracks the attributes that we update based on
+        # the downloader status updates.  Also add values that we send to
+        # the downloader, but that don't change from status updates.
+        status['channel_name'] = self.channel_name
+        status['dlid'] = self.dlid
+        status['url'] = self.url
+        return status
+
     @classmethod
     def finished_view(cls):
         return cls.make_view("state in ('finished', 'uploading', "
@@ -124,7 +360,7 @@
 
     @classmethod
     def get_by_url(cls, url):
-        return cls.make_view('origURL=?', (url,)).get_singleton()
+        return cls.make_view('orig_url=?', (url,)).get_singleton()
 
     @classmethod
     def orphaned_view(cls):
@@ -135,39 +371,7 @@
         DDBObject.signal_change(self, needs_save=needs_save)
         if needs_signal_item:
             for item in self.item_list:
-                item.signal_change(needs_save=False)
-        if needs_save:
-            self._cancel_save_later()
-
-
-    def _save_later(self):
-        """Save the remote downloader at some point in the future.
-
-        This is used to handle the fact that remote downloaders are
-        updated often, but those updates are usually just the status
-        dict, which is never used for SELECT statements.  Continually
-        saving those changes to disk is just a waste of time and IO.
-
-        Instead, we schedule the save to happen sometime in the
-        future.  When miro quits, we call the module-level function
-        run_delayed_saves(), which makes sure any pending objects are
-        saved to disk.
-        """
-        if self._save_later_dc is None:
-            self._save_later_dc = eventloop.add_timeout(15,
-                    self._save_now, "Delayed RemoteDownloader save")
-
-    def _save_now(self):
-        """If _save_later() was called and we haven't saved the
-        downloader to disk, do it now.
- """ - if self.id_exists() and self._save_later_dc is not None: - self.signal_change() - - def _cancel_save_later(self): - if self._save_later_dc is not None: - self._save_later_dc.cancel() - self._save_later_dc = None + item.download_stats_changed() def on_content_type(self, info): if not self.id_exists(): @@ -175,12 +379,12 @@ if info['status'] == 200: self.url = info['updated-url'].decode('ascii','replace') - self.contentType = None + self.content_type = None try: - self.contentType = info['content-type'].decode('ascii', - 'replace') + self.content_type = info['content-type'].decode('ascii', + 'replace') except (KeyError, UnicodeDecodeError): - self.contentType = None + self.content_type = None self.run_downloader() else: error = httpclient.UnexpectedStatusCode(info['status']) @@ -193,20 +397,20 @@ if isinstance(error, httpclient.AuthorizationCanceled): # user canceled out of the authorization request, so stop the # download. - self.status['state'] = u'stopped' + self.state = u'stopped' self.signal_change() return # we can't get a content type. it's possible that this is a - # retryable error so we're going to set the contentType to + # retryable error so we're going to set the content_type to # None and run the downloader. it'll handle HTTP errors # better than we will. - self.contentType = None + self.content_type = None self.run_downloader() def get_content_type(self): if is_magnet_uri(self.url): - self.contentType = u'application/x-magnet' + self.content_type = u'application/x-magnet' return httpclient.grab_headers(self.url, self.on_content_type, self.on_content_type_error) @@ -218,69 +422,105 @@ def _get_rates(self): state = self.get_state() if state == u'downloading': - return (self.status.get('rate', 0), self.status.get('upRate', 0)) + return self.rate, self.upload_rate if state == u'uploading': - return (0, self.status.get('upRate', 0)) + return (0, self.upload_rate) return (0, 0) - def before_changing_status(self): - global total_down_rate - global total_up_rate + def before_changing_rates(self): rates = self._get_rates() - total_down_rate -= rates[0] - total_up_rate -= rates[1] + if rates[0] is not None: + app.download_state_manager.total_down_rate -= rates[0] + if rates[1] is not None: + app.download_state_manager.total_up_rate -= rates[1] - def after_changing_status(self): - global total_down_rate - global total_up_rate - self._recalc_state() + def after_changing_rates(self): rates = self._get_rates() - total_down_rate += rates[0] - total_up_rate += rates[1] + if rates[0] is not None: + app.download_state_manager.total_down_rate += rates[0] + if rates[1] is not None: + app.download_state_manager.total_up_rate += rates[1] @classmethod - def update_status(cls, data): + def update_status(cls, data, cmd_done=False): for field in data: - if field not in ['filename', 'shortFilename', 'channelName', - 'metainfo']: + if field not in ['filename', 'short_filename', 'metainfo']: data[field] = unicodify(data[field]) + self = get_downloader_by_dlid(dlid=data['dlid']) + # FIXME: how do we get all of the possible bit torrent + # activity strings into gettext? --NN + if data.has_key('activity') and data['activity']: + data['activity'] = _(data['activity']) if self is not None: - # FIXME - this should get fixed. 
- metainfo = data.pop('metainfo', self.metainfo) + now = time.time() + last_update = self.last_update + state = self.get_state() + new_state = data.get('state', u'downloading') + + # If this item was marked as pending update, then any update + # which comes in now which does not have cmd_done set is void. + if not cmd_done and self.status_updates_frozen: + logging.debug('self = %s, ' + 'saved state = %s ' + 'downloader state = %s. ' + 'Discard.', + self, state, new_state) + # treat as stale + return False + + # If the state is one which we set and was meant to be passed + # through to the downloader (valid_states), and the downloader + # replied with something that was a response to a previous + # download command, and state was also a part of valid_states, + # but the saved state and the new state do not match + # then it means the message is stale. + # + # Have a think about why this is true: when you set a state, + # which is authoritative, to the downloader you expect it + # to reply with that same state. If they do not match then it + # means the message is stale. + # + # The exception to this rule is if the downloader replies with + # an error state, or if downloading has transitioned to finished + # state. + # + # This also does not apply to any state which we set on the + # downloader via a restore command. A restore command before + # a pause/resume/cancel will work as intended, and no special + # trickery is required. A restore command which happens after + # a pause/resume/cancel is void, so no work is required. + # + # I hope this makes sense and is clear! + valid_states = (u'downloading', u'paused', u'stopped', + u'uploading-paused', u'finished') + if (cmd_done and + state in valid_states and new_state in valid_states and + state != new_state): + if not (state == u'downloading' and new_state == u'finished'): + logging.debug('self = %s STALE. ' + 'Saved state %s, got state %s. Discarding.', + self, state, new_state) + return False - # For metainfo, the downloader process doesn't send the - # keys if they haven't changed. Therefore, use our - # current values if the key isn't present. - current = (self.status, self.metainfo) - new = (data, metainfo) - if current == new: - return + # We are updating! Reset the status_updates_frozen flag. + self.status_updates_frozen = False + + # We have something to update: update the last updated timestamp. + self.last_update = now was_finished = self.is_finished() old_filename = self.get_filename() - self.before_changing_status() - - # FIXME: how do we get all of the possible bit torrent - # activity strings into gettext? --NN - if data.has_key('activity') and data['activity']: - data['activity'] = _(data['activity']) - # only set attributes if something's changed. 
This makes our - # UPDATE statments contain less data - if data != self.status: - self.status = data - if metainfo != self.metainfo: - self.metainfo = metainfo - self._recalc_state() + self.before_changing_rates() + self.update_status_attributes(data) + self.after_changing_rates() # Store the time the download finished finished = self.is_finished() and not was_finished - file_migrated = (self.is_finished() and - self.get_filename() != old_filename) - needs_signal_item = not (finished or file_migrated) - self.after_changing_status() + name_changed = self.get_filename() != old_filename + file_migrated = (self.is_finished() and name_changed) if ((self.get_state() == u'uploading' and not self.manualUpload @@ -288,63 +528,87 @@ and self.get_upload_ratio() > app.config.get(prefs.UPLOAD_RATIO)))): self.stop_upload() - self.signal_change(needs_signal_item=needs_signal_item, - needs_save=False) - if self.changed_attributes == set(('status',)): - # if we just changed status, then we can wait a while - # to store things to disk. Since we go through - # update_status() often, this results in a fairly - # large performance gain and alleviates #12101 - self._save_later() - else: - self.signal_change() - if finished: - for item in self.item_list: - item.on_download_finished() - elif file_migrated: - self._file_migrated(old_filename) + self.signal_change() + + self.update_item_list(finished, file_migrated, old_filename) + return True + + def update_item_list(self, finished, file_migrated, old_filename): + if finished: + for item in self.item_list: + item.on_download_finished() + elif file_migrated: + self._file_migrated(old_filename) + else: + # update the torrent title; important for magnet URLs since we + # don't have a real one when the download starts. The + # old_filename check is to prevent things with existing titles + # from being renamed (#18656). + torrent_title = self.calc_torrent_title() + size = self.total_size + for item in self.item_list: + if size != item.size or torrent_title != item.torrent_title: + if size != item.size: + item.size = size + if (torrent_title != item.torrent_title and + torrent_title is not None): + item.torrent_title = torrent_title + item.signal_change() + + def calc_torrent_title(self): + if self.metainfo is not None: + # if we have metainfo, then we should use that + return util.get_name_from_torrent_metadata( self.metainfo) + elif is_magnet_uri(self.url): + # as a fallback, we can try the title from the magnet URI + return title_from_magnet(self.url) + else: + return None def run_downloader(self): """This is the actual download thread. """ flashscraper.try_scraping_url(self.url, self._run_downloader) - def _run_downloader(self, url, contentType=None, title=None): + def _run_downloader(self, url, content_type=None, title=None): if not self.id_exists(): # we got deleted while we were doing the flash scraping return - if contentType is not None: - self.contentType = contentType + if content_type is not None: + self.content_type = content_type if url is not None: if title is not None: + # abuse the torrent_title attribute for this. Since the file + # comes from a flash site, we can be pretty sure that it's not + # going to need it. 
for mem in self.item_list: - if not mem.title: - mem.title = title + mem.set_torrent_title(title) self.url = url logging.debug("downloading url %s", self.url) - c = command.StartNewDownloadCommand(RemoteDownloader.dldaemon, - self.url, self.dlid, - self.contentType, - self.channelName) - c.send() - _downloads[self.dlid] = self - self.status["state"] = u"downloading" + args = dict(url=self.url, content_type=self.content_type, + channel_name=self.channel_name) + app.download_state_manager.add_download(self.dlid, self) + app.download_state_manager.queue(self.dlid, + app.download_state_manager.RESUME, + args) + self.state = u'downloading' else: - self.status["state"] = u'failed' - self.status["shortReasonFailed"] = _('File not found') - self.status["reasonFailed"] = _('Flash URL Scraping Error') + self.state = u'failed' + self.short_reason_failed = _('File not found') + self.reason_failed = _('Flash URL Scraping Error') self.signal_change() def pause(self): """Pauses the download.""" - if _downloads.has_key(self.dlid): - c = command.PauseDownloadCommand(RemoteDownloader.dldaemon, - self.dlid) - c.send() - self.before_changing_status() - self.status["state"] = u"paused" - self.after_changing_status() + if app.download_state_manager.get_download(self.dlid): + args = dict(upload=False) + app.download_state_manager.queue(self.dlid, + app.download_state_manager.PAUSE, + args) + self.before_changing_rates() + self.state = u'paused' + self.after_changing_rates() self.signal_change() def stop(self, delete): @@ -353,29 +617,31 @@ """ if self.get_state() in [u'downloading', u'uploading', u'paused', u'offline']: - if _downloads.has_key(self.dlid): - c = command.StopDownloadCommand(RemoteDownloader.dldaemon, - self.dlid, delete) - c.send() - del _downloads[self.dlid] + if app.download_state_manager.get_download(self.dlid): + args = dict(upload=False, delete=delete) + app.download_state_manager.queue( + self.dlid, + app.download_state_manager.STOP, + args) + app.download_state_manager.delete_download(self.dlid) if delete: self.delete() - self.status["state"] = u"stopped" + self.before_changing_rates() + self.state = u'stopped' + self.after_changing_rates() self.signal_change() def delete(self): - if "filename" in self.status: - filename = self.status['filename'] - else: + if self.filename is None: return try: - fileutil.delete(filename) + fileutil.delete(self.filename) except OSError: logging.exception("Error deleting downloaded file: %s", - to_uni(filename)) + to_uni(self.filename)) - parent = os.path.join(fileutil.expand_filename(filename), + parent = os.path.join(fileutil.expand_filename(self.filename), os.path.pardir) parent = os.path.normpath(parent) movies_dir = fileutil.expand_filename(app.config.get(prefs.MOVIES_DIRECTORY)) @@ -387,57 +653,61 @@ except OSError: logging.exception("Error deleting empty download directory: %s", to_uni(parent)) + self.filename = None def start(self): """Continues a paused, stopped, or failed download thread """ if self.get_state() == u'failed': # For failed downloads, don't trust the redirected URL (#14232) - self.url = self.origURL - if _downloads.has_key (self.dlid): - del _downloads[self.dlid] + self.url = self.orig_url + app.download_state_manager.delete_download(self.dlid) self.dlid = generate_dlid() - self.before_changing_status() - self.status = {} - self.after_changing_status() - if self.contentType == u"": + self.before_changing_rates() + self.reset_status_attributes() + self.after_changing_rates() + if self.content_type == u"": self.get_content_type() else: 
self.run_downloader() self.signal_change() elif self.get_state() in (u'stopped', u'paused', u'offline'): - if _downloads.has_key(self.dlid): - c = command.StartDownloadCommand(RemoteDownloader.dldaemon, - self.dlid) - c.send() - - self.status['state'] = u'downloading' + if app.download_state_manager.get_download(self.dlid): + args = dict(url=self.url, content_type=self.content_type, + channel_name=self.channel_name) + app.download_state_manager.queue( + self.dlid, + app.download_state_manager.RESUME, + args) + self.before_changing_rates() + self.state = u'downloading' + self.after_changing_rates() self.restart() self.signal_change() def migrate(self, directory): - if _downloads.has_key(self.dlid): + if app.download_state_manager.get_download(self.dlid): c = command.MigrateDownloadCommand(RemoteDownloader.dldaemon, self.dlid, directory) c.send() else: # downloader doesn't have our dlid. Move the file ourself. - short_filename = self.status.get("shortFilename") + short_filename = self.short_filename if not short_filename: logging.warning( "can't migrate download; no shortfilename! URL was %s", self.url) return - filename = self.status.get("filename") + filename = self.filename if not filename: logging.warning( "can't migrate download; no filename! URL was %s", self.url) return if fileutil.exists(filename): - if self.status.get('channelName', None) is not None: - channelName = filter_directory_name(self.status['channelName']) - directory = os.path.join(directory, channelName) + if self.channel_name is not None: + channel_name = filter_directory_name(self.channel_name) + directory = os.path.join(directory, channel_name) if not os.path.exists(directory): try: fileutil.makedirs(directory) @@ -450,18 +720,25 @@ # create a file or directory to serve as a placeholder before # we start to migrate. This helps ensure that the destination # we're migrating too is not already taken. - if fileutil.isdir(filename): - newfilename = next_free_directory(newfilename) - fp = None + try: + is_dir = fileutil.isdir(filename) + if is_dir: + newfilename = next_free_directory(newfilename) + fp = None + else: + newfilename, fp = next_free_filename(newfilename) + fp.close() + except ValueError: + func = ('next_free_directory' if is_dir + else 'next_free_filename') + logging.warn('migrate: %s failed. candidate = %r', + func, newfilename) else: - newfilename, fp = next_free_filename(newfilename) - def callback(): - self.status['filename'] = newfilename - self.signal_change(needs_signal_item=False) - self._file_migrated(filename) - fileutil.migrate_file(filename, newfilename, callback) - if fp is not None: - fp.close() # clean up if we called next_free_filename() + def callback(): + self.filename = newfilename + self.signal_change(needs_signal_item=False) + self._file_migrated(filename) + fileutil.migrate_file(filename, newfilename, callback) for i in self.item_list: i.migrate_children(directory) @@ -475,21 +752,24 @@ def set_delete_files(self, delete_files): self.delete_files = delete_files - def set_channel_name(self, channelName): - if self.channelName is None: - if channelName: - check_f(channelName) - self.channelName = channelName + def set_channel_name(self, channel_name): + if self.channel_name is None: + if channel_name: + check_f(channel_name) + self.channel_name = channel_name def remove(self): """Removes downloader from the database and deletes the file. 
""" - global total_down_rate - global total_up_rate - rates = self._get_rates() - total_down_rate -= rates[0] - total_up_rate -= rates[1] + self.before_changing_rates() + if self.is_finished(): + if self.filename is not None: + app.local_metadata_manager.remove_file(self.get_filename()) + else: + logging.warn("RemoteDownloader.remove: filename is None, " + "but state is %s", self.get_state()) self.stop(self.delete_files) + self.after_changing_rates() DDBObject.remove(self) def get_type(self): @@ -497,8 +777,8 @@ "bittorrent". """ self.confirm_db_thread() - if ((self.contentType == u'application/x-bittorrent' - or self.contentType == u'application/x-magnet')): + if ((self.content_type == u'application/x-bittorrent' + or self.content_type == u'application/x-magnet')): return u"bittorrent" return u"http" @@ -509,6 +789,9 @@ """ if item not in self.item_list: self.item_list.append(item) + torrent_title = self.calc_torrent_title() + if torrent_title is not None: + item.torrent_title = torrent_title if self.main_item_id is None: self.main_item_id = item.id self.signal_change() @@ -523,45 +806,11 @@ def get_rate(self): self.confirm_db_thread() - return self.status.get('rate', 0) + return self.rate def get_eta(self): self.confirm_db_thread() - return self.status.get('eta', 0) - - @returns_unicode - def get_startup_activity(self): - self.confirm_db_thread() - activity = self.status.get('activity') - if ((activity is None and self.status.get('retryCount', -1) > -1 - and 'retryTime' in self.status)): - activity = self._calc_retry_time() - if self._update_retry_time_dc is None: - self._update_retry_time_dc = eventloop.add_timeout(1, - self._update_retry_time, 'Updating retry time') - if activity is None: - return _("starting up") - return activity - - def _calc_retry_time(self): - if self.status['retryTime'] > datetime.datetime.now(): - retry_delta = self.status['retryTime'] - datetime.datetime.now() - time_str = displaytext.time_string(retry_delta.seconds) - return _('no connection - retrying in %(time)s', {"time": time_str}) - else: - return _('no connection - retrying soon') - - def _update_retry_time(self): - if self.id_exists(): - # calling signal_change() will cause the us to call - # get_startup_activity() again which will have a new time now. - self.signal_change(needs_save=False) - self._update_retry_time_dc = None - - def _cancel_retry_time_update(self): - if self._update_retry_time_dc: - self._update_retry_time_dc.cancel() - self._update_retry_time_dc = None + return self.eta @returns_unicode def get_reason_failed(self): @@ -573,7 +822,10 @@ msg = u"get_reason_failed() called on a non-failed downloader" raise ValueError(msg) self.confirm_db_thread() - return self.status.get('reasonFailed', _("Unknown")) + if self.reason_failed is not None: + return self.reason_failed + else: + return _("Unknown") @returns_unicode def get_short_reason_failed(self): @@ -581,7 +833,10 @@ msg = u"get_short_reason_failed() called on a non-failed downloader" raise ValueError(msg) self.confirm_db_thread() - return self.status.get('shortReasonFailed', _("Unknown")) + if self.short_reason_failed is not None: + return self.short_reason_failed + else: + return _("Unknown") @returns_unicode def get_url(self): @@ -593,7 +848,7 @@ @returns_unicode def get_state(self): """Returns the state of the download: downloading, paused, - stopped, failed, or finished. + uploading, uploading-paused, stopped, failed, or finished. 
""" self.confirm_db_thread() return self.state @@ -606,13 +861,13 @@ """Returns the total size of the download in bytes. """ self.confirm_db_thread() - return self.status.get('totalSize', -1) + return self.total_size def get_current_size(self): """Returns the current amount downloaded in bytes. """ self.confirm_db_thread() - return self.status.get('currentSize', 0) + return self.current_size @returns_filename def get_filename(self): @@ -620,41 +875,18 @@ called until state is "finished." """ self.confirm_db_thread() - # FIXME - FilenameType('') is a bogus value, but looks like a - # filename. should return None. - return self.status.get('filename', FilenameType('')) - - def setup_restored(self): - self._save_later_dc = None - self._update_retry_time_dc = None - self.delete_files = True - self.item_list = [] - if self.dlid == 'noid': - # this won't happen nowadays, but it can for old databases - self.dlid = generate_dlid() - self.status['rate'] = 0 - self.status['upRate'] = 0 - self.status['eta'] = 0 - - def on_signal_change(self): - self._recalc_state() - - def _recalc_state(self): - new_state = self.status.get('state', u'downloading') - # avoid altering changed_attributes if we don't need to - if new_state != self.state: - self.state = new_state + return self.filename def get_upload_ratio(self): size = self.get_current_size() if size == 0: return 0 - return self.status.get('uploaded', 0) / size + return self.upload_size / size def restart_on_startup_if_needed(self): if not self.id_exists(): return - if _downloads.has_key(self.dlid): + if app.download_state_manager.get_download(self.dlid): # something has caused us to restart already, (for # example, the user selects "resume seeding"). squelch # any automatic behaviour (#12462) @@ -670,25 +902,19 @@ self.stop_upload() def restart(self): - if not self.status or self.status.get('dlerType') is None: - if self.contentType == u"": + if self.type is None: + if self.content_type == u"": self.get_content_type() else: self.run_downloader() else: - _downloads[self.dlid] = self - dler_status = self.status - # FIXME: not sure why this is necessary - if self.contentType == u'application/x-magnet': - dler_status['url'] = self.url - dler_status['metainfo'] = self.metainfo - - c = command.RestoreDownloaderCommand(RemoteDownloader.dldaemon, - dler_status) - c.send() - self.before_changing_status() - self.status['state'] = u'downloading' - self.after_changing_status() + app.download_state_manager.add_download(self.dlid, self) + self.state = u'downloading' + args = dict(downloader=self.get_status_for_downloader()) + app.download_state_manager.queue( + self.dlid, + app.download_state_manager.RESTORE, + args) def start_upload(self): """ @@ -708,14 +934,16 @@ self.get_state()) return self.manualUpload = True - if _downloads.has_key(self.dlid): - c = command.StartDownloadCommand(RemoteDownloader.dldaemon, - self.dlid) - c.send() + if app.download_state_manager.get_download(self.dlid): + args = dict(url=self.url, content_type=self.content_type, + channel_name=self.channel_name) + app.download_state_manager.queue(self.dlid, + app.download_state_manager.RESUME, + args) else: - self.before_changing_status() - self.status['state'] = u'uploading' - self.after_changing_status() + self.before_changing_rates() + self.state = u'uploading' + self.after_changing_rates() self.restart() self.signal_change() @@ -723,28 +951,29 @@ """ Stop uploading/seeding and set status as "finished". 
""" - if _downloads.has_key(self.dlid): - c = command.StopUploadCommand(RemoteDownloader.dldaemon, - self.dlid) - c.send() - del _downloads[self.dlid] - self.before_changing_status() - self.status["state"] = u"finished" - self.after_changing_status() + if app.download_state_manager.get_download(self.dlid): + args = dict(upload=True) + app.download_state_manager.queue(self.dlid, + app.download_state_manager.STOP, args) + app.download_state_manager.delete_download(self.dlid) + self.before_changing_rates() + self.state = u'finished' + self.after_changing_rates() self.signal_change() def pause_upload(self): """ Stop uploading/seeding and set status as "uploading-paused". """ - if _downloads.has_key(self.dlid): - c = command.PauseUploadCommand(RemoteDownloader.dldaemon, - self.dlid) - c.send() - del _downloads[self.dlid] - self.before_changing_status() - self.status["state"] = u"uploading-paused" - self.after_changing_status() + if app.download_state_manager.get_download(self.dlid): + args = dict(upload=True) + app.download_state_manager.queue(self.dlid, + app.download_state_manager.PAUSE, + args) + app.download_state_manager.delete_download(self.dlid) + self.before_changing_rates() + self.state = u"uploading-paused" + self.after_changing_rates() self.signal_change() def cleanup_incomplete_downloads(): @@ -759,7 +988,7 @@ 'offline', 'uploading', 'finished', 'uploading-paused'): filename = downloader.get_filename() - if len(filename) > 0: + if filename: if not fileutil.isabs(filename): filename = os.path.join(download_dir, filename) files_in_use.add(filename) @@ -837,35 +1066,9 @@ callback=self._on_shutdown) def _on_shutdown(self): - shutdown_downloader_objects() self.shutdown_callback() del self.shutdown_callback -def init_controller(): - """Intializes the download daemon controller. - - This doesn't actually start up the downloader daemon, that's done - in startup_downloader. Commands will be queued until then. - """ - global daemon_starter - daemon_starter = DownloadDaemonStarter() - -def startup_downloader(): - """Initialize the downloaders. - - This method currently does 2 things. It deletes any stale files - self in Incomplete Downloads, then it restarts downloads that have - been restored from the database. It must be called before any - RemoteDownloader objects get created. - """ - daemon_starter.startup() - -def shutdown_downloader(callback=None): - if daemon_starter: - daemon_starter.shutdown(callback) - elif callback: - callback() - def lookup_downloader(url): try: return RemoteDownloader.get_by_url(url) @@ -890,9 +1093,9 @@ existing = get_existing_downloader_by_url(url) if existing: return existing - channelName = unicode_to_filename(item.get_channel_title(True)) - if not channelName: - channelName = None + channel_name = unicode_to_filename(item.get_channel_title(True)) + if not channel_name: + channel_name = None if url.startswith(u'file://'): path = get_file_url_path(url) try: @@ -903,19 +1106,22 @@ return None else: return RemoteDownloader(url, item, u'application/x-bittorrent', - channelName=channelName) + channel_name=channel_name) elif is_magnet_uri(url): return RemoteDownloader(url, item, u'application/x-magnet') else: - return RemoteDownloader(url, item, channelName=channelName) - -def shutdown_downloader_objects(): - """Perform shutdown code for RemoteDownloaders. + return RemoteDownloader(url, item, channel_name=channel_name) - This means a couple things: - - Make sure any RemoteDownloaders with pending changes get saved. 
- - Cancel the update retry time callbacks +def reset_download_stats(): + """Set columns in the remote_downloader table to None if they track + temporary data, like eta or rate. """ - for downloader in RemoteDownloader.make_view(): - downloader._save_now() - downloader._cancel_retry_time_update() + # FIXME: it's a little weird to be using app.db's cursor here + setters = ['%s=NULL' % name + for name in RemoteDownloader.temp_status_attributes + ] + app.db.cursor.execute("UPDATE remote_downloader SET %s" % + ', '.join(setters)) + app.db.connection.commit() + + diff -Nru miro-4.0.4/lib/download_utils.py miro-6.0/lib/download_utils.py --- miro-4.0.4/lib/download_utils.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/download_utils.py 2013-04-05 16:02:42.000000000 +0000 @@ -44,6 +44,12 @@ from miro.plat.utils import unicode_to_filename, unmake_url_safe from miro.fileutil import expand_filename +# next_free_filename and friends used to be defined in this module, but +# they've been moved to the utils module. For now, we import into this module +# too so that the old code will continue to work. +# FIXME: refactor code so we import next_free_filename from util.py +from miro.util import next_free_filename, next_free_directory + URI_PATTERN = re.compile(r'^([^?]*/)?([^/?]*)/*(\?(.*))?$') # filename limits this is mostly for windows where we have a 255 character @@ -140,71 +146,6 @@ filename += guessed_ext return filename -def next_free_filename_candidates(path): - """Generates candidate names for next_free_filename.""" - - # try unmodified path first - yield path - # add stuff to the filename to try to make it unique - - dirname, filename = os.path.split(path) - if not filename: - raise ValueError("%s is a directory name" % path) - basename, ext = os.path.splitext(filename) - count = 1 - while True: - filename = "%s.%s%s" % (basename, count, ext) - yield os.path.join(dirname, filename) - count += 1 - if count > 1000: - raise ValueError("Can't find available filename for %s" % path) - -@returns_file -def next_free_filename(name): - """Finds a filename that's unused and similar the the file we want - to download and returns an open file handle to it. - """ - check_f(name) - mask = os.O_CREAT | os.O_EXCL | os.O_RDWR - # On Windows we need to pass in O_BINARY, fdopen() even with 'b' - # specified is not sufficient. - if sys.platform == 'win32': - mask |= os.O_BINARY - - candidates = next_free_filename_candidates(name) - while True: - # Try with the name supplied. - newname = candidates.next() - try: - fd = os.open(expand_filename(newname), mask) - fp = os.fdopen(fd, 'wb') - return expand_filename(newname), fp - except OSError: - continue - return (expand_filename(newname), fp) - -def next_free_directory_candidates(name): - """Generates candidate names for next_free_directory.""" - yield name - count = 1 - while True: - yield "%s.%s" % (name, count) - count += 1 - if count > 1000: - raise ValueError("Can't find available directory for %s" % name) - -@returns_filename -def next_free_directory(name): - """Finds a unused directory name using name as a base. - - This method doesn't create the directory, it just finds an an-used one. - """ - candidates = next_free_directory_candidates(name) - while True: - candidate = candidates.next() - if not os.path.exists(candidate): - return candidate - @returns_filename def filename_from_url(url, clean=False): """Returns a reasonable filename for saving the given url. 
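
To make the command batching implemented by DownloadStateManager.queue() in
lib/downloader.py above easier to follow, here is a minimal, self-contained
sketch of the supersede/annihilate rules (simplified stand-in code under
assumed names, not the actual Miro API):

    # Pending commands live in a dict keyed by download id; the batch is
    # sent once per UPDATE_INTERVAL tick, so only the surviving entry per
    # id ever crosses the process boundary.
    PAUSE, RESUME, STOP, RESTORE = 'pause', 'resume', 'stop', 'restore'

    def queue(pending, dlid, cmd, args=None):
        if dlid in pending:
            old_cmd, _ = pending[dlid]
            # a pause/resume pair annihilates: nothing is sent at all
            if (old_cmd, cmd) in ((PAUSE, RESUME), (RESUME, PAUSE)):
                del pending[dlid]
                return
            # a restore never replaces an already-queued command
            if cmd == RESTORE:
                return
        # otherwise the newest command supersedes the queued one
        pending[dlid] = (cmd, args)

    pending = {}
    queue(pending, 'dl1', PAUSE)
    queue(pending, 'dl1', STOP)    # STOP supersedes PAUSE
    queue(pending, 'dl2', PAUSE)
    queue(pending, 'dl2', RESUME)  # the pair cancels; nothing sent for dl2
    assert pending == {'dl1': (STOP, None)}

Batching this way means at most one command per download id is sent per
one-second tick, which is the premise the class docstring spells out.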
diff -Nru miro-4.0.4/lib/echonest.py miro-6.0/lib/echonest.py
--- miro-4.0.4/lib/echonest.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/echonest.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,448 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""``miro.echonest`` -- Query Echonest"""
+
+import collections
+import difflib
+import logging
+import os
+import os.path
+import urllib
+from xml.dom import minidom
+from xml.parsers import expat
+
+from miro import filetags
+from miro import httpclient
+from miro import util
+from miro import eventloop
+from miro import trapcall
+
+# TODO: get API keys for PCF
+ECHO_NEST_API_KEY = "ZAHBN7QAMJFJLABY6"
+SEVEN_DIGITAL_API_KEY = "7d35gcbnycah"
+
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
+class CodegenError(StandardError):
+    """ENMFP or echoprint failed to process a file."""
+
+class ResponseParsingError(StandardError):
+    """Error parsing an echonest/7digital response."""
+
+class CodegenNotSupported(StandardError):
+    """ENMFP can't run on the user's architecture."""
+
+def exec_codegen(codegen_info, media_path, callback, errback):
+    """Run an echonest codegen in a worker thread.
+
+    This method should work for both ENMFP and echoprint.
+
+    On success, callback(media_path, echonest_code) will be called.
+
+    On error, errback(media_path, exception) will be called.
+    """
+    if cant_run_codegen():
+        # use add_idle to send the errback to make the timing closer to normal
+        # operations.  Some code could potentially break if the errback runs
+        # before this function returns.
+        eventloop.add_idle(errback, 'exec_codegen errback',
+                           args=(CodegenNotSupported(),))
+        return
+
+    codegen_path = codegen_info['path']
+    codegen_env = codegen_info.get('env')
+    def thread_function():
+        stdout = util.call_command(codegen_path, media_path, env=codegen_env)
+        results = json.loads(stdout)
+        # not sure why the code generator always returns a 1-element list, but
+        # it does
+        results = results[0]
+        if 'error' in results:
+            raise CodegenError(results['error'])
+        # NOTE: both codegens return some metadata that we can use, but
+        # mutagen can get the same data so let's just pay attention to the
+        # code.
+        return results['code']
+
+    def thread_callback(code):
+        callback(media_path, code)
+
+    def thread_errback(error):
+        errback(media_path, error)
+
+    logging.debug("Invoking echonest codegen on %s", media_path)
+    eventloop.call_in_thread(thread_callback, thread_errback, thread_function,
+                             'exec echonest codegen')
+
+def cant_run_codegen():
+    # Windows doesn't support uname, but we know we can run ENMFP-codegen
+    # there.
+    if not hasattr(os, 'uname'):
+        return False
+    uname = os.uname()
+    return (uname[0] == 'Darwin' and
+            (uname[4] != 'i386' and uname[4] != 'x86_64'))
+
+def query_echonest(path, cover_art_dir, code, version, metadata, callback,
+                   errback):
+    """Send a query to echonest to identify a song.
+
+    After the query is complete, we will either call callback(path,
+    metadata_dict) or errback(path, exception_obj)
+
+    :param path: path for the song
+    :param cover_art_dir: directory to write cover art to
+    :param code: echonest code from ENMFP or echoprint
+    :param version: code version (3.15 for ENMFP or 4.11 for echoprint)
+    :param metadata: dict of metadata from ID3 tags.
+    :param callback: function to call on success
+    :param errback: function to call on error
+    """
+    _EchonestQuery(path, cover_art_dir, code, version, metadata, callback,
+                   errback)
+
+class _EchonestQuery(object):
+    """Functor object that does the work for query_echonest.
+
+    Since we use a couple of deferred calls, it's simpler to work with an
+    object than nesting everything inside a function.
+    """
+    # cache album names for 7digital release ids
+    seven_digital_cache = {}
+
+    def __init__(self, path, cover_art_dir, code, version, metadata, callback,
+                 errback):
+        self.metadata = {}
+        self.cover_art_url = None
+        self.cover_art_filename = None
+        self.path = path
+        self.cover_art_dir = cover_art_dir
+        self.seven_digital_release_ids = []
+        self.seven_digital_results = []
+        self.callback = callback
+        self.errback = errback
+        self.code = code
+        self.album_name_from_tags = metadata.get('album')
+        if code is not None:
+            self.query_echonest_with_code(code, version, metadata)
+        elif 'echonest_id' in metadata:
+            self.query_echonest_with_echonest_id(metadata['echonest_id'])
+        else:
+            self.query_echonest_with_tags(metadata)
+
+    def invoke_callback(self):
+        trapcall.trap_call('query_echonest callback', self.callback,
+                           self.path, self.metadata)
+
+    def invoke_errback(self, error):
+        trapcall.trap_call('query_echonest errback', self.errback,
+                           self.path, error)
+
+    def query_echonest_with_code(self, code, version, metadata):
+        post_vars = {
+            'api_key': ECHO_NEST_API_KEY,
+            'bucket': ['tracks', 'id:7digital'],
+            'query': self._make_echonest_query(code, version, metadata),
+        }
+        url = 'http://echonest.pculture.org/api/v4/song/identify?'
+        httpclient.grab_url(url,
+                            self.echonest_callback, self.echonest_errback,
+                            post_vars=post_vars)
+
+    def query_echonest_with_tags(self, metadata):
+        url_data = [
+            ('api_key', ECHO_NEST_API_KEY),
+            ('bucket', 'tracks'),
+            ('bucket', 'id:7digital'),
+            # In case there are multiple songs for the same artist/title, only
+            # use the "hottest" song, AKA the most popular.
+            ('results', '1'),
+            ('sort', 'song_hotttnesss-desc'),
+        ]
+        for key in ('title', 'artist'):
+            if key in metadata:
+                url_data.append((key, metadata[key].encode('utf-8')))
+        url = ('http://echonest.pculture.org/api/v4/song/search?' +
+               urllib.urlencode(url_data))
+        httpclient.grab_url(url, self.echonest_callback,
+                            self.echonest_errback)
+
+    def query_echonest_with_echonest_id(self, echonest_id):
+        url_data = [
+            ('api_key', ECHO_NEST_API_KEY),
+            ('bucket', 'tracks'),
+            ('bucket', 'id:7digital'),
+            ('id', echonest_id),
+        ]
+        url = ('http://echonest.pculture.org/api/v4/song/profile?' +
+               urllib.urlencode(url_data))
+        httpclient.grab_url(url,
+                            self.echonest_callback, self.echonest_errback)
+
+    def _make_echonest_query(self, code, version, metadata):
+        echonest_metadata = {'version': version}
+        if 'title' in metadata:
+            echonest_metadata['title'] = metadata['title']
+        if 'artist' in metadata:
+            echonest_metadata['artist'] = metadata['artist']
+        if 'album' in metadata:
+            # echonest uses "release" instead of album
+            echonest_metadata['release'] = metadata['album']
+        if 'duration' in metadata:
+            # convert millisecs to secs for echonest
+            echonest_metadata['duration'] = metadata['duration'] // 1000
+        return json.dumps({
+            'code': code,
+            'metadata': echonest_metadata,
+        })
+
+    def echonest_callback(self, data):
+        try:
+            self._handle_echonest_callback(data['body'])
+        except StandardError, e:
+            logging.warn("Error handling echonest response: %r", data['body'],
+                         exc_info=True)
+            self.invoke_errback(ResponseParsingError())
+
+    def _handle_echonest_callback(self, echonest_reply):
+        response = json.loads(echonest_reply.decode('utf-8'))['response']
+        status_code = response['status']['code']
+        # TODO: check status code
+        songs = response['songs']
+        if len(songs) != 1:
+            if self.code is not None:
+                query_type = "Echonest code"
+            else:
+                query_type = "Metadata to echonest"
+            if len(songs) == 0:
+                logging.warn("%s matched no songs", query_type)
+            else:
+                logging.warn("%s matched multiple songs",
+                             query_type)
+            # What can we do here?
 Just return an empty metadata dict to our
+            # callback
+            self.invoke_callback()
+            return
+
+        song = songs[0]
+        self.metadata['title'] = song['title']
+        self.metadata['artist'] = song['artist_name']
+        self.metadata['echonest_id'] = song['id']
+
+        tracks = song.get('tracks', [])
+        if len(tracks) == 0:
+            # No 7digital releases
+            logging.warn("No 7digital releases for echonest song %s",
+                         self.metadata['echonest_id'])
+            self.invoke_callback()
+        elif len(tracks) > 1 and self.album_name_from_tags is None:
+            logging.warn("Multiple 7digital releases for echonest song %s "
+                         "and no album tag",
+                         self.metadata['echonest_id'])
+            self.invoke_callback()
+        else:
+            # Now find all release ids, then start fetching them
+            for track in tracks:
+                foreign_release_id = track['foreign_release_id']
+                prefix = "7digital:release:"
+                if not foreign_release_id.startswith(prefix):
+                    raise ResponseParsingError("Invalid foreign_release_id: "
+                                               "%s" % foreign_release_id)
+                release_id = foreign_release_id[len(prefix):]
+                self.seven_digital_release_ids.append(release_id)
+                self.query_7digital(release_id)
+
+    def echonest_errback(self, error):
+        self.invoke_errback(error)
+
+    def query_7digital(self, release_id):
+        if release_id not in self.seven_digital_cache:
+            self.release_id = release_id
+            seven_digital_url = self._make_7digital_url(release_id)
+            httpclient.grab_url(seven_digital_url,
+                                self.seven_digital_callback,
+                                self.seven_digital_errback)
+        else:
+            self.handle_7digital_cache_hit(release_id)
+
+    def _make_7digital_url(self, release_id):
+        # data in all query strings
+        url_data = [
+            ('oauth_consumer_key', SEVEN_DIGITAL_API_KEY),
+            ('imageSize', '350'),
+            ('releaseid', str(release_id)),
+        ]
+        return ('http://7digital.pculture.org/1.2/release/details?' +
+                urllib.urlencode(url_data))
+
+    def handle_7digital_cache_hit(self, release_id):
+        cached_result = self.seven_digital_cache[release_id]
+        self.handle_7_digital_result(cached_result)
+
+    def handle_7_digital_result(self, result):
+        self.seven_digital_results.append(result)
+        # wait until we get replies for each release_id we queried before
+        # finishing
+        if (len(self.seven_digital_results) ==
+            len(self.seven_digital_release_ids)):
+            self.finish_seven_digital_query()
+
+    def seven_digital_callback(self, data):
+        result = self.parse_seven_digital_callback(data['body'])
+        if result is not None:
+            self.seven_digital_cache[result['id']] = result
+        self.handle_7_digital_result(result)
+
+    def parse_seven_digital_callback(self, seven_digital_reply):
+        try:
+            return self._parse_seven_digital_callback(seven_digital_reply)
+        except (StandardError, expat.ExpatError), e:
+            logging.warn("Error handling 7digital response: %r",
+                         seven_digital_reply, exc_info=True)
+            return None
+
+    def _parse_seven_digital_callback(self, seven_digital_reply):
+        doc = minidom.parseString(seven_digital_reply)
+        def find_text_for_tag(elt, tag_name):
+            return elt.getElementsByTagName(tag_name)[0].firstChild.data
+        result = {}
+
+        if len(doc.getElementsByTagName('error')) != 0:
+            error = doc.getElementsByTagName('error')[0]
+            code = error.getAttribute("code")
+            msg = find_text_for_tag(doc, 'errorMessage')
+            logging.warn("7digital returned an error: %s -- %s", code, msg)
+            return None
+
+        release = doc.getElementsByTagName('release')[0]
+        result['id'] = release.getAttribute('id')
+        result['album'] = album = find_text_for_tag(doc, 'title')
+        result['cover_art_url'] = find_text_for_tag(doc, 'image')
+        result['cover_art_filename'] = filetags.calc_cover_art_filename(album)
+        artist =
 doc.getElementsByTagName('artist')[0]
+        result['album_artist'] = find_text_for_tag(artist, 'name')
+        return result
+
+    def seven_digital_errback(self, error):
+        logging.warn("Error connecting to 7digital: %s", error)
+        self.handle_7_digital_result(error)
+
+    def finish_seven_digital_query(self):
+        result = self.pick_seven_digital_result()
+        # if we didn't get a good 7digital reply, we can still invoke our
+        # callback with our echonest data
+        if isinstance(result, Exception) or result is None:
+            self.invoke_callback()
+            return
+        self.metadata['album'] = result['album']
+        self.metadata['album_artist'] = result['album_artist']
+        self.cover_art_url = result['cover_art_url']
+        self.cover_art_filename = result['cover_art_filename']
+        # try to grab cover art if we can, and it's not already downloaded.
+        # Otherwise, just call our callback
+        if (self.cover_art_url and self.cover_art_filename):
+            self.grab_url_dest = os.path.join(self.cover_art_dir,
+                                              self.cover_art_filename)
+            if os.path.exists(self.grab_url_dest):
+                self.metadata['cover_art'] = self.grab_url_dest
+                self.invoke_callback()
+            else:
+                self.fetch_cover_art()
+        else:
+            self.invoke_callback()
+
+    def pick_seven_digital_result(self):
+        """Pick a result from the replies we got from 7digital."""
+        # Error handling: If some of the 7digital replies were HTTP errors or
+        # parsing errors, then just ignore them.  But if all replies were
+        # errors, then we need to fail.
+        error_count = 0
+        # None signifies that we got a reply, but couldn't parse it
+        none_count = 0
+        # use results to collect the non-Error/None results
+        results = []
+        for r in self.seven_digital_results:
+            if isinstance(r, Exception):
+                error_count += 1
+            elif r is None:
+                none_count += 1
+            else:
+                results.append(r)
+        if error_count == len(self.seven_digital_results):
+            # Return any of the errors as our result
+            result = self.seven_digital_results[0]
+            logging.warn("Error querying 7digital: %s", result)
+            return result
+        if error_count + none_count == len(self.seven_digital_results):
+            # None of the results we got were parsable, return None
+            logging.warn("No parsable results from 7digital")
+            return None
+        return self._choose_best_7digital_result(results)
+
+    def _choose_best_7digital_result(self, results):
+        """Pick the best 7digital result from a list."""
+        if len(results) == 1:
+            return results[0]
+        if self.album_name_from_tags is None:
+            logging.warn("_EchonestQuery._choose_best_7digital_result: "
+                         "album_name_from_tags is None")
+            # we shouldn't get here, just return any result
+            return results[0]
+        result_map = dict((r['album'], r) for r in results)
+        best_matches = difflib.get_close_matches(self.album_name_from_tags,
+                                                 result_map.keys(),
+                                                 n=1, cutoff=0.6)
+        if best_matches:
+            return result_map[best_matches[0]]
+        else:
+            return None
+
+    def fetch_cover_art(self):
+        httpclient.grab_url(self.cover_art_url,
+                            self.cover_art_callback,
+                            self.cover_art_errback,
+                            write_file=self.grab_url_dest)
+
+    def cover_art_callback(self, data):
+        # we don't care about the data sent back, since grab_url wrote our
+        # file for us
+        self.metadata['cover_art'] = self.grab_url_dest
+        self.metadata['created_cover_art'] = True
+        self.invoke_callback()
+
+    def cover_art_errback(self, error):
+        logging.warn("Error fetching cover art (%s)", self.cover_art_url)
+        # we can still invoke our callback with the data from echonest
+        self.invoke_callback()
diff -Nru miro-4.0.4/lib/emusic.py miro-6.0/lib/emusic.py
--- miro-4.0.4/lib/emusic.py
diff -Nru miro-4.0.4/lib/emusic.py miro-6.0/lib/emusic.py
--- miro-4.0.4/lib/emusic.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/emusic.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,117 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""Functions for downloading from eMusic."""
+
+import logging
+
+from miro import app
+from miro import httpclient
+from miro import fileutil
+
+import StringIO
+
+import urlparse
+from xml.dom import minidom
+
+def is_emusic_url(url):
+    parts = urlparse.urlparse(url)
+    return parts.path.endswith('.emx')
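download_file() below fetches (or reads) an .emx file and hands it to _download_emx_files(), which walks the TRACK elements for a handful of known child tags. For reference, a hypothetical minimal payload: only the TRACK child tag names (TRACKURL, TITLE, ALBUMART, ALBUMARTLARGE, DURATION) are taken from the parser; the PACKAGE/TRACKLIST wrapper elements are assumed for illustration:

    from xml.dom import minidom

    # Hypothetical .emx document; wrapper element names are assumptions.
    SAMPLE_EMX = """<?xml version='1.0' encoding='UTF-8'?>
    <PACKAGE>
      <TRACKLIST>
        <TRACK>
          <TRACKURL>http://example.com/track.mp3</TRACKURL>
          <TITLE>Example Track</TITLE>
          <ALBUMART>http://example.com/art.jpg</ALBUMART>
          <ALBUMARTLARGE>http://example.com/art-large.jpg</ALBUMARTLARGE>
          <DURATION>215</DURATION>
        </TRACK>
      </TRACKLIST>
    </PACKAGE>
    """

    dom = minidom.parseString(SAMPLE_EMX)
    for track in dom.documentElement.getElementsByTagName('TRACK'):
        for node in track.childNodes:
            # skip whitespace text nodes between elements
            if node.nodeType != node.TEXT_NODE and node.childNodes:
                print node.nodeName, node.childNodes[0].nodeValue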
+ """ + if url.startswith('file://'): + path = url[7:] + try: + _download_emx_files(path) + finally: + fileutil.remove(path) + return + + def callback(data): + _emx_callback(data, handle_unknown_callback) + + options = httpclient.TransferOptions(url) + options.requires_cookies = True + transfer = httpclient.CurlTransfer(options, callback, + handle_unknown_callback) + transfer.start() + +def _emx_callback(data, unknown): + if data['status'] != 200: + return unknown(data['original-url']) + if data['content-type'].startswith('text/html'): + return unknown(data['original-url']) + + _download_emx_files(StringIO.StringIO(data['body'])) + +def _download_emx_files(file_): + try: + dom = minidom.parse(file_) + except Exception, e: + if e.message == 'no element found: line 1, column 0': + logging.debug('got _emx file with no data, skipping') + return + with file(file_, 'rb') as f: + app.controller.failed_soft('_emx_callback', + 'could not parse %r, data:\n%r' % ( + file_, f.read()), + with_exception=True) + return + from miro.singleclick import _build_entry, download_video + + for track in dom.documentElement.getElementsByTagName('TRACK'): + url = None + additional = {} + for node in track.childNodes: + if node.nodeType != node.TEXT_NODE: + key = node.nodeName + if node.childNodes: + value = node.childNodes[0].nodeValue + else: + value = None + if key == 'TRACKURL': + url = value + elif key == 'TITLE': + additional['title'] = value + elif key == 'ALBUMARTLARGE': + additional['thumbnail'] = value + elif key == 'ALBUMART' and 'thumbnail' not in additional: + additional['thumbnail'] = value + elif key == 'DURATION': + additional['length'] = int(value) + if url is None: + app.controller.failed_soft("_emx_callback", + "could not find URL for track", + with_exception=False) + else: + entry = _build_entry(url, 'audio/mp3', additional) + download_video(entry) diff -Nru miro-4.0.4/lib/errors.py miro-6.0/lib/errors.py --- miro-4.0.4/lib/errors.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/errors.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,9 +30,6 @@ """``miro.errors`` -- Miro exceptions. """ -class Shutdown(Exception): - """Action aborted because we're shutting down""" - class ActionUnavailableError(ValueError): """The action attempted can not be done in the current state.""" def __init__(self, reason): diff -Nru miro-4.0.4/lib/eventloop.py miro-6.0/lib/eventloop.py --- miro-4.0.4/lib/eventloop.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/eventloop.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,22 +35,21 @@ TODO: handle user setting clock back """ -import threading import errno -import select -import socket import heapq -import Queue import logging +import Queue +import select +import socket +import threading import traceback + from miro import app from miro import config from miro import trapcall from miro import signals from miro import util - from miro.clock import clock - from miro.plat.utils import thread_body cumulative = {} @@ -172,7 +171,7 @@ instead is call them in a separate thread and return the result in a callback that executes in the event loop. """ - THREADS = 3 + THREADS = 4 def __init__(self, event_loop): self.event_loop = event_loop @@ -227,12 +226,12 @@ # in a blocking operation which is exactly the point of having them # so eventloop.join() in turn blocks. So if it doesn't clean up # in time let the daemon flag in the Thread() do its job. See #16584. 
- while len(self.threads) > 0: - x = self.threads.pop() + for t in self.threads: try: - x.join(0.5) + t.join(0.5) except StandardError: pass + self.threads = [] class SimpleEventLoop(signals.SignalEmitter): def __init__(self): @@ -523,6 +522,9 @@ def connect(signal, callback): _eventloop.connect(signal, callback) +def connect_after(signal, callback): + _eventloop.connect_after(signal, callback) + def disconnect(signal, callback): _eventloop.disconnect(signal, callback) @@ -608,3 +610,41 @@ return idle_iterate(func, "%s() (using idle_iterator)" % func.__name__, args=args, kwargs=kwargs) return queuer + +class DelayedFunctionCaller(object): + """Call a function sometime in the future using add_idle()/add_timeout() + + This class also tracks whether a function has been scheduled and avoids + scheduling it twice. + """ + def __init__(self, func): + """Create a DelayedFunctionCaller + + :param func: function to call. + """ + self.dc = None + self.func = func + self.name = 'delayed call to %s' % func + + def call_when_idle(self, *args, **kwargs): + """Call our function when we're idle.""" + if self.dc is None: + self.dc = add_idle(self.call_now, self.name, args=args, + kwargs=kwargs) + + def call_after_timeout(self, timeout, *args, **kwargs): + """Call our function after a timeout.""" + if self.dc is None: + self.dc = add_timeout(timeout, self.call_now, self.name, + args=args, kwargs=kwargs) + + def call_now(self, *args, **kwargs): + """Call our function immediately.""" + self.cancel_call() + self.func(*args, **kwargs) + + def cancel_call(self): + """Cancel a timeout/idle callback, if scheduled.""" + if self.dc is not None: + self.dc.cancel() + self.dc = None diff -Nru miro-4.0.4/lib/extensionmanager.py miro-6.0/lib/extensionmanager.py --- miro-4.0.4/lib/extensionmanager.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/extensionmanager.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,6 +34,7 @@ http://develop.participatoryculture.org/index.php/ExtensionSystem """ +import collections import traceback import logging import os @@ -41,10 +42,19 @@ import ConfigParser from miro import app from miro import prefs +from miro import util # need to do this otherwise py2exe won't pick up the api module from miro import api +class ExtensionParseError(StandardError): + """Error when parsing the extension config file. + + This error is raised when ConfigParser can read the file, but we fail to + parse a value from it. + """ + pass + class Extension: def __init__(self): self.name = "Unknown" @@ -55,6 +65,56 @@ self.enabled_by_default = False # whether or not this extension has been loaded self.loaded = False + # maps hook names -> hook functions + self.hooks = {} + + def module_obj(self): + """Gets the module object for this extension. + + If this extension is not loaded, None will be returned + """ + if not self.loaded: + return None + return sys.modules[self.ext_module] + + def add_hook(self, hook_name, hook_string): + """Add a hook to the extension. + + hook_name is the name of the hook to add. + hook_string is a string specifying the function object to call for the + hook. It's in the form of package.module:path.to.obj. + + See api.py for the format for hook_string. 
+ """ + try: + module_string, object_string = hook_string.split(":") + except ValueError: + raise ExtensionParseError("Invalid hook string: %s" % hook_string) + try: + module = util.import_module(module_string) + except ImportError: + raise ExtensionParseError("Can't import module: %s" % module_string) + try: + # We allow extensions to execute arbirary code, so calling eval is + # not any more of a security risk. + hook_func = eval(object_string, module.__dict__) + except StandardError, e: + raise ExtensionParseError("Error loading hook object: %s (%s)" % + (object_string, e)) + self.hooks[hook_name] = hook_func + + def invoke_hook(self, hook_name, *args, **kwargs): + """Invoke a hook for this extension. + + If this extension implements hook_name, the function for that hook + will be called using args and kwargs. The return value or exception + will be passed on. + + :raises KeyError: this extension doesn't implement hook_name + :raises: invoke_hook propogates exceptions from the hook function + """ + hook_func = self.hooks[hook_name] + return hook_func(*args, **kwargs) def __repr__(self): return "%s (%s)" % (self.name, self.version) @@ -100,11 +160,15 @@ e.enabled_by_default = cf.getboolean( "extension", "enabled_by_default") e.enabled = e.enabled_by_default + if cf.has_section("hooks"): + for hook_name in cf.options("hooks"): + e.add_hook(hook_name, cf.get('hooks', hook_name)) extensions.append(e) except (ConfigParser.NoSectionError, ConfigParser.NoOptionError, - ConfigParser.ParsingError): + ConfigParser.ParsingError, + ExtensionParseError): logging.warning("Extension file %s is malformed.\n%s", f, traceback.format_exc()) @@ -124,6 +188,9 @@ # list of all extensions--core and user self.extensions = [] + # maps hook names to set of extensions that implement the hook + self.hook_map = collections.defaultdict(set) + def get_extension_by_name(self, name): for mem in self.extensions: if mem.name == name: @@ -133,6 +200,20 @@ def is_enabled(self, ext): return ext.name in self.enabled_extensions + def _register_hooks(self, ext): + """Register all hooks for an extension.""" + for hook_name in ext.hooks.keys(): + self.hook_map[hook_name].add(ext) + + def _unregister_hooks(self, ext): + """Unregister all hooks for an extension.""" + for hook_name in ext.hooks.keys(): + self.hook_map[hook_name].discard(ext) + + def extensions_for_hook(self, hook_name): + """Get a set of all extensions that implement a hook.""" + return self.hook_map[hook_name] + def should_load(self, ext): if ext.name in self.disabled_extensions: return False @@ -172,7 +253,8 @@ """ logging.info("extension manager: loading: %r", ext) load = getattr(sys.modules[ext.ext_module], "load") - load() + load(api.ExtensionContext(ext.ext_module)) + self._register_hooks(ext) ext.loaded = True def disable_extension(self, ext): @@ -192,6 +274,7 @@ """ if not ext.ext_module in sys.modules: return + self._unregister_hooks(ext) logging.info("extension manager: unloading: %r", ext) unload = getattr(sys.modules[ext.ext_module], "unload") unload() @@ -203,9 +286,10 @@ extensions = [] for d in self.core_ext_dirs: logging.info("Loading core extensions in %s", d) + if d not in sys.path: + sys.path.insert(0, d) exts = get_extensions(d) if exts: - sys.path.insert(0, d) extensions.extend(exts) self.core_extensions = list(extensions) @@ -220,9 +304,10 @@ continue logging.info("Loading user extensions in %s", d) + if d not in sys.path: + sys.path.insert(0, d) exts = get_extensions(d) if exts: - sys.path.insert(0, d) extensions.extend(exts) self.extensions 
= extensions diff -Nru miro-4.0.4/lib/feedparserutil.py miro-6.0/lib/feedparserutil.py --- miro-4.0.4/lib/feedparserutil.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/feedparserutil.py 2013-04-05 16:02:42.000000000 +0000 @@ -42,7 +42,6 @@ from miro import filetypes from miro import flashscraper from miro import util -from miro.datastructures import Fifo # values from feedparser dicts that don't have to convert in # normalize_feedparser_dict() diff -Nru miro-4.0.4/lib/feed.py miro-6.0/lib/feed.py --- miro-4.0.4/lib/feed.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/feed.py 2013-04-05 16:02:42.000000000 +0000 @@ -57,13 +57,12 @@ from miro import download_utils from miro import eventloop from miro import feedupdate -from miro import flashscraper from miro import models from miro import prefs from miro.plat import resources from miro import downloader from miro.util import (returns_unicode, returns_filename, unicodify, check_u, - check_f, quote_unicode_url, escape, to_uni, + check_f, quote_unicode_url, to_uni, is_url, stringify, is_magnet_uri) from miro import fileutil from miro.plat.utils import filename_to_unicode, make_url_safe, unmake_url_safe @@ -78,8 +77,6 @@ # the unittests to speed things up _RUN_FEED_PARSER_INLINE = False -WHITESPACE_PATTERN = re.compile(r"^[ \t\r\n]*$") - DEFAULT_FEED_ICON = "images/icon-podcast-small.png" @returns_unicode @@ -101,7 +98,6 @@ self.last_time = time.time() def check_for_sleep(self): - new_time = time.time() elapsed = self.last_time - time.time() if elapsed < 0.1: return # don't sleep until a decent of time has passed. @@ -237,7 +233,9 @@ else: callback(rv) else: - workerprocess.run_feedparser(html, callback, errback) + workerprocess.send(workerprocess.FeedparserTask(html), + lambda msg, result: callback(result), + lambda msg, error: errback(error)) # Wait X seconds before updating the feeds at startup INITIAL_FEED_UPDATE_DELAY = 5.0 @@ -247,7 +245,7 @@ """ def setup_new(self, url, ufeed, title=None): check_u(url) - if title: + if title is not None: check_u(title) self.url = url self.ufeed = ufeed @@ -303,19 +301,19 @@ if firstTriggerDelay >= 0: self.scheduler = eventloop.add_timeout( firstTriggerDelay, self.update, - "Podcast update (%s)" % self.get_title()) + "Podcast update (%s)" % self.title) else: if self.updateFreq > 0: logging.debug("scheduling update in %s seconds (%s)", self.updateFreq, - self.get_title()) + self.title) self.scheduler = eventloop.add_timeout( self.updateFreq, self.update, - "Podcast update (%s)" % self.get_title()) + "Podcast update (%s)" % self.title) else: logging.debug("updateFreq is %s: skipping update (%s)", self.updateFreq, - self.get_title()) + self.title) def cancel_update_events(self): if hasattr(self, 'scheduler') and self.scheduler is not None: @@ -332,21 +330,6 @@ return default_feed_icon_path() @returns_unicode - def get_title(self): - """Returns the title of the feed - """ - try: - title = self.title - if title is None or WHITESPACE_PATTERN.match(title): - if self.ufeed.baseTitle is not None: - title = self.ufeed.baseTitle - else: - title = self.url - return title - except AttributeError: - return u"" - - @returns_unicode def get_url(self): """Returns the URL of the feed """ @@ -392,7 +375,7 @@ pass def __str__(self): - return "%s - %s" % (self.__class__.__name__, stringify(self.get_title())) + return "%s - %s" % (self.__class__.__name__, stringify(self.title)) def clean_old_items(self): """ @@ -436,17 +419,17 @@ self.maxNew = 3 self.maxOldItems = None self.expire = u"system" - self.expireTime = 
None + self.expire_timedelta = None self.fallBehind = -1 - self.last_viewed = datetime.min self.baseTitle = None - self.origURL = url + self.orig_url = url self.errorState = False self.loading = True self._actualFeed = None self._set_feed_impl(FeedImpl(url, self, title)) self.setup_new_icon_cache() + self.thumbnail_path = None self.informOnError = True self.folder_id = None self.searchTerm = search_term @@ -483,7 +466,7 @@ pass # otherwise, make a new FeedImpl if self._actualFeed is None: - self._set_feed_impl(FeedImpl(self.origURL, self)) + self._set_feed_impl(FeedImpl(self.orig_url, self)) self.signal_change() return self._actualFeed @@ -491,15 +474,15 @@ @classmethod def get_by_url(cls, url): - return cls.make_view('origURL=?', (url,)).get_singleton() + return cls.make_view('orig_url=?', (url,)).get_singleton() @classmethod def get_by_url_and_search(cls, url, searchTerm): if searchTerm is not None: - view = cls.make_view('origURL=? AND searchTerm=?', + view = cls.make_view('orig_url=? AND searchTerm=?', (url, searchTerm)) else: - view = cls.make_view('origURL=? AND searchTerm IS NULL', (url,)) + view = cls.make_view('orig_url=? AND searchTerm IS NULL', (url,)) return view.get_singleton() @classmethod @@ -528,7 +511,7 @@ @classmethod def watched_folder_view(cls): - return cls.make_view("origURL LIKE 'dtv:directoryfeed:%'") + return cls.make_view("orig_url LIKE 'dtv:directoryfeed:%'") def on_db_insert(self): self.generate_feed(True) @@ -626,26 +609,18 @@ self.auto_pending_items.count()) return self._num_available - def get_viewed(self): - """Returns true iff this feed has been looked at - """ - return self.last_viewed != datetime.min - def mark_as_viewed(self): """Sets the last time the feed was viewed to now """ - # get the list of available items before we reset the time - available_items = list(self.available_items) - self.last_viewed = datetime.now() try: del self._num_available except AttributeError: pass + for item in list(self.available_items): + item.unset_new() if self.in_folder(): self.get_folder().signal_change() self.signal_change() - for item in available_items: - item.signal_change(needs_save=False) def start_manual_download(self): next_ = None @@ -681,7 +656,7 @@ return [] delta = timedelta(days=expire_after_x_days) else: - delta = self.expireTime + delta = self.expire_timedelta return models.Item.feed_expiring_view(self.id, datetime.now() - delta) def expire_items(self): @@ -698,11 +673,12 @@ """See item.get_thumbnail to figure out which items to send signals for. 
""" - self.signal_change(needs_save=False) + self.calc_thumbnail_path() + self.signal_change() for item in self.items: if not item.icon_cache or not (item.icon_cache.is_valid() or item.screenshot or - item.isContainerItem): + item.is_container_item): item.signal_change(needs_save=False) def get_id(self): @@ -717,12 +693,20 @@ return self.loading or (self.actualFeed and self.actualFeed.updating) @returns_unicode - def get_title(self): + def get_title_without_search_terms(self): if self.userTitle is not None: return self.userTitle + elif self.actualFeed.title is not None: + return self.actualFeed.title + elif self.baseTitle is not None: + return self.baseTitle + else: + return self.actualFeed.url - title = self.actualFeed.get_title() - if self.searchTerm is not None: + @returns_unicode + def get_title(self): + title = self.get_title_without_search_terms() + if self.userTitle is None and self.searchTerm is not None: title = u"%s for '%s'" % (title, self.searchTerm) return title @@ -777,7 +761,7 @@ """ self.confirm_db_thread() self.expire = type_ - self.expireTime = timedelta(hours=time_) + self.expire_timedelta = timedelta(hours=time_) if self.expire == u"never": for item in self.items: @@ -861,35 +845,36 @@ def generate_feed(self, removeOnError=False): newFeed = None - if self.origURL == u"dtv:directoryfeed": + if self.orig_url == u"dtv:directoryfeed": newFeed = DirectoryFeedImpl(self) self.visible = False - elif (self.origURL.startswith(u"dtv:directoryfeed:")): - url = self.origURL[len(u"dtv:directoryfeed:"):] + elif (self.orig_url.startswith(u"dtv:directoryfeed:")): + url = self.orig_url[len(u"dtv:directoryfeed:"):] dir_ = unmake_url_safe(url) newFeed = DirectoryWatchFeedImpl(self, dir_) - elif self.origURL == u"dtv:search": + elif self.orig_url == u"dtv:search": newFeed = SearchFeedImpl(self) self.visible = False - elif self.origURL == u"dtv:searchDownloads": + elif self.orig_url == u"dtv:searchDownloads": newFeed = SearchDownloadsFeedImpl(self) self.visible = False - elif self.origURL == u"dtv:manualFeed": + elif self.orig_url == u"dtv:manualFeed": newFeed = ManualFeedImpl(self) self.visible = False - elif SEARCH_URL_MATCH_RE.match(self.origURL): - newFeed = SavedSearchFeedImpl(self.origURL, self) + elif SEARCH_URL_MATCH_RE.match(self.orig_url): + newFeed = SavedSearchFeedImpl(self.orig_url, self) else: - self.download = grab_url(self.origURL, + self.download = grab_url(self.orig_url, lambda info: self._generate_feed_callback(info, removeOnError), lambda error: self._generate_feed_errback(error, removeOnError), default_mime_type=u'application/rss+xml') - logging.debug("added async callback to create feed %s", self.origURL) + logging.debug("added async callback to create feed %s", + self.orig_url) if newFeed: self.finish_generate_feed(newFeed) def is_watched_folder(self): - return self.origURL.startswith("dtv:directoryfeed:") + return self.orig_url.startswith("dtv:directoryfeed:") def _handle_feed_loading_error(self, errorDescription): self.download = None @@ -925,7 +910,7 @@ if not self.id_exists(): return logging.warning("Couldn't load podcast at %s (%s)", - self.origURL, error) + self.orig_url, error) self._handle_feed_loading_error(error.getFriendlyDescription()) def _generate_feed_callback(self, info, removeOnError): @@ -940,8 +925,8 @@ if not self.id_exists(): return - if info['updated-url'] != self.origURL and \ - not self.origURL.startswith('dtv:'): # we got redirected + if info['updated-url'] != self.orig_url and \ + not self.orig_url.startswith('dtv:'): # we got redirected f = 
lookup_feed(info['updated-url'], self.searchTerm) if f is not None: # already have this feed, so delete us self.remove() @@ -1129,12 +1114,12 @@ """ self.confirm_db_thread() expireAfterSetting = app.config.get(prefs.EXPIRE_AFTER_X_DAYS) - if ((self.expireTime is None or self.expire == 'never' + if ((self.expire_timedelta is None or self.expire == 'never' or (self.expire == 'system' and expireAfterSetting <= 0))): return 0 else: - return (self.expireTime.days * 24 + - self.expireTime.seconds / 3600) + return (self.expire_timedelta.days * 24 + + self.expire_timedelta.seconds / 3600) def is_autodownloadable(self): """Returns true iff item is autodownloadable @@ -1176,16 +1161,23 @@ self.get_folder().signal_change() def thumbnail_valid(self): - return self.icon_cache and self.icon_cache.is_valid() + return self.thumbnail_path is not None @returns_filename def get_thumbnail_path(self): - self.confirm_db_thread() if self.thumbnail_valid(): - return fileutil.expand_filename(self.icon_cache.get_filename()) + return self.thumbnail_path else: return self.actualFeed.default_thumbnail_path() + def calc_thumbnail_path(self): + self.confirm_db_thread() + if self.icon_cache and self.icon_cache.is_valid(): + self.thumbnail_path = fileutil.expand_filename( + self.icon_cache.get_filename()) + else: + self.thumbnail_path = None + def has_downloaded_items(self): return self.num_downloaded() > 0 @@ -1229,7 +1221,7 @@ feed_id=self.ufeed.id, channel_title=channel_title) else: item = models.Item(fp_values, feed_id=self.ufeed.id, - eligibleForAutoDownload=not self.initialUpdate, + eligible_for_autodownload=not self.initialUpdate, channel_title=channel_title) if not item.matches_search(self.ufeed.searchTerm): item.remove() @@ -1258,11 +1250,22 @@ if channel_title != None and self._allow_feed_to_override_title(): self.title = channel_title - if (parsed.feed.has_key('image') and - parsed.feed.image.has_key('url') and - self._allow_feed_to_override_thumbnail()): - self.thumbURL = parsed.feed.image.url - self.ufeed.icon_cache.request_update(is_vital=True) + if parsed.feed.has_key('image'): + image = parsed.feed['image'] + image_url = None + if isinstance(image, dict): + if 'url' in image: + image_url = image['url'] + elif 'href' in image: + image_url = image['href'] + elif isinstance(image, basestring): + image_url = image + else: + logging.warn('strange image value from %r: %r', + self.url, image) + if image_url and self._allow_feed_to_override_thumbnail(): + self.thumbURL = image_url + self.ufeed.icon_cache.request_update(is_vital=True) items_byid = {} items_byURLTitle = {} @@ -1337,7 +1340,7 @@ if self.initialUpdate: self.initialUpdate = False for latest in models.Item.latest_in_feed_view(self.ufeed_id): - latest.eligibleForAutoDownload = True + latest.eligible_for_autodownload = True latest.signal_change() if self.ufeed.is_autodownloadable(): self.ufeed.mark_as_viewed() @@ -1373,7 +1376,7 @@ candidates = [] for item in self.old_items: if item.downloader is None: - candidates.append((item.creationTime, item)) + candidates.append((item.creation_time, item)) candidates.sort() for time_, item in candidates[:extra]: item.remove() @@ -1552,7 +1555,7 @@ self.modified = {} self.download_dc = {} self.updating = 0 - self.urls = self.calc_urls() + self._urls = None def setup_restored(self): """Called by pickle during deserialization @@ -1560,7 +1563,13 @@ RSSFeedImplBase.setup_restored(self) self.download_dc = {} self.updating = 0 - self.urls = self.calc_urls() + self._urls = None + + @property + def urls(self): + if 
self._urls is None: + self._urls = self.calc_urls() + return self._urls def calc_urls(self): """Calculate the list of URLs to parse. @@ -1630,6 +1639,7 @@ etag=etag, modified=modified, default_mime_type=u'application/rss+xml',) self.updating += 1 + self.ufeed.signal_change(needs_save=False) def _update_errback(self, error, url): if not self.ufeed.id_exists(): @@ -1745,7 +1755,7 @@ self.linkHistory[url] = self.tempHistory[url] self.tempHistory = {} - def get_html(self, urlList, depth=0, linkNumber=0, top=False): + def get_html(self, urlList, depth=0, link_number=0, top=False): """Grabs HTML at the given URL, then processes it """ url = urlList.pop(0) @@ -1760,7 +1770,7 @@ return self.downloads.discard(download) try: - self.process_downloaded_html(info, urlList, depth, linkNumber, + self.process_downloaded_html(info, urlList, depth, link_number, top) finally: self.check_done() @@ -1775,7 +1785,7 @@ modified=modified, default_mime_type='text/html') self.downloads.add(download) - def process_downloaded_html(self, info, urlList, depth, linkNumber, + def process_downloaded_html(self, info, urlList, depth, link_number, top=False): self.ufeed.confirm_db_thread() #print "Done grabbing %s" % info['updated-url'] @@ -1793,11 +1803,11 @@ else: subLinks = self.scrape_links(info['body'], info['redirected-url'], setTitle=top) if top: - self.process_links(subLinks, 0, linkNumber) + self.process_links(subLinks, 0, link_number) else: - self.process_links(subLinks, depth+1, linkNumber) + self.process_links(subLinks, depth+1, link_number) if len(urlList) > 0: - self.get_html(urlList, depth, linkNumber) + self.get_html(urlList, depth, link_number) def check_done(self): if len(self.downloads) == 0: @@ -1806,7 +1816,7 @@ self.ufeed.signal_change() self.schedule_update_events(-1) - def add_video_item(self, link, dict_, linkNumber): + def add_video_item(self, link, dict_, link_number): link = unicodify(link.strip()) if dict_.has_key('title'): title = dict_['title'] @@ -1828,14 +1838,14 @@ 'enclosures': [FeedParserDict({'url': link})] }) i = models.Item(FeedParserValues(fp_dict), - linkNumber=linkNumber, feed_id=self.ufeed.id, - eligibleForAutoDownload=False) + link_number=link_number, feed_id=self.ufeed.id, + eligible_for_autodownload=False) if ((self.ufeed.searchTerm is not None and not i.matches_search(self.ufeed.searchTerm))): i.remove() return - def process_links(self, links, depth=0, linkNumber=0): + def process_links(self, links, depth=0, link_number=0): # FIXME: compound names for titles at each depth?? 
maxDepth = 2 urls = links[0] @@ -1846,8 +1856,8 @@ if depth < maxDepth: for link in urls: if depth == 0: - linkNumber += 1 - #print "Processing %s (%d)" % (link,linkNumber) + link_number += 1 + #print "Processing %s (%d)" % (link,link_number) # FIXME: Using file extensions totally breaks the # standard and won't work with Broadcast Machine or @@ -1878,9 +1888,9 @@ mimetype == "application/ogg" or mimetype == "application/x-annodex" or mimetype == "application/x-bittorrent"): - self.add_video_item(link, links[link], linkNumber) + self.add_video_item(link, links[link], link_number) if len(newURLs) > 0: - self.get_html(newURLs, depth, linkNumber) + self.get_html(newURLs, depth, link_number) def on_remove(self): for download in self.downloads: @@ -2093,7 +2103,11 @@ self.handle_watcher_updates, "handle directory watcher updates") + @eventloop.idle_iterator def handle_watcher_updates(self): + # If we are not longer valid just return + if not self.ufeed.id_exists(): + return # find deleted paths that we have items for to_remove = [] for item in self.items: @@ -2103,16 +2117,25 @@ known_files = self.calc_known_files() for x in self.items: known_files.add_path(x.get_filename()) - to_add = self._filter_paths(self._watcher_paths_added, known_files) + to_add = [] + start = time.time() + for f in self._filter_paths(self._watcher_paths_added, known_files): + to_add.append(f) + if time.time() - start > 0.4: + yield + if not self.id_exists(): + return + start = time.time() # commit changes - app.bulk_sql_manager.start() - try: - for item in to_remove: - item.remove() - for path in to_add: - self._make_child(path) - finally: - app.bulk_sql_manager.finish() + with app.local_metadata_manager.bulk_add(): + app.bulk_sql_manager.start() + try: + for item in to_remove: + item.remove() + for path in to_add: + self._make_child(path) + finally: + app.bulk_sql_manager.finish() # cleanup and prepare for the next change self._watcher_paths_deleted = set() self._watcher_paths_added = set() @@ -2131,24 +2154,50 @@ self._add_known_files(known_files) return known_files + # Subclass may override this to implement asynchronous preparation + # if it is long-running + def schedule_update(self): + self.do_update() + def update(self): + self.ufeed.confirm_db_thread() + if not self.ufeed.id_exists(): + return + if not self.updating: self.updating = True - self.do_update() + self.schedule_update() @eventloop.idle_iterator def do_update(self): - self.ufeed.confirm_db_thread() + + def should_halt_early(): + """Check if we should halt before completing the entire update. + + This should be called after each yield statement. 
+ """ + return not self.id_exists() + + if should_halt_early(): + return self._before_update() known_files = self.calc_known_files() my_files = set() + my_items = list(self.items) + # pause after doing prep work + yield + if should_halt_early(): + return # Remove items with deleted files or that that are in feeds to_remove = [] duplicate_paths = [] - for item in self.items: + start = time.time() + for item in my_items: + if not item.id_exists(): + continue filename = item.get_filename() if (filename is None or not fileutil.isfile(filename) or @@ -2159,6 +2208,11 @@ else: duplicate_paths.append(filename) to_remove.append(item) + if time.time() - start > 0.4: + yield + if should_halt_early(): + return + start = time.time() if duplicate_paths: app.controller.failed_soft("scanning directory", "duplicate paths in directory watcher: %s (impl: %s" % @@ -2166,7 +2220,8 @@ app.bulk_sql_manager.start() try: for item in to_remove: - item.remove() + if item.id_exists(): + item.remove() finally: app.bulk_sql_manager.finish() @@ -2180,10 +2235,25 @@ # files on the filesystem scan_dir = self._scan_dir() if fileutil.isdir(scan_dir) and not is_file_bundle(scan_dir): - all_files = fileutil.miro_allfiles(scan_dir) - to_add = self._filter_paths(all_files, known_files) - for path in to_add: - app.metadata_progress_updater.will_process_path(path) + all_files = [] + start = time.time() + for f in fileutil.miro_allfiles(scan_dir): + all_files.append(f) + if time.time() - start > 0.4: + yield + if should_halt_early(): + return + start = time.time() + start = time.time() + to_add = [] + for path in self._filter_paths(all_files, known_files): + to_add.append(path) + if time.time() - start > 0.4: + yield + if should_halt_early(): + return + start = time.time() + # Keep track of the paths we will add in case we get directory # watcher updates. In that case, we want these paths to be in # known_files. It's very important that the next line come before @@ -2192,15 +2262,14 @@ path_iter = iter(to_add) finished = False yield # yield after doing prep work - try: + if should_halt_early(): + return + with app.local_metadata_manager.bulk_add(): while not finished: - finished = self._add_batch_of_videos(path_iter, 0.2) + finished = self._add_batch_of_videos(path_iter, 0.1) yield # yield after each batch - except ObjectNotFoundError: - # whoops, we disappeared! 
clean up and quit - for path in path_iter: - app.metadata_progress_updater.path_processed(path) - return + if should_halt_early(): + return self._after_update() self.updating = False self.pending_paths_to_add = [] @@ -2231,13 +2300,8 @@ This method removes items from paths if they are in known_files or they are not media files """ - rv = [] - for path in paths: - ufile = filename_to_unicode(path) - if (not known_files.contains_path(path) and - filetypes.is_media_filename(ufile)): - rv.append(path) - return rv + return (p for p in paths if not known_files.contains_path(p) and + filetypes.is_media_filename(filename_to_unicode(p))) class DirectoryWatchFeedImpl(DirectoryScannerImplBase): def setup_new(self, ufeed, directory): @@ -2283,19 +2347,36 @@ """ def setup_new(self, ufeed): DirectoryScannerImplBase.setup_new(self, url=u"dtv:directoryfeed", - ufeed=ufeed, title=None) + ufeed=ufeed, title=u"") self.set_update_frequency(5) self.schedule_update_events(0) + self._prepare_count = 0 self.start_watching_directory() def setup_restored(self): + self._prepare_count = 0 DirectoryScannerImplBase.setup_restored(self) self.start_watching_directory() - def _before_update(self): + def schedule_update(self): + self._async_prepare() + + def _async_prepare(self): # Make sure container items have created FileItems for their contents + # + # Since find_new_children() is an idle iterator we can safely + # count on the call returning immediately and at the end of the loop for container in models.Item.containers_view(): - container.find_new_children() + container.find_new_children(callback=self._async_prepare_complete) + self._prepare_count += 1 + + def _async_prepare_complete(self, unused): + if not self.id_exists(): + return + self._prepare_count -= 1 + if self._prepare_count == 0: + # All done? Go to the do_update() + self.do_update() def _calc_known_files(self): pass @@ -2314,9 +2395,6 @@ def _scan_dir(self): return app.config.get(prefs.MOVIES_DIRECTORY) - @returns_unicode - def get_title(self): - return _('Local Files') class SearchFeedImpl(RSSMultiFeedBase): """Search and Search Results feeds @@ -2331,7 +2409,6 @@ self.set_update_frequency(-1) self.ufeed.autoDownloadable = False # keeps the items from being seen as 'newly available' - self.ufeed.last_viewed = datetime.max self.ufeed.signal_change() def setup_restored(self): @@ -2355,7 +2432,7 @@ item.remove() finally: app.bulk_sql_manager.finish() - self.urls = [] + self._urls = [] self.searching = False if set_engine is not None: self.engine = set_engine @@ -2385,7 +2462,7 @@ self.searching = True self.engine = engine self.query = query - self.urls = self.calc_urls() + self._urls = None self.update() self.ufeed.signal_change() @@ -2429,34 +2506,21 @@ # the dtv:search feed should never automatically update. return - @returns_unicode - def get_title(self): - return _('Search') - class SearchDownloadsFeedImpl(FeedImpl): def setup_new(self, ufeed): FeedImpl.setup_new(self, url=u'dtv:searchDownloads', ufeed=ufeed, - title=None) + title=_('Search')) self.set_update_frequency(-1) - @returns_unicode - def get_title(self): - return _('Search') class ManualFeedImpl(FeedImpl): """Downloaded Videos/Torrents that have been added using by the user opening them with democracy. 
""" def setup_new(self, ufeed): - FeedImpl.setup_new(self, url=u'dtv:manualFeed', ufeed=ufeed, - title=None) + FeedImpl.setup_new(self, url=u'dtv:manualFeed', ufeed=ufeed, title=u"") self.ufeed.expire = u'never' self.set_update_frequency(-1) - self.ufeed.last_viewed = datetime.max - - @returns_unicode - def get_title(self): - return _('Local Files') LINK_PATTERN = re.compile("<(a|embed)\s[^>]*(href|src)\s*=\s*\"([^\"]*)\"[^>]*>(.*?)", re.S) diff -Nru miro-4.0.4/lib/feedupdate.py miro-6.0/lib/feedupdate.py --- miro-4.0.4/lib/feedupdate.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/feedupdate.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,14 +34,15 @@ to 3. """ +import collections + from miro import eventloop -from miro import datastructures MAX_UPDATES = 3 class FeedUpdateQueue(object): def __init__(self): - self.update_queue = datastructures.Fifo() + self.update_queue = collections.deque() self.timeouts = {} self.callback_handles = {} self.currently_updating = set() @@ -61,7 +62,7 @@ def do_update(self, feed, update_callback): del self.timeouts[feed.id] - self.update_queue.enqueue((feed, update_callback)) + self.update_queue.append((feed, update_callback)) self.run_update_queue() def update_finished(self, feed): @@ -76,7 +77,7 @@ def run_update_queue(self): while (len(self.update_queue) > 0 and len(self.currently_updating) < MAX_UPDATES): - feed, update_callback = self.update_queue.dequeue() + feed, update_callback = self.update_queue.popleft() if feed in self.currently_updating: continue handle = feed.connect('update-finished', self.update_finished) diff -Nru miro-4.0.4/lib/filetags.py miro-6.0/lib/filetags.py --- miro-4.0.4/lib/filetags.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/filetags.py 2013-04-05 16:02:42.000000000 +0000 @@ -33,10 +33,12 @@ import logging import struct import mutagen +import urllib from miro import coverart from miro import filetypes from miro import app +from miro.plat.utils import PlatformFilenameType # increment this after adding to TAGS_FOR_ATTRIBUTE or changing read_metadata() in a way # that will increase data identified (will not change values already extracted) @@ -87,10 +89,7 @@ """ if 'length' in info: return int(round(info['length'] * 1000)) - try: # find approximate length of FLAC file - return int(round(muta.seektable.seekpoints[-1][1] / 100.0)) - except (KeyError, AttributeError, TypeError, IndexError): - logging.debug(muta.seektable.seekpoints[-1][1] / 100.0) + else: return None def _mediatype_from_mime(mimes): @@ -185,16 +184,28 @@ if not char.isdigit(): break initial_int.append(char) - num = int(''.join(initial_int) or 0) - if num > 0: - return num % 100 # handle e.g. '204' meaning disc 2, track 04 + if len(initial_int) > 0 and len(initial_int) < 4: + number = ''.join(initial_int[-2:]) # e.g. '204' is disc 2, track 04 + return int(number) -def _make_cover_art_file(track_path, objects): +def _make_cover_art_file(album_name, objects, cover_art_directory): """Given an iterable of mutagen cover art objects, returns the path to a newly-created file created from one of the objects. If given more than one - object, uses the one most likely to be cover art. If none of the objects are - usable, returns None. + object, uses the one most likely to be cover art. 
+ + :returns: tuple (path, newly_created) or None if we didn't create a path """ + if album_name is None: + return None + if cover_art_directory is None: + cover_art_directory = app.config.get(prefs.COVER_ART_DIRECTORY) + # quote the album so that the filename doesn't have any illegal characters + # in it. + dest_filename = calc_cover_art_filename(album_name) + path = os.path.join(cover_art_directory, dest_filename) + if os.path.exists(path): + # already made cover art, no need to make it again + return path, False if not isinstance(objects, list): objects = [objects] @@ -208,7 +219,7 @@ else: images.append(image) if not images: - return + return None cover_image = None for candidate in images: @@ -219,8 +230,12 @@ # no attached image is definitively cover art. use the first one. cover_image = images[0] - path = cover_image.write_to_file(track_path) - return path + try: + cover_image.write_to_file(path) + except EnvironmentError: + logging.warn("Couldn't write cover art file: {0}".format(path)) + return None + return path, True MUTAGEN_ERRORS = None def _setup_mutagen_errors(): @@ -232,19 +247,26 @@ oggtheora.error, oggvorbis.error, trueaudio.error, _vorbis.error) _setup_mutagen_errors() -def read_metadata(filename, test=False): - """This is the external interface of the filetags module. Given a filename, - this function returns a tuple of (mediatype [a string], duration [integer - number of milliseconds(?)], data [dict of attributes to set on the item], - cover_art [filename]). - - Both the interface and the implementation are in need of substantial - reworking. I have a replacement in the works (with write support!) but have - pushed it off for 4.1 since this is generally functional. The root of the - problem is that I have tried to write one function that handles all the - different mutagen metadata objects; the new approach will be to wrap each - mutagen object in a different wrapper subclass, with all the wrappers - sharing a common interface. --KCW +def calc_cover_art_filename(album_name): + """Get the filename we will use to store cover art for an album. + + :returns: PlatformFilenameType + """ + + # quote the album name to avoid characters that are unsafe for the + # filesystem. 
Chars that are safe on all platforms shouldn't be touched + # though + ascii_filename = urllib.quote(album_name.encode('utf-8'), safe=' ,.') + # since the filename is ASCII it should be safe to convert to any platform + # filename type + return PlatformFilenameType(ascii_filename) + +def process_file(filename, cover_art_directory): + """Send a file through mutagen + + :param filename: path to the media file + :param cover_art_directory: directory to store cover art in + :returns: dict of metadata """ try: muta = mutagen.File(filename) @@ -288,9 +310,10 @@ with_exception=True) else: if muta: - return _parse_mutagen(filename, muta, test) + return _parse_mutagen(filename, muta, cover_art_directory) + return {} -def _parse_mutagen(filename, muta, test): +def _parse_mutagen(filename, muta, cover_art_directory): meta = muta.__dict__ tags = meta['tags'] if hasattr(tags, '__dict__') and '_DictProxy__dict' in tags.__dict__: @@ -301,10 +324,11 @@ if hasattr(muta, 'info'): info = muta.info.__dict__ - duration = _get_duration(muta, info) - mediatype = _get_mediatype(muta, filename, info, tags) - data = {} + data = { + 'duration': _get_duration(muta, info), + 'file_type': _get_mediatype(muta, filename, info, tags), + } for file_tag, value in tags.iteritems(): try: file_tag = _sanitize_key(file_tag) @@ -339,18 +363,16 @@ if guessed_track: data['track'] = guessed_track - cover_art = None + cover_art_info = None if hasattr(muta, 'pictures'): image_data = muta.pictures - if test: - cover_art = True - else: - cover_art = _make_cover_art_file(filename, image_data) + cover_art_info = _make_cover_art_file(data.get('album'), image_data, + cover_art_directory) elif 'cover_art' in data: image_data = data['cover_art'] - if test: - cover_art = True - else: - cover_art = _make_cover_art_file(filename, image_data) + cover_art_info = _make_cover_art_file(data.get('album'), image_data, + cover_art_directory) del data['cover_art'] - return mediatype, duration, data, cover_art + if cover_art_info is not None: + data['cover_art'], data['created_cover_art'] = cover_art_info + return data diff -Nru miro-4.0.4/lib/filetypes.py miro-6.0/lib/filetypes.py --- miro-4.0.4/lib/filetypes.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/filetypes.py 2013-04-05 16:02:42.000000000 +0000 @@ -41,6 +41,7 @@ '.rmvb', '.mkv', '.m2v', '.ogm', '.webm', '.ogx'] AUDIO_EXTENSIONS = ['.mp3', '.m4a', '.wma', '.mka', '.flac', '.ogg', '.oga'] FEED_EXTENSIONS = ['.xml', '.rss', '.atom'] +TORRENT_EXTENSIONS = ['.torrent'] OTHER_EXTENSIONS = ['.pdf', '.txt', '.html', '.doc', '.bmp', '.gif', '.jpg', '.jpeg', '.png', '.psd', '.tif', '.tiff',] SUBTITLES_EXTENSIONS = ['.srt', '.sub', '.ass', '.ssa', '.smil', '.cmml'] @@ -74,7 +75,19 @@ 'application/x-magnet': ['.magnet'], 'audio/x-mpegurl': ['.m3u'], - 'audio/x-amzxml': ['.amz'] + 'audio/x-amzxml': ['.amz'], + + 'application/vnd.emusic-emusic_list': ['.emx'] +} + +KNOWN_MIME_TYPES = (u'audio', u'video') +KNOWN_MIME_SUBTYPES = ( + u'.mov', u'.wmv', u'.mp4', u'.mp3', + u'.mpg', u'.mpeg', u'.avi', u'.x-flv', + u'.x-msvideo', u'.m4v', u'.mkv', u'.m2v', u'.ogg' + ) +MIME_SUBSITUTIONS = { + u'QUICKTIME': u'MOV', } EXT_MIMETYPES_MAP = {} @@ -91,6 +104,14 @@ """ return (mimetype in MIMETYPES_EXT_MAP.keys()) +def is_download_mimetype(mimetype): + return mimetype in ('application/vnd.emusic-emusic_list', + 'audio/x-amzxml') + +def is_download_url(url): + from miro import emusic, amazon + return emusic.is_emusic_url(url) or amazon.is_amazon_url(url) + def is_allowed_filename(filename): """ Pass a filename to 
this method and it will return a boolean
@@ -99,8 +120,7 @@
     return (is_video_filename(filename)
             or is_audio_filename(filename)
             or is_torrent_filename(filename)
-            or filename.endswith('.amz')
-            or filename.endswith('.m3u'))
+            or filename.endswith(('.amz', '.m3u', '.emx')))
 
 def is_playable_filename(filename):
     """
@@ -265,5 +285,36 @@
         return u'video'
     if ext in AUDIO_EXTENSIONS:
         return u'audio'
-    if ext in OTHER_EXTENSIONS:
-        return u'other'
+    return u'other'
+
+def calc_file_format(filename, mime_type):
+    """Get a file format string for an item.
+
+    file formats are the user-visible text that describes a file.  Something
+    like "mp3" or "mp4 video".
+
+    :param filename: the file's name (used for its extension).  Can be None
+        if it's not known
+    :param mime_type: mime type for the file.  Can be None if this isn't known
+    :returns: file format string, or None if we can't calculate it
+    """
+    if filename is not None:
+        extension = os.path.splitext(filename)[1].lower()
+        # Hack for mp3s, "mpeg audio" isn't clear enough
+        if extension == '.mp3':
+            return u'.mp3'
+    else:
+        extension = None
+    if mime_type is not None and '/' in mime_type:
+        mtype, subtype = mime_type.split('/', 1)
+        mtype = mtype.lower()
+        if mtype in KNOWN_MIME_TYPES:
+            format = subtype.split(';')[0].upper()
+            if mtype == u'audio':
+                format += u' AUDIO'
+            if format.startswith(u'X-'):
+                format = format[2:]
+            return (u'.%s' %
+                    MIME_SUBSITUTIONS.get(format, format).lower())
+    if extension in KNOWN_MIME_SUBTYPES:
+        return unicode(extension)
+    return None
diff -Nru miro-4.0.4/lib/fileutil.py miro-6.0/lib/fileutil.py
--- miro-4.0.4/lib/fileutil.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/fileutil.py	2013-04-05 16:02:42.000000000 +0000
@@ -122,6 +122,23 @@
         path = collapse_filename(path)
     return path
 
+def copy_with_progress(input_path, output_path, block_size=32*1024):
+    flags = os.O_WRONLY | os.O_CREAT
+    if hasattr(os, 'O_SYNC'):
+        flags |= os.O_SYNC
+    output_fd = os.open(output_path, flags)
+    with file(input_path, 'rb') as input:
+        with os.fdopen(output_fd, 'wb') as output:
+            data = input.read(block_size)
+            while data:
+                output.write(data)
+                result = yield len(data)
+                if result:
+                    # return True to cancel.  NB: you should probably remove
+                    # the output file
+                    break
+                data = input.read(block_size)
+
 try:
     samefile = os.path.samefile
 except AttributeError:
@@ -184,7 +201,7 @@
     else:
         callback()
 
-class DeletesInProgressTracker(set):
+class DeletesInProgressTracker(object):
     def __init__(self):
         self.set = set()
     def normalize(self, path):
@@ -198,7 +215,7 @@
 
 deletes_in_progress = DeletesInProgressTracker()
 
-def delete(path, retry_after=10, retry_for=60):
+def delete(path, retry_after=10, retry_for=60, firsttime=True):
     """Try to delete a file or directory.  If this fails because the file is
     open, we retry deleting the file every so often  This probably only
     makes a difference on Windows.
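copy_with_progress() above is written as a generator: each yield hands back the number of bytes just written, and the caller can send a truthy value back to abort mid-copy. A hypothetical driver loop (copy_file, progress_cb and should_cancel are illustrative names, not part of the diff):

    import os

    def copy_file(src, dst, progress_cb=None, should_cancel=lambda: False):
        copier = copy_with_progress(src, dst)
        copied = 0
        try:
            # next() copies the first block; each send() delivers the cancel
            # flag and, if not cancelled, copies the next block.
            chunk = copier.next()
            while True:
                copied += chunk
                if progress_cb is not None:
                    progress_cb(copied)
                chunk = copier.send(should_cancel())
        except StopIteration:
            pass
        if should_cancel():
            # the generator stopped because we cancelled it; per its own
            # comment, the partial output file is ours to remove
            os.remove(dst)
            return False
        return True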
@@ -222,7 +239,14 @@
             logging.info('Retrying delete for %s (%d)', path, retry_after)
             eventloop.add_timeout(retry_after, delete, "Delete File Retry",
                                   args=(path, retry_after,
-                                        retry_for - retry_after))
+                                        retry_for - retry_after, False))
+            if firsttime:
+                from miro.workerprocess import _subprocess_manager
+                if _subprocess_manager.is_running:
+                    logging.debug('restarting subprocess_manager to hopefully '
+                                  'free file references')
+                    _subprocess_manager.restart(clean=True)
+
     else:
         deletes_in_progress.discard(path)
diff -Nru miro-4.0.4/lib/flashscraper.py miro-6.0/lib/flashscraper.py
--- miro-4.0.4/lib/flashscraper.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/flashscraper.py	2013-04-05 16:02:42.000000000 +0000
@@ -40,6 +40,11 @@
 from urllib import unquote_plus, urlencode
 from miro.util import check_u
 
+try:
+    import simplejson as json
+except ImportError:
+    import json
+
 def is_maybe_flashscrapable(url):
     """Returns whether or not the given url is possibly handled by one of
     the flash url converters we have.
@@ -67,7 +72,7 @@
 def _actual_url_callback(url, callback, new_url, content_type, title):
     if new_url:
         check_u(new_url)
-    callback(new_url, contentType=content_type, title=title)
+    callback(new_url, content_type=content_type, title=title)
 
 def _get_scrape_function_for(url):
     check_u(url)
@@ -127,13 +132,11 @@
     stream_map = params["url_encoded_fmt_stream_map"][0].split(",")
     fmt_url_map = dict()
     # strip url= from url=xxxxxx, strip trailer.  Strip duplicate params.
-    for fmt, u in zip(fmt_list, stream_map):
-        o = urlparse.urlsplit(unquote_plus(u[4:]).split(';')[0])
-        qs = urlencode(list(set(urlparse.parse_qsl(o.query))))
-        # Let's put humpty dumpty back together again
-        fmt_url_map[fmt] = urlparse.urlunsplit(
-            urlparse.SplitResult(o.scheme, o.netloc, o.path, qs, o.fragment))
-
+    for fmt, stream_map_data in zip(fmt_list, stream_map):
+        stream_map = cgi.parse_qs(stream_map_data)
+        url_base = stream_map['url'][0]
+        sig_part = '&signature=' + stream_map['sig'][0]
+        fmt_url_map[fmt] = url_base + sig_part
     title = params.get("title", ["No title"])[0]
 
     try:
@@ -283,56 +286,111 @@
         logging.warning("unable to scrape Green peace Video URL %s", url)
         callback(None)
 
-VIMEO_RE = re.compile(r'http://([^/]+\.)?vimeo.com/(\d+)')
+VIMEO_RE = re.compile(r'http://([^/]+\.)?vimeo.com/[^\d]*(\d+)')
 
-def _scrape_vimeo_video_url(url, callback):
+def _scrape_vimeo_video_url(url, callback, countdown=10):
     try:
         id_ = VIMEO_RE.match(url).group(2)
-        url = u"http://www.vimeo.com/moogaloop/load/clip:%s" % id_
+        download_url = 'http://vimeo.com/%s?action=download' % id_
         httpclient.grab_url(
-            url,
-            lambda x: _scrape_vimeo_callback(x, callback),
-            lambda x: _scrape_vimeo_errback(x, callback))
+            download_url,
+            lambda x: _scrape_vimeo_download_callback(x, callback),
+            lambda x: _scrape_vimeo_video_url_try_2(url, callback, id_),
+            extra_headers={
+                'Referer': 'http://vimeo.com/%s' % id_,
+                'X-Requested-With': 'XMLHttpRequest',
+                'User-Agent': ('Mozilla/5.0 (X11; Linux x86_64) '
+                               'AppleWebKit/536.11 (KHTML, like Gecko) '
+                               'Chrome/20.0.1132.8 Safari/536.11')
+            })
    except StandardError:
        logging.exception("Unable to scrape vimeo.com video URL: %s", url)
        callback(None)
 
-MEGALOOP_RE = re.compile(r'http://([^/]+\.)?vimeo.com/moogaloop.swf\?clip_id=(\d+)')
+VIMEO_LINK_RE = re.compile(r'<a href="(?P<url>[^"]+)"[^>]*>\s*(?P<size>\d+)MB')
+
+def _scrape_vimeo_download_callback(info, callback):
+    # pick the largest file linked from the download page
+    largest_url = None
+    size = 0
+    try:
+        for match in VIMEO_LINK_RE.finditer(info['body']):
+            url, trial_size = match.group('url'), int(match.group('size'))
+            if trial_size > size:
+                largest_url, size = url, trial_size
+    except:
+        logging.exception('during parse of Vimeo response for %r',
+                          info['original-url'])
        callback(None)
-VIMEO_CLIP_RE = re.compile(r'http://(?:www\.)?vimeo\.com/moogaloop/load/clip:(?P<id_>\d+)')
+    if largest_url is not None:
+        callback(u'http://vimeo.com/%s' % largest_url,
+                 content_type=u'video/mp4')
+    else:
+        _scrape_vimeo_download_errback("no largest url", callback,
+                                       info['original-url'])
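The reworked YouTube hunk above swaps the old string surgery for cgi.parse_qs(): each comma-separated entry of url_encoded_fmt_stream_map is itself a URL-encoded dict carrying at least url and sig fields, and the signature has to be re-attached as a query parameter. A sketch of that decoding with a fabricated entry:

    import cgi

    SAMPLE_ENTRY = ('url=http%3A%2F%2Fexample.com%2Fvideoplayback%3Fitag%3D22'
                    '&sig=ABCDEF')

    # parse_qs percent-decodes and maps each key to a list of values
    fields = cgi.parse_qs(SAMPLE_ENTRY)
    url_base = fields['url'][0]   # http://example.com/videoplayback?itag=22
    sig_part = '&signature=' + fields['sig'][0]
    print url_base + sig_part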
-def _scrape_vimeo_callback(info, callback):
-    url = info['redirected-url']
+def _scrape_vimeo_video_url_try_2(url, callback, vimeo_id):
+    """Try scraping vimeo URLs by scraping the javascript code.
+
+    This method seems less reliable than the regular method, but it works for
+    private videos.  See #19305
+    """
+    video_url = u'http://vimeo.com/%s' % vimeo_id
+
+    httpclient.grab_url(
+        video_url,
+        lambda x: _scrape_vimeo_download_try_2_callback(x, callback,
+                                                        vimeo_id),
+        lambda x: _scrape_vimeo_download_errback(x, callback, url))
+
+VIMEO_JS_DATA_SCRAPE_RE = re.compile(r'clip[0-9_]+\s*=\s*(.*}});')
+VIMEO_SCRAPE_SIG_RE = re.compile(r'"signature":"([0-9a-fA-F]+)"')
+VIMEO_SCRAPE_TIMESTAMP_RE = re.compile(r'"timestamp":([0-9]+)')
+VIMEO_SCRAPE_FILES_RE = re.compile(r'"files":({[^}]+})')
+
+def _scrape_vimeo_download_try_2_callback(info, callback, vimeo_id):
+    # first step is to find the javascript code that we care about in the HTML
+    # page
+    m = VIMEO_JS_DATA_SCRAPE_RE.search(info['body'])
+    if m is None:
+        logging.warn("Unable to scrape %s for JSON", info['original-url'])
+        callback(None)
+        return
+    json_data = m.group(1)
     try:
-        doc = minidom.parseString(info['body'])
-        id_ = VIMEO_CLIP_RE.match(url).group('id_')
-        req_sig = doc.getElementsByTagName('request_signature').item(0).firstChild.data.decode('ascii', 'replace')
-        req_sig_expires = doc.getElementsByTagName('request_signature_expires').item(0).firstChild.data.decode('ascii', 'replace')
-        url = (u"http://www.vimeo.com/moogaloop/play/clip:%s/%s/%s/?q=" %
-               (id_, req_sig, req_sig_expires))
-        hd_url = url + 'hd'
-        sd_url = url + 'sd'
-        httpclient.grab_headers(hd_url,
-                                lambda x: callback(hd_url),
-                                lambda x: callback(sd_url))
-    except StandardError:
-        logging.exception("Unable to scrape XML for vimeo.com video URL: %s", url)
+        signature = VIMEO_SCRAPE_SIG_RE.search(json_data).group(1)
+        timestamp = VIMEO_SCRAPE_TIMESTAMP_RE.search(json_data).group(1)
+        files_str = VIMEO_SCRAPE_FILES_RE.search(json_data).group(1)
+    except AttributeError:
+        # one of the REs returned None
+        logging.warn("Unable to scrape %s", info['original-url'])
         callback(None)
+        return
+    try:
+        files_data = json.loads(files_str)
+        codec = files_data.keys()[0]
+        quality = files_data[codec][0]
+    except StandardError:
+        logging.warn("Unable to scrape vimeo files variable (%s)",
+                     files_str)
+        callback(None)
+        return
+    url = ('http://player.vimeo.com/play_redirect?'
+ 'clip_id=%s&quality=%s&codecs=%s&time=%s' + '&sig=%s&type=html5_desktop_local' % + (vimeo_id, quality, codec, timestamp, signature)) + logging.debug("_scrape_vimeo_download_try_2_callback scraped URL: %s", + url) + callback(url) -def _scrape_vimeo_errback(err, callback): - logging.warning("Network error scraping vimeo.com video URL") +def _scrape_vimeo_download_errback(err, callback, url): + logging.warning("Unable to scrape %r\nerror: %s", url, err) callback(None) # ============================================================================= @@ -347,5 +405,5 @@ {'pattern': re.compile(r'http://([^/]+\.)?greenpeaceweb.org/GreenpeaceTV1Col.swf'), 'func': _scrape_green_peace_video_url}, {'pattern': re.compile(r'http://([^/]+\.)?break.com/'), 'func': _scrape_break_video_url}, {'pattern': re.compile(r'http://([^/]+\.)?vimeo.com/\d+'), 'func': _scrape_vimeo_video_url}, - {'pattern': re.compile(r'http://([^/]+\.)?vimeo.com/moogaloop.swf'), 'func': _scrape_vimeo_moogaloop_url}, + {'pattern': re.compile(r'http://([^/]+\.)?vimeo.com/moogaloop.swf'), 'func': _scrape_vimeo_video_url}, ] diff -Nru miro-4.0.4/lib/frontends/cli/application.py miro-6.0/lib/frontends/cli/application.py --- miro-4.0.4/lib/frontends/cli/application.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/cli/application.py 2013-04-05 16:02:42.000000000 +0000 @@ -28,19 +28,13 @@ # statement from all source files in the program, then also delete it here. import logging -import os -import sys import platform from miro import app from miro import prefs from miro import startup from miro import controller -from miro import signals from miro import messages -from miro import eventloop -from miro import feed -from miro import workerprocess from miro.frontends.cli.util import print_text, print_box from miro.frontends.cli.events import EventHandler from miro.frontends.cli.interpreter import MiroInterpreter @@ -49,11 +43,7 @@ # this gets called after miro.plat.util.setup_logging, and changes # the logging level so it's way less spammy. logger = logging.getLogger('') - logger.setLevel(logging.WARN) - -def setup_movie_data_program_info(): - from miro.plat.renderers.gstreamerrenderer import movie_data_program_info - app.movie_data_program_info = movie_data_program_info + logger.setLevel(logging.WARN) def run_application(): setup_logging() @@ -68,7 +58,6 @@ print "Build Time: %s" % app.config.get(prefs.BUILD_TIME) print - app.info_updater = InfoUpdater() app.cli_events = EventHandler() app.cli_events.connect_to_signals() startup.install_first_time_handler(app.cli_events.handle_first_time) @@ -81,92 +70,9 @@ app.controller.shutdown() return - setup_movie_data_program_info() messages.FrontendStarted().send_to_backend() print "Startup complete. Type \"help\" for list of commands." app.cli_interpreter = MiroInterpreter() app.cli_interpreter.cmdloop() app.controller.shutdown() - -class InfoUpdaterCallbackList(object): - """Tracks the list of callbacks for InfoUpdater. - """ - - def __init__(self): - self._callbacks = {} - - def add(self, type_, id_, callback): - """Adds the callback to the list for ``type_`` ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - :param callback: the callback function to add - """ - key = (type_, id_) - self._callbacks.setdefault(key, set()).add(callback) - - def remove(self, type_, id_, callback): - """Removes the callback from the list for ``type_`` ``id_``. - - :param type_: the type of the thing (feed, site, ...) 
- :param id_: the id for the thing - :param callback: the callback function to remove - """ - key = (type_, id_) - callback_set = self._callbacks[key] - callback_set.remove(callback) - if len(callback_set) == 0: - del self._callbacks[key] - - def get(self, type_, id_): - """Get the list of callbacks for ``type_``, ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - """ - key = (type_, id_) - if key not in self._callbacks: - return [] - else: - # return a new list of callbacks, so that if we iterate over the - # return value, we don't have to worry about callbacks being - # removed midway. - return list(self._callbacks[key]) - -class InfoUpdater(signals.SignalEmitter): - def __init__(self): - signals.SignalEmitter.__init__(self) - for prefix in ('feeds', 'sites', 'playlists'): - self.create_signal('%s-added' % prefix) - self.create_signal('%s-changed' % prefix) - self.create_signal('%s-removed' % prefix) - - self.item_list_callbacks = InfoUpdaterCallbackList() - self.item_changed_callbacks = InfoUpdaterCallbackList() - - def handle_items_changed(self, message): - callback_list = self.item_changed_callbacks - for callback in callback_list.get(message.type, message.id): - callback(message) - - def handle_item_list(self, message): - callback_list = self.item_list_callbacks - for callback in callback_list.get(message.type, message.id): - callback(message) - - def handle_tabs_changed(self, message): - if message.type == 'feed': - signal_start = 'feeds' - elif message.type == 'site': - signal_start = 'sites' - elif message.type == 'playlist': - signal_start = 'playlists' - else: - return - if message.added: - self.emit('%s-added' % signal_start, message.added) - if message.changed: - self.emit('%s-changed' % signal_start, message.changed) - if message.removed: - self.emit('%s-removed' % signal_start, message.removed) diff -Nru miro-4.0.4/lib/frontends/profilewidgets/tests.py miro-6.0/lib/frontends/profilewidgets/tests.py --- miro-4.0.4/lib/frontends/profilewidgets/tests.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/profilewidgets/tests.py 2013-04-05 16:02:42.000000000 +0000 @@ -205,7 +205,6 @@ 'duration': 0, 'size': 328195214, 'album': u'', - 'media_type_checked': False, 'can_be_saved': False, 'file_format': u'.mov', 'video_path': None, diff -Nru miro-4.0.4/lib/frontends/shell/application.py miro-6.0/lib/frontends/shell/application.py --- miro-4.0.4/lib/frontends/shell/application.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/shell/application.py 2013-04-05 16:02:42.000000000 +0000 @@ -38,10 +38,8 @@ from miro import app from miro import startup from miro import messages -from miro import signals def run_application(): - app.info_updater = InfoUpdater() messages.FrontendMessage.install_handler(MessageHandler()) startup.startup() print 'startup exit' @@ -66,85 +64,3 @@ code.interact(local=imported_objects) app.controller.shutdown() - -class InfoUpdaterCallbackList(object): - """Tracks the list of callbacks for InfoUpdater. - """ - - def __init__(self): - self._callbacks = {} - - def add(self, type_, id_, callback): - """Adds the callback to the list for ``type_`` ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - :param callback: the callback function to add - """ - key = (type_, id_) - self._callbacks.setdefault(key, set()).add(callback) - - def remove(self, type_, id_, callback): - """Removes the callback from the list for ``type_`` ``id_``. 
- - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - :param callback: the callback function to remove - """ - key = (type_, id_) - callback_set = self._callbacks[key] - callback_set.remove(callback) - if len(callback_set) == 0: - del self._callbacks[key] - - def get(self, type_, id_): - """Get the list of callbacks for ``type_``, ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - """ - key = (type_, id_) - if key not in self._callbacks: - return [] - else: - # return a new list of callbacks, so that if we iterate over the - # return value, we don't have to worry about callbacks being - # removed midway. - return list(self._callbacks[key]) - -class InfoUpdater(signals.SignalEmitter): - def __init__(self): - signals.SignalEmitter.__init__(self) - for prefix in ('feeds', 'sites', 'playlists'): - self.create_signal('%s-added' % prefix) - self.create_signal('%s-changed' % prefix) - self.create_signal('%s-removed' % prefix) - - self.item_list_callbacks = InfoUpdaterCallbackList() - self.item_changed_callbacks = InfoUpdaterCallbackList() - - def handle_items_changed(self, message): - callback_list = self.item_changed_callbacks - for callback in callback_list.get(message.type, message.id): - callback(message) - - def handle_item_list(self, message): - callback_list = self.item_list_callbacks - for callback in callback_list.get(message.type, message.id): - callback(message) - - def handle_tabs_changed(self, message): - if message.type == 'feed': - signal_start = 'feeds' - elif message.type == 'site': - signal_start = 'sites' - elif message.type == 'playlist': - signal_start = 'playlists' - else: - return - if message.added: - self.emit('%s-added' % signal_start, message.added) - if message.changed: - self.emit('%s-changed' % signal_start, message.changed) - if message.removed: - self.emit('%s-removed' % signal_start, message.removed) diff -Nru miro-4.0.4/lib/frontends/widgets/addtoplaylistdialog.py miro-6.0/lib/frontends/widgets/addtoplaylistdialog.py --- miro-4.0.4/lib/frontends/widgets/addtoplaylistdialog.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/addtoplaylistdialog.py 2013-04-05 16:02:42.000000000 +0000 @@ -32,7 +32,7 @@ """ from miro.gtcache import gettext as _ -from miro.util import clamp_text, name_sort_key +from miro.util import name_sort_key from miro.plat.frontends.widgets import widgetset from miro.frontends.widgets import widgetutil from miro.frontends.widgets.dialogs import MainDialog @@ -74,7 +74,8 @@ rbg = widgetset.RadioButtonGroup() existing_rb = widgetset.RadioButton(_("Existing playlist:"), rbg) - existing_option = widgetset.OptionMenu([clamp_text(pi.name) for pi in playlists]) + existing_option = widgetset.OptionMenu([pi.name for pi in playlists]) + existing_option.set_size_request(300, -1) choice_table.pack(existing_rb, 0, 0) choice_table.pack(existing_option, 1, 0) diff -Nru miro-4.0.4/lib/frontends/widgets/application.py miro-6.0/lib/frontends/widgets/application.py --- miro-4.0.4/lib/frontends/widgets/application.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/application.py 2013-04-05 16:02:42.000000000 +0000 @@ -33,10 +33,6 @@ It also holds: -* :class:`InfoUpdater` -- tracks channel/item updates from the backend - and sends the information to the frontend -* :class:`InfoUpdaterCallbackList` -- tracks the list of callbacks for - info updater * :class:`WidgetsMessageHandler` -- frontend message handler * 
:class:`DisplayStateStore` -- stores state of each display """ @@ -51,6 +47,7 @@ from miro import app from miro import config from miro import crashreport +from miro import data from miro import prefs from miro import feed from miro import startup @@ -62,6 +59,7 @@ from miro.gtcache import gettext as _ from miro.gtcache import ngettext from miro.frontends.widgets import dialogs +from miro.frontends.widgets import infoupdater from miro.frontends.widgets import newsearchfeed from miro.frontends.widgets import newfeed from miro.frontends.widgets import newfolder @@ -72,6 +70,7 @@ from miro.frontends.widgets import removefeeds from miro.frontends.widgets import diagnostics from miro.frontends.widgets import crashdialog +from miro.frontends.widgets import itemlist from miro.frontends.widgets import itemlistcontroller from miro.frontends.widgets import prefpanel from miro.frontends.widgets import displays @@ -105,7 +104,9 @@ self.window = None self.ui_initialized = False messages.FrontendMessage.install_handler(self.message_handler) - app.info_updater = InfoUpdater() + app.item_list_pool = itemlist.ItemListPool() + app.item_tracker_updater = itemlist.ItemTrackerUpdater() + app.info_updater = infoupdater.InfoUpdater() app.saved_items = set() app.watched_folder_manager = watchedfolders.WatchedFolderManager() app.store_manager = stores.StoreManager() @@ -139,6 +140,7 @@ """Connects to signals, installs handlers, and calls :meth:`startup` from the :mod:`miro.startup` module. """ + app.frontend = WidgetsFrontend() self.connect_to_signals() startup.install_movies_directory_gone_handler(self.handle_movies_directory_gone) startup.install_first_time_handler(self.handle_first_time) @@ -149,6 +151,7 @@ requesting data from the backend. Also sets up managers, initializes the ui, and displays the :class:`MiroWindow`. """ + data.init() # Send a couple messages to the backend, when we get responses, # WidgetsMessageHandler() will call build_window() messages.TrackGuides().send_to_backend() @@ -162,8 +165,11 @@ self.setup_globals() self.ui_initialized = True - self.window = MiroWindow(app.config.get(prefs.LONG_APP_NAME), - self.get_main_window_dimensions()) + title = app.config.get(prefs.LONG_APP_NAME) + if app.debugmode: + title += ' (%s - %s)' % (app.config.get(prefs.APP_VERSION), + app.config.get(prefs.APP_REVISION_NUM)) + self.window = MiroWindow(title, self.get_main_window_dimensions()) self.window.connect_weak('key-press', self.on_key_press) self.window.connect_weak('on-shown', self.on_shown) self._window_show_callback = self.window.connect_weak('show', @@ -172,8 +178,9 @@ def setup_globals(self): app.item_list_controller_manager = \ itemlistcontroller.ItemListControllerManager() - app.menu_manager = menus.MenuStateManager() app.playback_manager = playback.PlaybackManager() + app.menu_manager = menus.MenuManager() + app.menu_manager.setup_menubar(self.menubar) app.search_manager = search.SearchManager() app.inline_search_memory = search.InlineSearchMemory() app.tabs = tablistmanager.TabListManager() @@ -199,6 +206,8 @@ This is useful for e.g. restoring a saved selection, which is overridden by the default first-row selection if done too early. 
""" + app.startup_timer.log_time("window shown") + app.startup_timer.log_total_time() logging.debug('on_shown') app.tabs.on_shown() @@ -206,67 +215,56 @@ if app.playback_manager.is_playing: return playback.handle_key_press(key, mods) - def handle_movies_directory_gone(self, continue_callback): - call_on_ui_thread(self._handle_movies_directory_gone, continue_callback) + def handle_movies_directory_gone(self, msg, movies_dir, + allow_continue=False): + call_on_ui_thread(self._handle_movies_directory_gone, msg, movies_dir, + allow_continue) - def _handle_movies_directory_gone(self, continue_callback): + def _handle_movies_directory_gone(self, msg, movies_dir, allow_continue): # Make sure we close the upgrade dialog before showing a new one self.message_handler.close_upgrade_dialog() - title = _("Movies directory gone") - movies_directory = app.config.get(prefs.MOVIES_DIRECTORY) - if os.path.isdir(movies_directory): - # allow users to continue if the directory exists - description = _( - "%(shortappname)s can't use the primary video directory " - "located at:\n" - "\n" - "%(moviedirectory)s\n" - "\n" - "This may be because it is located on an external drive " - "that is not connected, is a directory that " - "%(shortappname)s does not have write permission to, or " - "there is something that is not a directory at that " - "path.\n" - "\n" - "If you continue, the primary video directory will be " - "reset to a location on this drive. If you had videos " - "downloaded this will cause %(shortappname)s to lose " - "details about those videos.\n " - "\n" - "If you quit, then you can connect the drive or otherwise " - "fix the problem and relaunch %(shortappname)s.", - {"shortappname": app.config.get(prefs.SHORT_APP_NAME), - "moviedirectory": movies_directory}) - choice = dialogs.show_choice_dialog(title, description, - [dialogs.BUTTON_CONTINUE, dialogs.BUTTON_QUIT]) + title = _("Movies folder gone") + description = _( + "%(shortappname)s can't use your primary video folder, which " + "is currently set to:\n" + "\n" + "%(moviedirectory)s\n" + "\n" + "%(reason)s\n" + "\n", + {"shortappname": app.config.get(prefs.SHORT_APP_NAME), + "moviedirectory": movies_dir, + "reason":msg, + }) + if allow_continue: + description += _( + "You may continue with the current folder, choose a new " + "folder or quit and try to fix the issue manually.") + buttons = [ + dialogs.BUTTON_CONTINUE, + dialogs.BUTTON_CHOOSE_NEW_FOLDER, + dialogs.BUTTON_QUIT + ] else: - # if the directory doesn't exist, don't let the user continue - description = _( - "%(shortappname)s can't use the primary video directory " - "located at:\n" - "\n" - "%(moviedirectory)s\n" - "\n" - "This may be because it is located on an external drive " - "that is not connected, is a directory that " - "%(shortappname)s does not have write permission to, or " - "there is something that is not a directory at that " - "path.\n" - "\n" - "%(shortappname)s will now exit. 
You can connect the drive "
-                "or otherwise fix the problem and relaunch "
-                "%(shortappname)s.",
-                {"shortappname": app.config.get(prefs.SHORT_APP_NAME),
-                 "moviedirectory": movies_directory})
-            dialogs.show_message(title, description,
-                                 alert_type=dialogs.CRITICAL_MESSAGE)
-            choice = dialogs.BUTTON_QUIT
-
+            description += _(
+                "You may choose a new folder or quit and try to fix the issue "
+                "manually.")
+            buttons = [
+                dialogs.BUTTON_CHOOSE_NEW_FOLDER,
+                dialogs.BUTTON_QUIT
+            ]
+        choice = dialogs.show_choice_dialog(title, description, buttons)
         if choice == dialogs.BUTTON_CONTINUE:
-            continue_callback()
-        else:
-            self.do_quit()
+            startup.fix_movies_gone(None)
+            return
+        elif choice == dialogs.BUTTON_CHOOSE_NEW_FOLDER:
+            title = _("Choose new primary video folder")
+            new_movies_dir = dialogs.ask_for_directory(title, movies_dir)
+            if new_movies_dir is not None:
+                startup.fix_movies_gone(new_movies_dir)
+                return
+        self.do_quit()
 
     def handle_first_time(self, continue_callback):
         call_on_ui_thread(lambda: self._handle_first_time(continue_callback))
@@ -279,6 +277,7 @@
             firsttimedialog.FirstTimeDialog(continue_callback).run()
 
     def build_window(self):
+        app.startup_timer.log_time("in build_window")
         app.display_manager = displays.DisplayManager()
         app.tabs['site'].extend(self.message_handler.initial_guides)
         app.tabs['store'].extend(self.message_handler.initial_stores)
@@ -306,8 +305,6 @@
         messages.TrackNewVideoCount().send_to_backend()
         messages.TrackNewAudioCount().send_to_backend()
         messages.TrackUnwatchedCount().send_to_backend()
-        messages.TrackDevices().send_to_backend()
-        messages.TrackSharing().send_to_backend()
 
     def get_main_window_dimensions(self):
         """Override this to provide platform-specific Main Window dimensions.
@@ -368,6 +365,20 @@
     def resume_play_selection(self):
         app.item_list_controller_manager.resume_play_selection()
 
+    def enable_net_lookup_for_selection(self):
+        selection = app.item_list_controller_manager.get_selection()
+        id_list = [info.id for info in selection
+                   if info.downloaded and not info.net_lookup_enabled]
+        m = messages.SetNetLookupEnabled(id_list, True)
+        m.send_to_backend()
+
+    def disable_net_lookup_for_selection(self):
+        selection = app.item_list_controller_manager.get_selection()
+        id_list = [info.id for info in selection
+                   if info.downloaded and info.net_lookup_enabled]
+        m = messages.SetNetLookupEnabled(id_list, False)
+        m.send_to_backend()
+
     def on_stop_clicked(self, button=None):
         app.playback_manager.stop()
@@ -417,13 +428,12 @@
     def toggle_column(self, name):
         current_display = app.display_manager.get_current_display()
-        app.widget_state.toggle_sort(current_display.type, current_display.id,
-                                     unicode(name))
-        current_display.update_columns_enabled()
+        if current_display:
+            current_display.toggle_column_enabled(unicode(name))
 
     def share_item(self, item):
-        share_items = {"file_url": item.file_url,
-                       "item_name": item.name}
+        share_items = {"file_url": item.url,
+                       "item_name": item.title}
         if item.feed_url:
             share_items["feed_url"] = item.feed_url
         query_string = "&".join([
@@ -467,7 +477,20 @@
     def open_video(self):
         title = _('Open Files...')
-        filenames = dialogs.ask_for_open_pathname(title, select_multiple=True)
+        audio_extensions = [mem[1:] for mem in filetypes.AUDIO_EXTENSIONS]
+        video_extensions = [mem[1:] for mem in filetypes.VIDEO_EXTENSIONS]
+        torrent_extensions = [mem[1:] for mem in filetypes.TORRENT_EXTENSIONS]
+        all_extensions = (audio_extensions + video_extensions +
+                          torrent_extensions)
+        filenames = dialogs.ask_for_open_pathname(
+            title,
+            filters=[
+                (_('All 
Media Files'), all_extensions), + (_('Video Files'), video_extensions), + (_('Audio Files'), audio_extensions), + (_('Torrent Files'), torrent_extensions) + ], + select_multiple=True) if not filenames: return @@ -524,14 +547,6 @@ if url is not None: messages.DownloadURL(url).send_to_backend() - def add_files(self): - # this was: - #addfilesdialog.AddFilesDialog().run() - # that module no longer exists so presumably this is unreachable - app.widgetapp.handle_soft_failure('Application.add_files', - "Application.add_files is no longer used", - with_exception=False) - def check_version(self): # this gets called by the backend, so it has to send a message to # the frontend to open a dialog @@ -571,8 +586,7 @@ app.playback_manager.on_movie_finished() external_count = len([s for s in selection if s.is_external]) - failed_count = len([s for s in selection if s.download_info and - s.download_info.state == u'failed']) + failed_count = len([s for s in selection if s.is_failed_download]) folder_count = len([s for s in selection if s.is_container_item]) total_count = len(selection) @@ -684,7 +698,7 @@ return title = _('Save Item As...') - filename = selection[0].video_path + filename = selection[0].filename filename = os.path.basename(filename) filename = dialogs.ask_for_save_pathname(title, filename) @@ -693,6 +707,13 @@ messages.SaveItemAs(selection[0].id, filename).send_to_backend() + def set_media_kind(self, kind): + logging.debug('set media kind = %s', kind) + selection = app.item_list_controller_manager.get_selection() + if not selection: + return + messages.SetMediaKind(selection, kind).send_to_backend() + def convert_items(self, converter_id): selection = app.item_list_controller_manager.get_selection() for item_info in selection: @@ -708,8 +729,8 @@ return selection = selection[0] - if selection.file_url: - app.widgetapp.copy_text_to_clipboard(selection.file_url) + if selection.url: + app.widgetapp.copy_text_to_clipboard(selection.url) def add_new_feed(self): url = newfeed.run_dialog() @@ -734,10 +755,8 @@ def import_choose_files(self): # opens dialog allowing you to choose files and folders - audio_extensions = [mem.replace(".", "") - for mem in filetypes.AUDIO_EXTENSIONS] - video_extensions = [mem.replace(".", "") - for mem in filetypes.VIDEO_EXTENSIONS] + audio_extensions = [mem[1:] for mem in filetypes.AUDIO_EXTENSIONS] + video_extensions = [mem[1:] for mem in filetypes.VIDEO_EXTENSIONS] files_ = dialogs.ask_for_open_pathname( _("Choose files to import"), filters=[ @@ -1004,21 +1023,10 @@ importing media. """ app.config.set(prefs.MUSIC_TAB_CLICKED, True) - name, path = get_plat_media_player_name_path() - if path is None: - return - trans_data = { - 'media_player': name, - 'short_app_name': app.config.get(prefs.SHORT_APP_NAME)} - title = _('Import Music from %(media_player)s?', trans_data) - description = _("We see that you have %(media_player)s installed. 
" - "Would you like %(short_app_name)s to import the " - "music from it?", trans_data) - ret = dialogs.show_choice_dialog(title, description, - [dialogs.BUTTON_YES, - dialogs.BUTTON_NO]) - if ret == dialogs.BUTTON_YES: - app.watched_folder_manager.add(path) + dialog = firsttimedialog.MusicSetupDialog() + dialog.run() + if dialog.import_path(): + app.watched_folder_manager.add(dialog.import_path()) def quit_ui(self): """Quit out of the UI event loop.""" @@ -1059,6 +1067,27 @@ if path is not None: self.message_handler.profile_next_message(message_obj, path) + def clog_backend(self): + """Dev method: hog the backend to simluate the backend being + unresponsive. + + NB: strings not translated on purpose. + """ + title = 'Clog backend' + description = ('Make the backend busy by sleeping for a specified ' + 'number of seconds to simulate a clogged backend.\n\n' + 'WARNING: use judiciously!\n\n' + 'Default is 0 seconds.') + initial_text = '0' + n = dialogs.ask_for_string(title, description, initial_text) + if n == None: + return + try: + n = int(n) + except ValueError: + n = 0 + messages.ClogBackend(n).send_to_backend() + def profile_redraw(self): """Devel method: profile time to redraw part of the interface.""" @@ -1093,6 +1122,9 @@ gc.collect() self._printout_memory_stats('MEMORY STATS AFTER GARBAGE COLLECTION') + def force_feedparser_processing(self): + messages.ForceFeedparserProcessing().send_to_backend() + def _printout_memory_stats(self, title): # base_classes is a list of base classes that we care about. If you # want to check memory usage for a different class, add it to the @@ -1257,107 +1289,6 @@ def on_backend_shutdown(self, obj): logging.info('Shutting down...') -class InfoUpdaterCallbackList(object): - """Tracks the list of callbacks for InfoUpdater. - """ - - def __init__(self): - self._callbacks = {} - - def add(self, type_, id_, callback): - """Adds the callback to the list for ``type_`` ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - :param callback: the callback function to add - """ - key = (type_, id_) - self._callbacks.setdefault(key, set()).add(callback) - - def remove(self, type_, id_, callback): - """Removes the callback from the list for ``type_`` ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - :param callback: the callback function to remove - """ - key = (type_, id_) - callback_set = self._callbacks[key] - callback_set.remove(callback) - if len(callback_set) == 0: - del self._callbacks[key] - - def get(self, type_, id_): - """Get the list of callbacks for ``type_``, ``id_``. - - :param type_: the type of the thing (feed, site, ...) - :param id_: the id for the thing - """ - key = (type_, id_) - if key not in self._callbacks: - return [] - else: - # return a new list of callbacks, so that if we iterate over the - # return value, we don't have to worry about callbacks being - # removed midway. - return list(self._callbacks[key]) - -class InfoUpdater(signals.SignalEmitter): - """Track channel/item updates from the backend. - - To track item updates, use the item_list_callbacks and - item_changed_callbacks attributes, both are instances of - InfoUpdaterCallbackList. To track tab updates, connect to one of the - signals below. 
-
-    Signals:
-
-    * feeds-added (self, info_list) -- New feeds were added
-    * feeds-changed (self, info_list) -- Feeds were changed
-    * feeds-removed (self, info_list) -- Feeds were removed
-    * sites-added (self, info_list) -- New sites were added
-    * sites-changed (self, info_list) -- Sites were changed
-    * sites-removed (self, info_list) -- Sites were removed
-    * playlists-added (self, info_list) -- New playlists were added
-    * playlists-changed (self, info_list) -- Playlists were changed
-    * playlists-removed (self, info_list) -- Playlists were removed
-    """
-    def __init__(self):
-        signals.SignalEmitter.__init__(self)
-        for prefix in ('feeds', 'sites', 'playlists'):
-            self.create_signal('%s-added' % prefix)
-            self.create_signal('%s-changed' % prefix)
-            self.create_signal('%s-removed' % prefix)
-
-        self.item_list_callbacks = InfoUpdaterCallbackList()
-        self.item_changed_callbacks = InfoUpdaterCallbackList()
-
-    def handle_items_changed(self, message):
-        callback_list = self.item_changed_callbacks
-        for callback in callback_list.get(message.type, message.id):
-            callback(message)
-
-    def handle_item_list(self, message):
-        callback_list = self.item_list_callbacks
-        for callback in callback_list.get(message.type, message.id):
-            callback(message)
-
-    def handle_tabs_changed(self, message):
-        if message.type == 'feed':
-            signal_start = 'feeds'
-        elif message.type == 'site':
-            signal_start = 'sites'
-        elif message.type == 'playlist':
-            signal_start = 'playlists'
-        else:
-            return
-        if message.added:
-            self.emit('%s-added' % signal_start, message.added)
-        if message.changed:
-            self.emit('%s-changed' % signal_start, message.changed)
-        if message.removed:
-            self.emit('%s-removed' % signal_start, message.removed)
-
 class WidgetsMessageHandler(messages.MessageHandler):
     """Handles frontend messages.
@@ -1386,6 +1317,7 @@
         self.progress_dialog = None
         self.dbupgrade_progress_dialog = None
         self._profile_info = None
+        self._startup_failure_mode = self._database_failure_mode = False
 
     def profile_next_message(self, message_obj, path):
         self._profile_info = (message_obj, path)
@@ -1408,7 +1340,12 @@
             self.handle_tabs_changed(message)
         # Now, reply to backend, and eject the share.
         if share.mount:
-            messages.SharingEject(share).send_to_backend()
+            messages.StopTrackingShare(share.share_id).send_to_backend()
+
+    def handle_downloader_sync_command_complete(self, message):
+        # We used to need this command, but with the new ItemList code it's
+        # obsolete.
+        logging.debug('DownloaderSyncCommandComplete')
 
     def handle_jettison_tabs(self, message):
         typ = message.type
@@ -1458,12 +1395,9 @@
         dialogs.show_message(title, description, dialogs.INFO_MESSAGE)
         app.tabs.select_guide()
 
-    def handle_device_eject_failed(self, message):
-        name = message.device.name
-        title = _('Eject failed')
-        description = _("Ejecting device '%(name)s' failed.\n\n"
-                        "The device is in use.", {'name': name})
-        dialogs.show_message(title, description, dialogs.WARNING_MESSAGE)
+    def handle_show_warning(self, message):
+        dialogs.show_message(message.title, message.description,
+                             dialogs.WARNING_MESSAGE)
 
     def handle_frontend_quit(self, message):
         if self.dbupgrade_progress_dialog:
@@ -1508,7 +1442,8 @@
             self.dbupgrade_progress_dialog = None
 
     def handle_startup_failure(self, message):
-        if hasattr(self, "_startup_failure_mode"):
+        if self._startup_failure_mode:
+            logging.info("already in startup failure mode--skipping")
             return
         self._startup_failure_mode = True
         # We may still have the DB upgrade dialog open.  If so, close it.
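
Both this hunk and the next replace the 4.0.4-era hasattr() sentinels with
failure-mode flags that __init__ now initializes to False.  A minimal sketch
of the resulting guard pattern (illustrative only; the class and attribute
names here are generic, not Miro's):

    class FailureGuard(object):
        def __init__(self):
            # initialize the flag explicitly so it always exists; a mistyped
            # attribute name in a later check then raises AttributeError
            # instead of silently reading as "not failed yet"
            self.failure_mode = False

        def handle_failure(self):
            if self.failure_mode:
                return  # already handled once; don't show the dialog twice
            self.failure_mode = True
            # ... show the failure dialog, then quit ...
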
@@ -1519,7 +1454,7 @@ app.widgetapp.do_quit() def handle_startup_database_failure(self, message): - if hasattr(self, "_database_failure_mode"): + if self._database_failure_mode: logging.info("already in db failure mode--skipping") return self._database_failure_mode = True @@ -1543,6 +1478,9 @@ else: app.widgetapp.do_quit() + def startup_failed(self): + return self._startup_failure_mode or self._database_failure_mode + def handle_startup_success(self, message): app.widgetapp.startup_ui() signals.system.emit('startup-success') @@ -1558,6 +1496,10 @@ app.widgetapp.build_window() def call_handler(self, method, message): + if self.startup_failed(): + logging.warn("skipping message: %s because startup failed", + message) + return # uncomment this next line if you need frontend messages # logging.debug("handling frontend %s", message) if (self._profile_info is not None and @@ -1572,7 +1514,7 @@ self._profile_info = None def handle_current_search_info(self, message): - app.search_manager.set_search_info(message.engine, message.text) + app.search_manager.set_initial_search_info(message.engine, message.text) self._saw_pre_startup_message('search-info') def handle_tab_list(self, message): @@ -1632,17 +1574,17 @@ else: tablist.add(info) tablist.model_changed() + app.connection_pools.on_tabs_changed(message) app.info_updater.handle_tabs_changed(message) - def handle_item_list(self, message): - app.info_updater.handle_item_list(message) - if app.menu_manager: - app.menu_manager.update_menus() - - def handle_items_changed(self, message): - app.info_updater.handle_items_changed(message) - if app.menu_manager: - app.menu_manager.update_menus() + def handle_item_changes(self, message): + app.item_tracker_updater.on_item_changes(message) + + def handle_device_item_changes(self, message): + app.item_tracker_updater.on_device_item_changes(message) + + def handle_sharing_item_changes(self, message): + app.item_tracker_updater.on_sharing_item_changes(message) def handle_download_count_changed(self, message): app.widgetapp.download_count = message.count @@ -1669,6 +1611,9 @@ app.widgetapp.unwatched_count = message.count app.widgetapp.handle_unwatched_count_changed() + def handle_converter_list(self, message): + app.menu_manager.add_converters(message.converters) + def handle_conversions_count_changed(self, message): library_tab_list = app.tabs['library'] library_tab_list.update_converting_count(message.running_count, @@ -1715,7 +1660,7 @@ if isinstance(current_display, displays.DeviceDisplay): current_display.handle_device_sync_changed(message) - def handle_play_movie(self, message): + def handle_play_movies(self, message): app.playback_manager.start_with_items(message.item_infos) def handle_stop_playing(self, message): @@ -1774,6 +1719,25 @@ if not app.item_list_controller_manager or not app.tabs: return # got a metadata update before the UI opens app.item_list_controller_manager.update_metadata_progress( - message.target, message.remaining, message.eta, message.total) + message.target, message.finished, message.finished_local, + message.eta, message.total) app.tabs.update_metadata_progress( - message.target, message.remaining, message.eta, message.total) + message.target, message.finished_local, message.eta, + message.total) + + def handle_set_net_lookup_enabled_finished(self, message): + prefpanel.enable_net_lookup_buttons() + + def handle_net_lookup_counts(self, message): + prefpanel.update_net_lookup_counts(message.net_lookup_count, + message.total_count) + +class WidgetsFrontend(app.Frontend): + def 
call_on_ui_thread(self, func, *args, **kwargs):
+        call_on_ui_thread(func, *args, **kwargs)
+
+    def run_choice_dialog(self, title, description, buttons):
+        return dialogs.show_choice_dialog(title, description, buttons)
+
+    def quit(self):
+        app.widgetapp.do_quit()
diff -Nru miro-4.0.4/lib/frontends/widgets/browser.py miro-6.0/lib/frontends/widgets/browser.py
--- miro-4.0.4/lib/frontends/widgets/browser.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/browser.py	2013-04-05 16:02:42.000000000 +0000
@@ -272,7 +272,21 @@
                 metadata).send_to_backend()
             return False
 
-        return True
+        return self.should_load_url(url)
+
+    def should_download_url(self, url, mimetype=None):
+        if mimetype and filetypes.is_download_mimetype(mimetype):
+            logging.debug('downloading %s (%s)', url, mimetype)
+            return True
+        if filetypes.is_download_url(url):
+            logging.debug('downloading %s', url)
+            return True
+        return False
+
+    def do_download_finished(self, url):
+        logging.debug('finished downloading %s', url)
+        self.emit('download-started')
+        messages.DownloadURL(url, self.unknown_callback).send_to_backend()
 
 class BrowserNav(widgetset.VBox):
     def __init__(self, guide_info):
@@ -359,3 +373,5 @@
         self.toolbar.loading_icon.set_download(True)
         timer.add(5, lambda: self.toolbar.loading_icon.set_download(False))
 
+    def destroy(self):
+        self.browser.destroy()
diff -Nru miro-4.0.4/lib/frontends/widgets/cellpack.py miro-6.0/lib/frontends/widgets/cellpack.py
--- miro-4.0.4/lib/frontends/widgets/cellpack.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/cellpack.py	2013-04-05 16:02:42.000000000 +0000
@@ -860,6 +860,14 @@
             return hotspot, x - rect.x, y - rect.y
         return None
 
+    def find_hotspot_name(self, x, y):
+        """Like find_hotspot(), but only returns the hotspot name."""
+        hotspot_info = self.find_hotspot(x, y)
+        if hotspot_info is not None:
+            return hotspot_info[0]
+        else:
+            return None
+
     def draw(self, context):
         """Render each layout rect onto context
diff -Nru miro-4.0.4/lib/frontends/widgets/convertingcontroller.py miro-6.0/lib/frontends/widgets/convertingcontroller.py
--- miro-4.0.4/lib/frontends/widgets/convertingcontroller.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/convertingcontroller.py	2013-04-05 16:02:42.000000000 +0000
@@ -41,22 +41,15 @@
 
 from miro.conversions import conversion_manager
 
-class ConvertingSort(itemlist.ItemSort):
-    KEY = None
-
-    def __init__(self):
-        itemlist.ItemSort.__init__(self, True)
-        self.positions = []
+class ConvertingController(object):
+    """Controller object for the conversions list.
 
-    def sort_key(self, item):
-        id_ = item.id
-        if not id_ in self.positions:
-            self.positions.append(id_)
-        return self.positions.index(id_)
+    This class doesn't derive from ItemListController because ConversionInfos
+    don't reside on the database, so it can't use the itemlist/itemtracker
+    code.  Instead it simply creates a TableModel and appends items as they
+    come in.
+    """
 
-# XXX: why doesn't this guy derive from something sensible and use
-# ItemListController instead?
-class ConvertingController(object): def __init__(self): self.widget = widgetset.VBox() self.build_widget() @@ -68,8 +61,7 @@ self.titlebar.connect('reveal', self.on_reveal_conversions_folder) self.titlebar.connect('clear-finished', self.on_clear_finished) - sorter = ConvertingSort() - self.model = widgetset.InfoListModel(sorter.sort_key) + self.model = widgetset.TableModel('object') self.table = ConvertingTableView(self.model) self.table.connect_weak('hotspot-clicked', self.on_hotspot_clicked) scroller = widgetset.Scroller(False, True) @@ -105,47 +97,54 @@ app.widgetapp.reveal_file(task.output_path) def handle_task_list(self, running_tasks, pending_tasks, finished_tasks): - self.model.add_infos(running_tasks) - self.model.add_infos(pending_tasks) - self.model.add_infos(finished_tasks) + for task in running_tasks + pending_tasks + finished_tasks: + self.model.append(task) self.table.model_changed() self._update_buttons_state() + def find_iter(self, task_id): + """Find a model iter for a task id.""" + it = self.model.first_iter() + while it: + if self.model[it][0].id == task_id: + return it + it = self.model.next_iter(it) + return None + def handle_task_added(self, task): - try: - self.model.add_infos([task]) - except ValueError: - pass # task already added - else: - self.table.model_changed() - self._update_buttons_state() + if self.find_iter(task.id): + # task already added + return + self.model.append(task) + self.table.model_changed() + self._update_buttons_state() def handle_all_tasks_removed(self): - self.model.remove_all() + while len(self.model) > 0: + self.model.remove(self.model.first_iter()) self.table.model_changed() self._update_buttons_state() def handle_task_removed(self, task): - try: - self.model.remove_ids([task.id]) - except KeyError: - pass # task already removed - else: - self.table.model_changed() - self._update_buttons_state() + it = self.find_iter(task.id) + if it is None: + return # task already removed + self.model.remove(it) + self.table.model_changed() + self._update_buttons_state() def handle_task_changed(self, task): - try: - self.model.update_infos([task], resort=False) - except KeyError: - pass # task already removed - else: - self.table.model_changed() - self._update_buttons_state() + it = self.find_iter(task.id) + if it is None: + return # task already removed + self.model.update(it, task) + self.table.model_changed() + self._update_buttons_state() def _update_buttons_state(self): finished_count = not_finished_count = 0 - for info in self.model.info_list(): + for row in self.model: + info = row[0] if info.state == 'finished': finished_count += 1 else: @@ -165,7 +164,8 @@ self.set_show_headers(False) self.renderer = itemrenderer.ConversionItemRenderer() - self.column = widgetset.TableColumn('conversion', self.renderer) + self.column = widgetset.TableColumn('conversion', self.renderer, + info=0) self.column.set_min_width(600) self.add_column(self.column) diff -Nru miro-4.0.4/lib/frontends/widgets/devicecontroller.py miro-6.0/lib/frontends/widgets/devicecontroller.py --- miro-4.0.4/lib/frontends/widgets/devicecontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/devicecontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -41,14 +41,12 @@ from miro.gtcache import ngettext from miro import messages -from miro.frontends.widgets import imagebutton from miro.frontends.widgets import imagepool -from miro.frontends.widgets import itemlist from miro.frontends.widgets import itemlistcontroller from miro.frontends.widgets import 
itemlistwidgets from miro.frontends.widgets import itemrenderer -from miro.frontends.widgets import itemtrack from miro.frontends.widgets import segmented +from miro.frontends.widgets import tabcontroller from miro.frontends.widgets import widgetconst from miro.frontends.widgets import widgetutil from miro.frontends.widgets.widgetstatestore import WidgetStateStore @@ -59,217 +57,284 @@ class DeviceTabButtonSegment(segmented.TextButtonSegment): PARTS = { - 'off-far-left': segmented._get_image('device-off-far-left'), - 'off-middle-left': segmented._get_image('device-off-middle-left'), - 'off-center': segmented._get_image('device-off-center'), - 'off-middle-right': segmented._get_image('device-off-middle-right'), - 'off-far-right': segmented._get_image('device-off-far-right'), - 'on-far-left': segmented._get_image('device-on-far-left'), - 'on-middle-left': segmented._get_image('device-on-middle-left'), - 'on-center': segmented._get_image('device-on-center'), - 'on-middle-right': segmented._get_image('device-on-middle-right'), - 'on-far-right': segmented._get_image('device-on-far-right') + 'off-far-left': segmented._get_image('toggle-button-inactive_left'), + 'off-middle-left': segmented._get_image('toggle-button-inactive_center'), + 'off-center': segmented._get_image('toggle-button-inactive_center'), + 'off-middle-right': segmented._get_image('toggle-button-separator'), + 'off-far-right': segmented._get_image('toggle-button-inactive_right'), + 'on-far-left': segmented._get_image('toggle-button-active_left'), + 'on-middle-left': segmented._get_image('toggle-button-active_center'), + 'on-center': segmented._get_image('toggle-button-active_center'), + 'on-middle-right': segmented._get_image('toggle-button-active_center'), + 'on-far-right': segmented._get_image('toggle-button-active_right') } MARGIN = 20 - TEXT_COLOR = {True: (1, 1, 1), False: widgetutil.css_to_color('#424242')} + TEXT_COLOR = {True: (1, 1, 1), False: widgetutil.css_to_color('#0e0e0e')} def size_request(self, layout): width, _ = segmented.TextButtonSegment.size_request(self, layout) - return width, 23 + return width, 24 + +class TabButtonContainer(widgetset.Background): + TOP_BORDER = widgetutil.css_to_color('#e2e2e2') + TOP_GRADIENT = widgetutil.css_to_color('#cfcfcf') + BOTTOM_GRADIENT = widgetutil.css_to_color('#a4a4a4') + BOTTOM_BORDER1 = widgetutil.css_to_color('#bbbbbb') + BOTTOM_BORDER2 = widgetutil.css_to_color('#303030') + + def __init__(self): + widgetset.Background.__init__(self) + self.set_size_request(-1, 45) + + def draw(self, context, layout): + context.set_line_width(1) + context.move_to(0, 0.5) + context.line_to(context.width, 0.5) + context.set_color(self.TOP_BORDER) + context.stroke() + gradient = widgetset.Gradient(0, 1, context.width, context.height - 2) + gradient.set_start_color(self.TOP_GRADIENT) + gradient.set_end_color(self.BOTTOM_GRADIENT) + context.rectangle(0, 1, context.width, context.height - 2) + context.gradient_fill(gradient) + context.move_to(0, context.height - 1.5) + context.line_to(context.width, context.height - 1.5) + context.set_color(self.BOTTOM_BORDER1) + context.stroke() + context.move_to(0, context.height - 0.5) + context.line_to(context.width, context.height - 0.5) + context.set_color(self.BOTTOM_BORDER2) + context.stroke() class SizeProgressBar(widgetset.Background): - GRADIENT_COLOR_TOP = widgetutil.css_to_color('#080808') - GRADIENT_COLOR_BOTTOM = widgetutil.css_to_color('#515151') - SIZE_COLOR_TOP = widgetutil.css_to_color('#829ac8') - SIZE_COLOR_BOTTOM = 
widgetutil.css_to_color('#697fb0')
-    SIZE_BORDER = widgetutil.css_to_color('#060606')
 
     def __init__(self):
         widgetset.Background.__init__(self)
         self.size_ratio = 0.0
+        self.in_progress = False
         self.bg_surface = widgetutil.ThreeImageSurface('device-size-bg')
         self.fg_surface = widgetutil.ThreeImageSurface('device-size-fg')
+        self.progress_surface = widgetutil.ThreeImageSurface(
+            'device-size-progress')
+
+    def set_in_progress(self, value):
+        self.in_progress = bool(value)
+        self.queue_redraw()
 
     def set_progress(self, progress):
         self.size_ratio = progress
         self.queue_redraw()
 
     def draw(self, context, layout):
-        self.bg_surface.draw(context, 0, 0, context.width)
+        self.bg_surface.draw(context, 0, 1, context.width)
         if self.size_ratio:
-            self.fg_surface.draw(context, 0, 0,
-                                 int(context.width * self.size_ratio))
+            if self.in_progress:
+                surface = self.progress_surface
+            else:
+                surface = self.fg_surface
+            width = max(int(context.width * self.size_ratio),
+                        surface.left.width + surface.right.width)
+            surface.draw(context, 0, 0, width)
 
 class SizeWidget(widgetset.Background):
     def __init__(self):
         self.in_progress = False
         widgetset.Background.__init__(self)
+        hbox = widgetset.HBox()
+        # left side: labels on first line, progress on second
         vbox = widgetset.VBox()
-        # first line: size remaining on the left, sync status on the right
+        line = widgetset.HBox()
         self.size_label = widgetset.Label(u"")
         self.size_label.set_bold(True)
         self.sync_label = widgetset.Label(u"")
+        self.sync_label.set_alignment(widgetconst.TEXT_JUSTIFY_RIGHT)
+        self.sync_label.set_bold(True)
         line.pack_start(self.size_label)
         line.pack_end(self.sync_label)
         vbox.pack_start(widgetutil.pad(line, bottom=10))
-        # second line: bigger; size status on left, sync button on right
-        line = widgetset.HBox()
         self.progress = SizeProgressBar()
-        self.progress.set_size_request(425, 36)
+        self.progress.set_size_request(-1, 14)
+        vbox.pack_start(self.progress)
+
+        hbox.pack_start(vbox, expand=True)
+
+        # right side: sync button
         self.sync_button = widgetutil.ThreeImageButton(
-            'device-sync', _("Sync Now"))
-        self.sync_button.set_size_request(100, 39)
-        line.pack_start(self.progress)
-        line.pack_end(widgetutil.pad(self.sync_button, left=50))
-        vbox.pack_start(line)
-        self.add(widgetutil.align(vbox, 0.5, 1, 0, 0, top_pad=15,
-                                  bottom_pad=15, right_pad=20))
+            'device-sync', _("Up to date"))
+        self.sync_button.set_text_size(1.07) # 14pt
+        self.sync_button.disable()
+        self.sync_button.set_size_request(150, 23)
+        hbox.pack_end(widgetutil.pad(self.sync_button, left=50))
+        self.add(widgetutil.align(hbox, 0.5, 1, 1, 0, top_pad=10,
+                                  bottom_pad=10, left_pad=50, right_pad=50))
 
     def draw(self, context, layout):
-        gradient = widgetset.Gradient(0, 0, 0, context.height)
-        gradient.set_start_color(widgetutil.css_to_color('#c2c2c2'))
-        gradient.set_end_color(widgetutil.css_to_color('#a3a3a3'))
-        context.rectangle(0, 0, context.width, context.height)
+        context.set_line_width(1)
+        context.set_color(widgetutil.css_to_color('#d8d8d8'))
+        context.move_to(0, 0.5)
+        context.line_to(context.width, 0.5)
+        context.stroke()
+        gradient = widgetset.Gradient(0, 1, 0, context.height)
+        gradient.set_start_color(widgetutil.css_to_color('#f7f7f7'))
+        gradient.set_end_color(widgetutil.css_to_color('#cacaca'))
+        context.rectangle(0, 1, context.width, context.height)
         context.gradient_fill(gradient)
 
     def set_size(self, size, remaining):
+        if self.in_progress:
+            return
         if size and remaining:
             self.progress.set_progress(1 - float(remaining) / size)
             self.size_label.set_text(
-                _("%(used)s used / 
%(total)s total - %(percent)i%% full", { + _("%(used)s used / %(total)s total", { 'used': displaytext.size_string(size - remaining), - 'total': displaytext.size_string(size), - 'percent': 100 * (1 - float(remaining) / size)})) + 'total': displaytext.size_string(size)})) + self.sync_label.set_text( + _("%(percent)i%% full", + {'percent': 100 * (1 - float(remaining) / size)})) else: self.progress.set_progress(0) self.size_label.set_text(u"") - - def get_text(self): - return self.sync_label.get_text() + self.sync_label.set_text(u"") def set_sync_state(self, count): if self.in_progress: # don't update sync state while we're syncing return if count: - self.sync_label.set_text( - ngettext('1 file selected to sync', - '%(count)i files selected to sync', + self.sync_button.set_text( + ngettext('Sync 1 File', + 'Sync %(count)i Files', count, {'count': count})) self.sync_button.enable() else: - self.sync_label.set_text(_("Up to date")) + self.sync_button.set_text(_("Up to date")) self.sync_button.disable() def set_in_progress(self, progress): - self.in_progress = progress - if progress: - self.sync_label.set_text(u"") - self.sync_button.set_text(_("In Progress")) - self.sync_button.disable() - else: - self.sync_button.set_text(_("Sync Now")) - self.set_sync_state(0) - -class SyncProgressBar(widgetset.Background): - PROGRESS_GRADIENT_TOP = (1, 1, 1) - PROGRESS_GRADIENT_BOTTOM = widgetutil.css_to_color('#a0a0a0') + if progress != self.in_progress: + self.in_progress = progress + self.progress.set_in_progress(progress) + if progress: + self.size_label.set_text(_("Now Syncing")) + self.sync_button.set_text(_("Cancel Sync")) + self.sync_button.enable() + else: + self.set_sync_state(0) - BACKGROUND_GRADIENT_TOP = widgetutil.css_to_color('#0c0c0e') - BACKGROUND_GRADIENT_BOTTOM = widgetutil.css_to_color('#3f4346') + def set_sync_status(self, progress, eta): + self.set_in_progress(True) + self.progress.set_progress(progress) + label = displaytext.time_string(int(eta)) if eta is not None else u'' + self.sync_label.set_text(label) +class AutoFillSlider(widgetset.CustomSlider): def __init__(self): - widgetset.Background.__init__(self) - self.progress_ratio = 0.0 + widgetset.CustomSlider.__init__(self) + self.set_can_focus(False) + self.set_range(0.0, 1.0) + self.set_increments(0.05, 0.20) + self.track = widgetutil.ThreeImageSurface('device-slider-track') + self.filled_track = widgetutil.ThreeImageSurface( + 'device-slider-filled') + self.knob = widgetutil.make_surface('device-slider-knob') - def set_progress(self, progress): - self.progress_ratio = progress - self.queue_redraw() + def is_horizontal(self): + return True + + def is_continuous(self): + return True + + def size_request(self, layout): + return (200, self.knob.height) + + def slider_size(self): + return self.knob.width def draw(self, context, layout): - widgetutil.circular_rect(context, 0, 0, context.width, context.height) - gradient = widgetset.Gradient(0, 0, 0, context.height) - gradient.set_start_color(self.BACKGROUND_GRADIENT_TOP) - gradient.set_end_color(self.BACKGROUND_GRADIENT_BOTTOM) - context.gradient_fill(gradient) - progress_width = ( - (context.width - context.height) * self.progress_ratio + - context.height) - widgetutil.circular_rect_negative(context, 1, 1, - progress_width - 2, - context.height - 2) - gradient = widgetset.Gradient(1, 1, 1, context.height - 2) - gradient.set_start_color(self.PROGRESS_GRADIENT_TOP) - gradient.set_end_color(self.PROGRESS_GRADIENT_BOTTOM) - context.gradient_fill(gradient) + self.draw_track(context) + 
self.draw_filled(context) + self.draw_knob(context) + + def draw_track(self, context): + y = (context.height - self.track.height) / 2 + self.track.draw(context, 0, y, context.width) + + def draw_filled(self, context): + portion_right = self.get_value() + y = (context.height - self.filled_track.height) / 2 + width = int(round(portion_right * context.width)) + self.filled_track.draw(context, 0, y, width) + + + def draw_knob(self, context): + portion_right = self.get_value() + x_max = context.width - self.slider_size() + slider_x = int(round(portion_right * x_max)) + slider_y = (context.height - self.knob.height) / 2 + self.knob.draw(context, slider_x, slider_y, self.knob.width, + self.knob.height) + +class RoundedVBox(widgetset.Background): + BORDER_COLOR = widgetutil.css_to_color('#c8c8c8') + BG_COLOR = widgetutil.css_to_color('#e4e4e4') -class SyncProgressWidget(widgetset.Background): def __init__(self): widgetset.Background.__init__(self) + self._vbox = widgetset.VBox() + self.add(self._vbox) + self.children_start = [] + + def pack_start(self, widget, **kwargs): + self._vbox.pack_start(widget, **kwargs) + self.children_start.append(widget) - vbox = widgetset.VBox() - # first line: sync progess and cancel button - line = widgetset.HBox() - self.sync_progress = SyncProgressBar() - self.sync_progress.set_size_request(400, 10) - self.cancel_button = imagebutton.ImageButton('sync-cancel') - line.pack_start(widgetutil.pad(self.sync_progress, 10, 10, 5, 5)) - line.pack_end(widgetutil.pad(self.cancel_button, 5, 5, 5, 5)) - vbox.pack_start(line) + def set_size_request(self, width, height): + self._vbox.set_size_request(width, height) - # second line: time remaining, all the way to the right - line = widgetset.HBox() - self.sync_files = widgetset.Label(u"") - self.sync_remaining = widgetset.Label(u"") - self.sync_remaining.set_bold(True) - line.pack_start(widgetutil.align_left(self.sync_files, 5, 5, 5, 5)) - line.pack_end(widgetutil.align_right(self.sync_remaining, 5, 5, 5, 5)) - vbox.pack_start(line) - - self.add(widgetutil.pad(vbox, 10, 10, 10, 10)) - - def set_text(self, text): - self.sync_files.set_text(text) - - def set_status(self, progress, eta): - self.sync_progress.set_progress(progress) - if eta: - self.sync_remaining.set_text( - _('%(eta)s left', - {'eta': displaytext.time_string(int(eta))})) - else: - self.sync_remaining.set_text(_('%(percent)i%% percent complete', - {'percent': int(progress * 100)})) + def size_request(self, layout): + return self._vbox.get_size_request() def draw(self, context, layout): - # we draw the rectangle off the bottom so that it's flat - widgetutil.round_rect(context, 0, 0, context.width, - context.height + 10, 10) - - context.set_color(widgetutil.css_to_color('#9199bd')) + width, height = self.get_width(), self.get_height() + x, y = (context.width - width) / 2, (context.height - height) / 2 + widgetutil.round_rect(context, x, y, width, height, 20) + context.set_color(self.BG_COLOR) context.fill() - widgetutil.round_rect_reverse(context, 1, 1, context.width - 2, - context.height + 10, 10) - context.set_color(widgetutil.css_to_color('#bec1d0')) + widgetutil.round_rect(context, x, y, width, height, 20) + widgetutil.round_rect_reverse(context, x+1, y+1, width-2, height-2, 20) + context.set_color(self.BORDER_COLOR) context.fill() + total = y + for child in self.children_start[:-1]: + total += child.get_height() + context.rectangle(x, total, width, 1) + context.fill() + + widgetset.Background.draw(self, context, layout) -class SyncWidget(widgetset.VBox): +class 
SyncWidget(RoundedVBox): list_label = _("Sync These Podcasts") def __init__(self): self.device = None self.bulk_change = False - widgetset.VBox.__init__(self) + RoundedVBox.__init__(self) self.create_signal('changed') + + top_vbox = widgetset.VBox() self.sync_library = widgetset.Checkbox(self.title) self.sync_library.connect('toggled', self.sync_library_toggled) - self.pack_start(widgetutil.pad(self.sync_library, top=50)) - self._pack_extra_buttons() + top_vbox.pack_start(self.sync_library) + self._pack_extra_buttons(top_vbox) + + self.pack_start(widgetutil.pad(top_vbox, 20, 20, 20, 20)) + bottom_vbox = widgetset.VBox() self.feed_list = widgetset.VBox() self.feed_list.set_size_request(450, -1) self.info_map = {} @@ -282,14 +347,12 @@ self.info_map[self.info_key(info)] = checkbox else: self.sync_library.disable() - background = widgetset.SolidBackground( - widgetutil.css_to_color('#dddddd')) + background = widgetset.SolidBackground(self.BG_COLOR) background.add(self.feed_list) scroller = widgetset.Scroller(False, True) scroller.set_child(background) self.feed_list.disable() - self.pack_start(widgetutil.pad(scroller, top=20, bottom=5), - expand=True) + bottom_vbox.pack_start(scroller, expand=True) line = widgetset.HBox(spacing=5) button = widgetutil.TitlebarButton(_("Select none")) @@ -298,9 +361,12 @@ button = widgetutil.TitlebarButton(_("Select all")) button.connect('clicked', self.select_clicked, True) line.pack_end(button) - self.pack_start(widgetutil.pad(line, bottom=20)) + bottom_vbox.pack_start(widgetutil.pad(line, top=5)) + + self.pack_start(widgetutil.pad(bottom_vbox, 20, 20, 20, 20), + expand=True) - def _pack_extra_buttons(self): + def _pack_extra_buttons(self, vbox): pass def set_device(self, device): @@ -315,10 +381,6 @@ # OS X doesn't send the callback when we toggle it manually (#15392) self.sync_library_toggled(self.sync_library) - if self.file_type != u'playlists': - all_feeds = this_sync.get(u'all', True) - self.sync_unwatched.set_checked(not all_feeds) - for item in this_sync.get(u'items', []): if item in self.info_map: self.info_map[item].set_checked(True) @@ -398,10 +460,14 @@ file_type = u'podcasts' title = _("Sync Podcasts") - def _pack_extra_buttons(self): + def _pack_extra_buttons(self, vbox): self.sync_unwatched = widgetset.Checkbox(_("Only sync unplayed items")) self.sync_unwatched.connect('toggled', self.unwatched_toggled) - self.pack_start(widgetutil.pad(self.sync_unwatched, left=20)) + self.expire_podcasts = widgetset.Checkbox( + _("Delete expired podcasts from my device")) + self.expire_podcasts.connect('toggled', self.expire_podcasts_toggled) + vbox.pack_start(widgetutil.pad(self.sync_unwatched, left=20)) + vbox.pack_start(widgetutil.pad(self.expire_podcasts, left=20)) def unwatched_toggled(self, obj): all_items = (not obj.get_checked()) @@ -413,11 +479,37 @@ message.send_to_backend() self.emit('changed') + def expire_podcasts_toggled(self, obj): + expire_podcasts = bool(obj.get_checked()) + current = self.device.database[u'sync'][self.file_type].get(u'expire') + if current != expire_podcasts: + message = messages.ChangeDeviceSyncSetting(self.device, + self.file_type, + u'expire', + expire_podcasts) + message.send_to_backend() + self.emit('changed') + + def set_device(self, device): + SyncWidget.set_device(self, device) + sync = self.device.database.setdefault(u'sync', {}) + if self.file_type not in sync: + this_sync = sync[self.file_type] = {} + else: + this_sync = sync[self.file_type] + + all_feeds = this_sync.get(u'all', True) + 
self.sync_unwatched.set_checked(not all_feeds) + expire_podcasts = this_sync.get(u'expire', True) + self.expire_podcasts.set_checked(expire_podcasts) + def sync_library_toggled(self, obj): if SyncWidget.sync_library_toggled(self, obj): self.sync_unwatched.enable() + self.expire_podcasts.enable() else: self.sync_unwatched.disable() + self.expire_podcasts.disable() def get_items(self): return [info for info in app.tabs['feed'].get_feeds() @@ -440,14 +532,17 @@ def find_info_by_key(self, key): return app.tabs['playlist'].find_playlist_with_name(key) -class DeviceSettingsWidget(widgetset.Background): +class DeviceSettingsWidget(RoundedVBox): def __init__(self): - widgetset.Background.__init__(self) + RoundedVBox.__init__(self) + self._background = widgetset.Background() + self.pack_start(widgetutil.align_center(self._background, + 20, 20, 20, 20)) self.boxes = {} self.device = None def create_table(self): - self.remove() + self._background.remove() def _get_conversion_name(id_): if id_ == 'copy': return _('Copy') @@ -518,7 +613,8 @@ table.pack(widgetutil.align_left(widget[1]), 1, row) table.set_column_spacing(20) table.set_row_spacing(20) - self.set_child(widgetutil.align(table, 0.5, top_pad=50)) + self._background.set_child(widgetutil.align_center(table, + 20, 20, 20, 20)) def set_device(self, device): if self.device is None: @@ -546,6 +642,8 @@ else: self.boxes['always_show'].disable() self.boxes['always_show'].set_checked(True) + self.boxes['always_sync_videos'].set_checked( + device_settings.get(u'always_sync_videos', False)) self.bulk_change = False def setting_changed(self, widget, setting_or_value, setting=None): @@ -588,39 +686,123 @@ self.button_row.add_button(name.lower(), button) self.button_row.set_active('main') - self.pack_start(widgetutil.align_center( - self.button_row.make_widget(), top_pad=50)) + tbc = TabButtonContainer() + tbc.add(widgetutil.align_center(self.button_row.make_widget(), + top_pad=9)) + width = tbc.child.get_size_request()[0] + tbc.child.set_size_request(-1, 24) + self.pack_start(tbc) self.tabs = {} self.tab_container = widgetset.Background() - self.pack_start(self.tab_container, expand=True) - + scroller = widgetset.Scroller(False, True) + scroller.add(self.tab_container) + self.pack_start(scroller, expand=True) - label_size = widgetutil.font_scale_from_osx_points(16) vbox = widgetset.VBox() - label = widgetset.Label(_("Drag individual video and audio files " - "onto the device in the sidebar to copy " - "them.")) - label.set_size(label_size) - vbox.pack_start(widgetutil.align_center(label, top_pad=50)) - label = widgetset.Label(_("Use these options and the tabs above for " - "automatic syncing.")) - label.set_size(label_size) - vbox.pack_start(widgetutil.align_center(label, top_pad=10)) + vbox.pack_start(widgetutil.align_left( + tabcontroller.ConnectTab.build_header(_("Individual Files")), + top_pad=10)) + label = tabcontroller.ConnectTab.build_text( + _("Drag individual video and audio files onto " + "the device in the sidebar to copy them.")) + label.set_size_request(width, -1) + label.set_wrap(True) + vbox.pack_start(widgetutil.align_left(label, top_pad=10)) + + vbox.pack_start(widgetutil.align_left( + tabcontroller.ConnectTab.build_header(_("Syncing")), + top_pad=30)) + label = tabcontroller.ConnectTab.build_text( + _("Use the tabs above and these options for " + "automatic syncing.")) + label.set_size_request(width, -1) + label.set_wrap(True) + vbox.pack_start(widgetutil.align_left(label, top_pad=10)) + + self.auto_sync = widgetset.Checkbox(_("Sync 
automatically when this " + "device is connected")) + self.auto_sync.connect('toggled', self._auto_sync_changed) + vbox.pack_start(widgetutil.align_left(self.auto_sync, top_pad=10)) + max_fill_label = _( + "Don't fill more than %(count)i percent of the " + "free space when syncing", + {'count': id(self)}) + checkbox_label, text_label = max_fill_label.split(unicode(id(self)), 1) + self.max_fill_enabled = widgetset.Checkbox(checkbox_label) + self.max_fill_enabled.connect('toggled', + self._max_fill_enabled_changed) + self.max_fill_percent = widgetset.TextEntry() + self.max_fill_percent.set_size_request(50, -1) + self.max_fill_percent.connect('focus-out', + self._max_fill_percent_changed) + label = widgetset.Label(text_label) + vbox.pack_start(widgetutil.align_left( + widgetutil.build_hbox([self.max_fill_enabled, + self.max_fill_percent, + label], 0), + top_pad=10)) + + rounded_vbox = RoundedVBox() + vbox.pack_start(widgetutil.align_left( + tabcontroller.ConnectTab.build_header(_("Auto Fill")), + top_pad=30, bottom_pad=10)) + self.auto_fill = widgetset.Checkbox( + _("After syncing my selections in the tabs above, " + "fill remaining space with:")) + self.auto_fill.connect('toggled', self._auto_fill_changed) + rounded_vbox.pack_start(widgetutil.align_left(self.auto_fill, + 20, 20, 20, 20)) + names = [ + (_('Newest Music'), u'recent_music'), + (_('Random Music'), u'random_music'), + (_('Most Played Songs'), u'most_played_music'), + (_('New Playlists'), u'new_playlists'), + (_('Most Recent Podcasts'), u'recent_podcasts')] + longest = max(names, key=lambda x: len(x[0]))[0] + width = widgetset.Label(longest).get_width() + less_label = widgetset.Label(_('Less').upper()) + less_label.set_size(tabcontroller.ConnectTab.TEXT_SIZE / 2) + more_label = widgetset.Label(_('More').upper()) + more_label.set_size(tabcontroller.ConnectTab.TEXT_SIZE / 2) + label_hbox = widgetutil.build_hbox([ + less_label, + widgetutil.pad(more_label, + left=(200 - less_label.get_width() - + more_label.get_width()))], + padding=0) + label_hbox.set_size_request(200, -1) + scrollers = [widgetutil.align_right(label_hbox, + right_pad=20)] + self.auto_fill_sliders = {} + for name, setting in names: + label = widgetutil.align_right(widgetset.Label(name)) + label.set_size_request(width, -1) + dragger = AutoFillSlider() + dragger.connect('released', self._auto_fill_slider_changed, + setting) + self.auto_fill_sliders[setting] = dragger + hbox = widgetutil.build_hbox([label, dragger], 20) + scrollers.append(hbox) + rounded_vbox.pack_start(widgetutil.align_left( + widgetutil.build_vbox(scrollers, 10), + 20, 20, 20, 20)) + + vbox.pack_start(widgetutil.align_left(rounded_vbox)) self.device_size = SizeWidget() self.device_size.sync_button.connect('clicked', self.sync_clicked) self.pack_end(self.device_size) - self.sync_container = widgetset.Background() - self.pack_end(widgetutil.align_center(self.sync_container)) - - self.add_tab('main', vbox) - self.add_tab('podcasts', widgetutil.align_center(PodcastSyncWidget())) + self.add_tab('main', widgetutil.align_center(vbox, 20, 20, 20, 20)) + self.add_tab('podcasts', widgetutil.align_center(PodcastSyncWidget(), + 20, 20, 20, 20)) self.add_tab('playlists', - widgetutil.align_center(PlaylistSyncWidget())) + widgetutil.align_center(PlaylistSyncWidget(), + 20, 20, 20, 20)) self.add_tab('settings', - widgetutil.align_center(DeviceSettingsWidget())) + widgetutil.align_center(DeviceSettingsWidget(), + 20, 20, 20, 20)) def add_tab(self, key, widget): if not self.tabs: @@ -634,12 +816,22 @@ 
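The max_fill_label code above is a small localization trick worth spelling out: the %(count)i placeholder is filled with id(self), an integer that cannot occur in any translated sentence, and the result is split on that number to recover the text on either side of the percentage, wherever the translator placed it. A minimal standalone sketch of the idea (split_on_marker is a hypothetical helper, not part of Miro):

    def split_on_marker(template, marker):
        # marker: an int guaranteed not to collide with the translation
        filled = template % {'count': marker}
        return filled.split(str(marker), 1)

    before, after = split_on_marker(
        "Don't fill more than %(count)i percent of the free space "
        "when syncing", 123456789)
    # before == "Don't fill more than "
    # after == " percent of the free space when syncing"

This lets the TextEntry sit exactly where the number belongs in the translated sentence.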
self.device_size.set_size(device.size, device.remaining) if not self.device.mount: return + sync = device.database.get(u'sync', {}) + self.auto_sync.set_checked(sync.get(u'auto', False)) + self.max_fill_enabled.set_checked(sync.get(u'max_fill', False)) + self.max_fill_percent.set_text(str(sync.get(u'max_fill_percent', 90))) + self.auto_fill.set_checked(sync.get(u'auto_fill', False)) + auto_fill_settings = sync.get(u'auto_fill_settings', {}) + for auto_fill_setting in self.auto_fill_sliders: + slider = self.auto_fill_sliders[auto_fill_setting] + slider.set_value(float( + auto_fill_settings.get(auto_fill_setting, 0.5))) for name in 'podcasts', 'playlists', 'settings': tab = self.tabs[name] tab.child.set_device(device) sync_manager = app.device_manager.get_sync_for_device(device, create=False) - if sync_manager is not None: + if sync_manager is not None and sync_manager.started: self.set_sync_status(sync_manager.get_progress(), sync_manager.get_eta()) @@ -649,6 +841,68 @@ self.tab_container.remove() self.tab_container.set_child(self.tabs[key]) + def _auto_sync_changed(self, widget): + is_checked = widget.get_checked() + was_checked = self.device.database.get(u'sync', {}).get(u'auto', False) + if is_checked != was_checked: + message = messages.ChangeDeviceSyncSetting(self.device, + None, + u'auto', is_checked) + message.send_to_backend() + + def _max_fill_enabled_changed(self, widget): + is_checked = widget.get_checked() + was_checked = self.device.database.get(u'sync', {}).get(u'max_fill', + False) + if is_checked != was_checked: + message = messages.ChangeDeviceSyncSetting(self.device, + None, + u'max_fill', is_checked) + message.send_to_backend() + self.sync_settings_changed(widget) + + + def _max_fill_percent_changed(self, widget): + try: + value = int(widget.get_text()) + except ValueError: + return + old_value = self.device.database.get(u'sync', {}).get( + u'max_fill_percent', 90) + if value != old_value: + message = messages.ChangeDeviceSyncSetting(self.device, + None, + u'max_fill_percent', + value) + message.send_to_backend() + self.sync_settings_changed(widget) + + def _auto_fill_changed(self, widget): + value = widget.get_checked() + old_value = self.device.database.get(u'sync', {}).get( + u'auto_fill', False) + if value != old_value: + message = messages.ChangeDeviceSyncSetting(self.device, + None, + u'auto_fill', + value) + message.send_to_backend() + self.sync_settings_changed(widget) + + + def _auto_fill_slider_changed(self, widget, setting): + value = widget.get_value() + old_value = self.device.database.get(u'sync', {}).get( + u'auto_fill_settings', {}).get(setting, 0.5) + if value != old_value: + message = messages.ChangeDeviceSyncSetting(self.device, + None, + (u'auto_fill_settings', + setting), + value) + message.send_to_backend() + self.sync_settings_changed(widget) + def _get_sync_state(self): sync_type = {} sync_ids = {} @@ -667,34 +921,23 @@ message.send_to_backend() def sync_clicked(self, obj): - message = messages.DeviceSyncFeeds(self.device) + if self.device_size.in_progress: + message = messages.CancelDeviceSync(self.device) + else: + message = messages.DeviceSyncFeeds(self.device) message.send_to_backend() - def current_sync_information(self, count): + def current_sync_information(self, count, size): self.device_size.set_sync_state(count) def set_sync_status(self, progress, eta): - if not isinstance(self.sync_container.child, SyncProgressWidget): - widget = SyncProgressWidget() - widget.cancel_button.connect('clicked', self.cancel_sync) - 
self.sync_container.set_child(widget) - widget.set_text(self.device_size.get_text()) - self.device_size.set_in_progress(True) - self.sync_container.child.set_status(progress, eta) - - def cancel_sync(self, obj): - message = messages.CancelDeviceSync(self.device) - message.send_to_backend() + self.device_size.set_sync_status(progress, eta) def sync_finished(self): - self.sync_container.remove() self.device_size.set_in_progress(False) + self.device_size.set_size(self.device.size, self.device.remaining) self.sync_settings_changed(self) -class DeviceItemList(itemlist.ItemList): - def filter(self, item_info): - return True - class UnknownDeviceView(widgetset.VBox): def __init__(self): widgetset.VBox.__init__(self) @@ -801,10 +1044,10 @@ self.device_view.set_child(view) - def current_sync_information(self, count): + def current_sync_information(self, count, size): view = self.get_view() if isinstance(view, DeviceMountedView): - view.current_sync_information(count) + view.current_sync_information(count, size) def set_sync_status(self, progress, eta): view = self.get_view() @@ -835,26 +1078,17 @@ if message.device.id != self.device.id: return # not our device - self.widget.current_sync_information(message.count) + self.widget.current_sync_information(message.count, + message.size) def handle_device_sync_changed(self, message): if message.device.id != self.device.id: return # not our device - if message.finished: self.widget.sync_finished() else: self.widget.set_sync_status(message.progress, message.eta) - def start_tracking(self): - view = self.widget.get_view() - if isinstance(view, DeviceMountedView): - message = messages.QuerySyncInformation(self.device) - message.send_to_backend() - - def stop_tracking(self): - pass - class DeviceItemController(itemlistcontroller.AudioVideoItemsController): unwatched_label = u'' # everything is marked as played @@ -893,16 +1127,9 @@ def make_titlebar(self): titlebar = self.titlebar_class() titlebar.connect('search-changed', self._on_search_changed) - titlebar.connect('toggle-filter', self.on_toggle_filter) + titlebar.connect('filter-clicked', self.on_filter_clicked) return titlebar - def build_header_toolbar(self): - sorts_enabled = app.widget_state.get_sorts_enabled(self.type, self.id) - return itemlistwidgets.HeaderToolbar(sorts_enabled) - - def build_item_tracker(self): - return itemtrack.ItemListTracker.create('device', self.device) - def build_renderer(self): return itemrenderer.DeviceItemRenderer(display_channel=False) @@ -926,7 +1153,6 @@ return self.device = device - class DeviceItemDragHandler(object): def allowed_actions(self): return widgetset.DRAG_ACTION_COPY @@ -936,4 +1162,4 @@ def begin_drag(self, tableview, rows): videos = [row[0] for row in rows] - return { 'device-%s-item' % videos[0].file_type: pickle.dumps(videos) } + return { 'device-%s-item' % videos[0].file_type: videos} diff -Nru miro-4.0.4/lib/frontends/widgets/dialogs.py miro-6.0/lib/frontends/widgets/dialogs.py --- miro-4.0.4/lib/frontends/widgets/dialogs.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/dialogs.py 2013-04-05 16:02:42.000000000 +0000 @@ -52,7 +52,7 @@ BUTTON_CREATE_FEED, BUTTON_CONTINUE, BUTTON_QUIT, BUTTON_DELETE, BUTTON_CLOSE_TO_TRAY, BUTTON_MIGRATE, BUTTON_DONT_MIGRATE, BUTTON_NOT_NOW, BUTTON_ADD, BUTTON_CREATE_FOLDER, - BUTTON_START_FRESH) + BUTTON_CHOOSE_NEW_FOLDER, BUTTON_START_FRESH, BUTTON_RETRY) WARNING_MESSAGE = 0 INFO_MESSAGE = 1 CRITICAL_MESSAGE = 2 @@ -72,14 +72,19 @@ dialog.set_transient_for(transient_for) class 
MainDialog(widgetset.Dialog): - """Dialog that is transient for the main window.""" + """Dialog that is transient for the main window. + + description=None means that no text will ever be set in this dialog. + If you have no description text to set right now but would like to + update the description later, pass '' instead. + """ def __init__(self, title, description=None): widgetset.Dialog.__init__(self, title, description) set_transient_for_main(self) class ProgressDialog(MainDialog): def __init__(self, title): - MainDialog.__init__(self, title) + MainDialog.__init__(self, title, description='') self.progress_bar = widgetset.ProgressBar() self.label = widgetset.Label() self.label.set_size(1.2) @@ -142,6 +147,8 @@ for mem in choices: window.add_button(mem.text) response = window.run() + if response == -1: + return None return choices[response] finally: window.destroy() diff -Nru miro-4.0.4/lib/frontends/widgets/dialogwidgets.py miro-6.0/lib/frontends/widgets/dialogwidgets.py --- miro-4.0.4/lib/frontends/widgets/dialogwidgets.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/dialogwidgets.py 2013-04-05 16:02:42.000000000 +0000 @@ -119,7 +119,11 @@ def pack_label(self, text, *args, **kwargs): if 'extra_space' not in kwargs and len(args) == 0: kwargs['extra_space'] = ControlGrid.ALIGN_LEFT - self.pack(widgetset.Label(text), *args, **kwargs) + widget = widgetset.Label(text) + if 'width' in kwargs: + widget.set_wrap(True) + widget.set_size_request(kwargs.pop('width'), -1) + self.pack(widget, *args, **kwargs) def pack(self, widget, extra_space=FILL, pad_left=0, pad_right=6, span=1): diff -Nru miro-4.0.4/lib/frontends/widgets/displays.py miro-6.0/lib/frontends/widgets/displays.py --- miro-4.0.4/lib/frontends/widgets/displays.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/displays.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,6 +30,7 @@ """displays.py -- Handle switching the content on the right hand side of the app. """ +import functools import logging import os @@ -39,6 +40,8 @@ from miro import signals from miro import prefs from miro import filetypes +from miro.data import item +from miro.data import itemtrack from miro.gtcache import gettext as _ from miro.gtcache import ngettext from miro.frontends.widgets import browser @@ -53,7 +56,10 @@ from miro.frontends.widgets import tabcontroller from miro.frontends.widgets import playlist from miro.frontends.widgets import widgetutil +from miro.frontends.widgets.widgetstatestore import WidgetStateStore + from miro.plat.frontends.widgets.threads import call_on_ui_thread +from miro.plat.frontends.widgets import timer from miro.plat.frontends.widgets import widgetset class Display(signals.SignalEmitter): @@ -96,6 +102,29 @@ """ pass + def get_column_info(self): + """Get info about the togglable columns for this display. + + By default this returns None, which indicates that the display doesn't + support togglable columns. + + Subclasses can override this and return the tuple (columns_enabled, + columns_available) which describes the togglable columns. + Both should be a list of column names. + """ + return None + + def toggle_column_enabled(self, name): + """Change whether a column is enabled. + + This method is called after the sorts menu changes. By default we do + nothing. If a display has togglable columns, it should override this + and update the enabled columns.
+ + :param name: unicode identifier for the column + """ + pass + class TabDisplay(Display): """Display that displays the selection in the tab list.""" @@ -103,7 +132,7 @@ raise NotImplementedError() def on_activate(self, is_push): - app.menu_manager.update_menus() + app.menu_manager.update_menus('tab-selection-changed') @staticmethod def should_display(tab_type, selected_tabs): @@ -122,6 +151,8 @@ def __init__(self): # displays that we construct when the user clicks on them self.on_demand_display_classes = [ + VideoItemsDisplay, + AudioItemsDisplay, FeedDisplay, AllFeedsDisplay, PlaylistDisplay, @@ -144,17 +175,17 @@ # catch-all. DummyDisplay, ] + # _select_display_for_tabs_args holds the arguments passed to + # select_display_for_tabs() + self._select_display_for_tabs_args = None # displays that we keep alive all the time self.permanent_displays = set() - self.add_permanent_display(VideoItemsDisplay()) - self.add_permanent_display(AudioItemsDisplay()) self.display_stack = [] self.selected_tab_list = self.selected_tabs = None app.info_updater.connect('sites-removed', SiteDisplay.on_sites_removed) def add_permanent_display(self, display): self.permanent_displays.add(display) - display.on_selected() def get_current_display(self): try: @@ -165,6 +196,31 @@ def select_display_for_tabs(self, selected_tab_list, selected_tabs): """Select a display to show in the right-hand side. """ + if self._select_display_for_tabs_args is None: + # First call to select_display_for_tabs(), we need to schedule + # _select_display_for_tabs() to be called. + timer.add(0.01, self._select_display_for_tabs) + # For all cases, we want to store these arguments in + # _select_display_for_tabs_args so that when + # _select_display_for_tabs() is called it uses them. + self._select_display_for_tabs_args = (selected_tab_list, + selected_tabs) + + def _select_display_for_tabs(self): + """Call that does the work for select_display_for_tabs() + + select_display_for_tabs() defers action in case the user is quickly + switching between tabs. In that case, we only need to select the last + tab that was switched to. This method does the actual work. + """ + if self._select_display_for_tabs_args is None: + app.widgetapp.handle_soft_failure( + "_select_display_for_tabs():", + "_select_display_for_tabs_args is None") + return + selected_tab_list, selected_tabs = self._select_display_for_tabs_args + self._select_display_for_tabs_args = None + if (selected_tab_list is self.selected_tab_list and selected_tabs == self.selected_tabs and len(self.display_stack) > 0 and @@ -206,7 +262,13 @@ The main reason for this method is when we are playing video and the current tab gets removed (#16225). In this case, we want to select a new tab and make a display for that tab, but not show that display - until video stops + until video stops. + + Normally this is called as part of a deferred call by + change_non_video_displays(). We do this because loading a display + is a relatively expensive process and we want to be able to cancel + the operation if the display is going to be extremely transient, + e.g. during a continued keypress event as part of navigation. 
""" if (len(self.display_stack) == 0 or @@ -253,8 +315,10 @@ display = self.display_stack.pop() if unselect: self._unselect_display(display, on_top=True) - self.current_display.on_activate(is_push=False) - app.widgetapp.window.set_main_area(self.current_display.widget) + current_display = self.current_display + if current_display: + current_display.on_activate(is_push=False) + app.widgetapp.window.set_main_area(current_display.widget) def _unselect_display(self, display, on_top): if on_top: @@ -266,6 +330,64 @@ def push_folder_contents_display(self, folder_info, start_playing=False): self.push_display(FolderContentsDisplay(folder_info, start_playing)) +class RecentlyActiveTracker(object): + """Used by GuideDisplay to track recently downloaded/played items.""" + + # maximum number of items to track for each of the lists + ITEM_LIMIT = 6 + + def __init__(self, guide_tab): + # map ItemTracker objects to the GuideTab method we should call to set + # the list for + self.trackers = { + self._recently_downloaded_tracker(): + guide_tab.set_recently_downloaded, + self._recently_played_tracker('video'): + guide_tab.set_recently_watched, + self._recently_played_tracker('audio'): + guide_tab.set_recently_listened, + } + + for (tracker, set_recent_method) in self.trackers.items(): + list_callback = functools.partial(self.on_list_changed, + set_recent_method) + change_callback = functools.partial(self.on_items_changed, + set_recent_method) + tracker.connect('list-changed', list_callback) + tracker.connect('items-changed', change_callback) + app.item_tracker_updater.add_tracker(tracker) + set_recent_method(tracker.get_items()) + + def destroy(self): + for (tracker, set_recent_method) in self.trackers: + app.item_tracker_updater.remove_tracker(tracker) + + def _recently_downloaded_tracker(self): + query = itemtrack.ItemTrackerQuery() + query.add_condition('downloaded_time', 'IS NOT', None) + query.add_condition('expired', '=', False) + query.add_condition('parent_id', 'IS', None) + query.add_condition('watched_time', 'IS', None) + query.set_order_by(['-downloaded_time']) + query.set_limit(self.ITEM_LIMIT) + return itemtrack.ItemTracker(call_on_ui_thread, query, + item.ItemSource()) + + def _recently_played_tracker(self, file_type): + query = itemtrack.ItemTrackerQuery() + query.add_condition('file_type', '=', file_type) + query.add_condition('watched_time', 'IS NOT', None) + query.set_order_by(['-last_watched']) + query.set_limit(self.ITEM_LIMIT) + return itemtrack.ItemTracker(call_on_ui_thread, query, + item.ItemSource()) + + def on_list_changed(self, set_recent_method, tracker): + set_recent_method(tracker.get_items()) + + def on_items_changed(self, set_recent_method, tracker, changed_ids): + set_recent_method(tracker.get_items()) + class GuideDisplay(TabDisplay): @staticmethod def should_display(tab_type, selected_tabs): @@ -274,20 +396,12 @@ def __init__(self, tab_type, selected_tabs): Display.__init__(self) self.widget = guidecontroller.GuideTab(selected_tabs[0].browser) - app.info_updater.item_list_callbacks.add(u'guide-sidebar', None, - self.on_item_list), - app.info_updater.item_changed_callbacks.add(u'guide-sidebar', None, - self.on_item_changed), - messages.TrackItems(u'guide-sidebar', None).send_to_backend() + self.recently_active_tracker = RecentlyActiveTracker(self.widget) app.display_manager.add_permanent_display(self) # once we're loaded, # stay loaded - def on_item_list(self, message): - self.widget.on_item_list(message.items) - - def on_item_changed(self, message): - 
self.widget.on_item_changed(message.added, message.changed, - message.removed) + def cleanup(self): + self.recently_active_tracker.destroy() class SiteDisplay(TabDisplay): _open_sites = {} # maps site ids -> BrowserNav objects for them @@ -296,9 +410,13 @@ def on_sites_removed(cls, info_updater, id_list): for id_ in id_list: try: - del cls._open_sites[id_] + browser = cls._open_sites[id_] except KeyError: pass + else: + # explicitly destroy the browser. Some platforms need this + # call to cleanup. + browser.destroy() @staticmethod def should_display(tab_type, selected_tabs): @@ -315,7 +433,6 @@ class ItemListDisplayMixin(object): def on_selected(self): app.item_list_controller_manager.controller_created(self.controller) - self.controller.start_tracking() def on_activate(self, is_push): app.item_list_controller_manager.controller_displayed(self.controller) @@ -333,10 +450,12 @@ self.controller) def cleanup(self): - self.controller.stop_tracking() + self.controller.cleanup() app.item_list_controller_manager.controller_destroyed(self.controller) - def update_columns_enabled(self): + def toggle_column_enabled(self, name): + app.widget_state.toggle_column_enabled(self.type, self.id, + self.controller.selected_view, name) self.controller.update_columns_enabled() class ItemListDisplay(ItemListDisplayMixin, TabDisplay): @@ -351,6 +470,13 @@ def make_controller(self, tab): raise NotImplementedError() + def get_column_info(self): + available = WidgetStateStore.get_columns_available(self.type, self.id, + self.controller.selected_view) + enabled = app.widget_state.get_columns_enabled(self.type, self.id, + self.controller.selected_view) + return enabled, available + class FeedDisplay(ItemListDisplay): TAB_TYPE = u'feed' UPDATER_SIGNAL_NAME = 'feeds-changed' @@ -377,7 +503,7 @@ selected_tabs[0].tab_class == u'feed') def make_controller(self, tab): - return feedcontroller.AllFeedsController(tab.id, True, True) + return feedcontroller.AllFeedsController() def cleanup(self): ItemListDisplay.cleanup(self) @@ -416,12 +542,6 @@ isinstance(selected_tabs[0], messages.DeviceInfo) and \ not getattr(selected_tabs[0], 'fake', False) - def on_selected(self): - self.controller.start_tracking() - - def cleanup(self): - self.controller.stop_tracking() - def handle_current_sync_information(self, message): if not getattr(self.controller.device, 'fake', False): self.controller.handle_current_sync_information(message) @@ -430,20 +550,31 @@ if not getattr(self.controller.device, 'fake', False): self.controller.handle_device_sync_changed(message) + def on_activate(self, is_push): + if self.controller.device.mount: + message = messages.QuerySyncInformation(self.controller.device) + message.send_to_backend() + class DeviceItemDisplay(DeviceDisplayMixin, ItemListDisplay): @staticmethod def should_display(tab_type, selected_tabs): + # FIXME: re-implement DeviceItemController with the new ItemList code return tab_type == u'device' and len(selected_tabs) == 1 and \ isinstance(selected_tabs[0], messages.DeviceInfo) and \ getattr(selected_tabs[0], 'fake', False) class SharingDisplay(ItemListDisplay): + def __init__(self, tab_type, selected_tabs): + # our type is always 'sharing', regardless if the tab type is + # 'sharing' or 'sharing-playlist' + ItemListDisplay.__init__(self, u'sharing', selected_tabs) + @staticmethod def should_display(tab_type, selected_tabs): - return tab_type == u'sharing' and len(selected_tabs) == 1 + return tab_type.startswith(u'sharing') and len(selected_tabs) == 1 def make_controller(self, tab): - return 
sharingcontroller.SharingView(tab) + return sharingcontroller.SharingController(tab) class SearchDisplay(ItemListDisplay): @staticmethod @@ -454,13 +585,6 @@ return searchcontroller.SearchController() class AudioVideoItemsDisplay(ItemListDisplay): - def __init__(self): - Display.__init__(self) - self.controller = self.make_controller() - self.widget = self.controller.widget - self.type = self.__class__.tab_type - self.id = self.__class__.tab_id - @classmethod def should_display(cls, tab_type, selected_tabs): return (hasattr(selected_tabs[0], 'type') and selected_tabs[0].type == @@ -470,14 +594,14 @@ tab_type = u'videos' tab_id = u'videos' - def make_controller(self): + def make_controller(self, tab): return itemlistcontroller.VideoItemsController() class AudioItemsDisplay(AudioVideoItemsDisplay): tab_type = u'music' tab_id = u'music' - def make_controller(self): + def make_controller(self, tab): return itemlistcontroller.AudioItemsController() class OtherItemsDisplay(ItemListDisplay): @@ -569,19 +693,19 @@ return label def _on_reveal(self, button): - app.widgetapp.reveal_file(self.video_path) + app.widgetapp.reveal_file(self.filename) def _on_play_externally(self, button): - app.widgetapp.open_file(self.video_path) + app.widgetapp.open_file(self.filename) def _on_skip(self, button): app.playback_manager.play_next_item() - def set_video_path(self, video_path): - self.video_path = video_path - self.filename_label.set_text(os.path.split(video_path)[-1]) - self.filetype_label.set_text(os.path.splitext(video_path)[1]) - if filetypes.is_playable_filename(video_path): + def set_filename(self, filename): + self.filename = filename + self.filename_label.set_text(os.path.split(filename)[-1]) + self.filetype_label.set_text(os.path.splitext(filename)[1]) + if filetypes.is_playable_filename(filename): self.play_externally_button.set_text(_('Play Externally')) else: self.play_externally_button.set_text(_('Open Externally')) @@ -630,7 +754,7 @@ def setup(self, item_info, item_type, volume): self.show_renderer() self.cant_play_widget.set_remote(item_info.remote) - self.cant_play_widget.set_video_path(item_info.video_path) + self.cant_play_widget.set_filename(item_info.filename) self.item_info = item_info if item_type != 'video': self._open_error() diff -Nru miro-4.0.4/lib/frontends/widgets/donate.py miro-6.0/lib/frontends/widgets/donate.py --- miro-4.0.4/lib/frontends/widgets/donate.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/donate.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,241 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""Defines the donate window. Please help Miro! +""" + +import logging +import sys +import os + +from miro import app +from miro import messages +from miro import prefs +from miro.plat.frontends.widgets import widgetset +from miro.frontends.widgets import widgetutil +from miro.frontends.widgets import dialogs +from miro.frontends.widgets import dialogwidgets +from miro.frontends.widgets import prefpanel +from miro.plat import resources +from miro.gtcache import gettext as _ +from miro import gtcache +from miro.plat.frontends.widgets.threads import call_on_ui_thread + +class DonatePowerToys(object): + # NB: not translated on purpose + def __init__(self): + title = 'Donate PowerToys' + widget = self.build_widgets() + w, h = widget.get_size_request() + rect = widgetset.Rect(0, 0, w, h) + self.window = widgetset.DialogWindow(title, rect) + self.window.set_content_widget(widget) + + def run_dialog(self): + self.window.show() + + def on_reset_clicked(self, obj): + app.donate_manager.reset() + + def on_run_clicked(self, obj): + donate_url = self.donate_url_textentry.get_text() + payment_url = self.payment_url_textentry.get_text() + if not donate_url: + donate_url = None + if not payment_url: + payment_url = None + app.donate_manager.show_donate(url=donate_url, payment_url=payment_url) + + def on_set_ratelimit_clicked(self, obj): + app.donate_manager.set_ratelimit() + + def on_reset_ratelimit_clicked(self, obj): + app.donate_manager.reset_ratelimit() + + def on_reset_donate_url_clicked(self, obj): + self.donate_url = None + self.donate_url_textentry.set_text('') + + def on_reset_payment_url_clicked(self, obj): + self.payment_url = None + self.payment_url_textentry.set_text('') + + def build_widgets(self): + self.vlayout = widgetset.VBox(spacing=5) + grid = dialogwidgets.ControlGrid() + + donate_nothanks_textentry = widgetset.TextEntry() + donate_nothanks_textentry.set_width(5) + prefpanel.attach_integer(donate_nothanks_textentry, + prefs.DONATE_NOTHANKS, + prefpanel.build_error_image(), + prefpanel.create_value_checker(min_=0)) + + last_donate_time_textentry = widgetset.TextEntry() + last_donate_time_textentry .set_width(16) + prefpanel.attach_integer(last_donate_time_textentry, + prefs.LAST_DONATE_TIME, + prefpanel.build_error_image(), + prefpanel.create_value_checker(min_=0)) + + donate_counter_textentry = widgetset.TextEntry() + donate_counter_textentry.set_width(5) + prefpanel.attach_integer(donate_counter_textentry, + prefs.DONATE_COUNTER, + prefpanel.build_error_image(), + prefpanel.create_value_checker(min_=0)) + + set_ratelimit_button = widgetset.Button('Force ratelimit') + set_ratelimit_button.connect('clicked', self.on_set_ratelimit_clicked) + + reset_ratelimit_button = 
widgetset.Button('Force no ratelimit') + reset_ratelimit_button.connect('clicked', + self.on_reset_ratelimit_clicked) + + reset_button = widgetset.Button('Reset counters to factory defaults') + reset_button.connect('clicked', self.on_reset_clicked) + + reset_donate_url_button = widgetset.Button('Reset') + reset_donate_url_button.connect('clicked', + self.on_reset_donate_url_clicked) + + reset_payment_url_button = widgetset.Button('Reset') + reset_payment_url_button.connect('clicked', + self.on_reset_payment_url_clicked) + + self.donate_url_textentry = widgetset.TextEntry() + self.donate_url_textentry.set_width(16) + + self.payment_url_textentry = widgetset.TextEntry() + self.payment_url_textentry.set_width(16) + + run_button = widgetset.Button('Run dialog') + run_button.connect('clicked', self.on_run_clicked) + + grid.pack_label('Set DONATE_NOTHANKS', grid.ALIGN_RIGHT) + grid.pack(donate_nothanks_textentry, span=2) + grid.end_line(spacing=4) + + grid.pack_label('Set LAST_DONATE_TIME', grid.ALIGN_RIGHT) + grid.pack(last_donate_time_textentry, span=2) + grid.end_line(spacing=4) + + grid.pack_label('Set DONATE_COUNTER', grid.ALIGN_RIGHT) + grid.pack(donate_counter_textentry, span=2) + grid.end_line(spacing=4) + + grid.pack(reset_button, grid.FILL, span=3) + grid.end_line(spacing=4) + + hbox = widgetset.HBox() + hbox.pack_start(set_ratelimit_button) + hbox.pack_start(reset_ratelimit_button) + + grid.pack(widgetutil.align_center(hbox), grid.FILL, span=3) + grid.end_line(spacing=4) + grid.pack_label('Use donate url', grid.ALIGN_RIGHT) + grid.pack(self.donate_url_textentry) + grid.pack(reset_donate_url_button, grid.FILL) + grid.end_line(spacing=4) + + grid.pack_label('Use payment donate url', grid.ALIGN_RIGHT) + grid.pack(self.payment_url_textentry) + grid.pack(reset_payment_url_button, grid.FILL) + grid.end_line(spacing=4) + + grid.pack(run_button, grid.FILL, span=3) + grid.end_line(spacing=12) + + alignment = widgetset.Alignment(xalign=0.5, yalign=0.5) + alignment.set_padding(20, 20, 20, 20) + alignment.add(grid.make_table()) + + return alignment + +class DonateWindow(widgetset.DonateWindow): + def __init__(self): + widgetset.DonateWindow.__init__(self, _("Donate")) + self.create_signal('donate-clicked') + self.vbox = widgetset.VBox(spacing=5) + self.hbox = widgetset.HBox(spacing=5) + self.button_yes = widgetset.Button(_('Yes, I can donate now')) + self.button_no = widgetset.Button(_('Ask me later')) + self.button_yes.connect('clicked', self._on_button_clicked) + self.button_no.connect('clicked', self._on_button_clicked) + self.browser = widgetset.Browser() + self.browser.set_size_request(640, 440) + self.browser.connect('net-stop', self._on_browser_stop) + self.browser.connect('net-error', self._on_browser_error) + self.hbox.pack_end(widgetutil.align_middle(self.button_no, + right_pad=10)) + self.hbox.pack_end(widgetutil.align_middle(self.button_yes)) + self.vbox.pack_start(self.browser, padding=10, expand=True) + self.vbox.pack_start(self.hbox, padding=5) + self.set_content_widget(self.vbox) + self.was_shown_invoked = False + + self.callback_object = None + + def _on_button_clicked(self, widget): + callback_object = self.callback_object + self.callback_object = None + if widget == self.button_yes: + self.emit('donate-clicked', True, callback_object) + elif widget == self.button_no: + self.emit('donate-clicked', False, callback_object) + + def navigate(self, url): + self.browser.navigate(url) + + def show(self, url, callback_object): + if url: + self.callback_object = callback_object + 
logging.debug('Donate: Navigating to %s (callback object = %s)', + url, callback_object) + self.was_shown_invoked = True + self.browser.navigate(url) + else: + widgetset.DonateWindow.show(self) + + def _on_browser_stop(self, widget): + logging.debug('Donate: _on_browser_stop') + if self.was_shown_invoked: + widgetset.DonateWindow.show(self) + self.was_shown_invoked = False + + def _on_browser_error(self, widget): + # XXX Linux/GTK can't directly issue a self.navigate() here on error. + # Don't know why. :-( + logging.debug('Donate: _on_browser_error') + # only need to nav to fallback if the window was requested to be + # shown + if self.was_shown_invoked: + fallback_path = resources.url('donate.html') + call_on_ui_thread(lambda: self.browser.navigate(fallback_path)) + self.was_shown_invoked = False diff -Nru miro-4.0.4/lib/frontends/widgets/downloadscontroller.py miro-6.0/lib/frontends/widgets/downloadscontroller.py --- miro-4.0.4/lib/frontends/widgets/downloadscontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/downloadscontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -36,6 +36,7 @@ from miro.frontends.widgets import itemcontextmenu from miro.frontends.widgets import prefpanel +from miro import app from miro import messages from miro import downloader from miro import prefs @@ -44,11 +45,11 @@ def __init__(self): itemlistcontroller.ItemListController.__init__( self, u'downloading', u'downloading') - self.item_list.resort_on_update = True self.toolbar = None + self.update_buttons() def build_widget(self): - self.titlebar = self.make_titlebar() + self.titlebar.switch_to_view(self.widget.selected_view) self.widget.titlebar_vbox.pack_start(self.titlebar) self.status_toolbar = DownloadStatusToolbar() @@ -70,7 +71,7 @@ titlebar.connect("resume-all", self._on_resume_all) titlebar.connect("cancel-all", self._on_cancel_all) titlebar.connect("settings", self._on_settings) - titlebar.switch_to_view(self.widget.selected_view) + titlebar.hide_album_view_button() return titlebar def make_context_menu_handler(self): @@ -82,6 +83,17 @@ def _update_free_space(self): self.status_toolbar.update_free_space() + # The pause_all/resume_all and cancel_all handlers disable the automatic + # sorting mechanism until the operations are complete, because even though + # each operation affects all items in the list, the status updates for + # these items are not batched. + # + # When done, the backend is expected to send a reply indicating that all + # operations (up to this point) are complete, at which point we can + # re-enable the sort. See DownloadSyncCommandComplete(). We assume the + # remote command takes no more than a second or two, which should hold + # for a moderately sized download list.
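# As a sketch of that round trip (set_resort_on_update() and the handle_*
# name below are hypothetical stand-ins for the real hooks; only
# PauseAllDownloads and DownloadSyncCommandComplete come from this file):
#
#     def _on_pause_all(self, widget):
#         self.item_list.set_resort_on_update(False)  # suspend sorting
#         messages.PauseAllDownloads().send_to_backend()
#
#     def handle_download_sync_command_complete(self):
#         self.item_list.set_resort_on_update(True)   # batch finished
#         self.item_list.resort()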
def _on_pause_all(self, widget): messages.PauseAllDownloads().send_to_backend() @@ -94,6 +106,29 @@ def _on_settings(self, widget): prefpanel.show_window("downloads") - def on_items_changed(self): + def handle_item_list_changes(self): + itemlistcontroller.ItemListController.handle_item_list_changes(self) + self.update_rates() + self.update_buttons() + + def update_rates(self): self.status_toolbar.update_rates( - downloader.total_down_rate, downloader.total_up_rate) + app.download_state_manager.total_down_rate, + app.download_state_manager.total_up_rate) + + def update_buttons(self): + items = self.item_list.get_items() + if len(items) == 0: + self.titlebar.set_button_enabled("resume", False) + self.titlebar.set_button_enabled("pause", False) + self.titlebar.set_button_enabled("cancel", False) + else: + all_paused = all_downloading = True + for item in items: + if item.is_paused: + all_downloading = False + else: + all_paused = False + self.titlebar.set_button_enabled("resume", not all_downloading) + self.titlebar.set_button_enabled("pause", not all_paused) + self.titlebar.set_button_enabled("cancel", True) diff -Nru miro-4.0.4/lib/frontends/widgets/feedcontroller.py miro-6.0/lib/frontends/widgets/feedcontroller.py --- miro-4.0.4/lib/frontends/widgets/feedcontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/feedcontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,22 +35,23 @@ from miro import messages from miro.frontends.widgets import feedsettingspanel from miro.frontends.widgets import itemcontextmenu +from miro.frontends.widgets import itemlist from miro.frontends.widgets import itemlistcontroller from miro.frontends.widgets import itemlistwidgets -from miro.frontends.widgets import itemtrack from miro.frontends.widgets import itemrenderer from miro.frontends.widgets import widgetutil -class FeedController(itemlistcontroller.ItemListController, - itemlistcontroller.FilteredListMixin): +class FeedController(itemlistcontroller.ItemListController): """Controller object for feeds.""" - TYPE = u'feed' def __init__(self, id_, is_folder, is_directory_feed): self.is_folder = is_folder self.is_directory_feed = is_directory_feed self.titlebar = None - itemlistcontroller.ItemListController.__init__(self, self.TYPE, id_) - itemlistcontroller.FilteredListMixin.__init__(self) + if is_folder: + type_ = u'feed-folder' + else: + type_ = u'feed' + itemlistcontroller.ItemListController.__init__(self, type_, id_) self.show_resume_playing_button = True def make_context_menu_handler(self): @@ -59,8 +60,7 @@ def build_widget(self): feed_info = widgetutil.get_feed_info(self.id) - self.titlebar = self.make_titlebar(feed_info) - self.titlebar.connect('toggle-filter', self.on_toggle_filter) + self.titlebar.connect('filter-clicked', self.on_filter_clicked) self.titlebar.switch_to_view(self.widget.selected_view) self.titlebar.connect('search-changed', self._on_search_changed) self.widget.titlebar_vbox.pack_start(self.titlebar) @@ -79,7 +79,8 @@ return itemrenderer.ItemRenderer(display_channel=self.is_folder, is_podcast=(not feed_info.is_directory_feed)) - def make_titlebar(self, feed_info): + def make_titlebar(self): + feed_info = widgetutil.get_feed_info(self.id) if feed_info.is_directory_feed: titlebar = itemlistwidgets.WatchedFolderTitlebar() elif feed_info.is_folder: @@ -87,6 +88,8 @@ else: titlebar = itemlistwidgets.ChannelTitlebar() titlebar.connect('save-search', self._on_save_search) + if not self.is_directory_feed: + titlebar.hide_album_view_button() return titlebar def 
get_saved_search_text(self): @@ -131,9 +134,6 @@ def _on_auto_download_changed(self, widget, setting): messages.AutodownloadChange(self.id, setting).send_to_backend() - def on_initial_list(self): - self._update_counts() - def on_items_changed(self): self._update_counts() @@ -211,8 +211,15 @@ class AllFeedsController(FeedController): TYPE = u'tab' - def build_item_tracker(self): - return itemtrack.ItemListTracker.create(u'feed', self.id) - def make_titlebar(self, feed_info): + def __init__(self): + FeedController.__init__(self, u'feed-base-tab', True, True) + + def make_titlebar(self): return itemlistwidgets.AllFeedsTitlebar() + + def get_item_list_grouping(self): + return itemlist.feed_grouping + + def get_multi_row_album_mode(self): + return 'feed' diff -Nru miro-4.0.4/lib/frontends/widgets/firsttimedialog.py miro-6.0/lib/frontends/widgets/firsttimedialog.py --- miro-4.0.4/lib/frontends/widgets/firsttimedialog.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/firsttimedialog.py 2013-04-05 16:02:42.000000000 +0000 @@ -31,12 +31,14 @@ """ from miro import app +from miro import fileutil from miro import prefs from miro import util from miro import messages from miro.fileobject import FilenameType from miro.plat.frontends.widgets import widgetset from miro.plat.frontends.widgets import threads +from miro.frontends.widgets import widgetconst from miro.frontends.widgets import widgetutil from miro.frontends.widgets import prefpanel from miro.frontends.widgets import dialogs @@ -55,8 +57,24 @@ WIDTH = 475 HEIGHT = 375 +def get_media_player_name_path(where): + """Get the media player name and path from the platform. -def _build_title_question(text): + This method wraps the platform version with a bit of error checking and + logging. + + :param where: what piece of code is calling this? + """ + mp_name, mp_path = get_plat_media_player_name_path() + logging.debug("%s: got media player name/path: %r %r", + where, mp_name, mp_path) + if mp_path is None or mp_name is None: + # If either path or name is not given, make sure that neither is + # set. See #18865. + return None, None + return mp_name, mp_path + +def _build_title(text): """Builds and returns a title widget for the panes in the First Time Startup dialog. """ @@ -72,6 +90,22 @@ lab.set_size_request(WIDTH - 40, -1) return widgetutil.align_left(lab, bottom_pad=15) +def _build_checkbox_and_label(checkbox, label_text): + """Build a checkbox with a label right under it. + + This is useful because checkboxes don't wrap properly. Labels don't + wrap well either, but we can hack things to make labels wrap. 
+ """ + label = widgetset.Label(label_text) + label.set_size_request(WIDTH - 40, -1) + label.set_wrap(True) + vbox = widgetset.VBox(spacing=0) + vbox.pack_start(widgetutil.align_left(checkbox)) + vbox.pack_start(widgetutil.align_left(label)) + label.set_size(widgetconst.SIZE_SMALL) + checkbox.set_size(widgetconst.SIZE_NORMAL) + return vbox + class FirstTimeDialog(widgetset.DialogWindow): def __init__(self, done_firsttime_callback, title=None): if title == None: @@ -99,9 +133,8 @@ self._done_firsttime_callback = done_firsttime_callback - self.mp_name, self.mp_path = get_plat_media_player_name_path() - self._has_media_player = ( - self.mp_name is not None and self.mp_path is not None) + mp_info = get_media_player_name_path('firsttimedialog') + self.mp_name, self.mp_path = mp_info self._page_box = widgetset.VBox() self._pages = self.build_pages() @@ -113,13 +146,10 @@ def build_pages(self): pages = [self.build_language_page(), - self.build_startup_page()] - - if self._has_media_player: - pages.append(self.build_media_player_import_page()) - - pages.extend([self.build_find_files_page(), - self.build_search_page()]) + self.build_startup_page(), + self.build_music_page(), + self.build_find_files_page(), + self.build_search_page()] for page in pages: page.set_size_request(WIDTH - 40, HEIGHT - 40) @@ -131,7 +161,8 @@ def on_close(self, widget=None): if self.import_media_player_stuff: - logging.debug("firsttimedialog: adding mp_path") + logging.debug("firsttimedialog: adding mp_path (%r)", + self.mp_path) app.watched_folder_manager.add(self.mp_path) if self.gathered_media_files: logging.debug("firsttimedialog: adding %d files", @@ -182,7 +213,7 @@ "to help you get started.", {'name': app.config.get(prefs.SHORT_APP_NAME)}))) - vbox.pack_start(_build_title_question(_( + vbox.pack_start(_build_title(_( "What language would you like %(name)s to be in?", {'name': app.config.get(prefs.SHORT_APP_NAME)}))) @@ -200,8 +231,7 @@ # import time, so if someone changes the language, then # the translations have already happened. we reload the # module to force them to happen again. bug 17515 - if "miro.frontends.widgets.widgetconst" in sys.modules: - reload(sys.modules["miro.frontends.widgets.widgetconst"]) + reload(widgetconst) self.this_page(rebuild=True) lang_option_menu = widgetset.OptionMenu([op[1] for op in lang_options]) @@ -249,7 +279,7 @@ "and update your podcasts.", {'name': app.config.get(prefs.SHORT_APP_NAME)}))) - vbox.pack_start(_build_title_question(_( + vbox.pack_start(_build_title(_( "Would you like to run %(name)s on startup?", {'name': app.config.get(prefs.SHORT_APP_NAME)}))) @@ -288,7 +318,7 @@ "will be copied or duplicated.", {"name": app.config.get(prefs.SHORT_APP_NAME)}))) - vbox.pack_start(_build_title_question(_( + vbox.pack_start(_build_title(_( "Would you like %(name)s to search your computer " "for media files?", {"name": app.config.get(prefs.SHORT_APP_NAME)}))) @@ -456,21 +486,10 @@ threads.call_on_ui_thread(self.make_search_progress) except StopIteration: - num_found = len(self.gathered_media_files) - self.search_complete( - ngettext( - "found %(count)s media file", - "found %(count)s media files", - num_found, - {"count": num_found})) - self.finder = None - - except Exception: - # this is here to get more data for bug #17422 - logging.exception("exception thrown in make_search_progress") - - # we want to clean up after this exception, too. 
- num_found = len(self.gathered_media_files) + if self.gathered_media_files: + num_found = len(self.gathered_media_files) + else: + num_found = 0 self.search_complete( ngettext( "found %(count)s media file", @@ -479,7 +498,6 @@ {"count": num_found})) self.finder = None - def start_search(self): # only start a search if we haven't gathered anything, yet. if self.gathered_media_files is not None: @@ -536,50 +554,117 @@ return vbox - - def build_media_player_import_page(self): - vbox = widgetset.VBox(spacing=5) - - vbox.pack_start(_build_title_question(_( - "Would you like to display your %(player)s music and " - "video in %(appname)s?", - {"player": self.mp_name, - "appname": app.config.get(prefs.SHORT_APP_NAME)}))) - - rbg = widgetset.RadioButtonGroup() - yes_rb = widgetset.RadioButton(_("Yes"), rbg) - no_rb = widgetset.RadioButton(_("No"), rbg) - yes_rb.set_selected() - - vbox.pack_start(widgetutil.align_left(yes_rb)) - vbox.pack_start(widgetutil.align_left(no_rb)) - - lab = widgetset.Label(_( - "Note: %(appname)s won't move or copy any files on your " - "disk. It will just add them to your %(appname)s library.", - {"appname": app.config.get(prefs.SHORT_APP_NAME)})) - lab.set_size_request(WIDTH - 40, -1) - lab.set_wrap(True) - vbox.pack_start(widgetutil.align_left(lab)) - - def handle_next(widget): - if rbg.get_selected() == yes_rb: - self.import_media_player_stuff = True - else: - self.import_media_player_stuff = False - self.next_page() + def build_music_page(self): + vbox = MusicSetupVBox(self.mp_name, _('Music Setup')) + if vbox.import_cbx: + def on_import_toggled(cbx): + self.import_media_player_stuff = cbx.get_checked() + vbox.import_cbx.connect("toggled", on_import_toggled) prev_button = widgetset.Button(_("< Previous")) prev_button.connect('clicked', lambda x: self.prev_page()) next_button = widgetset.Button(_("Next >")) - next_button.connect('clicked', handle_next) + next_button.connect('clicked', lambda x: self.next_page()) vbox.pack_start( widgetutil.align_bottom(widgetutil.align_right( widgetutil.build_hbox((prev_button, next_button)))), expand=True) - vbox = widgetutil.pad(vbox) - return vbox + +class MusicSetupVBox(widgetset.VBox): + """VBox for the music setup page. + + This is separated out because we want to re-use it when the user first + clicks the music tab. + + Attributes: + - import_cbx: Checkbox to import music from the platform player + - net_lookup_cbx: Checkbox to enable net lookups by default + + This class handles changing the NET_LOOKUP_BY_DEFAULT pref, but the class + using it must handle responding import_cbx being checked. + """ + def __init__(self, mp_name, title): + widgetset.VBox.__init__(self, spacing=5) + + self.pack_start(_build_title(title)) + if mp_name is not None: + self.import_cbx = widgetset.Checkbox( + _("Show my %(player)s music in my %(appname)s library.", + {"player": mp_name, + "appname": app.config.get(prefs.SHORT_APP_NAME)})) + controls = _build_checkbox_and_label( + self.import_cbx, _("(Don't worry, no files are copied.)")) + self.pack_start(widgetutil.pad(controls, bottom=10)) + else: + self.import_cbx = None + + self.net_lookup_cbx = widgetset.Checkbox( + _('Cleanup Song Info and Album Art')) + + self.pack_start(_build_checkbox_and_label( + self.net_lookup_cbx, _('Miro will use Echonest and 7Digital ' + 'to cleanup all song titles, info, and ' + 'album art. 
Recommended.'))) + prefpanel.attach_boolean(self.net_lookup_cbx, + prefs.NET_LOOKUP_BY_DEFAULT) + + # Give the "Note:" heading top padding to separate it from the + # checkboxes, but no bottom padding to keep it together with the rest + # of the text. + heading = widgetset.Label(_("Note:")) + heading.set_wrap(True) + heading.set_bold(True) + heading.set_size_request(WIDTH - 40, -1) + self.pack_start(widgetutil.pad(heading, top=15)) + text = _("You can manually set or undo song info cleanup by " + "right-clicking on a song or batch of songs. You can " + "adjust lookup settings at any time in Miro preferences." + "\n\n" + "Online lookup involves sending anonymized data about " + "your songs to Miro and indirectly to Echonest and " + "7Digital.") + self.pack_start(_build_paragraph_text(text)) + +class MusicSetupDialog(dialogs.MainDialog): + """Ask some questions on music settings. + + We pop this up the first time the user clicks on the music tab, if we + haven't already asked the questions in the first-time setup dialog. + """ + def __init__(self): + dialogs.MainDialog.__init__(self, _("Music Setup")) + mp_info = get_media_player_name_path('musicsetupdialog') + self.mp_name, self.mp_path = mp_info + if self.already_added_media_player_path(): + self.mp_name = self.mp_path = None + self.vbox = MusicSetupVBox(self.mp_name, _('First Time Music Setup')) + self.set_extra_widget(self.vbox) + self.add_button(_("Get Started")) + + def already_added_media_player_path(self): + if self.mp_path is None: + return False + wf_model = app.watched_folder_manager.model + wf_iter = wf_model.first_iter() + while wf_iter is not None: + path = wf_model[wf_iter][1] + if fileutil.samefile(path, self.mp_path): + return True + wf_iter = wf_model.next_iter(wf_iter) + + def import_path(self): + """Path to import music from. + + The code using MusicSetupDialog should check this once the dialog is + closed and create a watched folder if needed. + + :returns: path or None if we shouldn't import anything. + """ + if self.vbox.import_cbx and self.vbox.import_cbx.get_checked(): + return self.mp_path + else: + return None diff -Nru miro-4.0.4/lib/frontends/widgets/gst/gst_extractor.py miro-6.0/lib/frontends/widgets/gst/gst_extractor.py --- miro-4.0.4/lib/frontends/widgets/gst/gst_extractor.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gst/gst_extractor.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,345 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. 
+# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +import sys +import logging +import os +import urllib + +import pygtk +import gtk +import gobject + +import pygst +pygst.require('0.10') +import gst +import gst.interfaces + +def scaled_size(from_size, to_size): + """Takes an image which has a width and a height and a size tuple + that specifies the space available and returns the new width + and height that allows the image to fit into the sized space + at the correct height/width ratio. + + :param from_size: (width, height) tuple of original image size + :param to_size: (width, height) tuple of new image size + """ + image_ratio = float(from_size[0]) / from_size[1] + new_ratio = float(to_size[0]) / to_size[1] + if image_ratio == new_ratio: + return to_size + elif image_ratio > new_ratio: + # The scaled image has a wider aspect ratio than the old one. + height = int(round(float(to_size[0]) / from_size[0] * from_size[1])) + return to_size[0], height + else: + # The scaled image has a taller aspect ratio than the old one. + width = int(round(float(to_size[1]) / from_size[1] * from_size[0])) + return width, to_size[1] + + +class Extractor: + def __init__(self, filename, thumbnail_filename): + logging.info("running gstreamer Extractor on %s", filename) + self.thumbnail_filename = thumbnail_filename + self.filename = filename + + self.timeout = None + self.grabit = False + self.first_pause = True + self.doing_thumbnailing = False + self.success = False + self.duration = -1 + self.buffer_probes = {} + self.audio_only = False + self.saw_video_tag = self.saw_audio_tag = False + + self.pipeline = gst.element_factory_make('playbin') + self.videosink = gst.element_factory_make("fakesink", "videosink") + self.pipeline.set_property("video-sink", self.videosink) + self.audiosink = gst.element_factory_make("fakesink", "audiosink") + self.pipeline.set_property("audio-sink", self.audiosink) + + self.thumbnail_pipeline = None + + self.bus = self.pipeline.get_bus() + self.bus.add_signal_watch() + self.watch_id = self.bus.connect("message", self.on_bus_message) + + fileurl = urllib.pathname2url(os.path.abspath(filename)) + logging.warn("FILE URL: %r", 'file:' + fileurl) + self.pipeline.set_property("uri", "file:%s" % fileurl) + self.pipeline.set_state(gst.STATE_PAUSED) + + + def on_bus_message(self, bus, message): + if message.type == gst.MESSAGE_ERROR: + logging.warn("gstreamer error: %s", message) + gobject.idle_add(self.error_occurred) + + elif message.type == gst.MESSAGE_STATE_CHANGED: + _prev, state, _pending = message.parse_state_changed() + if state == gst.STATE_PAUSED: + if message.src == self.pipeline: + logging.info("gstreamer ready") + gobject.idle_add(self.paused_reached) + + elif (message.src == self.thumbnail_pipeline and + not self.doing_thumbnailing): + + logging.info("thumbnail_pipeline ready") + + self.doing_thumbnailing = True + for sink in self.thumbnail_pipeline.sinks(): + name = sink.get_name() + factoryname = sink.get_factory().get_name() + if factoryname == "fakesink": + pad = sink.get_pad("sink") + self.buffer_probes[name] = pad.add_buffer_probe( + self.buffer_probe_handler, name) + break 
+
+                    seek_amount = min(self.duration / 2, 20 * gst.SECOND)
+                    seek_result = self.thumbnail_pipeline.seek(
+                        1.0, gst.FORMAT_TIME,
+                        gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_ACCURATE,
+                        gst.SEEK_TYPE_SET, seek_amount,
+                        gst.SEEK_TYPE_NONE, 0)
+                    logging.info("seek finished. Result: %s", seek_result)
+
+                    if not seek_result:
+                        self.disconnect()
+                        self.done()
+
+    def done(self):
+        if self.saw_video_tag:
+            media_type = 'video'
+        elif self.saw_audio_tag:
+            media_type = 'audio'
+        else:
+            media_type = 'other'
+        self.media_type = media_type
+        self.cancel_timeout()
+        gtk.main_quit()
+
+    def run(self):
+        gobject.threads_init()
+        self.timeout = gobject.timeout_add(30000, self.on_timeout)
+        gtk.main()
+
+    def on_timeout(self):
+        logging.warn("on_timeout() reached. Quitting.")
+        self.done()
+
+    def cancel_timeout(self):
+        if self.timeout is not None:
+            gobject.source_remove(self.timeout)
+            self.timeout = None
+
+    def get_result(self):
+        duration = self.duration
+        success = self.success
+        media_type = self.media_type
+
+        if duration != -1:
+            duration /= 1000000
+
+        return (media_type, duration, success)
+
+    def get_duration(self, pipeline, attempts=0):
+        if attempts == 5:
+            return 0
+        try:
+            return pipeline.query_duration(gst.FORMAT_TIME)[0]
+        except gst.QueryError:
+            return self.get_duration(pipeline, attempts + 1)
+
+    def paused_reached(self):
+        self.saw_video_tag = False
+        self.saw_audio_tag = False
+
+        if not self.first_pause:
+            return False
+
+        # only process the first PAUSED state change
+        self.first_pause = False
+        current_video = self.pipeline.get_property("current-video")
+        current_audio = self.pipeline.get_property("current-audio")
+
+        if current_video == 0:
+            self.saw_video_tag = True
+        if current_audio == 0:
+            self.saw_audio_tag = True
+
+        if not self.saw_video_tag and self.saw_audio_tag:
+            # audio only
+            logging.info("audio only... calling done()")
+            self.audio_only = True
+            self.duration = self.get_duration(self.pipeline)
+            self.success = True
+            self.disconnect()
+            self.done()
+            return False
+
+        if not self.saw_video_tag and not self.saw_audio_tag:
+            logging.info("no audio or video... calling done()")
+            # no audio and no video
+            self.audio_only = False
+            self.disconnect()
+            self.done()
+            return False
+
+        self.duration = self.get_duration(self.pipeline)
+        self.grabit = True
+        self.buffer_probes = {}
+
+        self.thumbnail_pipeline = gst.parse_launch(
+            'filesrc name="filesrc" ! decodebin ! '
+            'ffmpegcolorspace ! video/x-raw-rgb,depth=24,bpp=24 ! '
+            'fakesink signal-handoffs=True')
+        filesrc = self.thumbnail_pipeline.get_by_name('filesrc')
+        filesrc.set_property("location", self.filename)
+
+        self.thumbnail_bus = self.thumbnail_pipeline.get_bus()
+        self.thumbnail_bus.add_signal_watch()
+        self.thumbnail_watch_id = self.thumbnail_bus.connect(
+            "message", self.on_bus_message)
+
+        self.thumbnail_pipeline.set_state(gst.STATE_PAUSED)
+
+        return False
+
+    def error_occurred(self):
+        self.disconnect()
+        self.done()
+        return False
+
+    def buffer_probe_handler_real(self, pad, buff, name):
+        """Capture buffers as gdk_pixbufs when told to.
+        """
+        logging.info("buffer_probe_handler_real running")
+        try:
+            caps = buff.caps
+            if caps is None:
+                self.success = False
+                self.disconnect()
+                self.done()
+                return False
+
+            filters = caps[0]
+            width = filters["width"]
+            height = filters["height"]
+
+            pixbuf = gtk.gdk.pixbuf_new_from_data(
+                buff.data, gtk.gdk.COLORSPACE_RGB, False, 8,
+                width, height, width * 3)
+
+            # NOTE: 200x136 is sort of arbitrary. it's larger than what
+            # the ui uses at the time of this writing.
+            new_width, new_height = scaled_size((width, height), (200, 136))
+
+            pixbuf = pixbuf.scale_simple(
+                new_width, new_height, gtk.gdk.INTERP_BILINEAR)
+
+            pixbuf.save(self.thumbnail_filename, "png")
+            del pixbuf
+            self.success = True
+            self.disconnect()
+            self.done()
+        except gst.QueryError:
+            pass
+        logging.info("buffer_probe_handler_real finished")
+        return False
+
+    def buffer_probe_handler(self, pad, buff, name):
+        gobject.idle_add(
+            lambda: self.buffer_probe_handler_real(pad, buff, name))
+        return True
+
+    def disconnect(self):
+        if self.pipeline is not None:
+            self.pipeline.set_state(gst.STATE_NULL)
+            if not self.audio_only:
+                for sink in self.pipeline.sinks():
+                    name = sink.get_name()
+                    factoryname = sink.get_factory().get_name()
+                    if factoryname == "fakesink":
+                        pad = sink.get_pad("sink")
+                        pad.remove_buffer_probe(self.buffer_probes[name])
+                        del self.buffer_probes[name]
+            self.pipeline = None
+        if self.thumbnail_pipeline is not None:
+            self.thumbnail_pipeline.set_state(gst.STATE_NULL)
+            self.thumbnail_pipeline = None
+
+        if self.bus is not None:
+            self.bus.disconnect(self.watch_id)
+            self.bus = None
+
+
+def make_verbose():
+    import logging
+    logging.basicConfig(level=logging.DEBUG)
+
+    def wrap_func(func):
+        def _wrap_func(*args, **kwargs):
+            logging.debug("calling %s (%s) (%s)",
+                          func.__name__, repr(args), repr(kwargs))
+            return func(*args, **kwargs)
+        return _wrap_func
+
+    for mem in dir(Extractor):
+        fun = Extractor.__dict__[mem]
+        if callable(fun):
+            Extractor.__dict__[mem] = wrap_func(fun)
+
+def run(movie_file, thumbnail_file):
+    extractor = Extractor(movie_file, thumbnail_file)
+    extractor.run()
+    return extractor.get_result()
+
+def main(argv):
+    if "--verbose" in argv:
+        make_verbose()
+        argv.remove("--verbose")
+
+    if len(argv) < 2:
+        print "Syntax: gst_extractor.py <path-to-movie> [path-to-thumbnail]"
+        return 1
+
+    if len(argv) < 3:
+        argv.append(os.path.join(os.path.dirname(__file__), "thumbnail.png"))
+
+    result = run(argv[1], argv[2])
+    print result
+
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main(sys.argv))
diff -Nru miro-4.0.4/lib/frontends/widgets/gst/gstutil.py miro-6.0/lib/frontends/widgets/gst/gstutil.py
--- miro-4.0.4/lib/frontends/widgets/gst/gstutil.py 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gst/gstutil.py 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,51 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. 
If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""gstutils.py -- utility functions for gstreamer code. """ + +import gst + +from miro import util + +def to_seconds(t): + return t / gst.SECOND + +def from_seconds(s): + return s * gst.SECOND + +def _get_file_url(filename): + """Get a file:// URL for a filename """ + + # FIXME: this code is really weird. We should probably make urlize() + # always work + + try: + return filename.urlize() + except AttributeError: + return util.make_file_url(filename) diff -Nru miro-4.0.4/lib/frontends/widgets/gst/__init__.py miro-6.0/lib/frontends/widgets/gst/__init__.py --- miro-4.0.4/lib/frontends/widgets/gst/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gst/__init__.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,34 @@ +# Miro - an RSS based video player application +# Copyright (C) 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""miro.frontends.widgets.gst -- portable gstreamer code """ + +# check that we have the correct pygst version when we import the gst package +import pygst +pygst.require('0.10') diff -Nru miro-4.0.4/lib/frontends/widgets/gst/renderer.py miro-6.0/lib/frontends/widgets/gst/renderer.py --- miro-4.0.4/lib/frontends/widgets/gst/renderer.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gst/renderer.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,645 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""gst.renderer -- Video and Audio renderer using gstreamer
+"""
+
+import logging
+import os
+
+import gst
+import gst.interfaces
+import gtk
+
+# not sure why this isn't in the gst module, but it's easy to define
+GST_PLAY_FLAG_TEXT = (1 << 2)
+
+from miro import app
+from miro import prefs
+from miro import util
+from miro.gtcache import gettext as _
+from miro.plat import options
+from miro import iso639
+
+from miro.frontends.widgets import menus
+from miro.frontends.widgets import widgetconst
+from miro.frontends.widgets.gst import gstutil
+
+class SinkFactory(object):
+    """SinkFactory -- Create audio/video sinks for our renderer.
+
+    SinkFactory is the interface that platforms must implement to use
+    VideoRenderer and AudioRenderer. It simply contains the logic to create
+    the audiosink and videosink elements to give to gstreamer.
+
+    When creating elements, SinkFactory can return anything that will work
+    with playbin2. Normally this means creating an element with
+    gst.element_factory_make(). However, if platforms need more complexity,
+    they can chain elements together and return a gst.Bin that contains them.
+    """
+
+    def make_audiosink(self):
+        """Create a gstreamer element to use as an audiosink. """
+        raise NotImplementedError()
+
+    def make_videosink(self):
+        """Create a gstreamer element to use as a videosink. 
""" + raise NotImplementedError() + + +class Renderer(object): + def __init__(self, sink_factory): + logging.info("GStreamer version: %s", gst.version_string()) + + self.rate = 1.0 + self.select_callbacks = None + self.sink_factory = sink_factory + self.supports_subtitles = True + self.playbin = None + self.bus = None + self.watch_ids = [] + self.enabled_track = None + + def build_playbin(self): + self.playbin = gst.element_factory_make("playbin2", "player") + self.bus = self.playbin.get_bus() + self.bus.add_signal_watch() + self.bus.enable_sync_message_emission() + + self.watch_ids.append(self.bus.connect("message", self.on_bus_message)) + self.playbin.set_property("audio-sink", + self.sink_factory.make_audiosink()) + + def destroy_playbin(self): + if self.playbin is None: + return + for watch_id in self.watch_ids: + self.bus.disconnect(watch_id) + self.watch_ids = [] + self.bus = None + self.playbin = None + + def invoke_select_callback(self, success=False): + if success: + callback = self.select_callbacks[0] + else: + callback = self.select_callbacks[1] + self.select_callbacks = None + try: + callback() + except StandardError: + logging.exception("Error calling renderer callback") + + def on_bus_message(self, bus, message): + """receives message posted on the GstBus""" + if message.src is not self.playbin: + return + + if message.type == gst.MESSAGE_ERROR: + err, debug = message.parse_error() + if self.select_callbacks is not None: + self.invoke_select_callback(success=False) + logging.error("on_bus_message: gstreamer error: %s", err) + else: + err, debug = message.parse_error() + logging.error("on_bus_message (after callbacks): " + "gstreamer error: %s", err) + elif message.type == gst.MESSAGE_STATE_CHANGED: + prev, new, pending = message.parse_state_changed() + if ((new == gst.STATE_PAUSED + and self.select_callbacks is not None)): + self.invoke_select_callback(success=True) + self.finish_select_file() + elif message.type == gst.MESSAGE_EOS: + app.playback_manager.on_movie_finished() + + def select_file(self, iteminfo, callback, errback): + """starts playing the specified file""" + self._setup_item(iteminfo) + self.select_callbacks = (callback, errback) + self.playbin.set_state(gst.STATE_PAUSED) + + def _setup_item(self, iteminfo): + self.stop() + self.destroy_playbin() + self.build_playbin() + self.enabled_track = None + + self.iteminfo = iteminfo + self.playbin.set_property("uri", + gstutil._get_file_url(iteminfo.filename)) + + def finish_select_file(self): + pass + + def get_current_time(self, attempt=0): + # query_position fails periodically, so this attempts it 5 times + # and if after that it fails, then we return 0. 
+ if not self.playbin or attempt == 5: + return 0 + try: + position, fmt = self.playbin.query_position(gst.FORMAT_TIME) + return gstutil.to_seconds(position) + except gst.QueryError, qe: + logging.warn("get_current_time: caught exception: %s" % qe) + return self.get_current_time(attempt + 1) + + def _seek(self, seconds): + if not self.playbin: + return + event = gst.event_new_seek( + 1.0, + gst.FORMAT_TIME, + gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_ACCURATE, + gst.SEEK_TYPE_SET, gstutil.from_seconds(seconds), + gst.SEEK_TYPE_NONE, 0) + result = self.playbin.send_event(event) + if not result: + logging.error("seek failed") + + def _set_current_time_actual(self, bus, message, seconds): + if not self.playbin: + return + if self.playbin.get_state(0)[1] in (gst.STATE_PAUSED, + gst.STATE_PLAYING): + self._seek(seconds) + self.bus.disconnect(self._set_current_time_actual_id) + + def set_current_time(self, seconds): + if not self.playbin: + return + # only want to kick these off when PAUSED or PLAYING + if self.playbin.get_state(0)[1] not in (gst.STATE_PAUSED, + gst.STATE_PLAYING): + self._set_current_time_actual_id = self.bus.connect( + "message::state-changed", + self._set_current_time_actual, + seconds) + return + + self._seek(seconds) + + def get_duration(self): + if not self.playbin: + return None + try: + duration, fmt = self.playbin.query_duration(gst.FORMAT_TIME) + return gstutil.to_seconds(duration) + except gst.QueryError, qe: + logging.warn("get_duration: caught exception: %s" % qe) + return None + + def reset(self): + if self.playbin: + self.playbin.set_state(gst.STATE_NULL) + self.destroy_playbin() + + def set_volume(self, level): + if not self.playbin: + return + self.playbin.set_property("volume", level / widgetconst.MAX_VOLUME) + + def play(self): + if not self.playbin: + return + self.playbin.set_state(gst.STATE_PLAYING) + + def pause(self): + if not self.playbin: + return + self.playbin.set_state(gst.STATE_PAUSED) + + def stop(self): + if self.playbin: + self.reset() + + def get_rate(self): + return 256 + + def set_rate(self, rate): + if not self.playbin or self.rate == rate: + return + self.rate = rate + position = self.playbin.query_position(gst.FORMAT_TIME, None)[0] + if rate >= 0: + self.playbin.seek(rate, + gst.FORMAT_TIME, + gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_KEY_UNIT, + gst.SEEK_TYPE_SET, + position + (rate * gst.SECOND), + gst.SEEK_TYPE_SET, + -1) + else: + self.playbin.seek(rate, + gst.FORMAT_TIME, + gst.SEEK_FLAG_FLUSH | gst.SEEK_FLAG_KEY_UNIT, + gst.SEEK_TYPE_SET, + 0, + gst.SEEK_TYPE_SET, + position + (rate * gst.SECOND)) + + def get_audio_tracks(self): + if not self.playbin: + return 0 + return self.playbin.get_property('n-audio') + + def set_audio_track(self, track_index): + if not self.playbin: + return + self.playbin.set_property('current-audio', track_index) + + def get_enabled_audio_track(self): + if not self.playbin: + return None + return self.playbin.get_property('current-audio') + +class AudioRenderer(Renderer): + pass + +class VideoRenderer(Renderer): + def __init__(self, sink_factory): + Renderer.__init__(self, sink_factory) + + self.textsink_name = "textoverlay" + self.imagesink = None + self.window_id = None + self.config_cb_handle = None + + def build_playbin(self): + Renderer.build_playbin(self) + # imagesink gets set when we get the prepare-xwindow-id message + self.imagesink = None + self.watch_ids.append(self.bus.connect( + 'sync-message::element', self.on_sync_message)) + self.playbin.set_property("video-sink", + self.sink_factory.make_videosink()) + 
try: + textsink = gst.element_factory_make( + self.textsink_name, "textsink") + self.playbin.set_property("text-sink", textsink) + except TypeError: + logging.warning("this platform has an old version of " + "playbin2--no subtitle support.") + self.supports_subtitles = False + # setup subtitle fonts + self.set_subtitle_font(app.config.get(prefs.SUBTITLE_FONT)) + self.connect_to_config_changed() + + def destroy_playbin(self): + Renderer.destroy_playbin(self) + self.imagesink = None + self.disconnect_from_config_changed() + + def connect_to_config_changed(self): + if self.config_cb_handle is None: + self.config_cb_handle = app.frontend_config_watcher.connect( + 'changed', self.on_config_changed) + + def disconnect_from_config_changed(self): + if self.config_cb_handle is not None: + app.frontend_config_watcher.disconnect(self.config_cb_handle) + self.config_cb_handle = None + + def on_config_changed(self, obj, key, value): + if key == prefs.SUBTITLE_FONT.key: + self.set_subtitle_font(value) + + def set_subtitle_font(self, name): + if not self.playbin: + return + self.playbin.set_property("subtitle-font-desc", name) + + def select_file(self, iteminfo, callback, errback, sub_filename=""): + self._setup_item(iteminfo) + self._setup_initial_subtitles(sub_filename) + self.select_callbacks = (callback, errback) + self.playbin.set_state(gst.STATE_PAUSED) + + def _setup_initial_subtitles(self, sub_filename): + sub_index = -1 + if (app.config.get(prefs.ENABLE_SUBTITLES) and self.supports_subtitles + and not sub_filename): + tracks = self.get_subtitles() + if 100 in tracks: # Select default sidecar file + sub_filename = tracks[100][1] + sub_index = 0 + self.enabled_track = 100 + elif 0 in tracks: # Select default embedded subtitle track + sub_index = 0 + self.enabled_track = 0 + + if sub_filename and self.playbin: + self.playbin.set_property("suburi", + gstutil._get_file_url(sub_filename)) + if sub_index > -1: + flags = self.playbin.get_property('flags') + self.playbin.set_properties(flags=flags | GST_PLAY_FLAG_TEXT, + current_text=sub_index) + + def on_sync_message(self, bus, message): + if message.structure is None: + return + message_name = message.structure.get_name() + if message_name == 'prepare-xwindow-id': + self.imagesink = message.src + self.imagesink.set_property('force-aspect-ratio', True) + if self.window_id is not None: + self.imagesink.set_xwindow_id(self.window_id) + else: + logging.warn("Got prepare-xwindow-id before " + "set_xwindow_id() called") + + def set_window_id(self, window_id): + """Set the window id to render to + + window_id a value to pass into gstreamer's set_xwindow_id() method. + On linux, it's an x window id, on windows it's an HWND + """ + self.window_id = window_id + + def ready_for_expose(self): + """Are we ready to handle an expose event?""" + return self.imagesink and hasattr(self.imagesink, "expose") + + def expose(self): + if self.ready_for_expose(): + self.imagesink.expose() + + def go_fullscreen(self): + """Handle when the video window goes fullscreen.""" + logging.debug("haven't implemented go_fullscreen method yet!") + + def exit_fullscreen(self): + """Handle when the video window exits fullscreen mode.""" + logging.debug("haven't implemented exit_fullscreen method yet!") + + def _get_subtitle_track_name(self, index): + """Returns the language for the track at the specified index. 
+        """
+        if not self.supports_subtitles or not self.playbin:
+            return None
+        tag_list = self.playbin.emit("get-text-tags", index)
+        lang = None
+        if tag_list is not None and gst.TAG_LANGUAGE_CODE in tag_list:
+            code = tag_list[gst.TAG_LANGUAGE_CODE]
+            lang = iso639.find(code)
+        if lang is None:
+            return None
+        else:
+            return lang['name']
+
+    def _get_subtitle_file_name(self, filename):
+        """Returns the language for the file at the specified
+        filename.
+        """
+        if not self.supports_subtitles or not self.playbin:
+            return None
+        basename, ext = os.path.splitext(filename)
+        movie_file, code = os.path.splitext(basename)
+
+        # if the filename is just "foo.srt", then basename is "foo" and
+        # there is no language code, so we return None
+        if not code:
+            return None
+
+        # remove . in the code so we end up with what's probably
+        # a two or three letter language code
+        if "." in code:
+            code = code.replace(".", "")
+
+        lang = iso639.find(code)
+        if lang is None:
+            return None
+        else:
+            return lang['name']
+
+    def get_subtitles(self):
+        """Returns a dict of index -> (language, filename) for available
+        tracks.
+        """
+        if not self.playbin or not self.supports_subtitles:
+            return {}
+
+        tracks = {}
+
+        if self.playbin.get_property("suburi") is None:
+            # Don't list subtitle tracks that we're getting from an SRT file
+            for track_index in range(self.playbin.get_property("n-text")):
+                track_name = self._get_subtitle_track_name(track_index)
+                if track_name is None:
+                    track_name = _("Track %(tracknumber)d",
+                                   {"tracknumber": track_index})
+                tracks[track_index] = (track_name, None)
+
+        files = util.gather_subtitle_files(self.iteminfo.filename)
+
+        external_track_id = 100
+        for i, mem in enumerate(files):
+            track_name = self._get_subtitle_file_name(mem)
+            if track_name is None:
+                track_name = _("Subtitle file %(tracknumber)d",
+                               {"tracknumber": i})
+            tracks[external_track_id + i] = (track_name, mem)
+
+        return tracks
+
+    def get_subtitle_tracks(self):
+        """Returns a list of 2-tuple of (index, language) for
+        available tracks.
+        """
+        if not self.supports_subtitles or not self.playbin:
+            return []
+        tracks = [(index, language)
+                  for index, (language, filename)
+                  in self.get_subtitles().items()]
+        return tracks
+
+    def get_enabled_subtitle_track(self):
+        if not self.supports_subtitles or not self.playbin:
+            return None
+        if self.enabled_track is not None:
+            return self.enabled_track
+        return self.playbin.get_property("current-text")
+
+    def set_subtitle_track(self, track_index):
+        if not self.supports_subtitles or not self.playbin:
+            return
+        tracks = self.get_subtitles()
+        if tracks.get(track_index) is None:
+            return
+
+        language, filename = tracks[track_index]
+
+        if filename is not None:
+            self.switch_subtitle_file(filename)
+            self.enabled_track = track_index
+            return
+        flags = self.playbin.get_property('flags')
+        self.playbin.set_properties(flags=flags | GST_PLAY_FLAG_TEXT,
+                                    current_text=track_index)
+
+    def switch_subtitle_file(self, filename):
+        """Set our playbin to use a file to get subtitles from. 
+
+        :param filename: path to file or None to disable subtitle files
+        """
+        # file-based subtitle tracks have to get selected as files
+        # first, then enable_subtitle_track gets called again with
+        # the new track_index
+        pos = self.get_current_time()
+
+        # note: select_success needs to mirror what playback
+        # manager does
+        def select_success():
+            self.set_current_time(pos)
+            self.play()
+
+        self.select_subtitle_file(self.iteminfo, filename, select_success)
+
+    def disable_subtitles(self):
+        if not self.supports_subtitles or not self.playbin:
+            return
+        if self.playbin.get_property("suburi") is None:
+            # playing embedded subtitles, we can just switch off the
+            # PLAY_FLAG_TEXT property
+            flags = self.playbin.get_property('flags')
+            self.playbin.set_property('flags', flags & ~GST_PLAY_FLAG_TEXT)
+        else:
+            # playing subtitles from an external file, we have to jump through
+            # some hoops to disable them.
+            self.switch_subtitle_file(None)
+        self.enabled_track = None
+
+    def select_subtitle_file(self, iteminfo, sub_path,
+                             handle_successful_select):
+        if not self.supports_subtitles or not self.playbin:
+            return
+        subtitle_encoding = self.playbin.get_property("subtitle-encoding")
+
+        def handle_ok():
+            self.playbin.set_property("subtitle-encoding", subtitle_encoding)
+            handle_successful_select()
+
+        def handle_err():
+            app.playback_manager.stop()
+        filenames = [filename
+                     for lang, filename in self.get_subtitles().values()]
+        if sub_path is not None and sub_path not in filenames:
+            sub_path = util.copy_subtitle_file(sub_path, iteminfo.filename)
+        self.select_file(iteminfo, handle_ok, handle_err, sub_path)
+
+    def setup_subtitle_encoding_menu(self):
+        app.menu_manager.add_subtitle_encoding_menu(_('Eastern European'),
+            ("ISO-8859-4", _("Baltic")),
+            ("ISO-8859-13", _("Baltic")),
+            ("WINDOWS-1257", _("Baltic")),
+            ("MAC_CROATIAN", _("Croatian")),
+            ("ISO-8859-5", _("Cyrillic")),
+            ("IBM855", _("Cyrillic")),
+            ("ISO-IR-111", _("Cyrillic")),
+            ("KOI8-R", _("Cyrillic")),
+            ("MAC-CYRILLIC", _("Cyrillic")),
+            ("WINDOWS-1251", _("Cyrillic")),
+            ("CP866", _("Cyrillic/Russian")),
+            ("MAC_UKRAINIAN", _("Cyrillic/Ukrainian")),
+            ("KOI8-U", _("Cyrillic/Ukrainian")),
+            ("ISO-8859-2", _("Central European")),
+            ("IBM852", _("Central European")),
+            ("MAC_CE", _("Central European")),
+            ("WINDOWS-1250", _("Central European")),
+            ("ISO-8859-16", _("Romanian")),
+            ("MAC_ROMANIAN", _("Romanian")),
+            )
+        app.menu_manager.add_subtitle_encoding_menu(_('Western European'),
+            ("ISO-8859-14", _("Celtic")),
+            ("ISO-8859-7", _("Greek")),
+            ("MAC_GREEK", _("Greek")),
+            ("WINDOWS-1253", _("Greek")),
+            ("MAC_ICELANDIC", _("Icelandic")),
+            ("ISO-8859-10", _("Nordic")),
+            ("ISO-8859-3", _("South European")),
+            ("ISO-8859-1", _("Western")),
+            ("ISO-8859-15", _("Western")),
+            ("IBM850", _("Western")),
+            ("MAC_ROMAN", _("Western")),
+            ("WINDOWS-1252", _("Western")),
+            )
+        app.menu_manager.add_subtitle_encoding_menu(_('East Asian'),
+            ("GB18030", _("Chinese Simplified")),
+            ("GB2312", _("Chinese Simplified")),
+            ("GBK", _("Chinese Simplified")),
+            ("HZ", _("Chinese Simplified")),
+            ("BIG5", _("Chinese Traditional")),
+            ("BIG5-HKSCS", _("Chinese Traditional")),
+            ("EUC-TW", _("Chinese Traditional")),
+            ("EUC-JP", _("Japanese")),
+            ("ISO2022JP", _("Japanese")),
+            ("SHIFT-JIS", _("Japanese")),
+            ("EUC-KR", _("Korean")),
+            ("ISO2022KR", _("Korean")),
+            ("JOHAB", _("Korean")),
+            ("UHC", _("Korean")),
+            )
+        app.menu_manager.add_subtitle_encoding_menu(_('SE and SW Asian'),
+            ("ARMSCII-8", _("Armenian")),
+            ("GEORGIAN-PS", _("Georgian")),
+            
("MAC_GUJARATI", _("Gujarati")), + ("MAC_GURMUKHI", _("Gurmukhi")), + ("MAC_DEVANAGARI", _("Hindi")), + ("TIS-620", _("Thai")), + ("ISO-8859-9", _("Turkish")), + ("IBM857", _("Turkish")), + ("MAC_TURKISH", _("Turkish")), + ("WINDOWS-1254", _("Turkish")), + ("TCVN", _("Vietnamese")), + ("VISCII", _("Vietnamese")), + ("WINDOWS-1258", _("Vietnamese")), + ) + app.menu_manager.add_subtitle_encoding_menu(_('Middle Eastern'), + ("ISO-8859-6", _("Arabic")), + ("IBM864", _("Arabic")), + ("MAC_ARABIC", _("Arabic")), + ("WINDOWS-1256", _("Arabic")), + ("ISO-8859-8-I", _("Hebrew")), + ("IBM862", _("Hebrew")), + ("MAC_HEBREW", _("Hebrew")), + ("WINDOWS-1255", _("Hebrew")), + ("ISO-8859-8", _("Hebrew Visual")), + ("MAC_FARSI", _("Persian")), + ) + app.menu_manager.add_subtitle_encoding_menu(_('Unicode'), + ("UTF-7", _("Unicode")), + ("UTF-8", _("Unicode")), + ("UTF-16", _("Unicode")), + ("UCS-2", _("Unicode")), + ("UCS-4", _("Unicode")), + ) + + def select_subtitle_encoding(self, encoding): + self.playbin.set_property("subtitle-encoding", encoding) diff -Nru miro-4.0.4/lib/frontends/widgets/gst/sniffer.py miro-6.0/lib/frontends/widgets/gst/sniffer.py --- miro-4.0.4/lib/frontends/widgets/gst/sniffer.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gst/sniffer.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,104 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""gst.sniffer -- Examine media files, without playing them +""" + +import thread +from threading import Event + +import gst +import gst.interfaces + +from miro.frontends.widgets.gst import gstutil +from miro.plat.frontends.widgets.threads import call_on_ui_thread + +class Sniffer: + """Determines whether a file is "audio", "video", or "unplayable". 
+    """
+    def __init__(self, filename):
+        self.done = Event()
+        self.success = False
+
+        self.playbin = gst.element_factory_make('playbin')
+        self.videosink = gst.element_factory_make("fakesink", "videosink")
+        self.playbin.set_property("video-sink", self.videosink)
+        self.audiosink = gst.element_factory_make("fakesink", "audiosink")
+        self.playbin.set_property("audio-sink", self.audiosink)
+
+        self.bus = self.playbin.get_bus()
+        self.bus.add_signal_watch()
+        self.watch_id = self.bus.connect("message", self.on_bus_message)
+
+        self.playbin.set_property("uri", gstutil._get_file_url(filename))
+        self.playbin.set_state(gst.STATE_PAUSED)
+
+    def result(self, success_callback, error_callback):
+        def _result():
+            self.done.wait(1)
+            if self.success:
+                # current-video/current-audio are -1 if there is no
+                # such stream, and 0 for the first stream
+                current_video = self.playbin.get_property("current-video")
+                current_audio = self.playbin.get_property("current-audio")
+
+                if current_video == 0:
+                    call_on_ui_thread(success_callback, "video")
+                elif current_audio == 0:
+                    call_on_ui_thread(success_callback, "audio")
+                else:
+                    call_on_ui_thread(success_callback, "unplayable")
+            else:
+                call_on_ui_thread(error_callback)
+            self.disconnect()
+        thread.start_new_thread(_result, ())
+
+    def on_bus_message(self, bus, message):
+        if message.src == self.playbin:
+            if message.type == gst.MESSAGE_STATE_CHANGED:
+                prev, new, pending = message.parse_state_changed()
+                if new == gst.STATE_PAUSED:
+                    # Success
+                    self.success = True
+                    self.done.set()
+
+            elif message.type == gst.MESSAGE_ERROR:
+                self.success = False
+                self.done.set()
+
+    def disconnect(self):
+        self.bus.disconnect(self.watch_id)
+        self.playbin.set_state(gst.STATE_NULL)
+        del self.bus
+        del self.playbin
+        del self.audiosink
+        del self.videosink
+
+def get_item_type(item_info, success_callback, error_callback):
+    s = Sniffer(item_info.filename)
+    s.result(success_callback, error_callback)
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/audio.py miro-6.0/lib/frontends/widgets/gtk/audio.py
--- miro-4.0.4/lib/frontends/widgets/gtk/audio.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/audio.py 2013-04-05 16:02:42.000000000 +0000
@@ -28,25 +28,9 @@
 # statement from all source files in the program, then also delete it here.
 
 from miro import app
-from miro import player
+from miro.frontends.widgets.gtk import player
 
-class NullRenderer:
-    def __init__(self):
-        pass
-
-    def reset(self):
-        pass
-
-    def select_file(self, iteminfo, success_callback, error_callback):
-        error_callback()
-
-    def stop(self):
-        pass
-
-    def set_volume(self, v):
-        pass
-
-class AudioPlayer(player.Player):
+class AudioPlayer(player.GTKPlayer):
     """Audio renderer widget.
 
     Note: ``app.audio_renderer`` must be initialized before instantiating this
@@ -54,11 +38,7 @@
     ``None``.
     """
     def __init__(self):
-        player.Player.__init__(self)
-        if app.audio_renderer is not None:
-            self.renderer = app.audio_renderer
-        else:
-            self.renderer = NullRenderer()
+        player.GTKPlayer.__init__(self, app.audio_renderer)
 
     def teardown(self):
         self.renderer.reset()
@@ -70,6 +50,9 @@
         self.renderer.play()
 
     def play_from_time(self, resume_time=0):
+        # FIXME: this overrides the default implementation. The reason is
+        # that the default implementation requires the total time, which
+        # may not be available yet at this point.
self.seek_to_time(resume_time) self.play() diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/base.py miro-6.0/lib/frontends/widgets/gtk/base.py --- miro-4.0.4/lib/frontends/widgets/gtk/base.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/base.py 2013-04-05 16:02:42.000000000 +0000 @@ -32,6 +32,7 @@ import gtk from miro import signals +from miro import threadcheck from miro.frontends.widgets.gtk import wrappermap from miro.frontends.widgets.gtk.weakconnect import weak_connect from miro.frontends.widgets.gtk import keymap @@ -54,6 +55,7 @@ allocated. """ def __init__(self, *signal_names): + threadcheck.confirm_ui_thread() signals.SignalEmitter.__init__(self, *signal_names) self.create_signal('size-allocated') self.create_signal('key-press') @@ -73,13 +75,20 @@ def set_widget(self, widget): self._widget = widget wrappermap.add(self._widget, self) - self.wrapped_widget_connect('hierarchy_changed', - self.on_hierarchy_changed) + if self.should_connect_to_hierarchy_changed(): + self.wrapped_widget_connect('hierarchy_changed', + self.on_hierarchy_changed) self.wrapped_widget_connect('size-allocate', self.on_size_allocate) self.wrapped_widget_connect('key-press-event', self.on_key_press) self.wrapped_widget_connect('focus-out-event', self.on_focus_out) self.use_custom_style_callback = None + def should_connect_to_hierarchy_changed(self): + # GTK creates windows to handle submenus, which messes with our + # on_hierarchy_changed callback. We don't care about custom styles + # for menus anyways, so just ignore the signal. + return not isinstance(self._widget, gtk.MenuItem) + def set_can_focus(self, allow): """Set if we allow the widget to hold keyboard focus. """ diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/controls.py miro-6.0/lib/frontends/widgets/gtk/controls.py --- miro-4.0.4/lib/frontends/widgets/gtk/controls.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/controls.py 2013-04-05 16:02:42.000000000 +0000 @@ -150,13 +150,19 @@ class Checkbox(Widget, BinBaselineCalculator): """Widget that the user can toggle on or off.""" - def __init__(self, label=None, bold=False): + def __init__(self, text=None, bold=False): Widget.__init__(self) BinBaselineCalculator.__init__(self) - self.set_widget(gtk.CheckButton(label)) + if text is None: + text = '' + self.set_widget(gtk.CheckButton()) + self.label = Label(text) + self._widget.add(self.label._widget) + self.label._widget.show() self.create_signal('toggled') self.forward_signal('toggled') - # GTK doesn't support bold checkboxes, at least not natively + if bold: + self.label.set_bold(True) def get_checked(self): return self._widget.get_active() @@ -164,6 +170,9 @@ def set_checked(self, value): self._widget.set_active(value) + def set_size(self, scale_factor): + self.label.set_size(scale_factor) + def get_text_padding(self): """ Returns the amount of space the checkbox takes up before the label. 
@@ -245,12 +254,11 @@ self.create_signal('clicked') self.forward_signal('clicked') self.label = Label(text) - if width: - current_width = self.label.get_width() - padding = (width - current_width) // 2 - odd = width - current_width - padding * 2 + # only honor width if its bigger than the width we need to display the + # label (#18994) + if width and width > self.label.get_width(): alignment = layout.Alignment(0.5, 0.5, 0, 0) - alignment.set_padding(0, 0, padding, padding+odd) + alignment.set_size_request(width, -1) alignment.add(self.label) self._widget.add(alignment._widget) else: diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/customcontrols.py miro-6.0/lib/frontends/widgets/gtk/customcontrols.py --- miro-4.0.4/lib/frontends/widgets/gtk/customcontrols.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/customcontrols.py 2013-04-05 16:02:42.000000000 +0000 @@ -109,12 +109,28 @@ if self.last_drag_event is None: wrappermap.wrapper(self).emit('clicked') +class _DragInfo(object): + """Info about the start of a drag. + + Attributes: + + - button: button that started the drag + - start_pos: position of the slider + - click_pos: position of the click + + Note that start_pos and click_pos will be different if the user clicks + inside the slider. + """ + + def __init__(self, button, start_pos, click_pos): + self.button = button + self.start_pos = start_pos + self.click_pos = click_pos + class CustomScaleMixin(CustomControlMixin): def __init__(self): CustomControlMixin.__init__(self) - self.in_drag = False - self.drag_inbounds = False - self.drag_button = None + self.drag_info = None self.min = self.max = 0.0 def get_range(self): @@ -128,27 +144,6 @@ def is_continuous(self): return wrappermap.wrapper(self).is_continuous() - def do_button_press_event(self, event): - if self.in_drag: - return - self.start_value = self.get_value() - self.in_drag = True - self.drag_button = event.button - self.drag_inbounds = True - self.move_slider_to_mouse(event.x, event.y) - self.grab_focus() - wrappermap.wrapper(self).emit('pressed') - - def do_motion_notify_event(self, event): - if self.in_drag: - self.move_slider_to_mouse(event.x, event.y) - - def calc_percent(self, pos, size): - slider_size = wrappermap.wrapper(self).slider_size() - pos -= slider_size / 2 - size -= slider_size - return max(0, min(1, float(pos) / size)) - def is_horizontal(self): # this comes from a mixin pass @@ -159,32 +154,77 @@ else: return gtk.VScale - def move_slider_to_mouse(self, x, y): - if ((not 0 <= x < self.allocation.width) or - (not 0 <= y < self.allocation.height)): - self.handle_drag_out_of_bounds() + def get_slider_pos(self, value=None): + if value is None: + value = self.get_value() + if self.is_horizontal(): + size = self.allocation.width + else: + size = self.allocation.height + ratio = (float(value) - self.min) / (self.max - self.min) + start_pos = self.slider_size() / 2.0 + return start_pos + ratio * (size - self.slider_size()) + + def slider_size(self): + return wrappermap.wrapper(self).slider_size() + + def _event_pos(self, event): + """Get the position of an event. + + If we are horizontal, this will be the x coordinate. If we are + vertical, the y. 
+ """ + if self.is_horizontal(): + return event.x + else: + return event.y + + def do_button_press_event(self, event): + if self.drag_info is not None: return + current_pos = self.get_slider_pos() + event_pos = self._event_pos(event) + pos_difference = abs(current_pos - event_pos) + # only move the slider if the click was outside its boundaries + # (#18840) + if pos_difference > self.slider_size() / 2.0: + self.move_slider(event_pos) + current_pos = event_pos + self.drag_info = _DragInfo(event.button, current_pos, event_pos) + self.grab_focus() + wrappermap.wrapper(self).emit('pressed') + + def do_motion_notify_event(self, event): + if self.drag_info is not None: + event_pos = self._event_pos(event) + delta = event_pos - self.drag_info.click_pos + self.move_slider(self.drag_info.start_pos + delta) + + def move_slider(self, new_pos): + """Move the slider so that it's centered on new_pos.""" if self.is_horizontal(): - pos = x size = self.allocation.width else: - pos = y - size = self.height - value = (self.max - self.min) * self.calc_percent(pos, size) - self.set_value(value) + size = self.allocation.height + + slider_size = self.slider_size() + new_pos -= slider_size / 2 + size -= slider_size + ratio = max(0, min(1, float(new_pos) / size)) + self.set_value(ratio * (self.max - self.min)) + wrappermap.wrapper(self).emit('moved', self.get_value()) if self.is_continuous(): wrappermap.wrapper(self).emit('changed', self.get_value()) def handle_drag_out_of_bounds(self): - self.drag_inbounds = False if not self.is_continuous(): self.set_value(self.start_value) def do_button_release_event(self, event): - if event.button != self.drag_button: + if self.drag_info is None or event.button != self.drag_info.button: return - self.in_drag = False + self.drag_info = None if (self.is_continuous and (0 <= event.x < self.allocation.width) and (0 <= event.y < self.allocation.height)): @@ -192,16 +232,27 @@ wrappermap.wrapper(self).emit('released') def do_scroll_event(self, event): + wrapper = wrappermap.wrapper(self) if self.is_horizontal(): if event.direction == gtk.gdk.SCROLL_UP: event.direction = gtk.gdk.SCROLL_DOWN elif event.direction == gtk.gdk.SCROLL_DOWN: event.direction = gtk.gdk.SCROLL_UP - self.gtk_scale_class().do_scroll_event(self, event) + if (wrapper._scroll_step is not None and + event.direction in (gtk.gdk.SCROLL_UP, gtk.gdk.SCROLL_DOWN)): + # handle the scroll ourself + if event.direction == gtk.gdk.SCROLL_DOWN: + delta = wrapper._scroll_step + else: + delta = -wrapper._scroll_step + self.set_value(self.get_value() + delta) + else: + # let GTK handle the scroll + self.gtk_scale_class().do_scroll_event(self, event) # Treat mouse scrolls as if the user clicked on the new position - wrappermap.wrapper(self).emit('pressed') - wrappermap.wrapper(self).emit('changed', self.get_value()) - wrappermap.wrapper(self).emit('released') + wrapper.emit('pressed') + wrapper.emit('changed', self.get_value()) + wrapper.emit('released') def do_move_slider(self, scroll): if self.is_horizontal(): @@ -357,6 +408,7 @@ self.create_signal('released') self.create_signal('changed') self.create_signal('moved') + self._scroll_step = None if self.is_horizontal(): self.set_widget(CustomHScaleWidget()) else: @@ -376,14 +428,40 @@ def get_range(self): return self._widget.get_range() + def get_slider_pos(self, value=None): + """Get the position for the slider for our current value. + + This will return position that the slider should be centered on to + display the value. 
It will be the x coordinate if is_horizontal() is + True and the y coordinate otherwise. + + This method takes into acount the size of the slider when calculating + the position. The slider position will start at (slider_size / 2) and + will end (slider_size / 2) px before the end of the widget. + + :param value: value to get the position for. Defaults to the current + value + """ + return self._widget.get_slider_pos(value) + def set_range(self, min_value, max_value): self._widget.set_range(min_value, max_value) - # Try to pick a reasonable default for the digits + # set_digits controls the precision of the scale by limiting changes + # to a certain number of digits. If the range is [0, 1], this code + # will give us 4 digits of precision, which seems reasonable. range = max_value - min_value - self._widget.set_digits(int(round(math.log10(100.0 / range)))) + self._widget.set_digits(int(round(math.log10(10000.0 / range)))) - def set_increments(self, increment, big_increment): - self._widget.set_increments(increment, big_increment) + def set_increments(self, small_step, big_step, scroll_step=None): + """Set the increments to scroll. + + :param small_step: scroll amount for up/down + :param big_step: scroll amount for page up/page down. + :param scroll_step: scroll amount for mouse wheel, or None to make + this 2 times the small step + """ + self._widget.set_increments(small_step, big_step) + self._scroll_step = scroll_step def to_miro_volume(value): """Convert from 0 to 1.0 to 0.0 to MAX_VOLUME. diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/donatewindow.py miro-6.0/lib/frontends/widgets/gtk/donatewindow.py --- miro-4.0.4/lib/frontends/widgets/gtk/donatewindow.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/donatewindow.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,40 @@ +# Miro - an RSS based video player application +# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""donatewindow.py -- Donate window. 
""" + +import gtk + +from miro.frontends.widgets.gtk import layout +from miro.frontends.widgets.gtk import simple +from miro.frontends.widgets.gtk import window +from miro.plat import resources + +class DonateWindow(window.Window): + pass diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/drawing.py miro-6.0/lib/frontends/widgets/gtk/drawing.py --- miro-4.0.4/lib/frontends/widgets/gtk/drawing.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/drawing.py 2013-04-05 16:02:42.000000000 +0000 @@ -64,10 +64,14 @@ def get_size(self): return self.width, self.height - def draw(self, context, x, y, width, height, fraction=1.0): + def _align_pattern(self, x, y): + """Line up our image pattern so that it's top-left corner is x, y.""" m = cairo.Matrix() m.translate(-x, -y) self.pattern.set_matrix(m) + + def draw(self, context, x, y, width, height, fraction=1.0): + self._align_pattern(x, y) cairo_context = context.context cairo_context.save() cairo_context.set_source(self.pattern) @@ -76,6 +80,22 @@ if fraction >= 1.0: cairo_context.fill() else: + cairo_context.clip() + cairo_context.paint_with_alpha(fraction) + cairo_context.restore() + + def draw_rect(self, context, dest_x, dest_y, source_x, source_y, + width, height, fraction=1.0): + + self._align_pattern(dest_x-source_x, dest_y-source_y) + cairo_context = context.context + cairo_context.save() + cairo_context.set_source(self.pattern) + cairo_context.new_path() + cairo_context.rectangle(dest_x, dest_y, width, height) + if fraction >= 1.0: + cairo_context.fill() + else: cairo_context.clip() cairo_context.paint_with_alpha(fraction) cairo_context.restore() diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/build-info-defs miro-6.0/lib/frontends/widgets/gtk/fixedliststore/build-info-defs --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/build-info-defs 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/build-info-defs 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,2 @@ +fixed-list-store.defs was built with h2def.py from +pygobject version 2.28.6 diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/build-info-wrapper miro-6.0/lib/frontends/widgets/gtk/fixedliststore/build-info-wrapper --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/build-info-wrapper 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/build-info-wrapper 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,2 @@ +fixed-list-store-wrapper.c was built with pyobject-codegen-2.0 +from pygobject version 2.28.6 diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.c miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.c --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.c 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.c 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,379 @@ +/* +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. +*/ + +/* + * fixed-list-store.c - MiroFixedListStore implementation + */ + +#include "fixed-list-store.h" + +static void +miro_fixed_list_store_init (MiroFixedListStore *self); + +static GtkTreeModelFlags +miro_fixed_list_store_get_flags (GtkTreeModel *tree_model); + +static gint +miro_fixed_list_store_get_n_columns (GtkTreeModel *tree_model); + +static GType +miro_fixed_list_store_get_column_type (GtkTreeModel *tree_model, + gint index); + +static gboolean +miro_fixed_list_store_make_iter (GtkTreeModel *tree_model, + GtkTreeIter *iter, + gint index); + +static gboolean +miro_fixed_list_store_get_iter (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreePath *path); + +static GtkTreePath * +miro_fixed_list_store_get_path (GtkTreeModel *tree_model, + GtkTreeIter *iter); + +static void +miro_fixed_list_store_get_value (GtkTreeModel *tree_model, + GtkTreeIter *iter, + gint column, + GValue *value); + +static gboolean +miro_fixed_list_store_iter_next (GtkTreeModel *tree_model, + GtkTreeIter *iter); + +static gboolean +miro_fixed_list_store_iter_children (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *parent); + +static gboolean +miro_fixed_list_store_iter_has_child (GtkTreeModel *tree_model, + GtkTreeIter *iter); + +static gint +miro_fixed_list_store_iter_n_children (GtkTreeModel *tree_model, + GtkTreeIter *iter); + +static gboolean +miro_fixed_list_store_iter_nth_child (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *parent, + gint n); + +static gboolean +miro_fixed_list_store_iter_parent (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *child); + +static void +miro_fixed_list_store_interface_init (GtkTreeModelIface *iface); + +/* Properties */ + +enum { + PROP_0, + PROP_ROW_COUNT, + N_PROPERTIES +}; + +static GParamSpec *obj_properties[N_PROPERTIES] = { NULL, }; + +/* Implement GObject stuff */ + +G_DEFINE_TYPE_WITH_CODE(MiroFixedListStore, + miro_fixed_list_store, + G_TYPE_OBJECT, + G_IMPLEMENT_INTERFACE(GTK_TYPE_TREE_MODEL, + miro_fixed_list_store_interface_init)); + +static void +miro_fixed_list_store_set_property (GObject *object, + guint property_id, + const GValue *value, + GParamSpec *pspec) +{ + MiroFixedListStore *self = MIRO_FIXED_LIST_STORE (object); + + switch (property_id) + { + case PROP_ROW_COUNT: + self->row_count = g_value_get_uint(value); + break; + + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +static void +miro_fixed_list_store_get_property (GObject *object, + guint property_id, + GValue *value, + GParamSpec *pspec) +{ + MiroFixedListStore *self = MIRO_FIXED_LIST_STORE (object); + + switch (property_id) + { + case 
PROP_ROW_COUNT: + g_value_set_uint (value, self->row_count); + break; + + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +static void +miro_fixed_list_store_class_init (MiroFixedListStoreClass *klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + + obj_properties[PROP_ROW_COUNT] = + g_param_spec_uint ("row_count", "row_count", + "number of table rows", + 0, G_MAXUINT, + 0, + G_PARAM_READWRITE| G_PARAM_CONSTRUCT_ONLY); + + gobject_class->set_property = miro_fixed_list_store_set_property; + gobject_class->get_property = miro_fixed_list_store_get_property; + g_object_class_install_property (gobject_class, + PROP_ROW_COUNT, + obj_properties[PROP_ROW_COUNT]); +} + +static void +miro_fixed_list_store_init (MiroFixedListStore *self) +{ + // Random int to check whether an iter belongs to our model + self->stamp = g_random_int(); +} + +static GtkTreeModelFlags +miro_fixed_list_store_get_flags (GtkTreeModel *tree_model) +{ + return (GtkTreeModelFlags)(GTK_TREE_MODEL_LIST_ONLY | + GTK_TREE_MODEL_ITERS_PERSIST); +} + +static gint +miro_fixed_list_store_get_n_columns (GtkTreeModel *tree_model) +{ + return 0; +} + +static GType +miro_fixed_list_store_get_column_type (GtkTreeModel *tree_model, + gint index) +{ + return G_TYPE_INVALID; +} + +static gboolean +miro_fixed_list_store_make_iter (GtkTreeModel *tree_model, + GtkTreeIter *iter, + gint index) +{ + MiroFixedListStore *miro_fls; + + miro_fls = MIRO_FIXED_LIST_STORE(tree_model); + + if (index < 0 || index >= miro_fls->row_count) { + return FALSE; + } + + iter->stamp = miro_fls->stamp; + iter->user_data = (gpointer)index; + return TRUE; +} + +static gboolean +miro_fixed_list_store_get_iter (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreePath *path) +{ + g_assert(path); + g_assert(gtk_tree_path_get_depth(path) == 1); + + return miro_fixed_list_store_make_iter(tree_model, iter, + gtk_tree_path_get_indices(path)[0]); +} + +static GtkTreePath * +miro_fixed_list_store_get_path (GtkTreeModel *tree_model, + GtkTreeIter *iter) +{ + MiroFixedListStore *miro_fls; + GtkTreePath* path; + + miro_fls = MIRO_FIXED_LIST_STORE(tree_model); + // Couple of sanity checks + g_assert (iter != NULL); + g_assert (iter->stamp == miro_fls->stamp); + + path = gtk_tree_path_new(); + gtk_tree_path_append_index(path, (gint)iter->user_data); + return path; +} + +static void +miro_fixed_list_store_get_value (GtkTreeModel *tree_model, + GtkTreeIter *iter, + gint column, + GValue *value) +{ + g_value_init(value, G_TYPE_NONE); +} + +static gboolean +miro_fixed_list_store_iter_next (GtkTreeModel *tree_model, + GtkTreeIter *iter) +{ + MiroFixedListStore *miro_fls; + gint next; + + miro_fls = MIRO_FIXED_LIST_STORE(tree_model); + g_assert(iter); + if (iter->stamp != miro_fls->stamp) return FALSE; + + next = (gint)iter->user_data + 1; + if(next >= miro_fls->row_count) { + return FALSE; + } else { + iter->user_data = (gpointer)next; + } + return TRUE; +} + +static gboolean +miro_fixed_list_store_iter_children (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *parent) +{ + // We don't have children, only works for special case when parent=NULL + if(parent != NULL) return FALSE; + return miro_fixed_list_store_make_iter(tree_model, iter, 0); +} + +static gboolean +miro_fixed_list_store_iter_has_child (GtkTreeModel *tree_model, + GtkTreeIter *iter) +{ + return FALSE; +} + +static gint +miro_fixed_list_store_iter_n_children (GtkTreeModel *tree_model, + GtkTreeIter *iter) +{ + MiroFixedListStore* miro_fls; + // We 
don't have children, only works for special case when iter=NULL + if(iter) { + return 0; + } + miro_fls = MIRO_FIXED_LIST_STORE(tree_model); + return miro_fls->row_count; +} + +static gboolean +miro_fixed_list_store_iter_nth_child (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *parent, + gint n) +{ + if(parent) { + return FALSE; // non-toplevel row fails + } + return miro_fixed_list_store_make_iter(tree_model, iter, n); +} + +static gboolean +miro_fixed_list_store_iter_parent (GtkTreeModel *tree_model, + GtkTreeIter *iter, + GtkTreeIter *child) +{ + return FALSE; +} + +static void +miro_fixed_list_store_interface_init (GtkTreeModelIface *iface) +{ + iface->get_flags = miro_fixed_list_store_get_flags; + iface->get_n_columns = miro_fixed_list_store_get_n_columns; + iface->get_column_type = miro_fixed_list_store_get_column_type; + iface->get_iter = miro_fixed_list_store_get_iter; + iface->get_path = miro_fixed_list_store_get_path; + iface->get_value = miro_fixed_list_store_get_value; + iface->iter_next = miro_fixed_list_store_iter_next; + iface->iter_children = miro_fixed_list_store_iter_children; + iface->iter_has_child = miro_fixed_list_store_iter_has_child; + iface->iter_n_children = miro_fixed_list_store_iter_n_children; + iface->iter_nth_child = miro_fixed_list_store_iter_nth_child; + iface->iter_parent = miro_fixed_list_store_iter_parent; +} + +MiroFixedListStore* miro_fixed_list_store_new(int row_count) +{ + MiroFixedListStore *rv; + g_assert(row_count >= 0); + + rv = MIRO_FIXED_LIST_STORE(g_object_new (MIRO_TYPE_FIXED_LIST_STORE, + NULL)); + rv->row_count = row_count; + return rv; +} + +gint +miro_fixed_list_store_row_of_iter(MiroFixedListStore* miro_fls, + GtkTreeIter* iter) +{ + g_assert (iter->stamp == miro_fls->stamp); + return (gint)iter->user_data; +} + +gboolean +miro_fixed_list_store_iter_is_valid(MiroFixedListStore* miro_fls, + GtkTreeIter* iter) +{ + gint pos; + + pos = (gint)iter->user_data; + return (iter->stamp == miro_fls->stamp && + pos >= 0 && + pos < miro_fls->row_count); +} + diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.defs miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.defs --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.defs 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.defs 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,47 @@ +;; -*- scheme -*- +; object definitions ... +(define-object FixedListStore + (in-module "Miro") + (parent "GObject") + (c-name "MiroFixedListStore") + (gtype-id "MIRO_TYPE_FIXED_LIST_STORE") +) + +;; Enumerations and flags ... 
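+;; (none: MiroFixedListStore defines no enumerations or flags)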
+
+
+;; From fixed-list-store.h
+
+(define-function miro_fixed_list_store_get_type
+  (c-name "miro_fixed_list_store_get_type")
+  (return-type "GType")
+)
+
+(define-function miro_fixed_list_store_new
+  (c-name "miro_fixed_list_store_new")
+  (is-constructor-of "MiroFixedListStore")
+  (return-type "MiroFixedListStore*")
+  (parameters
+    '("int" "row_count")
+  )
+)
+
+(define-method row_of_iter
+  (of-object "MiroFixedListStore")
+  (c-name "miro_fixed_list_store_row_of_iter")
+  (return-type "gint")
+  (parameters
+    '("GtkTreeIter*" "iter")
+  )
+)
+
+(define-method iter_is_valid
+  (of-object "MiroFixedListStore")
+  (c-name "miro_fixed_list_store_iter_is_valid")
+  (return-type "gboolean")
+  (parameters
+    '("GtkTreeIter*" "iter")
+  )
+)
+
+
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.h miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.h
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.h 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.h 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,99 @@
+/*
+ * Miro - an RSS based video player application
+ * Copyright (C) 2012
+ * Participatory Culture Foundation
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ *
+ * In addition, as a special exception, the copyright holders give
+ * permission to link the code of portions of this program with the OpenSSL
+ * library.
+ *
+ * You must obey the GNU General Public License in all respects for all of
+ * the code used other than OpenSSL. If you modify file(s) with this
+ * exception, you may extend this exception to your version of the file(s),
+ * but you are not obligated to do so. If you do not wish to do so, delete
+ * this exception statement from your version. If you delete this exception
+ * statement from all source files in the program, then also delete it here.
+ */
+
+/*
+ * fixed-list-store.h - MiroFixedListStore interface
+ *
+ * MiroFixedListStore is a GtkTreeModel for a simple fixed-size list of items.
+ *
+ * MiroFixedListStore does next to nothing, but at least it does it fast :)
+ * It stores no data at all, nor can rows be added, deleted, or reordered.  On
+ * the plus side, this means that it implements the GtkTreeModel API pretty
+ * close to as fast as possible.
+ *
+ * The intended use is alongside another class to actually fetch the data and
+ * to use a custom cell renderer function to set up the cell renderers.
+ */
+
+#include <glib-object.h>
+#include <gtk/gtk.h>
+
+/* boilerplate GObject defines. */
+
+GType miro_fixed_list_store_get_type(void);
+
+#define MIRO_TYPE_FIXED_LIST_STORE (miro_fixed_list_store_get_type ())
+#define MIRO_FIXED_LIST_STORE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIRO_TYPE_FIXED_LIST_STORE, MiroFixedListStore))
+#define MIRO_FIXED_LIST_STORE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIRO_TYPE_FIXED_LIST_STORE, MiroFixedListStoreClass))
+#define MIRO_FIXED_IS_LIST_STORE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIRO_TYPE_FIXED_LIST_STORE))
+#define MIRO_FIXED_IS_LIST_STORE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIRO_TYPE_FIXED_LIST_STORE))
+#define MIRO_FIXED_LIST_STORE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIRO_TYPE_FIXED_LIST_STORE, MiroFixedListStoreClass))
+
+struct _MiroFixedListStore
+{
+    GObject parent_instance;
+
+    /* instance members */
+    gint row_count;
+    gint stamp;
+};
+
+struct _MiroFixedListStoreClass
+{
+    GObjectClass parent_class;
+};
+
+typedef struct _MiroFixedListStore MiroFixedListStore;
+typedef struct _MiroFixedListStoreClass MiroFixedListStoreClass;
+
+/*
+ * Create a new MiroFixedListStore
+ *
+ * :param row_count: number of rows in the model
+ */
+MiroFixedListStore* miro_fixed_list_store_new(int row_count);
+
+/*
+ * Convert a GtkTreeIter to the index of the row it points at.
+ */
+
+gint
+miro_fixed_list_store_row_of_iter(MiroFixedListStore* miro_fls,
+                                  GtkTreeIter* iter);
+
+/*
+ * Check if an iter is valid for this model.
+ */
+
+gboolean
+miro_fixed_list_store_iter_is_valid(MiroFixedListStore* miro_fls,
+                                    GtkTreeIter* iter);
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-module.c miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-module.c
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-module.c 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-module.c 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,57 @@
+/*
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+*/
+
+/*
+ * fixed-list-store-module.c -- Python module that holds the FixedListStore
+ * wrapper.
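+ *
+ * Once built, the extension is imported from Python as "fixedliststore";
+ * initfixedliststore() below registers the generated wrapper classes with
+ * pygobject.  A rough usage sketch (names follow test.py in this
+ * directory):
+ *
+ *   import fixedliststore
+ *   model = fixedliststore.FixedListStore(100)  # model with 100 blank rows
+ *   treeview.set_model(model)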
+ */
+
+#include <pygobject.h>
+
+void miro_fixed_list_store_register_classes (PyObject *d);
+extern PyMethodDef miro_fixed_list_store_functions[];
+
+DL_EXPORT(void)
+initfixedliststore(void)
+{
+    PyObject *m, *d;
+
+    init_pygobject ();
+
+    m = Py_InitModule ("fixedliststore", miro_fixed_list_store_functions);
+    d = PyModule_GetDict (m);
+
+    miro_fixed_list_store_register_classes (d);
+
+    if (PyErr_Occurred ()) {
+        Py_FatalError ("can't initialise module fixedliststore");
+    }
+}
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.override miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.override
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.override 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store.override 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,14 @@
+%%
+headers
+#include <gtk/gtk.h>
+#include "pygobject.h"
+#include "fixed-list-store.h"
+%%
+modulename miro.fixedliststore
+%%
+import gobject.GObject as PyGObject_Type
+import gtk.TreeModel as PyGtkTreeModel_Type
+%%
+ignore-glob
+*_get_type
+%%
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-wrapper.c miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-wrapper.c
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-wrapper.c 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/fixed-list-store-wrapper.c 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,188 @@
+/* -- THIS FILE IS GENERATED - DO NOT EDIT *//* -*- Mode: C; c-basic-offset: 4 -*- */
+
+#include <Python.h>
+
+
+
+#line 3 "fixed-list-store.override"
+#include <gtk/gtk.h>
+#include "pygobject.h"
+#include "fixed-list-store.h"
+#line 12 "fixed-list-store.c"
+
+
+/* ---------- types from other modules ---------- */
+static PyTypeObject *_PyGObject_Type;
+#define PyGObject_Type (*_PyGObject_Type)
+static PyTypeObject *_PyGtkTreeModel_Type;
+#define PyGtkTreeModel_Type (*_PyGtkTreeModel_Type)
+
+
+/* ---------- forward type declarations ---------- */
+PyTypeObject G_GNUC_INTERNAL PyMiroFixedListStore_Type;
+
+#line 25 "fixed-list-store.c"
+
+
+
+/* ----------- MiroFixedListStore ----------- */
+
+static int
+_wrap_miro_fixed_list_store_new(PyGObject *self, PyObject *args, PyObject *kwargs)
+{
+    static char *kwlist[] = { "row_count", NULL };
+    int row_count;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,"i:Miro.FixedListStore.__init__", kwlist, &row_count))
+        return -1;
+    self->obj = (GObject *)miro_fixed_list_store_new(row_count);
+
+    if (!self->obj) {
+        PyErr_SetString(PyExc_RuntimeError, "could not create MiroFixedListStore object");
+        return -1;
+    }
+    pygobject_register_wrapper((PyObject *)self);
+    return 0;
+}
+
+static PyObject *
+_wrap_miro_fixed_list_store_row_of_iter(PyGObject *self, PyObject *args, PyObject *kwargs)
+{
+    static char *kwlist[] = { "iter", NULL };
+    PyObject *py_iter;
+    GtkTreeIter *iter = NULL;
+    int ret;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs,"O:Miro.FixedListStore.row_of_iter", kwlist, &py_iter))
+        return NULL;
+    if (pyg_boxed_check(py_iter, GTK_TYPE_TREE_ITER))
+        iter = pyg_boxed_get(py_iter, GtkTreeIter);
+    else {
+        PyErr_SetString(PyExc_TypeError, "iter should be a GtkTreeIter");
+        return NULL;
+    }
+
+    ret = miro_fixed_list_store_row_of_iter(MIRO_FIXED_LIST_STORE(self->obj), iter);
+
+    return PyInt_FromLong(ret);
+}
+
+static PyObject *
+_wrap_miro_fixed_list_store_iter_is_valid(PyGObject *self, PyObject *args, PyObject *kwargs)
+{
+    static char
*kwlist[] = { "iter", NULL }; + PyObject *py_iter; + GtkTreeIter *iter = NULL; + int ret; + + if (!PyArg_ParseTupleAndKeywords(args, kwargs,"O:Miro.FixedListStore.iter_is_valid", kwlist, &py_iter)) + return NULL; + if (pyg_boxed_check(py_iter, GTK_TYPE_TREE_ITER)) + iter = pyg_boxed_get(py_iter, GtkTreeIter); + else { + PyErr_SetString(PyExc_TypeError, "iter should be a GtkTreeIter"); + return NULL; + } + + ret = miro_fixed_list_store_iter_is_valid(MIRO_FIXED_LIST_STORE(self->obj), iter); + + return PyBool_FromLong(ret); + +} + +static const PyMethodDef _PyMiroFixedListStore_methods[] = { + { "row_of_iter", (PyCFunction)_wrap_miro_fixed_list_store_row_of_iter, METH_VARARGS|METH_KEYWORDS, + NULL }, + { "iter_is_valid", (PyCFunction)_wrap_miro_fixed_list_store_iter_is_valid, METH_VARARGS|METH_KEYWORDS, + NULL }, + { NULL, NULL, 0, NULL } +}; + +PyTypeObject G_GNUC_INTERNAL PyMiroFixedListStore_Type = { + PyObject_HEAD_INIT(NULL) + 0, /* ob_size */ + "miro.fixedliststore.FixedListStore", /* tp_name */ + sizeof(PyGObject), /* tp_basicsize */ + 0, /* tp_itemsize */ + /* methods */ + (destructor)0, /* tp_dealloc */ + (printfunc)0, /* tp_print */ + (getattrfunc)0, /* tp_getattr */ + (setattrfunc)0, /* tp_setattr */ + (cmpfunc)0, /* tp_compare */ + (reprfunc)0, /* tp_repr */ + (PyNumberMethods*)0, /* tp_as_number */ + (PySequenceMethods*)0, /* tp_as_sequence */ + (PyMappingMethods*)0, /* tp_as_mapping */ + (hashfunc)0, /* tp_hash */ + (ternaryfunc)0, /* tp_call */ + (reprfunc)0, /* tp_str */ + (getattrofunc)0, /* tp_getattro */ + (setattrofunc)0, /* tp_setattro */ + (PyBufferProcs*)0, /* tp_as_buffer */ + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ + NULL, /* Documentation string */ + (traverseproc)0, /* tp_traverse */ + (inquiry)0, /* tp_clear */ + (richcmpfunc)0, /* tp_richcompare */ + offsetof(PyGObject, weakreflist), /* tp_weaklistoffset */ + (getiterfunc)0, /* tp_iter */ + (iternextfunc)0, /* tp_iternext */ + (struct PyMethodDef*)_PyMiroFixedListStore_methods, /* tp_methods */ + (struct PyMemberDef*)0, /* tp_members */ + (struct PyGetSetDef*)0, /* tp_getset */ + NULL, /* tp_base */ + NULL, /* tp_dict */ + (descrgetfunc)0, /* tp_descr_get */ + (descrsetfunc)0, /* tp_descr_set */ + offsetof(PyGObject, inst_dict), /* tp_dictoffset */ + (initproc)_wrap_miro_fixed_list_store_new, /* tp_init */ + (allocfunc)0, /* tp_alloc */ + (newfunc)0, /* tp_new */ + (freefunc)0, /* tp_free */ + (inquiry)0 /* tp_is_gc */ +}; + + + +/* ----------- functions ----------- */ + +const PyMethodDef miro_fixed_list_store_functions[] = { + { NULL, NULL, 0, NULL } +}; + +/* initialise stuff extension classes */ +void +miro_fixed_list_store_register_classes(PyObject *d) +{ + PyObject *module; + + if ((module = PyImport_ImportModule("gobject")) != NULL) { + _PyGObject_Type = (PyTypeObject *)PyObject_GetAttrString(module, "GObject"); + if (_PyGObject_Type == NULL) { + PyErr_SetString(PyExc_ImportError, + "cannot import name GObject from gobject"); + return ; + } + } else { + PyErr_SetString(PyExc_ImportError, + "could not import gobject"); + return ; + } + if ((module = PyImport_ImportModule("gtk")) != NULL) { + _PyGtkTreeModel_Type = (PyTypeObject *)PyObject_GetAttrString(module, "TreeModel"); + if (_PyGtkTreeModel_Type == NULL) { + PyErr_SetString(PyExc_ImportError, + "cannot import name TreeModel from gtk"); + return ; + } + } else { + PyErr_SetString(PyExc_ImportError, + "could not import gtk"); + return ; + } + + +#line 187 "fixed-list-store.c" + pygobject_register_class(d, "MiroFixedListStore", 
MIRO_TYPE_FIXED_LIST_STORE, &PyMiroFixedListStore_Type, Py_BuildValue("(O)", &PyGObject_Type));
+}
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/generate-defs.sh miro-6.0/lib/frontends/widgets/gtk/fixedliststore/generate-defs.sh
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/generate-defs.sh 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/generate-defs.sh 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,11 @@
+#!/bin/sh
+
+h2def=`pkg-config --variable=codegendir pygobject-2.0`/h2def.py
+
+BUILD_INFO='build-info-defs'
+
+echo "fixed-list-store.defs was built with h2def.py from " > ${BUILD_INFO}
+echo "pygobject version" `pkg-config --modversion pygobject-2.0` >> ${BUILD_INFO}
+
+python ${h2def} fixed-list-store.h > fixed-list-store.defs
+
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/generate-wrapper.sh miro-6.0/lib/frontends/widgets/gtk/fixedliststore/generate-wrapper.sh
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/generate-wrapper.sh 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/generate-wrapper.sh 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,14 @@
+#!/bin/sh
+
+BUILD_INFO='build-info-wrapper'
+
+echo "fixed-list-store-wrapper.c was built with pygobject-codegen-2.0 " > ${BUILD_INFO}
+echo "from pygobject version" `pkg-config --modversion pygobject-2.0` >> ${BUILD_INFO}
+
+DEFS=`pkg-config --variable=defsdir pygtk-2.0`
+
+pygobject-codegen-2.0 --prefix miro_fixed_list_store \
+    --register ${DEFS}/gdk-types.defs \
+    --register ${DEFS}/gtk-types.defs \
+    --override fixed-list-store.override \
+    fixed-list-store.defs > fixed-list-store-wrapper.c
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/.gitignore miro-6.0/lib/frontends/widgets/gtk/fixedliststore/.gitignore
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/.gitignore 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/.gitignore 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,3 @@
+build
+dist
+mirotreemodels.c
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/README miro-6.0/lib/frontends/widgets/gtk/fixedliststore/README
--- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/README 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/README 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,21 @@
+This directory contains the miro.frontends.widgets.gtk.fixedliststore module.
+
+The meat of the code is in fixed-list-store.c and fixed-list-store.h, which
+implement the GtkTreeModel in C.
+
+That code gets wrapped by pygtk to create a python module.  A couple of these
+files are auto-built by the pygobject code generation system:
+
+fixed-list-store.defs:
+  - built using generate-defs.sh
+  - build info is stored in build-info-defs
+
+fixed-list-store-wrapper.c:
+  - built using generate-wrapper.sh
+  - controlled by the fixed-list-store.override file
+  - build info is stored in build-info-wrapper
+
+fixed-list-store-module.c defines the module that holds the wrapper code.
+
+Lastly there's test-setup.py and test.py, which work together to build a quick
+python program to test the code.
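Taken together, these pieces give Python a model whose only job is to report
a row count and map iters back to indexes.  The sketch below condenses what
test.py exercises; everything outside the fixedliststore API (the "backing"
list and "celldatafunction" names) is illustrative:

    import gtk  # pygtk must be loaded before the extension module
    import fixedliststore

    backing = ["foo", "bar", "baz"]   # the real data lives outside the model
    model = fixedliststore.FixedListStore(len(backing))

    def celldatafunction(column, cell, model, it, col_num):
        # map the opaque GtkTreeIter back to a plain row index
        row = model.row_of_iter(it)
        cell.set_property("text", backing[row])
    # attach with: col.set_cell_data_func(cell, celldatafunction, 0)

    it = model.get_iter_first()
    assert model.iter_is_valid(it)
    assert model.row_of_iter(it) == 0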
diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/test.py miro-6.0/lib/frontends/widgets/gtk/fixedliststore/test.py --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/test.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/test.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,72 @@ +#!/usr/bin/python + +# test.py -- test the FixedListStore python wrapper + +import sys +import os +import shutil +import subprocess +import time +import itertools + +import gobject +import gtk + +for dirname in ("build", "dist"): + if os.path.exists(dirname): + shutil.rmtree(dirname) +subprocess.check_call(["python", "test-setup.py", "install", "--prefix", "dist"]) +sys.path.append("dist/lib/python%s.%s/site-packages/" % + (sys.version_info[0], sys.version_info[1])) + +print 'running...' +print + +import fixedliststore + +rows = 100 +columns = 10 +treeview = gtk.TreeView() +cell = gtk.CellRendererText() + +def celldatafunction(column, cell, model, it, col_num): + row = model.row_of_iter(it) + text = "cell %s %s" % (row, col_num) + cell.set_property("text", text) + +for i in range(columns): + col = gtk.TreeViewColumn('Column %s' % i) + treeview.append_column(col) + col.pack_start(cell, True) + col.set_cell_data_func(cell, celldatafunction, i) + +model = fixedliststore.FixedListStore(rows) +treeview.set_model(model) + +def on_click(b): + start = time.time() + treeview.queue_draw() + while gtk.events_pending(): + gtk.main_iteration() + end = time.time() + print 'redraw in %0.3f seconds' % (end-start) + +button = gtk.Button("Push me") +button.connect("clicked", on_click) + + +scroller = gtk.ScrolledWindow() +scroller.add(treeview) + +vbox = gtk.VBox() +vbox.pack_start(button, False) +vbox.pack_start(scroller) + +window = gtk.Window() +window.add(vbox) +window.set_size_request(800, 500) +window.show_all() +window.connect("destroy", lambda w: gtk.main_quit()) +gtk.main() + +print '...done' diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/test-setup.py miro-6.0/lib/frontends/widgets/gtk/fixedliststore/test-setup.py --- miro-4.0.4/lib/frontends/widgets/gtk/fixedliststore/test-setup.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/fixedliststore/test-setup.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,111 @@ +#!/usr/bin/env python + +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. 
If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +# test-setup.py -- distutils script to build things for test.py + +from distutils.core import setup +from distutils.extension import Extension +import subprocess + +def get_command_output(cmd, warnOnStderr=True, warnOnReturnCode=True): + """Wait for a command and return its output. Check for common + errors and raise an exception if one of these occurs. + """ + p = subprocess.Popen(cmd, shell=True, close_fds=True, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + stdout, stderr = p.communicate() + if warnOnStderr and stderr != '': + raise RuntimeError("%s outputted the following error:\n%s" % + (cmd, stderr)) + if warnOnReturnCode and p.returncode != 0: + raise RuntimeError("%s had non-zero return code %d" % + (cmd, p.returncode)) + return stdout + +def parse_pkg_config(command, components, options_dict=None): + """Helper function to parse compiler/linker arguments from + pkg-config and update include_dirs, library_dirs, etc. + + We return a dict with the following keys, which match up with + keyword arguments to the setup function: include_dirs, + library_dirs, libraries, extra_compile_args. + + Command is the command to run (pkg-config, etc). Components is a + string that lists the components to get options for. + + If options_dict is passed in, we add options to it, instead of + starting from scratch. + """ + if options_dict is None: + options_dict = { + 'include_dirs': [], + 'library_dirs': [], + 'libraries': [], + 'extra_compile_args': [] + } + commandLine = "%s --cflags --libs %s" % (command, components) + output = get_command_output(commandLine).strip() + for comp in output.split(): + prefix, rest = comp[:2], comp[2:] + if prefix == '-I': + options_dict['include_dirs'].append(rest) + elif prefix == '-L': + options_dict['library_dirs'].append(rest) + elif prefix == '-l': + options_dict['libraries'].append(rest) + else: + options_dict['extra_compile_args'].append(comp) + + commandLine = "%s --variable=libdir %s" % (command, components) + output = get_command_output(commandLine).strip() + + return options_dict + +my_ext = \ + Extension("fixedliststore", + [ + 'fixed-list-store.c', + 'fixed-list-store-module.c', + 'fixed-list-store-wrapper.c', + ], + **parse_pkg_config('pkg-config', + 'pygobject-2.0 gtk+-2.0 glib-2.0 gthread-2.0') + ) + +#### Run setup #### +setup(name='miro', + version='1.0', + author='Participatory Culture Foundation', + author_email='feedback@pculture.org', + url='http://www.getmiro.com/', + download_url='http://www.getmiro.com/downloads/', + ext_modules=[my_ext], +) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/gtkmenus.py miro-6.0/lib/frontends/widgets/gtk/gtkmenus.py --- miro-4.0.4/lib/frontends/widgets/gtk/gtkmenus.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/gtkmenus.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,394 @@ +# Miro - an RSS based video player application +# Copyright (C) 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""gtkmenus.py -- Manage menu layout.""" + +import gtk + +from miro import app +from miro import prefs +from miro.frontends.widgets.gtk import base +from miro.frontends.widgets.gtk import keymap +from miro.frontends.widgets.gtk import wrappermap + +def _setup_accel(widget, name, shortcut=None): + """Setup accelerators for a menu item. + + This method sets an accel path for the widget and optionally connects a + shortcut to that accel path. + """ + # The GTK docs say that we should set the path using this form: + # /Menu/Submenu/MenuItem + # ...but this is hard to do because we don't yet know what window/menu + # this menu item is going to be added to. gtk.Action and gtk.ActionGroup + # don't follow the above suggestion, so we don't need to either. + path = "/MenuBar/%s" % name + widget.set_accel_path(path) + if shortcut is not None: + accel_string = keymap.get_accel_string(shortcut) + key, mods = gtk.accelerator_parse(accel_string) + if gtk.accel_map_lookup_entry(path) is None: + gtk.accel_map_add_entry(path, key, mods) + else: + gtk.accel_map_change_entry(path, key, mods, True) + +# map menu names to GTK stock ids. 
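+# Menu items whose name is not listed here fall back to a plain
+# gtk.MenuItem in MenuItem.make_widget() below.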
+_STOCK_IDS = {
+    "SaveItem": gtk.STOCK_SAVE,
+    "CopyItemURL": gtk.STOCK_COPY,
+    "RemoveItems": gtk.STOCK_REMOVE,
+    "StopItem": gtk.STOCK_MEDIA_STOP,
+    "NextItem": gtk.STOCK_MEDIA_NEXT,
+    "PreviousItem": gtk.STOCK_MEDIA_PREVIOUS,
+    "PlayPauseItem": gtk.STOCK_MEDIA_PLAY,
+    "Open": gtk.STOCK_OPEN,
+    "EditPreferences": gtk.STOCK_PREFERENCES,
+    "Quit": gtk.STOCK_QUIT,
+    "Help": gtk.STOCK_HELP,
+    "About": gtk.STOCK_ABOUT,
+    "Translate": gtk.STOCK_EDIT
+}
+try:
+    _STOCK_IDS['Fullscreen'] = gtk.STOCK_FULLSCREEN
+except AttributeError:
+    # fullscreen not available on all GTK versions
+    pass
+
+class MenuItemBase(base.Widget):
+    """Base class for MenuItem and Separator."""
+
+    def show(self):
+        """Show this menu item."""
+        self._widget.show()
+
+    def hide(self):
+        """Hide this menu item."""
+        self._widget.hide()
+
+    def _set_accel_group(self, accel_group):
+        # menu items don't care about the accel group, their parent Menu
+        # handles it for them
+        pass
+
+class MenuItem(MenuItemBase):
+    """Single item in the menu that can be clicked.
+
+    :param label: text to display (must be internationalized)
+    :param name: String identifier for this item
+    :param shortcut: Shortcut object to use
+
+    Signals:
+    - activate: menu item was clicked
+
+    Example:
+
+    >>> MenuItem(_("Preferences"), "EditPreferences")
+    >>> MenuItem(_("Cu_t"), "ClipboardCut", Shortcut("x", MOD))
+    >>> MenuItem(_("_Update Podcasts and Library"), "UpdatePodcasts",
+    ...          (Shortcut("r", MOD), Shortcut(F5)))
+    """
+
+    def __init__(self, label, name, shortcut=None):
+        MenuItemBase.__init__(self)
+        self.name = name
+        self.set_widget(self.make_widget(label))
+        self.activate_id = self.wrapped_widget_connect('activate',
+                                                       self._on_activate)
+        self._widget.show()
+        self.create_signal('activate')
+        _setup_accel(self._widget, self.name, shortcut)
+
+    def _on_activate(self, menu_item):
+        self.emit('activate')
+        gtk_menubar = self._find_menubar()
+        if gtk_menubar is not None:
+            try:
+                menubar = wrappermap.wrapper(gtk_menubar)
+            except KeyError:
+                app.widgetapp.handle_soft_failure('menubar activate',
+                    'no wrapper for gtk.MenuBar', with_exception=True)
+            else:
+                menubar.emit('activate', self.name)
+
+    def _find_menubar(self):
+        """Find the MenuBar that this menu item is attached to."""
+        menu_item = self._widget
+        while True:
+            parent_menu = menu_item.get_parent()
+            if isinstance(parent_menu, gtk.MenuBar):
+                return parent_menu
+            elif parent_menu is None:
+                return None
+            menu_item = parent_menu.get_attach_widget()
+            if menu_item is None:
+                return None
+
+    def make_widget(self, label):
+        """Create the menu item to use for this widget.
+
+        Subclasses will probably want to override this.
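+
+        For example, CheckMenuItem and RadioMenuItem override this to return
+        their toggleable gtk widget variants.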
+ """ + if self.name in _STOCK_IDS: + mi = gtk.ImageMenuItem(stock_id=_STOCK_IDS[self.name]) + mi.set_label(label) + return mi + else: + return gtk.MenuItem(label) + + def set_label(self, new_label): + self._widget.set_label(new_label) + + def get_label(self): + self._widget.get_label() + +class CheckMenuItem(MenuItem): + """MenuItem that toggles on/off""" + + def make_widget(self, label): + return gtk.CheckMenuItem(label) + + def set_state(self, active): + # prevent the activate signal from fireing in response to us manually + # changing a value + self._widget.handler_block(self.activate_id) + if active is not None: + self._widget.set_inconsistent(False) + self._widget.set_active(active) + else: + self._widget.set_inconsistent(True) + self._widget.set_active(False) + self._widget.handler_unblock(self.activate_id) + + def get_state(self): + return self._widget.get_active() + +class RadioMenuItem(CheckMenuItem): + """MenuItem that toggles on/off and is grouped with other RadioMenuItems. + """ + + def make_widget(self, label): + widget = gtk.RadioMenuItem() + widget.set_label(label) + return widget + + def set_group(self, group_item): + self._widget.set_group(group_item._widget) + + def remove_from_group(self): + """Remove this RadioMenuItem from its current group.""" + self._widget.set_group(None) + + def _on_activate(self, menu_item): + # GTK sends the activate signal for both the radio button that's + # toggled on and the one that gets turned off. Just emit our signal + # for the active radio button. + if self.get_state(): + MenuItem._on_activate(self, menu_item) + +class Separator(MenuItemBase): + """Separator item for menus""" + + def __init__(self): + MenuItemBase.__init__(self) + self.set_widget(gtk.SeparatorMenuItem()) + self._widget.show() + # Set name to be None just so that it has a similar API to other menu + # items. + self.name = None + +class MenuShell(base.Widget): + """Common code shared between Menu and MenuBar. + + Subclasses must define a _menu attribute that's a gtk.MenuShell subclass. + """ + + def __init__(self): + base.Widget.__init__(self) + self._accel_group = None + self.children = [] + + def append(self, menu_item): + """Add a menu item to the end of this menu.""" + self.children.append(menu_item) + menu_item._set_accel_group(self._accel_group) + self._menu.append(menu_item._widget) + + def insert(self, index, menu_item): + """Insert a menu item in the middle of this menu.""" + self.children.insert(index, menu_item) + menu_item._set_accel_group(self._accel_group) + self._menu.insert(menu_item._widget, index) + + def remove(self, menu_item): + """Remove a child menu item. + + :raises ValueError: menu_item is not a child of this menu + """ + self.children.remove(menu_item) + self._menu.remove(menu_item._widget) + menu_item._set_accel_group(None) + + def index(self, name): + """Get the position of a menu item in this list. + + :param name: name of the menu + :returns: index of the menu item, or -1 if not found. + """ + for i, menu_item in enumerate(self.children): + if menu_item.name == name: + return i + return -1 + + def get_children(self): + """Get the child menu items in order.""" + return list(self.children) + + def find(self, name): + """Search for a menu or menu item + + This method recursively searches the entire menu structure for a Menu + or MenuItem object with a given name. 
+ + :raises KeyError: name not found + """ + found = self._find(name) + if found is None: + raise KeyError(name) + else: + return found + + def _find(self, name): + """Low-level helper-method for find(). + + :returns: found menu item or None. + """ + for menu_item in self.get_children(): + if menu_item.name == name: + return menu_item + if isinstance(menu_item, MenuShell): + submenu_find = menu_item._find(name) + if submenu_find is not None: + return submenu_find + return None + +class Menu(MenuShell): + """A Menu holds a list of MenuItems and Menus. + + Example: + >>> Menu(_("P_layback"), "Playback", [ + ... MenuItem(_("_Foo"), "Foo"), + ... MenuItem(_("_Bar"), "Bar") + ... ]) + >>> Menu("", "toplevel", [ + ... Menu(_("_File"), "File", [ ... ]) + ... ]) + """ + + def __init__(self, label, name, child_items): + MenuShell.__init__(self) + self.set_widget(gtk.MenuItem(label)) + self._widget.show() + self.name = name + # set up _menu for the MenuShell code + self._menu = gtk.Menu() + _setup_accel(self._menu, self.name) + self._widget.set_submenu(self._menu) + for item in child_items: + self.append(item) + + def show(self): + """Show this menu.""" + self._widget.show() + + def hide(self): + """Hide this menu.""" + self._widget.hide() + + def _set_accel_group(self, accel_group): + """Set the accel group for this widget. + + Accel groups get created by the MenuBar. Whenever a menu or menu item + is added to that menu bar, the parent calls _set_accel_group() to give + the accel group to the child. + """ + if accel_group == self._accel_group: + return + self._menu.set_accel_group(accel_group) + self._accel_group = accel_group + for child in self.children: + child._set_accel_group(accel_group) + +class MenuBar(MenuShell): + """Displays a list of Menu items. + + Signals: + + - activate(menu_bar, name): a menu item was activated + """ + + def __init__(self): + """Create a new MenuBar + + :param name: string id to use for our action group + """ + MenuShell.__init__(self) + self.create_signal('activate') + self.set_widget(gtk.MenuBar()) + self._widget.show() + self._accel_group = gtk.AccelGroup() + # set up _menu for the MenuShell code + self._menu = self._widget + + def get_accel_group(self): + return self._accel_group + +class MainWindowMenuBar(MenuBar): + """MenuBar for the main window. + + This gets installed into app.widgetapp.menubar on GTK. + """ + def add_initial_menus(self, menus): + """Add the initial set of menus. + + We modify the menu structure slightly for GTK. + """ + for menu in menus: + self.append(menu) + self._modify_initial_menus() + + def _modify_initial_menus(self): + """Update the portable root menu with GTK-specific stuff.""" + # on linux, we don't have a CheckVersion option because + # we update with the package system. 
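+        # (find() raises KeyError if either of these items is missing.)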
+ this_platform = app.config.get(prefs.APP_PLATFORM) + if this_platform == 'linux': + self.find("FileMenu").remove(self.find("CheckVersion")) + app.video_renderer.setup_subtitle_encoding_menu() diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/keymap.py miro-6.0/lib/frontends/widgets/gtk/keymap.py --- miro-4.0.4/lib/frontends/widgets/gtk/keymap.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/keymap.py 2013-04-05 16:02:42.000000000 +0000 @@ -32,47 +32,56 @@ import gtk -from miro.frontends.widgets import menus +from miro.frontends.widgets import keyboard menubar_mod_map = { - menus.CTRL: '', - menus.ALT: '', - menus.SHIFT: '', + keyboard.MOD: '', + keyboard.CTRL: '', + keyboard.ALT: '', + keyboard.SHIFT: '', } menubar_key_map = { - menus.RIGHT_ARROW: 'Right', - menus.LEFT_ARROW: 'Left', - menus.UP_ARROW: 'Up', - menus.DOWN_ARROW: 'Down', - menus.SPACE: 'space', - menus.ENTER: 'Return', - menus.DELETE: 'Delete', - menus.BKSPACE: 'BackSpace', - menus.ESCAPE: 'Escape', + keyboard.RIGHT_ARROW: 'Right', + keyboard.LEFT_ARROW: 'Left', + keyboard.UP_ARROW: 'Up', + keyboard.DOWN_ARROW: 'Down', + keyboard.SPACE: 'space', + keyboard.ENTER: 'Return', + keyboard.DELETE: 'Delete', + keyboard.BKSPACE: 'BackSpace', + keyboard.ESCAPE: 'Escape', '>': 'greater', '<': 'less' } +for i in range(1, 13): + name = 'F%d' % i + menubar_key_map[getattr(keyboard, name)] = name # These are reversed versions of menubar_key_map and menubar_mod_map gtk_key_map = dict((i[1], i[0]) for i in menubar_key_map.items()) +def get_accel_string(shortcut): + mod_str = ''.join(menubar_mod_map[mod] for mod in shortcut.modifiers) + key_str = menubar_key_map.get(shortcut.shortcut, shortcut.shortcut) + return mod_str + key_str + def translate_gtk_modifiers(event): - """Convert a keypress event to a set of modifiers from the menus + """Convert a keypress event to a set of modifiers from the shortcut module. """ modifiers = set() if event.state & gtk.gdk.CONTROL_MASK: - modifiers.add(menus.CTRL) + modifiers.add(keyboard.CTRL) if event.state & gtk.gdk.MOD1_MASK: - modifiers.add(menus.ALT) + modifiers.add(keyboard.ALT) if event.state & gtk.gdk.SHIFT_MASK: - modifiers.add(menus.SHIFT) + modifiers.add(keyboard.SHIFT) return modifiers def translate_gtk_event(event): """Convert a GTK key event into the tuple (key, modifiers) where - key and modifiers are from the menus module. + key and modifiers are from the shortcut module. 
""" gtk_keyval = gtk.gdk.keyval_name(event.keyval) if gtk_keyval == None: diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/layoutmanager.py miro-6.0/lib/frontends/widgets/gtk/layoutmanager.py --- miro-4.0.4/lib/frontends/widgets/gtk/layoutmanager.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/layoutmanager.py 2013-04-05 16:02:42.000000000 +0000 @@ -47,7 +47,7 @@ key = (context, description, scale_factor, bold, italic) return util.Cache.get(self, key) - def create_new_value(self, key): + def create_new_value(self, key, invalidator=None): (context, description, scale_factor, bold, italic) = key return Font(context, description, scale_factor, bold, italic) @@ -145,8 +145,10 @@ def line_height(self): metrics = self.get_font_metrics() - # the +1: some glyphs can be slightly taller than ascent+descent (#17329) - return pango.PIXELS(metrics.get_ascent() + metrics.get_descent()) + 1 + # the +1: some glyphs can be slightly taller than ascent+descent + # (#17329) + return (pango.PIXELS(metrics.get_ascent()) + + pango.PIXELS(metrics.get_descent()) + 1) class TextBox(object): def __init__(self, context, font, color, shadow): diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/persistentwindow.py miro-6.0/lib/frontends/widgets/gtk/persistentwindow.py --- miro-4.0.4/lib/frontends/widgets/gtk/persistentwindow.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/persistentwindow.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,120 +0,0 @@ -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. - -"""persistentwindow.py -- Contains a GTK widget that displays a GDK window -that stays around as long as the widget is alive (i.e. after unrealize). This -makes it a nice place to embed video renderers and XULRunner inside, since -they XWindow/HWND that we pass to them doesn't go away when the widget is -unrealized. -""" - -import gtk -import gobject -import weakref - -_dummy_window = None - -def _get_dummy_window(): - """Get a hidden window to use. - - This method creates the hidden window lazily, as a singleton. 
- """ - global _dummy_window - if _dummy_window is None: - _dummy_window = gtk.gdk.Window(None, - x=0, y=0, width=1, height=1, - window_type=gtk.gdk.WINDOW_TOPLEVEL, - wclass=gtk.gdk.INPUT_OUTPUT, event_mask=0) - return _dummy_window - -_persistent_window_to_widget = weakref.WeakValueDictionary() - -class PersistentWindow(gtk.DrawingArea): - """GTK Widget that keeps around a GDK window from the time it's realized - to the time it's destroyed. - - Attributes: - - persistent_window -- Window that we keep around - """ - - def __init__(self): - gtk.DrawingArea.__init__(self) - self.persistent_window = gtk.gdk.Window(_get_dummy_window(), - x=0, y=0, width=1, height=1, window_type=gtk.gdk.WINDOW_CHILD, - wclass=gtk.gdk.INPUT_OUTPUT, event_mask=self.get_events()) - _persistent_window_to_widget[self.persistent_window] = self - - def set_events(self, event_mask): - gtk.DrawingArea.set_events(self, event_mask) - self.persistent_window.set_events(self.get_events()) - - def add_events(self, event_mask): - gtk.DrawingArea.add_events(self, event_mask) - self.persistent_window.set_events(self.get_events()) - - def do_realize(self): - gtk.DrawingArea.do_realize(self) - self.persistent_window.reparent(self.window, 0, 0) - self.persistent_window.resize(self.allocation.width, - self.allocation.height) - self.persistent_window.show() - self.persistent_window.set_events(self.get_events()) - self.persistent_window.set_user_data(self) - - def do_configure_event(self, event): - self.persistent_window.resize(event.width, event.height) - - def do_size_allocate(self, allocation): - gtk.DrawingArea.do_size_allocate(self, allocation) - self.persistent_window.resize(allocation.width, allocation.height) - - def do_unrealize(self): - self.persistent_window.set_user_data(None) - self.persistent_window.hide() - self.persistent_window.reparent(_get_dummy_window(), 0, 0) - gtk.DrawingArea.do_unrealize(self) - - def do_destroy(self): - try: - gtk.DrawingArea.do_destroy(self) - self.persistent_window.withdraw() - self.persistent_window.destroy() - self.persistent_window = None - except AttributeError: - # Probably means we're in shutdown, so our symbols have been - # deleted - pass -gobject.type_register(PersistentWindow) - -def get_widgets(): - retval = [] - for window in _get_dummy_window().get_children(): - retval.append(_persistent_window_to_widget[window]) - return retval diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/player.py miro-6.0/lib/frontends/widgets/gtk/player.py --- miro-4.0.4/lib/frontends/widgets/gtk/player.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/player.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,70 @@ +# Miro - an RSS based video player application +# Copyright (C) 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +from miro import player + +class NullRenderer: + def __init__(self): + pass + + def reset(self): + pass + + def select_file(self, iteminfo, success_callback, error_callback): + error_callback() + + def stop(self): + pass + + def set_volume(self, v): + pass + +class GTKPlayer(player.Player): + """Audio/Video player base class.""" + + def __init__(self, renderer): + player.Player.__init__(self) + if renderer is not None: + self.renderer = renderer + else: + self.renderer = NullRenderer() + + def get_audio_tracks(self): + track_count = self.renderer.get_audio_tracks() + tracks = [] + for i in xrange(track_count): + name = 'Track %s' % i + tracks.append((i, name)) + return tracks + + def get_enabled_audio_track(self): + return self.renderer.get_enabled_audio_track() + + def set_audio_track(self, track_index): + self.renderer.set_audio_track(track_index) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/preferenceswindow.py miro-6.0/lib/frontends/widgets/gtk/preferenceswindow.py --- miro-4.0.4/lib/frontends/widgets/gtk/preferenceswindow.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/preferenceswindow.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,6 +34,7 @@ from miro.frontends.widgets.gtk import layout from miro.frontends.widgets.gtk import simple from miro.frontends.widgets.gtk import window +from miro.gtcache import gettext as _ from miro.plat import resources class PreferencesWindow(window.Window): @@ -43,6 +44,7 @@ self.content_widget = gtk.VBox(spacing=12) self.content_widget.pack_start(self.tab_container._widget) close_button = gtk.Button(stock=gtk.STOCK_CLOSE) + close_button.set_label(_("Close")) close_button.connect_object('clicked', gtk.Window.hide, self._window) alignment = gtk.Alignment(xalign=1.0) alignment.set_padding(0, 10, 0, 10) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/pygtkhacks.pyx miro-6.0/lib/frontends/widgets/gtk/pygtkhacks.pyx --- miro-4.0.4/lib/frontends/widgets/gtk/pygtkhacks.pyx 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/pygtkhacks.pyx 2013-04-05 16:02:42.000000000 +0000 @@ -33,6 +33,7 @@ ctypedef int gint ctypedef unsigned long gulong ctypedef unsigned int guint + ctypedef unsigned short gboolean ctypedef char gchar ctypedef void * gpointer ctypedef void * GCallback @@ -61,6 +62,17 @@ char* text) cdef void gtk_tooltip_set_text (GtkTooltip *tooltip, char *text) +cdef extern from "gdk/gdk.h": + ctypedef struct GdkWindow + ctypedef struct GdkDrawable + + void gdk_window_get_internal_paint_info (GdkWindow *window, + GdkDrawable **real_drawable, + gint *x_offset, + gint *y_offset) + gboolean gdk_window_ensure_native (GdkWindow *window) + + cdef extern from "pango/pango.h": ctypedef struct PangoLayout 
cdef void pango_layout_set_height(PangoLayout *layout, int height) @@ -73,10 +85,11 @@ cdef PyObject* pygobject_init(int req_major, int req_minor, int req_micro) cdef GObject* pygobject_get(PyGObject*) -cdef GtkWidget* get_gtk_widget(object pygtk_widget): +cdef GObject* get_c_gobject(object py_gobject): + """Get a C GObject pointer from a Python GObject.""" cdef PyGObject *pygobject - pygobject = pygtk_widget - return (pygobject_get(pygobject)) + pygobject = py_gobject + return pygobject_get(pygobject) def initialize(): if pygobject_init(-1, -1, -1) == NULL: @@ -84,14 +97,14 @@ def unset_tree_view_drag_dest_row(object py_tree_view): cdef GtkTreeView* tree_view - tree_view = get_gtk_widget(py_tree_view) + tree_view = get_c_gobject(py_tree_view) gtk_tree_view_set_drag_dest_row(tree_view, NULL, GTK_TREE_VIEW_DROP_BEFORE) def set_entry_border(object py_entry, int top, int right, int bottom, int left): cdef GtkEntry* entry cdef GtkBorder border - entry = get_gtk_widget(py_entry) + entry = get_c_gobject(py_entry) border.left = left border.right = right border.top = top @@ -102,14 +115,26 @@ # For some reason this isn't available on pygtk 2.12.1 for windows, so we # have to implement it ourselves. cdef GtkTooltip* gtk_tooltip - cdef PyGObject *pygobject - pygobject = tooltip - gtk_tooltip = (pygobject_get(pygobject)) + gtk_tooltip = get_c_gobject(tooltip) gtk_tooltip_set_text(gtk_tooltip, text) def set_pango_layout_height(object layout, height): cdef PangoLayout* pango_layout - cdef PyGObject *pygobject - pygobject = layout - pango_layout = (pygobject_get(pygobject)) + pango_layout = get_c_gobject(layout) pango_layout_set_height(pango_layout, height) + +def get_gdk_window_offset(py_gdk_window): + cdef GdkWindow* window + cdef GdkDrawable* drawable + cdef gint x_offset, y_offset + + window = get_c_gobject(py_gdk_window) + gdk_window_get_internal_paint_info(window, &drawable, &x_offset, + &y_offset) + return (x_offset, y_offset) + +def ensure_native_window(py_gdk_window): + cdef GdkWindow* window + + window = get_c_gobject(py_gdk_window) + return gdk_window_ensure_native(window) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/simple.py miro-6.0/lib/frontends/widgets/gtk/simple.py --- miro-4.0.4/lib/frontends/widgets/gtk/simple.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/simple.py 2013-04-05 16:02:42.000000000 +0000 @@ -107,7 +107,18 @@ def __init__(self, path): Widget.__init__(self) self.set_widget(gtk.Image()) - self._widget.set_from_animation(gtk.gdk.PixbufAnimation(path)) + self._animation = gtk.gdk.PixbufAnimation(path) + # Set to animate before we are shown and stop animating after + # we disappear. + self._widget.connect('map', lambda w: self._set_animate(True)) + self._widget.connect('unmap-event', + lambda w, a: self._set_animate(False)) + + def _set_animate(self, enabled): + if enabled: + self._widget.set_from_animation(self._animation) + else: + self._widget.clear() class Label(Widget): """Widget that displays simple text.""" @@ -167,7 +178,7 @@ return self._widget.get_layout().get_pixel_size()[0] def set_text(self, text): - self._widget.set_text(text) + self._widget.set_text(text.encode('utf-8')) def get_text(self): return self._widget.get_text().decode('utf-8') @@ -181,12 +192,8 @@ self._widget.set_attributes(self.attr_list) def set_color(self, color): - # It seems like 'text' is the color we want to change, but fg is - # actually the one that changes it for me. Change them both just to - # be sure. 
- for state in xrange(5): - self.modify_style('fg', state, self.make_color(color)) - self.modify_style('text', state, self.make_color(color)) + color_as_int = (int(c * 65535) for c in color) + self.set_attr(pango.AttrForeground(*color_as_int)) def baseline(self): pango_context = self._widget.get_pango_context() diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/tableviewcells.py miro-6.0/lib/frontends/widgets/gtk/tableviewcells.py --- miro-4.0.4/lib/frontends/widgets/gtk/tableviewcells.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/tableviewcells.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,7 +34,6 @@ import pango from miro import signals -from miro import infolist from miro.frontends.widgets import widgetconst from miro.frontends.widgets.gtk import drawing from miro.frontends.widgets.gtk import wrappermap @@ -44,9 +43,12 @@ """Simple Cell Renderer https://develop.participatoryculture.org/index.php/WidgetAPITableView""" def __init__(self): - self._renderer = gtk.CellRendererText() + self._renderer = self.build_renderer() self.want_hover = False + def build_renderer(self): + return gtk.CellRendererText() + def setup_attributes(self, column, attr_map): column.add_attribute(self._renderer, 'text', attr_map['value']) @@ -85,12 +87,11 @@ def set_font_scale(self, scale_factor): self._renderer.props.scale = scale_factor -class ImageCellRenderer(object): +class ImageCellRenderer(CellRenderer): """Cell Renderer for images https://develop.participatoryculture.org/index.php/WidgetAPITableView""" - def __init__(self): - self._renderer = gtk.CellRendererPixbuf() - self.want_hover = False + def build_renderer(self): + return gtk.CellRendererPixbuf() def setup_attributes(self, column, attr_map): column.add_attribute(self._renderer, 'pixbuf', attr_map['image']) @@ -147,6 +148,9 @@ def on_render(self, window, widget, background_area, cell_area, expose_area, flags): + widget_wrapper = wrappermap.wrapper(widget) + cell_wrapper = wrappermap.wrapper(self) + selected = (flags & gtk.CELL_RENDERER_SELECTED) if selected: if widget.flags() & gtk.HAS_FOCUS: @@ -155,22 +159,23 @@ state = gtk.STATE_ACTIVE else: state = gtk.STATE_NORMAL - xpad = self.props.xpad - ypad = self.props.ypad - area = gtk.gdk.Rectangle(cell_area.x + xpad, cell_area.y + ypad, - cell_area.width - xpad * 2, cell_area.height - ypad * 2) + if cell_wrapper.IGNORE_PADDING: + area = background_area + else: + xpad = self.props.xpad + ypad = self.props.ypad + area = gtk.gdk.Rectangle(cell_area.x + xpad, cell_area.y + ypad, + cell_area.width - xpad * 2, cell_area.height - ypad * 2) context = drawing.DrawingContext(window, area, expose_area) - widget_wrapper = wrappermap.wrapper(widget) if (selected and not widget_wrapper.draws_selection and widget_wrapper.use_custom_style): # Draw the base color as our background. This erases the gradient # that GTK draws for selected items. - area = widget.get_background_area(self.path, self.column) window.draw_rectangle(widget.style.base_gc[state], True, - area.x, area.y, area.width, area.height) + background_area.x, background_area.y, + background_area.width, background_area.height) context.style = drawing.DrawingStyle(widget_wrapper, use_base_color=True, state=state) - owner = wrappermap.wrapper(self) widget_wrapper.layout_manager.update_cairo_context(context.context) hotspot_tracker = widget_wrapper.hotspot_tracker if (hotspot_tracker and hotspot_tracker.hit and @@ -188,7 +193,7 @@ # from the model itself, so we don't have to worry about setting them # here. 
widget_wrapper.layout_manager.reset() - owner.render(context, widget_wrapper.layout_manager, selected, + cell_wrapper.render(context, widget_wrapper.layout_manager, selected, hotspot, hover) def on_activate(self, event, widget, path, background_area, cell_area, @@ -200,14 +205,19 @@ pass gobject.type_register(GTKCustomCellRenderer) -class CustomCellRenderer(object): +class CustomCellRenderer(CellRenderer): """Customizable Cell Renderer https://develop.participatoryculture.org/index.php/WidgetAPITableView""" + + IGNORE_PADDING = False + def __init__(self): - self._renderer = GTKCustomCellRenderer() - self.want_hover = False + CellRenderer.__init__(self) wrappermap.add(self._renderer, self) + def build_renderer(self): + return GTKCustomCellRenderer() + def setup_attributes(self, column, attr_map): column.set_cell_data_func(self._renderer, self.cell_data_func, attr_map) @@ -224,19 +234,34 @@ def hotspot_test(self, style, layout, x, y, width, height): return None -class InfoListRenderer(CustomCellRenderer): +class ItemListRenderer(CustomCellRenderer): """Custom Renderer for InfoListModels https://develop.participatoryculture.org/index.php/WidgetAPITableView""" - def cell_data_func(self, column, cell, model, iter, attr_map): - self.info, self.attrs = wrappermap.wrapper(model).row_for_iter(iter) + def cell_data_func(self, column, cell, model, it, attr_map): + item_list = wrappermap.wrapper(model).item_list + row = model.row_of_iter(it) + self.info = item_list.get_row(row) + self.attrs = item_list.get_attrs(self.info.id) + self.group_info = item_list.get_group_info(row) cell.column = column - cell.path = model.get_path(iter) + cell.path = row -class InfoListRendererText(CellRenderer): +class ItemListRendererText(CellRenderer): """Renderer for InfoListModels that only display text https://develop.participatoryculture.org/index.php/WidgetAPITableView""" def setup_attributes(self, column, attr_map): - infolist.gtk.setup_text_cell_data_func(column, self._renderer, - self.get_value) + column.set_cell_data_func(self._renderer, self.cell_data_func, + attr_map) + + def cell_data_func(self, column, cell, model, it, attr_map): + item_list = wrappermap.wrapper(model).item_list + info = item_list.get_row(model.row_of_iter(it)) + cell.set_property("text", self.get_value(info)) + + def get_value(self, info): + """Get the text to render for this cell + + :param info: ItemInfo to render. 
+ """ diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/tableview.py miro-6.0/lib/frontends/widgets/gtk/tableview.py --- miro-4.0.4/lib/frontends/widgets/gtk/tableview.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/tableview.py 2013-04-05 16:02:42.000000000 +0000 @@ -44,24 +44,48 @@ HEADER_HEIGHT = 25 from miro import signals -from miro import infolist from miro.errors import (WidgetActionError, WidgetDomainError, WidgetRangeError, WidgetNotReadyError) from miro.frontends.widgets.tableselection import SelectionOwnerMixin from miro.frontends.widgets.tablescroll import ScrollbarOwnerMixin from miro.frontends.widgets.gtk import pygtkhacks from miro.frontends.widgets.gtk import drawing +from miro.frontends.widgets.gtk import fixedliststore from miro.frontends.widgets.gtk import wrappermap from miro.frontends.widgets.gtk.base import Widget from miro.frontends.widgets.gtk.simple import Image from miro.frontends.widgets.gtk.layoutmanager import LayoutManager from miro.frontends.widgets.gtk.weakconnect import weak_connect from miro.frontends.widgets.gtk.tableviewcells import (GTKCustomCellRenderer, - GTKCheckboxCellRenderer, InfoListRenderer, InfoListRendererText) + GTKCheckboxCellRenderer, ItemListRenderer, ItemListRendererText) PathInfo = namedtuple('PathInfo', 'path column x y') Rect = namedtuple('Rect', 'x y width height') +_album_view_gtkrc_installed = False +def _install_album_view_gtkrc(): + """Hack for styling GTKTreeView for the album view widget. + + We do a couple things: + - Remove the focus ring + - Remove any separator space. + + We do this so that we don't draw a box through the album view column for + selected rows. + """ + global _album_view_gtkrc_installed + if _album_view_gtkrc_installed: + return + rc_string = ('style "album-view-style"\n' + '{ \n' + ' GtkTreeView::vertical-separator = 0\n' + ' GtkTreeView::horizontal-separator = 0\n' + ' GtkWidget::focus-line-width = 0 \n' + '}\n' + 'widget "*.miro-album-view" style "album-view-style"\n') + gtk.rc_parse_string(rc_string) + _album_view_gtkrc_installed = True + def rect_contains_rect(outside, inside): # currently unused return (outside.x <= inside.x and @@ -214,6 +238,10 @@ self.set_enable_search(False) self.horizontal_separator = self.style_get_property("horizontal-separator") self.expander_size = self.style_get_property("expander-size") + self.group_lines_enabled = False + self.group_line_color = (0, 0, 0) + self.group_line_width = 1 + self._scroll_before_model_change = None def do_size_request(self, req): gtk.TreeView.do_size_request(self, req) @@ -275,11 +303,57 @@ self.set_drag_dest_at_bottom(False) def do_expose_event(self, event): + if self._scroll_before_model_change is not None: + self._restore_scroll_after_model_change() gtk.TreeView.do_expose_event(self, event) if self.drag_dest_at_bottom: gc = self.get_style().fg_gc[self.state] x1, x2, y = self.bottom_drag_dest_coords() event.window.draw_line(gc, x1, y, x2, y) + if self.group_lines_enabled and event.window == self.get_bin_window(): + self.draw_group_lines(event) + + def draw_group_lines(self, expose_event): + # we need both the GTK TreeModel and the ItemList for this one + gtk_model = self.get_model() + modelwrapper = wrappermap.wrapper(self).model + if (not isinstance(modelwrapper, ItemListModel) or + modelwrapper.item_list.group_func is None): + return + # prepare a couple variables for the drawing + expose_bottom = expose_event.area.y + expose_event.area.height + cr = expose_event.window.cairo_create() + 
cr.set_source_rgb(*self.group_line_color) + first_column = self.get_columns()[0] + # start on the top row of the expose event + path_info = self.get_path_at_pos(expose_event.area.x, expose_event.area.y) + if path_info is None: + return + else: + path = path_info[0] + gtk_iter = gtk_model.get_iter(path) + # draw the lines + while True: + # calculate the row's area in the y direction. We don't care + # about the x-axis, but PyGTK forces us to pass in a column, so we + # send in the first one and ignore the x/width attributes. + background_area = self.get_background_area(path, first_column) + if background_area.y > expose_bottom: + break + # draw the group line if we're on the last row of a group + index = gtk_model.row_of_iter(gtk_iter) + group_info = modelwrapper.item_list.get_group_info(index) + if group_info[0] == group_info[1] - 1: + y = (background_area.y + background_area.height - + self.group_line_width) + cr.rectangle(expose_event.area.x, y, expose_event.area.width, + self.group_line_width) + cr.fill() + # prepare for next row + gtk_iter = gtk_model.iter_next(gtk_iter) + if gtk_iter is None: + break + path = (path[0] + 1,) def bottom_drag_dest_coords(self): visible = self.get_visible_rect() @@ -298,6 +372,29 @@ if path_info: return PathInfo(*path_info) + def save_scroll_position_before_model_change(self): + """This method implements a hack to keep our scroll position when we + change our model. + + For performance reasons, sometimes it's better to change a model + than keep a model in place and make a bunch of changes to it (we + currently do this for ItemListModel). However, one issue that we run + into is that when we set the new model, the scroll position is lost. + + Call this method before changing the model to keep the scroll + position between changes. + """ + vadjustment = self.get_vadjustment() + hadjustment = self.get_hadjustment() + self._scroll_before_model_change = \ + (vadjustment.get_value(), hadjustment.get_value()) + + def _restore_scroll_after_model_change(self): + v_value, h_value = self._scroll_before_model_change + self._scroll_before_model_change = None + self.get_vadjustment().set_value(v_value) + self.get_hadjustment().set_value(h_value) + gobject.type_register(MiroTreeView) class HotspotTracker(object): @@ -509,11 +606,11 @@ self.selection.unselect_all() def _iter_to_string(self, iter_): - return self._model.get_string_from_iter(iter_) + return self.gtk_model.get_string_from_iter(iter_) def _iter_from_string(self, string): try: - return self._model.get_iter_from_string(string) + return self.gtk_model.get_iter_from_string(string) except ValueError: raise WidgetDomainError( "model iters", string, "%s other iters" % len(self.model)) @@ -528,7 +625,7 @@ real_model = self._widget.get_model() if not real_model: raise WidgetActionError("no model") - elif real_model != self._model: + elif real_model != self.gtk_model: raise WidgetActionError("wrong model?") def get_cursor(self): @@ -640,12 +737,12 @@ equivalent position to send to the GTK code if the drag_dest validates the drop.
""" - model = self._model + model = self.gtk_model try: gtk_path, gtk_position = self._widget.get_dest_row_at_pos(x, y) except TypeError: # Below the last row - yield (None, len(model), None, None) + yield (None, model.iter_n_children(None), None, None) return iter_ = model.get_iter(gtk_path) @@ -653,9 +750,7 @@ gtk.TREE_VIEW_DROP_INTO_OR_AFTER): yield (iter_, -1, gtk_path, gtk_position) - if hasattr(model, 'iter_is_valid'): - # tablist has this; item list does not - assert model.iter_is_valid(iter_) + assert model.iter_is_valid(iter_) parent_iter = model.iter_parent(iter_) position = gtk_path[-1] if gtk_position in (gtk.TREE_VIEW_DROP_BEFORE, @@ -684,8 +779,15 @@ for pos_info in self.calc_positions(x, y): drop_action = self.drag_dest.validate_drop(self, self.model, type, drag_context.actions, pos_info[0], pos_info[1]) + if isinstance(drop_action, (list, tuple)): + drop_action, iter = drop_action + path = self.model.get_path(iter) + pos = gtk.TREE_VIEW_DROP_INTO_OR_BEFORE + else: + path, pos = pos_info[2:4] + if drop_action: - self.set_drag_dest_row(pos_info[2], pos_info[3]) + self.set_drag_dest_row(path, pos) break else: self.unset_drag_dest_row() @@ -766,14 +868,9 @@ 'row-changed': self.on_row_changed, } self._hotspot_callback_handles.extend( - weak_connect(self._model, signal, handler) + weak_connect(self.gtk_model, signal, handler) for signal, handler in SIGNALS.iteritems()) - def _disconnect_hotspot_signals(self): - for handle in self._hotspot_callback_handles: - self._model.disconnect(handle) - self._hotspot_callback_handles = [] - def on_row_inserted(self, model, path, iter_): if self.hotspot_tracker: self.hotspot_tracker.redraw_cell() @@ -833,13 +930,6 @@ self.hotspot_tracker = None return True - def hotspot_model_changed(self): - """A bulk change has ended; reconnect signals and update hotspots.""" - self._connect_hotspot_signals() - if self.hotspot_tracker: - self.hotspot_tracker.redraw_cell() - self.hotspot_tracker.update_hit() - class ColumnOwnerMixin(object): """Keeps track of the table's columns - including the list of columns, and properties that we set for a table but need to apply to each column. @@ -1054,18 +1144,15 @@ draws_selection = True - def __init__(self, model): + def __init__(self, model, custom_headers=False): Widget.__init__(self) self.set_widget(MiroTreeView()) - self.model = model - self.model.add_to_tableview(self._widget) - self._model = self._widget.get_model() - wrappermap.add(self._model, model) + self.init_model(model) self._setup_colors() self.background_color = None self.context_menu_callback = None - self.in_bulk_change = False self.delaying_press = False + self._use_custom_headers = False self.layout_manager = LayoutManager(self._widget) self.height_changed = None # 17178 hack self._connect_signals() @@ -1076,6 +1163,27 @@ ColumnOwnerMixin.__init__(self) HoverTrackingMixin.__init__(self) GTKScrollbarOwnerMixin.__init__(self) + if custom_headers: + self._enable_custom_headers() + + def init_model(self, model): + self.model = model + self.model_handler = make_model_handler(model, self._widget) + + @property + def gtk_model(self): + return self.model._model + + # FIXME: should implement set_model() and make None a special case. + def unset_model(self): + """Disconnect our model from this table view. + + This should be called when you want to destroy a TableView and + there's a new TableView sharing its model. 
+ """ + self.model.cleanup() + self._widget.set_model(None) + self.model_handler = self.model = None def _connect_signals(self): self.create_signal('row-expanded') @@ -1105,6 +1213,23 @@ if self.use_custom_style: self.set_column_background_color() + def set_group_lines_enabled(self, enabled): + """Enable/Disable group lines. + + This only has an effect if our model is an ItemListModel and it has a + grouping set. + + If group lines are enabled, we will draw a line below the last item in + the group. Use set_group_line_style() to change the look of the line. + """ + self._widget.group_lines_enabled = enabled + self.queue_redraw() + + def set_group_line_style(self, color, width): + self._widget.group_line_color = color + self._widget.group_line_width = width + self.queue_redraw() + def handle_custom_style_change(self): if self.background_color is not None: if self.use_custom_style: @@ -1136,9 +1261,18 @@ column_spacing = TableColumn.FIXED_PADDING * len(self.columns) return total_width - column_spacing + def enable_album_view_focus_hack(self): + _install_album_view_gtkrc() + self._widget.set_name("miro-album-view") + def focus(self): self._widget.grab_focus() + def _enable_custom_headers(self): + # NB: this is currently not used because the GTK tableview does not + # support custom headers. + self._use_custom_headers = True + def set_show_headers(self, show): self._widget.set_headers_visible(show) self._widget.set_headers_clickable(show) @@ -1186,10 +1320,10 @@ return def on_row_expanded(self, _widget, iter_, path): - self.emit('row-expanded', iter_, path) + self.emit('row-expanded', iter_) def on_row_collapsed(self, _widget, iter_, path): - self.emit('row-collapsed', iter_, path) + self.emit('row-collapsed', iter_) def on_button_press(self, treeview, event): """Handle a mouse button press""" @@ -1209,26 +1343,55 @@ iter_ = treeview.get_model().get_iter(path_info.path) self.emit('row-clicked', iter_) - if self.handle_hotspot_hit(treeview, event): + if (event.button == 1 and self.handle_hotspot_hit(treeview, event)): return True if event.window != treeview.get_bin_window(): # click is outside the content area, don't try to handle this. # In particular, our DnD code messes up resizing table columns. return False - if event.button == 1 and self.drag_source: + if (event.button == 1 and self.drag_source and + not self._x_coord_in_expander(treeview, path_info)): return self.start_drag(treeview, event, path_info) elif event.button == 3 and self.context_menu_callback: self.show_context_menu(treeview, event, path_info) return True + # FALLTHROUGH + return False + def show_context_menu(self, treeview, event, path_info): """Pop up a context menu for the given click event (which is a right-click on a row). """ + # hack for album view + if (treeview.group_lines_enabled and + path_info.column == treeview.get_columns()[0]): + self._select_all_rows_in_group(treeview, path_info.path) self._popup_context_menu(path_info.path, event) # grab keyboard focus since we handled the event self.focus() + def _select_all_rows_in_group(self, treeview, path): + """Select all items in the group """ + + # FIXME: this is very tightly coupled with the portable code. 
+ + modelwrapper = self.model + gtk_model = treeview.get_model() + if (not isinstance(modelwrapper, ItemListModel) or + modelwrapper.item_list.group_func is None): + return + group_info = modelwrapper.item_list.get_group_info(path[0]) + + start_row = path[0] - group_info[0] + total_rows = group_info[1] + + with self._ignoring_changes(): + self.unselect_all() + for row in xrange(start_row, start_row + total_rows): + self.select_path((row,)) + self.emit('selection-changed') + def _popup_context_menu(self, path, event): if not self.selection.path_is_selected(path): self.unselect_all(signal=False) @@ -1343,18 +1506,8 @@ self.potential_drag_motion(treeview, event) return None # XXX: used to fall through; not sure what retval does here - def start_bulk_change(self): - self._widget.freeze_child_notify() - self._widget.set_model(None) - self._disconnect_hotspot_signals() - self.in_bulk_change = True - def model_changed(self): - if self.in_bulk_change: - self._widget.set_model(self._model) - self._widget.thaw_child_notify() - self.hotspot_model_changed() - self.in_bulk_change = False + self.model_handler.model_changed() def get_path(self, iter_): """Always use this rather than the model's get_path directly - @@ -1363,9 +1516,33 @@ AssertionError. Example related bug: #17362. """ assert self.model.iter_is_valid(iter_) + return self.gtk_model.get_path(iter_) + +class TableModelBase(object): + """Base class for all TableModels.""" + def cleanup(self): + pass + + def first_iter(self): + return self._model.get_iter_first() + + def next_iter(self, iter_): + return self._model.iter_next(iter_) + + def nth_iter(self, index): + assert index >= 0 + return self._model.iter_nth_child(None, index) + + def get_path(self, iter_): return self._model.get_path(iter_) -class TableModel(object): + def iter_is_valid(self, iter_): + return self._model.iter_is_valid(iter_) + + def __len__(self): + return len(self._model) + +class TableModel(TableModelBase): """https://develop.participatoryculture.org/index.php/WidgetAPITableView""" MODEL_CLASS = gtk.ListStore @@ -1379,9 +1556,6 @@ self.convert_row_for_gtk = self.convert_row_for_gtk_fast self.convert_value_for_gtk = self.convert_value_for_gtk_fast - def add_to_tableview(self, widget): - widget.set_model(self._model) - def map_types(self, miro_column_types): type_map = { 'boolean': bool, @@ -1444,34 +1618,15 @@ row = self.convert_row_for_gtk(column_values) return self._model.insert_before(iter_, row) - def first_iter(self): - return self._model.get_iter_first() - - def next_iter(self, iter_): - return self._model.iter_next(iter_) - - def nth_iter(self, index): - assert index >= 0 - return self._model.iter_nth_child(None, index) - def __iter__(self): return iter(self._model) - def __len__(self): - return len(self._model) - def __getitem__(self, iter_): return self._model[iter_] def get_rows(self, row_paths): return [self._model[path] for path in row_paths] - def get_path(self, iter_): - return self._model.get_path(iter_) - - def iter_is_valid(self, iter_): - return self._model.iter_is_valid(iter_) - class TreeTableModel(TableModel): """https://develop.participatoryculture.org/index.php/WidgetAPITableView""" MODEL_CLASS = gtk.TreeStore @@ -1506,20 +1661,108 @@ assert self._model.iter_is_valid(iter_) return self._model.iter_parent(iter_) -class InfoListModel(infolist.InfoList): - # InfoList is a special model for quick handling of ItemInfo lists - # we we wrap it slightly so that it matches some of the TableModel - # interface +class ItemListModel(TableModelBase): + """Special model to use
with ItemLists + """ + + def __init__(self, item_list): + self.item_list = item_list + self.list_changed_handle = self.item_list.connect_before( + "list-changed", self.on_list_changed) + self._model = fixedliststore.FixedListStore(len(item_list)) + + def cleanup(self): + if self.list_changed_handle is not None: + self.item_list.disconnect(self.list_changed_handle) + self.list_changed_handle = None + + def on_list_changed(self, item_list): + # When the list changes, we need to create a new FixedListStore object + # to handle it. ItemListModelHandler then updates the GtkTreeView + # with this new model. + self._model = fixedliststore.FixedListStore(len(item_list)) + + def get_item(self, it): + return self.item_list.get_row(self._model.row_of_iter(it)) + + def iter_for_id(self, item_id): + """Get an iter that points to an item in this list.""" + row = self.item_list.get_index(item_id) + return self._model.iter_nth_child(None, row) + + def __getitem__(self, it): + """Get a row of data + + For ItemListModel, this is the tuple (info, attrs, group_info) + """ + index = self._model.row_of_iter(it) + item = self.item_list.get_row(index) + attrs = self.item_list.get_attrs(item.id) + group_info = self.item_list.get_group_info(index) + return (item, attrs, group_info) + def check_new_column(self, column): - if not (isinstance(column.renderer, InfoListRenderer) or - isinstance(column.renderer, InfoListRendererText)): - raise TypeError("InfoListModel only supports InfoListRenderer " - "or InfoListRendererText") + if not (isinstance(column.renderer, ItemListRenderer) or + isinstance(column.renderer, ItemListRendererText)): + raise TypeError("ItemListModel only supports ItemListRenderer " + "or ItemListRendererText") def get_rows(self, row_paths): - return [self.nth_row(path[0]) for path in row_paths] + return [(self.item_list.get_row(path[0]),) for path in row_paths] def iter_is_valid(self, iter_): - # it may be useful to do something here, but invalid iters are more of a - # problem for GTK's models. + # there's no way to check this for FixedListStore. Let's just assume + # that iters are valid, since the model never changes. return True + + def __len__(self): + return self._model.get_property("row_count") + +class ModelHandler(object): + """Used by TableView to handle its TableModel + + This class defines the default behavior. Subclasses extend it to handle + specific models. make_model_handler() is a factory method to create the + correct ModelHandler for a given TableModel. + """ + def __init__(self, model, gtk_treeview): + self.model = model + self.gtk_treeview = gtk_treeview + self._set_gtk_model() + + def _set_gtk_model(self): + gtk_model = self.model._model + self.gtk_treeview.set_model(gtk_model) + wrappermap.add(gtk_model, self.model) + + def reset_gtk_model(self): + self.gtk_treeview.save_scroll_position_before_model_change() + self._set_gtk_model() + + # Note: by default, we don't need to do anything special for + # model_changed(). + def model_changed(self): + return + +class ItemListModelHandler(ModelHandler): + """ModelHandler for ItemListModel; swaps in the new FixedListStore + whenever the underlying ItemList changes. + """ + def __init__(self, model, gtk_treeview): + ModelHandler.__init__(self, model, gtk_treeview) + item_list = self.model.item_list + + def model_changed(self): + if self.model._model != self.gtk_treeview.get_model(): + # Items have been added or removed and ItemListModel has created a + # FixedListStore for the new list. Update our widget. + self.reset_gtk_model() + else: + # Some of the items have changed, but the list is the same. Ask + # the treeview to redraw itself.
+ self.gtk_treeview.queue_draw() + +def make_model_handler(model, gtk_treeview): + if isinstance(model, ItemListModel): + return ItemListModelHandler(model, gtk_treeview) + else: + return ModelHandler(model, gtk_treeview) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/textlayout.py miro-6.0/lib/frontends/widgets/gtk/textlayout.py --- miro-4.0.4/lib/frontends/widgets/gtk/textlayout.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/textlayout.py 2013-04-05 16:02:42.000000000 +0000 @@ -85,7 +85,9 @@ def line_height(self): metrics = self.get_font_metrics() - return pango.PIXELS(metrics.get_ascent() + metrics.get_descent()) + # add 1px to the ascent/descent to include space for the baseline + # (see #17539) + return pango.PIXELS(metrics.get_ascent() + metrics.get_descent()) + 1 def set_width(self, width): if width is not None: diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/trayicon.py miro-6.0/lib/frontends/widgets/gtk/trayicon.py --- miro-4.0.4/lib/frontends/widgets/gtk/trayicon.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/trayicon.py 2013-04-05 16:02:42.000000000 +0000 @@ -91,7 +91,9 @@ menu_items.append((None, None)) break - if app.config.get(prefs.SINGLE_VIDEO_PLAYBACK_MODE): + if ((app.playback_manager.is_playing and + app.playback_manager.item_continuous_playback_mode( + app.playback_manager.playlist.currently_playing))): menu_items.append((_("Play Next Unplayed (%(unplayed)d)", {"unplayed": app.widgetapp.unwatched_count}), self.on_play_unwatched)) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/video.py miro-6.0/lib/frontends/widgets/gtk/video.py --- miro-4.0.4/lib/frontends/widgets/gtk/video.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/video.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,26 +35,22 @@ import gtk from miro import app -from miro import player from miro.gtcache import gettext as _ from miro import messages +from miro.plat.frontends.widgets import hidemouse from miro.plat import resources from miro.plat import screensaver +from miro.frontends.widgets.gtk import player from miro.frontends.widgets.gtk.window import Window, WrappedWindow from miro.frontends.widgets.gtk.widgetset import ( Widget, VBox, Label, HBox, Alignment, Background, DrawingArea, ClickableImageButton) -from miro.frontends.widgets.gtk.persistentwindow import PersistentWindow +from miro.plat.frontends.widgets import videoembed BLACK = (0.0, 0.0, 0.0) WHITE = (1.0, 1.0, 1.0) GREEN = (159.0 / 255.0, 202.0 / 255.0, 120.0 / 255.0) -# Global VideoWidget object. We re-use so we can re-use our -# PersistentWindow -video_widget = None - - class ClickableLabel(Widget): """This is like a label and reimplements many of the Label things, but it's an EventBox with a Label child widget. 
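A minimal sketch of the scroll-preservation hack that tableview.py introduces above, i.e. save_scroll_position_before_model_change() plus the lazy restore in do_expose_event() (assuming PyGTK 2.x; ScrollKeepingTreeView and its attribute names are illustrative stand-ins for MiroTreeView, not part of the patch):

    import gobject
    import gtk

    class ScrollKeepingTreeView(gtk.TreeView):
        # Illustrative condensation of MiroTreeView's scroll hack.
        def __init__(self):
            gtk.TreeView.__init__(self)
            self._saved_scroll = None

        def save_scroll_position_before_model_change(self):
            # Remember both scrollbar positions before the model is swapped.
            self._saved_scroll = (self.get_vadjustment().get_value(),
                                  self.get_hadjustment().get_value())

        def do_expose_event(self, event):
            # Restore lazily, on the first redraw after the swap, once GTK
            # has sized the new model, then draw as usual.
            if self._saved_scroll is not None:
                v_value, h_value = self._saved_scroll
                self._saved_scroll = None
                self.get_vadjustment().set_value(v_value)
                self.get_hadjustment().set_value(h_value)
            return gtk.TreeView.do_expose_event(self, event)

    gobject.type_register(ScrollKeepingTreeView)

Calling save_scroll_position_before_model_change() right before set_model(), as ModelHandler.reset_gtk_model() does in the patch, keeps the view from snapping back to the top when ItemListModel swaps in a freshly built FixedListStore.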
@@ -118,11 +114,6 @@ pass -def make_hidden_cursor(): - pixmap = gtk.gdk.Pixmap(None, 1, 1, 1) - color = gtk.gdk.Color() - return gtk.gdk.Cursor(pixmap, pixmap, color, color, 0, 0) - def make_label(text, handler, visible=True): if visible: @@ -213,20 +204,6 @@ window.move(screen_rect.x, screen_rect.y + screen_rect.height - my_height) - -class VideoWidget(Widget): - def __init__(self, renderer): - Widget.__init__(self) - self.set_widget(PersistentWindow()) - self._widget.set_double_buffered(False) - self._widget.add_events(gtk.gdk.POINTER_MOTION_MASK) - self._widget.add_events(gtk.gdk.BUTTON_PRESS_MASK) - renderer.set_widget(self._widget) - - def destroy(self): - self._widget.destroy() - - class Divider(DrawingArea): def size_request(self, layout): return (1, 25) @@ -238,22 +215,27 @@ context.rel_line_to(0, context.height) context.stroke() - class VideoDetailsWidget(Background): def __init__(self): Background.__init__(self) self.item_info = None self.rebuild_video_details() - self._delete_link = None - self._delete_image = None + self._delete_link = self._delete_image = None + self._keep_link = self._keep_image = None self._will_play_handle = app.playback_manager.connect( 'will-play', self.on_will_play) + self._info_changed_handle = app.playback_manager.connect( + 'playing-info-changed', self.on_info_changed) def on_will_play(self, widget, duration): # we need to update the video details now that the file is # open and we know more about subtitle track info. self.rebuild_video_details() + def on_info_changed(self, widget, item_info): + self.item_info = item_info + self.rebuild_video_details() + def rebuild_video_details(self): # this removes the child widget if there is one self.remove() @@ -312,6 +294,12 @@ right_side_hbox.pack_start(_align_middle(self._delete_image)) self._delete_link = make_label(_("Delete"), self.handle_delete) right_side_hbox.pack_start(_align_middle(self._delete_link)) + if self.item_info.can_be_saved: # keepable + self._keep_image = make_image_button( + 'images/keep-button.png', self.handle_keep) + right_side_hbox.pack_start(_align_middle(self._keep_image)) + self._keep_link = make_label(_("Keep"), self.handle_keep) + right_side_hbox.pack_start(_align_middle(self._keep_link)) outer_hbox = HBox() outer_hbox.pack_start(_align_left(left_side_hbox, left_pad=10), @@ -372,6 +360,8 @@ def handle_keep(self, widget): messages.KeepVideo(self.item_info.id).send_to_backend() self._widget.window.set_cursor(None) + self.reset() + self.rebuild_video_details() def handle_delete(self, widget): item_info = self.item_info @@ -419,8 +409,7 @@ menu.append(sep) child = gtk.MenuItem(_("Select a Subtitles file...")) - sensitive = 'PlayingLocalVideo' in app.menu_manager.enabled_groups - child.set_sensitive(sensitive) + child.set_sensitive(app.playback_manager.is_playing_video) child.connect('activate', self.handle_select_subtitle_file) child.show() menu.append(child) @@ -430,20 +419,20 @@ def handle_disable_subtitles(self, widget): if widget.active: app.video_renderer.disable_subtitles() - app.widgetapp.window.on_playback_change(app.playback_manager) + app.menu_manager.update_menus('playback-changed') self.rebuild_video_details() def handle_subtitle_change(self, widget, index): if widget.active: - app.video_renderer.enable_subtitle_track(index) - app.widgetapp.window.on_playback_change(app.playback_manager) + app.video_renderer.set_subtitle_track(index) + app.menu_manager.update_menus('playback-changed') self.rebuild_video_details() def handle_select_subtitle_file(self, widget): 
app.playback_manager.open_subtitle_file() def handle_commentslink(self, widget, event): - app.widgetapp.open_url(self.item_info.commentslink) + app.widgetapp.open_url(self.item_info.comments_link) def handle_share(self, widget, event): app.widgetapp.share_item(self.item_info) @@ -480,8 +469,12 @@ self._delete_link.on_leave_notify(None, None) if self._delete_image: self._delete_image.on_leave_notify(None, None) + if self._keep_link: + self._keep_link.on_leave_notify(None, None) + if self._keep_image: + self._keep_image.on_leave_notify(None, None) -class VideoPlayer(player.Player, VBox): +class VideoPlayer(player.GTKPlayer, VBox): """Video renderer widget. Note: ``app.video_renderer`` must be initialized before @@ -491,49 +484,33 @@ HIDE_CONTROLS_TIMEOUT = 2000 def __init__(self): - global video_widget - player.Player.__init__(self) + player.GTKPlayer.__init__(self, app.video_renderer) VBox.__init__(self) - if app.video_renderer is not None: - self.renderer = app.video_renderer - else: - self.renderer = NullRenderer() self.overlay = None self.screensaver_manager = None - if video_widget is None: - video_widget = VideoWidget(self.renderer) - self._video_widget = video_widget + self._video_widget = videoembed.VideoWidget(self.renderer) self.pack_start(self._video_widget, expand=True) self._video_details = VideoDetailsWidget() self.pack_start(self._video_details) self.hide_controls_timeout = None - self.motion_handler = None - self.videobox_motion_handler = None - self.hidden_cursor = make_hidden_cursor() - # piggyback on the TrackItemsManually message that playback.py sends. - app.info_updater.item_changed_callbacks.add('manual', 'playback-list', - self._on_items_changed) - self._item_id = None - self._video_widget.wrapped_widget_connect( - 'button-press-event', self.on_button_press) + self._video_widget.connect('double-click', self.on_double_click) + self._video_widget.connect('mouse-motion', self.on_mouse_motion) def teardown(self): - self.renderer.reset() - app.info_updater.item_changed_callbacks.remove('manual', - 'playback-list', self._on_items_changed) - self._items_changed_callback = None + # remove our embedding widget from the hierarchy self.remove(self._video_widget) - - def _on_items_changed(self, message): - for item_info in message.changed: - if item_info.id == self._item_id: - self._video_details.update_info(item_info) - break + # now that we aren't showing a video widget, we can reset playback + self.renderer.reset() + self._video_widget.destroy() + # remove callbacks + self._video_widget.disconnect_all() + # dereference VideoWidget + self._video_widget = None def update_for_presentation_mode(self, mode): pass @@ -541,7 +518,6 @@ def set_item(self, item_info, success_callback, error_callback): self._video_details.set_video_details(item_info) self.renderer.select_file(item_info, success_callback, error_callback) - self._item_id = item_info.id def get_elapsed_playback_time(self): return self.renderer.get_current_time() @@ -555,12 +531,12 @@ def play(self): self.renderer.play() # do this to trigger the overlay showing up for a smidge - self.on_mouse_motion(None, None) + self.on_mouse_motion(None) def play_from_time(self, resume_time=0): - # Note: This overrides player.Player's version of play_from_time, but - # this one seeks directly rather than fiddling with - # total_playback_time. + # FIXME: this overrides the default implementation. The reason + # is that the default implementation requires the total playback + # time, which may not be available yet at this point.
self.seek_to_time(resume_time) self.play() @@ -590,17 +566,13 @@ self.screensaver_manager.disable() self.rebuild_video_details() self._make_overlay() - self.motion_handler = self.wrapped_widget_connect( - 'motion-notify-event', self.on_mouse_motion) - self.videobox_motion_handler = self.overlay._window.connect( - 'motion-notify-event', self.on_mouse_motion) + self.overlay._window.connect('motion-notify-event', + self.on_motion_notify) if not app.playback_manager.detached_window: - app.widgetapp.window.menubar.hide() + app.widgetapp.window.menubar._widget.hide() self.schedule_hide_controls(self.HIDE_CONTROLS_TIMEOUT) - # make sure all hide() calls go through, otherwise we get the wrong - # size on windows (#10810) - while gtk.events_pending(): - gtk.main_iteration() + # Need to call set_decorated() before fullscreen. See #10810. + _window().set_decorated(False) _window().fullscreen() def _make_overlay(self): @@ -633,21 +605,25 @@ def prepare_switch_to_detached_playback(self): gobject.timeout_add(0, self.rebuild_video_details) - def on_button_press(self, widget, event): - if event.type == gtk.gdk._2BUTTON_PRESS: - app.playback_manager.toggle_fullscreen() - return True - return False + def on_double_click(self, widget): + app.playback_manager.toggle_fullscreen() + + def on_motion_notify(self, widget, event): + self.on_mouse_motion(widget) - def on_mouse_motion(self, widget, event): + def on_mouse_motion(self, widget): if not self.overlay: return if not self.overlay.is_visible(): show_it_all = False - if event is None: - show_it_all = True - else: + # NOTE: this code wasn't working when I went through the + # windows overhaul, so I just left it commented out. Will says we + # should eventually implement this. + # + #if event is None: + #show_it_all = True + #else: # figures out the monitor that miro is fullscreened on and # gets the monitor geometry for that. # if app.playback_manager.detached_window is not None: @@ -664,7 +640,8 @@ # Hack to fix #17213. 
Eventually we should remove this # and uncomment the code above to implement #8655 - show_it_all = True + #show_it_all = True + show_it_all = True if show_it_all: self.show_controls() @@ -674,15 +651,18 @@ else: self.last_motion_time = time.time() + def hide_mouse(self): + hidemouse.hide(_window().window) + def show_mouse(self): - _window().window.set_cursor(None) + hidemouse.unhide(_window().window) def show_controls(self): self.show_mouse() self.overlay.show() def hide_controls(self): - _window().window.set_cursor(self.hidden_cursor) + self.hide_mouse() if self.overlay and self.overlay.is_visible(): self.overlay.close() @@ -711,14 +691,15 @@ if self.screensaver_manager is not None: self.screensaver_manager.enable() self.screensaver_manager = None - app.widgetapp.window.menubar.show() + app.widgetapp.window.menubar._widget.show() self.rebuild_video_details() self._video_details.show() self._destroy_overlay() _window().unfullscreen() - self._widget.disconnect(self.motion_handler) + # Undo above call to set_decorated() + _window().set_decorated(True) self.cancel_hide_controls() - _window().window.set_cursor(None) + self.show_mouse() def select_subtitle_file(self, sub_path, handle_successful_select): app.video_renderer.select_subtitle_file( @@ -728,3 +709,15 @@ def select_subtitle_encoding(self, encoding): app.video_renderer.select_subtitle_encoding(encoding) + + def get_subtitle_tracks(self): + return self.renderer.get_subtitle_tracks() + + def get_enabled_subtitle_track(self): + return self.renderer.get_enabled_subtitle_track() + + def set_subtitle_track(self, track_index): + if track_index is not None: + self.renderer.set_subtitle_track(track_index) + else: + self.renderer.disable_subtitles() diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/webkitgtkhacks.pyx miro-6.0/lib/frontends/widgets/gtk/webkitgtkhacks.pyx --- miro-4.0.4/lib/frontends/widgets/gtk/webkitgtkhacks.pyx 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/webkitgtkhacks.pyx 2013-04-05 16:02:42.000000000 +0000 @@ -45,7 +45,7 @@ ctypedef void* SoupCookieJar ctypedef void* SoupCookie - SoupCookieJarText* soup_cookie_jar_text_new(char* filename, + SoupCookieJar* soup_cookie_jar_text_new(char* filename, gboolean read_only) void soup_cookie_jar_add_cookie(SoupCookieJar *jar, SoupCookie *cookie) SoupCookie* soup_cookie_new(char* name, char* value, char* domain, char* path, int max_age) @@ -65,7 +65,7 @@ session = webkit_get_default_session() if not session: raise AssertionError("webkit_get_default_session() returned NULL") - cookie_jar = soup_cookie_jar_text_new(filename, 0) + cookie_jar = soup_cookie_jar_text_new(filename, 0) if not cookie_jar: raise AssertionError("soup_cookie_jar_text_new() returned NULL") soup_session_add_feature (session, cookie_jar) @@ -78,7 +78,7 @@ session = webkit_get_default_session() if not session: raise AssertionError("webkit_get_default_session() returned NULL") - cookie_jar = soup_session_get_feature(session, soup_cookie_jar_get_type()) + cookie_jar = soup_session_get_feature(session, soup_cookie_jar_get_type()) if not cookie_jar: raise AssertionError("soup_session_get_feature() returned NULL") cookie = soup_cookie_new(name, value, domain, path, age) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/widgetset.py miro-6.0/lib/frontends/widgets/gtk/widgetset.py --- miro-4.0.4/lib/frontends/widgets/gtk/widgetset.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/widgetset.py 2013-04-05 16:02:42.000000000 +0000 @@ -52,11 +52,12 @@ FileOpenDialog,
FileSaveDialog, DirectorySelectDialog, AboutDialog, \ AlertDialog, DialogWindow from miro.frontends.widgets.gtk.preferenceswindow import PreferencesWindow +from miro.frontends.widgets.gtk.donatewindow import DonateWindow from miro.frontends.widgets.gtk.tableview import (TableView, TableModel, - InfoListModel, TableColumn, TreeTableModel, CUSTOM_HEADER_HEIGHT) + ItemListModel, TableColumn, TreeTableModel, CUSTOM_HEADER_HEIGHT) from miro.frontends.widgets.gtk.tableviewcells import (CellRenderer, ImageCellRenderer, CheckboxCellRenderer, CustomCellRenderer, - InfoListRenderer, InfoListRendererText) + ItemListRenderer, ItemListRendererText) from miro.frontends.widgets.gtk.simple import (Image, ImageDisplay, AnimatedImageDisplay, Label, Scroller, Expander, SolidBackground, ProgressBar, HLine) @@ -64,3 +65,5 @@ from miro.frontends.widgets.gtk.video import VideoPlayer from miro.frontends.widgets.gtk.widgets import Rect from miro.frontends.widgets.gtk.sniffer import get_item_type +from miro.frontends.widgets.gtk.gtkmenus import (MenuItem, RadioMenuItem, + CheckMenuItem, Separator, Menu, MenuBar) diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/widgets.py miro-6.0/lib/frontends/widgets/gtk/widgets.py --- miro-4.0.4/lib/frontends/widgets/gtk/widgets.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/widgets.py 2013-04-05 16:02:42.000000000 +0000 @@ -65,7 +65,7 @@ return True def close(self): - self._widget.destroy() + self._widget = None def set_content_widget(self, widget): """Set the widget that will be drawn in the content area for this diff -Nru miro-4.0.4/lib/frontends/widgets/gtk/window.py miro-6.0/lib/frontends/widgets/gtk/window.py --- miro-4.0.4/lib/frontends/widgets/gtk/window.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/gtk/window.py 2013-04-05 16:02:42.000000000 +0000 @@ -41,51 +41,17 @@ from miro import dialogs from miro.fileobject import FilenameType from miro.gtcache import gettext as _ -from miro.frontends.widgets.gtk import wrappermap, widgets -from miro.frontends.widgets.gtk import keymap, layout -from miro.frontends.widgets import menus +from miro.frontends.widgets.gtk import gtkmenus +from miro.frontends.widgets.gtk import keymap +from miro.frontends.widgets.gtk import layout +from miro.frontends.widgets.gtk import widgets +from miro.frontends.widgets.gtk import wrappermap from miro.plat import resources # keeps the objects alive until destroy() is called alive_windows = set() running_dialogs = set() -def __get_fullscreen_stock_id(): - try: - return gtk.STOCK_FULLSCREEN - except StandardError: - pass - -STOCK_IDS = { - "SaveItem": gtk.STOCK_SAVE, - "CopyItemURL": gtk.STOCK_COPY, - "RemoveItems": gtk.STOCK_REMOVE, - "Fullscreen": __get_fullscreen_stock_id(), - "StopItem": gtk.STOCK_MEDIA_STOP, - "NextItem": gtk.STOCK_MEDIA_NEXT, - "PreviousItem": gtk.STOCK_MEDIA_PREVIOUS, - "PlayPauseItem": gtk.STOCK_MEDIA_PLAY, - "Open": gtk.STOCK_OPEN, - "EditPreferences": gtk.STOCK_PREFERENCES, - "Quit": gtk.STOCK_QUIT, - "Help": gtk.STOCK_HELP, - "About": gtk.STOCK_ABOUT, - "Translate": gtk.STOCK_EDIT -} - -for i in range(1, 13): - name = 'F%d' % i - keymap.menubar_key_map[getattr(menus, name)] = name - -def get_accel_string(shortcut): - mod_str = ''.join( - keymap.menubar_mod_map[mod] for mod in shortcut.modifiers) - key_str = keymap.menubar_key_map.get(shortcut.shortcut, shortcut.shortcut) - return mod_str + key_str - -def get_stock_id(n): - return STOCK_IDS.get(n, None) - class WrappedWindow(gtk.Window): def do_map(self): 
gtk.Window.do_map(self) @@ -150,18 +116,6 @@ gobject.type_register(WrappedWindow) -class WrappedMainWindow(WrappedWindow): - def do_key_press_event(self, event): - if (gtk.gdk.keyval_name(event.keyval) == 'Return' and - event.state & gtk.gdk.MOD1_MASK and - app.playback_manager.is_playing): - # Hack for having 2 shortcuts for fullscreen - app.playback_manager.enter_fullscreen() - return - return WrappedWindow.do_key_press_event(self, event) - -gobject.type_register(WrappedMainWindow) - class WindowBase(signals.SignalEmitter): def __init__(self): signals.SignalEmitter.__init__(self) @@ -169,14 +123,6 @@ self.create_signal('key-press') self.create_signal('show') self.create_signal('hide') - # FIXME - this is a weird place to have the menu code because - # it causes all WindowBase subclasses to have all this extra - # menu stuff. - self.menu_structure = None - self.menu_action_groups = None - self._merge_id = None - self._subtitle_tracks_cached = None - self._setup_ui_manager() def set_window(self, window): self._window = window @@ -191,203 +137,17 @@ self.emit('use-custom-style-changed') def calc_use_custom_style(self): - base = self._window.style.base[gtk.STATE_NORMAL] - # Decide if we should use a custom style. Right now the - # formula is the base color is a very light shade of - # gray/white (lighter than #f0f0f0). - self.use_custom_style = ((base.red == base.green == base.blue) and - base.red >= 61680) + if self._window is not None: + base = self._window.style.base[gtk.STATE_NORMAL] + # Decide if we should use a custom style. Right now the + # formula is the base color is a very light shade of + # gray/white (lighter than #f0f0f0). + self.use_custom_style = ((base.red == base.green == base.blue) and + base.red >= 61680) def connect_menu_keyboard_shortcuts(self): - self._window.add_accel_group(self.ui_manager.get_accel_group()) - - def _add_menu(self, menu, outstream, parent=None): - outstream.write('<menu action="%s">' % menu.action) - for mem in menu.menuitems: - if isinstance(mem, menus.Menu): - self._add_menu(mem, outstream, menu) - elif isinstance(mem, menus.Separator): - self._add_separator(mem, outstream) - elif isinstance(mem, menus.MenuItem): - self._add_menuitem(mem, outstream) - outstream.write('</menu>') - - def _add_menuitem(self, menu, outstream): - if menu.action not in ("NoneAvailable", "SubtitlesSelect"): - outstream.write('<menuitem action="%s"/>' % menu.action) - - def _add_separator(self, menu, outstream): - outstream.write("<separator/>") - - def _setup_ui_manager(self): - self.menu_structure = menus.get_menu() - - # make modifications to the menu structure here - - # on linux, we don't have a CheckVersion option because - # we update with the package system.
- this_platform = app.config.get(prefs.APP_PLATFORM) - if this_platform == 'linux': - file_menu = self.menu_structure.get("FileMenu") - file_menu.remove("CheckVersion") - - # If the renderer supports it, create a the subtitle encodings menu - try: - app.video_renderer.setup_subtitle_encoding_menu( - self.menu_structure) - except AttributeError: - pass - - # generate action groups after making all modifications - mag = menus.generate_action_groups(self.menu_structure) - self.menu_action_groups = mag - - self.ui_manager = gtk.UIManager() - self.make_actions() - - outstream = StringIO.StringIO() - outstream.write('<ui><menubar name="MiroMenu">') - for mem in self.menu_structure.menuitems: - self._add_menu(mem, outstream) - outstream.write('</menubar>') - - for mem in self.menu_structure: - if ((not isinstance(mem, menus.MenuItem) or - len(mem.shortcuts) <= 1)): - continue - for shortcut in mem.shortcuts[1:]: - outstream.write('<accelerator action="%s%s"/>' % \ - (mem.action, id(shortcut))) - outstream.write('</ui>') - self.ui_manager.add_ui_from_string(outstream.getvalue()) - - def make_action(self, action, label, groups, shortcuts=None): - gtk_action = gtk.Action(action, label, None, get_stock_id(action)) - self.setup_action(gtk_action, groups, shortcuts) - - def make_radio_action(self, action, radio_group, label, groups, - shortcuts): - gtk_action = gtk.RadioAction(action, label, None, - get_stock_id(action), 0) - self.setup_action(gtk_action, groups, shortcuts) - try: - root_action = self.radio_group_actions[radio_group] - except KeyError: - # gtk_action is the first action for the group. - self.radio_group_actions[radio_group] = gtk_action - else: - # There was already a gtk_action for this group - gtk_action.set_group(root_action) - - def setup_action(self, gtk_action, groups, shortcuts): - action_name = gtk_action.get_name() - self.actions[action_name] = gtk_action - callback = menus.lookup_handler(action_name) - if callback is not None: - gtk_action.connect("activate", self.on_activate, callback) - action_group_name = groups[0] - action_group = self.action_groups[action_group_name] - if shortcuts is None or len(shortcuts) == 0: - action_group.add_action(gtk_action) - else: - action_group.add_action_with_accel(gtk_action, - get_accel_string(shortcuts[0])) - for shortcut in shortcuts[1:]: - shortcut_name = gtk_action.get_name() + str(id(shortcut)) - extra_action = gtk.Action(shortcut_name, None, None, None) - extra_action.set_visible(False) - if callback is not None: - extra_action.connect('activate', self.on_activate, - callback) - action_group.add_action_with_accel(extra_action, - get_accel_string(shortcut)) - - def _raw_check_action(self, action, label, groups, callback, index, - group=None): - gtk_action = gtk.RadioAction(action, label, None, None, index) - if group is not None: - gtk_action.set_group(group) - gtk_action.connect("activate", callback, index) - self.action_groups[groups[0]].add_action(gtk_action) - - def make_check_action(self, action, check_group, label, groups, shortcuts): - gtk_action = gtk.ToggleAction(action, label, None, None) - self.actions[action] = gtk_action - callback = menus.lookup_handler(gtk_action.get_name()) - if callback is not None: - gtk_action.connect("toggled", self.on_activate, callback) - if check_group not in self.check_groups: - self.check_groups[check_group] = list() - self.check_groups[check_group].append(gtk_action) - self.action_groups[groups[0]].add_action(gtk_action) - - def make_actions(self): - self.action_groups = {} - self.actions = {} - self.radio_group_actions = {} - self.check_groups = {} - - for name
in self.menu_action_groups.keys(): - self.action_groups[name] = gtk.ActionGroup(name) - - self.action_groups["Subtitles"] = gtk.ActionGroup("Subtitles") - - for mem in self.menu_structure: - if isinstance(mem, menus.Separator): - continue - if isinstance(mem, menus.Menu): - self.make_action(mem.action, mem.label, mem.groups) - elif isinstance(mem, menus.RadioMenuItem): - self.make_radio_action(mem.action, mem.radio_group, mem.label, - mem.groups, mem.shortcuts) - elif isinstance(mem, menus.CheckMenuItem): - self.make_check_action(mem.action, mem.check_group, mem.label, - mem.groups, mem.shortcuts) - elif isinstance(mem, menus.MenuItem): - self.make_action(mem.action, mem.label, mem.groups, - mem.shortcuts) - - - # make a bunch of SubtitleTrack# actions - self._raw_check_action("SubtitlesDisabled", _("Disable Subtitles"), - ["AlwaysOn"], self.on_subtitles_change, -1) - radio_group = self.action_groups["AlwaysOn"].get_action( - "SubtitlesDisabled") - for i in range(199): - self._raw_check_action("SubtitleTrack%d" % i, "", ["AlwaysOn"], - self.on_subtitles_change, i, radio_group) - - for action_group in self.action_groups.values(): - self.ui_manager.insert_action_group(action_group, -1) - - def on_subtitles_select(self, action, track_index): - action_group = self.action_groups["AlwaysOn"] - action_group.get_action("SubtitlesDisabled").current_value = -2 - app.playback_manager.open_subtitle_file() - - def on_subtitles_change(self, action, track_index): - if hasattr(self, "_ignore_on_subtitles_change"): - return - if action.get_property("current-value") != action.get_property( - "value"): - return - action_group = self.action_groups["AlwaysOn"] - action_group.get_action( - "SubtitlesDisabled").current_value = track_index - if track_index == -1: - app.video_renderer.disable_subtitles() - else: - app.video_renderer.enable_subtitle_track(track_index) - - def select_subtitle_radio(self, track_index): - self._ignore_on_subtitles_change = True - action_group = self.action_groups["AlwaysOn"] - action = action_group.get_action("SubtitlesDisabled") - action.set_property('current-value', track_index) - delattr(self, "_ignore_on_subtitles_change") - - def on_activate(self, action, callback): - callback() + """Connect the shortcuts for the app menu to this window.""" + self._window.add_accel_group(app.widgetapp.menubar.get_accel_group()) class Window(WindowBase): """The main Miro window. 
""" @@ -440,13 +200,16 @@ if hasattr(self, "_closing"): return self._closing = True + # Keep a reference to the widget in case will-close signal handler + # calls destroy() + old_window = self._window self.emit('will-close') - self._window.hide() + old_window.hide() del self._closing def destroy(self): self.close() - self._window.destroy() + self._window = None alive_windows.discard(self) def is_active(self): @@ -540,36 +303,27 @@ self.vbox = gtk.VBox() self._window.add(self.vbox) self.vbox.show() - self._add_menubar() - self.connect_menu_keyboard_shortcuts() + self._add_app_menubar() self.create_signal('save-dimensions') self.create_signal('save-maximized') self.create_signal('on-shown') - app.menu_manager.connect('enabled-changed', self.on_menu_change) - app.menu_manager.connect('radio-group-changed', self.on_radio_change) - app.menu_manager.connect('checked-changed', self.on_checked_change) - app.playback_manager.connect('did-start-playing', - self.on_playback_change) - app.playback_manager.connect('will-play', self.on_playback_change) - app.playback_manager.connect('did-stop', self.on_playback_change) - self._window.connect('key-release-event', self.on_key_release) self._window.connect('window-state-event', self.on_window_state_event) self._window.connect('configure-event', self.on_configure_event) self._window.connect('map-event', lambda w, a: self.emit('on-shown')) - self._clear_subtitles_menu() def _make_gtk_window(self): - return WrappedMainWindow() + return WrappedWindow() def on_delete_window(self, widget, event): app.widgetapp.on_close() return True def on_configure_event(self, widget, event): - (x, y) = self._window.get_position() - (width, height) = self._window.get_size() - self.emit('save-dimensions', x, y, width, height) + if self._window is not None: + (x, y) = self._window.get_position() + (width, height) = self._window.get_size() + self.emit('save-dimensions', x, y, width, height) def on_window_state_event(self, widget, event): maximized = bool( @@ -582,127 +336,10 @@ 'Up', 'Down'): return True - def _add_menubar(self): - self.menubar = self.ui_manager.get_widget("/MiroMenu") - self.vbox.pack_start(self.menubar, expand=False) - self.menubar.show_all() - - def on_menu_change(self, menu_manager): - for name, action_group in self.action_groups.items(): - if name in menu_manager.enabled_groups: - action_group.set_visible(True) - action_group.set_sensitive(True) - else: - # TODO: don't hard-code this here; probably put a "hide when - # not checked" property on check menu class --Kaz - if name.startswith('column-'): - action_group.set_visible(False) - else: - action_group.set_sensitive(False) - - def get_state_label(action, state): - menu = self.menu_structure.get(action) - return menu.state_labels.get(state, menu.label) - - action_labels = {} - for state, actions in menu_manager.states.iteritems(): - for action in actions: - action_labels[action] = get_state_label(action, state) - - for name, action in self.actions.iteritems(): - default = self.menu_structure.get(name).label - new_label = action_labels.get(name, default) - action.set_property('label', new_label) - - play_pause = self.menu_structure.get('PlayPauseItem').state_labels[ - menu_manager.play_pause_state] - self.actions['PlayPauseItem'].set_property('label', play_pause) - - def on_radio_change(self, menu_manager, radio_group, value): - root_action = self.radio_group_actions[radio_group] - for action in root_action.get_group(): - if action.get_name() == value: - action.set_active(True) - return - - def 
on_checked_change(self, menu_manager, check_group, values): - group = self.check_groups[check_group] - for action in group: - name = action.get_name() - if name in values: - checked = values[name] - action.handler_block_by_func(self.on_activate) - action.set_active(checked) - action.handler_unblock_by_func(self.on_activate) - - def on_playback_change(self, playback_manager, *extra_args): - self._ignore_on_subtitles_change = True - if app.playback_manager.is_playing_audio: - self._clear_subtitles_menu() - else: - tracks = app.video_renderer.get_subtitle_tracks() -# if tracks is None or len(tracks) == 0: - if len(tracks) == 0: - self._clear_subtitles_menu() - else: - self._populate_subtitles_menu(tracks) - delattr(self, "_ignore_on_subtitles_change") - - def _populate_subtitles_menu(self, tracks): - enabled_track = app.video_renderer.get_enabled_subtitle_track() - - if self._subtitle_tracks_cached == (tuple(tracks), enabled_track): - return - - self._subtitle_tracks_cached = (tuple(tracks), enabled_track) - - if self._merge_id is not None: - self.ui_manager.remove_ui(self._merge_id) - - outstream = StringIO.StringIO() - outstream.write(''' - - - -''') - for i, lang in tracks: - outstream.write( - ' \n' % i) - outstream.write(''' - - - - - -''') - - self._merge_id = self.ui_manager.add_ui_from_string( - outstream.getvalue()) - - action_group = self.action_groups["AlwaysOn"] - for i, lang in tracks: - action_group.get_action("SubtitleTrack%d" % i).set_property( - "label", lang) - - action_group.get_action("SubtitlesDisabled").set_property( - "current-value", enabled_track) - - def _clear_subtitles_menu(self): - if self._merge_id is not None: - self.ui_manager.remove_ui(self._merge_id) - self._subtitle_tracks_cached = None - - s = ''' - - - - - - - - -''' - self._merge_id = self.ui_manager.add_ui_from_string(s) + def _add_app_menubar(self): + self.menubar = app.widgetapp.menubar + self.vbox.pack_start(self.menubar._widget, expand=False) + self.connect_menu_keyboard_shortcuts() def _add_content_widget(self, widget): self.vbox.pack_start(widget._widget, expand=True) @@ -727,13 +364,17 @@ return self._run() finally: running_dialogs.remove(self) + self._window = None def _run(self): """Run the dialog. Must be implemented by subclasses.""" raise NotImplementedError() def destroy(self): - self._window.destroy() + if self._window is not None: + self._window.response(gtk.RESPONSE_NONE) + # don't set self._window to None yet. We will unset it when we + # return from the _run() method class Dialog(DialogBase): def __init__(self, title, description=None): @@ -751,7 +392,7 @@ def build_content(self): packing_vbox = layout.VBox(spacing=20) packing_vbox._widget.set_border_width(6) - if self.description: + if self.description is not None: label = gtk.Label(self.description) label.set_line_wrap(True) label.set_size_request(390, -1) @@ -762,7 +403,10 @@ return packing_vbox def add_button(self, text): - self.buttons_to_add.append(_stock.get(text, text)) + if text in _stock: + # store both the text and the stock ID + text = _stock[text], text + self.buttons_to_add.append(text) def pack_buttons(self): # There's a couple tricky things here: @@ -773,7 +417,12 @@ # response_ids for the user. 
response_id = len(self.buttons_to_add) for text in reversed(self.buttons_to_add): - self._window.add_button(text, response_id) + label = None + if isinstance(text, tuple): # stock ID, text + text, label = text + button = self._window.add_button(text, response_id) + if label is not None: + button.set_label(label) response_id -= 1 self.buttons_to_add = [] self._window.set_default_response(1) @@ -790,9 +439,6 @@ else: return response - 1 # response IDs started at 1 - def destroy(self): - DialogBase.destroy(self) - def set_extra_widget(self, widget): self.extra_widget = widget @@ -802,6 +448,7 @@ class FileDialogBase(DialogBase): def _run(self): ret = self._window.run() + self._window.hide() if ret == gtk.RESPONSE_OK: self._files = self._window.get_filenames() return 0 diff -Nru miro-4.0.4/lib/frontends/widgets/guidecontroller.py miro-6.0/lib/frontends/widgets/guidecontroller.py --- miro-4.0.4/lib/frontends/widgets/guidecontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/guidecontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,6 +29,7 @@ """Controller for the Guide tab. It's a browser with an informational sidebar. """ +import logging import operator from miro.gtcache import gettext as _ @@ -77,7 +78,7 @@ WIDTH = 138 ITEM_LIMIT = 6 - def __init__(self, title, sort_key): + def __init__(self, title): widgetset.VBox.__init__(self) self.current_limit = self.ITEM_LIMIT hbox = widgetset.HBox() @@ -91,19 +92,12 @@ # 17/2 is close to 8 self.pack_start(self.item_box) - self.items = {} - self.currently_packed = [] - self.sorter = operator.attrgetter(sort_key) + self.item_list = [] def set_limit(self, limit): - if limit > self.ITEM_LIMIT: - limit = self.ITEM_LIMIT - if self.current_limit < limit: + limit = min(limit, self.ITEM_LIMIT) + if limit != self.current_limit: self.current_limit = limit - self.resort() - else: - self.current_limit = limit - self.currently_packed = self.currently_packed[:limit] self.repack() def get_label_for(self, text): @@ -121,7 +115,7 @@ def get_hbox_for(self, info): hbox = widgetset.HBox() - hbox.pack_start(self.get_label_for(info.name)) + hbox.pack_start(self.get_label_for(info.title)) button = imagebutton.ImageButton('guide-sidebar-play') button.connect('clicked', self.on_play_clicked, info) hbox.pack_end(button) @@ -130,56 +124,23 @@ def repack(self): for child in list(self.item_box.children): self.item_box.remove(child) - for info in self.currently_packed: + for info in self.item_list: self.item_box.pack_start(self.get_hbox_for(info)) - def resort(self): - # XXX how do we get data where self.sorter(item) is None!? 
#17431 is - # for tracking this issue - self.currently_packed = list(sorted( - (item for item in self.items.values() if self.sorter(item)), - key=self.sorter, - reverse=True))[:self.current_limit] - self.repack() - def set_items(self, items): - self.items = {} - for info in items: - self.items[info.id] = info - self.resort() - - def add(self, info): - self.items[info.id] = info - if len(self.currently_packed) < self.current_limit: - self.currently_packed.append(info) - self.currently_packed.sort(key=self.sorter, reverse=True) - self.repack() - else: - self.resort() - - def change(self, info): - self.items[info.id] = info - self.resort() - - def remove(self, id_): - info = self.items.pop(id_) - if info in self.currently_packed: - self.resort() + self.item_list = items + self.repack() def on_play_clicked(self, button, info): - messages.PlayMovie([info]).send_to_frontend() - + messages.PlayMovies([info]).send_to_frontend() class GuideSidebarDetails(widgetset.SolidBackground): def __init__(self): widgetset.SolidBackground.__init__(self) self.set_background_color(widgetutil.css_to_color('#e7e7e7')) - self.video = GuideSidebarCollection(_("Recently Watched"), - 'last_watched') - self.audio = GuideSidebarCollection(_("Recently Listened To"), - 'last_watched') - self.download = GuideSidebarCollection(_("Recent Downloads"), - 'downloaded_time') + self.video = GuideSidebarCollection(_("Recently Watched")) + self.audio = GuideSidebarCollection(_("Recently Listened To")) + self.download = GuideSidebarCollection(_("Recent Downloads")) self.vbox = widgetset.VBox() self.vbox.pack_start(self.video) self.vbox.pack_start(self.audio) @@ -211,46 +172,14 @@ self.download.set_limit(item_count) self.changing_size = False - def collection_for(self, info): - if not info.last_watched: - return self.download - elif info.file_type == 'audio': - return self.audio - else: - return self.video + def set_recently_downloaded(self, item_list): + self.download.set_items(item_list) - def on_item_list(self, items): - collections = {} - self.id_to_collection = {} - for info in items: - collection = self.collection_for(info) - self.id_to_collection[info.id] = collection - collections.setdefault(collection, []) - collections[collection].append(info) - - for collection, items in collections.items(): - collection.set_items(items) - - def on_item_changed(self, added, changed, removed): - for info in added: - collection = self.collection_for(info) - self.id_to_collection[info.id] = collection - collection.add(info) - - for id_ in removed: - collection = self.id_to_collection.pop(id_) - collection.remove(id_) - - for info in changed: - # if the collection that the info was is changed, send an - # add/remove pair, otherwise update the info - collection = self.collection_for(info) - if collection != self.id_to_collection.get(info.id): - self.id_to_collection[info.id].remove(info.id) - collection.add(info) - self.id_to_collection[info.id] = collection - else: - collection.change(info) + def set_recently_watched(self, item_list): + self.video.set_items(item_list) + + def set_recently_listened(self, item_list): + self.audio.set_items(item_list) class GuideSidebar(widgetset.HBox): @@ -284,8 +213,11 @@ self.pack_start(browser, expand=True) self.pack_start(self.sidebar) - def on_item_list(self, items): - self.sidebar.details.on_item_list(items) + def set_recently_downloaded(self, item_list): + self.sidebar.details.set_recently_downloaded(item_list) + + def set_recently_watched(self, item_list): + 
self.sidebar.details.set_recently_watched(item_list) - def on_item_changed(self, added, changed, removed): - self.sidebar.details.on_item_changed(added, changed, removed) + def set_recently_listened(self, item_list): + self.sidebar.details.set_recently_listened(item_list) diff -Nru miro-4.0.4/lib/frontends/widgets/imagepool.py miro-6.0/lib/frontends/widgets/imagepool.py --- miro-4.0.4/lib/frontends/widgets/imagepool.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/imagepool.py 2013-04-05 16:02:42.000000000 +0000 @@ -86,7 +86,7 @@ return image class ImagePool(util.Cache): - def create_new_value(self, (path, size)): + def create_new_value(self, (path, size), invalidator=None): try: image = widgetset.Image(path) except StandardError: @@ -98,14 +98,14 @@ return image class ImageSurfacePool(util.Cache): - def create_new_value(self, (path, size)): - image = _imagepool.get((path, size)) + def create_new_value(self, (path, size), invalidator=None): + image = _imagepool.get((path, size), invalidator=invalidator) return widgetset.ImageSurface(image) _imagepool = ImagePool(CACHE_SIZE) _image_surface_pool = ImageSurfacePool(CACHE_SIZE) -def get(path, size=None): +def get(path, size=None, invalidator=None): """Returns an Image for path. :param path: the filename for the image @@ -113,10 +113,12 @@ space, then specify this and get will return a scaled image; if size is not specified, then this returns the default sized image + :param invalidator: an optional function which returns True if + the cache value is no longer valid """ - return _imagepool.get((path, size)) + return _imagepool.get((path, size), invalidator=invalidator) -def get_surface(path, size=None): +def get_surface(path, size=None, invalidator=None): """Returns an ImageSurface for path. :param path: the filename for the image @@ -124,10 +126,12 @@ space, then specify this and get will return a scaled image; if size is not specified, then this returns the default sized image + :param invalidator: an optional function which returns True if + the cache value is no longer valid """ - return _image_surface_pool.get((path, size)) + return _image_surface_pool.get((path, size), invalidator=invalidator) -def get_image_display(path, size=None): +def get_image_display(path, size=None, invalidator=None): """Returns an ImageDisplay for path. :param path: the filename for the image @@ -135,8 +139,21 @@ space, then specify this and get will return a scaled image; if size is not specified, then this returns the default sized image + :param invalidator: an optional function which returns True if + the cache value is no longer valid """ - return widgetset.ImageDisplay(_imagepool.get((path, size))) - -# XXX have a way to remove a path from the cache? (re: #16573) + return widgetset.ImageDisplay(_imagepool.get((path, size), + invalidator=invalidator)) +def clear_cache_for_path(path): + """ + Removes a given path from the ImagePool and ImageSurface pools. Removing a
+ """ + for pool in _imagepool, _image_surface_pool: + invalid = set() + for key in pool.keys(): + if key[0] == path: + invalid.add(key) + for key in invalid: + pool.remove(key) diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/cocoa/infolist-cocoa.m miro-6.0/lib/frontends/widgets/infolist/cocoa/infolist-cocoa.m --- miro-4.0.4/lib/frontends/widgets/infolist/cocoa/infolist-cocoa.m 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/cocoa/infolist-cocoa.m 1970-01-01 00:00:00.000000000 +0000 @@ -1,132 +0,0 @@ -/* -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. -*/ - -// infolist-cocoa.cpp -- Cocoa code to implement infolist - -#include -#include "infolist-nodelist.h" - -/* All these hooks are basically noops. Cocoa is so much simpler than GTK - * here. 
- */ - -int -infolistplat_init(void) -{ - return 0; -} - - -int -infolistplat_nodelist_created(InfoListNodeList* nodelist) -{ - return 0; -} - -int -infolistplat_nodelist_will_destroy(InfoListNodeList* nodelist) -{ - return 0; -} - -void -infolistplat_will_add_nodes(InfoListNodeList* nodelist) -{ -} - -int -infolistplat_node_added(InfoListNodeList* nodelist, - InfoListNode* node) -{ - return 0; -} - -void -infolistplat_will_change_nodes(InfoListNodeList* nodelist) -{ -} - -int -infolistplat_node_changed(InfoListNodeList* nodelist, - InfoListNode* node) -{ - return 0; -} - -void -infolistplat_will_remove_nodes(InfoListNodeList* nodelist) -{ -} - -int -infolistplat_node_removed(InfoListNodeList* nodelist, - InfoListNode* node) -{ - return 0; -} - -void -infolistplat_will_reorder_nodes(InfoListNodeList* nodelist) -{ -} - -int -infolistplat_nodes_reordered(InfoListNodeList* nodelist) -{ - return 0; -} - -int -infolistplat_add_to_tableview(InfoListNodeList* nodelist, - PyObject* pyobject) -{ - return -1; // shouldn't be called on OS X -} - -InfoListNode* -infolistplat_node_for_pos(InfoListNodeList* nodelist, - PyObject* pos) -{ - long row; - - row = PyInt_AsLong(pos); - if(row == -1 && PyErr_Occurred()) return NULL; - return infolist_nodelist_nth_node(nodelist, row); -} - -InfoListNode* -infolistplat_iter_for_node(InfoListNodeList* nodelist, - InfoListNode* node) -{ - /* We handle this by subclassing in python-land on OS X.*/ - PyErr_SetNone(&PyExc_NotImplementedError); - return NULL; -} diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/gtk/infolist-gtk.c miro-6.0/lib/frontends/widgets/infolist/gtk/infolist-gtk.c --- miro-4.0.4/lib/frontends/widgets/infolist/gtk/infolist-gtk.c 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/gtk/infolist-gtk.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,703 +0,0 @@ -/* -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. 
-*/ - -// infolist-gtk.cpp -- GTK code to implement infolist - -#include -#include -#include -#include "infolist-nodelist.h" -#include "infolist-gtk.h" - -static PyTypeObject *PyGObject_Type=NULL; - -static void -miro_list_store_init (MiroListStore *self); - -static void -miro_list_store_finalize (GObject *self); - -static GtkTreeModelFlags -miro_list_store_get_flags (GtkTreeModel *tree_model); - -static gint -miro_list_store_get_n_columns (GtkTreeModel *tree_model); - -static GType -miro_list_store_get_column_type (GtkTreeModel *tree_model, - gint index); - -static gboolean -miro_list_store_make_iter (GtkTreeModel *tree_model, - GtkTreeIter *iter, - gint index); - -static gboolean -miro_list_store_get_iter (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreePath *path); - -static GtkTreePath * -miro_list_store_get_path (GtkTreeModel *tree_model, - GtkTreeIter *iter); - -static void -miro_list_store_get_value (GtkTreeModel *tree_model, - GtkTreeIter *iter, - gint column, - GValue *value); - -static gboolean -miro_list_store_iter_next (GtkTreeModel *tree_model, - GtkTreeIter *iter); - -static gboolean -miro_list_store_iter_children (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *parent); - -static gboolean -miro_list_store_iter_has_child (GtkTreeModel *tree_model, - GtkTreeIter *iter); - -static gint -miro_list_store_iter_n_children (GtkTreeModel *tree_model, - GtkTreeIter *iter); - -static gboolean -miro_list_store_iter_nth_child (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *parent, - gint n); - -static gboolean -miro_list_store_iter_parent (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *child); - -static void -miro_list_store_interface_init (GtkTreeModelIface *iface); - -static void -miro_list_store_set_path_row(MiroListStore* self, int row); - - -/* Create the "miro.infolist.gtk" module */ - -static PyObject* -convert_obj_to_utf8(PyObject* obj) { - PyObject* obj_as_unicode; - PyObject* obj_as_utf8; - - if(PyString_Check(obj)) { - /* obj is a string already, we can use it's value directly */ - Py_INCREF(obj); - return obj; - } - /* Convert the object to unicode */ - if(PyUnicode_Check(obj)) { - /* fast path for objects that are already unicode */ - obj_as_unicode = obj; - Py_INCREF(obj_as_unicode); - } else { - obj_as_unicode = PyObject_Unicode(obj); - if(!obj_as_unicode) return NULL; - } - /* Convert the unicode to utf-8 */ - obj_as_utf8 = PyUnicode_AsUTF8String(obj_as_unicode); - Py_DECREF(obj_as_unicode); - return obj_as_utf8; -} - -static void -text_cell_data_func(GtkTreeViewColumn *tree_column, - GtkCellRenderer *cell, - GtkTreeModel *tree_model, - GtkTreeIter *iter, - PyObject* attr_getter) -{ - PyObject* attr_obj; - PyObject* attr_as_utf8; - InfoListNode *node; - PyGILState_STATE gstate; - - /* Aquire the GIL before touching python data */ - gstate = PyGILState_Ensure(); - /* Get the python object for this cell */ - node = (InfoListNode*)iter->user_data; - attr_obj = PyObject_CallFunctionObjArgs(attr_getter, node->info, - NULL); - if(!attr_obj) { - PyErr_Print(); - return; - } - if(attr_obj == Py_None) { - /* Special-case empty values */ - Py_DECREF(attr_obj); - PyGILState_Release(gstate); - g_object_set(G_OBJECT(cell), "text", "", NULL); - return; - } - - /* Convert object to utf-8 */ - attr_as_utf8 = convert_obj_to_utf8(attr_obj); - if(!attr_as_utf8) { - PyErr_Print(); - Py_DECREF(attr_obj); - return; - } - /* Set the value */ - g_object_set(G_OBJECT(cell), "text", PyString_AS_STRING(attr_as_utf8), - NULL); - /* All done. 
Release references then the GIL */ - Py_DECREF(attr_as_utf8); - Py_DECREF(attr_obj); - PyGILState_Release(gstate); -} - -static void -release_attr_getter(PyObject* obj) -{ - PyGILState_STATE gstate; - - /* Aquire the GIL before touching python data */ - gstate = PyGILState_Ensure(); - Py_DECREF(obj); - PyGILState_Release(gstate); -} - - -static PyObject * -setup_text_cell_data_func(PyObject *self, PyObject *args) -{ - PyObject* column; - PyObject* renderer; - PyObject* attr_getter; - GObject* g_column; - GObject* g_renderer; - - if (!PyArg_ParseTuple(args, "O!O!O", - PyGObject_Type, &column, - PyGObject_Type, &renderer, - &attr_getter)) { - return NULL; - } - if(!PyCallable_Check(attr_getter)) { - PyErr_SetString(PyExc_TypeError, - "attr_getter not callable"); - return NULL; - } - /* Convert Python objects to GObjects */ - g_column = pygobject_get(column); - g_renderer = pygobject_get(renderer); - - /* Set the cell data func - * INCREF attr_getter so that we own a reference. DECREF the object - * in the destroy function to free it when we're done. - * */ - Py_INCREF(attr_getter); - gtk_tree_view_column_set_cell_data_func(GTK_TREE_VIEW_COLUMN(g_column), - GTK_CELL_RENDERER(g_renderer), - (GtkTreeCellDataFunc) - text_cell_data_func, - attr_getter, - release_attr_getter); - - Py_INCREF(Py_None); - return Py_None; -} - -static PyMethodDef InfoListGTKMethods[] = { - {"setup_text_cell_data_func", setup_text_cell_data_func, METH_VARARGS, - "Setup a Text Cell data function for an InfoList model" }, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -/* Implement GTK stuff */ - -G_DEFINE_TYPE_WITH_CODE(MiroListStore, miro_list_store, G_TYPE_OBJECT, - G_IMPLEMENT_INTERFACE(GTK_TYPE_TREE_MODEL, - miro_list_store_interface_init)); - -static void -miro_list_store_class_init (MiroListStoreClass *klass) -{ - GObjectClass *gobject_class = G_OBJECT_CLASS (klass); - - gobject_class->finalize = miro_list_store_finalize; -} - -static void -miro_list_store_init (MiroListStore *self) -{ - // Random int to check whether an iter belongs to our model - self->stamp = g_random_int(); - // make a path with depth == 1, we can use this to send out signals - self->path = gtk_tree_path_new(); - gtk_tree_path_append_index(self->path, 0); -} - -static void -miro_list_store_finalize (GObject *self) -{ - gtk_tree_path_free(MIRO_LIST_STORE(self)->path); -} - -static GtkTreeModelFlags -miro_list_store_get_flags (GtkTreeModel *tree_model) -{ - return (GtkTreeModelFlags)(GTK_TREE_MODEL_LIST_ONLY | - GTK_TREE_MODEL_ITERS_PERSIST); -} - -static gint -miro_list_store_get_n_columns (GtkTreeModel *tree_model) -{ - return 0; -} - -static GType -miro_list_store_get_column_type (GtkTreeModel *tree_model, - gint index) -{ - return G_TYPE_INVALID; -} - -static gboolean -miro_list_store_make_iter (GtkTreeModel *tree_model, - GtkTreeIter *iter, - gint index) -{ - MiroListStore *miro_list_store; - - miro_list_store = MIRO_LIST_STORE(tree_model); - - if (index < 0 || index >= miro_list_store->nodelist->node_count) { - return FALSE; - } - - iter->stamp = miro_list_store->stamp; - iter->user_data = infolist_nodelist_nth_node(miro_list_store->nodelist, - index); - return TRUE; -} - -static gboolean -miro_list_store_get_iter (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreePath *path) -{ - g_assert(path); - g_assert(gtk_tree_path_get_depth(path) == 1); - - return miro_list_store_make_iter(tree_model, iter, - gtk_tree_path_get_indices(path)[0]); -} - -static GtkTreePath * -miro_list_store_get_path (GtkTreeModel *tree_model, - GtkTreeIter *iter) 
-{ - GtkTreePath *path; - MiroListStore *miro_list_store; - InfoListNode* node; - int index; - - miro_list_store = MIRO_LIST_STORE(tree_model); - g_assert (iter != NULL); - if (iter->stamp != miro_list_store->stamp) return NULL; - g_assert (iter->user_data != NULL); - - node = (InfoListNode*)iter->user_data; - index = infolist_nodelist_node_index(miro_list_store->nodelist, node); - if(index < 0) return NULL; - path = gtk_tree_path_new(); - gtk_tree_path_append_index(path, index); - return path; -} - -static void -miro_list_store_get_value (GtkTreeModel *tree_model, - GtkTreeIter *iter, - gint column, - GValue *value) -{ - return; // no visible columns -} - -static gboolean -miro_list_store_iter_next (GtkTreeModel *tree_model, - GtkTreeIter *iter) -{ - MiroListStore *miro_list_store; - InfoListNode *node; - - miro_list_store = MIRO_LIST_STORE(tree_model); - g_assert(iter); - if (iter->stamp != miro_list_store->stamp) return FALSE; - g_assert(iter->user_data); - - node = iter->user_data; - if(infolist_node_is_sentinal(node->next)) return FALSE; - iter->user_data = node->next; - return TRUE; -} - -static gboolean -miro_list_store_iter_children (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *parent) -{ - // We don't have children, only works for special case when parent=NULL - if(parent != NULL) return FALSE; - return miro_list_store_make_iter(tree_model, iter, 0); -} - -static gboolean -miro_list_store_iter_has_child (GtkTreeModel *tree_model, - GtkTreeIter *iter) -{ - return FALSE; -} - -static gint -miro_list_store_iter_n_children (GtkTreeModel *tree_model, - GtkTreeIter *iter) -{ - MiroListStore* miro_list_store; - // We don't have children, only works for special case when iter=NULL - if(iter) { - return 0; - } - miro_list_store = MIRO_LIST_STORE(tree_model); - return miro_list_store->nodelist->node_count; -} - -static gboolean -miro_list_store_iter_nth_child (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *parent, - gint n) -{ - if(parent) { - return FALSE; // non-toplevel row fails - } - return miro_list_store_make_iter(tree_model, iter, n); -} - -static gboolean -miro_list_store_iter_parent (GtkTreeModel *tree_model, - GtkTreeIter *iter, - GtkTreeIter *child) -{ - return FALSE; -} - -static void -miro_list_store_interface_init (GtkTreeModelIface *iface) -{ - iface->get_flags = miro_list_store_get_flags; - iface->get_n_columns = miro_list_store_get_n_columns; - iface->get_column_type = miro_list_store_get_column_type; - iface->get_iter = miro_list_store_get_iter; - iface->get_path = miro_list_store_get_path; - iface->get_value = miro_list_store_get_value; - iface->iter_next = miro_list_store_iter_next; - iface->iter_children = miro_list_store_iter_children; - iface->iter_has_child = miro_list_store_iter_has_child; - iface->iter_n_children = miro_list_store_iter_n_children; - iface->iter_nth_child = miro_list_store_iter_nth_child; - iface->iter_parent = miro_list_store_iter_parent; -} - -MiroListStore* -miro_list_store_new(InfoListNodeList* nodelist) -{ - MiroListStore *rv; - rv = MIRO_LIST_STORE(g_object_new (MIRO_TYPE_LIST_STORE, NULL)); - rv->nodelist = nodelist; - return rv; -} - -static void -miro_list_store_set_path_row(MiroListStore* self, int row) -{ - gtk_tree_path_get_indices(self->path)[0] = row; -} - -// Implement InfoListNodeList hooks - -static void -do_init_pygtk(void) -{ - // This is pretty weird, init_pygtk is a macro that sometimes calls - // return, so we have to put this call in it's own function. 
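-    // (infolistplat_init() checks PyErr_Occurred() immediately after calling this wrapper, so a hidden early return from the macro is still detected.)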
- init_pygtk(); -} - -int -infolistplat_init(void) -{ - PyObject* gobject_mod; - PyObject* infolist_mod; - PyObject* infolist_gtk_mod; - - g_type_init(); - if(!pygobject_init(2, -1, -1)) return -1; - do_init_pygtk(); - if(PyErr_Occurred()) return -1; - - /* Setup PyGObject_Type */ - gobject_mod = PyImport_ImportModule("gobject"); - if (!gobject_mod) { - return -1; - } - PyGObject_Type = (PyTypeObject*)PyObject_GetAttrString(gobject_mod, - "GObject"); - Py_DECREF(gobject_mod); - - /* Create our GTK infolist submodule. */ - infolist_gtk_mod = Py_InitModule("miro.infolist.gtk", - InfoListGTKMethods); - if(!infolist_gtk_mod) { - return -1; - } - - infolist_mod = PyImport_ImportModule("miro.infolist"); - if(!infolist_mod) { - return -1; - } - if(PyModule_AddObject(infolist_mod, "gtk", infolist_gtk_mod) < 0) { - return -1; - } - return 0; -} - - -int -infolistplat_nodelist_created(InfoListNodeList* nodelist) -{ - MiroListStore* new_list_store; - new_list_store = miro_list_store_new(nodelist); - if(!new_list_store) { - PyErr_SetNone(PyExc_MemoryError); - return -1; - } - nodelist->plat_data = new_list_store; - return 0; -} - -int -infolistplat_nodelist_will_destroy(InfoListNodeList* nodelist) -{ - GObject* miro_list_store; - - miro_list_store = G_OBJECT(nodelist->plat_data); - g_object_unref(miro_list_store); - return 0; -} - -void -infolistplat_will_add_nodes(InfoListNodeList* nodelist) -{ - infolist_nodelist_calc_positions(nodelist); -} - -int -infolistplat_node_added(InfoListNodeList* nodelist, - InfoListNode* node) -{ - MiroListStore* miro_list_store; - GtkTreeIter iter; - int row; - - miro_list_store = MIRO_LIST_STORE(nodelist->plat_data); - iter.stamp = miro_list_store->stamp; - iter.user_data = node; - if(!infolist_node_is_sentinal(node->prev)) { - // we call infolist_nodelist_calc_positions() before we start - // inserting, then add nodes from the end of the list. We - // can calculate our position using the previous node. - row = node->prev->position + 1; - } else { - row = 0; - } - miro_list_store_set_path_row(miro_list_store, row); - gtk_tree_model_row_inserted(GTK_TREE_MODEL(miro_list_store), - miro_list_store->path, - &iter); - return 0; -} - -void -infolistplat_will_change_nodes(InfoListNodeList* nodelist) -{ - infolist_nodelist_calc_positions(nodelist); -} - -int -infolistplat_node_changed(InfoListNodeList* nodelist, - InfoListNode* node) -{ - MiroListStore* miro_list_store; - GtkTreeIter iter; - - miro_list_store = MIRO_LIST_STORE(nodelist->plat_data); - iter.stamp = miro_list_store->stamp; - iter.user_data = node; - // node->position should still be valid from the - // infolist_nodelist_calc_positions() call in - // infolistplat_will_change_nodes - miro_list_store_set_path_row(miro_list_store, node->position); - gtk_tree_model_row_changed(GTK_TREE_MODEL(miro_list_store), - miro_list_store->path, - &iter); - return 0; -} - -void -infolistplat_will_remove_nodes(InfoListNodeList* nodelist) -{ - infolist_nodelist_calc_positions(nodelist); -} - -int -infolistplat_node_removed(InfoListNodeList* nodelist, - InfoListNode* node) -{ - MiroListStore* miro_list_store; - - miro_list_store = MIRO_LIST_STORE(nodelist->plat_data); - // node->position should still be valid from the - // infolist_nodelist_calc_positions() call in - // infolistplat_will_change_nodes. 
Note that node->position will be - // invalid for all nodes after this one, but since we remove things in - // back-to-front order this doesn't matter - miro_list_store_set_path_row(miro_list_store, node->position); - gtk_tree_model_row_deleted(GTK_TREE_MODEL(miro_list_store), - miro_list_store->path); - return 0; -} - -void -infolistplat_will_reorder_nodes(InfoListNodeList* nodelist) -{ - infolist_nodelist_calc_positions(nodelist); -} - -int -infolistplat_nodes_reordered(InfoListNodeList* nodelist) -{ - MiroListStore* miro_list_store; - int* new_order; - int i; - InfoListNode* node; - GtkTreePath* path; - - if(nodelist->node_count == 0) return 0; - - miro_list_store = MIRO_LIST_STORE(nodelist->plat_data); - new_order = g_new(int, nodelist->node_count); - if(!new_order) { - PyErr_SetNone(PyExc_MemoryError); - return -1; - } - path = gtk_tree_path_new(); - node = infolist_nodelist_head(nodelist); - // node->position was set in infolistplat_will_reorder_nodes(), it - // contains the old position. - for(i = 0; i < nodelist->node_count; i++) { - new_order[i] = node->position; - node = node->next; - } - gtk_tree_model_rows_reordered(GTK_TREE_MODEL(miro_list_store), path, - NULL, new_order); - gtk_tree_path_free(path); - return 0; -} - -int -infolistplat_add_to_tableview(InfoListNodeList* nodelist, - PyObject* pyobject) -{ - GtkTreeView* treeview; - - if(!PyObject_TypeCheck(pyobject, - pygobject_lookup_class(GTK_TYPE_TREE_VIEW))) { - PyErr_SetString(PyExc_TypeError, - "param must be a gtk.TreeView"); - return -1; - } - - treeview = GTK_TREE_VIEW(pygobject_get(pyobject)); - gtk_tree_view_set_model(treeview, - GTK_TREE_MODEL(nodelist->plat_data)); - return 0; -} - -InfoListNode* -infolistplat_node_for_pos(InfoListNodeList* nodelist, - PyObject* pos) -{ - GtkTreeIter* iter; - MiroListStore* miro_list_store; - - if(!pyg_boxed_check(pos, GTK_TYPE_TREE_ITER)) { - PyErr_SetString(PyExc_TypeError, - "param must be a gtk.TreeIter"); - return NULL; - } - iter = pyg_boxed_get(pos, GtkTreeIter); - miro_list_store = (MiroListStore*)nodelist->plat_data; - if (iter->stamp != miro_list_store->stamp) { - PyErr_SetString(PyExc_ValueError, - "iter not from this nodelist"); - return NULL; - } - - return (InfoListNode*)iter->user_data; -} - -PyObject* -infolistplat_iter_for_node(InfoListNodeList* nodelist, - InfoListNode* node) -{ - GtkTreeIter iter; - MiroListStore* miro_list_store; - - miro_list_store = (MiroListStore*)nodelist->plat_data; - - iter.stamp = miro_list_store->stamp; - iter.user_data = node; - /* This call to pyg_boxed_new copies the iter, so it's okay that we're - * using a value from the stack */ - return pyg_boxed_new(GTK_TYPE_TREE_ITER, &iter, TRUE, TRUE); -} diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/gtk/infolist-gtk.h miro-6.0/lib/frontends/widgets/infolist/gtk/infolist-gtk.h --- miro-4.0.4/lib/frontends/widgets/infolist/gtk/infolist-gtk.h 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/gtk/infolist-gtk.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,31 +0,0 @@ - -#include -#include -#include "infolist-nodelist.h" - -/* boilerplate GObject defines. 
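 * (the standard GType cast and type-check macros for MiroListStore)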
*/ - -#define MIRO_TYPE_LIST_STORE (miro_list_store_get_type ()) -#define MIRO_LIST_STORE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), MIRO_TYPE_LIST_STORE, MiroListStore)) -#define MIRO_LIST_STORE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), MIRO_TYPE_LIST_STORE, MiroListStoreClass)) -#define MIRO_IS_LIST_STORE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), MIRO_TYPE_LIST_STORE)) -#define MIRO_IS_LIST_STORE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), MIRO_TYPE_LIST_STORE)) -#define MIRO_LIST_STORE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), MIRO_TYPE_LIST_STORE, MiroListStoreClass)) - -struct _MiroListStore -{ - GObject parent; - InfoListNodeList* nodelist; - gint stamp; - GtkTreePath* path; // for sending out in signals -}; - -struct _MiroListStoreClass -{ - GObjectClass parent_class; -}; - -typedef struct _MiroListStore MiroListStore; -typedef struct _MiroListStoreClass MiroListStoreClass; - -MiroListStore* miro_list_store_new(InfoListNodeList* nodelist); diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/infolist-nodelist.c miro-6.0/lib/frontends/widgets/infolist/infolist-nodelist.c --- miro-4.0.4/lib/frontends/widgets/infolist/infolist-nodelist.c 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/infolist-nodelist.c 1970-01-01 00:00:00.000000000 +0000 @@ -1,444 +0,0 @@ -/* -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. 
-*/ - -// infolist-nodelist.c -- implementation for InfoListNodeList - -#include "infolist-nodelist.h" -#include "Python.h" -#include "stdlib.h" - -#define CHECK_NOT_IN_LIST(node, error_rv) if(node->next || node->prev) { \ - PyErr_SetString(PyExc_ValueError, "node in list"); \ - return error_rv; } - -#define CHECK_IN_LIST(node, error_rv) if(!node->next || !node->prev) { \ - PyErr_SetString(PyExc_ValueError, "node not in list"); \ - return error_rv; } - -InfoListNode* -infolist_node_new(PyObject* id, - PyObject* info, - PyObject* sort_key) -{ - InfoListNode* node; - - node = PyMem_New(InfoListNode, 1); - if(!node) { - return (InfoListNode*)PyErr_NoMemory(); - } - Py_INCREF(id); - Py_INCREF(info); - Py_INCREF(sort_key); - node->id = id; - node->info = info; - node->sort_key = sort_key; - node->prev = node->next = NULL; - return node; -} - -int -infolist_node_free(InfoListNode* node) -{ - CHECK_NOT_IN_LIST(node, -1); - Py_DECREF(node->id); - Py_DECREF(node->info); - Py_DECREF(node->sort_key); - PyMem_Free(node); - return 0; -} - -static void -infolist_node_make_sentinals(InfoListNode* start, InfoListNode* end) -{ - start->id = end->id = NULL; - start->info = end->info = NULL; - start->sort_key = end->sort_key = NULL; - start->next = end->next = end; - end->prev = start->prev = start; -} - -int -infolist_node_is_sentinal(InfoListNode* node) -{ - return node->info == NULL; -} - -PyObject* -infolist_node_get_id(InfoListNode* node) -{ - Py_INCREF(node->id); - return node->id; -} - -PyObject* -infolist_node_get_info(InfoListNode* node) -{ - Py_INCREF(node->info); - return node->info; -} - -PyObject* -infolist_node_get_sort_key(InfoListNode* node) -{ - Py_INCREF(node->sort_key); - return node->sort_key; -} - -void -infolist_node_set_info(InfoListNode* node, PyObject* info) -{ - - Py_DECREF(node->info); - Py_INCREF(info); - node->info = info; -} - -void -infolist_node_set_sort_key(InfoListNode* node, PyObject* sort_key) -{ - Py_DECREF(node->sort_key); - Py_INCREF(sort_key); - node->sort_key = sort_key; -} - -static int cmp_failed; - -int -infolist_node_cmp(const InfoListNode* node1, - const InfoListNode* node2) -{ - int cmp_result; - - if(PyObject_Cmp(node1->sort_key, node2->sort_key, &cmp_result) == -1) { - cmp_failed = 1; - cmp_result = 0; - } - if(cmp_result == 0) { - // for a tiebreak, just compare the node pointers. This - // ensures that the order is completely defined and avoids - // issues like #16113 - return (node1 < node2) ? -1 : 1; - } - return cmp_result; -} - -static int -qsort_compare(const void* arg1, - const void* arg2) -{ - return infolist_node_cmp(*((InfoListNode**)arg1), - *((InfoListNode**)arg2)); -} - -static int -qsort_compare_reverse(const void* arg1, - const void* arg2) -{ - return infolist_node_cmp(*((InfoListNode**)arg2), - *((InfoListNode**)arg1)); -} - -int -infolist_node_sort(InfoListNode** node_array, int count) -{ - cmp_failed = 0; - qsort(node_array, count, sizeof(InfoListNode*), qsort_compare); - if(cmp_failed) { - // The exception should have been set when the comparison failed. - // return -1 should propagate it. - return -1; - } - return 0; -} - -int -infolist_node_sort_reversed(InfoListNode** node_array, int count) -{ - cmp_failed = 0; - qsort(node_array, count, sizeof(InfoListNode*), qsort_compare_reverse); - if(cmp_failed) { - // The exception should have been set when the comparison failed. - // return -1 should propagate it. 
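-        // (cmp_failed is a file-scope flag because a qsort() comparator has no direct way to report a failed PyObject_Cmp back to the caller.)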
- printf("CMP FAILED\n"); - return -1; - } - return 0; -} - -InfoListNodeList* -infolist_nodelist_new(void) -{ - InfoListNodeList* nodelist; - - nodelist = PyMem_New(InfoListNodeList, 1); - if(!nodelist) { - return (InfoListNodeList*)PyErr_NoMemory(); - } - nodelist->node_count = 0; - infolist_node_make_sentinals(&nodelist->sentinal_start, - &nodelist->sentinal_end); - nodelist->index_lookup = NULL; - nodelist->index_lookup_capacity = 0; - nodelist->index_lookup_dirty = 0; - nodelist->node_positions_dirty = 0; - nodelist->plat_data = nodelist->plat_data2 = nodelist->plat_data3 = NULL; - return nodelist; -} - - -void -infolist_nodelist_free(InfoListNodeList* nodelist) -{ - InfoListNode* node; - InfoListNode* next; - - node = infolist_nodelist_head(nodelist); - while(!infolist_node_is_sentinal(node)) { - next = node->next; - // we don't need all the bookkeeping in - // infolist_nodelist_remove(), just do enough so that - // infolist_node_free doesn't complain. - node->prev = node->next = NULL; - infolist_node_free(node); - node = next; - } - PyMem_Free(nodelist->index_lookup); - PyMem_Free(nodelist); -} - -int -infolist_nodelist_insert_before(InfoListNodeList* nodelist, - InfoListNode* pos, - InfoListNode* new_node) -{ - InfoListNode* old_prev; - - CHECK_IN_LIST(pos, -1); - CHECK_NOT_IN_LIST(new_node, -1); - if(pos->prev == pos) { - PyErr_SetString(PyExc_ValueError, - "can't insert before start sentinal"); - return -1; - } - - old_prev = pos->prev; - new_node->prev = old_prev; - new_node->next = pos; - pos->prev = new_node; - old_prev->next = new_node; - - nodelist->node_count++; - nodelist->index_lookup_dirty = 1; - nodelist->node_positions_dirty = 1; - return 0; -} - -int -infolist_nodelist_insert_after(InfoListNodeList* nodelist, - InfoListNode* pos, - InfoListNode* new_node) -{ - InfoListNode* old_next; - - CHECK_IN_LIST(pos, -1); - CHECK_NOT_IN_LIST(new_node, -1); - if(pos->next == pos) { - PyErr_SetString(PyExc_ValueError, - "can't insert after end sentinal"); - return -1; - } - old_next = pos->next; - new_node->prev = pos; - new_node->next = old_next; - pos->next = new_node; - old_next->prev = new_node; - - nodelist->node_count++; - nodelist->index_lookup_dirty = 1; - nodelist->node_positions_dirty = 1; - return 0; -} - -int -infolist_nodelist_remove(InfoListNodeList* nodelist, - InfoListNode* node) -{ - CHECK_IN_LIST(node, -1); - if(infolist_node_is_sentinal(node)) { - PyErr_SetString(PyExc_ValueError, "can't remove sentinal"); - return -1; - } - node->prev->next = node->next; - node->next->prev = node->prev; - node->prev = node->next = NULL; - - nodelist->node_count--; - nodelist->index_lookup_dirty = 1; - nodelist->node_positions_dirty = 1; - return 0; -} - -static int -infolist_nodelist_ensure_index_lookup_capacity(InfoListNodeList* nodelist) -{ - InfoListNode** new_index_lookup; - int new_capacity; - if(nodelist->index_lookup_capacity >= nodelist->node_count) return 0; - - new_capacity = nodelist->node_count * 2; - new_index_lookup = PyMem_Resize(nodelist->index_lookup, - InfoListNode*, - new_capacity); - if(!new_index_lookup) { - PyErr_SetNone(PyExc_MemoryError); - return -1; - } - nodelist->index_lookup = new_index_lookup; - nodelist->index_lookup_capacity = new_capacity; - return 0; -} - -static int -infolist_nodelist_ensure_index_lookup(InfoListNodeList* nodelist) -{ - int i; - InfoListNode* node; - - if(!nodelist->index_lookup_dirty) return 0; - if(infolist_nodelist_ensure_index_lookup_capacity(nodelist) == -1) - return -1; - node = infolist_nodelist_head(nodelist); - for(i 
= 0; i < nodelist->node_count; i++) { - nodelist->index_lookup[i] = node; - node = node->next; - } - return 0; -} - -InfoListNode* -infolist_nodelist_head(InfoListNodeList* nodelist) -{ - return nodelist->sentinal_start.next; -} - -InfoListNode* -infolist_nodelist_tail(InfoListNodeList* nodelist) -{ - return nodelist->sentinal_end.prev; -} - -InfoListNode* -infolist_nodelist_nth_node(InfoListNodeList* nodelist, - int n) -{ - if(n < 0 || n >= nodelist->node_count) { - PyErr_SetString(PyExc_ValueError, "index out of range"); - return NULL; - } - // special-case this one - if(n == 0) return infolist_nodelist_head(nodelist); - if(infolist_nodelist_ensure_index_lookup(nodelist) == -1) return NULL; - return nodelist->index_lookup[n]; -} - -int -infolist_nodelist_node_index(InfoListNodeList* nodelist, - InfoListNode* node) -{ - CHECK_IN_LIST(node, -1); - - infolist_nodelist_calc_positions(nodelist); - return node->position; -} - -int -infolist_nodelist_calc_positions(InfoListNodeList* nodelist) -{ - InfoListNode* node; - int i; - - if(!nodelist->node_positions_dirty) return 0; - node = infolist_nodelist_head(nodelist); - for(i = 0; i < nodelist->node_count; i++) { - node->position = i; - node = node->next; - } - nodelist->node_positions_dirty = 0; - return 0; -} - -int -infolist_nodelist_check_nodes(InfoListNodeList* nodelist) -{ - int i, count; - InfoListNode* node; - - count = 0; - - node = &nodelist->sentinal_start; - if(node->prev != node) { - PyErr_SetString(PyExc_AssertionError, - "start sentinal prev wrong"); - return -1; - } - while(node != &nodelist->sentinal_end) { - if(node->next->prev != node) { - PyErr_SetString(PyExc_AssertionError, - "node->next->prev != node"); - return -1; - } - node = node->next; - count++; - } - if(node->next != node) { - PyErr_SetString(PyExc_AssertionError, - "end sentinal next wrong"); - return -1; - } - - // count includes the start sentinal, so subtract 1 - if(count -1 != nodelist->node_count) { - PyErr_SetString(PyExc_AssertionError, "node_count wrong"); - return -1; - } - - infolist_nodelist_ensure_index_lookup(nodelist); - node = infolist_nodelist_head(nodelist); - for(i = 0; i < nodelist->node_count; i++) { - if(nodelist->index_lookup[i] != node) { - PyErr_SetString(PyExc_AssertionError, - "index_lookup wrong"); - return -1; - } - node = node->next; - } - return 0; -} diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/infolist-nodelist.h miro-6.0/lib/frontends/widgets/infolist/infolist-nodelist.h --- miro-4.0.4/lib/frontends/widgets/infolist/infolist-nodelist.h 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/infolist-nodelist.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,189 +0,0 @@ -/* -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. -*/ - -// infolist-nodelist.h -- Basic data structures for InfoList -// -// InfoListNode stores the data for 1 row our table. This includes ItemInfo -// objects, as well as a sort key, and a dict to store attributes. -// -// InfoListNodeList is basically a simple linked list of InfoListNodes. -// However, InfoListNodeList has some features to be able to lookup rows by -// their index, and calculate the index of each row. These operations are -// O(N), but then O(1) until a node is inserted/removed. Since we don't do -// that all that often, this works well in practice. -// -// Error handling: -// -// The general convention is to return NULL or -1 and set a python error -// on failure for any of these methods. - -#ifndef __INFOLIST_DATA_H__ -#define __INFOLIST_DATA_H__ - -#ifdef __cplusplus -extern "C" { -#endif - -#include - -struct InfoListNodeStruct -{ - PyObject* id; - PyObject* info; - PyObject* sort_key; - struct InfoListNodeStruct *next; - struct InfoListNodeStruct *prev; - // Call infolist_nodelist_calc_positions before using position - unsigned int position; -}; -typedef struct InfoListNodeStruct InfoListNode; - -// Create a new InfoListNode, we ADDREF all the python objects -InfoListNode* -infolist_node_new(PyObject* id, - PyObject* info, - PyObject* sort_key); - -// Free a InfoListNode and DECREF the python objects -int -infolist_node_free(InfoListNode* node); - -// Check for a sentinal node. sentinals are before/after the last node with -// valid data. -int -infolist_node_is_sentinal(InfoListNode* node); - -// get/set python objects for a node. 
Reference counting is as usual for -// python (nodes hold a reference to the objects inside them, return values -// get ADDREFed) -PyObject* -infolist_node_get_id(InfoListNode* node); - -PyObject* -infolist_node_get_info(InfoListNode* node); - -PyObject* -infolist_node_get_sort_key(InfoListNode* node); - -void -infolist_node_set_info(InfoListNode* node, - PyObject* info); - -void -infolist_node_set_sort_key(InfoListNode* node, - PyObject* sort_key); - -// Sort an array of nodes by their sort key -int -infolist_node_cmp(const InfoListNode* node1, - const InfoListNode* node2); -int -infolist_node_sort(InfoListNode** node_array, - int count); -int -infolist_node_sort_reversed(InfoListNode** node_array, - int count); - -struct InfoListNodeListStruct -{ - // Basic list functionality - int node_count; - InfoListNode sentinal_start, sentinal_end; - // Handle index lookup - InfoListNode** index_lookup; - int index_lookup_capacity; - int index_lookup_dirty; - // Handle node positions - int node_positions_dirty; - // Place to store Platform-specific stuff - void* plat_data; - void* plat_data2; - void* plat_data3; -}; -typedef struct InfoListNodeListStruct InfoListNodeList; - -// Make a new list -InfoListNodeList* -infolist_nodelist_new(void); - -// Delete a list and free all of it's resources -void -infolist_nodelist_free(InfoListNodeList* nodelist); - -// Insert a new node before pos -int -infolist_nodelist_insert_before(InfoListNodeList* nodelist, - InfoListNode* pos, - InfoListNode* new_node); - -// Insert a new node after pos -int -infolist_nodelist_insert_after(InfoListNodeList* nodelist, - InfoListNode* pos, - InfoListNode* new_node); - -// Remove a node from the list. WARNING: Does not free it! -int -infolist_nodelist_remove(InfoListNodeList* nodelist, - InfoListNode* node); - -// Get the first node in the list, return a sentinal if the list is empty -InfoListNode* -infolist_nodelist_head(InfoListNodeList* nodelist); - -// Get the last node in the list, return a sentinal if the list is empty -InfoListNode* -infolist_nodelist_tail(InfoListNodeList* nodelist); - -// Get the nth node in the list -InfoListNode* -infolist_nodelist_nth_node(InfoListNodeList* nodelist, - int n); - -// Find the index of a node -int -infolist_nodelist_node_index(InfoListNodeList* nodelist, - InfoListNode* node); - -// Calculate node positions -// Set the position attribute for each node with it's current index -int -infolist_nodelist_calc_positions(InfoListNodeList* nodelist); - -// Debugging function, check that the linked list makes sense -int -infolist_nodelist_check_nodes(InfoListNodeList* nodelist); - -#ifdef __cplusplus -} // extern "C" -#endif - -#endif // __INFOLIST_DATA_H__ diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/infolist-platform.h miro-6.0/lib/frontends/widgets/infolist/infolist-platform.h --- miro-4.0.4/lib/frontends/widgets/infolist/infolist-platform.h 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/infolist-platform.h 1970-01-01 00:00:00.000000000 +0000 @@ -1,105 +0,0 @@ -/* -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. -*/ - -// infolist-platform.h -- platform-specific functions -// -// This header defines functions that platforms use to integrate InfoList with -// the GUI framework. Each platform should have a C file (or multiple files) -// that define functions for each of these hooks. -// -// All functions should return 0 on success and -1 on failure (and set the -// python exception) - -#ifndef INFOLIST_PLAT_H -#define INFOLIST_PLAT_H - -#include - -int -infolistplat_init(void); - -int -infolistplat_nodelist_created(InfoListNodeList* nodelist); - -int -infolistplat_nodelist_will_destroy(InfoListNodeList* nodelist); - - -// Called before adding nodes to the list -void -infolistplat_will_add_nodes(InfoListNodeList* nodelist); - -// Called for every node added to the list. They will be added from back to -// front. -int -infolistplat_node_added(InfoListNodeList* nodelist, - InfoListNode* node); - -// Called before making updates to the list -void -infolistplat_will_change_nodes(InfoListNodeList* nodelist); - -// Called for each node changed -int -infolistplat_node_changed(InfoListNodeList* nodelist, - InfoListNode* node); - -// Called before removing nodes from the list -void -infolistplat_will_remove_nodes(InfoListNodeList* nodelist); - -// Called for each node removed from the list. Rows will be removed back to -// front.
-int -infolistplat_node_removed(InfoListNodeList* nodelist, - InfoListNode* node); - -// Called before reordering nodes -void -infolistplat_will_reorder_nodes(InfoListNodeList* nodelist); - -// Called after reordering nodes -int -infolistplat_nodes_reordered(InfoListNodeList* nodelist); - -int -infolistplat_add_to_tableview(InfoListNodeList* nodelist, - PyObject* tableview); - -InfoListNode* -infolistplat_node_for_pos(InfoListNodeList* nodelist, - PyObject* pos); - -PyObject* -infolistplat_iter_for_node(InfoListNodeList* nodelist, - InfoListNode* node); - -#endif /* INFOLIST_PLAT_H */ diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/infolist.pyx miro-6.0/lib/frontends/widgets/infolist/infolist.pyx --- miro-4.0.4/lib/frontends/widgets/infolist/infolist.pyx 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/infolist/infolist.pyx 1970-01-01 00:00:00.000000000 +0000 @@ -1,607 +0,0 @@ -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. - -# infolistmodule.pyx -- Pyrex module definition - -ctypedef unsigned char boolean - -cdef extern from "stdlib.h": - ctypedef unsigned long size_t - void qsort(void *base, size_t nmemb, size_t size, - int(*compar)(void *, void *)) - -cdef extern from "Python.h": - ctypedef struct PyObject - - PyObject* PyExc_KeyError - - void* PyMem_Malloc(size_t n) except NULL - void PyMem_Free(void *p) - object PyCObject_FromVoidPtr(void* cobj, void (*destr)(void *)) - void* PyCObject_AsVoidPtr(object self) - -cdef extern from "infolist-nodelist.h": - ctypedef struct InfoListNode - ctypedef struct InfoListNode: - InfoListNode *next - InfoListNode *prev - # would be nice to list the python objects here, but Pyrex doesn't - # support it well. Use the infolist_node_get_* methods for - # access. 
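-        # (the getters return new references -- infolist-nodelist.h notes that return values get ADDREFed)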
- - InfoListNode* infolist_node_new(object id, object info, - object sort_key) except NULL - int infolist_node_free(InfoListNode* node) except -1 - int infolist_node_is_sentinal(InfoListNode* node) except -1 - object infolist_node_get_id(InfoListNode* node) - object infolist_node_get_info(InfoListNode* node) - object infolist_node_get_sort_key(InfoListNode* node) - void infolist_node_set_info(InfoListNode* node, object info) - void infolist_node_set_sort_key(InfoListNode* node, object sort_key) - int infolist_node_cmp(InfoListNode* node1, InfoListNode* node2) - int infolist_node_sort(InfoListNode** node_array, int count) except -1 - int infolist_node_sort_reversed(InfoListNode** node_array, - int count) except -1 - - ctypedef struct InfoListNodeList: - int node_count - - InfoListNodeList* infolist_nodelist_new() except NULL - void infolist_nodelist_free(InfoListNodeList* nodelist) - InfoListNode* infolist_nodelist_head( - InfoListNodeList* nodelist) except NULL - InfoListNode* infolist_nodelist_tail( - InfoListNodeList* nodelist) except NULL - int infolist_nodelist_insert_before(InfoListNodeList* nodelist, - InfoListNode* node, InfoListNode* new_node) except -1 - int infolist_nodelist_insert_after(InfoListNodeList* nodelist, - InfoListNode* node, InfoListNode* new_node) except -1 - int infolist_nodelist_remove(InfoListNodeList* nodelist, - InfoListNode* node) except -1 - int infolist_nodelist_node_index(InfoListNodeList* nodelist, - InfoListNode* node) except -1 - InfoListNode* infolist_nodelist_nth_node(InfoListNodeList* nodelist, - int n) except NULL - int infolist_nodelist_check_nodes(InfoListNodeList* nodelist) except -1 - -cdef extern from "infolist-platform.h": - int infolistplat_init() except -1 - int infolistplat_nodelist_created(InfoListNodeList* nodelist) except -1 - int infolistplat_nodelist_will_destroy( - InfoListNodeList* nodelist) except -1 - void infolistplat_will_add_nodes(InfoListNodeList* nodelist) - int infolistplat_node_added(InfoListNodeList* nodelist, - InfoListNode* node) except -1 - void infolistplat_will_change_nodes(InfoListNodeList* nodelist) - int infolistplat_node_changed(InfoListNodeList* nodelist, - InfoListNode* node) except -1 - void infolistplat_will_remove_nodes(InfoListNodeList* nodelist) - int infolistplat_node_removed(InfoListNodeList* nodelist, - InfoListNode* node) except -1 - void infolistplat_will_reorder_nodes(InfoListNodeList* nodelist) - int infolistplat_nodes_reordered(InfoListNodeList* nodelist) except -1 - int infolistplat_add_to_tableview(InfoListNodeList* nodelist, - object tableview) except -1 - InfoListNode* infolistplat_node_for_pos(InfoListNodeList* nodelist, - object pos) except NULL - object infolistplat_iter_for_node(InfoListNodeList* nodelist, - InfoListNode* node) - -cdef class InfoListAttributeStore: - """Stores the attributes for an InfoList - - For most rows, the attributes will be empty. As an optimization, we share - a single dictionary between all of those. 
-    """
-    cdef dict attr_dict_map # maps id -> attr_dicts
-    cdef dict empty_dict # shared empty dict
-
-    def __init__(self, *args, **kwargs):
-        self.attr_dict_map = {}
-        self.empty_dict = {}
-
-    def get_attr(self, id_, name):
-        if id_ not in self.attr_dict_map:
-            raise KeyError(name)
-        else:
-            return self.attr_dict_map[id_][name]
-
-    def set_attr(self, id_, name, value):
-        if id_ not in self.attr_dict_map:
-            self.attr_dict_map[id_] = {name: value}
-        else:
-            self.attr_dict_map[id_][name] = value
-
-    def unset_attr(self, id_, name):
-        if (id_ in self.attr_dict_map
-                and name in self.attr_dict_map[id_]):
-            del self.attr_dict_map[id_][name]
-
-    def get_attr_dict(self, id_):
-        if id_ not in self.attr_dict_map:
-            return self.empty_dict
-        else:
-            return self.attr_dict_map[id_]
-
-    def del_attr_dict(self, id_):
-        if id_ in self.attr_dict_map:
-            del self.attr_dict_map[id_]
-
-cdef InfoListNode* insert_node_before(InfoListNodeList* nodelist,
-        InfoListNode* node, InfoListNode* pos, int reverse) except NULL:
-    cdef int cmp_result
-    # Insert a node in the correct position in nodelist by searching backwards
-    # from pos.  Returns the position just before node gets inserted, which
-    # can be used for future calls
-    while not infolist_node_is_sentinal(pos):
-        cmp_result = infolist_node_cmp(node, pos)
-        if reverse:
-            cmp_result *= -1
-        if cmp_result < 0:
-            pos = pos.prev
-        else:
-            break
-    infolist_nodelist_insert_after(nodelist, pos, node)
-    return pos
-
-cdef int update_sort_key(InfoListNode* node, object new_sort_key, int reverse):
-    # Update node's sort key, then return TRUE if the node is now out of place
-    # in the list.
-    cdef int cmp_result
-    cdef object old_sort_key
-
-    old_sort_key = infolist_node_get_sort_key(node)
-    infolist_node_set_sort_key(node, new_sort_key)
-    if old_sort_key == new_sort_key: # sort key didn't change
-        return 0
-    if not infolist_node_is_sentinal(node.next):
-        cmp_result = infolist_node_cmp(node, node.next)
-        if (not reverse and cmp_result > 0) or (reverse and cmp_result < 0):
-            return 1
-    if not infolist_node_is_sentinal(node.prev):
-        cmp_result = infolist_node_cmp(node, node.prev)
-        if (not reverse and cmp_result < 0) or (reverse and cmp_result > 0):
-            return 1
-    return 0
-
-cdef enum SortMode:
-    INFOLIST_SORT_NORMAL = 0
-    INFOLIST_SORT_REVERSED = 1
-
-cdef class InfoList:
-    """InfoList -- TableModel for ItemInfo and similar objects
-
-    InfoList is a highly optimized TableModel for item lists.  It also has
-    some nice features for handling item lists.
-      - can quickly look up an info by its id attribute.
-      - automatically keeps the list in sorted order
-      - can store arbitrary attributes for each item.  This can help to
-        implement animations and other UI goodies.
-
-    There's nothing in the code that ties InfoList to ItemInfo objects; it
-    supports any python object that has an id attribute.
-    """
-
-    cdef InfoListNodeList* nodelist
-    cdef dict id_map # maps ids -> CObjects that point to nodes
-    cdef object sort_key_func
-    cdef int sort_mode
-    cdef InfoListAttributeStore attributes
-
-    def __cinit__(self, *args, **kwargs):
-        # __cinit__ should allocate any C resources
-        self.nodelist = infolist_nodelist_new()
-        infolistplat_nodelist_created(self.nodelist)
-        self.id_map = {}
-
-    def __dealloc__(self):
-        # __dealloc__ should free any C resources
-        infolistplat_nodelist_will_destroy(self.nodelist)
-        infolist_nodelist_free(self.nodelist)
-
-    def __init__(self, sort_key_func, reverse=False):
-        """Create an InfoList.
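# (Illustrative sketch, not from the source: constructing an InfoList only
# needs a callable that maps an info to its sort key; reverse=True flips
# the order, as _set_sort() above implements.  The title/release_date
# attributes here are invented:
#
#     by_title = InfoList(sort_key_func=lambda info: info.title)
#     newest_first = InfoList(sort_key_func=lambda info: info.release_date,
#                             reverse=True)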
-
-        :param sort_key_func: function that inputs an info and outputs a key
-                              to sort with
-        :param reverse: Should we sort in reverse order?
-        """
-        self._set_sort(sort_key_func, reverse)
-        self.attributes = InfoListAttributeStore()
-
-    cdef int _set_sort(self, object sort_key_func, object reverse) except -1:
-        if sort_key_func is None:
-            raise ValueError("sort_key_func can't be None")
-        self.sort_key_func = sort_key_func
-        if reverse:
-            self.sort_mode = INFOLIST_SORT_REVERSED
-        else:
-            self.sort_mode = INFOLIST_SORT_NORMAL
-        return 0
-
-    cdef int sort_nodes(self, InfoListNode** nodes, int count) except -1:
-        if self.sort_mode == INFOLIST_SORT_NORMAL:
-            infolist_node_sort(nodes, count)
-        elif self.sort_mode == INFOLIST_SORT_REVERSED:
-            infolist_node_sort_reversed(nodes, count)
-
-    cdef int sort_nodes_reversed(self, InfoListNode** nodes,
-            int count) except -1:
-        if self.sort_mode == INFOLIST_SORT_NORMAL:
-            infolist_node_sort_reversed(nodes, count)
-        elif self.sort_mode == INFOLIST_SORT_REVERSED:
-            infolist_node_sort(nodes, count)
-
-    cdef InfoListNode* _fetch_node(self, object id) except NULL:
-        cdef object cobject
-
-        cobject = self.id_map[id]
-        return PyCObject_AsVoidPtr(cobject)
-
-    def add_infos(self, new_infos):
-        """Add a list of objects into the list.
-
-        If we have a sort, they will be inserted in sorted order.  If not,
-        they will be inserted at the end of the list.
-
-        If any info is already in the list, then a ValueError will be thrown
-        and no changes will be made.
-
-        :param new_infos: an iterable with the infos
-        """
-        cdef InfoListNode* pos
-        cdef InfoListNode* new_node
-        cdef InfoListNode** node_array
-        cdef int i
-        cdef int count
-        cdef int reverse_sort
-        cdef int infos_created
-        cdef int infos_added
-        cdef object info, sort_key
-
-        infos_created = infos_added = 0
-        count = len(new_infos)
-        node_array = PyMem_Malloc(
-                sizeof(InfoListNode*) * count)
-        try:
-            # prepare the insert
-            for 0 <= i < count:
-                info = new_infos[i]
-                if info.id in self.id_map:
-                    raise ValueError("Info with id %s already in list" %
-                            info.id)
-                sort_key = self.sort_key_func(info)
-                node_array[i] = infolist_node_new(info.id, info, sort_key)
-                infos_created += 1
-            # insert nodes in reversed order; this makes calculating rows
-            # simpler in the GTK code
-            infolistplat_will_add_nodes(self.nodelist)
-            pos = infolist_nodelist_tail(self.nodelist)
-            self.sort_nodes_reversed(node_array, count)
-            reverse_sort = (self.sort_mode == INFOLIST_SORT_REVERSED)
-            for 0 <= i < count:
-                new_node = node_array[i]
-                pos = insert_node_before(self.nodelist, new_node, pos,
-                        reverse_sort)
-                infos_added += 1
-                cobj = PyCObject_FromVoidPtr(new_node, NULL)
-                self.id_map[infolist_node_get_id(new_node)] = cobj
-                infolistplat_node_added(self.nodelist, new_node)
-        finally:
-            if infos_added < infos_created:
-                for infos_added <= i < infos_created:
-                    infolist_node_free(node_array[i])
-            PyMem_Free(node_array)
-
-    def update_infos(self, infos, resort):
-        """Update a list of objects
-
-        If any of the infos are not already in the list, a KeyError will be
-        thrown and no changes will be made.
-
-        :param infos: list of infos to update
-        :param resort: should the list be resorted?
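# (Usage sketch, illustrative only -- FakeInfo is invented here; any object
# with an ``id`` attribute works:
#
#     class FakeInfo(object):
#         def __init__(self, id_, title):
#             self.id, self.title = id_, title
#
#     infolist = InfoList(sort_key_func=lambda info: info.title)
#     infolist.add_infos([FakeInfo(1, u'beta'), FakeInfo(2, u'alpha')])
#     [i.title for i in infolist.info_list()]    # [u'alpha', u'beta']
#     infolist.add_infos([FakeInfo(1, u'dup')])  # raises ValueError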
- """ - - cdef InfoListNode** node_array # stores the nodes we will update - cdef InfoListNode* pos - cdef InfoListNode* node - cdef int count, move_count, reverse - cdef object sort_key - - node_array = NULL - count = len(infos) - node_array = PyMem_Malloc( - sizeof(InfoListNode*) * count) - try: - # fetch first, in case of key error - for 0 <= i < count: - node_array[i] = self._fetch_node(infos[i].id) - infolistplat_will_change_nodes(self.nodelist) - for 0 <= i < count: - node = node_array[i] - infolist_node_set_info(node, infos[i]) - infolistplat_node_changed(self.nodelist, node) - if not resort: - return - if self.sort_mode == INFOLIST_SORT_NORMAL: - reverse = 0 - elif self.sort_mode == INFOLIST_SORT_REVERSED: - reverse = 1 - - # update sort keys and figure out which nodes actually need to - # move. - move_count = 0 - for 0 <= i < count: - node = node_array[i] - sort_key = self.sort_key_func(infolist_node_get_info(node)) - if update_sort_key(node, sort_key, reverse): - node_array[move_count] = node - move_count += 1 - if move_count == 0: - return - # remove infos, sort them, then re-enter them - infolistplat_will_reorder_nodes(self.nodelist) - for 0 <= i < move_count: - infolist_nodelist_remove(self.nodelist, node_array[i]) - self.sort_nodes_reversed(node_array, move_count) - pos = infolist_nodelist_tail(self.nodelist) - for 0 <= i < move_count: - pos = insert_node_before(self.nodelist, node_array[i], pos, - reverse) - infolistplat_nodes_reordered(self.nodelist) - finally: - PyMem_Free(node_array) - - def remove_ids(self, id_list): - """Remove objects from the list. - - If any id is not in the list, then a KeyError will be thrown and no - changes will be made. - - :param id_list: list of ids to remove - """ - - cdef InfoListNode** to_remove - cdef InfoListNode* node - cdef int i, count - - count = len(id_list) - to_remove = PyMem_Malloc(sizeof(InfoListNode*) * count) - try: - # fetch all nodes first in case of KeyError - for 0 <= i < count: - to_remove[i] = self._fetch_node(id_list[i]) - infolistplat_will_remove_nodes(self.nodelist) - # order nodes last-to-first so that we call - # infolistplat_node_removed in that order - self.sort_nodes_reversed(to_remove, count) - for 0 <= i < count: - node = to_remove[i] - infolist_nodelist_remove(self.nodelist, node) - del self.id_map[infolist_node_get_id(node)] - self.attributes.del_attr_dict(infolist_node_get_info(node).id) - infolistplat_node_removed(self.nodelist, node) - infolist_node_free(node) - finally: - PyMem_Free(to_remove) - - def remove_all(self): - """Remove all data from the InfoList.""" - cdef InfoListNode* node - cdef InfoListNode* prev_node - infolistplat_will_remove_nodes(self.nodelist) - # remove last-to-first so that we call - # infolistplat_node_removed in that order - node = infolist_nodelist_tail(self.nodelist) - while not infolist_node_is_sentinal(node): - prev_node = node.prev - infolist_nodelist_remove(self.nodelist, node) - infolistplat_node_removed(self.nodelist, node) - infolist_node_free(node) - node = prev_node - self.attributes = InfoListAttributeStore() - self.id_map = {} - - def set_attr(self, id_, name, value): - self.attributes.set_attr(id_, name, value) - self._send_node_changed(id_) - - def unset_attr(self, id_, name): - self.attributes.unset_attr(id_, name) - self._send_node_changed(id_) - - def _send_node_changed(self, id_): - cdef InfoListNode* node - - infolistplat_will_change_nodes(self.nodelist) - infolistplat_node_changed(self.nodelist, self._fetch_node(id_)) - - def get_attr(self, id_, name): - return 
self.attributes.get_attr(id_, name)
-
-    def get_info(self, id_):
-        return infolist_node_get_info(self._fetch_node(id_))
-
-    def get_first_info(self):
-        cdef InfoListNode* node
-
-        node = infolist_nodelist_head(self.nodelist)
-        if infolist_node_is_sentinal(node):
-            return None
-        else:
-            return infolist_node_get_info(node)
-
-    def get_last_info(self):
-        cdef InfoListNode* node
-
-        node = infolist_nodelist_tail(self.nodelist)
-        if infolist_node_is_sentinal(node):
-            return None
-        else:
-            return infolist_node_get_info(node)
-
-    def index_of_id(self, id_):
-        return infolist_nodelist_node_index(self.nodelist,
-                self._fetch_node(id_))
-
-    def get_next_info(self, id_):
-        cdef InfoListNode* node
-
-        node = self._fetch_node(id_).next
-        if infolist_node_is_sentinal(node):
-            return None
-        else:
-            return infolist_node_get_info(node)
-
-    def get_prev_info(self, id_):
-        cdef InfoListNode* node
-
-        node = self._fetch_node(id_).prev
-        if infolist_node_is_sentinal(node):
-            return None
-        else:
-            return infolist_node_get_info(node)
-
-    def get_sort_key(self, id_):
-        return infolist_node_get_sort_key(self._fetch_node(id_))
-
-    def change_sort(self, sort_key_func, reverse=False):
-        cdef InfoListNode** nodes
-        cdef InfoListNode* node
-        cdef InfoListNode* next_node
-        cdef int i
-        cdef int node_count
-        cdef object info
-
-
-        self._set_sort(sort_key_func, reverse)
-        node_count = self.nodelist.node_count
-        nodes = PyMem_Malloc(
-                sizeof(InfoListNode*) * node_count)
-        try:
-            # remove infos, sort them, then re-enter them
-            infolistplat_will_reorder_nodes(self.nodelist)
-            node = infolist_nodelist_head(self.nodelist)
-            for 0 <= i < node_count:
-                nodes[i] = node
-                next_node = node.next
-                info = infolist_node_get_info(node)
-                infolist_node_set_sort_key(node, sort_key_func(info))
-                infolist_nodelist_remove(self.nodelist, node)
-                node = next_node
-            self.sort_nodes_reversed(nodes, node_count)
-            node = infolist_nodelist_tail(self.nodelist)
-            for 0 <= i < node_count:
-                infolist_nodelist_insert_after(self.nodelist, node, nodes[i])
-            infolistplat_nodes_reordered(self.nodelist)
-        finally:
-            PyMem_Free(nodes)
-
-    def __len__(self):
-        return self.nodelist.node_count
-
-    def info_list(self):
-        """Get all objects, in order, in a python list """
-        cdef list rv
-        cdef InfoListNode* current_node
-
-        current_node = infolist_nodelist_head(self.nodelist)
-        rv = []
-        while not infolist_node_is_sentinal(current_node):
-            rv.append(infolist_node_get_info(current_node))
-            current_node = current_node.next
-        return rv
-
-    def add_to_tableview(self, tableview):
-        """Add this infolist to a TableView object."""
-        infolistplat_add_to_tableview(self.nodelist, tableview)
-
-    def row_for_iter(self, pos):
-        """Get an (info, attr_dict) tuple for a row in this list.
-
-        pos is platform-specific; on gtk it's a gtk.TreeIter object.
-
-        :param pos: position in the list
-        """
-        cdef InfoListNode* node
-        cdef object info
-
-        node = infolistplat_node_for_pos(self.nodelist, pos)
-        info = infolist_node_get_info(node)
-        return (info, self.attributes.get_attr_dict(info.id))
-
-    def iter_for_id(self, id_):
-        """Get a TableModel iterator for an info in the list
-
-        Iterators are platform-specific objects that refer to a position in
-        the table.  "Iterator" is a bit of a misnomer, since they are only
-        used for positions, not actually iterating through the list.
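# (Illustrative walk of the list using the id-keyed accessors above;
# handle_info is an invented callback:
#
#     info = infolist.get_first_info()
#     while info is not None:
#         handle_info(info)
#         info = infolist.get_next_info(info.id)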
-
-        :param id_: id of the info that we care about
-        :returns: Platform-specific TableModel iterator
-
-        """
-
-        cdef InfoListNode* node
-
-        node = self._fetch_node(id_)
-        return infolistplat_iter_for_node(self.nodelist, node)
-
-    def nth_row(self, index):
-        cdef InfoListNode* node
-        cdef object info
-
-        node = infolist_nodelist_nth_node(self.nodelist, index)
-        info = infolist_node_get_info(node)
-        return (info, self.attributes.get_attr_dict(info.id))
-
-    def __getitem__(self, pos):
-        return self.row_for_iter(pos)
-
-    def _sanity_check(self):
-        """Debugging function that tests if the list structure is sane."""
-        infolist_nodelist_check_nodes(self.nodelist)
-        info_list = self.info_list()
-        for info in info_list:
-            if info is not self.get_info(info.id):
-                raise AssertionError("id_map for %s is wrong" % info.id)
-
-        for i in xrange(len(info_list) - 1):
-            if (self.sort_mode == INFOLIST_SORT_NORMAL and
-                    (self.get_sort_key(info_list[i].id) >
-                        self.get_sort_key(info_list[i+1].id))):
-                raise AssertionError("infos out of order")
-            elif (self.sort_mode == INFOLIST_SORT_REVERSED and
-                    (self.get_sort_key(info_list[i].id) <
-                        self.get_sort_key(info_list[i+1].id))):
-                raise AssertionError("infos out of order")
-
-# module-level initialization
-infolistplat_init()
diff -Nru miro-4.0.4/lib/frontends/widgets/infolist/README.txt miro-6.0/lib/frontends/widgets/infolist/README.txt
--- miro-4.0.4/lib/frontends/widgets/infolist/README.txt	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/infolist/README.txt	1970-01-01 00:00:00.000000000 +0000
@@ -1,22 +0,0 @@
-InfoList is a fast, low-level table model that can be used with the
-widgets.TableView class for sorted lists of ItemInfo objects (and in theory
-any other info-like object).
-
-InfoList has a few features to make item lists quick and easy:
- - can quickly look up a row by its id attribute.  Not having to track iters
-   in python is both convenient and fast.
- - can look up a row by its position (this is O(N) sometimes, but is fast
-   enough in practice)
- - keeps the list in sorted order
- - stores arbitrary attributes for each item
-
-InfoList glues together code in many different (C-like) languages.
-
-Here are the other components:
-
- - infolist.pyx -- python module (pyrex)
- - infolist-nodelist.c -- InfoList data structures (C)
- - infolist-idmap.cpp -- simple hash table (C++)
- - infolist-gtk.c, infolist-cocoa.m -- each platform adds another file to
-   implement platform-specific parts.  The hooks are defined in
-   infolist-platform.h (C/Objective-C)
diff -Nru miro-4.0.4/lib/frontends/widgets/infoupdater.py miro-6.0/lib/frontends/widgets/infoupdater.py
--- miro-4.0.4/lib/frontends/widgets/infoupdater.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/infoupdater.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,73 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""``miro.infoupdater`` -- The infoupdater module holds: + +* :class:`InfoUpdater` -- Track channel/feed/playlist updates from the +backend. +""" +from miro import signals + +class InfoUpdater(signals.SignalEmitter): + """Track channel/feed/playlist updates from the backend. + + Signals: + + * feeds-added (self, info_list) -- New feeds were added + * feeds-changed (self, info_list) -- Feeds were changed + * feeds-removed (self, info_list) -- Feeds were removed + * sites-added (self, info_list) -- New sites were added + * sites-changed (self, info_list) -- Sites were changed + * sites-removed (self, info_list) -- Sites were removed + * playlists-added (self, info_list) -- New playlists were added + * playlists-changed (self, info_list) -- Playlists were changed + * playlists-removed (self, info_list) -- Playlists were removed + """ + def __init__(self): + signals.SignalEmitter.__init__(self) + for prefix in ('feeds', 'sites', 'playlists'): + self.create_signal('%s-added' % prefix) + self.create_signal('%s-changed' % prefix) + self.create_signal('%s-removed' % prefix) + + def handle_tabs_changed(self, message): + if message.type == 'feed': + signal_start = 'feeds' + elif message.type == 'site': + signal_start = 'sites' + elif message.type == 'playlist': + signal_start = 'playlists' + else: + return + if message.added: + self.emit('%s-added' % signal_start, message.added) + if message.changed: + self.emit('%s-changed' % signal_start, message.changed) + if message.removed: + self.emit('%s-removed' % signal_start, message.removed) diff -Nru miro-4.0.4/lib/frontends/widgets/itemcontextmenu.py miro-6.0/lib/frontends/widgets/itemcontextmenu.py --- miro-4.0.4/lib/frontends/widgets/itemcontextmenu.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/itemcontextmenu.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,6 +30,9 @@ """itemcontextmenu.py -- Handle popping up a context menu for an item """ +import functools + +from miro import api from miro import app from miro import displaytext from miro import messages @@ -52,9 +55,12 @@ for iter in tableview.get_selection()] if len(selected) == 1: - return self._make_context_menu_single(selected[0]) + menu = self._make_context_menu_single(selected[0]) else: - return self._make_context_menu_multiple(selected) + menu = self._make_context_menu_multiple(selected) + # allow extensions to change the menu + api.hook_invoke('item_context_menu', selected, menu) + return menu def _remove_context_menu_item(self, selection): """Returns the appropriate remove/delete menu item. 
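The hunk above routes every item context menu through the new
'item_context_menu' extension hook before the menu is shown.  A minimal
sketch of what such a hook might look like (illustrative only: the hook
function and the do_something helper are invented here; as the code below
shows, a menu is a list of (label, callback) tuples with None entries as
separators):

    def item_context_menu_hook(selected_items, menu):
        if len(selected_items) == 1:
            menu.append(None)   # separator
            menu.append((u'Example Action',
                         lambda: do_something(selected_items[0])))
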
@@ -99,7 +105,7 @@ section = [] def play_externally(): - app.widgetapp.open_file(item.video_path) + app.widgetapp.open_file(item.filename) messages.MarkItemWatched(item).send_to_backend() # drm items seem to go in misc and are always unplayable. @@ -128,6 +134,18 @@ convert_menu = self._make_convert_menu() section.append((_('Convert to...'), convert_menu)) + if not (item.device or item.remote): + section.append((_('Set media kind as...'), + self._make_edit_metadata_menu())) + if not item.remote and not item.device: + if item.net_lookup_enabled: + label = _("Don't Use Online Lookup Data") + callback = app.widgetapp.disable_net_lookup_for_selection + else: + label = _("Use Online Lookup Data") + callback = app.widgetapp.enable_net_lookup_for_selection + section.append((label, callback)) + if section: menu_sections.append(section) section = [] @@ -137,8 +155,10 @@ playing_item = app.playback_manager.get_playing_item() is_paused = app.playback_manager.is_paused - if item != playing_item or (item == playing_item and is_paused): + if item != playing_item: section.append((_('Play'), app.widgetapp.play_selection)) + elif item == playing_item and is_paused: + section.append((_('Play'), app.playback_manager.toggle_paused)) else: section.append((_('Pause'), app.playback_manager.pause)) # Resume @@ -169,44 +189,31 @@ menu_sections.append(section) section = [] - # Edit Item Details, Delete, Resume/Stop Seeding - # this doesn't work for device or remote items. if not (item.device or item.remote): section.append(( _("Edit Item Details"), app.widgetapp.edit_items)) - if not (item.device or item.remote): - if item.seeding_status == 'seeding': - section.append(( - _('Stop Seeding'), - messages.StopUpload(item.id).send_to_backend)) - elif item.seeding_status == 'stopped': - section.append(( - _('Resume Seeding'), - messages.StartUpload(item.id).send_to_backend)) if not item.is_container_item: section.append(( _('Add to Playlist'), app.widgetapp.add_to_playlist)) - elif ((item.download_info is not None and - item.download_info.state != 'failed')): - if item.download_info.state != 'finished': - if not menu_sections: - # make sure that the default menu option isn't destructive - # (re: #16715) - section.append(None) + elif item.is_download: + if not menu_sections: + # make sure that the default menu option isn't destructive + # (re: #16715) + section.append(None) + section.append(( + _('Cancel Download'), + messages.CancelDownload(item.id).send_to_backend)) + if not item.is_paused: section.append(( - _('Cancel Download'), - messages.CancelDownload(item.id).send_to_backend)) - if item.download_info.state != 'paused': - section.append(( - _('Pause Download'), - messages.PauseDownload(item.id).send_to_backend)) - else: - section.append(( - _('Resume Download'), - messages.ResumeDownload(item.id).send_to_backend)) + _('Pause Download'), + messages.PauseDownload(item.id).send_to_backend)) + else: + section.append(( + _('Resume Download'), + messages.ResumeDownload(item.id).send_to_backend)) else: if not (item.device or item.remote): @@ -215,8 +222,7 @@ section.append(( _('Download'), messages.StartDownload(item.id).send_to_backend)) - if (item.download_info and - item.download_info.state == u'failed'): + if item.is_failed_download: section.append(( _('Cancel Download'), messages.CancelDownload( @@ -229,6 +235,15 @@ # Play section.append((_('Play'), app.widgetapp.play_selection)) + if item.is_seeding: + section.append(( + _('Stop Seeding'), + messages.StopUpload(item.id).send_to_backend)) + elif not item.is_seeding 
and item.is_torrent: + section.append(( + _('Resume Seeding'), + messages.StartUpload(item.id).send_to_backend)) + if item.downloaded and not item.remote: if file_navigator_name: reveal_text = _('Show File in %(progname)s', @@ -237,7 +252,7 @@ reveal_text = _('File on Disk') section.append((reveal_text, - lambda: app.widgetapp.check_then_reveal_file(item.video_path))) + lambda: app.widgetapp.check_then_reveal_file(item.filename))) remove = self._remove_context_menu_item([item]) if remove: section.append(remove) @@ -297,6 +312,8 @@ paused = [] uploadable = [] expiring = [] + net_lookup_enabled = [] + net_lookup_disabled = [] editable = False # local functions @@ -328,6 +345,10 @@ if info.downloaded: downloaded.append(info) if info.is_playable: + if info.net_lookup_enabled: + net_lookup_enabled.append(info) + else: + net_lookup_disabled.append(info) playable.append(info) if info.device: device.append(info) @@ -343,12 +364,11 @@ container.append(info) if not (info.device or info.remote): editable = True - elif info.state == 'paused': + elif info.is_paused: paused.append(info) - elif info.state == 'downloading': + elif info.is_download: downloading.append(info) - if (info.download_info.torrent and - info.download_info.state != 'uploading'): + if info.is_torrent and not info.is_seeding: uploadable.append(info) else: available.append(info) @@ -383,7 +403,17 @@ menu.append(None) convert_menu = self._make_convert_menu() menu.append((_('Convert to...'), convert_menu)) - + menu.append((_('Set media kind as...'), + self._make_edit_metadata_menu())) + if downloaded and not remote and not device: + if net_lookup_enabled: + label = _("Don't Use Online Lookup Data") + callback = app.widgetapp.disable_net_lookup_for_selection + menu.append((label, callback)) + if net_lookup_disabled: + label = _("Use Online Lookup Data") + callback = app.widgetapp.enable_net_lookup_for_selection + menu.append((label, callback)) if available: if len(menu) > 0: @@ -425,17 +455,30 @@ return menu + def _make_edit_metadata_menu(self): + # Edit metadata + edit_metadata_menu = [] + + edit_metadata_menu.append((_('Movie'), + lambda: app.widgetapp.set_media_kind(u'movie'))) + edit_metadata_menu.append((_('Show'), + lambda: app.widgetapp.set_media_kind(u'show'))) + edit_metadata_menu.append((_('Clip'), + lambda: app.widgetapp.set_media_kind(u'clip'))) + edit_metadata_menu.append((_('Podcast'), + lambda: app.widgetapp.set_media_kind(u'podcast'))) + + return edit_metadata_menu + def _make_convert_menu(self): convert_menu = [] - sections = conversion_manager.get_converters() - for index, section in enumerate(sections): - for converter in section[1]: - def convert(converter=converter.identifier): - app.widgetapp.convert_items(converter) - convert_menu.append((converter.displayname, convert)) - if index+1 < len(sections): - convert_menu.append(None) - convert_menu.append(None) + sections = app.menu_manager.get_converters() + for index, converter_list in enumerate(sections): + for (identifier, title) in converter_list: + func = functools.partial(app.widgetapp.convert_items, + identifier) + convert_menu.append((title, func)) + convert_menu.append(None) convert_menu.append((_("Show Conversion Folder"), app.widgetapp.reveal_conversions_folder)) return convert_menu diff -Nru miro-4.0.4/lib/frontends/widgets/itemedit.py miro-6.0/lib/frontends/widgets/itemedit.py --- miro-4.0.4/lib/frontends/widgets/itemedit.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/itemedit.py 2013-04-05 16:02:42.000000000 +0000 @@ -262,7 
+262,14 @@ self.widget.set_width(width)
         self.widget.set_max_length(width)
         value = self.common_value or ""
-        self.widget.set_text(str(value))
+        # Ugh! Convert to utf-8 or else decode may not work!
+        if isinstance(value, unicode):
+            value = value.encode('utf-8')
+        try:
+            value = str(value)
+        except (TypeError, ValueError):
+            value = ''
+        self.widget.set_text(value)
 
     def get_value(self):
         value = self.widget.get_text()
@@ -318,7 +325,7 @@
     TITLE = _("Choose a thumbnail file")
     DIALOG = widgetset.FileOpenDialog
     def __init__(self, items, label):
-        Field.__init__(self, 'cover_art', items, label)
+        Field.__init__(self, 'cover_art_path', items, label)
         DialogOwnerMixin.__init__(self, self.DIALOG, self.TITLE)
         path = self.common_value
@@ -489,7 +496,7 @@
         """Pack the left column into the middle HBox of the main VBox."""
         widget = widgetset.VBox()
         left = []
-        left.append(TextField('name', self.items, _("Name")))
+        left.append(TextField('title', self.items, _("Name")))
         left.append(TextField('artist', self.items, _("Artist")))
         left.append(TextField('album', self.items, _("Album")))
         left.append(TextField('genre', self.items, _("Genre")))
@@ -527,7 +534,7 @@
 
     def _pack_bottom(self):
         """Pack the bottom row into the VBox."""
-        bottom = [PathField('video_path', self.items, _("Path"), readonly=True)]
+        bottom = [PathField('filename', self.items, _("Path"), readonly=True)]
         self.vbox.pack_start(widgetutil.pad(widgetset.HLine(),
             top=25, bottom=10, left=15, right=15))
         for field in bottom:
@@ -558,15 +565,10 @@
             (u'podcast', _("Podcast")),
             ]),
         ]
-        content = widgetset.VBox()
+        self.vbox = widgetset.VBox()
         for field in self.fields:
             field.set_label_width(120)
-            content.pack_start(field.get_box(), padding=5)
-        # XXX - hack: OS X is cutting off the right side of the box in single
-        # selection mode; this seems like a bug in layout. padding the right
-        # side causes only padding to be cut off.
-        # XXX - this padding fixes 17065. 17065 is the same layout issue?
-        self.vbox = widgetutil.pad(content, right=15)
+            self.vbox.pack_start(field.get_box(), padding=5)
 
 class ToggleButtonBackground(widgetset.Background):
     """Gradient background for an individual ToggleButton."""
@@ -750,6 +752,17 @@
             self.panels[name] = content
         self.toggler.add_option(name, label)
 
+    def set_width_from_panels(self):
+        """Set the min-width for our panels.
+
+        We set the min-width to the width of the biggest panel, to avoid
+        things moving too much when the user switches between them.
+        """
+        max_width = -1
+        for panel in self.panels.values():
+            max_width = max(max_width, panel.vbox.get_size_request()[0])
+        self.content_panel.set_size_request(max_width, -1)
+
     def on_choose_panel(self, _toggler, name):
         self.content_panel.set(self.panels[name].vbox)
 
@@ -761,6 +774,7 @@
         self._add_panel(_("General"), 'general', GeneralPanel(self.items))
         self._add_panel(_("Video"), 'video', VideoPanel(self.items))
         self._pack_bottom()
+        self.set_width_from_panels()
        self.toggler.choose('general')
         self.content_panel.set(self.panels['general'].vbox)
         return self.vbox
diff -Nru miro-4.0.4/lib/frontends/widgets/itemfilter.py miro-6.0/lib/frontends/widgets/itemfilter.py
--- miro-4.0.4/lib/frontends/widgets/itemfilter.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/itemfilter.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,293 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2011
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL.  If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so.  If you do not wish to do so, delete
+# this exception statement from your version.  If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""itemfilter.py -- Filter out items from item lists
+
+ItemFilter is a base class for item filters.  They handle determining what
+items should be filtered out of an item list.  They also handle
+selecting/deselecting other ItemFilters when they are first activated (most
+filters deselect all other filters, but some can be used together).
+
+ItemFilterSet handles keeping track of what filters are available and which
+are selected for a given item list.
+"""
+
+from miro import app
+from miro import util
+from miro.gtcache import gettext as _
+from miro.gtcache import declarify
+
+class ItemFilter(object):
+    """Base class for item filters.
+
+    To create a new item filter you must:
+      - define key, which must be a unique string
+      - define user_label
+      - define the filter() method
+      - (optionally) override the switch_to_filter() method
+    """
+    key = None
+
+    def add_to_query(self, item_tracker_query):
+        """Add this filter to an ItemTrackerQuery
+
+        subclasses must override this
+        """
+        # TODO: This is the new interface for ItemFilter.  We need to
+        # implement this method on all subclasses.
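#         (A hypothetical subclass sketch, invented for illustration, showing
#         the pieces the class docstring above asks for -- a unique key, a
#         user_label, and an add_to_query() implementation in the style of
#         the concrete filters later in this file:
#
#             class ItemFilterExample(ItemFilter):
#                 key = u'example'
#                 user_label = _('Example')
#
#                 def add_to_query(self, query):
#                     query.add_condition('file_type', '=', 'audio')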
+        raise NotImplementedError()
+
+    def switch_to_filter(self, previous_filters):
+        """select/deselect filters when this one is selected
+
+        By default, we select this filter and deselect all others.
+
+        :param previous_filters: set of previously active filters
+        :returns: set of new active filters
+        """
+        return set((self.key,))
+
+    @staticmethod
+    def lookup_class(key):
+        """Find an ItemFilter subclass for a key."""
+
+        for cls in util.all_subclasses(ItemFilter):
+            if cls.key == key:
+                return cls
+        raise KeyError(key)
+
+    # maps keys to ItemFilter objects
+    _cached_filters = {}
+    @staticmethod
+    def get_filter(key):
+        """Factory method to create an ItemFilter subclass from its key."""
+        try:
+            return ItemFilter._cached_filters[key]
+        except KeyError:
+            filter = ItemFilter.lookup_class(key)()
+            ItemFilter._cached_filters[key] = filter
+            return filter
+
+class ItemFilterSet(object):
+    def __init__(self):
+        """Create a new ItemFilterSet
+
+        By default only the all filter will be available.
+        """
+        # set of filter keys currently active
+        self.active_filters = set()
+        # list of ItemFilter objects currently active
+        self.active_filter_objects = []
+        self.extension_filters = []
+        # select the 'all' filter
+        self.select('all')
+
+    def add_extension_filters(self, filter_list):
+        self.extension_filters.extend(filter_list)
+
+    def _lookup_filter(self, key):
+        for filter in self.extension_filters:
+            if filter.key == key:
+                return filter
+        return ItemFilter.get_filter(key)
+
+    def select(self, key):
+        """Select a new filter
+
+        This method should be used when a user selects a new filter.  We will
+        apply our selection logic to determine if other filters should stay
+        active, or if new ones should also be selected.
+        """
+
+        # get the filter we want to add
+        filter_ = self._lookup_filter(key)
+        # let the filter figure out what other filters to select/deselect
+        new_active_filters = filter_.switch_to_filter(self.active_filters)
+        self.set_filters(new_active_filters)
+
+    def set_filters(self, filter_keys):
+        """Set the filters to be a specific set.
+
+        No validation is used to check that the set is valid.
+
+        :raises KeyError: one of the filter keys is not valid
+        """
+        # fetch the filters first.  This way we won't change our attributes in
+        # case of a KeyError.
+        filter_objs = [ItemFilter.get_filter(k) for k in filter_keys]
+        self.active_filters = set(filter_keys)
+        self.active_filter_objects = filter_objs
+
+    def add_to_query(self, query):
+        """Add conditions to an ItemTrackerQuery object."""
+        for f in self.active_filter_objects:
+            f.add_to_query(query)
+
+# define the actual filter classes we use
+
+class ItemFilterAll(ItemFilter):
+    """Filter that shows all items."""
+    key = u'all'
+    # this "All" is different than other "All"s in the codebase, so it
+    # needs to be clarified
+    user_label = declarify(_('View|All'))
+
+    def add_to_query(self, query):
+        return
+
+class ItemFilterAudioVideoHelper(ItemFilter):
+    """Item filters that work in conjunction with the audio/video filters."""
+
+    def switch_to_filter(self, previous_filters):
+        # allow audio/video filters to remain
+        rv = set((self.key,))
+        for filter_ in (u'audio', u'video'):
+            if filter_ in previous_filters:
+                rv.add(filter_)
+        return rv
+
+class ItemFilterUnplayed(ItemFilterAudioVideoHelper):
+    """Filter for unplayed items."""
+    key = u'unplayed'
+    user_label = _('Unplayed')
+
+    def add_to_query(self, query):
+        query.add_condition('filename', 'IS NOT', None)
+        query.add_condition('watched_time', 'IS', None)
+
+class ItemFilterDownloaded(ItemFilterAudioVideoHelper):
+    """Filter for downloaded items."""
+    key = u'downloaded'
+    user_label = _('Downloaded')
+
+    def add_to_query(self, query):
+        query.add_condition('downloaded_time', 'IS NOT', None)
+        query.add_condition('expired', '=', 0)
+
+class ItemFilterAudioVideo(ItemFilter):
+    """Filter for audio/video on the all podcast tab."""
+
+    def switch_to_filter(self, previous_filters):
+        # allow downloaded/unplayed filters to remain
+        rv = set((self.key,))
+        for filter_ in (u'downloaded', u'unplayed'):
+            if filter_ in previous_filters:
+                rv.add(filter_)
+        # make sure that either downloaded or unplayed is selected
+        if u'unplayed' not in rv and u'downloaded' not in rv:
+            rv.add(u'downloaded')
+        return rv
+
+class ItemFilterVideo(ItemFilterAudioVideo):
+    """Filter for video items."""
+    key = u'video'
+    user_label = _('Video')
+
+    def add_to_query(self, query):
+        query.add_condition('file_type', '=', 'video')
+
+class ItemFilterAudio(ItemFilterAudioVideo):
+    """Filter for audio items."""
+    key = u'audio'
+    user_label = _('Audio')
+
+    def add_to_query(self, query):
+        query.add_condition('file_type', '=', 'audio')
+
+class ItemFilterWatchedFolderAudioVideo(ItemFilter):
+    def switch_to_filter(self, previous_filters):
+        # allow unplayed filter to remain
+        rv = set((self.key,))
+        if u'unplayed' in previous_filters:
+            rv.add(u'unplayed')
+        return rv
+
+class ItemFilterWatchedFolderVideo(ItemFilterWatchedFolderAudioVideo):
+    """Filter for video items in watch folders.
+
+    This works like the Video filter, but it doesn't automatically select
+    other filters when selected
+    """
+    key = u'wf-video'
+    user_label = _('Video')
+
+    def add_to_query(self, query):
+        query.add_condition('file_type', '=', 'video')
+
+class ItemFilterWatchedFolderAudio(ItemFilterWatchedFolderAudioVideo):
+    """Filter for audio items in watch folders.
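# (Illustrative selection trace: switch_to_filter() lets compatible filters
# stay active together, so starting from the default set:
#
#     filters = ItemFilterSet()      # active: set([u'all'])
#     filters.select(u'unplayed')    # active: set([u'unplayed'])
#     filters.select(u'video')       # active: set([u'video', u'unplayed'])
#     filters.select(u'all')         # active: set([u'all'])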
+
+    This works like the Audio filter, but it doesn't automatically select
+    other filters when selected
+    """
+    key = u'wf-audio'
+    user_label = _('Audio')
+
+    def add_to_query(self, query):
+        query.add_condition('file_type', '=', 'audio')
+
+class ItemFilterMovies(ItemFilter):
+    """Filter for movie items."""
+    key = u'movies'
+    user_label = _('Movies')
+
+    def add_to_query(self, query):
+        query.add_condition('kind', '=', 'movie')
+
+class ItemFilterShows(ItemFilter):
+    """Filter for show items."""
+    key = u'shows'
+    user_label = _('Shows')
+
+    def add_to_query(self, query):
+        query.add_condition('kind', '=', 'show')
+
+class ItemFilterClips(ItemFilter):
+    """Filter for clip items."""
+    key = u'clips'
+    user_label = _('Clips')
+
+    def add_to_query(self, query):
+        query.add_condition('kind', '=', 'clip')
+
+class ItemFilterPodcasts(ItemFilter):
+    """Filter for podcast items.
+
+    Note: this means the user flagged the item as a podcast somehow, not
+    that we downloaded it from a feed
+    """
+    key = u'podcasts'
+    user_label = _('Podcasts')
+
+    def add_to_query(self, query):
+        query.add_condition('kind', '=', u'podcast')
+
+def get_label(key):
+    """Get the label to use for a filter key."""
+    return ItemFilter.get_filter(key).user_label
diff -Nru miro-4.0.4/lib/frontends/widgets/itemlistcontroller.py miro-6.0/lib/frontends/widgets/itemlistcontroller.py
--- miro-4.0.4/lib/frontends/widgets/itemlistcontroller.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/itemlistcontroller.py	2013-04-05 16:02:42.000000000 +0000
@@ -41,6 +41,7 @@
 import logging
 from urlparse import urljoin
 
+from miro import api
 from miro import app
 from miro.errors import (WidgetActionError, WidgetRangeError,
     ActionUnavailableError)
@@ -51,11 +52,11 @@
 from miro.frontends.widgets import dialogs
 from miro.frontends.widgets import itemcontextmenu
 from miro.frontends.widgets import itemlist
-from miro.frontends.widgets import itemrenderer
-from miro.frontends.widgets import itemtrack
 from miro.frontends.widgets import itemlistwidgets
+from miro.frontends.widgets import itemrenderer
+from miro.frontends.widgets import itemsort
+from miro.frontends.widgets import keyboard
 from miro.frontends.widgets import widgetutil
-from miro.frontends.widgets import menus
 from miro.frontends.widgets.widgetstatestore import WidgetStateStore
 from miro.plat.frontends.widgets import timer
 from miro.plat.frontends.widgets import widgetset
@@ -74,32 +75,6 @@
     else:
         return {}
 
-class FilteredListMixin(object):
-    """Track a filter switch attached to an ItemListController
-    """
-    def __init__(self):
-        filters = app.widget_state.get_filters(self.type, self.id)
-        self.update_filters(filters)
-
-    def on_toggle_filter(self, button, filter_):
-        """Handle the filter switch changing state."""
-        self.update_filters(filter_)
-        app.widget_state.toggle_filters(self.type, self.id, filter_)
-        # Did we toggle a filter of the currently playing playlist
-        if ((self.current_item_view and
-             app.playback_manager.is_playing and
-             self.current_item_view.model ==
-             app.playback_manager.playlist.model)):
-            app.playback_manager.reshuffle()
-
-    def update_filters(self, filters):
-        """Update the display and toolbar filter switch state."""
-        self.item_list_will_change()
-        self.titlebar.toggle_filter(filters)
-        self.item_list.toggle_filter(filters)
-        self.send_model_changed()
-        self.check_for_empty_list()
-
 class ProgressTrackingListMixin(object):
     """Controller that cares about item metadata extraction progress."""
     def __init__(self):
@@ -111,13 +86,13 @@
         else:
self.mediatype = 'other'
 
-    def update_metadata_progress(self, remaining, eta, total):
+    def update_metadata_progress(self, finished, finished_local, eta, total):
         meter = self.widget.get_progress_meter()
         if meter:
-            meter.update(self.mediatype, remaining, eta, total)
+            meter.update(self.mediatype, finished, finished_local, eta, total)
             self.postponed = None
         else:
             # got progress before widget created
-            self.postponed = (remaining, eta, total)
+            self.postponed = (finished, finished_local, eta, total)
 
     def _init_widget(self):
         """Hook that handles any updates that were waiting for the widget."""
@@ -141,12 +116,14 @@
             item_info.id, self.repeat_delay(item_info))
 
     def _do_iteration(self, item_id, repeat_delay):
-        try:
-            item_info = self.item_list.get_item(item_id)
-        except KeyError:
+        if not (self.item_list.is_valid() and
+                self.item_list.item_in_list(item_id)):
+            # item deleted or list destroyed.
             # item was deleted from model
             self.currently_animating.remove(item_id)
             return
+
+        item_info = self.item_list.get_item(item_id)
         rv = self.continue_animation(item_info)
         if rv != False:
             timer.add(repeat_delay, self._do_iteration, item_id, repeat_delay)
@@ -177,7 +154,6 @@
         """Finish the animation"""
         pass
 
-
 class ThrobberAnimationManager(AnimationManager):
     def initial_delay(self, item_info):
         return 0.2
@@ -186,12 +162,56 @@
         return 0.2
 
     def continue_animation(self, item_info):
-        if item_info.state == 'downloading':
-            self.item_list.update_throbber(item_info.id)
+        if item_info.is_download:
+            value = self.item_list.get_attr(item_info.id, 'throbber-value', 0)
+            self.item_list.set_attr(item_info.id, 'throbber-value',
+                                    value + 1)
         else:
-            self.item_list.finish_throbber(item_info.id)
+            self.item_list.unset_attr(item_info.id, 'throbber-value')
         return False
 
+class RetryAnimationManager(AnimationManager):
+    """AnimationManager to update the "retrying in..." text.
+
+    This isn't really an animation, but we can use the same system to update
+    it.
+    """
+
+    def initial_delay(self, item_info):
+        return 1.0
+
+    def repeat_delay(self, item_info):
+        return 1.0
+
+    def continue_animation(self, item_info):
+        return item_info.is_retrying
+
+class ItemSelectionInfo(object):
+    """Stores information about what's selected in an item list.
+
+    Attributes:
+      - count: number of selected items
+      - has_download: are any of the items downloaded?
+      - has_remote: are any of the items remote?
+      - file_types: set containing all file types
+    """
+    def __init__(self, selected_items=None):
+        if selected_items is None:
+            selected_items = []
+        self.count = len(selected_items)
+        self.file_types = set()
+        self.has_download = self.has_remote = False
+        for item in selected_items:
+            self.file_types.add(item.file_type)
+            if item.downloaded:
+                self.has_download = True
+            if item.remote:
+                self.has_remote = True
+
+    def has_file_type(self, file_type):
+        """Does the selection have a specific file type?"""
+        return file_type in self.file_types
+
 class ItemListController(object):
     """Base class for controllers that manage lists of items.
@@ -202,33 +222,57 @@
     def __init__(self, typ, id_):
         """Construct an ItemListController.
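# (Usage sketch for ItemSelectionInfo above, illustrative only; the items
# just need file_type/downloaded/remote attributes, as its loop shows:
#
#     selection = ItemSelectionInfo(controller.get_selection())
#     if selection.count and selection.has_file_type(u'video'):
#         pass  # e.g. enable video-only actions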
- type and id are the same as in the constructor to - messages.TrackItems + type and id are the same as in the constructor to itemlist.ItemList() """ self.type = typ self.id = id_ self.views = {} - self._search_text = '' - self._got_initial_list = False + self._search_text = app.inline_search_memory.get_search(self.type, + self.id) self._playing_items = False self._selection_to_restore = None self.config_change_handle = None self.show_resume_playing_button = False - self.item_tracker = self.build_item_tracker() + self.titlebar = self.make_titlebar() + self.item_list = self.build_item_list() + self.add_extension_filters() + self.model = widgetset.ItemListModel(self.item_list) self._init_widget() - self._check_for_initial_items() + self.restore_scroll_positions() + self.restore_selection() - self._init_sort() self._init_item_views() self.initialize_search() - self._item_tracker_callbacks = [] + self._item_list_callbacks = [] self._playback_callbacks = [] self.throbber_manager = ThrobberAnimationManager(self.item_list, self.all_item_views()) + self.retry_time_manager = RetryAnimationManager(self.item_list, + self.all_item_views()) + self.connect_to_signals() + + # filter handling code + def on_filter_clicked(self, button, filter_key): + """Handle the filter switch changing state.""" + self.item_list.select_filter(filter_key) + app.widget_state.set_filters(self.type, self.id, + self.item_list.get_filters()) + self.titlebar.set_filters(self.item_list.get_filters()) + self.check_for_empty_list() - def get_item_list(self): - return self.item_tracker.item_list - item_list = property(get_item_list) + def add_extension_filters(self): + hook_results = api.hook_invoke('item_list_filters', self.type, + self.id) + for filter_list in hook_results: + try: + self._add_extension_filter_list(filter_list) + except StandardError: + logging.exception("Error adding extension item filter") + + def _add_extension_filter_list(self, filter_list): + self.item_list.add_extension_filters(filter_list) + for filter_ in filter_list: + self.titlebar.filter_box.add_filter(filter_.key) def on_become_primary(self): """This has become the primary item_list_controller; this is like @@ -298,11 +342,6 @@ app.widgetapp.handle_soft_failure('_handle_playback_did_stop', "did-repeat sent to wrong ILC", with_exception=False) - def _init_sort(self): - sorter = self.get_sorter() - self.change_sort_indicators(sorter.KEY, sorter.is_ascending()) - self.item_list.set_sort(sorter) - def get_saved_search_text(self): """Get the text we would use to create a saved search. @@ -335,15 +374,31 @@ def make_sorter(self, column, ascending): try: - sorter = itemlist.SORT_KEY_MAP[column](ascending) + sorter = itemsort.SORT_KEY_MAP[column](ascending) except KeyError: + logging.warn("Error looking up sort: %s (item list type: %s)", + column, self.type) column = WidgetStateStore.DEFAULT_SORT_COLUMN[self.type] column, ascending = self.parse_sort_key(column) - sorter = itemlist.SORT_KEY_MAP[column](ascending) + sorter = itemsort.SORT_KEY_MAP[column](ascending) + if column == 'multi-row-album': + sorter.switch_mode(self.get_multi_row_album_mode()) return sorter + def setup_multi_row_album_sorter(self, sorter): + """Set up the sorter for the multi-row album column + + That column displays different data depending on which tab is + selected. For example in the all feeds tab, it displays data based on + the feed attributes, rather than album attributes. 
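# (Illustrative sketch of an 'item_list_filters' extension hook as invoked
# by add_extension_filters() above; ItemFilterExample stands in for an
# invented ItemFilter subclass:
#
#     def item_list_filters(typ, id_):
#         if typ == u'feed':
#             return [ItemFilterExample()]
#         return []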
+ + If a subclass changes what's displayed in that column, it probably + should also override this method and change the mode for the sorter. + """ + pass + def make_sort_key(self, sorter): - key = unicode(sorter.KEY) + key = unicode(sorter.key) if sorter.is_ascending(): state = key else: @@ -352,58 +407,49 @@ def change_sort_indicators(self, sort_key, ascending): self.list_item_view.change_sort_indicator(sort_key, ascending) + self.album_item_view.change_sort_indicator(sort_key, ascending) self.widget.toolbar.change_sort_indicator(sort_key, ascending) def _init_widget(self): - toolbar = self.build_header_toolbar() self.selected_view = app.widget_state.get_selected_view(self.type, self.id) - self.widget = itemlistwidgets.ItemContainerWidget(toolbar, - self.selected_view) - + # build widgets + self.standard_view_toolbar = self.build_standard_view_header() + self.widget = itemlistwidgets.ItemContainerWidget( + self.standard_view_toolbar, self.selected_view) self.build_widget() - - list_view = WidgetStateStore.get_list_view_type() - self.views[list_view] = self.build_list_view() - + self.item_selection_info = ItemSelectionInfo() + self.list_item_view = self.build_list_view() + self.standard_item_view = self.build_standard_view() + self.album_item_view = self.build_album_view() self.expand_or_contract_item_details() - + # set sort indicators + sorter = self.item_list.sorter + self.change_sort_indicators(sorter.key, sorter.is_ascending()) + # connect to signals + list_view = WidgetStateStore.get_list_view_type() standard_view = WidgetStateStore.get_standard_view_type() - standard_view_widget = itemlistwidgets.StandardView( - self.item_list, self.build_renderer()) - self.views[standard_view] = standard_view_widget - standard_view_background = widgetset.SolidBackground( - standard_view_widget.BACKGROUND_COLOR) - standard_view_background.add(widgetutil.pad(standard_view_widget, - top=10, bottom=10)) - - # set up member attrs to easily get our list/standard view widgets - self.list_item_view = self.views[list_view] - self.standard_item_view = self.views[standard_view] - self.standard_view_toolbar = toolbar - - standard_view_scroller = widgetset.Scroller(False, True) - standard_view_scroller.add(standard_view_background) - self.widget.vbox[standard_view].pack_start( - standard_view_scroller, expand=True) - self.views[standard_view].set_scroller(standard_view_scroller) - standard_view_scroller.set_background_color( - standard_view_widget.BACKGROUND_COLOR) - standard_view_scroller.prepare_for_dark_content() - - toolbar.connect_weak('sort-changed', + album_view = WidgetStateStore.get_album_view_type() + self.standard_view_toolbar.connect_weak('sort-changed', self.on_sort_changed, standard_view) self.widget.item_details.expander_button.connect_weak('clicked', self.on_item_details_expander_clicked) self.list_item_view.connect_weak('sort-changed', self.on_sort_changed, list_view) + self.album_item_view.connect_weak('sort-changed', + self.on_sort_changed, album_view) + self.titlebar.set_filters(self.item_list.get_filters()) self.titlebar.connect_weak('list-view-clicked', self.set_view, list_view) self.titlebar.connect_weak('normal-view-clicked', self.set_view, standard_view) + self.titlebar.connect_weak('album-view-clicked', + self.set_view, album_view) self.titlebar.connect_weak('resume-playing', self.on_resume_playing) self.standard_item_view.renderer.signals.connect_weak( 'throbber-drawn', self.on_throbber_drawn) + self.standard_item_view.renderer.signals.connect_weak( + 'item-retrying', 
self.on_item_retrying) def set_view(self, _widget, view): if view == self.selected_view: @@ -438,7 +484,7 @@ # perform finishing touches app.widget_state.set_selected_view(self.type, self.id, self.selected_view) - app.menu_manager.update_menus() + self._selection_changed('item-list-view-changed') self.expand_or_contract_item_details() def get_current_item_view(self): @@ -450,11 +496,11 @@ def focus_view(self): self.current_item_view.focus() - if len(self.get_selection()) == 0: - first = self.current_item_view.model.get_first_info() - if first is not None: - iter_ = self.current_item_view.model.iter_for_id(first.id) - self.current_item_view.select(iter_) + if (len(self.get_selection()) == 0 and self.item_list is not None and + len(self.item_list) > 0): + first = self.item_list.get_first_item() + iter_ = self.current_item_view.model.iter_for_id(first.id) + self.current_item_view.select(iter_) def build_widget(self): """Build the container widget for this controller.""" @@ -463,43 +509,115 @@ def build_renderer(self): return itemrenderer.ItemRenderer(display_channel=False) + def build_standard_view(self): + # make the widget + standard_view = itemlistwidgets.StandardView(self.model, + self.build_renderer()) + scroller = widgetset.Scroller(False, True) + scroller.add(standard_view) + standard_view.set_scroller(scroller) + scroller.set_background_color(standard_view.BACKGROUND_COLOR) + scroller.prepare_for_dark_content() + # put the scroller in our container widget + standard_view_type = WidgetStateStore.get_standard_view_type() + self.widget.vbox[standard_view_type].pack_start(scroller, expand=True) + # add to our views map + self.views[standard_view_type] = standard_view + return standard_view + def build_list_view(self): """Build the list view widget for this controller.""" list_view_type = WidgetStateStore.get_list_view_type() - columns = app.widget_state.get_sorts_enabled(self.type, self.id) + columns = app.widget_state.get_columns_enabled(self.type, self.id, + list_view_type) list_view_widths = app.widget_state.get_column_widths( self.type, self.id, list_view_type) column_renderers = self.build_column_renderers() - list_view = itemlistwidgets.ListView(self.item_list, column_renderers, + list_view = itemlistwidgets.ListView(self.model, column_renderers, columns, list_view_widths) scroller = widgetset.Scroller(True, True) scroller.add(list_view) # make the min-width for list view match standard view scroller.set_size_request(600, -1) self.widget.vbox[list_view_type].pack_start(scroller, expand=True) + # add to our views map + self.views[list_view_type] = list_view return list_view - def build_column_renderers(self): - return itemlistwidgets.ListViewColumnRendererSet() + def build_album_view(self): + """Build the album view widget for this controller.""" + # build album view widget + album_view_type = WidgetStateStore.get_album_view_type() + columns = app.widget_state.get_columns_enabled(self.type, self.id, + album_view_type) + # use list view column widths for now. I think we want to separate + # values for these eventually, but since we're sharing which columns + # are enabled, let's share the widths too. 
+        album_view_widths = app.widget_state.get_column_widths(
+            self.type, self.id, album_view_type)
+        column_renderers = self.build_column_renderers()
+        album_view = itemlistwidgets.AlbumView(self.model,
+                column_renderers, columns, album_view_widths)
+        # add widget to a scroller
+        scroller = widgetset.Scroller(True, True)
+        scroller.add(album_view)
+        # make the min-width for album view match standard view
+        scroller.set_size_request(600, -1)
+        # add the scroller to our container widget
+        self.widget.vbox[album_view_type].pack_start(scroller, expand=True)
+        # add to our views map
+        self.views[album_view_type] = album_view
+        return album_view
+
+    def get_multi_row_album_mode(self):
+        """Get the mode to use for MultiRowAlbumRenderer.
+
+        Subclasses should override this method to change how that column gets
+        rendered
+        """
+        return 'standard'
 
-    def build_header_toolbar(self):
-        sorts_enabled = app.widget_state.get_sorts_enabled(self.type, self.id)
+    def build_column_renderers(self):
+        column_renderers = itemlistwidgets.ListViewColumnRendererSet()
+        multi_row_mode = self.get_multi_row_album_mode()
+        (label, renderer) = column_renderers.get('multi-row-album')
+        renderer.switch_mode(multi_row_mode)
+        if multi_row_mode == 'video':
+            column_renderers.change_label('multi-row-album', _('Series'))
+        elif multi_row_mode == 'feed':
+            column_renderers.change_label('multi-row-album', _('Source'))
+        return column_renderers
+
+    def build_standard_view_header(self):
+        sorts_enabled = app.widget_state.get_columns_enabled(self.type,
+            self.id, WidgetStateStore.get_standard_view_type())
         return itemlistwidgets.HeaderToolbar(sorts_enabled)
 
-    def build_item_tracker(self):
-        return itemtrack.ItemListTracker.create(self.type, self.id)
+    def build_item_list(self):
+        filters = self.get_initial_filters()
+        sorter = self.get_sorter()
+        group_func = self.get_item_list_grouping()
+        return app.item_list_pool.get(self.type, self.id, sorter, group_func,
+                                      filters, self._search_text)
 
-    def _check_for_initial_items(self):
-        """Check if the ItemList from our itemtrack already has items
+    def get_initial_filters(self):
+        filters = app.widget_state.get_filters(self.type, self.id)
+        # check that the filters are valid for this view (see #19948)
+        valid_filters = self.titlebar.filter_box.filters
+        invalid_filters = []
+        for key in filters:
+            if key not in valid_filters:
+                invalid_filters.append(key)
+        if invalid_filters:
+            logging.warn("ItemListController.get_initial_filters: removing "
+                         "invalid filters: %s", invalid_filters)
+            filters.difference_update(invalid_filters)
+        if not filters: # handle the case where we removed all filters
+            return None
+        return filters
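# (Illustrative subclass sketch: a controller for feed tabs could switch the
# multi-row album column into 'feed' mode, one of the modes handled by
# build_column_renderers() above; FeedItemController is invented here:
#
#     class FeedItemController(ItemListController):
#         def get_multi_row_album_mode(self):
#             return 'feed'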
- """ - initial_items = self.item_list.get_items() - if len(initial_items) > 0: - # simulate getting the initial-list signal - self.handle_items_will_change(self.item_tracker, initial_items, - [], []) - self.handle_item_list(self.item_tracker, initial_items) + def get_item_list_grouping(self): + return itemlist.album_grouping def expand_or_contract_item_details(self): expanded = app.widget_state.get_item_details_expanded( @@ -507,19 +625,30 @@ self.widget.item_details.set_expanded(expanded) def update_columns_enabled(self): - sorts = app.widget_state.get_sorts_enabled(self.type, self.id) + if self.selected_view == WidgetStateStore.get_standard_view_type(): + self._update_columns_enabled_standard_view() + else: + self._update_columns_enabled_table_view() + + def _update_columns_enabled_table_view(self): + columns = app.widget_state.get_columns_enabled(self.type, self.id, + self.selected_view) widths = app.widget_state.get_column_widths(self.type, self.id, - WidgetStateStore.get_list_view_type()) - self.list_item_view.column_widths.update(widths) - self.list_item_view.update_sorts(sorts) - self.standard_view_toolbar.update_sorts(sorts) + self.selected_view) + self.current_item_view.column_widths.update(widths) + self.current_item_view.update_sorts(columns) + + def _update_columns_enabled_standard_view(self): + columns = app.widget_state.get_columns_enabled(self.type, self.id, + self.selected_view) + self.standard_view_toolbar.update_sorts(columns) def _init_item_views(self): self.context_menu_handler = self.make_context_menu_handler() context_callback = self.context_menu_handler.callback - for view_type, item_view in self.views.items(): + for item_view in self.views.values(): item_view.connect_weak('selection-changed', - self.on_selection_changed, view_type) + self.on_selection_changed) item_view.connect_weak('hotspot-clicked', self.on_hotspot_clicked) item_view.connect_weak('key-press', self.on_key_press) item_view.connect_weak('row-activated', self.on_row_activated) @@ -530,7 +659,6 @@ search = app.inline_search_memory.get_search(self.type, self.id) if search != '': self.titlebar.set_search_text(search) - self.set_search(search) def get_selection(self): """Get the currently selected items. 
Returns a list of @@ -553,7 +681,8 @@ start_id = selection[0].id else: selected_ids = [i.id for i in selection] - selected_ids.sort(key=self.item_list.model.index_of_id) + selected_ids.sort( + key=lambda item_id: self.item_list.get_index(item_id)) start_id = selected_ids[0] self._play_item_list(start_id, presentation_mode, force_resume=force_resume) @@ -563,14 +692,14 @@ self._play_item_list(None, presentation_mode, force_resume) def can_play_items(self): - return any(i.is_playable for i in self.item_list.model.info_list()) + return self.item_list.has_playables() def _play_item_list(self, start_id, presentation_mode='fit-to-bounds', force_resume=False): if start_id is None: start_info = None else: - start_info = self.item_list.model.get_info(start_id) + start_info = self.item_list.get_item(start_id) if start_info is None and not self.can_play_items(): return app.playback_manager.stop() @@ -581,7 +710,7 @@ start_playing=True) return self._playing_items = True - app.playback_manager.start(start_id, self.item_tracker, + app.playback_manager.start(start_id, self.item_list, presentation_mode, force_resume) shuffle = app.widget_state.get_shuffle(self.type, self.id) app.playback_manager.set_shuffle(shuffle) @@ -595,9 +724,10 @@ def set_search(self, search_text): """Set the search for all ItemViews managed by this controller. """ + if search_text == self._search_text: + return self._search_text = search_text - if self.item_tracker: - self.item_tracker.set_search(search_text) + self.item_list.set_search(search_text) app.inline_search_memory.set_search(self.type, self.id, search_text) def on_row_activated(self, item_view, iter_): @@ -606,19 +736,17 @@ app.playback_manager.toggle_paused() elif info.is_playable: self._play_item_list(info.id) - elif info.state == 'downloading': + elif info.is_download and not info.is_paused: messages.PauseDownload(info.id).send_to_backend() - elif info.state == 'paused': + elif info.is_download and info.is_paused: messages.ResumeDownload(info.id).send_to_backend() - elif info.download_info is None and not info.has_drm: + elif not info.is_download and not info.has_drm: messages.StartDownload(info.id).send_to_backend() def on_sort_changed(self, object, sort_key, ascending, view): self.views[view].reset_scroll() - self.item_list_will_change() sorter = self.make_sorter(sort_key, ascending) self.item_list.set_sort(sorter) - self.send_model_changed() self.change_sort_indicators(sort_key, ascending) sort_key = self.make_sort_key(sorter) app.widget_state.set_sort_state(self.type, self.id, sort_key) @@ -631,7 +759,7 @@ return if last_played_id: try: - info = self.item_list.model.get_info(last_played_id) + info = self.item_list.get_item(last_played_id) except KeyError: logging.warn("Resume playing clicked, but last_played_info " "not found") @@ -646,17 +774,23 @@ def on_throbber_drawn(self, signaler, item_info): self.throbber_manager.start(item_info) + def on_item_retrying(self, signaler, item_info): + self.retry_time_manager.start(item_info) + def on_key_press(self, view, key, mods): - if key == menus.DELETE or key == menus.BKSPACE: + if key == keyboard.DELETE or key == keyboard.BKSPACE: return self.handle_delete() - elif key == menus.ESCAPE: + elif key == keyboard.ESCAPE: return self.handle_escape() - elif key == menus.ENTER: + elif key == keyboard.ENTER: self.play_selection() return True - elif key == menus.SPACE and app.playback_manager.is_playing: + elif key == keyboard.SPACE and app.playback_manager.is_playing: app.playback_manager.toggle_paused() return True + elif key == 
keyboard.F5: + app.widgetapp.update_selected_feeds() + return True elif isinstance(key, basestring) and len(key) == 1 and key.isalnum(): self.titlebar.start_editing_search(key) return True @@ -669,10 +803,10 @@ def handle_escape(self): handled = False for info in self.get_selection(): - if info.state == 'downloading': + if info.is_download: messages.CancelDownload(info.id).send_to_backend() handled = True - elif info.state == 'uploading': + elif info.is_seeding: messages.StopUpload(info.id).send_to_backend() handled = True return handled @@ -680,7 +814,7 @@ def on_hotspot_clicked(self, itemview, name, iter_): """Hotspot handler for ItemViews.""" - item_info, attrs = itemview.model[iter_] + (item_info, attrs, group_info) = itemview.model[iter_] if name == 'download': if item_info.remote: name = 'download-sharing-item' @@ -709,13 +843,13 @@ elif name == 'visit_webpage': app.widgetapp.open_url(item_info.permalink) elif name == 'visit_comments': - app.widgetapp.open_url(item_info.commentslink) + app.widgetapp.open_url(item_info.comments_link) elif name == 'visit_filelink': - app.widgetapp.open_url(item_info.file_url) + app.widgetapp.open_url(item_info.url) elif name == 'visit_license': app.widgetapp.open_url(item_info.license) elif name == 'show_local_file': - app.widgetapp.check_then_reveal_file(item_info.video_path) + app.widgetapp.check_then_reveal_file(item_info.filename) elif name == 'show_contents': app.display_manager.push_folder_contents_display(item_info) elif name == 'cancel_auto_download': @@ -750,14 +884,25 @@ messages.DownloadDeviceItems([item_info]).send_to_backend() elif name == 'download-sharing-item': messages.DownloadSharingItems([item_info]).send_to_backend() + elif name == 'album-click': + first_track = self.item_list.get_group_top(item_info.id) + first_track_iter = itemview.model.iter_for_id(first_track.id) + itemview.unselect_all() + itemview.select(first_track_iter) else: logging.debug("ItemView doesn't know how to handle hotspot %s.", name) - def on_selection_changed(self, item_view, view_type): - app.menu_manager.update_menus() + def on_selection_changed(self, item_view): + self._selection_changed() self.update_item_details() + def _selection_changed(self, *extra_reasons_for_update_menus): + """This is called whenever the item selection changes.""" + self.item_selection_info = ItemSelectionInfo(self.get_selection()) + app.menu_manager.update_menus('item-selection-changed', + *extra_reasons_for_update_menus) + def update_item_details(self): try: selection = self.get_selection() @@ -778,7 +923,8 @@ iters = [] for id_ in selected_ids: try: - iters.append(self.item_list.get_iter(id_)) + index = self.item_list.get_index(id_) + iters.append(self.model.nth_iter(index)) except KeyError: # item was removed since we saved the selection, no big deal pass @@ -792,14 +938,29 @@ selection = app.widget_state.get_selection(self.type, self.id) if selection: self.restore_selected_ids(selection) + self.on_selection_changed(self.current_item_view) def save_columns(self): """Save enabled columns, column order, and column widths""" - columns, widths = self.list_item_view.get_column_state() - app.widget_state.set_sorts_enabled(self.type, self.id, columns) - list_view_type = WidgetStateStore.get_list_view_type() - app.widget_state.update_column_widths( - self.type, self.id, list_view_type, widths) + + self._save_table_view_columns(WidgetStateStore.get_list_view_type()) + self._save_table_view_columns(WidgetStateStore.get_album_view_type()) + self._save_standard_view_columns() + + def 
_save_table_view_columns(self, view_type): + item_view = self.views[view_type] + columns, widths = item_view.get_column_state() + app.widget_state.set_columns_enabled(self.type, self.id, + view_type, columns) + app.widget_state.update_column_widths(self.type, self.id, + view_type, widths) + + def _save_standard_view_columns(self): + standard_view_type = WidgetStateStore.get_standard_view_type() + columns = self.standard_view_toolbar.get_sorts() + app.widget_state.set_columns_enabled(self.type, self.id, + standard_view_type, columns) + def save_scroll_positions(self): """Save the current scroll positions of all item views""" @@ -821,39 +982,38 @@ # restore_only so it can't overwrite an early scroll_to_item view.set_scroll_position(position, restore_only=True) - def start_tracking(self): + def connect_to_signals(self): """Send the message to start tracking items.""" - self.track_item_lists() - self.track_playback() - self.track_config_changes() + self.connect_to_item_list_signals() + self.connect_to_playback_signals() + self.connect_to_config_signals() - def stop_tracking(self): + def cleanup(self): """Send the message to stop tracking items.""" - self.cancel_track_item_lists() - self.cancel_track_playback() - self.cancel_track_config_changes() - - def track_item_lists(self): - if self._item_tracker_callbacks: - raise AssertionError("called track_item_lists() twice") - self.item_tracker.set_search(self._search_text) - self._item_tracker_callbacks = [ - self.item_tracker.connect("items-will-change", - self.handle_items_will_change), - self.item_tracker.connect("initial-list", self.handle_item_list), - self.item_tracker.connect("items-changed", + self.disconnect_from_item_list_signals() + self.disconnect_from_playback_signals() + self.disconnect_from_config_signals() + for item_view in self.all_item_views(): + item_view.unset_model() + app.item_list_pool.release(self.item_list) + self.item_list = None + + def connect_to_item_list_signals(self): + self._item_list_callbacks = [ + self.item_list.connect("will-change", + self.handle_will_change), + self.item_list.connect("items-changed", self.handle_items_changed), + self.item_list.connect("list-changed", + self.handle_list_changed), ] - def cancel_track_item_lists(self): - if self.item_tracker is None: - return # never started tracking - for handle in self._item_tracker_callbacks: - self.item_tracker.disconnect(handle) - self.item_tracker = None - self._item_tracker_callbacks = [] + def disconnect_from_item_list_signals(self): + for handle in self._item_list_callbacks: + self.item_list.disconnect(handle) + self._item_list_callbacks = [] - def track_playback(self): + def connect_to_playback_signals(self): self._playback_callbacks.extend([ app.playback_manager.connect('selecting-file', self._on_playback_change), @@ -863,16 +1023,16 @@ self._playback_will_play), ]) - def cancel_track_playback(self): + def disconnect_from_playback_signals(self): for handle in self._playback_callbacks: app.playback_manager.disconnect(handle) self._playback_callbacks = [] - def track_config_changes(self): + def connect_to_config_signals(self): self.config_change_handle = app.frontend_config_watcher.connect( 'changed', self.on_config_change) - def cancel_track_config_changes(self): + def disconnect_from_config_signals(self): if self.config_change_handle: app.frontend_config_watcher.disconnect(self.config_change_handle) self.config_change_handle = None @@ -897,19 +1057,6 @@ self.update_resume_button() self.scroll_to_item(item, manual=False, recenter=False) - def 
item_list_will_change(self): - """Call this before making any changes to the item list. """ - # Remember our current selection. If we are adding/removing items - # from the list, we may lose it. - self._selection_to_restore = self.get_selected_ids() - # forget the selection for now. GTK has code that tries to preserve - # the selection. That's wasted effort since we do the same thing. - self.current_item_view.unselect_all() - - def start_bulk_change(self): - for item_view in self.all_item_views(): - item_view.start_bulk_change() - def send_model_changed(self): for item_view in self.all_item_views(): item_view.model_changed() @@ -920,26 +1067,19 @@ app.widgetapp.handle_soft_failure('send_model_changed()', "_selection_to_restore was not set", with_exception=False) - def handle_items_will_change(self, obj, added, changed, removed): - self.item_list_will_change() - if len(added) + len(removed) > 100: - # Lots of changes are happening, so call start_bulk_change() to - # speed things up. The reason we don't call this always is that - # it looses the scroll position on GTK. But when lots of rows are - # changing, trying to keep the scroll position is pointless. - self.start_bulk_change() - self.on_items_will_change(added, changed, removed) + def handle_will_change(self, item_list): + # Remember our current selection. If we are adding/removing items + # from the list, we may lose it. + self._selection_to_restore = self.get_selected_ids() + # forget the selection for now. GTK has code that tries to preserve + # the selection. That's wasted effort since we do the same thing. + self.current_item_view.unselect_all() - def handle_item_list(self, obj, items): - """Handle an ItemList message meant for this ItemContainer.""" + def handle_items_changed(self, item_list, changed_ids): self.handle_item_list_changes() - self._got_initial_list = True - self.restore_scroll_positions() - self.restore_selection() - self.on_initial_list() + self.on_items_changed() - def handle_items_changed(self, obj, added, changed, removed): - """Handle an ItemsChanged message meant for this ItemContainer.""" + def handle_list_changed(self, item_list): self.handle_item_list_changes() self.on_items_changed() @@ -948,15 +1088,17 @@ self.send_model_changed() self.update_resume_button() self.update_count_label() - self.update_item_details() self.check_for_empty_list() + # call _selection_changed() and update_item_details in case one of the + # selected items changed + self._selection_changed() + self.update_item_details() def check_for_empty_list(self): self.widget.set_list_empty_mode(self.calc_list_empty_mode()) def calc_list_empty_mode(self): - return (self.item_list.get_count() == 0 and - self.item_tracker.is_filtering()) + return len(self.item_list) == 0 def update_resume_button(self): if not self.show_resume_playing_button: @@ -966,34 +1108,20 @@ last_played = None if last_played_id: try: - last_played = self.item_list.model.get_info(last_played_id) + last_played = self.item_list.get_item(last_played_id) except KeyError: pass if (last_played is None or not last_played.is_playable or self._playing_items): self.titlebar.update_resume_button(None, None) else: - self.titlebar.update_resume_button(last_played.name, + self.titlebar.update_resume_button(last_played.title, last_played.resume_time) def update_count_label(self): - _("%(count)s items", {'count': self.item_list.get_count()}) + _("%(count)s items", {'count': len(self.item_list)}) # FIXME: need to have a place to put this text - def on_items_will_change(self, added, changed, 
removed):
-        """Called before we change the list.
-
-        Subclasses can override this method if they want.
-        """
-        pass
-
-    def on_initial_list(self):
-        """Called after we have receieved the initial list of items.
-
-        Subclasses can override this method if they want.
-        """
-        pass
-
     def on_items_changed(self):
         """Called after we have changes to items
 
@@ -1008,11 +1136,10 @@
         return ItemListDragHandler()
 
     def no_longer_displayed(self):
-        if self._got_initial_list:
-            # rember our selection, and scroll position, but only if we had a
-            # chance to call restore_selection() on the initial item list.
-            self.save_selection()
-            self.save_scroll_positions()
+        # remember our selection and scroll position so they can be
+        # restored the next time this controller is displayed.
+        self.save_selection()
+        self.save_scroll_positions()
         self.save_columns()
 
     def no_longer_primary(self):
@@ -1044,7 +1171,6 @@
         ItemListController.__init__(self, self.type, self.id)
 
     def build_widget(self):
-
         self.titlebar = self.make_titlebar()
         self.titlebar.switch_to_view(self.widget.selected_view)
         self.widget.titlebar_vbox.pack_start(self.titlebar)
@@ -1056,11 +1182,10 @@
     def _on_search_changed(self, widget, search_text):
         self.set_search(search_text)
 
-class AudioVideoItemsController(SimpleItemListController, FilteredListMixin,
-                                ProgressTrackingListMixin):
+class AudioVideoItemsController(SimpleItemListController,
+                                ProgressTrackingListMixin):
     def __init__(self):
         SimpleItemListController.__init__(self)
-        FilteredListMixin.__init__(self)
         ProgressTrackingListMixin.__init__(self)
 
     def build_widget(self):
@@ -1072,10 +1197,15 @@
         self.widget.list_empty_mode_vbox.pack_start(
             itemlistwidgets.EmptyListHeader(text))
 
+    def can_play_items(self):
+        # we can shortcut the logic here since we know all items in this list
+        # are playable
+        return len(self.item_list) > 0
+
     def make_titlebar(self):
         titlebar = self.titlebar_class()
         titlebar.connect('search-changed', self._on_search_changed)
-        titlebar.connect('toggle-filter', self.on_toggle_filter)
+        titlebar.connect('filter-clicked', self.on_filter_clicked)
        titlebar.connect('save-search', self._on_save_search)
         return titlebar
 
@@ -1102,6 +1232,17 @@
     def build_renderer(self):
         return itemrenderer.ItemRenderer(display_channel=True, wide_image=True)
 
+    def on_config_change(self, obj, key, value):
+        if key == prefs.SHOW_PODCASTS_IN_VIDEO.key:
+            self.item_list.refresh_query()
+        AudioVideoItemsController.on_config_change(self, obj, key, value)
+
+    def get_item_list_grouping(self):
+        return itemlist.video_grouping
+
+    def get_multi_row_album_mode(self):
+        return 'video'
+
 class AudioItemsController(AudioVideoItemsController):
     type = u'music'
     id = u'music'
@@ -1111,6 +1252,11 @@
     def build_renderer(self):
         return itemrenderer.ItemRenderer(display_channel=True)
 
+    def on_config_change(self, obj, key, value):
+        if key == prefs.SHOW_PODCASTS_IN_MUSIC.key:
+            self.item_list.refresh_query()
+        AudioVideoItemsController.on_config_change(self, obj, key, value)
+
 class OtherItemsController(SimpleItemListController):
     type = u'others'
     id = u'others'
@@ -1137,8 +1283,9 @@
         self.type = u'folder-contents'
         self.id = folder_info.id
         self.info = folder_info
-        self.play_initial_list = play_initial_list
         SimpleItemListController.__init__(self)
+        if play_initial_list:
+            self.play_items()
 
     def make_titlebar(self):
         titlebar = itemlistwidgets.FolderContentsTitlebar()
@@ -1149,11 +1296,6 @@
     def _on_podcast_clicked(self, titlebar, button):
         app.display_manager.pop_display()
 
-    def on_initial_list(self):
-        
SimpleItemListController.on_initial_list(self)
-        if self.play_initial_list:
-            self.play_items()
-
 class ItemListControllerManager(object):
     """Manages ItemListController objects.
@@ -1271,6 +1413,12 @@
         else:
             return self.displayed.get_selection()
 
+    def get_selection_info(self):
+        if self.displayed is None:
+            return ItemSelectionInfo()
+        else:
+            return self.displayed.item_selection_info
+
     def can_play_items(self):
         """Can we play any items currently?"""
         return self.displayed and self.displayed.can_play_items()
@@ -1279,8 +1427,17 @@
         if self.displayed:
             self.controller_no_longer_displayed(self.displayed)
 
-    def update_metadata_progress(self, target, remaining, eta, total):
+    def update_metadata_progress(self, target, finished, finished_local, eta,
+                                 total):
         if target not in self.controllers:
             # devices can have this process started without a controller
             return
-        self.controllers[target].update_metadata_progress(remaining, eta, total)
+        self.controllers[target].update_metadata_progress(
+            finished, finished_local, eta, total)
+
+    def displayed_type(self):
+        """Get the type of the displayed ItemListController."""
+        if self.displayed is not None:
+            return self.displayed.type
+        else:
+            return None
diff -Nru miro-4.0.4/lib/frontends/widgets/itemlist.py miro-6.0/lib/frontends/widgets/itemlist.py
--- miro-4.0.4/lib/frontends/widgets/itemlist.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/itemlist.py 2013-04-05 16:02:42.000000000 +0000
@@ -27,661 +27,472 @@
 # this exception statement from your version. If you delete this exception
 # statement from all source files in the program, then also delete it here.
 
-"""itemlist.py -- Handles TableModel objects that store items.
+"""itemlist.py -- Handles item data for our table views
 
-itemlist, itemlistcontroller and itemlistwidgets work together using the MVC
-pattern.  itemlist handles the Model, itemlistwidgets handles the View and
-itemlistcontroller handles the Controller.
-
-ItemList manages a TableModel that stores ItemInfo objects.  It handles
-filtering out items from the list (for example in the Downloading items list).
-They also handle temporarily filtering out items based the user's search
-terms.
+This module defines ItemList, which integrates ItemTracker with the rest of
+the widgets code.
+
+It also defines several ItemTrackerQuery subclasses that correspond to tabs
+in the interface.
 """
 
-import itertools
-import sys
+import collections
+import logging
 
 from miro import app
-from miro.frontends.widgets.widgetstatestore import WidgetStateStore
-from miro.plat.frontends.widgets import widgetset
-
-
-class ItemSort(object):
-    """Class that sorts items in an item list."""
-
-    def __init__(self, ascending):
-        self.reverse = not ascending
-
-    def is_ascending(self):
-        return not self.reverse
-
-    def sort_key(self, item):
-        """Return a value that can be used to sort item.
-
-        Must be implemented by subclasses.
-        """
-        raise NotImplementedError()
+from miro import prefs
+from miro.data import item
+from miro.data import itemtrack
+from miro.frontends.widgets import itemfilter
+from miro.frontends.widgets import itemsort
+from miro.plat.frontends.widgets.threads import call_on_ui_thread
+
+class ItemList(itemtrack.ItemTracker):
+    """ItemList -- Track a list of items for TableView
+
+    ItemList extends ItemTracker to provide the things we need to implement
+    the data model for our TableViews that contain lists of items.  The
+    platform code uses ItemList to implement ItemListModel.
+ + Extra capabilities include: + - set/get arbitrary attributes on items + - grouping information + - simpler interface to construct queries: + - set_filters/select_filter changes the filters + - set_sort changes the sort + """ - def items_will_change(self, added, changed, removed): - """Called when the item list will change. + # sentinel used to represent a group_info that hasn't been calculated + NOT_CALCULATED = object() - Subclasses can override this if they need to update things based on - changes to the item list. + def __init__(self, tab_type, tab_id, sort=None, group_func=None, + filters=None, search_text=None): + """Create a new ItemList + + Note: outside classes shouldn't call this directly. Instead, they + should use the app.item_list_pool.get() method. + + :param tab_type: type of tab that this list is for + :param tab_id: id of the tab that this list is for + :param sort: initial sort to use + :param group_func: initial grouping to use + :param filters: initial filters + :param search_text: initial search text """ - -class DateSort(ItemSort): - KEY = 'date' - def sort_key(self, info): - return info.release_date - -class NameSort(ItemSort): - KEY = 'name' - def sort_key(self, info): - return info.name_sort_key - -class LengthSort(ItemSort): - KEY = 'length' - def sort_key(self, info): - return info.duration - -class SizeSort(ItemSort): - KEY = 'size' - def sort_key(self, info): - return info.size - -class DescriptionSort(ItemSort): - KEY = 'description' - def sort_key(self, info): - return info.description - -class FeedNameSort(ItemSort): - KEY = 'feed-name' - def sort_key(self, info): - if info.feed_name: - return info.feed_name.lower() - return info.feed_name - -class StatusCircleSort(ItemSort): - KEY = 'state' - # Weird sort, this one is for when the user clicks on the header above the - # status bumps. It's almost the same as StatusSort, but there isn't a - # bump for expiring. - def sort_key(self, info): - if info.state == 'downloading': - return 1 # downloading - elif info.downloaded and not info.video_watched: - return 2 # unwatched - elif not info.item_viewed and not info.expiration_date: - return 0 # new + self.tab_type = tab_type + self.tab_id = tab_id + self.base_query = self._make_base_query(tab_type, tab_id) + self.item_attributes = collections.defaultdict(dict) + self.filter_set = itemfilter.ItemFilterSet() + if filters is not None: + self.filter_set.set_filters(filters) + if sort is None: + self.sorter = itemsort.DateSort() else: - return 3 # other - -class StatusSort(ItemSort): - KEY = 'status' - def sort_key(self, info): - if info.state == 'downloading': - return (2, ) # downloading - elif info.downloaded and not info.video_watched: - return (3, ) # unwatched - elif info.expiration_date: - # the tuple here creates a subsort on expiration_date - return (4, info.expiration_date) # expiring - elif not info.item_viewed: - return (0, ) # new + self.sorter = sort + self.search_text = search_text + self.group_func = group_func + itemtrack.ItemTracker.__init__(self, call_on_ui_thread, + self._make_query(), + self._make_item_source()) + + def is_for_device(self): + return self.tab_type.startswith('device-') + + def is_for_share(self): + return self.tab_type == 'sharing' + + def device_id(self): + # tab_id is the device_id + '-video' or '-audio'. 
Remove the
+        # suffix
+        return self.tab_id.rsplit('-', 1)[0]
+
+    def share_id(self):
+        # - for sharing tabs, the tab id is "sharing-<share_id>"
+        # - for playlist tabs, the tab id is
+        #   "sharing-<share_id>-<playlist_id>"
+        # This code should work for either
+        return int(self.tab_id.split("-")[1])
+
+    def _fetch_id_list(self):
+        itemtrack.ItemTracker._fetch_id_list(self)
+        self._reset_group_info()
+
+    def _uncache_row_data(self, id_list):
+        itemtrack.ItemTracker._uncache_row_data(self, id_list)
+        # items have changed, so we need to reset all group info
+        self._reset_group_info()
+
+    def _make_base_query(self, tab_type, tab_id):
+        if self.is_for_device():
+            query = itemtrack.DeviceItemTrackerQuery()
+        elif self.is_for_share():
+            query = itemtrack.SharingItemTrackerQuery()
         else:
-            return (1, ) # other
+            query = itemtrack.ItemTrackerQuery()
+
+        if tab_type == 'videos':
+            query.add_condition('file_type', '=', 'video')
+            query.add_condition('deleted', '=', False)
+            if not app.config.get(prefs.SHOW_PODCASTS_IN_VIDEO):
+                self.add_in_podcast_to_query(query)
+        elif tab_type == 'music':
+            if not app.config.get(prefs.SHOW_PODCASTS_IN_MUSIC):
+                self.add_in_podcast_to_query(query)
+            query.add_condition('file_type', '=', 'audio')
+            query.add_condition('deleted', '=', False)
+        elif tab_type == 'others':
+            query.add_condition('file_type', '=', 'other')
+            query.add_condition('deleted', '=', False)
+        elif tab_type == 'search':
+            query.add_condition('feed.orig_url', '=', 'dtv:search')
+        elif tab_type == 'downloading':
+            sql = ("((remote_downloader.state IN ('downloading', 'uploading', "
+                   "'paused', 'uploading-paused', 'offline')) OR "
+                   "(remote_downloader.state = 'failed' AND "
+                   "feed.orig_url = 'dtv:manualFeed') OR "
+                   "pending_manual_download) AND "
+                   "remote_downloader.main_item_id=item.id")
+            columns = ['remote_downloader.state',
+                       'remote_downloader.main_item_id',
+                       'feed.orig_url',
+                       'pending_manual_download']
+            query.add_complex_condition(columns, sql, ())
+        elif tab_type == 'feed':
+            query.add_condition('feed_id', '=', tab_id)
+        elif tab_type == 'feed-folder' and tab_id == 'feed-base-tab':
+            # all feeds tab
+            query.add_condition('feed.orig_url', 'IS NOT', None)
+            query.add_condition('feed.orig_url', '!=', 'dtv:manualFeed')
+            query.add_condition('feed.orig_url', 'NOT LIKE', 'dtv:search%')
+        elif tab_type == 'feed-folder':
+            # NOTE: this also depends on the folder_id column of feed, but we
+            # don't track that in any way.  If that changed while the user was
+            # viewing the display, then they wouldn't see the changes.
+            # However, the only way for this to change is drag and drop, so we
+            # can ignore this.
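
device_id() and share_id() above both recover ids by string-splitting the tab id. A self-contained illustration of the two conventions, with made-up ids (not part of the patch):

    # device tabs: tab_id is "<device_id>-video" or "<device_id>-audio"
    tab_id = "dev42-video"           # hypothetical device tab id
    assert tab_id.rsplit('-', 1)[0] == "dev42"

    # share tabs: "sharing-<share_id>" or "sharing-<share_id>-<playlist_id>";
    # splitting on "-" and taking index 1 yields the share id either way
    tab_id = "sharing-7-playlist"    # hypothetical share playlist tab id
    assert int(tab_id.split("-")[1]) == 7
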
+ sql = ("feed_id in " + "(SELECT feed.id FROM feed WHERE feed.folder_id=?)") + query.add_complex_condition(['feed_id'], sql, (tab_id,)) + elif tab_type == 'folder-contents': + query.add_condition('parent_id', '=', tab_id) + elif tab_type == 'playlist': + query.add_condition('playlist_item_map.playlist_id', '=', tab_id) + elif tab_type == 'device-video': + query.add_condition('file_type', '=', u'video') + elif tab_type == 'device-audio': + query.add_condition('file_type', '=', u'audio') + elif tab_type == 'sharing' and tab_id.startswith("sharing-"): + # browsing a playlist on a share + id_components = tab_id.split("-") + if len(id_components) == 2: + # browsing an entire share, no filters needed + pass + else: + # browsing a playlist + playlist_id = id_components[-1] + if playlist_id == 'audio': + query.add_condition('file_type', '=', u'audio') + elif playlist_id == 'video': + query.add_condition('file_type', '=', u'video') + elif playlist_id == 'podcast': + query.add_condition( + 'sharing_item_playlist_map.playlist_id', '=', + u'podcast') + elif playlist_id == 'playlist': + query.add_condition( + 'sharing_item_playlist_map.playlist_id', '=', + u'playlist') + else: + query.add_condition( + 'sharing_item_playlist_map.playlist_id', '=', + int(playlist_id)) -class ETASort(ItemSort): - KEY = 'eta' - def sort_key(self, info): - if info.state == 'downloading': - eta = info.download_info.eta - if eta > 0: - return eta - elif not self.reverse: - return sys.maxint + elif tab_type == 'sharing': + # browsing an entire share, we don't need any filters on the query + pass + elif tab_type == 'manual': + # for the manual tab, tab_id is a list of ids to play + id_list = tab_id + placeholders = ",".join("?" for i in xrange(len(id_list))) + sql = "item.id IN (%s)" % placeholders + query.add_complex_condition(['id'], sql, id_list) else: - return -sys.maxint + raise ValueError("Can't handle tab (%r, %r)" % (tab_type, tab_id)) + return query -class DownloadRateSort(ItemSort): - KEY = 'rate' - def sort_key(self, info): - if info.state == 'downloading': - return info.download_info.rate - elif not self.reverse: - return sys.maxint + def add_in_podcast_to_query(self, query): + columns = ['feed.orig_url', 'is_file_item'] + sql = ("feed.orig_url IN ('dtv:manualFeed', 'dtv:searchDownloads', " + "'dtv:search') OR is_file_item") + query.add_complex_condition(columns, sql) + + def _make_item_source(self): + if self.is_for_device(): + device_info = app.tabs['connect'].get_tab(self.device_id()) + return item.DeviceItemSource(device_info) + elif self.is_for_share(): + share_info = app.tabs['connect'].get_tab('sharing-%s' % + self.share_id()) + return item.SharingItemSource(share_info) else: - return -1 + return item.ItemSource() -class ArtistSort(ItemSort): - KEY = 'artist' - def sort_key(self, info): - return (info.artist_sort_key, - info.album_sort_key, - info.track) - -class AlbumSort(ItemSort): - KEY = 'album' - def sort_key(self, info): - return (info.album_sort_key, - info.track, - info.artist_sort_key) - -class TrackSort(ItemSort): - KEY = 'track' - def sort_key(self, info): - return (info.track, - info.artist_sort_key, - info.album_sort_key) - -class YearSort(ItemSort): - KEY = 'year' - def sort_key(self, info): - return info.year - -class GenreSort(ItemSort): - KEY = 'genre' - def sort_key(self, info): - return info.genre - -class RatingSort(ItemSort): - KEY = 'rating' - def sort_key(self, info): - return info.rating - -class DRMSort(ItemSort): - KEY = 'drm' - def sort_key(self, info): - return info.has_drm - 
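
_make_query() above layers the active filters, the sorter, and the search text on top of the base query that _make_base_query() assembles from add_condition()/add_complex_condition() calls. The real query class lives in miro.data.itemtrack and is not shown in this diff; the toy below only illustrates how such conditions can accumulate into one parameterized WHERE clause (assumed semantics, not the actual API):

    class ToyQuery(object):
        """Illustrative stand-in for ItemTrackerQuery."""
        def __init__(self):
            self.clauses = []
            self.values = []

        def add_condition(self, column, operator, value):
            # each call contributes one "column OP ?" clause
            self.clauses.append("%s %s ?" % (column, operator))
            self.values.append(value)

        def where_sql(self):
            return " AND ".join(self.clauses)

    q = ToyQuery()
    q.add_condition('file_type', '=', 'video')
    q.add_condition('deleted', '=', False)
    # q.where_sql() == "file_type = ? AND deleted = ?"
    # q.values == ['video', False]
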
-class FileTypeSort(ItemSort): - KEY = 'file-type' - def sort_key(self, info): - return info.file_type - -class TorrentDetailsSort(ItemSort): - KEY = 'torrent-details' - def sort_key(self, info): - return 0 # FIXME - -class DateAddedSort(ItemSort): - KEY = 'date-added' - def sort_key(self, info): - return info.date_added - -class ShowSort(ItemSort): - KEY = 'show' - def sort_key(self, info): - return info.show + def _make_query(self): + query = self.base_query.copy() + self.filter_set.add_to_query(query) + self.sorter.add_to_query(query) + if self.search_text: + query.set_search(self.search_text) + return query + + def _update_query(self): + self.change_query(self._make_query()) + + # sorts/filters/search + def select_filter(self, key): + self.filter_set.select(key) + self._update_query() + + def set_filters(self, filter_keys): + self.filter_set.set_filters(filter_keys) + self._update_query() -class KindSort(ItemSort): - KEY = 'kind' - def sort_key(self, info): - return info.kind + def get_filters(self): + return self.filter_set.active_filters -class PlaylistSort(ItemSort): - """Sort that orders items by their order in the playlist. - """ - KEY = 'playlist' + def add_extension_filters(self, filters): + return self.filter_set.add_extension_filters(filters) - def __init__(self, initial_items=None): - ItemSort.__init__(self, True) - self.reset_current_position() - self.positions = {} - # ascending works weirdly for playlist. We always sort items in - # their playlist order. When the user asks to reverse the sort, we - # reverse the actual order of items. So self.reverse is always false, - # and self.order_is_reversed tracks our internal notion if we've - # reversed the order or not. - self.order_is_reversed = False - self.reverse = False - if initial_items: - for item in initial_items: - self.positions[item.id] = self.current_postion.next() - - def reset_current_position(self): - self.current_postion = itertools.count() - - def set_ascending(self, ascending): - self.reverse = not ascending - - def add_items(self, item_list): - if not self.order_is_reversed: - self.add_items_at_end(item_list) - else: - self.add_items_at_start(item_list) + def set_sort(self, sorter): + self.sorter = sorter + self._update_query() - def add_items_at_end(self, item_list): - for item in item_list: - if item.id not in self.positions: - self.positions[item.id] = self.current_postion.next() - - def add_items_at_start(self, item_list): - new_items = [i for i in item_list if i.id not in self.positions] - new_count = len(new_items) - # move current positions forward - for key, pos in self.positions.iteritems(): - self.positions[key] = pos + new_count - # add new positions - for pos, item in enumerate(new_items): - self.positions[item.id] = pos - # fix current_postion - self.current_postion = itertools.count(len(self.positions)) - - def forget_items(self, id_list): - for id in id_list: - del self.positions[id] - new_items = self.positions.items() - new_items.sort(key=lambda row: row[1]) - self.reset_current_position() - self.positions = {} - for id, old_position in new_items: - self.positions[id] = self.current_postion.next() - - def should_reverse_order(self, ascending): - """Should we reverse the playlist order? - - This method is called after the user clicks on a sort header. If the - sort header direction is different then our internal notion of if we - are reversed or not, then we return true. 
-        """
-        return ascending == self.order_is_reversed
 
+    def set_search(self, search_text):
+        self.search_text = search_text
+        self._update_query()
 
-    def reverse_order(self):
-        """Reverse the order of our playlist
+    def refresh_query(self):
+        self.base_query = self._make_base_query(self.tab_type, self.tab_id)
+        self._update_query()
 
-        """
-        last_position = len(self.positions) - 1
-        new_order = [None] * len(self.positions)
-        for id_ in self.positions:
-            index = last_position - self.positions[id_]
-            self.positions[id_] = index
-            new_order[index] = id_
-        self.order_is_reversed = not self.order_is_reversed
-        return new_order
-
-    def set_new_order(self, id_order):
-        self.reset_current_position()
-        self.positions = dict((id, self.current_postion.next())
-                              for id in id_order)
+    # attributes
+    def set_attr(self, item_id, name, value):
+        self.item_attributes[item_id][name] = value
 
-    def move_ids_before(self, before_id, id_list):
-        """Move ids around in the position list
+    def get_attr(self, item_id, name, default=None):
+        return self.item_attributes[item_id].get(name, default)
 
-        The ids in id_list will be placed before before_id.  If before_id is
-        None, then they will be placed at the end of the list.
+    def get_attrs(self, item_id):
+        return self.item_attributes[item_id]
 
-        :returns: new sort order as a list of ids
-        """
+    def unset_attr(self, item_id, name):
+        if name in self.item_attributes[item_id]:
+            del self.item_attributes[item_id][name]
 
-        # calculate order of ids not in id_list
-        moving = set(id_list)
-        new_order = [id_ for id_ in self.positions if id_ not in moving]
-        new_order.sort(key=lambda id_: self.positions[id_])
-        # insert id_list into new_order
-        if before_id is not None:
-            insert_pos = new_order.index(before_id)
-            new_order[insert_pos:insert_pos] = id_list
-        else:
-            new_order.extend(id_list)
-        self.set_new_order(new_order)
-        return new_order
+    # grouping
+    def get_group_info(self, row):
+        """Get the info about the group an info is inside.
 
-    def sort_key(self, item):
-        try:
-            return self.positions[item.id]
-        except KeyError:
-            # Something wrong happened and the item is not in our internal
-            # list.  Let's add it to the end to prevent endless crash reports.
-            self.add_items([item])
-            app.widgetapp.handle_soft_failure("getting playlist sort key",
-                    'Key Error: %s' % item.id, with_exception=True)
-        return self.positions[item.id]
+        This method fetches the index of the info inside the group, the total
+        size of the group, and the first info in the group.
 
-    def items_will_change(self, added, changed, removed):
-        self.add_items(added)
+        :returns: an (index, count, first_info) tuple
+        :raises ValueError: if no grouping is set
+        """
+        if self.group_func is None:
+            raise ValueError("no grouping set")
+        if self.group_info[row] is ItemList.NOT_CALCULATED:
+            self._calc_group_info(row)
+        return self.group_info[row]
 
-    def items_removed_from_source(self, removed):
-        self.forget_items(removed)
+    def get_group_top(self, item_id):
+        """Get the first info for an item's group.
 
-DEFAULT_SORT = ArtistSort(False)
+        :param item_id: id of an item in the group
+        """
+        row = self.get_index(item_id)
+        index, count, first_info = self.get_group_info(row)
+        return first_info
+
+    def get_grouping(self):
+        """Get the function set with set_grouping."""
+        return self.group_func
+
+    def set_grouping(self, func):
+        """Set a grouping function for this info list.
+
+        Grouping functions input info objects and return values that will be
+        used to segment the list into groups. 
Adjacent infos with the same + grouping value are part of the same group. -SORT_KEY_MAP = dict((sort.KEY, sort) for sort in ItemSort.__subclasses__()) + get_group_info() can be used to find the position of an info inside + its group. + """ + self.group_func = func + self._reset_group_info() -class ItemList(object): - """ - Attributes: + def _reset_group_info(self): + self.group_info = [ItemList.NOT_CALCULATED] * len(self) - model -- TableModel for this item list. It contains these columns: - * ItemInfo (object) - * show_details flag (boolean) - * counter used to change the progress throbber (integer) - - video_only -- Are we only displaying videos? - audio_only -- Are we only displaying audio? - new_only -- Are we only displaying the new items? - unwatched_only -- Are we only displaying the unwatched items? - downloaded_only -- Are we only displaying the downloaded items? - non_feed_only -- Are we only displaying file items? - resort_on_update -- Should we re-sort the list when items change? + def _calc_group_info(self, row): + # FIXME: for normal item lists, this is fairly fast, but it is slow in + # a specific case: + # + # When the group function returns the same value for many items and + # those items need to be loaded. This actually can happen pretty + # easily when you add a bunch of music files to miro, and we haven't + # run mutagen on them yet. In that case, when you first switch to the + # music tab, basically all items will be in the same group. + key = self.group_func(self.get_row(row)) + if key is None: + # if group_func returns None, then put this item in a group by + # itself. + self.group_info[row] = (0, 1, self.get_row(row)) + return + start = end = row + while (start > 0 and + self.group_func(self.get_row(start-1)) == key): + start -= 1 + while (end < len(self) - 1 and + self.group_func(self.get_row(end+1)) == key): + end += 1 + total = end - start + 1 + for row in xrange(start, end+1): + self.group_info[row] = (row-start, total, self.get_row(start)) + +class ItemTrackerUpdater(object): + """Keep a list of ItemTrackers and call on_item_changes when needed. + + Note that this class is mostly used for ItemList objects, which works + since it derives from ItemTracker. However, it can also be used for raw + ItemTrackers. 
""" def __init__(self): - self._sorter = DEFAULT_SORT - self.model = widgetset.InfoListModel(self._sorter.sort_key, - self._sorter.reverse) - self.video_only = self.audio_only = False - self.movies_only = self.shows_only = False - self.clips_only = self.podcasts_only = False - self.new_only = False - self.unwatched_only = False - self.downloaded_only = False - self.non_feed_only = False - self.resort_on_update = False - self._hidden_items = {} - self._filter = WidgetStateStore.get_view_all_filter() - # maps ids -> items that should be in this list, but are filtered out - # for some reason - - def set_sort(self, sorter): - self._sorter = sorter - self.model.change_sort(sorter.sort_key, sorter.reverse) - - def resort(self): - self.set_sort(self._sorter) + self.trackers = set() + self.device_trackers = set() + self.sharing_trackers = set() + + def _set_for_tracker(self, item_tracker): + source_type_map = { + item.ItemSource: self.trackers, + item.DeviceItemSource: self.device_trackers, + item.SharingItemSource: self.sharing_trackers, + } + return source_type_map[type(item_tracker.item_source)] - def get_sort(self): - return self._sorter + def add_tracker(self, item_tracker): + self._set_for_tracker(item_tracker).add(item_tracker) - def get_count(self): - """Get the number of items in this list that are displayed.""" - return len(self.model) - - def get_hidden_count(self): - """Get the number of items in this list that are hidden.""" - return len(self._hidden_items) - - def get_item(self, id): - return self.model.get_info(id) - - def get_iter(self, id_): - """Get a TableView iter object for an id.""" - return self.model.iter_for_id(id_) - - def get_items(self, start_id=None): - """Get a list of ItemInfo objects in this list""" - rv = self.model.info_list() - if start_id is not None: - for idx in xrange(len(rv)): - if rv[idx].id == start_id: - break - return rv[idx:] - return rv - - def iter_items(self, start_id=None): - """Iterate through ItemInfo objects in this list""" - info_list = self.model.info_list() - if start_id is not None: - for start_id_index in xrange(len(info_list)): - if info_list[start_id_index].id == start_id: - break - else: - start_id_index = 0 - for i in xrange(start_id_index, len(info_list)): - yield info_list[i] - - def __iter__(self): - return self.iter_items() - - def filter(self, item_info): - """Can be overrided by subclasses to filter out items from the list. 
- """ - return True - - def _should_show_item(self, item_info): - if not self.filter(item_info): - return False - return (not (self.new_only and item_info.item_viewed) and - not (self.unwatched_only and - not app.playback_manager.is_playing_item(item_info) and - (item_info.video_path is None or - item_info.video_watched)) and - not (self.downloaded_only and - item_info.video_path is None) and - not (self.non_feed_only and (not item_info.is_external and - item_info.feed_url != 'dtv:searchDownloads')) and - not (self.video_only and item_info.file_type != 'video') and - not (self.audio_only and item_info.file_type != 'audio') and - not (self.movies_only and item_info.kind != 'movie') and - not (self.shows_only and item_info.kind != 'show') and - not (self.clips_only and item_info.kind != 'clip') and - not (self.podcasts_only and item_info.kind != 'podcast')) - - def set_show_details(self, item_id, value): - """Change the show details value for an item""" - try: - self.model.set_attr(item_id, 'show-details', value) - except KeyError: - pass - - def update_throbber(self, item_id): - """Update the throbber count for an item. - - raises a KeyError if item_id is not in the model. - """ + def remove_tracker(self, item_tracker): try: - counter = self.model.get_attr(item_id, 'throbber-value') + self._set_for_tracker(item_tracker).remove(item_tracker) except KeyError: - counter = 0 - self.model.set_attr(item_id, 'throbber-value', counter + 1) + logging.warn("KeyError in ItemTrackerUpdater.remove_tracker") - def finish_throbber(self, item_id): - self.model.unset_attr(item_id, 'throbber-value') + def on_item_changes(self, message): + for tracker in self.trackers: + tracker.on_item_changes(message) + + def on_device_item_changes(self, message): + for tracker in self.device_trackers: + tracker.on_item_changes(message) + + def on_sharing_item_changes(self, message): + for tracker in self.sharing_trackers: + tracker.on_item_changes(message) + +class ItemListPool(object): + """Pool of ItemLists that the frontend is using. + + This class keeps track of all active ItemList objects so that we can avoid + creating 2 ItemLists for the same tab. This helps with performance + because we don't have to process the ItemChanges message twice. Also, we + want changes to the item list to be shared. For example, if a user is + playing items from a given tab and they change the filters on that tab, we + want the PlaybackPlaylist to reflect those changes. + """ + def __init__(self): + self.all_item_lists = set() + self._refcounts = {} - def _insert_items(self, to_add): - if len(to_add) == 0: - return - self.model.add_infos(to_add) + def get(self, tab_type, tab_id, sort=None, group_func=None, filters=None, + search_text=None): + """Get an ItemList to use. - def add_items(self, item_list): - to_add = [] - for item in item_list: - if self._should_show_item(item): - to_add.append(item) - else: - self._hidden_items[item.id] = item - self._insert_items(to_add) + This method will first try to re-use an existing ItemList from the + pool. If it can't, then a new ItemList will be created. 
- def update_items(self, changed_items): - to_add = [] - to_remove = [] - to_update = [] - for info in changed_items: - should_show = self._should_show_item(info) - if info.id in self._hidden_items: - # Item not already displayed - if should_show: - to_add.append(info) - del self._hidden_items[info.id] - else: - self._hidden_items[info.id] = info - else: - # Item already displayed - if not should_show: - to_remove.append(info.id) - self._hidden_items[info.id] = info - else: - to_update.append(info) - self._insert_items(to_add) - self.model.update_infos(to_update, resort=self.resort_on_update) - self.model.remove_ids(to_remove) - - def remove_items(self, id_list): - ids_in_model = [] - for id_ in id_list: - if id_ in self._hidden_items: - del self._hidden_items[id_] - else: - ids_in_model.append(id_) - self.model.remove_ids(ids_in_model) + sort, group_func, and filters are only used if a new ItemList is + created. - def remove_all(self): - """Remove items from the list.""" - self.model.remove_all() - self._hidden_items = {} - - def set_new_only(self, new_only): - """Set if only new items are to be displayed (default False).""" - self.new_only = new_only - self._recalculate_hidden_items() - - def toggle_filter(self, filter_): - self._filter = WidgetStateStore.toggle_filter(self._filter, filter_) - self.video_only = WidgetStateStore.is_view_video_filter(self._filter) - self.audio_only = WidgetStateStore.is_view_audio_filter(self._filter) - self.movies_only = WidgetStateStore.is_view_movies_filter(self._filter) - self.shows_only = WidgetStateStore.is_view_shows_filter(self._filter) - self.clips_only = WidgetStateStore.is_view_clips_filter(self._filter) - self.podcasts_only = WidgetStateStore.is_view_podcasts_filter(self._filter) - self.unwatched_only = WidgetStateStore.has_unwatched_filter( - self._filter) - self.downloaded_only = WidgetStateStore.has_downloaded_filter( - self._filter) - self.non_feed_only = WidgetStateStore.has_non_feed_filter( - self._filter) - self._recalculate_hidden_items() - - def _recalculate_hidden_items(self): - info_list_at_start = self.model.info_list() - - newly_matching = [] - for item in self._hidden_items.values(): - if self._should_show_item(item): - newly_matching.append(item) - del self._hidden_items[item.id] - self._insert_items(newly_matching) - - newly_unmatching_ids = [] - for item in info_list_at_start: - if not self._should_show_item(item): - newly_unmatching_ids.append(item.id) - self._hidden_items[item.id] = item - self.model.remove_ids(newly_unmatching_ids) - - def move_items(self, insert_before, item_ids): - """Move a group of items inside the list. - - The items for item_ids will be positioned before insert_before. - insert_before should be an iterator, or None to position the items at - the end of the list. + :returns: ItemList object. When you are done with it, you must pass + the ItemList to the release() method. 
""" - if insert_before is not None: - insert_before_id = insert_before.id + if tab_type != u'manual': + for obj in self.all_item_lists: + if obj.tab_type == tab_type and obj.tab_id == tab_id: + self._refcounts[obj] += 1 + return obj + # no existing list found, make new list + new_list = ItemList(tab_type, tab_id, sort, group_func, filters, + search_text) + self.all_item_lists.add(new_list) + app.item_tracker_updater.add_tracker(new_list) + self._refcounts[new_list] = 1 + return new_list + + def add_ref(self, item_list): + """Add a reference to an existing ItemList + + Use this method if you are given an ItemList by another component and + intend on keeping it around. The ItemList will stay in the poll until + both components call release() + """ + if item_list in self._refcounts: + self._refcounts[item_list] += 1 else: - insert_before_id = None - self.model.move_before(insert_before_id, list(item_ids)) - -class IndividualDownloadItemList(ItemList): - """ItemList that only displays single downloads items. - - Used in the downloads tab.""" - def filter(self, item_info): - return (item_info.is_external - and not (item_info.download_info - and item_info.download_info.state in ( - 'uploading', 'uploading-paused'))) - -class ChannelDownloadItemList(ItemList): - """ItemList that only displays channel downloads items. - - Used in the downloads tab.""" - def filter(self, item_info): - return (not item_info.is_external - and not (item_info.download_info - and item_info.download_info.state in ( - 'uploading', 'uploading-paused'))) - -class SeedingItemList(ItemList): - """ItemList that only displays seeding items. - - Used in the downloads tab.""" - def filter(self, item_info): - return (item_info.download_info - and item_info.download_info.state in ('uploading', - 'uploading-paused')) - -class DownloadingItemList(ItemList): - """ItemList that only displays downloading items.""" - def filter(self, item_info): - return (item_info.download_info - and not item_info.download_info.finished - and not item_info.download_info.state == 'failed') - -class ConvertingItemList(ItemList): - """ItemList that displays items being converted.""" - def filter(self, item_info): - return item_info.converting - -class DownloadedItemList(ItemList): - """ItemList that only displays downloaded items.""" - def filter(self, item_info): - return (item_info.download_info and - item_info.download_info.finished) - -class _ItemReorderer(object): - """Handles re-ordering items inside an itemlist. + raise ValueError("%s has already been released" % item_list) - This object is just around for utility sake. It's only created to track - the state during the call to ItemList.move_items() - """ + def release(self, item_list): + """Release an item list. - def __init__(self): - self.removed_rows = [] + Call this when you're done using an ItemList. Once this has been + called for each time the list has been returned from get(), then that + list will be removed from the pool and no longer get callbacks for the + ItemChanges message. 
+ """ + self._refcounts[item_list] -= 1 + if self._refcounts[item_list] <= 0: + self.all_item_lists.remove(item_list) + del self._refcounts[item_list] + item_list.destroy() + app.item_tracker_updater.remove_tracker(item_list) + +# grouping functions +def album_grouping(info): + """Grouping function that groups infos by albums.""" + if (info.album_artist_sort_key != (u'',) or + info.album_sort_key != (u'',)): + return (info.album_artist_sort_key, info.album_sort_key) + else: + return None + +def feed_grouping(info): + """Grouping function that groups infos by their feed.""" + return info.feed_id - def calc_insert_id(self, model): - if self.insert_iter is not None: - self.insert_id = model[self.insert_iter][0].id - else: - self.insert_id = None +def video_grouping(info): + """Grouping function that groups infos for the videos tab. - def reorder(self, model, insert_iter, ids): - self.insert_iter = insert_iter - self.calc_insert_id(model) - self.remove_rows(model, ids) - return self.put_rows_back(model) - - def remove_row(self, model, iter, row): - self.removed_rows.append(row) - if row[0].id == self.insert_id: - self.insert_iter = model.next_iter(self.insert_iter) - self.calc_insert_id(model) - return model.remove(iter) - - def remove_rows(self, model, ids): - # iterating through the entire table seems inefficient, but we have to - # know the order of rows so we can insert them back in the right - # order. - iter = model.first_iter() - while iter is not None: - row = model[iter] - if row[0].id in ids: - # need to make a copy of the row data, since we're removing it - # from the table - iter = self.remove_row(model, iter, tuple(row)) - else: - iter = model.next_iter(iter) + For this group, we try to figure out what "show" the item is in. If the + user has set a show we use that, otherwise we use the podcast. 
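
album_grouping, feed_grouping, and video_grouping all follow the contract described at set_grouping(): return the same value for adjacent infos that belong together, or None to put an info in a group of its own. A further example in the same shape; 'genre' was an ItemInfo attribute in the 4.0 sort code and is used here purely for illustration:

    def genre_grouping(info):
        """Group infos by their genre; items without a genre stand alone."""
        if info.genre:
            return info.genre
        return None
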
- def put_rows_back(self, model): - if self.insert_iter is None: - def put_back(moved_row): - return model.append(*moved_row) - else: - def put_back(moved_row): - return model.insert_before(self.insert_iter, *moved_row) - retval = {} - for removed_row in self.removed_rows: - iter = put_back(removed_row) - retval[removed_row[0].id] = iter - return retval + """ + if info.show is not None: + return info.show + elif info.parent_title is not None: + return info.parent_title_for_sort + else: + return None diff -Nru miro-4.0.4/lib/frontends/widgets/itemlistwidgets.py miro-6.0/lib/frontends/widgets/itemlistwidgets.py --- miro-4.0.4/lib/frontends/widgets/itemlistwidgets.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/itemlistwidgets.py 2013-04-05 16:02:42.000000000 +0000 @@ -48,10 +48,9 @@ from miro import prefs from miro import displaytext from miro import util -from miro import eventloop from miro.gtcache import gettext as _ -from miro.gtcache import declarify from miro.frontends.widgets import imagepool +from miro.frontends.widgets import itemfilter from miro.frontends.widgets import style from miro.frontends.widgets import widgetconst from miro.frontends.widgets import widgetutil @@ -59,6 +58,7 @@ from miro.frontends.widgets import separator from miro.frontends.widgets.widgetstatestore import WidgetStateStore from miro.plat import resources +from miro.plat.frontends.widgets import timer from miro.plat.frontends.widgets import widgetset from miro.plat.frontends.widgets import use_upside_down_sort from miro.plat.utils import get_available_bytes_for_movies @@ -83,14 +83,12 @@ self.set_size_request(-1, self.HEIGHT) class TogglerButton(widgetset.CustomButton): - LEFT = 0 - RIGHT = 1 - def __init__(self, image_name, pos): + + def __init__(self, image_name): widgetset.CustomButton.__init__(self) self.set_can_focus(False) self.state = 'normal' self._enabled = False - self._pos = pos self.surface = imagepool.get_surface( resources.path('images/%s.png' % image_name)) self.active_surface = imagepool.get_surface( @@ -99,12 +97,9 @@ resources.path('images/%s_pressed.png' % image_name)) self.current_surface = self.surface - def do_size_request(self): - return (max(self.surface.width, self.active_surface.width), - max(self.surface.height, self.active_surface.height)) - def size_request(self, layout): - return self.do_size_request() + # NOTE: this assumes all surfaces are the same size. + return self.surface.get_size() def set_pressed(self, pressed): self._enabled = pressed @@ -122,44 +117,48 @@ self.current_surface = surface - # XXX Working on the basis of LEFT/RIGHT only does not allow toggles - # with multiple states. 
- w = int(surface.width) - h = int(surface.height) - y = 0 - if self._pos == TogglerButton.RIGHT: - x = 0 - else: - x = int(self.do_size_request()[0] - surface.width) - surface.draw(context, x, y, w, h) + surface.draw(context, 0, 0, surface.width, surface.height) class ViewToggler(widgetset.HBox): def __init__(self): widgetset.HBox.__init__(self) self.create_signal('normal-view-clicked') + self.create_signal('album-view-clicked') self.create_signal('list-view-clicked') self.selected_view = WidgetStateStore.get_standard_view_type() self.togglers = dict() standard_view = WidgetStateStore.get_standard_view_type() list_view = WidgetStateStore.get_list_view_type() + album_view = WidgetStateStore.get_album_view_type() self.toggler_events = dict() self.toggler_events[standard_view] = 'normal-view-clicked' + self.toggler_events[album_view] = 'album-view-clicked' self.toggler_events[list_view] = 'list-view-clicked' - self.togglers[standard_view] = TogglerButton('standard-view', - TogglerButton.LEFT) - self.togglers[list_view]= TogglerButton('list-view', - TogglerButton.RIGHT) + self.togglers[standard_view] = TogglerButton('standard-view') + self.togglers[album_view]= TogglerButton('album-view') + self.togglers[list_view]= TogglerButton('list-view') for t in self.togglers.values(): t.connect('clicked', self.on_clicked) self.togglers[self.selected_view].set_pressed(True) self.pack_start(self.togglers[standard_view]) + self.pack_start(self.togglers[album_view]) self.pack_start(self.togglers[list_view]) + def hide_album_view(self): + album_view = WidgetStateStore.get_album_view_type() + if self.togglers[album_view] in self.children: + self.remove(self.togglers[album_view]) + + def show_album_view(self): + album_view = WidgetStateStore.get_album_view_type() + if self.togglers[album_view] not in self.children: + self.pack_start(self.togglers[album_view]) + def size_request(self, layout): width = sum([w.size_request()[0] for w in self.togglers.values()]) return w, -1 @@ -409,8 +408,8 @@ def show(self): if self.button_shown: return - self.add(self.resume_button) self.button_shown = True + self.add(self.resume_button) # set our width request to the min-width of the button. This way our # parent only reserves that amount of space for us. self.set_size_request(self.resume_button.MIN_WIDTH + self.LEFT_PAD + @@ -430,6 +429,48 @@ width_available = width - self.LEFT_PAD - self.RIGHT_PAD self.resume_button.set_width_available(width_available) +class ItemFilterBox(widgetset.HBox): + """Pack filter buttons for the titlebar. 
""" + + def __init__(self): + widgetset.HBox.__init__(self, spacing=10) + self.create_signal('filter-clicked') + # map filter keys -> FilterButton objects + self.button_map = {} + self.filters = set() + + def set_filters(self, filter_set): + for key, button in self.button_map.iteritems(): + enabled = (key in filter_set) + self.button_map[key].set_enabled(enabled) + + def add_filter(self, key): + if key in self.filters: + raise ValueError("Already added filter %s" % key) + # lookup label using itemfilter + try: + label = itemfilter.get_label(key) + except KeyError: + logging.warn("KeyError when looking up item filter: %s" % key) + return + # enable the first button added + if not self.button_map: + enabled = True + else: + enabled = False + button = FilterButton(label, enabled=enabled) + button.connect('clicked', self._on_filter_clicked, key) + self.pack_start(button) + self.button_map[key] = button + self.filters.add(key) + + def add_filters(self, key_list): + for key in key_list: + self.add_filter(key) + + def _on_filter_clicked(self, button, filter_key): + self.emit("filter-clicked", filter_key) + class ItemListTitlebar(Titlebar): """Titlebar for feeds, playlists and static tabs that display items. @@ -446,13 +487,16 @@ def __init__(self): Titlebar.__init__(self) self.create_signal('resume-playing') + self.create_signal('filter-clicked') hbox = widgetset.HBox() self.add(hbox) # Pack stuff to the right before_filters = self._build_before_filters() if before_filters: hbox.pack_start(before_filters) - self.filter_box = widgetset.HBox(spacing=10) + self.filter_box = ItemFilterBox() + self.filter_box.connect('filter-clicked', self._on_filter_clicked) + self.filter_box.add_filters(self.get_filters()) self.filter_box_holder = widgetutil.HideableWidget( widgetutil.align_middle(self.filter_box, left_pad=15)) self.filter_box_holder.show() @@ -475,9 +519,6 @@ self.resume_button_holder = ResumeButtonHolder(self.resume_button) hbox.pack_start(self.resume_button_holder, expand=True) - self.filters = {} - self.setup_filters() - self.calculate_width_requests() # force our size request to be the smaller of the two. If we get # allocated less than normal_width_needed, we will go into small width @@ -485,28 +526,32 @@ self.set_size_request(self.small_width_needed, self.HEIGHT) self.in_small_width_mode = False - def setup_filters(self): - """Add filters that we want to show. + def set_filters(self, filter_keys): + self.filter_box.set_filters(filter_keys) + + def get_filters(self): + """Get the list of filters to show. - Subclasses can define this to add filters to the titlebar. + By default we return no filters. Subclasses can override this if they + want to display filters. """ - pass + return [] + + def _on_filter_clicked(self, filterbox, filter_key): + # just forward the signal on + self.emit("filter-clicked", filter_key) def calculate_width_requests(self): """Calculate the width required for normal mode and small-width mode. """ self.set_small_width_mode(True) - # FIXME: this is a hack for the OS X size request system. 
We should - # probably just make get_size_request() always work - self.clear_size_request_cache() self.small_width_needed = self.get_size_request()[0] self.set_small_width_mode(False) if self.uses_resume_button: # force the resume button to be included in the size request, if # we will show it self.resume_button_holder.show() - self.clear_size_request_cache() self.normal_width_needed = self.get_size_request()[0] if self.uses_resume_button: self.resume_button_holder.hide() @@ -514,8 +559,8 @@ def do_size_allocated(self, width, height): should_use_small_width = (width < self.normal_width_needed) if should_use_small_width != self.in_small_width_mode: - self.set_small_width_mode(should_use_small_width) self.in_small_width_mode = should_use_small_width + self.set_small_width_mode(should_use_small_width) def set_small_width_mode(self, enabled): """Called when we should change into/out of small width mode @@ -564,14 +609,18 @@ left_pad=15) def _build_view_toggle(self): - self.create_signal('list-view-clicked') - self.create_signal('normal-view-clicked') self.view_toggler = ViewToggler() - self.view_toggler.connect('list-view-clicked', self._on_list_clicked) - self.view_toggler.connect('normal-view-clicked', - self._on_normal_clicked) + # forward signals from view_toggler + for signal_name in ('list-view-clicked', 'normal-view-clicked', + 'album-view-clicked'): + self.create_signal(signal_name) + self.view_toggler.connect(signal_name, + self._forward_view_toggler_signal, signal_name) return self.view_toggler + def _forward_view_toggler_signal(self, view_toggler, signal_name): + self.emit(signal_name) + def _on_resume_button_clicked(self, button): self.emit('resume-playing') @@ -582,15 +631,15 @@ def _on_search_changed(self, searchbox): self.emit('search-changed', searchbox.get_text()) - def _on_normal_clicked(self, button): - self.emit('normal-view-clicked') - - def _on_list_clicked(self, button): - self.emit('list-view-clicked') - def switch_to_view(self, view): self.view_toggler.switch_to_view(view) + def show_album_view_button(self): + self.view_toggler.show_album_view() + + def hide_album_view_button(self): + self.view_toggler.hide_album_view() + def set_title(self, title): self.title_drawer = title self.title_drawer.queue_redraw() @@ -604,24 +653,6 @@ def start_editing_search(self, text): self.searchbox.start_editing(text) - def toggle_filter(self, filter_): - # implemented by subclasses - pass - - def add_filter(self, name, signal_name, signal_param, label): - if not self.filters: - enabled = True - else: - enabled = False - self.create_signal(signal_name) - def callback(button): - self.emit(signal_name, signal_param) - button = FilterButton(label, enabled=enabled) - button.connect('clicked', callback) - self.filter_box.pack_start(button) - self.filters[name] = button - return button - class FolderContentsTitlebar(ItemListTitlebar): def _build_before_filters(self): self.create_signal('podcast-clicked') @@ -735,95 +766,16 @@ self.save_button_holder.show() self.emit('search-changed', searchbox.get_text()) -class VideoAudioFilterMixin(object): - def setup_filters(self): - view_video = WidgetStateStore.get_view_video_filter() - view_audio = WidgetStateStore.get_view_audio_filter() - self.add_filter('view-video', 'toggle-filter', view_video, - _('Video')) - self.add_filter('view-audio', 'toggle-filter', view_audio, - _('Audio')) - - def toggle_filter(self): - view_video = WidgetStateStore.is_view_video_filter(self.filter) - view_audio = WidgetStateStore.is_view_audio_filter(self.filter) - 
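
calculate_width_requests() measures the titlebar once per mode, and the reordered do_size_allocated() now updates the in_small_width_mode flag before applying the change. A stand-in sketch of that responsive-width pattern, with no real widget toolkit behind it:

# Two-mode width fallback: measure both modes up front, then flip modes
# whenever the allocated width drops below the normal-mode requirement.
# ResponsiveTitlebarSketch is illustrative, not Miro's Titlebar.
class ResponsiveTitlebarSketch(object):
    def __init__(self, normal_width_needed):
        self.normal_width_needed = normal_width_needed
        self.in_small_width_mode = False

    def do_size_allocated(self, width, height):
        should_use_small = width < self.normal_width_needed
        if should_use_small != self.in_small_width_mode:
            # update the flag first so set_small_width_mode() observes
            # the new state, mirroring the reordering in this hunk
            self.in_small_width_mode = should_use_small
            self.set_small_width_mode(should_use_small)

    def set_small_width_mode(self, enabled):
        # subclasses shorten labels or hide optional widgets here
        pass
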
self.filters['view-video'].set_enabled(view_video) - self.filters['view-audio'].set_enabled(view_audio) - -class UnplayedFilterMixin(object): - def setup_filters(self): - unwatched = WidgetStateStore.get_unwatched_filter() - self.add_filter('only-unplayed', 'toggle-filter', unwatched, - _('Unplayed')) - - def toggle_filter(self): - unwatched = WidgetStateStore.has_unwatched_filter(self.filter) - self.filters['only-unplayed'].set_enabled(unwatched) - -class DownloadedUnplayedFilterMixin(UnplayedFilterMixin): - def setup_filters(self): - downloaded = WidgetStateStore.get_downloaded_filter() - self.add_filter('only-downloaded', 'toggle-filter', downloaded, - _('Downloaded')) - UnplayedFilterMixin.setup_filters(self) - - def toggle_filter(self): - downloaded = WidgetStateStore.has_downloaded_filter(self.filter) - self.filters['only-downloaded'].set_enabled(downloaded) - UnplayedFilterMixin.toggle_filter(self) - -class FilteredTitlebar(ItemListTitlebar): - def setup_filters(self): - # this "All" is different than other "All"s in the codebase, so it - # needs to be clarified - view_all = WidgetStateStore.get_view_all_filter() - self.add_filter('view-all', 'toggle-filter', view_all, - declarify(_('View|All'))) - self.filter = view_all - - def toggle_filter(self, filter_): - self.filter = WidgetStateStore.toggle_filter(self.filter, filter_) - view_all = WidgetStateStore.is_view_all_filter(self.filter) - self.filters['view-all'].set_enabled(view_all) - -class MediaTitlebar(SearchTitlebar, FilteredTitlebar): +class MediaTitlebar(SearchTitlebar): def save_search_title(self): return _('Save as Playlist') def save_search_icon(self): return 'save-as-playlist' -# Note that this is not related to VideoAudioFilterMixin. -# VideoAudioFilterMixin adds video and audio filtering, -# while VideosTitlebar is the static video tab. 
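
_build_view_toggle() above creates the three view signals and re-emits whatever the embedded ViewToggler fires, so callers only ever connect to the titlebar. A self-contained sketch of that re-emit idiom; the Emitter class is a stand-in, not Miro's signals.SignalEmitter:

# Stand-in emitter demonstrating the signal-forwarding idiom: the parent
# creates the same signal names as the child and forwards them.
class Emitter(object):
    def __init__(self, *names):
        self._handlers = dict((name, []) for name in names)

    def connect(self, name, func, *extra_args):
        self._handlers[name].append((func, extra_args))

    def emit(self, name):
        for func, extra_args in self._handlers[name]:
            func(self, *extra_args)

VIEW_SIGNALS = ('normal-view-clicked', 'album-view-clicked',
                'list-view-clicked')

class TitlebarSketch(Emitter):
    def __init__(self, toggler):
        Emitter.__init__(self, *VIEW_SIGNALS)
        for name in VIEW_SIGNALS:
            toggler.connect(name, self._forward, name)

    def _forward(self, toggler, name):
        self.emit(name)

toggler = Emitter(*VIEW_SIGNALS)
titlebar = TitlebarSketch(toggler)
titlebar.connect('album-view-clicked', lambda tb: None)
toggler.emit('album-view-clicked')  # reaches the titlebar's subscriber
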
class VideosTitlebarMixin(object): - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - view_all = WidgetStateStore.get_view_all_filter() - view_movies = WidgetStateStore.get_view_movies_filter() - view_shows = WidgetStateStore.get_view_shows_filter() - view_clips = WidgetStateStore.get_view_clips_filter() - view_podcasts = WidgetStateStore.get_view_podcasts_filter() - - self.add_filter('view-movies', 'toggle-filter', view_movies, - _('Movies')) - self.add_filter('view-shows', 'toggle-filter', view_shows, - _('Shows')) - self.add_filter('view-clips', 'toggle-filter', view_clips, - _('Clips')) - self.add_filter('view-podcasts', 'toggle-filter', view_podcasts, - _('Podcasts')) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - view_movies = WidgetStateStore.is_view_movies_filter(self.filter) - view_shows = WidgetStateStore.is_view_shows_filter(self.filter) - view_clips = WidgetStateStore.is_view_clips_filter(self.filter) - view_podcasts = WidgetStateStore.is_view_podcasts_filter(self.filter) - self.filters['view-movies'].set_enabled(view_movies) - self.filters['view-shows'].set_enabled(view_shows) - self.filters['view-clips'].set_enabled(view_clips) - self.filters['view-podcasts'].set_enabled(view_podcasts) + def get_filters(self): + return ('all', 'movies', 'shows', 'clips', 'podcasts') def set_small_width_mode(self, enabled): if enabled: @@ -835,60 +787,36 @@ class VideosTitlebar(VideosTitlebarMixin, MediaTitlebar): pass -class DeviceVideosTitlebar(VideosTitlebarMixin, FilteredTitlebar): +class DeviceVideosTitlebar(VideosTitlebarMixin, ItemListTitlebar): pass # This is the same as the videos titlebar (with all the filters etc) except # we don't let saving as a playlist (because everything here is transient). 
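
In place of the removed mixin stack, each titlebar now simply names its filter keys in get_filters(), and ItemFilterBox turns those keys into buttons, resolving labels through itemfilter and enabling the first key by default. A reduced sketch of that declarative shape; LABELS stands in for itemfilter.get_label():

# Reduced sketch of the declarative filter pattern: titlebars list keys,
# the box owns the buttons. LABELS stands in for itemfilter.get_label().
LABELS = {'all': 'All', 'downloaded': 'Downloaded', 'unplayed': 'Unplayed'}

class FilterBoxSketch(object):
    def __init__(self, keys):
        # the first button added starts enabled, as in ItemFilterBox
        self.enabled = dict((key, i == 0) for i, key in enumerate(keys))
        self.labels = dict((key, LABELS[key]) for key in keys)

    def set_filters(self, filter_set):
        for key in self.enabled:
            self.enabled[key] = key in filter_set

box = FilterBoxSketch(('all', 'downloaded', 'unplayed'))
assert box.enabled['all'] and not box.enabled['unplayed']
box.set_filters(set(['unplayed']))
assert box.enabled['unplayed'] and not box.enabled['all']
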
class SharingTitlebar(VideosTitlebar): - def _on_search_changed(self, searchbox): - self.emit('search-changed', searchbox.get_text()) + def _on_search_changed(self, searchbox): + self.emit('search-changed', searchbox.get_text()) -class MusicTitlebar(MediaTitlebar, UnplayedFilterMixin): - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - UnplayedFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - UnplayedFilterMixin.toggle_filter(self) - -class DeviceMusicTitlebar(FilteredTitlebar, UnplayedFilterMixin): - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - UnplayedFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - UnplayedFilterMixin.toggle_filter(self) +class MusicTitlebar(MediaTitlebar): + def get_filters(self): + return ('all', 'unplayed') + +class DeviceMusicTitlebar(ItemListTitlebar): + def get_filters(self): + return ('all', 'unplayed') -class AllFeedsTitlebar(FilteredTitlebar, DownloadedUnplayedFilterMixin, - VideoAudioFilterMixin): +class AllFeedsTitlebar(ItemListTitlebar): uses_resume_button = True - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - DownloadedUnplayedFilterMixin.setup_filters(self) - VideoAudioFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - DownloadedUnplayedFilterMixin.toggle_filter(self) - VideoAudioFilterMixin.toggle_filter(self) + def get_filters(self): + return ('all', 'downloaded', 'unplayed', 'video', 'audio') -class ChannelTitlebar(SearchTitlebar, FilteredTitlebar, - DownloadedUnplayedFilterMixin): +class ChannelTitlebar(SearchTitlebar): """Titlebar for a channel """ uses_resume_button = True - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - DownloadedUnplayedFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - DownloadedUnplayedFilterMixin.toggle_filter(self) + def get_filters(self): + return ('all', 'downloaded', 'unplayed') def set_small_width_mode(self, enabled): if enabled: @@ -897,35 +825,20 @@ self.save_button.set_title(_("Save as Podcast")) SearchTitlebar.set_small_width_mode(self, enabled) -class ChannelFolderTitlebar(FilteredTitlebar, DownloadedUnplayedFilterMixin): +class ChannelFolderTitlebar(ItemListTitlebar): """Titlebar for a channel folder; like the channel titlebar, but without the save search button. 
""" uses_resume_button = True - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - DownloadedUnplayedFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - DownloadedUnplayedFilterMixin.toggle_filter(self) + def get_filters(self): + return ('all', 'downloaded', 'unplayed') -class WatchedFolderTitlebar(FilteredTitlebar, VideoAudioFilterMixin): +class WatchedFolderTitlebar(ItemListTitlebar): uses_resume_button = True - def setup_filters(self): - FilteredTitlebar.setup_filters(self) - unwatched = WidgetStateStore.get_unwatched_filter() - self.add_filter('only-unplayed', 'toggle-filter', unwatched, - _('Unplayed')) - VideoAudioFilterMixin.setup_filters(self) - - def toggle_filter(self, filter_): - FilteredTitlebar.toggle_filter(self, filter_) - unwatched = WidgetStateStore.has_unwatched_filter(self.filter) - self.filters['only-unplayed'].set_enabled(unwatched) - VideoAudioFilterMixin.toggle_filter(self) + def get_filters(self): + return ('all', 'unplayed', 'wf-video', 'wf-audio') class SearchListTitlebar(SearchTitlebar): """Titlebar for the search page. @@ -958,10 +871,10 @@ class ItemView(widgetset.TableView): """TableView that displays a list of items.""" - def __init__(self, item_list): - widgetset.TableView.__init__(self, item_list.model) + def __init__(self, model, custom_headers=False): + widgetset.TableView.__init__(self, model, + custom_headers=custom_headers) - self.item_list = item_list self.set_fixed_height(True) self.allow_multiple_select = True @@ -969,9 +882,11 @@ class SorterOwner(object): """Mixin for objects that need to handle a set of sort indicators.""" + def __init__(self, sorts_enabled): self.create_signal('sort-changed') self.sorters = dict() + self.sorts_enabled = [] self.update_sorts(sorts_enabled) def on_sorter_clicked(self, widget, sort_key): @@ -999,6 +914,10 @@ for name in sorted(new - old, key=sorts_enabled.index): self.sorters[name] = self.make_sorter(name) self.allocate_widths() + self.sorts_enabled = sorts_enabled + + def get_sorts(self): + return self.sorts_enabled def allocate_widths(self, preserving_by_label=None): """Option method for subclasses that need to allocate widths when their @@ -1011,7 +930,15 @@ self._column_map = {} def add_renderer(self, name, renderer): - self._column_map[name] = renderer + if name != 'state': + label = widgetconst.COLUMN_LABELS[name] + else: + label = '' + self._column_map[name] = (label, renderer) + + def change_label(self, name, new_label): + old_label, renderer = self.get(name) + self._column_map[name] = (new_label, renderer) def get(self, name): return self._column_map[name] @@ -1023,6 +950,7 @@ 'name': style.NameRenderer, 'artist': style.ArtistRenderer, 'album': style.AlbumRenderer, + 'multi-row-album': style.MultiRowAlbumRenderer, 'track': style.TrackRenderer, 'year': style.YearRenderer, 'genre': style.GenreRenderer, @@ -1057,8 +985,8 @@ draws_selection = False - def __init__(self, item_list, item_renderer): - ItemView.__init__(self, item_list) + def __init__(self, model, item_renderer): + ItemView.__init__(self, model) self.renderer = item_renderer self.column = widgetset.TableColumn('item', self.renderer) self.set_column_spacing(0) @@ -1071,14 +999,14 @@ class ListView(ItemView, SorterOwner): """TableView that displays a list of items using the list view.""" COLUMN_PADDING = 12 - def __init__(self, item_list, renderer_set, sorts, column_widths): - ItemView.__init__(self, item_list) + def __init__(self, model, renderer_set, sorts, 
column_widths): + ItemView.__init__(self, model, custom_headers=True) self.column_widths = column_widths self._column_by_label = {} self.set_show_headers(True) self.set_columns_draggable(True) self.set_column_spacing(self.COLUMN_PADDING) - self.set_row_spacing(5) + self.set_row_spacing(4) self.set_grid_lines(False, False) self.set_alternate_row_backgrounds(True) self.html_stripper = util.HTMLStripper() @@ -1098,23 +1026,21 @@ def get_tooltip(self, iter_, column): if self.sorters.get('name', None) == column: - info = self.item_list.model[iter_][0] + info = self.model.get_item(iter_) text, links = self.html_stripper.strip(info.description) if text: if len(text) > 1000: text = text[:994] + ' [...]' return text elif self.sorters.get('state', None) is column: - info = self.item_list.model[iter_][0] + info = self.model.get_item(iter_) # this logic is replicated in style.StateCircleRenderer # with text from style.StatusRenderer - if info.state == 'downloading': + if info.is_download: return _("Downloading") - elif (info.downloaded and info.is_playable - and not info.video_watched): + elif not info.video_watched: return _("Unplayed") - elif (not info.item_viewed and not info.expiration_date - and not info.is_external): + elif info.new: return _("Newly Available") return None @@ -1122,11 +1048,7 @@ self.remove_column(self.columns.index(column)) def make_sorter(self, name): - if name == 'state': - header = u'' - else: - header = widgetconst.COLUMN_LABELS[name] - renderer = self.renderer_set.get(name) + header, renderer = self.renderer_set.get(name) column = widgetset.TableColumn(header, renderer, ListViewSorter(header, renderer)) column.set_min_width(renderer.min_width) @@ -1211,6 +1133,13 @@ for name, sorter in self.sorters.iteritems(): sorter.set_width(self.column_widths[name]) +class AlbumView(ListView): + def __init__(self, model, renderer_set, sorts, column_widths): + ListView.__init__(self, model, renderer_set, sorts, column_widths) + self.set_group_lines_enabled(True) + self.set_group_line_style(widgetutil.css_to_color('#dddddd'), 1) + self.enable_album_view_focus_hack() + class DownloadStatusToolbar(Toolbar): """Widget that shows free space and download and upload speed status. @@ -1363,13 +1292,21 @@ def set_small_width_mode(self, enabled): if enabled: - labels = [ _('All'), _('All'), _('All'), _('Settings')] - else: - labels = [ _('Pause All'), _('Resume All'), _('Cancel All'), - _('Download Settings')] - - for button, label in zip(self.buttons, labels): - button.set_title(label) + labels = { + 'pause': _('All'), + 'resume': _('All'), + 'cancel': _('All'), + 'settings': _('Settings'), + } + else: + labels = { + 'pause': _('Pause All'), + 'resume': _('Resume All'), + 'cancel': _('Cancel All'), + 'settings': _('Download Settings'), + } + for key, title in labels.items(): + self.buttons[key].set_title(title) ItemListTitlebar.set_small_width_mode(self, enabled) def _build_before_filters(self): @@ -1399,10 +1336,26 @@ h.pack_start(widgetutil.align_middle(settings_button, top_pad=5, bottom_pad=5, right_pad=16)) - self.buttons = [pause_button, resume_button, cancel_button, - settings_button] + self.buttons = { + 'pause': pause_button, + 'resume': resume_button, + 'cancel': cancel_button, + 'settings': settings_button, + } return h + def set_button_enabled(self, name, enabled): + """Enable/Disable a button + + :param name: "pause", "resume", "cancel", or "settings" + :param enabled: should the button be enabled? 
+ """ + button = self.buttons[name] + if enabled: + button.enable() + else: + button.disable() + def _on_pause_button_clicked(self, widget): self.emit('pause-all') @@ -1438,9 +1391,9 @@ self.settings_button = widgetutil.HideableWidget(settings_button) autodownload_button = widgetutil.MultiStateTitlebarButton( - [('autodownload-all', _("Auto-Download All"), "all"), + [('autodownload-off', _("Auto-Download Off"), "off"), ('autodownload-new', _("Auto-Download New"), "new"), - ('autodownload-off', _("Auto-Download Off"), "off")]) + ('autodownload-all', _("Auto-Download All"), "all")]) autodownload_button.connect('clicked', self._on_autodownload_changed) self.autodownload_button_actual = autodownload_button @@ -1460,11 +1413,11 @@ self.autodownload_dc = None def set_autodownload_mode(self, autodownload_mode): - if autodownload_mode == 'all': + if autodownload_mode == 'off': self.autodownload_button_actual.set_toggle_state(0) elif autodownload_mode == 'new': self.autodownload_button_actual.set_toggle_state(1) - elif autodownload_mode == 'off': + elif autodownload_mode == 'all': self.autodownload_button_actual.set_toggle_state(2) def draw(self, context, layout): @@ -1490,19 +1443,18 @@ def _on_autodownload_changed(self, widget): if self.autodownload_dc is not None: - self.autodownload_dc.cancel() + timer.cancel(self.autodownload_dc) self.autodownload_dc = None toggle_state = self.autodownload_button_actual.get_toggle_state() toggle_state = (toggle_state + 1) % 3 self.autodownload_button_actual.set_toggle_state(toggle_state) + self.autodownload_dc = timer.add( + 0.5, self._on_autodownload_changed_timeout) + + def _on_autodownload_changed_timeout(self): value = self.autodownload_button_actual.get_toggle_state_information() value = value[0] - self.autodownload_dc = eventloop.add_timeout( - 3, self._on_autodownload_changed_timeout, "autodownload change", - args=(value,)) - - def _on_autodownload_changed_timeout(self, value): self.emit('auto-download-changed', value) class HeaderToolbar(Toolbar, SorterOwner): @@ -1606,7 +1558,12 @@ return int(text_size[0]) + 36, int(max(text_size[1], widgetset.CUSTOM_HEADER_HEIGHT - 1)) - def draw(self, context, layout): + def draw(self, context, layout, background_only=False): + # bz:17103 this button is not stipulated not to be in the list of + # column headers so just draw the background then return. + if background_only: + self.surface.draw(context, 0, 0, context.width, context.height) + return text = 1 # white text arrow = 1 # white arrow if self.state == 'pressed' or self._enabled: @@ -1761,7 +1718,7 @@ self.add(self.label) class ProgressToolbar(Toolbar): - """Toolbar displayed above ItemViews to show the progress of + """Toolbar displayed below ItemViews to show the progress of reading new metadata, communicating with a device, and similar time-consuming operations. @@ -1770,41 +1727,40 @@ """ def __init__(self): Toolbar.__init__(self) - loading_icon = widgetset.AnimatedImageDisplay( - resources.path('images/load-indicator.gif')) self.hbox = widgetset.HBox() self.add(self.hbox) self.label = widgetset.Label() - self.meter = widgetutil.HideableWidget(loading_icon) + # bz18599 says we should reduce the size by 2px from the default. The + # default size is 13 px, so 0.85 should do the trick. 
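
The reworked auto-download handler above debounces clicks: each click advances the toggle state immediately, cancels any pending timer, and arms a fresh half-second one, so the backend only hears the final state (this replaces the old three-second eventloop timeout). A self-contained sketch using threading.Timer as a stand-in for the platform timer module:

# Debounce sketch: rapid clicks cycle the mode locally; the backend is
# only notified once the clicks stop. threading.Timer stands in for
# miro.plat.frontends.widgets.timer.
import threading

class AutoDownloadToggleSketch(object):
    MODES = ('off', 'new', 'all')

    def __init__(self, notify_backend, delay=0.5):
        self.toggle_state = 0
        self.notify_backend = notify_backend
        self.delay = delay
        self._pending = None

    def on_clicked(self):
        if self._pending is not None:
            self._pending.cancel()
        self.toggle_state = (self.toggle_state + 1) % 3
        self._pending = threading.Timer(self.delay, self._fire)
        self._pending.start()

    def _fire(self):
        self._pending = None
        self.notify_backend(self.MODES[self.toggle_state])
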
+ self.label.set_size(0.85) self.label_widget = widgetutil.HideableWidget(self.label) self.elapsed = None self.eta = None self.total = None - self.remaining = None + self.count = None + self.net_lookup_only = False self.mediatype = 'other' - self.displayed = False self.set_up = False - def _display(self): - if not self.set_up: - padding = max(0, 380 - self.label.get_width()) - self.hbox.pack_start( - widgetutil.align( - self.label_widget, 1, 0.5, 1, 0, 0, 0, padding, 10), - expand=False) - self.hbox.pack_start(widgetutil.align_left( - self.meter, 0, 0, 0, 200), expand=True) - self.set_up = True - if not self.displayed: - self.label_widget.show() - self.meter.show() - self.displayed = True + def _setup(self): + left_pad = max(0, 380 - self.label.get_width()) + label_align = widgetutil.align(self.label_widget, yalign=0.5, + top_pad=1, left_pad=left_pad, + right_pad=10) + label_align.set_size_request(-1, 20) + self.hbox.pack_start(label_align, expand=False) + # this is where we could pack a progress bar if we wanted to show that + # again + self.set_up = True def _update_label(self): # TODO: display eta - state = {"number": self.total-self.remaining, + state = {"number": self.count, "total": self.total} - if self.mediatype == 'audio': + if self.net_lookup_only: + text = _("Looking up album art and song info: " + "%(number)d of %(total)d", state) + elif self.mediatype == 'audio': text = _("Importing audio details and artwork: " "%(number)d of %(total)d", state) elif self.mediatype == 'video': @@ -1815,19 +1771,35 @@ "%(number)d of %(total)d", state) self.label.set_text(text) - def update(self, mediatype, remaining, seconds, total): + def update(self, mediatype, finished, finished_local, eta, total): """Update progress.""" self.mediatype = mediatype - self.eta = seconds + self.eta = eta self.total = total - self.remaining = remaining - if total: + if total == 0: + # completely finished + self._set_display_mode('hide') + elif finished_local == total: + # finished with local extraction, working on internet lookups + self.net_lookup_only = True + self.count = finished + # ensure we are as tall we are when the loading indicator is shown + self._set_display_mode('show') + else: + # still working on local extraction (mutagen, moviedata, etc) + self.net_lookup_only = False + self.count = finished_local + self._set_display_mode('show') + + def _set_display_mode(self, mode): + if mode == 'show': + if not self.set_up: + self._setup() self._update_label() - self._display() + self.label_widget.show() else: + # we are hiding the whole display self.label_widget.hide() - self.meter.hide() - self.displayed = False class ItemDetailsBackground(widgetset.Background): """Nearly white background behind the item details widget @@ -2103,20 +2075,17 @@ self.connections.set_text(str(info.connections)) self.seeders.set_text(str(info.seeders)) self.leechers.set_text(str(info.leechers)) - self.down_rate.set_text(displaytext.download_rate(info.down_rate)) - self.up_rate.set_text(displaytext.download_rate(info.up_rate)) - self.ratio.set_text("%0.2f" % info.up_down_ratio) - self.eta.set_text(displaytext.time_string_0_blank( - info.download_info.eta)) - self.down_total.set_text(displaytext.size_string(info.down_total)) - self.up_total.set_text(displaytext.size_string(info.up_total)) + self.down_rate.set_text(info.download_rate_text) + self.up_rate.set_text(info.upload_rate_text) + self.ratio.set_text(info.upload_ratio_text) + self.eta.set_text(info.eta_text) + self.down_total.set_text(info.downloaded_size_text) + 
self.up_total.set_text(info.upload_size_text) def _calc_should_show(self, info): """Decide if we should show ourselves for an ItemInfo.""" - if info.download_info is None: - return False - return (info.download_info.torrent and - info.download_info.state in ('downloading', 'uploading')) + return (info.is_torrent and + (info.is_download or info.is_seeding)) class ItemDetailsWidget(widgetset.VBox): """Widget to display detailed information about an item. @@ -2267,7 +2236,7 @@ self._expanded = expanded def set_info(self, info): - self.title_label.set_text(info.name) + self.title_label.set_text(info.title) self.torrent_info.set_info(info) if self.torrent_info.should_show: self.torrent_info_holder.show() @@ -2281,15 +2250,21 @@ self.description_label.set_text(info.description_stripped[0]) self.set_extra_info_text(info) self.setup_license_button(info) - image = imagepool.get(info.thumbnail, self.IMAGE_SIZE) + image = imagepool.get(info.thumbnail, self.IMAGE_SIZE, + invalidator=util.mtime_invalidator( + info.thumbnail)) self.image_widget.set_image(image) self.set_label_widths() self._set_empty_mode(False) def set_extra_info_text(self, info): + display_date = displaytext.date_slashes(info.release_date) + display_duration = displaytext.duration(info.duration) + display_size = displaytext.size_string(info.size) + parts = [] - for attr in (info.display_date, info.display_duration, - info.display_size, info.file_format): + for attr in (display_date, display_duration, display_size, + info.file_format): if attr: parts.append(attr) self.extra_info_label.set_text(' | '.join(parts)) @@ -2355,10 +2330,8 @@ def __init__(self, toolbar, view): widgetset.VBox.__init__(self) self.vbox = {} - standard_view = WidgetStateStore.get_standard_view_type() - list_view = WidgetStateStore.get_list_view_type() - self.vbox[standard_view] = widgetset.VBox() - self.vbox[list_view] = widgetset.VBox() + for view_type in WidgetStateStore.get_all_view_types(): + self.vbox[view_type] = widgetset.VBox() self.titlebar_vbox = widgetset.VBox() self.statusbar_vbox = widgetset.VBox() self.item_details = ItemDetailsWidget() @@ -2370,13 +2343,14 @@ color1 = widgetutil.css_to_color('#303030') color2 = widgetutil.css_to_color('#020202') self.pack_start(separator.HThinSeparator(color1)) - self.pack_start(self.progress_toolbar) self.background = ItemListBackground() self.pack_start(self.background, expand=True) self.pack_start(self.item_details) + self.pack_start(self.progress_toolbar) self.pack_start(self.statusbar_vbox) self.selected_view = view self.list_empty_mode = False + standard_view = WidgetStateStore.get_standard_view_type() self.vbox[standard_view].pack_start(self.toolbar) self.vbox[standard_view].pack_start(separator.HThinSeparator(color2)) self.background.add(self.vbox[view]) diff -Nru miro-4.0.4/lib/frontends/widgets/itemrenderer.py miro-6.0/lib/frontends/widgets/itemrenderer.py --- miro-4.0.4/lib/frontends/widgets/itemrenderer.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/itemrenderer.py 2013-04-05 16:02:42.000000000 +0000 @@ -36,6 +36,7 @@ from miro import displaytext from miro import prefs from miro import util +from miro.data import item from miro.gtcache import gettext as _ from miro.frontends.widgets import cellpack from miro.frontends.widgets import imagepool @@ -180,9 +181,14 @@ signals: throbber-drawn (obj, item_info) -- a progress throbber was drawn + item-retrying (obj, item_info) -- a download will be retried, and we + need to update the time """ def __init__(self): - 
signals.SignalEmitter.__init__(self, 'throbber-drawn') + signals.SignalEmitter.__init__(self, + 'throbber-drawn', + 'item-retrying', + ) _cached_images = {} # caches ImageSurface for get_image() def get_image(image_name): @@ -207,13 +213,18 @@ path = os.path.join('images', filename) return imagepool.get_surface(resources.path(path)) -class ItemRendererBase(widgetset.InfoListRenderer): +class ItemRenderer(widgetset.ItemListRenderer): MIN_WIDTH = 600 HEIGHT = 147 - def __init__(self, wide_image=False): - widgetset.InfoListRenderer.__init__(self) + def __init__(self, display_channel=True, is_podcast=False, + wide_image=False): + widgetset.ItemListRenderer.__init__(self) self.canvas = ItemRendererCanvas(wide_image) + self.signals = ItemRendererSignals() + self.display_channel = display_channel + self.is_podcast = is_podcast + self.setup_torrent_folder_description() def get_size(self, style, layout_manager): return self.MIN_WIDTH, self.HEIGHT @@ -242,32 +253,19 @@ context.height, selected, hotspot) layout.draw(context) - def layout_all(self, layout_manager, width, height, selected, hotspot): - """Create a ItemRendererLayout object for our cell.""" - raise NotImplementedError() - -class ItemRenderer(ItemRendererBase): - def __init__(self, display_channel=True, is_podcast=False, - wide_image=False): - ItemRendererBase.__init__(self, wide_image) - self.signals = ItemRendererSignals() - self.display_channel = display_channel - self.is_podcast = is_podcast - self.setup_torrent_folder_description() - def setup_torrent_folder_description(self): text = (u'%s' % SHOW_CONTENTS_TEXT) self.torrent_folder_description = util.HTMLStripper().strip(text) def layout_all(self, layout_manager, width, height, selected, hotspot): - download_mode = (self.info.state in ('downloading', 'paused')) + download_mode = self.info.is_download self.canvas.start_new_cell(layout_manager, width, height, selected, hotspot, download_mode) # add elements that are always present self.canvas.add_thumbnail(self.info.thumbnail, self.calc_thumbnail_hotspot()) - self.canvas.add_text(self.info.name, ITEM_TITLE_COLOR, + self.canvas.add_text(self.info.title, ITEM_TITLE_COLOR, self.calc_description(), self.calc_extra_info()) # add elements for download-mode or non-download-mode if download_mode: @@ -278,6 +276,8 @@ def add_normal_mode_elements(self): """Add elements when we aren't in download mode.""" + if isinstance(self.info, item.DBErrorItemInfo): + return self.add_main_button() self.add_emblem() self.add_secondary_button() @@ -327,8 +327,8 @@ self.canvas.add_remove_button(*self.remove_button_info()) if self.info.expiration_date: - text = displaytext.expiration_date(self.info.expiration_date) - self.canvas.add_keep_button('keep', text) + self.canvas.add_keep_button('keep', + self.info.expiration_date_text) elif self.info.id in app.saved_items: self.canvas.add_saved_emblem() @@ -339,48 +339,40 @@ self.canvas.add_menu_button_download_mode() def add_progress_bar(self): - dl_info = self.info.download_info - if dl_info.state == 'paused': + if self.info.is_paused: pause_button_mode = 'resume' else: pause_button_mode = 'pause' - if dl_info.downloaded_size == 0: - # show empty bar before we start up - self.canvas.add_progress_bar(0.0, pause_button_mode) - elif dl_info.total_size <= 0: - # show throbber once we've started, but still don't know the - # total_size + progress = self.info.download_progress + if progress is None: + # show throbber throbber_index = self.attrs.get('throbber-value', 0) % 10 self.canvas.add_progress_throbber(throbber_index, 
pause_button_mode) self.signals.emit('throbber-drawn', self.info) else: - # show regular bar otherwise - amount = float(dl_info.downloaded_size) / dl_info.total_size - self.canvas.add_progress_bar(amount, pause_button_mode) + # show regular bar + self.canvas.add_progress_bar(progress, pause_button_mode) def add_download_info(self): - dl_info = self.info.download_info - if self.info.state == 'paused': - eta = down_rate = 0 - else: - eta = dl_info.eta - down_rate = dl_info.rate download_info = ItemDownloadInfo() - download_info.add_line('time-left', - displaytext.time_string_0_blank(eta), None) + download_info.add_line('time-left', self.info.eta_text, None) download_info.add_line('dl-speed', - displaytext.download_rate(down_rate), - displaytext.size_string(dl_info.downloaded_size)) - if dl_info.torrent: + self.info.download_rate_text, + self.info.downloaded_size_text) + if self.info.is_torrent: download_info.add_line('ul-speed', - displaytext.download_rate(self.info.up_rate), - displaytext.size_string(self.info.up_total)) + self.info.upload_rate_text, + self.info.upload_size_text) self.canvas.add_download_info(download_info) - if self.info.download_info.rate == 0: + if self.info.is_paused: + pass + elif self.info.rate is None: self.canvas.add_startup_info( - self.info.download_info.startup_activity) - elif dl_info.torrent and dl_info.state != 'paused': + self.info.startup_activity) + if self.info.is_retrying: + self.signals.emit('item-retrying', self.info) + elif self.info.is_torrent: self.add_torrent_info() def add_torrent_info(self): @@ -388,7 +380,7 @@ (_('PEERS'), str(self.info.connections)), (_('SEEDS'), str(self.info.seeders)), (_('LEECH'), str(self.info.leechers)), - (_('SHARE'), "%.2f" % self.info.up_down_ratio), + (_('SHARE'), "%.2f" % self.info.upload_ratio), ) self.canvas.add_torrent_info(lines) @@ -402,8 +394,7 @@ return None def calc_description(self): - if (self.info.download_info and self.info.download_info.torrent and - self.info.children): + if self.info.is_torrent_folder: text, links = self.torrent_folder_description else: text, links = self.info.description_stripped @@ -412,22 +403,24 @@ preface_color) def calc_description_preface(self): - if (self.display_channel and self.info.feed_name and + if (self.display_channel and self.info.parent_title and not self.info.is_external): - return ("%s: " % self.info.feed_name, FEED_NAME_COLOR) + return ("%s: " % self.info.parent_title, FEED_NAME_COLOR) return '', widgetutil.WHITE def calc_extra_info(self): - return (self.info.display_date, self.info.display_duration, - self.info.display_size, self.info.file_format) + display_date = displaytext.date_slashes(self.info.release_date) + display_duration = displaytext.duration(self.info.duration) + display_size = displaytext.size_string(self.info.size) + return (display_date, display_duration, display_size, + self.info.file_format) def calc_extra_button(self): """Calculate the button to put to the right of the emblem. 
:returns: (text, hotspot_name) tuple, or None """ - if (self.info.download_info and - self.info.download_info.state == 'uploading'): + if self.info.is_seeding: return (STOP_SEEDING_TEXT, 'stop_seeding') elif self.info.pending_auto_dl: return (CANCEL_TEXT, 'cancel_auto_download') @@ -448,12 +441,11 @@ if self.info.has_drm: visuals = EMBLEM_VISUALS_DRM text = _('DRM locked') - elif (self.info.download_info - and self.info.download_info.state == 'failed'): + elif self.info.is_failed_download: visuals = EMBLEM_VISUALS_FAILED image = get_image('status-icon-alert') text = u"%s-%s" % (ERROR_TEXT, - self.info.download_info.short_reason_failed) + self.info.short_reason_failed) elif self.info.pending_auto_dl: visuals = EMBLEM_VISUALS_QUEUED text = QUEUED_TEXT @@ -472,7 +464,7 @@ text = _("Resume at %(resumetime)s", {"resumetime": displaytext.short_time_string( self.info.resume_time)}) - elif not self.info.item_viewed and self.info.state == "new": + elif self.info.new: visuals = EMBLEM_VISUALS_NEWLY_AVAILABLE text = NEWLY_AVAILABLE_TEXT else: @@ -1093,7 +1085,9 @@ context.fill() def draw_thumbnail(self, context, x, y, width, height): - icon = imagepool.get_surface(self.thumbnail, (width, height)) + icon = imagepool.get_surface(self.thumbnail, (width, height), + invalidator=util.mtime_invalidator( + self.thumbnail)) icon_x = x + (width - icon.width) // 2 icon_y = y + (height - icon.height) // 2 # if our thumbnail is far enough to the left, we need to set a clip @@ -1269,15 +1263,15 @@ cap_image.height) class PlaylistItemRenderer(ItemRenderer): - def __init__(self, playlist_sorter): + def __init__(self, playlist_order): ItemRenderer.__init__(self, display_channel=False) - self.playlist_sorter = playlist_sorter + self.playlist_order = playlist_order def remove_button_info(self): return ('remove-playlist', 'remove') def calc_description_preface(self): - order_number = self.playlist_sorter.sort_key(self.info) + 1 + order_number = self.playlist_order.item_position(self.info) if self.info.description_stripped[0]: text = "%s - " % order_number else: @@ -1293,17 +1287,30 @@ def calc_extra_button(self): return DOWNLOAD_TO_MY_MIRO_TEXT, 'download-device-item' -class ConversionItemRenderer(ItemRendererBase): +class ConversionItemRenderer(widgetset.CustomCellRenderer): """Class to draw conversion items This one is substantially different from ItemRenderer because it deals with ConversionTaskInfo objects. 
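
The progress handling above collapses the old three-way branch: download_progress stays None while the total size is unknown, which selects a cycling throbber frame, and otherwise a known fraction drives a determinate bar. A sketch of that decision with stand-in names:

# Sketch of the throbber-vs-bar decision: an unknown total size yields
# one of the 10 throbber frames, a known one a fraction for the bar.
# Stand-in function, not Miro's renderer API.
def pick_progress_display(downloaded_size, total_size, throbber_value=0):
    if total_size is None or total_size <= 0:
        return ('throbber', throbber_value % 10)
    return ('bar', float(downloaded_size) / total_size)

assert pick_progress_display(0, None, throbber_value=13) == ('throbber', 3)
assert pick_progress_display(50, 200) == ('bar', 0.25)
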
""" def __init__(self): - ItemRendererBase.__init__(self, wide_image=True) + widgetset.CustomCellRenderer.__init__(self) + self.canvas = ItemRendererCanvas(wide_image=True) self.canvas.set_progress_bar_images('conversion-progress-left', 'conversion-progress-middle', 'conversion-progress-right') + def get_size(self, style, layout_manager): + return ItemRenderer.MIN_WIDTH, ItemRenderer.HEIGHT + + def hotspot_test(self, style, layout_manager, x, y, width, height): + layout = self.layout_all(layout_manager, width, height, False, None) + return layout.find_hotspot_name(x, y) + + def render(self, context, layout_manager, selected, hotspot, hover): + layout = self.layout_all(layout_manager, context.width, + context.height, selected, hotspot) + layout.draw(context) + def layout_all(self, layout_manager, width, height, selected, hotspot): download_mode = (self.info.state == 'running') self.canvas.start_new_cell(layout_manager, width, height, selected, diff -Nru miro-4.0.4/lib/frontends/widgets/itemsort.py miro-6.0/lib/frontends/widgets/itemsort.py --- miro-4.0.4/lib/frontends/widgets/itemsort.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/itemsort.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,308 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""itemsort.py -- Define sorts for item lists. + +This module defines the ItemSort base class and subclasses that define +concrete ways of sorting item lists. +""" + +from miro import util + +class ItemSort(object): + """Class that sorts items in an item list. + + :attribute key: string specifying the name of the column for an ItemView + that this sort should be used for. Subclasses must set this. + + :attribute columns: list of columns to pass to set_order_by(). These + should specify an ascending search. Subclasses must set this. + + :attribute collations: list of collations for each column. 
By default + this is None, which specifies the default collations + """ + collations = None + + def __init__(self, ascending=True): + self.ascending = ascending + + def is_ascending(self): + return self.ascending + + def reverse_columns(self): + """Generate columns for a reverse search + + By default, we just negate every column in self.columns. Subclasses + can override this if they want different behavior. + """ + def reverse_term(term): + if term[0] == '-': + return term[1:] + else: + return '-' + term + return [reverse_term(t) for t in self.columns] + + def add_to_query(self, query): + if self.ascending: + query.set_order_by(self.columns, self.collations) + else: + query.set_order_by(self.reverse_columns(), self.collations) + +class TitleSort(ItemSort): + key = 'name' + columns = ['title'] + collations = ['name'] + +class DateSort(ItemSort): + key = 'date' + columns = ['release_date'] + +class ArtistSort(ItemSort): + key = 'artist' + columns = ['artist', 'album', 'track'] + collations = ['name', 'name', None] + +class AlbumSort(ItemSort): + key = 'album' + columns = ['album', 'track'] + collations = ['name', None] + +class FeedNameSort(ItemSort): + key = 'feed-name' + columns = ['parent_title', 'feed_id', 'parent_id'] + +class StateCircleSort(ItemSort): + # Weird sort, this one is for when the user clicks on the header above the + # status bumps. It's almost the same as StatusSort, but there isn't a + # bump for expiring. + key = 'state' + def add_to_query(self, query): + sql = ("CASE " + # downloading + "WHEN remote_downloader.state IN ('downloading', 'paused') OR " + "pending_manual_download THEN 1 " + # unwatched + "WHEN item.filename IS NOT NULL AND was_downloaded AND " + "watched_time IS NULL THEN 2 " + # new + "WHEN new THEN 3 " + # other + "ELSE 4 " + "END") + if not self.ascending: + sql += ' DESC' + columns = ['remote_downloader.state', 'pending_manual_download', + 'filename', 'was_downloaded', 'watched_time', 'new'] + query.set_complex_order_by(columns, sql) + +class StatusSort(ItemSort): + key = 'status' + def add_to_query(self, query): + sort1 = ("CASE " + # new + "WHEN new THEN 0 " + # downloading + "WHEN remote_downloader.state IN ('downloading', 'paused') OR " + "pending_manual_download THEN 2 " + # unwatched + "WHEN item.filename IS NOT NULL AND was_downloaded AND " + "watched_time IS NULL THEN 3 " + # expiring + "WHEN item.filename IS NOT NULL AND was_downloaded AND " + "NOT keep AND watched_time IS NOT NULL THEN 4 " + # other + "ELSE 1 " + "END") + sort2 = ("CASE " + # for expiring items, our secondary sort is the watched time, + # this makes items that expire sooner appear on top. + "WHEN item.filename IS NOT NULL AND was_downloaded AND " + "NOT keep AND watched_time IS NOT NULL THEN watched_time " + # for other items we don't care. Just use id. 
+ "ELSE item.id " + "END") + + if self.ascending: + sql = "%s, %s" % (sort1, sort2) + else: + sql = "%s DESC, %s DESC" % (sort1, sort2) + columns = ['remote_downloader.state', 'pending_manual_download', + 'filename', 'was_downloaded', 'watched_time', 'new', + 'keep', + ] + query.set_complex_order_by(columns, sql) + +class LengthSort(ItemSort): + key = 'length' + columns = ['duration'] + +class DateAddedSort(ItemSort): + key = 'date-added' + columns = ['creation_time'] + +class SizeSort(ItemSort): + key = 'size' + columns = ['size'] + +class DescriptionSort(ItemSort): + key = 'description' + + def add_to_query(self, query): + sql = ("CASE " + # downloading + "WHEN description IS NOT NULL " + "THEN description " + "ELSE entry_description " + "END collate name") + if not self.ascending: + sql += ' DESC' + columns = ['description', 'entry_description' ] + query.set_complex_order_by(columns, sql) + +class FileTypeSort(ItemSort): + key = 'file-type' + columns = ['file_type'] + +class RatingSort(ItemSort): + # TODO: should this also include auto rating? + key = 'rating' + columns = ['rating'] + +class GenreSort(ItemSort): + key = 'genre' + columns = ['genre'] + +class ShowSort(ItemSort): + key = 'show' + columns = ['show'] + +class TrackSort(ItemSort): + key = 'track' + columns = ['track'] + +class YearSort(ItemSort): + key = 'year' + columns = ['year'] + +class VideoKindSort(ItemSort): + key = 'kind' + columns = ['kind'] + +class MultiRowAlbum(ItemSort): + key = 'multi-row-album' + + def __init__(self, ascending): + ItemSort.__init__(self, ascending) + self.switch_mode('standard') + + def switch_mode(self, new_mode): + """Switch which mode we use to sort. + + MultiRowAlbumRenderer displays different data depending on what mode + it's in. Therefore, this sorter needs to sort differently depending + on that mode. + + The modes available are the same as MultiRowAlbumRenderer's modes + (standard, feed, and video). The mode should be set the same on each + """ + if new_mode not in ('standard', 'feed', 'video'): + raise ValueError("unknown mode: %s" % new_mode) + add_to_query_method = getattr(self, 'add_to_query_%s' % new_mode) + self.add_to_query = add_to_query_method + + def add_to_query_standard(self, query): + columns = ['album_artist', 'album', 'track'] + collations = ['name', 'name', None] + if not self.ascending: + columns = ['-' + c for c in columns] + query.set_order_by(columns, collations) + + def _watched_folder_case(self): + """Get an SQL case expression will sort watched folders to the bottom. + + See #18410 and #18278. + """ + return ("CASE " + "WHEN feed.orig_url LIKE 'dtv:directoryfeed:%' THEN 1 " + "ELSE 0 " + "END") + + def add_to_query_feed(self, query): + if self.ascending: + sql_template = "%s, %s, %s" + else: + # NOTE: we don't add DESC to the watch folder case expression. We + # want watched folders to always be at the bottom, even if we + # reverse the search. + sql_template = "%s, %s DESC, %s DESC" + sql = sql_template % (self._watched_folder_case(), + 'item.parent_title', + 'item.release_date') + columns = ['feed.orig_url', 'parent_title', 'release_date'] + query.set_complex_order_by(columns, sql) + + def add_to_query_video(self, query): + if self.ascending: + sql_template = "%s, %s, %s" + else: + # NOTE: we don't add DESC to the watch folder case expression. We + # want watched folders to always be at the bottom, even if we + # reverse the search. 
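
StateCircleSort, StatusSort, and the MultiRowAlbum modes all express mixed-state ordering as SQL CASE expressions handed to set_complex_order_by(). A self-contained illustration of the underlying ORDER BY CASE technique, using plain sqlite3 and a toy schema invented for the demo:

# Demonstration of ORDER BY CASE: map each row to a rank so heterogeneous
# states sort in a chosen order. Plain sqlite3; toy schema and states.
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE TABLE item (id INTEGER, state TEXT)")
conn.executemany("INSERT INTO item VALUES (?, ?)",
                 [(1, 'other'), (2, 'new'), (3, 'downloading')])
rows = conn.execute(
    "SELECT id, state FROM item ORDER BY "
    "CASE WHEN state = 'downloading' THEN 1 "
    "     WHEN state = 'new' THEN 2 "
    "     ELSE 3 END").fetchall()
print(rows)  # downloading first, then new, then everything else
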
+            sql_template = "%s, %s DESC, %s DESC"
+        sql = sql_template % (self._watched_folder_case(),
+                              'CASE '
+                              'WHEN item.show IS NOT NULL THEN item.show '
+                              'ELSE item.parent_title '
+                              'END collate name',
+                              'item.release_date')
+        columns = ['feed.orig_url', 'show', 'parent_title', 'release_date']
+        query.set_complex_order_by(columns, sql)
+
+class DRMSort(ItemSort):
+    key = 'drm'
+    columns = ['-has_drm']
+
+class RateSort(ItemSort):
+    key = 'rate'
+    columns = ['remote_downloader.rate']
+
+class ETASort(ItemSort):
+    key = 'eta'
+    columns = ['remote_downloader.eta']
+
+class TorrentDetailsSort(ItemSort):
+    # FIXME: need to implement this
+    key = 'torrent-details'
+    columns = ['id']
+
+class PlaylistSort(ItemSort):
+    key = 'playlist'
+    columns = ['playlist_item_map.position']
+
+SORT_KEY_MAP = dict((sort.key, sort) for sort in util.all_subclasses(ItemSort))
diff -Nru miro-4.0.4/lib/frontends/widgets/itemtrack.py miro-6.0/lib/frontends/widgets/itemtrack.py
--- miro-4.0.4/lib/frontends/widgets/itemtrack.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/frontends/widgets/itemtrack.py 1970-01-01 00:00:00.000000000 +0000
@@ -1,391 +0,0 @@
-# Miro - an RSS based video player application
-# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
-# Participatory Culture Foundation
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-#
-# In addition, as a special exception, the copyright holders give
-# permission to link the code of portions of this program with the OpenSSL
-# library.
-#
-# You must obey the GNU General Public License in all respects for all of
-# the code used other than OpenSSL. If you modify file(s) with this
-# exception, you may extend this exception to your version of the file(s),
-# but you are not obligated to do so. If you do not wish to do so, delete
-# this exception statement from your version. If you delete this exception
-# statement from all source files in the program, then also delete it here.
-
-"""itemtrack.py -- Create and track ItemList objects
-
-itemtrack's job is to create ItemLists and keep them updated. It handles the
-following things:
-    - Sending TrackItems/StopTrackingItems messages
-    - Filtering out items from the messages that don't match the search
-    - Managing the life of an ItemList
-"""
-
-import weakref
-import itertools
-import logging
-
-from miro import app
-from miro import datastructures
-from miro import messages
-from miro import signals
-from miro import search
-from miro.frontends.widgets import itemlist
-from miro.plat.frontends.widgets.threads import call_on_ui_thread
-
-class ItemListTracker(signals.SignalEmitter):
-    """ItemListTracker -- Track ItemLists
-
-    ItemListTracker manages tracking the items for a given type/id. When the
-    first object connects to the initial-list method, ItemListTracker will
-    send out the TrackItems message to the backend.
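
itemsort.py above closes by building SORT_KEY_MAP out of every ItemSort subclass, and descending order is expressed by prefixing column names with '-'. A toy version of both idioms; all_subclasses here is a simplified stand-in for miro.util.all_subclasses:

# Toy version of the subclass registry and column-negation idioms from
# itemsort.py; no SQL layer involved.
def all_subclasses(cls):
    for sub in cls.__subclasses__():
        yield sub
        for deeper in all_subclasses(sub):
            yield deeper

class SortSketch(object):
    key = None
    columns = []

    def __init__(self, ascending=True):
        self.ascending = ascending

    def order_by(self):
        if self.ascending:
            return self.columns
        # negate every column to reverse the sort
        return [c[1:] if c.startswith('-') else '-' + c
                for c in self.columns]

class SizeSortSketch(SortSketch):
    key = 'size'
    columns = ['size']

KEY_MAP = dict((s.key, s) for s in all_subclasses(SortSketch))
assert KEY_MAP['size']().order_by() == ['size']
assert KEY_MAP['size'](ascending=False).order_by() == ['-size']
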
When all objects - disconnect from the initial-list and items-changed method, then the - StopTrackingItems message will be sent. - - ItemListTracker handles filtering out items that don't match a search via - the ``set_search()`` method. - - Attributes: - item_list -- ItemList object containing our items - - Signals: - items-will-change (tracker) -- We are about to modify item_list - initial-list (tracker, items) -- The initial item list was received - items-changed (tracker, added, changed, removed) -- The item list - changed - items-removed-from-source (tracker, removed) - Items were permanently - removed from the item list (as opposed to just filtered out by - the filter). The passed items include all removed items, - irrespective of whether they currently are filtered out by the - filter or not - """ - - - - # maps (type, id) -> ItemListTracker objects - _live_trackers = weakref.WeakValueDictionary() - - @classmethod - def create(cls, type_, id_): - """Get a ItemListTracker - - This method will return an existing ItemListTracker if one already - exists for (type_, id_). If not, it will create a new one. - """ - key = (type_, id_) - if key in cls._live_trackers: - return cls._live_trackers[(type_, id_)] - else: - # need to do a little bit of fancy footwork here because - # _live_trackers is a WeakValueDictionary. - if type_ == 'playlist': - tracker = PlaylistItemListTracker(type_, id_) - else: - tracker = ItemListTracker(type_, id_) - retval = cls._live_trackers[key] = tracker - return retval - - def __init__(self, type_, id_): - """Create a new ItemListTracker - - Don't construct ItemListTracker's directly! Instead use the - create() factory method. - """ - # FIXME: there's probably a better way of doing a factory method to - # create ItemListTrackers. I think a private constructor would be - # ideal, but that's not really possible in python. 
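
The ItemListTracker being removed here follows a lazy-subscription pattern, visible in the connect/disconnect overrides below: the first connect() sends TrackItems to the backend and the last disconnect() sends StopTrackingItems, so backend traffic follows actual UI interest. A stand-in sketch without the message layer:

# Lazy-subscription sketch: start the upstream feed on first use, stop it
# after the last handler goes away. Stand-in class, no Miro messages.
class LazyTrackerSketch(object):
    def __init__(self, start_tracking, stop_tracking):
        self._start = start_tracking
        self._stop = stop_tracking
        self._handlers = set()
        self.is_tracking = False

    def connect(self, handler):
        if not self.is_tracking:
            self._start()
            self.is_tracking = True
        self._handlers.add(handler)

    def disconnect(self, handler):
        self._handlers.discard(handler)
        if self.is_tracking and not self._handlers:
            self._stop()
            self.is_tracking = False
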
- signals.SignalEmitter.__init__(self, 'items-will-change', - 'initial-list', 'items-changed', 'items-removed-from-source') - self.type = type_ - self.item_list = itemlist.ItemList() - self.id = id_ - self.is_tracking = False - self.search_filter = SearchFilter() - self.saw_initial_list = False - - def connect(self, name, func, *extra_args): - if not self.is_tracking: - self._start_tracking() - return signals.SignalEmitter.connect(self, name, func, *extra_args) - - def disconnect(self, callback_handle): - signals.SignalEmitter.disconnect(self, callback_handle) - if self.is_tracking and self._all_handlers_disconnected(): - self._stop_tracking() - - def disconnect_all(self): - signals.SignalEmitter.disconnect_all(self) - self._stop_tracking() - - def _all_handlers_disconnected(self): - for callback_dict in self.signal_callbacks.values(): - if len(callback_dict) > 0: - return False - return True - - def _start_tracking(self): - if self.is_tracking: - return - logging.debug("ItemListTracker -- tracking: %s, %s", self.type, - self.id) - self._send_track_items_message() - app.info_updater.item_list_callbacks.add(self.type, self.id, - self.on_item_list) - app.info_updater.item_changed_callbacks.add(self.type, self.id, - self.on_items_changed) - self.is_tracking = True - - def _send_track_items_message(self): - messages.TrackItems(self.type, self.id).send_to_backend() - - def _stop_tracking(self): - if not self.is_tracking: - return - logging.debug("ItemListTracker -- stopping tracking: %s, %s", - self.type, self.id) - messages.StopTrackingItems(self.type, self.id).send_to_backend() - app.info_updater.item_list_callbacks.remove(self.type, self.id, - self.on_item_list) - app.info_updater.item_changed_callbacks.remove(self.type, self.id, - self.on_items_changed) - self.is_tracking = False - - def on_item_list(self, message): - self.add_initial_items(message.items) - - def is_filtering(self): - """Check if we are filtering out any items.""" - return self.search_filter.is_filtering() - - def add_initial_items(self, items): - self.saw_initial_list = True - items = self.search_filter.filter_initial_list(items) - self.emit('items-will-change', items, [], []) - # call remove all to handle the race described in #16089. We may get - # multiple ItemList messages, in which case we want the last one to be - # the one that sticks. - self.item_list.remove_all() - self.item_list.add_items(items) - self.emit("initial-list", items) - - def on_items_changed(self, message): - if not self.saw_initial_list: - # another hack for #16089, if things get backed up in the wrong - # way, we could get an ItemsChanged message for our old list, - # before the ItemList message for our new one. - return - added, changed, removed = self.search_filter.filter_changes( - message.added, message.changed, message.removed) - self.emit('items-will-change', added, changed, removed) - self.item_list.add_items(added) - self.item_list.update_items(changed) - self.item_list.remove_items(removed) - #Note that the code in PlaybackPlaylist expects this signal order - self.emit("items-removed-from-source", message.removed) - self.emit("items-changed", added, changed, removed) - - def set_search(self, query): - added, removed = self.search_filter.set_search(query) - self.emit("items-will-change", added, [], removed) - self.item_list.add_items(added) - self.item_list.remove_items(removed) - self.emit("items-changed", added, [], removed) - -class PlaylistItemListTracker(ItemListTracker): - """ItemListTracker for playlists. 
- - This class adds a playlist_sort attribute, that contains a - itemlist.PlaylistSort object that is kept up to date. - """ - def __init__(self, type_, id_): - ItemListTracker.__init__(self, type_, id_) - self.playlist_sort = itemlist.PlaylistSort() - - def do_items_will_change(self, added, changed, removed): - self.playlist_sort.items_will_change(added, changed, removed) - - def do_items_removed_from_source(self, removed): - self.playlist_sort.items_removed_from_source(removed) - -class ManualItemListTracker(ItemListTracker): - id_counter = itertools.count() - - @classmethod - def create(cls, info_list): - """Create a new ManualItemListTracker - - This method can safely by used, unlike regular ItemListTrackers - """ - # overide the code to share ItemListTrackers, since it doesn't really - # make sense for ManualItemListTracker. Note: this could just be an - # __init__ method, but I wanted to match the API of ItemListTracker - # (BDK). - my_unique_id = ('item-list-tracker-%d' % - ManualItemListTracker.id_counter.next()) - self = ManualItemListTracker('manual', my_unique_id) - self.info_list = info_list - self.add_initial_items(info_list) - return self - - def _send_track_items_message(self): - messages.TrackItemsManually(self.id, self.info_list).send_to_backend() - -class SearchFilter(object): - """SearchFilter filter out non-matching items from item lists - """ - def __init__(self): - self.searcher = search.ItemSearcher() - self.query = '' - self.all_items = {} # maps id to item info - self.matching_ids = set() - self._pending_changes = datastructures.Fifo() - self._index_pass_scheduled = False - - def is_filtering(self): - return len(self.all_items) > len(self.matching_ids) - - def filter_initial_list(self, items): - """Filter a list of incoming items. - - :param items: list of ItemInfos - - :returns: list of items that match our search - """ - if not self.query: - # special case, just send out the list and calculate the index - # later - self._pending_changes.enqueue((items, [], [])) - self._schedule_indexing() - return items - self._ensure_index_ready() - self._add_items(items) - self.matching_ids = self.searcher.search(self.query) - return [i for i in items if i.id in self.matching_ids] - - def filter_changes(self, added, changed, removed): - """Filter a list of incoming changes - - :param added: list of added ItemInfos - :param changed: list of changed ItemInfos - :param removed: list of removed ItemInfos ids - - :returns: (added, changed, removed), updated based on our search - """ - if not self.query: - # special case, just send out the list and calculate the index - # later - self._pending_changes.enqueue((added, changed, removed)) - self._schedule_indexing() - return added, changed, removed - self._ensure_index_ready() - - self._add_items(added) - self._update_items(changed) - self._remove_ids(removed) - - matches = self.searcher.search(self.query) - old_matches = self.matching_ids - - added_filtered = [i for i in added if i.id in matches] - remove_filtered = old_matches.intersection(removed) - changed_filtered = [] - for info in changed: - if info.id in matches: - if info.id in old_matches: - changed_filtered.append(info) - else: - added_filtered.append(info) - elif info.id in old_matches: - remove_filtered.add(info.id) - self.matching_ids = matches - return added_filtered, changed_filtered, remove_filtered - - def set_search(self, query): - """Change the current search. 
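[Editor's note: SearchFilter.filter_changes() above reconciles a raw (added, changed, removed) delta with the active search: an item whose update makes it start matching is reported as an add, and one that stops matching becomes a remove. The same set arithmetic as a pure function; Info stands in for ItemInfo.]

from collections import namedtuple

Info = namedtuple('Info', 'id')

def filter_changes(matches, old_matches, added, changed, removed):
    # matches/old_matches: sets of ids after/before applying the search
    added_f = [i for i in added if i.id in matches]
    removed_f = old_matches.intersection(removed)
    changed_f = []
    for info in changed:
        if info.id in matches:
            if info.id in old_matches:
                changed_f.append(info)    # visible before and after
            else:
                added_f.append(info)      # started matching: an add
        elif info.id in old_matches:
            removed_f.add(info.id)        # stopped matching: a remove
    return added_f, changed_f, removed_f

# item 2 starts matching the query, item 3 stops matching:
a, c, r = filter_changes(matches={1, 2}, old_matches={1, 3},
                         added=[], changed=[Info(2), Info(3)], removed=[])
assert [i.id for i in a] == [2] and c == [] and r == {3}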
- - :param query: new search to filter on - - :returns: (added, removed) based on the new search - """ - self._ensure_index_ready() - self.query = query - matches = self.searcher.search(self.query) - added = matches - self.matching_ids - removed = self.matching_ids - matches - self.matching_ids = matches - added_infos = [self.all_items[id_] for id_ in added] - return added_infos, removed - - def _add_items(self, items): - for item in items: - self.all_items[item.id] = item - self.searcher.add_item(item) - - def _update_items(self, items): - for item in items: - self.all_items[item.id] = item - try: - self.searcher.update_item(item) - except KeyError: - # This happens when the item is not in the index - # As a precaution, try out best to recover, log the error, - # then just add the item. (see #17152 for details). - app.widgetapp.handle_soft_failure("Item Track update", - "Tried to update item not in index: %s" % item.id, - with_exception=True) - self.searcher.add_item(item) - - def _remove_ids(self, id_list): - for id_ in id_list: - del self.all_items[id_] - try: - self.searcher.remove_item(id_) - except KeyError: - # This happens when the item is not in the index. As a - # precaution, try to recover. log the error, then keep going. - app.widgetapp.handle_soft_failure("Item Track update", - "Tried to update item not in index: %s" % id_, - with_exception=True) - - def _ensure_index_ready(self): - if len(self._pending_changes) > 0: - while len(self._pending_changes) > 0: - (added, changed, removed) = self._pending_changes.dequeue() - self._add_items(added) - self._update_items(changed) - self._remove_ids(removed) - self.matching_ids = self.searcher.search(self.query) - - def _schedule_indexing(self): - if not self._index_pass_scheduled: - call_on_ui_thread(self._do_index_pass) - self._index_pass_scheduled = True - - def _do_index_pass(self): - self._index_pass_scheduled = False - # find a chunk of items, process them, then schedule another call - if len(self._pending_changes) == 0: - return - (added, changed, removed) = self._pending_changes.dequeue() - self._add_items(added) - self._update_items(changed) - self._remove_ids(removed) - if len(self._pending_changes) > 0: - self._schedule_indexing() - else: - self.matching_ids = self.searcher.search(self.query) diff -Nru miro-4.0.4/lib/frontends/widgets/keyboard.py miro-6.0/lib/frontends/widgets/keyboard.py --- miro-4.0.4/lib/frontends/widgets/keyboard.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/keyboard.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,69 @@ +# Miro - an RSS based video player application +# Copyright (C) 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""Define keyboard input in a platform-independant way.""" + +(CTRL, ALT, SHIFT, CMD, MOD, RIGHT_ARROW, LEFT_ARROW, UP_ARROW, + DOWN_ARROW, SPACE, ENTER, DELETE, BKSPACE, ESCAPE, + F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12) = range(26) + +class Shortcut: + """Defines a shortcut key combination used to trigger this + menu item. + + The first argument is the shortcut key. Other arguments are + modifiers. + + Examples: + + >>> Shortcut("x", MOD) + >>> Shortcut(BKSPACE, MOD) + + This is wrong: + + >>> Shortcut(MOD, "x") + """ + def __init__(self, shortcut, *modifiers): + self.shortcut = shortcut + self.modifiers = modifiers + + def _get_key_symbol(self, value): + """Translate key values to their symbolic names.""" + if isinstance(self.shortcut, int): + shortcut_string = '' + for name, value in globals().iteritems(): + if value == self.shortcut: + return name + return repr(value) + + def __str__(self): + shortcut_string = self._get_key_symbol(self.shortcut) + mod_string = repr(set(self._get_key_symbol(k) for k in + self.modifiers)) + return "Shortcut(%s, %s)" % (shortcut_string, mod_string) diff -Nru miro-4.0.4/lib/frontends/widgets/menus.py miro-6.0/lib/frontends/widgets/menus.py --- miro-4.0.4/lib/frontends/widgets/menus.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/menus.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,190 +29,117 @@ """Menu handling code.""" +import collections +import itertools +import logging +import subprocess +import sqlite3 +import time + from miro import app from miro import errors +from miro import messages from miro import prefs from miro import signals from miro import conversions +from miro.data import connectionpool +from miro.data.item import DBErrorItemInfo +from miro.frontends.widgets.keyboard import (Shortcut, CTRL, ALT, SHIFT, CMD, + MOD, RIGHT_ARROW, LEFT_ARROW, UP_ARROW, DOWN_ARROW, SPACE, ENTER, DELETE, + BKSPACE, ESCAPE, F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12) +from miro.frontends.widgets import dialogs from miro.frontends.widgets.widgetconst import COLUMN_LABELS from miro.frontends.widgets.widgetstatestore import WidgetStateStore - +from miro.frontends.widgets import widgetutil +from miro.plat.frontends.widgets import widgetset +# import menu widgets into our namespace for easy access +from miro.plat.frontends.widgets.widgetset import (Separator, RadioMenuItem, + CheckMenuItem) from miro.gtcache import gettext as _ +from miro.plat import resources +from miro.plat import utils -(CTRL, ALT, SHIFT, CMD, RIGHT_ARROW, LEFT_ARROW, UP_ARROW, - DOWN_ARROW, SPACE, ENTER, DELETE, BKSPACE, ESCAPE, - F1, F2, F3, F4, F5, F6, F7, F8, F9, F10, F11, F12) = range(25) - -MOD = CTRL - -def set_mod(modifier): - """Allows 
the platform to change the MOD key. OSX and - Windows have different mod keys. - - Examples: - >>> set_mod(CTRL) - >>> set_mod(CMD) - """ - global MOD - MOD = modifier - -class Shortcut: - """Defines a shortcut key combination used to trigger this - menu item. - - The first argument is the shortcut key. Other arguments are - modifiers. - - Examples: - - >>> Shortcut("x", MOD) - >>> Shortcut(BKSPACE, MOD) +class MenuItem(widgetset.MenuItem): + """Portable MenuItem class. - This is wrong: - - >>> Shortcut(MOD, "x") - """ - def __init__(self, shortcut, *modifiers): - self.shortcut = shortcut - self.modifiers = modifiers - -class MenuItem: - """Single item in the menu that can be clicked on that has an action. - - :param label: The label it has (must be internationalized) - :param action: The action string for this menu item. - :param shortcuts: None, the Shortcut, or tuple of Shortcut objects. - :param groups: The action groups this item is enabled in. By default - this is ["AlwaysOn"] - :param state_labels: If this menu item has states, then this is - the name/value pairs for all states. - - Example: - - >>> MenuItem(_("Preferences"), "EditPreferences") - >>> MenuItem(_("Cu_t"), "ClipboardCut", Shortcut("x", MOD)) - >>> MenuItem(_("_Update Podcasts and Library"), "UpdatePodcasts", - ... (Shortcut("r", MOD), Shortcut(F5))) - >>> MenuItem(_("_Play"), "PlayPauseItem", - ... play=_("_Play"), pause=_("_Pause")) + This adds group handling to the platform menu items. """ - def __init__(self, label, action, shortcuts=None, groups=None, - **state_labels): - self.label = label - self.action = action - if shortcuts is None: - shortcuts = () - if not isinstance(shortcuts, tuple): - shortcuts = (shortcuts,) - self.shortcuts = shortcuts - if groups is None: - groups = ["AlwaysOn"] - self.groups = groups - self.state_labels = state_labels + # group_map is used for the legacy menu updater code + group_map = collections.defaultdict(set) -class RadioMenuItem(MenuItem): - """MenuItem that has a radio button is grouped with other RadioMenuItems. - - :param radio_group: identifier for the group that this menu item is in. - """ - def __init__(self, label, action, radio_group, shortcuts=None, - groups=None, **state_labels): - MenuItem.__init__(self, label, action, shortcuts, groups, - **state_labels) - self.radio_group = radio_group + def __init__(self, label, name, shortcut=None, groups=None, + **state_labels): + widgetset.MenuItem.__init__(self, label, name, shortcut) + # state_labels is used for the legacy menu updater code + self.state_labels = state_labels + if groups: + if len(groups) > 1: + raise ValueError("only support one group") + MenuItem.group_map[groups[0]].add(self) -class CheckMenuItem(MenuItem): - """MenuItem that has a check mark. +_menu_item_counter = itertools.count() +def menu_item(label, shortcut=None, groups=None, **state_labels): + def decorator(func): + func.menu_item_info = { + 'label': label, + 'name': func.__name__, + 'shortcut': shortcut, + 'groups': groups, + 'state_labels': state_labels, + 'order': _menu_item_counter.next() + } + return func + return decorator - :param check_group: the group this menu item is in - """ - def __init__(self, label, action, check_group, shortcuts=None, - groups=None, **state_labels): - MenuItem.__init__(self, label, action, shortcuts, groups, - **state_labels) - self.check_group = check_group +class Menu(widgetset.Menu): + """Portable menu class. -class Separator: - """This denotes a separator in the menu. 
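[Editor's note: the @menu_item decorator above only tags a method with its label, name, and a ticket from the shared itertools.count(); Menu.make_items() just below gathers the tagged methods back out of the class __dict__. The counter is what restores declaration order, since a class __dict__ is unordered in Python 2. The mechanism in miniature:]

import itertools

_order = itertools.count()

def menu_item(label):
    def decorator(func):
        func.menu_item_info = {'label': label, 'name': func.__name__,
                               'order': next(_order)}
        return func
    return decorator

class DemoMenu(object):
    @menu_item("Profile Message")
    def on_profile_message(self):
        pass

    @menu_item("Memory Stats")
    def on_memory_stats(self):
        pass

    def labels(self):
        # collect the tagged methods in declaration order
        tagged = [m for m in self.__class__.__dict__.values()
                  if hasattr(m, 'menu_item_info')]
        tagged.sort(key=lambda m: m.menu_item_info['order'])
        return [m.menu_item_info['label'] for m in tagged]

assert DemoMenu().labels() == ["Profile Message", "Memory Stats"]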
+ This class adds some code to make writing menu items simpler. Menu items + can be added by defining an action handler method, and using the + @menu_item decorator """ - def __init__(self): - self.action = None - -class Menu: - """A Menu holds a list of MenuItems and Menus. + # FIXME: the @menu_item functionality is totally optional, so most menus + # are implemented without it. The @menu_item approach is nicer through, + # so we should switch the other classes to use it. + + def __init__(self, label, name, child_items=()): + widgetset.Menu.__init__(self, label, name, + list(child_items) + self.make_items()) + + def make_items(self): + # list of (order, label, name, callback) tuples + menu_item_methods = [] + for obj in self.__class__.__dict__.values(): + if callable(obj) and hasattr(obj, 'menu_item_info'): + menu_item_methods.append(obj) + menu_item_methods.sort(key=lambda obj: obj.menu_item_info['order']) + menu_items = [] + for meth in menu_item_methods: + constructor_args = meth.menu_item_info.copy() + del constructor_args['order'] + menu_item = MenuItem(**constructor_args) + menu_item.connect("activate", meth) + menu_items.append(menu_item) + return menu_items - Example: - >>> Menu(_("P_layback"), "Playback", [ - ... MenuItem(_("_Foo"), "Foo"), - ... MenuItem(_("_Bar"), "Bar") - ... ]) - >>> Menu("", "toplevel", [ - ... Menu(_("_File"), "File", [ ... ]) - ... ]) - """ - def __init__(self, label, action, menuitems, groups=None): - self.label = label - self.action = action - self.menuitems = list(menuitems) - if groups is None: - groups = ["AlwaysOn"] - self.groups = groups - - def __iter__(self): - for mem in self.menuitems: - yield mem - if isinstance(mem, Menu): - for mem2 in mem: - yield mem2 - - def has(self, action): - for mem in self: - if mem.action == action: - return True - return False +class MenuItemFetcher(object): + """Get MenuItems by their name quickly. """ - def get(self, action, default=None): - for mem in self: - if mem.action == action: - return mem - - if default is not None: - return default - - raise ValueError("%s is not in this menu." % action) - - def index(self, action): - for i, mem in enumerate(self.menuitems): - if mem.action == action: - return i - raise ValueError("%s not in this menu." % action) - - def remove(self, action): - # FIXME - this won't remove separators--probably should do - # a pass to remove a separator for two separators in a row - # or a separator at the beginning or end of the list - self.menuitems = [m for m in self.menuitems if m.action != action] - for mem in self.menuitems: - if isinstance(mem, Menu): - mem.remove(action) - - def count(self): - return len(self.menuitems) - - def insert(self, index, menuitem): - self.menuitems.insert(index, menuitem) + def __init__(self): + self._cache = {} - def append(self, menuitem): - self.menuitems.append(menuitem) + def __getitem__(self, name): + if name in self._cache: + return self._cache[name] + else: + menu_item = app.widgetapp.menubar.find(name) + self._cache[name] = menu_item + return menu_item -def get_menu(): - """Returns the default menu structure. +def get_app_menu(): + """Returns the default menu structure.""" - Call this, then make whatever platform-specific changes you - need to make. 
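[Editor's note: MenuItemFetcher above is a small memo around menubar.find(), which presumably has to walk the menu tree on every call; results are cached per name and assumed never to be replaced afterwards. The same shape with the lookup injected:]

class CachingFetcher(object):
    def __init__(self, find):
        self._find = find          # e.g. app.widgetapp.menubar.find
        self._cache = {}

    def __getitem__(self, name):
        if name not in self._cache:
            self._cache[name] = self._find(name)
        return self._cache[name]

calls = []
fetcher = CachingFetcher(lambda name: calls.append(name) or name.upper())
assert fetcher['PlayPauseItem'] == 'PLAYPAUSEITEM'
assert fetcher['PlayPauseItem'] == 'PLAYPAUSEITEM'
assert calls == ['PlayPauseItem']      # find() ran only once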
- """ - mbar = Menu("", "TopLevel", [ - Menu(_("_File"), "FileMenu", [ + file_menu = Menu(_("_File"), "FileMenu", [ MenuItem(_("_Open"), "Open", Shortcut("o", MOD), groups=["NonPlaying"]), Menu(_("Import"), "Import", [ @@ -232,13 +159,17 @@ Separator(), MenuItem(_("Download from a URL"), "NewDownload", groups=["NonPlaying"]), + MenuItem(_("Edit _Item Details..."), "EditItems", + Shortcut("i", MOD), + groups=["LocalItemsSelected"], + plural=_("Edit _Items")), + CheckMenuItem(_('Use album art and song info from online ' + 'lookup database (Echonest)'), + 'UseEchonestData'), Separator(), MenuItem(_("Remove Item"), "RemoveItems", groups=["LocalItemsSelected"], plural=_("Remove Items")), - MenuItem(_("Edit _Item"), "EditItems", Shortcut("i", MOD), - groups=["LocalItemsSelected"], - plural=_("Edit _Items")), MenuItem(_("_Save Item As"), "SaveItem", Shortcut("s", MOD), groups=["LocalPlayableSelected"], @@ -250,9 +181,9 @@ MenuItem(_("Check Version"), "CheckVersion"), MenuItem(_("Preferences"), "EditPreferences"), MenuItem(_("_Quit"), "Quit", Shortcut("q", MOD)), - ]), + ]) - Menu(_("_Sidebar"), "SidebarMenu", [ + sidebar_menu = Menu(_("_Sidebar"), "SidebarMenu", [ MenuItem(_("Add Podcast"), "NewPodcast", Shortcut("n", MOD), groups=["NonPlaying"]), @@ -279,7 +210,7 @@ site=_("Remove Source"), sites=_("Remove Sources")), MenuItem(_("Update Podcast"), "UpdatePodcasts", - (Shortcut("r", MOD), Shortcut(F5)), + Shortcut("r", MOD), groups=["PodcastsSelected"], plural=_("Update Podcasts")), MenuItem(_("Update All Podcasts and Library"), @@ -296,9 +227,9 @@ groups=["PodcastSelected"]), MenuItem(_("Copy URL"), "CopyPodcastURL", groups=["PodcastSelected"]), - ]), + ]) - Menu(_("_Playlists"), "PlaylistsMenu", [ + playlists_menu = Menu(_("_Playlists"), "PlaylistsMenu", [ MenuItem(_("New _Playlist"), "NewPlaylist", Shortcut("p", MOD), groups=["NonPlaying"]), @@ -313,13 +244,11 @@ plural=_("Remove Playlists"), folders=_("Remove Playlist Folders"), folder=_("Remove Playlist Folder")), - ]), + ]) - Menu(_("P_layback"), "PlaybackMenu", [ + playback_menu = Menu(_("P_layback"), "PlaybackMenu", [ MenuItem(_("Play"), "PlayPauseItem", - groups=["PlayPause"], - play=_("Play"), - pause=_("Pause")), + groups=["PlayPause"]), MenuItem(_("Stop"), "StopItem", Shortcut("d", MOD), groups=["Playing"]), Separator(), @@ -340,34 +269,39 @@ MenuItem(_("Volume Down"), "DownVolume", Shortcut(DOWN_ARROW,MOD)), Separator(), + MenuItem(_("Go to Currently Playing"), + "GotoCurrentlyPlaying", + Shortcut("l", MOD), + groups=["Playing"]), + Separator(), MenuItem(_("_Fullscreen"), "Fullscreen", - (Shortcut("f", MOD), Shortcut(ENTER, ALT)), + Shortcut("f", MOD), groups=["PlayingVideo"]), MenuItem(_("_Toggle Detached/Attached"), "ToggleDetach", Shortcut("t", MOD), groups=["PlayingVideo"]), - Menu(_("Subtitles"), "SubtitlesMenu", [ - MenuItem(_("None Available"), "NoneAvailable", + Menu(_("Audio Track"), "AudioTrackMenu", [ + MenuItem(_("None Available"), "NoAudioTracks", groups=["NeverEnabled"]), - Separator(), - MenuItem(_("Select a Subtitles File..."), - "SubtitlesSelect", - groups=["PlayingLocalVideo"]) ]), - ]), - - Menu(_("Sorts"), "ViewMenu", _get_view_menu()), - - Menu(_("_Convert"), "ConvertMenu", _get_convert_menu()), - - Menu(_("_Help"), "HelpMenu", [ + Menu(_("Subtitles"), "SubtitlesMenu", [ + Menu(_("_Encoding"), "SubtitleEncodingMenu", []), + ]), + ]) + # hide the SubtitleEncodingMenu until it's populated with items. 
On OSX, + # we don't support it yet + playback_menu.find("SubtitleEncodingMenu").hide() + + sorts_menu = Menu(_("Sorts"), "SortsMenu", []) + convert_menu = Menu(_("_Convert"), "ConvertMenu", [ + MenuItem(_("Show Conversion Folder"), "RevealConversionFolder") + ]) + help_menu = Menu(_("_Help"), "HelpMenu", [ MenuItem(_("About %(name)s", {'name': app.config.get(prefs.SHORT_APP_NAME)}), "About") ]) - ]) - help_menu = mbar.get("HelpMenu") if app.config.get(prefs.DONATE_URL): help_menu.append(MenuItem(_("Donate"), "Donate")) @@ -382,73 +316,185 @@ if app.config.get(prefs.PLANET_URL): help_menu.append(MenuItem(_("Planet Miro"), "Planet")) + all_menus = [file_menu, sidebar_menu, playlists_menu, playback_menu, + sorts_menu, convert_menu, help_menu ] + if app.debugmode: - dev_menu = Menu(_("Dev"), "DevMenu", [ - MenuItem(_("Profile Message"), "ProfileMessage"), - MenuItem(_("Profile Redraw"), "ProfileRedraw"), - MenuItem(_("Test Crash Reporter"), "TestCrashReporter"), - MenuItem(_("Test Soft Crash Reporter"), - "TestSoftCrashReporter"), - MenuItem(_("Memory Stats"), "MemoryStats") - ]) - - mbar.menuitems.append(dev_menu) - return mbar - -def _get_convert_menu(): - menu = list() - sections = conversions.conversion_manager.get_converters() - for index, section in enumerate(sections): - for converter in section[1]: - handler_name = make_convert_handler(converter) - item = MenuItem(converter.displayname, handler_name, - groups=["LocalPlayablesSelected"]) - menu.append(item) - if index+1 < len(sections): - menu.append(Separator()) - menu.append(Separator()) - menu.append(MenuItem(_("Show Conversion Folder"), "RevealConversionFolder")) - return menu - -def add_subtitle_encoding_menu(menubar, category_label, *encodings): - """Helper method to set up the subtitles encoding menu. - - This method should be called for each category of subtitle encodings (East - Asian, Western European, Unicode, etc). Pass it the list of encodings for - that category. - - :param category_label: human-readable name for the category - :param encodings: list of (label, encoding) tuples. label is a - human-readable name, and encoding is a value that we can pass to - VideoDisplay.select_subtitle_encoding() - """ - subtitles_menu = menubar.get("PlaybackMenu").get("SubtitlesMenu") - try: - encoding_menu = subtitles_menu.get("SubtitleEncodingMenu") - except ValueError: - # first time calling this function, we need to set up the menu. 
- encoding_menu = Menu(_("_Encoding"), - "SubtitleEncodingMenu", [], groups=['PlayingVideo']) - subtitles_menu.append(encoding_menu) - default_item = RadioMenuItem(_('Default (UTF-8)'), - "SubtitleEncoding-Default", 'subtitle-encoding', - groups=['PlayingVideo']) - encoding_menu.append(default_item) - app.menu_manager.subtitle_encoding_enabled = True - - category_menu = Menu(category_label, - "SubtitleEncodingCat%s" % encoding_menu.count(), [], - groups=['PlayingVideo']) - encoding_menu.append(category_menu) - - for encoding, name in encodings: - label = '%s (%s)' % (name, encoding) - category_menu.append(RadioMenuItem(label, - 'SubtitleEncoding-%s' % encoding, - 'subtitle-encoding', groups=["PlayingVideo"])) + all_menus.append(DevMenu()) + return all_menus + +class DevMenu(Menu): + def __init__(self): + Menu.__init__(self, _("Dev"), "DevMenu") + + @menu_item(_("Profile Message")) + def on_profile_message(menu_item): + app.widgetapp.setup_profile_message() + + @menu_item(_("Profile Redraw")) + def on_profile_redraw(menu_item): + app.widgetapp.profile_redraw() + + class TestIntentionalCrash(StandardError): + pass + + @menu_item(_("Test Crash Reporter")) + def on_test_crash_reporter(menu_item): + raise TestIntentionalCrash("intentional error here") + + @menu_item(_("Test Soft Crash Reporter")) + def on_test_soft_crash_reporter(menu_item): + app.widgetapp.handle_soft_failure("testing soft crash reporter", + 'intentional error', with_exception=False) + + @menu_item(_("Memory Stats")) + def on_memory_stats(menu_item): + app.widgetapp.memory_stats() + + @menu_item(_("Force Feedparser Processing")) + def on_force_feedparser_processing(menu_item): + app.widgetapp.force_feedparser_processing() + + @menu_item(_("Clog Backend")) + def on_clog_backend(menu_item): + app.widgetapp.clog_backend() + + @menu_item(_("Run Echoprint")) + def on_run_echoprint(menu_item): + print 'Running echoprint' + print '-' * 50 + subprocess.call([utils.get_echoprint_executable_path()]) + print '-' * 50 + + @menu_item(_("Run ENMFP")) + def on_run_enmfp(menu_item): + enmfp_info = utils.get_enmfp_executable_info() + print 'Running enmfp-codegen' + if 'env' in enmfp_info: + print 'env: %s' % enmfp_info['env'] + print '-' * 50 + subprocess.call([enmfp_info['path']], env=enmfp_info.get('env')) + print '-' * 50 + + @menu_item(_("Run Donate Manager Power Toys")) + def on_run_donate_manager_powertoys(menu_item): + app.donate_manager.run_powertoys() + + @menu_item(_("Image Render Test")) + def on_image_render_test(menu_item): + t = widgetset.Table(4, 4) + t.pack(widgetset.Label("ImageDisplay"), 1, 0) + t.pack(widgetset.Label("ImageSurface.draw"), 2, 0) + t.pack(widgetset.Label("ImageSurface.draw_rect"), 3, 0) + t.pack(widgetset.Label("Normal"), 0, 1) + t.pack(widgetset.Label("resize() called"), 0, 2) + t.pack(widgetset.Label("crop_and_scale() called"), 0, 3) + t.set_column_spacing(20) + t.set_row_spacing(20) + w = widgetset.Window("Image render test", + widgetset.Rect(100, 300, 800, 600)) + w.set_content_widget(t) + + path = resources.path("images/album-view-default-audio.png") + image = widgetset.Image(path) + resize = image.resize(image.width / 2, image.height / 2) + crop_and_scale = image.crop_and_scale(20, 0, + image.width-40, image.height, + image.width, image.height) + def add_to_table(widget, col, row): + t.pack(widgetutil.align(widget, xalign=0, yalign=0), col, row) + add_to_table(widgetset.ImageDisplay(image), 1, 1) + add_to_table(widgetset.ImageDisplay(resize), 1, 2) + add_to_table(widgetset.ImageDisplay(crop_and_scale), 
1, 3) + + class ImageSurfaceDrawer(widgetset.DrawingArea): + def __init__(self, image, use_draw_rect): + self.image = widgetset.ImageSurface(image) + self.use_draw_rect = use_draw_rect + widgetset.DrawingArea.__init__(self) + + def size_request(self, layout): + return self.image.width, self.image.height + + def draw(self, context, layout): + if not self.use_draw_rect: + self.image.draw(context, 0, 0, image.width, image.height) + else: + x_stride = int(image.width // 10) + y_stride = int(image.height // 10) + for x in range(0, int(image.width), x_stride): + for y in range(0, int(image.height), y_stride): + width = min(x_stride, image.width-x) + height = min(y_stride, image.height-y) + print x, y, width, height + self.image.draw_rect(context, x, y, x, y, width, + height) + add_to_table(ImageSurfaceDrawer(image, False), 2, 1) + add_to_table(ImageSurfaceDrawer(resize, False), 2, 2) + add_to_table(ImageSurfaceDrawer(crop_and_scale, False), 2, 3) + add_to_table(ImageSurfaceDrawer(image, True), 3, 1) + add_to_table(ImageSurfaceDrawer(resize, True), 3, 2) + add_to_table(ImageSurfaceDrawer(crop_and_scale, True), 3, 3) + + w.show() + + @menu_item(_("Set Echonest Retry Timeout")) + def set_echonest_retry_timout(menu_item): + # set LAST_RETRY_NET_LOOKUP to 1 week ago minus 1 minute + new_value = int(time.time()) - (60 * 60 * 24 * 7) + 60 + app.config.set(prefs.LAST_RETRY_NET_LOOKUP, new_value) + + @menu_item(_("Test Database Error Item Rendering")) + def test_database_error_item_rendering(menu_item): + displayed = app.item_list_controller_manager.displayed + if displayed is None: + logging.warn("test_database_error_item_rendering: " + "no item list displayed") + return + # replace all currently loaded item infos with DBError items + item_list = displayed.item_list + changed_ids = [] + for item_id, item_info in item_list.row_data.items(): + if item_info is not None: + item_list.row_data[item_id] = DBErrorItemInfo(item_id) + changed_ids.append(item_id) + item_list.emit('will-change') + item_list.emit('items-changed', changed_ids) + + @menu_item(_("Force Main DB Save Error")) + def on_force_device_db_save_error(menu_item): + messages.ForceDBSaveError().send_to_backend() + + @menu_item(_("Force Device DB Save Error")) + def on_force_device_db_save_error(menu_item): + selection_type, selected_tabs = app.tabs.selection + if (selection_type != 'connect' or + len(selected_tabs) != 1 or + not isinstance(selected_tabs[0], messages.DeviceInfo)): + dialogs.show_message("Usage", + "You must have a device tab selected to " + "force a device database error") + return + messages.ForceDeviceDBSaveError(selected_tabs[0]).send_to_backend() + + @menu_item(_("Force Frontend DB Errors")) + def force_frontend_backend_db_errors(menu_item): + old_execute = connectionpool.Connection.execute + def new_execute(*args, **kwargs): + raise sqlite3.DatabaseError("Fake Error") + connectionpool.Connection.execute = new_execute + def undo(): + connectionpool.Connection.execute = old_execute + app.db_error_handler.retry_callbacks.insert(0, undo) action_handlers = {} group_action_handlers = {} + +def on_menubar_activate(menubar, action_name): + callback = lookup_handler(action_name) + if callback is not None: + callback() + def lookup_handler(action_name): """For a given action name, get a callback to handle it. Return None if no callback is found. 
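[Editor's note: on_menubar_activate() above funnels every menu activation through a name-to-callback registry; the @action_handler decorator used throughout this file fills it in. Its definition sits in unchanged context lines not shown here, so this sketch covers only the simple, ungrouped case.]

action_handlers = {}

def action_handler(name):
    # register a callback under an action name
    def decorator(func):
        action_handlers[name] = func
        return func
    return decorator

def lookup_handler(action_name):
    return action_handlers.get(action_name)

def on_menubar_activate(action_name):
    callback = lookup_handler(action_name)
    if callback is not None:
        callback()              # unknown actions are silently ignored

@action_handler("Open")
def on_open():
    print("Open activated")

on_menubar_activate("Open")     # prints "Open activated"
on_menubar_activate("Bogus")    # no handler, no error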
@@ -487,9 +533,6 @@ def make_convert_handler(converter): return "ConvertItemTo-" + converter.identifier -def make_column_toggle_handler(name): - return "ToggleColumn-" + name - # File menu @action_handler("Open") def on_open(): @@ -499,10 +542,6 @@ def on_new_download(): app.widgetapp.new_download() -@action_handler("AddFiles") -def on_add_files(): - app.widgetapp.add_files() - @action_handler("CheckVersion") def on_check_version(): app.widgetapp.check_version() @@ -654,6 +693,10 @@ def on_down_volume(): app.widgetapp.down_volume() +@action_handler("GotoCurrentlyPlaying") +def on_goto_currently_playing(): + app.playback_manager.goto_currently_playing() + @action_handler("Fullscreen") def on_fullscreen(): app.playback_manager.toggle_fullscreen() @@ -666,13 +709,6 @@ def on_subtitles_select(): app.playback_manager.open_subtitle_file() -@group_action_handler("SubtitleEncoding") -def on_subtitle_encoding(converter): - if converter == 'Default': - app.playback_manager.select_subtitle_encoding(None) - else: - app.playback_manager.select_subtitle_encoding(converter) - # Sorts menu @group_action_handler("ToggleColumn") def on_toggle_column(name): @@ -707,62 +743,53 @@ def on_planet(): app.widgetapp.open_url(app.config.get(prefs.PLANET_URL)) -@action_handler("ProfileMessage") -def on_profile_message(): - app.widgetapp.setup_profile_message() - -@action_handler("ProfileRedraw") -def on_profile_redraw(): - app.widgetapp.profile_redraw() - -class TestIntentionalCrash(Exception): - pass - -@action_handler("TestCrashReporter") -def on_test_crash_reporter(): - raise TestIntentionalCrash("intentional error here") - -@action_handler("TestSoftCrashReporter") -def on_test_soft_crash_reporter(): - app.widgetapp.handle_soft_failure("testing soft crash reporter", - 'intentional error', with_exception=False) - -@action_handler("MemoryStats") -def on_memory_stats(): - app.widgetapp.memory_stats() - -def generate_action_groups(menu_structure): - """Takes a menu structure and returns a map of action group name to - list of menu actions in that group. +class LegacyMenuUpdater(object): + """This class contains the logic to update the menus based on enabled + groups and state labels. + + Now that we can directly manipulate MenuItems, we probably can re-write + this stuff in a cleaner and better way. """ - action_groups = {} - for menu in menu_structure: - if hasattr(menu, "groups"): - for grp in menu.groups: - action_groups.setdefault(grp, []).append(menu.action) - return action_groups + # NOTE: this code is probably extremely brittle. If you want to change + # something, you are probably better off rewriting it and moving it to + # MenuManager + # + # FIXME: we should probably just move all this code to new classes + # eventually -class MenuStateManager(signals.SignalEmitter): - """Updates the menu based on the current selection. + def __init__(self): + self.menu_item_fetcher = MenuItemFetcher() - This includes enabling/disabling menu items, changing menu text - for plural selection and enabling/disabling the play button. 
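[Editor's note: update_enabled_groups() above is a single sweep over MenuItem.group_map: items whose group is in the enabled set are enabled, everything else is disabled. With a stub item class in place of the widgetset one:]

from collections import defaultdict

group_map = defaultdict(set)          # group name -> menu items

class StubItem(object):
    def __init__(self, name, group):
        self.name, self.enabled = name, True
        group_map[group].add(self)
    def enable(self):
        self.enabled = True
    def disable(self):
        self.enabled = False

play = StubItem('PlayPauseItem', 'PlayPause')
quit_ = StubItem('Quit', 'AlwaysOn')

def update_enabled_groups(enabled_groups):
    for group_name, items in group_map.items():
        for item in items:
            if group_name in enabled_groups:
                item.enable()
            else:
                item.disable()

update_enabled_groups({'AlwaysOn'})   # nothing playable selected
assert quit_.enabled and not play.enabled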
The - play button is obviously not a menu item, but it's pretty closely - related + def update(self, reasons): + # reset enabled_groups and state_labels + self.reset() + # update enabled_groups and state_labels based on the state of the UI + self._handle_selected_tabs() + self._handle_selected_items() + # update menu items based on enabled_groups and state_labels + self.update_enabled_groups() + self.update_state_labels() + + def update_enabled_groups(self): + for group_name, items in MenuItem.group_map.iteritems(): + if group_name in self.enabled_groups: + for item in items: + item.enable() + else: + for item in items: + item.disable() - Whenever code makes a change that could possibly affect which menu - items should be enabled/disabled, it should call the - update_menus() method. - """ - def __init__(self): - signals.SignalEmitter.__init__(self) - self.create_signal('enabled-changed') - self.create_signal('radio-group-changed') - self.create_signal('checked-changed') - self.enabled_groups = set(['AlwaysOn']) - self.states = {} - self.play_pause_state = "play" - self.subtitle_encoding_enabled = False + def update_state_labels(self): + for state, names in self.states.iteritems(): + for name in names: + menu_item = self.menu_item_fetcher[name] + try: + new_label = menu_item.state_labels[state] + except KeyError: + logging.warn("Error trying to set menu item %s to %s", + name, state) + else: + menu_item.set_label(new_label) def reset(self): self.states = {"feed": [], @@ -796,13 +823,6 @@ else: self.enabled_groups.add('NonPlaying') - def _set_play_pause(self): - if ((not app.playback_manager.is_playing - or app.playback_manager.is_paused)): - self.play_pause_state = 'play' - else: - self.play_pause_state = 'pause' - def _handle_feed_selection(self, selected_feeds): """Handle the user selecting things in the feed list. 
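[Editor's note: update_state_labels() above resolves the keyword labels given at MenuItem construction time (plural=..., folders=..., and so on) against the states computed from the current selection, which is how "Remove Item" becomes "Remove Items" for a multiple selection. In miniature:]

class StubItem(object):
    def __init__(self, label, **state_labels):
        self.label = label
        self.state_labels = state_labels
    def set_label(self, label):
        self.label = label

remove = StubItem("Remove Item", plural="Remove Items")
# with two or more local items selected the updater records
# states = {'plural': ['RemoveItems']} and then applies:
remove.set_label(remove.state_labels['plural'])
assert remove.label == "Remove Items"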
@@ -825,9 +845,7 @@ if len(selected_folders) == len(selected_feeds): self.states["folders"].append("RemoveSomething") else: - self.states["plural"].append("RemoveSomething") self.states["feeds"].append("RemoveSomething") - self.states["feeds"].append("RenameSomething") self.states["plural"].append("UpdatePodcasts") def _handle_site_selection(self, selected_sites): @@ -873,69 +891,45 @@ # we don't change menu items for the static tab list pass - def _update_menus_for_selected_tabs(self): + def _handle_selected_tabs(self): try: selection_type, selected_tabs = app.tabs.selection except errors.WidgetActionError: return - if len(selected_tabs) == 1: - app.menu_manager._update_view_menu() if selection_type is None or selected_tabs[0].type == u'tab': pass elif selection_type == 'feed': - app.menu_manager._handle_feed_selection(selected_tabs) + self._handle_feed_selection(selected_tabs) elif selection_type == 'playlist': - app.menu_manager._handle_playlist_selection(selected_tabs) + self._handle_playlist_selection(selected_tabs) elif selection_type in ('static', 'library'): - app.menu_manager._handle_static_tab_selection(selected_tabs) + self._handle_static_tab_selection(selected_tabs) elif selection_type in ('site', 'store'): - app.menu_manager._handle_site_selection(selected_tabs) + self._handle_site_selection(selected_tabs) elif selection_type == 'connect': - app.menu_manager._handle_connect_selection(selected_tabs) + self._handle_connect_selection(selected_tabs) else: raise ValueError("Unknown tab list type: %s" % selection_type) - def select_subtitle_encoding(self, encoding): - if self.subtitle_encoding_enabled: - if encoding is None: - action_name = 'SubtitleEncoding-Default' - else: - action_name = 'SubtitleEncoding-%s' % encoding - self.emit('radio-group-changed', 'subtitle-encoding', action_name) - - def _update_menus_for_selected_items(self): + def _handle_selected_items(self): """Update the menu items based on the current item list selection. 
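[Editor's note: the elif chain in _handle_selected_tabs() above maps a selection-type string onto a handler method. One way the "rewrite it in a cleaner way" note could go is a dispatch table; this is a hypothetical restructuring, not code from Miro, and the None and u'tab' early-outs above would stay in front of the lookup.]

_TAB_HANDLERS = {
    'feed': '_handle_feed_selection',
    'playlist': '_handle_playlist_selection',
    'static': '_handle_static_tab_selection',
    'library': '_handle_static_tab_selection',
    'site': '_handle_site_selection',
    'store': '_handle_site_selection',
    'connect': '_handle_connect_selection',
}

def handle_selected_tabs(updater, selection_type, selected_tabs):
    try:
        method_name = _TAB_HANDLERS[selection_type]
    except KeyError:
        raise ValueError("Unknown tab list type: %s" % selection_type)
    getattr(updater, method_name)(selected_tabs)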
""" - selected_items = app.item_list_controller_manager.get_selection() - downloaded = False - has_audio = False - is_remote = False - for item in selected_items: - if item.downloaded: - downloaded = True - if item.file_type == 'audio': - has_audio = True - if item.remote: - is_remote = True - - if selected_items and not is_remote: - if len(selected_items) == 1: - self.enabled_groups.add('LocalItemSelected') - else: - self.states['plural'].append('EditItems') - self.enabled_groups.add('LocalItemsSelected') + selection_info = app.item_list_controller_manager.get_selection_info() - if downloaded: - if not is_remote: + if selection_info.has_download: + if not selection_info.has_remote: + if selection_info.count > 1: + self.states['plural'].append('EditItems') + self.enabled_groups.add('LocalItemsSelected') self.enabled_groups.add('LocalPlayablesSelected') self.enabled_groups.add('LocalPlayablesSelected_PlayPause') self.enabled_groups.add('PlayablesSelected') self.enabled_groups.add('PlayablesSelected_PlayPause') - if not has_audio: + if not selection_info.has_file_type('audio'): self.enabled_groups.add('PlayableVideosSelected') - if len(selected_items) == 1: - if not is_remote: + if selection_info.count == 1: + if not selection_info.has_remote: self.enabled_groups.add('LocalPlayableSelected') self.enabled_groups.add('LocalPlayableSelected_PlayPause') self.enabled_groups.add('PlayableSelected') @@ -946,41 +940,505 @@ can_play = app.item_list_controller_manager.can_play_items() if can_play: self.enabled_groups.add('PlayPause') - if not is_remote: + if not selection_info.has_remote: self.enabled_groups.add('LocalPlayableSelected_PlayPause') self.enabled_groups.add('LocalPlayablesSelected_PlayPause') self.enabled_groups.add('PlayableSelected_PlayPause') self.enabled_groups.add('PlayablesSelected_PlayPause') app.widgetapp.window.videobox.handle_new_selection(can_play) - def update_menus(self): - self.reset() - self._update_menus_for_selected_tabs() - self._update_menus_for_selected_items() +class MenuManager(signals.SignalEmitter): + """Updates the menu based on the current selection. + + This includes enabling/disabling menu items, changing menu text + for plural selection and enabling/disabling the play button. The + play button is obviously not a menu item, but it's pretty closely + related + + Whenever code makes a change that could possibly affect which menu + items should be enabled/disabled, it should call the + update_menus() method. + + Signals: + - menus-updated(reasons): Emitted whenever update_menus() is called + """ + def __init__(self): + signals.SignalEmitter.__init__(self, 'menus-updated') + self.menu_item_fetcher = MenuItemFetcher() + self.subtitle_encoding_updater = SubtitleEncodingMenuUpdater() + self.converter_list = [] + + def setup_menubar(self, menubar): + """Setup the main miro menubar. + """ + menubar.add_initial_menus(get_app_menu()) + menubar.connect("activate", on_menubar_activate) + self.menu_updaters = [ + LegacyMenuUpdater(), + SortsMenuUpdater(), + AudioTrackMenuUpdater(), + SubtitlesMenuUpdater(), + self.subtitle_encoding_updater, + EchonestMenuHandler(menubar), + ] + + def _set_play_pause(self): + if ((not app.playback_manager.is_playing + or app.playback_manager.is_paused)): + label = _('Play') + else: + label = _('Pause') + self.menu_item_fetcher['PlayPauseItem'].set_label(label) + + def add_subtitle_encoding_menu(self, category_label, *encodings): + """Set up a subtitles encoding menu. 
+ + This method should be called for each category of subtitle encodings + (East Asian, Western European, Unicode, etc). Pass it the list of + encodings for that category. + + :param category_label: human-readable name for the category + :param encodings: list of (label, encoding) tuples. label is a + human-readable name, and encoding is a value that we can pass to + VideoDisplay.select_subtitle_encoding() + """ + self.subtitle_encoding_updater.add_menu(category_label, encodings) + + def add_converters(self, converters): + menu = app.widgetapp.menubar.find("ConvertMenu") + position = itertools.count() + for group_list in converters: + for (identifier, title) in group_list: + item = MenuItem(title, "ConvertItemTo-" + identifier, + groups=["LocalPlayablesSelected"]) + menu.insert(position.next(), item) + menu.insert(position.next(), Separator()) + for i, group_list in enumerate(converters): + self.converter_list.insert(i, group_list) + + def get_converters(self): + """Get the current list of converters + + :returns: list of converter groups. Each group will be a list of + (identifier, title) tuples. + """ + return self.converter_list + + def select_subtitle_encoding(self, encoding): + if not self.subtitle_encoding_updater.has_encodings(): + # OSX never sets up the subtitle encoding menu + return + menu_item_name = self.subtitle_encoding_updater.action_name(encoding) + try: + self.menu_item_fetcher[menu_item_name].set_state(True) + except KeyError: + logging.warn("Error enabling subtitle encoding menu item: %s", + menu_item_name) + + def update_menus(self, *reasons): + """Call this when a change is made that could change the menus + + Use reasons to describe why the menus could change. Some MenuUpdater + objects will do some optimizations based on that + """ + reasons = set(reasons) self._set_play_pause() - self.emit('enabled-changed') + for menu_updater in self.menu_updaters: + menu_updater.update(reasons) + self.emit('menus-updated', reasons) + +class MenuUpdater(object): + """Base class for objects that dynamically update menus.""" + def __init__(self, menu_name): + self.menu_name = menu_name + self.first_update = False + + # we lazily access our menu item, since we are created before the menubar + # is fully setup. + def get_menu(self): + try: + return self._menu + except AttributeError: + self._menu = app.widgetapp.menubar.find(self.menu_name) + return self._menu + menu = property(get_menu) - def _update_view_menu(self): + def update(self, reasons): + if not self.first_update and not self.should_process_update(reasons): + return + self.first_update = False + self.start_update() + if not self.should_show_menu(): + self.menu.hide() + return + + self.menu.show() + if self.should_rebuild_menu(): + self.clear_menu() + self.populate_menu() + self.update_items() + + def should_process_update(self, reasons): + """Test if we should ignore the update call. + + :param reasons: the reasons passed in to MenuManager.update_menus() + """ + return True + + def clear_menu(self): + """Remove items from our menu before rebuilding it.""" + for child in self.menu.get_children(): + self.menu.remove(child) + + def start_update(self): + """Called at the very start of the update method. """ + pass + + def should_show_menu(self): + """Should we display the menu? 
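[Editor's note: MenuUpdater above resolves its menu lazily because updaters are constructed before the menubar is fully set up; the try/except AttributeError doubles as a one-slot cache. The pattern by itself:]

class LazyMenuRef(object):
    def __init__(self, menu_name, find):
        self.menu_name = menu_name
        self._find = find            # resolved only on first access

    @property
    def menu(self):
        try:
            return self._menu
        except AttributeError:
            self._menu = self._find(self.menu_name)
            return self._menu

ref = LazyMenuRef('SortsMenu', lambda name: ['<%s>' % name])
assert ref.menu is ref.menu          # second access hits the cache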
""" + return True + + def should_rebuild_menu(self): + """Should we rebuild the menu structure?""" + return False + + def populate_menu(self): + """Add MenuItems to our menu.""" + pass + + def update_items(self): + """Update our menu items.""" + pass + +class SortsMenuUpdater(MenuUpdater): + """Update the sorts menu for MenuManager.""" + def __init__(self): + MenuUpdater.__init__(self, 'SortsMenu') + self.current_sorts = [] + + def should_process_update(self, reasons): + return ('tab-selection-changed' in reasons or + 'item-list-view-changed' in reasons) + + def action_name(self, column_name): + return "ToggleColumn-" + column_name + + def start_update(self): + """Called at the very start of the update method. """ + self.togglable_columns = self.columns_enabled = None display = app.display_manager.get_current_display() - # using hasattr because not all displays have ids and types - # TODO: refactor the display type / id system - if not (hasattr(display, 'type') and hasattr(display, 'id')): + if display is None: + # no display? + return + column_info = display.get_column_info() + if column_info is None: + # no togglable columns for this display return + self.columns_enabled = column_info[0] + untogglable = WidgetStateStore.MANDATORY_SORTERS + self.togglable_columns = list(c for c in column_info[1] + if c not in untogglable) + self.togglable_columns.sort(key=COLUMN_LABELS.get) + + def should_show_menu(self): + """Should we display the menu? """ + return self.togglable_columns is not None + + def should_rebuild_menu(self): + """Should we rebuild the menu structure?""" + return self.togglable_columns != self.current_sorts + + def populate_menu(self): + """Make a list of menu items for this menu.""" + for name in self.togglable_columns: + label = COLUMN_LABELS[name] + handler_name = self.action_name(name) + self.menu.append(CheckMenuItem(label, handler_name)) + self.current_sorts = self.togglable_columns + + def update_items(self): + """Update our menu items.""" + menu_names_to_enable = set(self.action_name(name) + for name in self.columns_enabled) + for menu_item in self.menu.get_children(): + if menu_item.name in menu_names_to_enable: + menu_item.set_state(True) + else: + menu_item.set_state(False) + +class AudioTrackMenuUpdater(MenuUpdater): + """Update the audio track menu for MenuManager.""" + def __init__(self): + MenuUpdater.__init__(self, 'AudioTrackMenu') + self.currently_displayed_tracks = None + + def should_process_update(self, reasons): + return 'playback-changed' in reasons - enabled = set(app.widget_state.get_sorts_enabled(display.type, display.id)) - checks = dict(('ToggleColumn-' + column, column in enabled) - for column in WidgetStateStore.get_columns()) - self.emit('checked-changed', 'ListView', checks) - - for column in WidgetStateStore.get_columns_available(display.type): - self.enabled_groups.add('column-%s' % column) - -def _get_view_menu(): - menu = list() - toggleable = WidgetStateStore.get_toggleable_columns() - for name in sorted(toggleable, key=COLUMN_LABELS.get): - groups = ['column-%s' % name] - label = COLUMN_LABELS[name] - handler_name = make_column_toggle_handler(name) - menu.append(CheckMenuItem(label, handler_name, 'ListView', groups=groups)) - return menu + def _on_track_change(self, menu_item, track_id): + if app.playback_manager.is_playing: + app.playback_manager.set_audio_track(track_id) + + def action_name(self, track_id): + return 'ChangeAudioTrack-%s' % track_id + + def start_update(self): + """Called at the very start of the update method. 
""" + self.track_info = app.playback_manager.get_audio_tracks() + self.enabled_track = app.playback_manager.get_enabled_audio_track() + + def should_rebuild_menu(self): + """Should we rebuild the menu structure?""" + return self.track_info != self.currently_displayed_tracks + + def populate_menu(self): + """Add MenuItems to our menu.""" + if not self.track_info: + self.make_empty_menu() + self.currently_displayed_tracks = self.track_info + return + + group = [] + for (track_id, label) in self.track_info: + menu_item = RadioMenuItem(label, self.action_name(track_id)) + self.menu.append(menu_item) + menu_item.connect('activate', self._on_track_change, + track_id) + group.append(menu_item) + + for item in group[1:]: + item.set_group(group[0]) + self.currently_displayed_tracks = self.track_info + + def make_empty_menu(self): + menu_item = MenuItem(_("None Available"), "NoSubtitlesAvailable") + menu_item.disable() + self.menu.append(menu_item) + + def update_items(self): + """Update our menu items.""" + if self.enabled_track is None: + return + enabled_name = self.action_name(self.enabled_track) + for menu_item in self.menu.get_children(): + if menu_item.name == enabled_name: + menu_item.set_state(True) + return + +class SubtitlesMenuUpdater(MenuUpdater): + """Update the subtitles menu for MenuManager.""" + + def __init__(self): + MenuUpdater.__init__(self, 'SubtitlesMenu') + self.none_available = MenuItem(_("None Available"), "NoneAvailable") + self.none_available.disable() + self.currently_displayed_tracks = None + + def should_process_update(self, reasons): + return 'playback-changed' in reasons + + def on_change_track(self, menu_item, track_id): + if app.playback_manager.is_playing: + app.playback_manager.set_subtitle_track(track_id) + + def on_disable(self, menu_item): + if app.playback_manager.is_playing: + app.playback_manager.set_subtitle_track(None) + + def on_select_file_activate(self, menu_item): + if app.playback_manager.is_playing: + app.playback_manager.open_subtitle_file() + + def action_name(self, track_id): + return 'ChangeSubtitles-%s' % track_id + + def start_update(self): + """Called at the very start of the update method. """ + self.enabled_track = app.playback_manager.get_enabled_subtitle_track() + self.all_tracks = list(app.playback_manager.get_subtitle_tracks()) + + def should_rebuild_menu(self): + """Should we rebuild the menu structure?""" + return self.currently_displayed_tracks != self.all_tracks + + def get_items(self): + """Get the items that we actually should work with. + + This is a all of the child items in our menu, except the subtitle + encoding menu. + """ + return self.menu.get_children()[:-1] + + def clear_menu(self): + # only clear the subtitle items, not the subtitle encoding submenu. 
+ for item in self.get_items(): + self.menu.remove(item) + + def populate_menu(self): + """Add MenuItems to our menu.""" + to_add = self.make_items_for_tracks() + to_add.append(Separator()) + select_file = MenuItem(_("Select a Subtitles file..."), + "SelectSubtitlesFile") + select_file.connect("activate", self.on_select_file_activate) + to_add.append(select_file) + + # insert menu items before the select subtitles encoding item + for i, menu_item in enumerate(to_add): + self.menu.insert(i, menu_item) + self.currently_displayed_tracks = self.all_tracks + + def make_items_for_tracks(self): + """Get MenuItems for subtitle tracks embedded in the video.""" + + if not self.all_tracks: + return [self.none_available] + + items = [] + first_item = None + for track_id, label in self.all_tracks: + menu_item = RadioMenuItem(label, self.action_name(track_id)) + menu_item.connect("activate", self.on_change_track, track_id) + items.append(menu_item) + if first_item is None: + first_item = menu_item + else: + menu_item.set_group(first_item) + + items.append(Separator()) + disable = RadioMenuItem(_("Disable Subtitles"), "DisableSubtitles") + disable.connect("activate", self.on_disable) + disable.set_group(first_item) + items.append(disable) + return items + + def update_items(self): + """Update our menu items.""" + menu_items = self.get_items() + if app.playback_manager.is_playing_video: + for item in menu_items: + if item is not self.none_available: + item.enable() + if self.enabled_track is not None: + enabled_action_name = self.action_name(self.enabled_track) + else: + enabled_action_name = "DisableSubtitles" + for item in menu_items: + if item.name == enabled_action_name: + item.set_state(True) + break + else: + for item in menu_items: + item.disable() + +class SubtitleEncodingMenuUpdater(object): + """Handles updating the subtitles encoding menu. + + This class is responsible for: + - populating the subtitles encoding method + - enabling/disabling the menu items + """ + + def __init__(self): + self.menu_item_fetcher = MenuItemFetcher() + self.default_item = None + self.category_counter = itertools.count() + + def action_name(self, encoding): + """Get the name of the menu item for a given encoding. + + :param: string name of the encoding, or None for the default encoding + """ + if encoding is None: + return 'SubtitleEncoding-Default' + else: + return 'SubtitleEncoding-%s' % encoding + + def has_encodings(self): + return self.default_item is not None + + def update(self, reasons): + encoding_menu = self.menu_item_fetcher["SubtitleEncodingMenu"] + if app.playback_manager.is_playing_video: + encoding_menu.enable() + else: + encoding_menu.disable() + + def add_menu(self, category_label, encodings): + if not self.has_encodings(): + self.init_menu() + category_menu = self.add_submenu(category_label) + self.populate_submenu(category_menu, encodings) + + def init_menu(self): + # first time calling this function, we need to set up the menu. 
+ encoding_menu = self.menu_item_fetcher["SubtitleEncodingMenu"] + encoding_menu.show() + self.default_item = RadioMenuItem(_('Default (UTF-8)'), + self.action_name(None)) + self.default_item.set_state(True) + self.default_item.connect("activate", self.on_activate, None) + encoding_menu.append(self.default_item) + + def add_submenu(self, label): + encoding_menu = self.menu_item_fetcher["SubtitleEncodingMenu"] + name = "SubtitleEncodingCat-%s" % self.category_counter.next() + category_menu = Menu(label, name, []) + encoding_menu.append(category_menu) + return category_menu + + def populate_submenu(self, category_menu, encodings): + for encoding, name in encodings: + label = '%s (%s)' % (name, encoding) + menu_item = RadioMenuItem(label, + self.action_name(encoding)) + menu_item.set_state(False) + menu_item.connect("activate", self.on_activate, encoding) + category_menu.append(menu_item) + menu_item.set_group(self.default_item) + + def on_activate(self, menu_item, encoding): + app.playback_manager.select_subtitle_encoding(encoding) + +class EchonestMenuHandler(object): + """Handles the echonest enable/disable checkbox + + Responsibilities: + - Enabling/Disabling the menu item depending on the selection + - Handling the callback + """ + + def __init__(self, meunbar): + self.menu_item = app.widgetapp.menubar.find("UseEchonestData") + self.menu_item.connect("activate", self.on_activate) + + def on_activate(self, button): + selection = app.item_list_controller_manager.get_selection() + id_list = [info.id for info in selection if info.downloaded] + m = messages.SetNetLookupEnabled(id_list, button.get_state()) + m.send_to_backend() + + def update(self, reasons): + ilc_manager = app.item_list_controller_manager + selection_info = ilc_manager.get_selection_info() + if (selection_info.has_download and + not ilc_manager.displayed_type().startswith("device-")): + self.menu_item.enable() + else: + self.menu_item.disable() + self.update_check_value() + + def update_check_value(self): + selection = app.item_list_controller_manager.get_selection() + has_enabled = has_disabled = False + for info in selection: + if info.net_lookup_enabled: + has_enabled = True + else: + has_disabled = True + if has_enabled and has_disabled: + self.menu_item.set_state(None) + elif has_enabled: + self.menu_item.set_state(True) + else: + self.menu_item.set_state(False) diff -Nru miro-4.0.4/lib/frontends/widgets/newsearchfeed.py miro-6.0/lib/frontends/widgets/newsearchfeed.py --- miro-4.0.4/lib/frontends/widgets/newsearchfeed.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/newsearchfeed.py 2013-04-05 16:02:42.000000000 +0000 @@ -184,28 +184,33 @@ def set_initial_source(self): source = app.item_list_controller_manager.get_saved_search_source() if source is not None: - type, id = source - if type == 'channel': + typ, id_ = source + if typ == 'channel': self.channel_rb.set_selected() for i, info in enumerate(self.channels): - if info.id == id: + if info.id == id_: self.channel_option.set_selected(i) break else: - app.widgetapp.handle_soft_failure("New search feed dialog", - "didn't find channel with id: %r" % id, - with_exception=False) - elif type == 'search': + # bz:17818 + # Watched folders are not listed in this dialog, but is + # in the feed/channel category. So we could come here + # with a watched folder selected, and fall into else + # path. There used to be a soft failure here, now + # I think it is okay if we just print a debug message. 
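[Editor's note: update_check_value() above collapses a mixed selection into the three states a checkbox can take: every selected item with net lookup on gives True, none gives False, and a mixture gives None (the indeterminate state). As a pure function:]

def checkbox_state(flags):
    has_enabled = any(flags)
    has_disabled = not all(flags)
    if has_enabled and has_disabled:
        return None                  # indeterminate: mixed selection
    return has_enabled

assert checkbox_state([True, True]) is True
assert checkbox_state([False, True]) is None
assert checkbox_state([False, False]) is False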
+ logging.debug(("didn't find channel with id: %r " + "(possibly watched folder selected)"), id_) + elif typ == 'search': self.search_engine_rb.set_selected() self.enable_choice_table_row(1) for i, info in enumerate(self.search_engines): - if info.name == id: + if info.name == id_: self.search_engine_option.set_selected(i) break else: app.widgetapp.handle_soft_failure("New search feed dialog", - "didn't find search engine with id: %r" % id, + "didn't find search engine with id: %r" % id_, with_exception=False) else: app.widgetapp.handle_soft_failure("New search feed dialog", - "unknown source type %r" % type, with_exception=False) + "unknown source type %r" % typ, with_exception=False) diff -Nru miro-4.0.4/lib/frontends/widgets/playback.py miro-6.0/lib/frontends/widgets/playback.py --- miro-4.0.4/lib/frontends/widgets/playback.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/playback.py 2013-04-05 16:02:42.000000000 +0000 @@ -28,8 +28,7 @@ # statement from all source files in the program, then also delete it here. import logging -from random import randrange -from random import shuffle +import random from miro import app from miro import prefs @@ -41,8 +40,7 @@ from miro.plat.frontends.widgets import timer from miro.plat.frontends.widgets import widgetset from miro.frontends.widgets.displays import VideoDisplay -from miro.frontends.widgets import itemtrack -from miro.frontends.widgets import menus +from miro.frontends.widgets import keyboard from miro.frontends.widgets import dialogs from miro.frontends.widgets.widgetstatestore import WidgetStateStore @@ -68,6 +66,8 @@ self.playlist = None self.mark_as_watched_timeout = None self.update_timeout = None + self.manual_item_list = None + self.selected_tab_list = self.selected_tabs = None self.presentation_mode = 'fit-to-bounds' self.create_signal('will-start') self.create_signal('selecting-file') @@ -91,6 +91,10 @@ def player_playing(self): return self.player is not None and self.open_successful + def get_is_playing_video(self): + return self.is_playing and not self.is_playing_audio + is_playing_video = property(get_is_playing_video) + def set_volume(self, volume): self.volume = volume if self.player is not None: @@ -117,34 +121,68 @@ def start_with_items(self, item_infos): """Start playback, playing a static list of ItemInfos.""" - tracker = itemtrack.ManualItemListTracker.create(item_infos) - self.start(None, tracker) + # call stop before anything so that we release our existing + # manual_item_list (#19932) + self.stop() + id_list = [i.id for i in item_infos] + item_list = app.item_list_pool.get(u'manual', id_list) + self.manual_item_list = item_list + self.start(None, item_list) + + def goto_currently_playing(self): + """Jump to the currently playing item in the display.""" + playing_item = self.get_playing_item() + if not self.selected_tab_list or not playing_item: + return + if (self.is_playing and not + (self.is_playing_audio or self.detached_window)): + # playing a video in the app, so don't bother + return + try: + tab_iter = self.selected_tab_list.iter_map[self.selected_tabs[0].id] + except KeyError: + #17495 - item may be from a tab that no longer exists + self.selected_tab_list = self.selected_tabs = None + return + app.tabs._select_from_tab_list(self.selected_tab_list.type, tab_iter) + display = app.display_manager.current_display + if display and hasattr(display, 'controller'): + controller = display.controller + controller.scroll_to_item(playing_item, manual=True, recenter=True) + else: + #17488 - 
GuideDisplay doesn't have a controller + logging.debug("current display doesn't have a controller - " + "can't switch to") - def start(self, start_id, item_tracker, + def start(self, start_id, item_list, presentation_mode='fit-to-bounds', force_resume=False): """Start playback, playing the items from an ItemTracker""" if self.is_playing: self.stop() self.emit('will-start') + + # Remember where we are, so we can switch to it later + list_type, selected = app.tabs.selection + self.selected_tab_list = app.tabs[list_type] + self.selected_tabs = selected + play_in_miro = app.config.get(prefs.PLAY_IN_MIRO) # Only setup a playlist if we are playing in Miro - otherwise we # farm off to an external player for an individual item and the # concept of a playlist doesn't really make sense. start_item = None if play_in_miro: - self.playlist = PlaybackPlaylist(item_tracker, start_id) + self.playlist = PlaybackPlaylist(item_list, start_id, + self.shuffle, self.repeat) self.playlist.connect("position-changed", self._on_position_changed) self.playlist.connect("playing-info-changed", self._on_playing_changed) - self.playlist.set_shuffle(self.shuffle) - self.playlist.set_repeat(self.repeat) else: - model = item_tracker.item_list.model if start_id: - start_item = model.get_info(start_id) + start_item = item_list.get_item(start_id) else: - start_item = model.get_first_info() + start_item = item_list.get_first_item() self.should_mark_watched = [] self.presentation_mode = presentation_mode self.force_resume = force_resume @@ -153,23 +191,19 @@ self.fullscreen() def _on_position_changed(self, playlist): - self._not_skipped_by_user = True + self._skipped_by_user = False self._play_current() def _on_playing_changed(self, playlist): new_info = self.get_playing_item() + if new_info is None or not new_info.is_playable: + self.stop() + return if self.detached_window: - if self.detached_window.get_title() != new_info.name: - self.detached_window.set_title(new_info.name) - if app.config.get(prefs.PLAY_IN_MIRO) and new_info: - # if playlist is None, new_info will be none as well. 
- # Since emitting playing-info-changed with a "None" - # argument will cause a crash, we only emit it if - # new_info has a value + if self.detached_window.get_title() != new_info.title: + self.detached_window.set_title(new_info.title) + if app.config.get(prefs.PLAY_IN_MIRO): self.emit('playing-info-changed', new_info) - else: - logging.warning("trying to update playback info " - "even though playback has stopped") def prepare_attached_playback(self): self.emit('will-play-attached') @@ -194,7 +228,7 @@ detached_window_frame = widgetset.Rect(0, 0, 800, 600) else: detached_window_frame = widgetset.Rect.from_string(detached_window_frame) - title = self.playlist.currently_playing.name + title = self.playlist.currently_playing.title self.detached_window = DetachedWindow(title, detached_window_frame) self.align = widgetset.DetachedWindowHolder() self.align.add(self.video_display.widget) @@ -234,11 +268,15 @@ if self.player_playing(): elapsed = self.player.get_elapsed_playback_time() total = self.player.get_total_playback_time() - self.emit('playback-did-progress', elapsed, total) + if elapsed is not None and total is not None: + self.emit('playback-did-progress', elapsed, total) + else: + logging.warning('notify_update: elapsed = %s total = %s', + elapsed, total) def on_display_removed(self, display): if not self.removing_video_display: - self._not_skipped_by_user = True + self._skipped_by_user = False self.stop() def play(self, start_at=0): @@ -246,6 +284,8 @@ logging.warn("no self.player in play(). race condition?") return duration = self.player.get_total_playback_time() + if duration is None or duration <= 0: + logging.warning('duration is %s', duration) self.emit('will-play', duration) resume_time = self.playlist.currently_playing.resume_time if start_at > 0: @@ -258,7 +298,7 @@ self.schedule_update() self.is_paused = False self.is_suspended = False - app.menu_manager.update_menus() + app.menu_manager.update_menus('playback-changed') def should_resume(self): if self.force_resume: @@ -276,7 +316,7 @@ self.emit('will-pause') self.player.pause() self.is_paused = True - app.menu_manager.update_menus() + app.menu_manager.update_menus('playback-changed') def fullscreen(self): if not self.is_playing or not self.video_display: @@ -289,6 +329,9 @@ return if self.get_playing_item() is not None: self.update_current_resume_time() + if self.manual_item_list is not None: + app.item_list_pool.release(self.manual_item_list) + self.manual_item_list = None self.playlist.finished() self.playlist = None self.cancel_update_timer() @@ -307,6 +350,68 @@ self.is_fullscreen = False self.previous_left_widget = None self.emit('did-stop') + app.menu_manager.update_menus('playback-changed') + + def get_audio_tracks(self): + """Get a list of available audio tracks + + :returns: list of (label, track_id) tuples + """ + if self.player is not None: + return self.player.get_audio_tracks() + else: + return [] + + def get_enabled_audio_track(self): + """Get the currently enabled audio track + + :returns: current track_id or None if we are not playing + """ + if self.player is not None: + return self.player.get_enabled_audio_track() + else: + return None + + def set_audio_track(self, track_id): + """Change the currently enabled audio track + + :param track_id: track_id from get_audio_tracks() + """ + if self.player is not None: + self.player.set_audio_track(track_id) + else: + raise ValueError("Not playing") + + def get_subtitle_tracks(self): + """Get a list of available subtitle tracks + + :returns: list of (label, track_id) 
tuples + """ + if self.player is not None and not self.is_playing_audio: + return self.player.get_subtitle_tracks() + else: + return [] + + def get_enabled_subtitle_track(self): + """Get the currently enabled subtitle track + + :returns: current track_id or None if we are not playing video + """ + if self.player is not None and not self.is_playing_audio: + return self.player.get_enabled_subtitle_track() + else: + return None + + def set_subtitle_track(self, track_id): + """Change the currently enabled subtitle track + + :param track_id: track_id from get_subtitle_tracks() + """ + if self.player is None: + raise ValueError("Not playing") + if self.is_playing_audio: + raise ValueError("Playing Audio") + self.player.set_subtitle_track(track_id) def toggle_shuffle(self): self.set_shuffle(not self.shuffle) @@ -318,10 +423,6 @@ self.playlist.set_shuffle(self.shuffle) self.emit('update-shuffle') - def reshuffle(self): - if self.playlist: - self.playlist.reshuffle() - def toggle_repeat(self): if self.repeat == WidgetStateStore.get_repeat_playlist(): self.set_repeat(WidgetStateStore.get_repeat_track()) @@ -350,7 +451,7 @@ self.removing_video_display = False def update_current_resume_time(self, resume_time=-1): - if self._not_skipped_by_user: + if not self._skipped_by_user: return if not self.player_playing() and resume_time == -1: # we want to see what the current time is, but the player hasn't @@ -360,6 +461,9 @@ if resume_time == -1: resume_time = self.player.get_elapsed_playback_time() duration = self.player.get_total_playback_time() + if duration is None: + logging.warning('update_current_resume_time: duration is None') + return # if we are 95% of the way into the movie and less than 30 # seconds before the end, don't save resume time (#11956) if resume_time > min(duration * 0.95, duration - 30): @@ -417,11 +521,14 @@ pass def on_movie_finished(self): - m = messages.MarkItemCompleted(self.playlist.currently_playing) - m.send_to_backend() - self.update_current_resume_time(0) - self._not_skipped_by_user = True - self.play_next_item() + self._skipped_by_user = False + if self.playlist.currently_playing is not None: + m = messages.MarkItemCompleted(self.playlist.currently_playing) + m.send_to_backend() + self.update_current_resume_time(0) + self.play_next_item() + else: + self.stop() def schedule_mark_as_watched(self, info): # Note: mark_as_watched time should match the minimum resume @@ -461,20 +568,17 @@ def _setup_player(self, item_info, volume): def _handle_successful_sniff(item_type): logging.debug("sniffer got '%s' for %s", item_type, - item_info.video_path) + item_info.filename) self._finish_setup_player(item_info, item_type, volume) def _handle_unsuccessful_sniff(): logging.debug("sniffer got 'unplayable' for %s", - item_info.video_path) + item_info.filename) self._finish_setup_player(item_info, "unplayable", volume) - if item_info.media_type_checked: - typ = item_info.file_type - if typ == 'other': - # the backend and frontend use different names for this - typ = 'unplayable' - self._finish_setup_player(item_info, typ, volume) - else: - widgetset.get_item_type(item_info, _handle_successful_sniff, _handle_unsuccessful_sniff) + typ = item_info.file_type + if typ == 'other': + # the backend and frontend use different names for this + typ = 'unplayable' + self._finish_setup_player(item_info, typ, volume) def _finish_setup_player(self, item_info, item_type, volume): if item_type == 'audio': @@ -501,9 +605,9 @@ self.is_playing = True self.video_display.setup(item_info, item_type, volume) if 
self.detached_window is not None: - self.detached_window.set_title(item_info.name) + self.detached_window.set_title(item_info.title) self.emit('did-start-playing') - app.menu_manager.update_menus() + app.menu_manager.update_menus('playback-changed') def _build_video_player(self, item_info, volume): self.player = widgetset.VideoPlayer() @@ -531,8 +635,7 @@ # there. self.cancel_update_timer() self.cancel_mark_as_watched() - self._not_skipped_by_user = False - + self._skipped_by_user = True info_to_play = item if item else self.get_playing_item() if info_to_play is None: # end of the playlist @@ -544,7 +647,7 @@ self.player.stop(will_play_another=play_in_miro) if not play_in_miro: - app.widgetapp.open_file(info_to_play.video_path) + app.widgetapp.open_file(info_to_play.filename) messages.MarkItemWatched(info_to_play).send_to_backend() return @@ -569,14 +672,14 @@ if playing_item is None: return self.open_finished = True - self._not_skipped_by_user = True + self._skipped_by_user = False self.emit('cant-play-file') if isinstance(obj, widgetset.AudioPlayer): self.play_next_item() def _handle_skip(self): playing = self.get_playing_item() - if not self._not_skipped_by_user and playing is not None: + if self._skipped_by_user and playing is not None: self.update_current_resume_time() messages.MarkItemSkipped(playing).send_to_backend() @@ -584,10 +687,15 @@ if not self.player_ready(): return self._handle_skip() - if app.config.get(prefs.SINGLE_VIDEO_PLAYBACK_MODE): - self.stop() + if ((not self.item_continuous_playback_mode( + self.playlist.currently_playing) and + not self._skipped_by_user)): + if self.repeat: + self._play_current() + else: # not repeating, or shuffle + self.stop() else: - self.playlist.select_next_item(self._not_skipped_by_user) + self.playlist.select_next_item(self._skipped_by_user) self._play_current() def play_prev_item(self, from_user=False): @@ -609,11 +717,8 @@ self.seek_to(0) return self._handle_skip() - if app.config.get(prefs.SINGLE_VIDEO_PLAYBACK_MODE): - self.stop() - else: - self.playlist.select_previous_item() - self._play_current() + self.playlist.select_previous_item() + self._play_current() def skip_forward(self): if not self.player_ready(): @@ -649,7 +754,7 @@ self.switch_to_detached_playback() else: self.switch_to_attached_playback() - app.menu_manager.update_menus() + app.menu_manager.update_menus('playback-changed') def switch_to_attached_playback(self): self.cancel_update_timer() @@ -702,9 +807,11 @@ # FIXME: we should have a better way of deciding # which tab something is listed in. In addition, assume all items # from a remote share is either audio or video (no podcast). - # Figure out if its from a library or feed + # Figure out if its from a library or feed. Also, if feed_url + # is None don't consider it a podcast. 
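The get_audio_tracks()/get_subtitle_tracks() accessors and their set_*_track() counterparts added to PlaybackManager above form a small track-selection API: the getters return (label, track_id) tuples, or an empty list when nothing is playing (and, for subtitles, when audio is playing), and the setters raise ValueError outside of playback. A hypothetical helper built on top of that API, not part of the diff, that cycles through the available subtitle tracks:

    from miro import app

    def cycle_subtitle_track():
        # An empty list covers the 'not playing' and 'audio-only' cases,
        # so set_subtitle_track()'s ValueError paths are never hit here.
        tracks = app.playback_manager.get_subtitle_tracks()
        if not tracks:
            return
        track_ids = [track_id for label, track_id in tracks]
        current = app.playback_manager.get_enabled_subtitle_track()
        try:
            next_index = (track_ids.index(current) + 1) % len(track_ids)
        except ValueError:
            next_index = 0  # no track enabled yet; start with the first
        app.playback_manager.set_subtitle_track(track_ids[next_index])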
         if (item_info.remote or
-            (item_info.feed_url and
+            not item_info.feed_id or
+            (item_info.feed_url and
              (item_info.feed_url.startswith('dtv:manualFeed') or
               item_info.feed_url.startswith('dtv:directoryfeed') or
               item_info.feed_url.startswith('dtv:search') or
               item_info.feed_url.startswith('dtv:searchDownloads')))):
@@ -722,380 +829,271 @@
                 and app.config.get(prefs.PLAY_IN_MIRO))
         return result
 
+    def item_continuous_playback_mode(self, item_info):
+        if (item_info.remote or
+            not item_info.feed_id or
+            (item_info.feed_url and
+             (item_info.feed_url.startswith('dtv:manualFeed') or
+              item_info.feed_url.startswith('dtv:directoryfeed') or
+              item_info.feed_url.startswith('dtv:search') or
+              item_info.feed_url.startswith('dtv:searchDownloads')))):
+            if(item_info.file_type == u'video'):
+                continuous_playback = app.config.get(
+                    prefs.CONTINUOUS_VIDEO_PLAYBACK_MODE)
+            else:
+                continuous_playback = app.config.get(
+                    prefs.CONTINUOUS_MUSIC_PLAYBACK_MODE)
+        else:
+            continuous_playback = app.config.get(
+                prefs.CONTINUOUS_PODCAST_PLAYBACK_MODE)
+
+        result = continuous_playback and app.config.get(prefs.PLAY_IN_MIRO)
+        return result
+
 class PlaybackPlaylist(signals.SignalEmitter):
-    def __init__(self, item_tracker, start_id):
+    def __init__(self, item_list, start_id, shuffle, repeat):
+        """Create a playlist of items we are playing
+
+        :param item_list: ItemList that we're playing from
+        :param start_id: id of the first item to play, or None to start
+            with the first item in the list.
+        :param shuffle: should we start in shuffle mode?
+        :param repeat: repeat mode to start in.
+        """
         signals.SignalEmitter.__init__(self, 'position-changed',
                                        'playing-info-changed')
-        self.item_tracker = item_tracker
-        self.model = item_tracker.item_list.model
-        self._tracker_callbacks = [
-            item_tracker.connect('items-will-change',
-                                 self._on_items_will_change),
-            item_tracker.connect('items-changed', self._on_items_changed),
-            item_tracker.connect('items-removed-from-source',
-                                 self._on_items_removed_from_source)
+        self.item_list = item_list
+        app.item_list_pool.add_ref(item_list)
+        self._item_list_callbacks = [
+            item_list.connect('items-changed', self._on_items_changed),
+            item_list.connect('list-changed', self._on_list_changed),
         ]
-        self.repeat = WidgetStateStore.get_repeat_off()
-        self.shuffle = False
-        self.shuffle_history = []
-        self.currently_playing = None
-        self.shuffle_upcoming = self.generate_upcoming_shuffle_items()
-        self._pick_initial_item(start_id)
-
-    def _pick_initial_item(self, start_id):
-        if start_id:
-            # The call to _find_playable here covers the corner case where
-            # start_id belogns to a container item with playable children. In
-            # that case is_playing is True, but we still can't directly play
-            # it
-            start_item = self._find_playable(self.model.get_info(start_id))
+        self.shuffle = shuffle
+        self.repeat = repeat
+        if len(self.item_list) == 0:
+            # special case for empty item lists (#19890)
+            self.currently_playing = None
+            return
+        # If we are passed a torrent folder item, we can't play it
+        # directly. We use _find_playable to find its first playable child in
+        # that case.
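item_continuous_playback_mode() above buries its decision in nested conditions; read as a pure function it simply maps an item to one of three prefs. The sketch below mirrors that branching; continuous_playback_pref is an illustrative name, not a function in the diff:

    from miro import prefs

    NON_PODCAST_PREFIXES = ('dtv:manualFeed', 'dtv:directoryfeed',
                            'dtv:search', 'dtv:searchDownloads')

    def continuous_playback_pref(item_info):
        """Map an item to the pref controlling its continuous playback."""
        library_item = (item_info.remote or
                        not item_info.feed_id or
                        (item_info.feed_url and
                         item_info.feed_url.startswith(NON_PODCAST_PREFIXES)))
        if library_item:
            if item_info.file_type == u'video':
                return prefs.CONTINUOUS_VIDEO_PLAYBACK_MODE
            return prefs.CONTINUOUS_MUSIC_PLAYBACK_MODE
        return prefs.CONTINUOUS_PODCAST_PLAYBACK_MODE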
+ if start_id is None: + start_id = self.item_list.get_first_item().id + start_id = self._find_playable(start_id) + if start_id is not None: + self.currently_playing = self.item_list.get_item(start_id) + self._create_navigation_strategy() else: - start_item = self._find_playable(self.model.get_first_info()) - self._change_currently_playing(start_item) + self.currently_playing = None - def finished(self): - self._change_currently_playing(None) - for handle in self._tracker_callbacks: - self.item_tracker.disconnect(handle) - self.item_tracker = None - self.model = None - self.disconnect_all() + def is_playing_id(self, id_): + return self.currently_playing and self.currently_playing.id == id_ - # This recalculates the upcoming shuffle items. - # This needs to be done in case the playlist changes - # without any items-changed signal being emitted. - # Note that the new upcoming shuffle items do not take - # already played items into consideration. - def reshuffle(self): - # only reshuffle if we are currently using self.shuffle_upcoming - if self.shuffle: - self.shuffle_upcoming = self.generate_upcoming_shuffle_items() - # Remove any items in the shuffle history which are not currently - # in the playlist. The last item isn't removed since that is the - # currently playing item. - if self.shuffle_history: - playable_items = set(self.get_all_playable_items()) - current_item = self.shuffle_history.pop() - self.shuffle_history = [item for item in self.shuffle_history - if item in playable_items] - self.shuffle_history.append(current_item) - - def prev_shuffle_item(self): - while len(self.shuffle_history) > 0: - try: - return self.model.get_info(self.shuffle_history[-1]) - except KeyError: - # Item was removed from our InfoList and the shuffle history - # was not updated. This should not happen. - self.shuffle_history.pop() - logging.warning("trying to play non-existent shuffle history " - "item, skipping to previous item.") - continue - # no items in our history, return None - return - - def next_shuffle_item(self): - while len(self.shuffle_upcoming) > 0: - next_id = self.shuffle_upcoming.pop() - try: - return self.model.get_info(next_id) - except KeyError: - # Item was removed from our InfoList and the upcoming shuffle - # items were not updated. This should not happen. - logging.warning("trying to play non-existent upcoming shuffle " - "item, skipping to next item.") + def is_playing_item(self, info): + return self.is_playing_id(info.id) - continue - # no items left in shuffle_upcoming - return None + def set_shuffle(self, value): + self.shuffle = value + self._create_navigation_strategy() - def find_next_item(self, not_skipped_by_user=True): - #if track repeat is on and the user doesn't skip, - #shuffle doesn't matter - if ((self.repeat == WidgetStateStore.get_repeat_track() - and not_skipped_by_user)): - return self.currently_playing - elif ((not self.shuffle and - self.repeat == WidgetStateStore.get_repeat_playlist() - and self.is_playing_last_item())): - return self._find_playable(self.model.get_first_info()) - elif (self.shuffle and self.repeat == WidgetStateStore.get_repeat_off() - or self.shuffle and self.repeat == WidgetStateStore.get_repeat_track()): - next_item = self.next_shuffle_item() - if next_item is None: - self.shuffle_upcoming = self.generate_upcoming_shuffle_items() - self.shuffle_history = [] - return None #stop playback - else: - # Remove currently playing item from history if it - # was removed from the playlist. 
- if self._is_playing_filtered_item(): - self.shuffle_history.pop() - self.shuffle_history.append(next_item.id) - return next_item - elif self.shuffle and WidgetStateStore.get_repeat_playlist(): - next_item = self.next_shuffle_item() - if next_item is None: - #populate with new items - self.shuffle_upcoming = self.generate_upcoming_shuffle_items() - next_item = self.next_shuffle_item() - if next_item is None: - #17492 - nothing playable in list - return None - # Remove currently playing item from history if it - # was removed from the playlist. - if self._is_playing_filtered_item(): - self.shuffle_history.pop() - self.shuffle_history.append(next_item.id) - return next_item - else: - if self._is_playing_filtered_item(): - return self.model.get_first_info() - else: - next_item = self.model.get_next_info(self.currently_playing.id) - return self._find_playable(next_item) + def set_repeat(self, value): + self.repeat = value + self._create_navigation_strategy() - def find_previous_item(self): - if self.shuffle: - if not self.shuffle_history: - return None - current_item = self.shuffle_history.pop() - # Only add the currently playing item to upcoming shuffle items - # if it exists in the playlist - if not self._is_playing_filtered_item(): - self.shuffle_upcoming.append(current_item) - return self.prev_shuffle_item() - elif (not self.shuffle - and self.repeat == WidgetStateStore.get_repeat_playlist() - and self.is_playing_first_item()): - last_item = self._find_playable(self.model.get_last_info(), True) - return last_item + def _create_navigation_strategy(self): + if self.item_list.item_in_list(self.currently_playing.id): + initial_item = self.currently_playing else: - if self._is_playing_filtered_item(): - return None - else: - prev_item = self.model.get_prev_info(self.currently_playing.id) - return self._find_playable(prev_item, backwards=True) - - def generate_upcoming_shuffle_items(self): - if not self.shuffle: - return [] - elif (self.repeat == WidgetStateStore.get_repeat_off() - or self.repeat == WidgetStateStore.get_repeat_track()): - #random order - items = self.get_all_playable_items() - shuffle(items) - #do not include currently playing item - if self.currently_playing: - try: - items.remove(self.currently_playing.id) - except ValueError: - pass - return items - elif self.repeat == WidgetStateStore.get_repeat_playlist(): - #random items - items = self.get_all_playable_items() - if items: - return self.random_sequence(items, self.currently_playing.id) - else: - return [] + initial_item = None + repeat = (self.repeat == WidgetStateStore.get_repeat_playlist()) + if self.shuffle: + self.navigation_strategy = ShuffleNavigationStrategy( + initial_item, self.item_list, repeat) else: - return [] - - def random_sequence(self, pool, do_not_begin_with=None): - """ - Returns a list of random elements taken from the pool - parameter (which is a list). This means that the - returned list might contain elements from the pool - several times while others might not appear at all. - - The returned list has the following contraints: - - An element will never appear twice in a row. - - If an element from the pool is passed as do_no_begin_with - the returned list will not begin with that element. 
- """ - random_items = [] - previous_index = None - - if do_not_begin_with: - try: - previous_index = pool.index(do_not_begin_with) - except ValueError: - pass - - if len(pool) < 2: - #17493: infinite loop when trying to shuffle 1 item - return pool - for i in range(len(pool)): - random_index = randrange(0, len(pool)) - while random_index == previous_index: - random_index = randrange(0, len(pool)) - random_items.append(pool[random_index]) - previous_index = random_index - return random_items + self.navigation_strategy = LinearNavigationStrategy( + initial_item, self.item_list, repeat) def select_previous_item(self): - previous_item = self.find_previous_item() - self._change_currently_playing(previous_item) + prev_item = self.navigation_strategy.previous_item() + self._change_currently_playing(prev_item) - def select_next_item(self, not_skipped_by_user=True): - next_item = self.find_next_item(not_skipped_by_user) + def select_next_item(self, skipped_by_user=False): + if (self.repeat == WidgetStateStore.get_repeat_track() and + not skipped_by_user): + next_item = self.currently_playing + else: + next_item = self.navigation_strategy.next_item() self._change_currently_playing(next_item) - def _is_playing_filtered_item(self): - """Are we playing an item that is filtered out of our InfoList? + def finished(self): + """Call this when we're finished with the playlist.""" + self._change_currently_playing(None) + for handle in self._item_list_callbacks: + self.item_list.disconnect(handle) + app.item_list_pool.release(self.item_list) + self.navigation_strategy = self.item_list = None + self._item_list_callbacks = [] + self.disconnect_all() - This method should only be called if currently_playing is not None + def _find_playable(self, item_id): + """Find the first playable item in our item list starting with + item_info and moving down. 
""" - - if self.currently_playing is None: - app.widgetapp.handle_soft_failure('_is_playing_filtered_item', - "currently_playing is None", with_exception=False) - return True # I guess this is most likely to make things work try: - self.model.get_info(self.currently_playing.id) + item_info = self.item_list.get_item(item_id) except KeyError: - return True - else: - return False - - def is_playing_last_item(self): - if self._is_playing_filtered_item(): - return False - next_item = self.model.get_next_info(self.currently_playing.id) - return self._find_playable(next_item) == None - - def is_playing_first_item(self): - if self._is_playing_filtered_item(): - return False - previous_item = self.model.get_prev_info(self.currently_playing.id) - return self._find_playable(previous_item, True) == None - - def get_all_playable_items(self): - item_info = self.model.get_first_info() - items = [] - while item_info is not None: + return None + if item_info.is_playable: + return item_info.id + current_row = self.item_list.get_index(item_info.id) + for i in xrange(current_row + 1, len(self.item_list)): + item_info = self.item_list.get_row(i) if item_info.is_playable: - items.append(item_info.id) - item_info = self.model.get_next_info(item_info.id) - return items + return item_info.id + # no playable items + return None - def is_playing_id(self, id_): - return self.currently_playing and self.currently_playing.id == id_ + def _on_items_changed(self, item_list, changed_ids): + self.handle_changes() - def set_shuffle(self, value): - self.shuffle = value - self.shuffle_upcoming = self.generate_upcoming_shuffle_items() - if self.currently_playing: - self.shuffle_history = [self.currently_playing.id] - else: - self.shuffle_history = [] - - def set_repeat(self, value): - self.repeat = value + def _on_list_changed(self, item_list): + self.handle_changes() - def _on_items_will_change(self, tracker, added, changed, removed): - if self.currently_playing: - self._items_before_change = self.model.info_list() - if self._is_playing_filtered_item(): - self._index_before_change = -1 - else: - self._index_before_change = self.model.index_of_id( - self.currently_playing.id) - - def _on_items_removed_from_source(self, tracker, ids_removed): - if self.currently_playing: - old_currently_playing = self.currently_playing - removed_set = set(ids_removed) - if self.currently_playing.id in removed_set: - self._change_currently_playing_after_removed(ids_removed) - - if (self.currently_playing is None - or old_currently_playing.id is not self.currently_playing.id): - self.emit("position-changed") + def handle_changes(self): + if self.currently_playing is not None: + if self.item_list.item_in_list(self.currently_playing.id): + new_item = self.item_list.get_item(self.currently_playing.id) + if new_item != self.currently_playing: + if new_item.is_playable: + self.currently_playing = new_item + else: + self.currently_playing = None + self.emit("playing-info-changed") - def _update_currently_playing(self, new_info): - """Update our currently-playing ItemInfo.""" + def _change_currently_playing(self, new_info): self.currently_playing = new_info + # FIXME: should notify the item list code and so that it can redraw + # the item. 
-    def _change_currently_playing_after_removed(self, removed_set):
-        def position_removed(old_index):
-            old_info = self._items_before_change[old_index]
-            try:
-                return (old_info.id in removed_set
-                        or not self.model.get_info(old_info.id).is_playable)
-            except KeyError:
-                # info was removed by the ItemList's internal filter
-                return True
+class PlaylistNavigationStrategy(object):
+    """Handles moving back/forward for PlaybackPlaylist."""
+    def __init__(self, initial_item, item_list, repeat):
+        """Create a PlaylistNavigationStrategy
+
+        :param initial_item: the first item in the playlist, or None
+        :param item_list: ItemList we're playing from
+        :param repeat: repeat mode
+        """
+
+    def next_item(self):
+        """Pick the next item to play.
+
+        :returns: ItemInfo to play
+        """
+        raise NotImplementedError()
+
+    def previous_item(self):
+        """Pick the previous item to play.
-        new_position = self._index_before_change
-        if new_position == -1:
-            # we were playing an item that was filtered by the search and
-            # it got removed. Start with the top of the list
-            new_position = 0
+        :returns: ItemInfo to play
+        """
+        raise NotImplementedError()
+
+class LinearNavigationStrategy(PlaylistNavigationStrategy):
+    """Play items in the same order as the item list."""
+
+    def __init__(self, initial_item, item_list, repeat):
+        self.repeat = repeat
+        self.current_item = initial_item
+        self.item_list = item_list
+
+    def next_item(self):
+        return self._pick_item(+1)
+
+    def previous_item(self):
+        return self._pick_item(-1)
+
+    def _pick_item(self, delta):
+        if (self.current_item is None or
+            not self.item_list.item_in_list(self.current_item.id)):
+            # item no longer in item list. Return None to stop playback
+            self.current_item = None
+            return None
+        current_item = self.current_item
         while True:
-            if new_position >= len(self._items_before_change):
-                # moved past the end of our old item list, stop playback
-                self._change_currently_playing(None)
-                return
-            if not position_removed(new_position):
-                break
-            new_position += 1
-        item = self.model.get_info(self._items_before_change[new_position].id)
-        self._change_currently_playing(item)
+            candidate = self._next_candidate_item(current_item, delta)
+            if candidate is None:
+                # no more items to choose from, select None to stop playback
+                self.current_item = None
+                return None
+            if candidate.is_playable:
+                # found an item, select it
+                self.current_item = candidate
+                return self.current_item
+            if candidate is self.current_item:
+                # we've wrapped around the list without finding an item,
+                # return None
+                self.current_item = None
+                return None
+            # candidate item isn't playable, continue searching
+            current_item = candidate
-    def _on_items_changed(self, tracker, added, changed, removed):
-        if self.shuffle:
-            for id_ in removed:
-                while True:
-                    try:
-                        self.shuffle_upcoming.remove(id_)
-                    except ValueError:
-                        break
-                while True:
-                    try:
-                        self.shuffle_history.remove(id_)
-                    except ValueError:
-                        break
-            for item in added:
-                shuffle_upcoming_len = len(self.shuffle_upcoming)
-                if shuffle_upcoming_len:
-                    index = randrange(0, shuffle_upcoming_len)
-                else:
-                    index = 0
-                self.shuffle_upcoming.insert(index, item.id)
-        self._index_before_change = None
-        self._items_before_change = None
-        for info in changed:
-            if (self.currently_playing is not None and
-                info.id == self.currently_playing.id):
-                self._update_currently_playing(info)
-                self.emit("playing-info-changed")
-                break
-
-    def _find_playable(self, item_info, backwards=False):
-        if backwards:
-            iter_func = self.model.get_prev_info
-        else:
-            iter_func
= self.model.get_next_info - - while item_info is not None and not item_info.is_playable: - item_info = iter_func(item_info.id) - return item_info + def _next_candidate_item(self, current_item, delta): + row = self.item_list.get_index(current_item.id) + new_row = row + delta + if 0 <= new_row < len(self.item_list): + # normal case, play the next item + return self.item_list.get_row(new_row) + elif self.repeat and len(self.item_list) > 0: + # if we are in repeat mode, wrap around + return self.item_list.get_row(new_row % len(self.item_list)) + else: + # no items left to pick, return None to stop playback + return None - def _send_item_is_playing(self, info, value): - play_in_miro = app.config.get(prefs.PLAY_IN_MIRO) - if play_in_miro: - messages.SetItemIsPlaying(info, value).send_to_backend() +class ShuffleNavigationStrategy(PlaylistNavigationStrategy): + """Play items in shuffle mode.""" + def __init__(self, initial_item, item_list, repeat): + self.repeat = repeat + self.item_list = item_list + # history of items that we've already played + self.history = [] + # history of items that we've already played, then skipped back to + self.forward_history = [] + self.current_item = initial_item + + def next_item(self): + if self.current_item is not None: + self.history.append(self.current_item) + self.current_item = self._next_from_history_list(self.forward_history) + if self.current_item is None: + self.current_item = self._random_item() + return self.current_item + + def previous_item(self): + if self.current_item is not None: + self.forward_history.append(self.current_item) + self.current_item = self._next_from_history_list(self.history) + if self.current_item is None: + self.current_item = self._random_item() + return self.current_item + + def _random_item(self): + choices = self.item_list.get_playable_ids() + if not self.repeat: + history_ids = set((i.id for i in self.history)) + history_ids.update(i.id for i in self.forward_history) + choices = list(set(choices) - history_ids) + if choices: + return self.item_list.get_item(random.choice(choices)) + else: + return None - def _change_currently_playing(self, new_info): - if self.currently_playing: - self._send_item_is_playing(self.currently_playing, False) - self.currently_playing = new_info - if self.currently_playing: - self._send_item_is_playing(self.currently_playing, True) + def _next_from_history_list(self, history_list): + while history_list: + item = history_list.pop() + if self.item_list.item_in_list(item.id): + return item class DetachedWindow(widgetset.Window): def __init__(self, title, rect): @@ -1124,7 +1122,7 @@ """Handle a playback key press events """ if len(mods) != 0: - if set([menus.MOD, menus.SHIFT]) == mods: + if set([keyboard.MOD, keyboard.SHIFT]) == mods: if key in ('>', '.'): # OS X sends '.', GTK sends '>' app.widgetapp.on_forward_clicked() return True @@ -1132,20 +1130,25 @@ app.widgetapp.on_previous_clicked() return True - if set([menus.SHIFT]) == mods: - if key == menus.RIGHT_ARROW: + if set([keyboard.SHIFT]) == mods: + if key == keyboard.RIGHT_ARROW: app.widgetapp.on_skip_forward() return True - elif key == menus.LEFT_ARROW: + elif key == keyboard.LEFT_ARROW: app.widgetapp.on_skip_backward() return True - if set([menus.CTRL]) == mods and key == menus.SPACE: + if set([keyboard.ALT]) == mods: + if key == keyboard.ENTER: + app.playback_manager.enter_fullscreen() + return True + + if set([keyboard.CTRL]) == mods and key == keyboard.SPACE: app.playback_manager.toggle_paused() return True return False - if key == 
menus.DELETE or key == menus.BKSPACE: + if key == keyboard.DELETE or key == keyboard.BKSPACE: playing = app.playback_manager.get_playing_item() if playing is not None: if app.playback_manager.is_playing_audio: @@ -1157,7 +1160,7 @@ app.widgetapp.remove_items([playing]) return True - if key == menus.ESCAPE: + if key == keyboard.ESCAPE: if app.playback_manager.is_fullscreen: app.playback_manager.exit_fullscreen() return True @@ -1165,22 +1168,22 @@ app.widgetapp.on_stop_clicked() return True - if key == menus.RIGHT_ARROW: + if key == keyboard.RIGHT_ARROW: app.widgetapp.on_forward_clicked() return True - if key == menus.LEFT_ARROW: + if key == keyboard.LEFT_ARROW: app.widgetapp.on_previous_clicked() return True - if key == menus.UP_ARROW: + if key == keyboard.UP_ARROW: app.widgetapp.up_volume() return True - if key == menus.DOWN_ARROW: + if key == keyboard.DOWN_ARROW: app.widgetapp.down_volume() return True - if key == menus.SPACE: + if key == keyboard.SPACE: app.playback_manager.toggle_paused() return True diff -Nru miro-4.0.4/lib/frontends/widgets/playlist.py miro-6.0/lib/frontends/widgets/playlist.py --- miro-4.0.4/lib/frontends/widgets/playlist.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/playlist.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,27 +29,88 @@ """playlist.py -- Handle displaying a playlist.""" +import logging + from miro import app from miro import messages from miro import signals +from miro.data import mappings from miro.gtcache import gettext as _ from miro.plat.frontends.widgets import widgetset from miro.frontends.widgets import itemcontextmenu -from miro.frontends.widgets import itemlist from miro.frontends.widgets import itemlistcontroller from miro.frontends.widgets import itemlistwidgets from miro.frontends.widgets import itemrenderer +from miro.frontends.widgets import itemsort from miro.frontends.widgets import style from miro.frontends.widgets.widgetstatestore import WidgetStateStore +class PlaylistOrder(object): + """Tracks the order of items in a playlist.""" + def __init__(self, playlist_id): + self.playlist_id = playlist_id + self.update_positions() + self.ascending = True + + def update_positions(self): + connection_pool = app.connection_pools.main_pool + connection = connection_pool.get_connection() + try: + self.playlist_items = mappings.get_playlist_items(connection, + self.playlist_id) + finally: + connection_pool.release_connection(connection) + + def item_position(self, item_info): + """Get the position of an item inside the list. + + :returns: position as an int, with the count starting at 1 + """ + try: + return self.playlist_items.index(item_info.id) + 1 + except ValueError: + logging.warn("PlaylistOrder.item_position(): item not found (%s)", + item_info.title) + return -1 + + def set_sort_ascending(self, ascending): + """Set if the sort is ascending or not. + + This changes how we calculate the new list order after a DnD + operation. + """ + self.ascending = ascending + + def get_new_list_order(self, insert_id, dragged): + """Get the list order after some items gets re-ordered. 
+ + :param insert_id: item to insert before, or None to insert the items + at the end of the list + :param dragged: list of ids that got dragged + """ + dragged = set(dragged) + new_order = [] + if self.ascending: + source_list = self.playlist_items + else: + source_list = list(self.playlist_items) + source_list.reverse() + for item_id in source_list: + if item_id == insert_id: + new_order.extend(dragged) + if item_id not in dragged: + new_order.append(item_id) + if insert_id is None: + new_order.extend(dragged) + if not self.ascending: + new_order.reverse() + return new_order + class DropHandler(signals.SignalEmitter): - def __init__(self, playlist_id, item_list, item_views, sorter): + def __init__(self, playlist_order): signals.SignalEmitter.__init__(self) self.create_signal('new-order') - self.playlist_id = playlist_id - self.item_list = item_list - self.item_views = item_views - self.sorter = sorter + self.playlist_order = playlist_order def allowed_actions(self): return widgetset.DRAG_ACTION_MOVE @@ -66,23 +127,10 @@ def accept_drop(self, table_view, model, typ, source_actions, parent, position, dragged): if 0 <= position < len(model): - insert_id = model.nth_row(position)[0].id - # If we try to insert before an ID that iself is being - # dragged we get an error - while insert_id in dragged: - position += 1 - # If we iterate to the end of the playlist - # we cancel the iteration - if position >= len(model): - insert_id = None - break - insert_id = model.nth_row(position)[0].id + insert_id = model.item_list.get_row(position).id else: insert_id = None - new_order = self.sorter.move_ids_before(insert_id, dragged) - self.item_list.resort() - for item_view in self.item_views: - item_view.model_changed() + new_order = self.playlist_order.get_new_list_order(insert_id, dragged) self.emit('new-order', new_order) return True @@ -92,12 +140,12 @@ self.id = playlist_info.id self.is_folder = playlist_info.is_folder self.populated_sorter = False + self.playlist_order = PlaylistOrder(playlist_info.id) itemlistcontroller.SimpleItemListController.__init__(self) def build_column_renderers(self): column_renderers = itemlistwidgets.ListViewColumnRendererSet() - playlist_renderer = style.PlaylistOrderRenderer( - self.item_tracker.playlist_sort) + playlist_renderer = style.PlaylistOrderRenderer(self.playlist_order) column_renderers.add_renderer('playlist', playlist_renderer) return column_renderers @@ -110,30 +158,31 @@ self.make_drop_handler() def make_sorter(self, column, ascending): + # slight bit of a hack here. We enable/disable reordering based + # on the sort we return here. The assumption is that we are going + # to use the sort we return, which seems reasonable. if column == 'playlist': - # take the playlist sorter from our item tracker - playlist_sort = self.item_tracker.playlist_sort - if playlist_sort.should_reverse_order(ascending): - new_order = playlist_sort.reverse_order() - m = messages.PlaylistReordered(self.id, new_order) - m.send_to_backend() - # slight bit of a hack here. We enable/disable reordering based - # on the sort we return here. The assumption is that we are going - # to use the sort we return, which seems reasonable. 
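PlaylistOrder.get_new_list_order() above computes the new playlist order after a drag-and-drop. A standalone replica with a worked example; the only deviation is that the dragged ids are spliced in their list order rather than by iterating the set (whose iteration order is arbitrary):

    def new_list_order(playlist_items, insert_id, dragged, ascending=True):
        dragged_set = set(dragged)
        source = (playlist_items if ascending
                  else list(reversed(playlist_items)))
        new_order = []
        for item_id in source:
            if item_id == insert_id:
                new_order.extend(dragged)  # splice the dragged ids back in
            if item_id not in dragged_set:
                new_order.append(item_id)
        if insert_id is None:
            new_order.extend(dragged)      # dropped past the end of the list
        return new_order if ascending else list(reversed(new_order))

    # Dragging items 4 and 5 so they drop in front of item 2:
    assert new_list_order([1, 2, 3, 4, 5], insert_id=2,
                          dragged=[4, 5]) == [1, 4, 5, 2, 3]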
+ self.playlist_order.set_sort_ascending(ascending) self.enable_reorder() - return playlist_sort else: self.disable_reorder() - return itemlistcontroller.SimpleItemListController.make_sorter( - self, column, ascending) + return itemlistcontroller.SimpleItemListController.make_sorter( + self, column, ascending) + + def _init_item_views(self): + itemlistcontroller.SimpleItemListController._init_item_views(self) + if isinstance(self.item_list.sorter, itemsort.PlaylistSort): + self.enable_reorder() def build_renderer(self): - return itemrenderer.PlaylistItemRenderer( - self.item_tracker.playlist_sort) + return itemrenderer.PlaylistItemRenderer(self.playlist_order) + + def handle_item_list_changes(self): + itemlistcontroller.SimpleItemListController.handle_item_list_changes(self) + self.playlist_order.update_positions() def make_drop_handler(self): - self.drop_handler = DropHandler(self.id, self.item_list, - self.views.values(), self.item_tracker.playlist_sort) + self.drop_handler = DropHandler(self.playlist_order) self.drop_handler.connect('new-order', self._on_new_order) def enable_reorder(self): @@ -167,4 +216,6 @@ itemlistwidgets.EmptyListDescription(text)) def _on_new_order(self, drop_handler, order): + sort_key = app.widget_state.get_sort_state(self.type, self.id) + column, ascending = self.parse_sort_key(sort_key) messages.PlaylistReordered(self.id, order).send_to_backend() diff -Nru miro-4.0.4/lib/frontends/widgets/prefpanel.py miro-6.0/lib/frontends/widgets/prefpanel.py --- miro-4.0.4/lib/frontends/widgets/prefpanel.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/prefpanel.py 2013-04-05 16:02:42.000000000 +0000 @@ -54,7 +54,7 @@ from miro import messages from miro import prefs from miro.plat.frontends.widgets import widgetset -from miro.frontends.widgets import menus +from miro.frontends.widgets import keyboard from miro.frontends.widgets import widgetutil from miro.frontends.widgets import dialogs from miro.frontends.widgets import widgetconst @@ -64,7 +64,7 @@ from miro.plat.utils import filename_to_unicode, get_logical_cpu_count from miro.plat.frontends.widgets.bonjour import install_bonjour from miro.plat.frontends.widgets.threads import call_on_ui_thread -from miro.gtcache import gettext as _ +from miro.gtcache import gettext as _, gettext_lazy from miro import gtcache # Note: we do an additional import from prefpanelset half way down the file. 
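The prefpanel.py changes below swap _() for gettext_lazy() in the module-level add_panel() registrations, so panel titles are translated when they are displayed rather than at import time, before the user's language pref has been applied. The general shape of such a wrapper, shown here as an assumption for illustration rather than Miro's actual gtcache implementation:

    from gettext import gettext

    class LazyString(object):
        """Defers translation until the string is actually used."""
        def __init__(self, text):
            self.text = text

        def __str__(self):
            return gettext(self.text)

        def __unicode__(self):
            return unicode(gettext(self.text))

    def gettext_lazy(text):
        return LazyString(text)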
@@ -473,8 +473,23 @@ [vbox.pack_start(mem) for mem in extras[1:]] class GeneralPanel(PanelBuilder): + def __init__(self): + PanelBuilder.__init__(self) + self.disable_net_lookup_buttons = False + def build_widget(self): v = widgetset.VBox(8) + self.pack_startup_shutdown_section(v) + self.pack_sidebar_section(v) + self.pack_language_section(v) + self.pack_echonest_section(v) + pack_extras(v, "general") + return v + + def pack_startup_shutdown_section(self, v): + startup_shutdown_heading = dialogwidgets.heading( + _('Startup / Shutdown')) + v.pack_start(startup_shutdown_heading) run_at_startup_cbx = widgetset.Checkbox(_( "Automatically run %(appname)s when I log in.", @@ -501,6 +516,21 @@ prefs.WARN_IF_CONVERTING_ON_QUIT) v.pack_start(warn_if_converting_cbx) + def pack_sidebar_section(self, v): + sidebar_heading = dialogwidgets.heading(_('Sidebar')) + v.pack_start(sidebar_heading) + + cbx = widgetset.Checkbox(_('Show videos from podcasts in the Videos ' + 'section.')) + attach_boolean(cbx, prefs.SHOW_PODCASTS_IN_VIDEO) + v.pack_start(cbx) + + cbx = widgetset.Checkbox(_('Show audio from podcasts in the Music ' + 'section.')) + attach_boolean(cbx, prefs.SHOW_PODCASTS_IN_MUSIC) + v.pack_start(cbx) + + def pack_language_section(self, v): # FIXME - need to automatically generate list of available # languages in correct language lang_options = gtcache.get_languages() @@ -509,9 +539,9 @@ lang_option_menu = widgetset.OptionMenu([op[1] for op in lang_options]) attach_combo(lang_option_menu, prefs.LANGUAGE, [op[0] for op in lang_options]) - v.pack_start(widgetutil.align_left( - widgetutil.build_control_line(( - widgetset.Label(_("Language:")), lang_option_menu)))) + + language_heading = dialogwidgets.heading(_("Language")) + v.pack_start(language_heading) v.pack_start(widgetutil.align_left( dialogwidgets.note( @@ -519,26 +549,105 @@ "restart %(appname)s.)", {"appname": app.config.get(prefs.SHORT_APP_NAME)})))) - pack_extras(v, "general") + v.pack_start(widgetutil.align_left( + widgetutil.build_control_line(( + widgetset.Label(_('Display in:')), lang_option_menu)))) + + def pack_echonest_section(self, v): + heading = dialogwidgets.heading( + _("Music Album Art and Info Cleanup")) + v.pack_start(heading) + + help_text = _('Online lookup can find matching album art and song ' + 'info for all your audio files') + v.pack_start(widgetset.Label(help_text)) + + # Link to the data providers for net lookup. This method doesn't look + # the best, but hopefully it can be translated somewhat decently. 
+        provider_links = widgetset.HBox()
+        provider_links.pack_start(widgetset.Label(_("Via:")))
+        provider_links.pack_start(widgetset.Label(" "))
+        echonest_link = widgetutil.LinkButton(_('Echonest'))
+        seven_digital_link = widgetutil.LinkButton(_('7Digital'))
+        provider_links.pack_start(echonest_link)
+        provider_links.pack_start(widgetset.Label(" "))
+        provider_links.pack_start(seven_digital_link)
+        def on_echonest_click(button):
+            app.widgetapp.open_url("http://the.echonest.com/")
+        echonest_link.connect("clicked", on_echonest_click)
+        def on_7digital_click(button):
+            app.widgetapp.open_url("http://www.7digital.com/")
+        seven_digital_link.connect("clicked", on_7digital_click)
+        v.pack_start(widgetutil.align_center(provider_links))
+
+        self.run_net_lookup_button = widgetset.Button(
+            _('Run Online Lookup for All Current Music'))
+        self.run_net_lookup_button.connect(
+            'clicked', self.net_lookup_button_clicked, True)
+        self.remove_net_lookup_button = widgetset.Button(
+            _("Remove All Lookup data"))
+        self.remove_net_lookup_button.connect(
+            'clicked', self.net_lookup_button_clicked, False)
+
+        v.pack_start(widgetutil.build_control_line(
+            (self.run_net_lookup_button, self.remove_net_lookup_button)))
+
+        self.net_lookup_label = widgetset.Label(self._net_lookup_label_text())
+        v.pack_start(self.net_lookup_label)
+
+        cb_text = _('Automatically run online lookup for any new audio files '
+                    'that I add to Miro.')
+        cbx = widgetset.Checkbox(cb_text)
+        attach_boolean(cbx, prefs.NET_LOOKUP_BY_DEFAULT)
+        v.pack_start(cbx)
+        self._update_net_lookup_buttons()
+
+    def net_lookup_button_clicked(self, button, value):
+        messages.SetNetLookupEnabled(None, value).send_to_backend()
+        self.disable_net_lookup_buttons = True
+        self._update_net_lookup_buttons()
+
+    def enable_net_lookup_buttons(self):
+        self.disable_net_lookup_buttons = False
+        self._update_net_lookup_buttons()
+
+    def update_net_lookup_counts(self):
+        self.net_lookup_label.set_text(self._net_lookup_label_text())
+        self._update_net_lookup_buttons()
+
+    def _net_lookup_label_text(self):
+        global _net_lookup_counts
+
+        return _("Using lookup data for %(net_lookup_count)s "
+                 "out of %(total_count)s items", {
+                 'net_lookup_count': _net_lookup_counts[0],
+                 'total_count': _net_lookup_counts[1],
+                 })
+
+    def _update_net_lookup_buttons(self):
+        """Enable/Disable the buttons for echonest lookup."""
+        global _net_lookup_counts
+
+        net_lookup_count, total_count = _net_lookup_counts
+
+        if self.disable_net_lookup_buttons:
+            self.run_net_lookup_button.disable()
+            self.remove_net_lookup_button.disable()
+        elif net_lookup_count == total_count:
+            self.run_net_lookup_button.disable()
+            self.remove_net_lookup_button.enable()
+        elif net_lookup_count == 0:
+            self.run_net_lookup_button.enable()
+            self.remove_net_lookup_button.disable()
+        else:
+            self.run_net_lookup_button.enable()
+            self.remove_net_lookup_button.enable()
-        return v
 
 class PodcastsPanel(PanelBuilder):
     def build_widget(self):
         grid = dialogwidgets.ControlGrid()
-        cbx = widgetset.Checkbox(_('Show videos from podcasts in the Videos '
-                                   'section.'))
-        attach_boolean(cbx, prefs.SHOW_PODCASTS_IN_VIDEO)
-        grid.pack(cbx)
-        grid.end_line(spacing=2)
-
-        cbx = widgetset.Checkbox(_('Show audio from podcasts in the Music '
-                                   'section.'))
-        attach_boolean(cbx, prefs.SHOW_PODCASTS_IN_MUSIC)
-        grid.pack(cbx)
-        grid.end_line(spacing=12)
-
         cc_options = [(1440, _("Every day")),
                       (60, _("Every hour")),
                       (30, _("Every 30 minutes")),
@@ -564,8 +673,10 @@
         attach_combo(max_option_menu, prefs.MAX_OLD_ITEMS_DEFAULT,
                      [op[0] for op in
max_options]) - view_options = [(WidgetStateStore.STANDARD_VIEW, _("Standard view")), - (WidgetStateStore.LIST_VIEW, _("List view"))] + view_options = [ + (WidgetStateStore.STANDARD_VIEW, _("Standard view")), + (WidgetStateStore.LIST_VIEW, _("List view")), + ] view_option_menu = widgetset.OptionMenu([op[1] for op in view_options]) attach_combo(view_option_menu, prefs.PODCASTS_DEFAULT_VIEW, [op[0] for op in view_options]) @@ -884,7 +995,7 @@ grid.end_line(spacing=18) grid.pack_label( _('Watch for new video and audio items in these folders ' - 'and include them in library:'), span=2) + 'and include them in library:'), span=2, width=500) grid.end_line() grid.pack(self.watched_folder_helper.folder_list, pad_right=12) grid.pack(self.watched_folder_helper.button_box) @@ -1050,32 +1161,34 @@ subtitles_cbx = widgetset.Checkbox( _('Automatically enable movie subtitles when available.')) - playback_heading = dialogwidgets.heading(_("Continuous Playback")) - - rbg = widgetset.RadioButtonGroup() - play_rb = widgetset.RadioButton( - _("Play video and audio items one after another"), rbg) - stop_rb = widgetset.RadioButton( - _("Stop after each video or audio item"), rbg) - - resume_heading = dialogwidgets.heading(_("Resume Playback")) - + videos_heading = dialogwidgets.heading(_("Video Playback")) resume_videos_cbx = widgetset.Checkbox( - _('Continue playing videos from where they were last stopped.')) + _('Resume playback from where the video was last stopped.')) + continue_videos_cbx = widgetset.Checkbox( + _('Play continuously - when one item finishes, play the next item in the list.')) + + music_heading = dialogwidgets.heading(_("Music Playback")) resume_music_cbx = widgetset.Checkbox( - _('Continue playing music files from ' - 'where they were last stopped.')) + _('Resume playback from where the song was last stopped.')) + continue_music_cbx = widgetset.Checkbox( + _('Play continuously - when one item finishes, play the next item in the list.')) + + podcasts_heading = dialogwidgets.heading(_("Podcast Playback")) resume_podcasts_cbx = widgetset.Checkbox( - _('Continue playing podcast files from ' - 'where they were last stopped.')) + _('Resume playback from where the podcast was last stopped.')) + continue_podcasts_cbx = widgetset.Checkbox( + _('Play continuously - when one item finishes, play the next item in the list.')) attach_boolean(miro_cbx, prefs.PLAY_IN_MIRO, - (separate_cbx, resume_heading, + (separate_cbx, videos_heading, + music_heading, podcasts_heading, resume_videos_cbx, + continue_videos_cbx, resume_music_cbx, + continue_music_cbx, resume_podcasts_cbx, - subtitles_cbx, playback_heading, - play_rb, stop_rb)) + continue_podcasts_cbx, + subtitles_cbx)) v.pack_start(widgetutil.align_left(miro_cbx, bottom_pad=6)) @@ -1085,23 +1198,26 @@ attach_boolean(subtitles_cbx, prefs.ENABLE_SUBTITLES) v.pack_start(widgetutil.align_left(subtitles_cbx, bottom_pad=6)) - v.pack_start(widgetutil.align_left(playback_heading, - left_pad=3, top_pad=6 , bottom_pad=6)) - - attach_radio([(stop_rb, True), (play_rb, False)], - prefs.SINGLE_VIDEO_PLAYBACK_MODE) - v.pack_start(widgetutil.align_left(play_rb), padding=2) - v.pack_start(widgetutil.align_left(stop_rb)) - - v.pack_start(widgetutil.align_left(resume_heading, + v.pack_start(widgetutil.align_left(videos_heading, left_pad=3, top_pad=12 , bottom_pad=6)) - attach_boolean(resume_videos_cbx, prefs.RESUME_VIDEOS_MODE) - attach_boolean(resume_music_cbx, prefs.RESUME_MUSIC_MODE) - attach_boolean(resume_podcasts_cbx, prefs.RESUME_PODCASTS_MODE) 
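The preference panels above lean heavily on attach_boolean(checkbox, pref, ...) to bind a checkbox to a config value, optionally toggling a group of dependent widgets with it (see the attach_boolean(miro_cbx, prefs.PLAY_IN_MIRO, (...)) call). attach_boolean itself is defined earlier in prefpanel.py and is not part of this diff; a plausible sketch of such a binding, with the 'toggled' signal name assumed:

    from miro import app

    def attach_boolean(checkbox, pref, dependent_widgets=None):
        """Plausible sketch: keep a checkbox and a boolean pref in sync."""
        def on_toggled(widget):
            checked = widget.get_checked()
            app.config.set(pref, checked)
            for dep in dependent_widgets or ():
                dep.enable() if checked else dep.disable()
        checkbox.set_checked(app.config.get(pref))
        checkbox.connect('toggled', on_toggled)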
v.pack_start(widgetutil.align_left(resume_videos_cbx, bottom_pad=6)) + attach_boolean(continue_videos_cbx, prefs.CONTINUOUS_VIDEO_PLAYBACK_MODE) + v.pack_start(widgetutil.align_left(continue_videos_cbx, bottom_pad=6)) + + v.pack_start(widgetutil.align_left(music_heading, + left_pad=3, top_pad=12 , bottom_pad=6)) + attach_boolean(resume_music_cbx, prefs.RESUME_MUSIC_MODE) v.pack_start(widgetutil.align_left(resume_music_cbx, bottom_pad=6)) + attach_boolean(continue_music_cbx, prefs.CONTINUOUS_MUSIC_PLAYBACK_MODE) + v.pack_start(widgetutil.align_left(continue_music_cbx, bottom_pad=6)) + + v.pack_start(widgetutil.align_left(podcasts_heading, + left_pad=3, top_pad=12 , bottom_pad=6)) + attach_boolean(resume_podcasts_cbx, prefs.RESUME_PODCASTS_MODE) v.pack_start(widgetutil.align_left(resume_podcasts_cbx, bottom_pad=6)) + attach_boolean(continue_podcasts_cbx, prefs.CONTINUOUS_PODCAST_PLAYBACK_MODE) + v.pack_start(widgetutil.align_left(continue_podcasts_cbx, bottom_pad=6)) pack_extras(v, "playback") @@ -1339,29 +1455,34 @@ self.extensions_helper.load() # Add the initial panels -add_panel("general", _("General"), GeneralPanel, 'images/pref_tab_general.png') -add_panel("feeds", _("Podcasts"), PodcastsPanel, 'images/pref_tab_feeds.png') -add_panel("downloads", _("Downloads"), DownloadsPanel, +add_panel("general", gettext_lazy("General"), GeneralPanel, + 'images/pref_tab_general.png') +add_panel("feeds", gettext_lazy("Podcasts"), PodcastsPanel, + 'images/pref_tab_feeds.png') +add_panel("downloads", gettext_lazy("Downloads"), DownloadsPanel, 'images/pref_tab_downloads.png') -add_panel("folders", _("Folders"), FoldersPanel, 'images/pref_tab_folders.png') -add_panel("disk_space", _("Disk space"), DiskSpacePanel, +add_panel("folders", gettext_lazy("Folders"), FoldersPanel, + 'images/pref_tab_folders.png') +add_panel("disk_space", gettext_lazy("Disk space"), DiskSpacePanel, 'images/pref_tab_disk_space.png') -add_panel("playback", _("Playback"), PlaybackPanel, +add_panel("playback", gettext_lazy("Playback"), PlaybackPanel, 'images/pref_tab_playback.png') -add_panel("sharing", _("Sharing"), SharingPanel, 'images/pref_tab_sharing.png') -add_panel("conversions", _("Conversions"), ConversionsPanel, +add_panel("sharing", gettext_lazy("Sharing"), SharingPanel, + 'images/pref_tab_sharing.png') +add_panel("conversions", gettext_lazy("Conversions"), ConversionsPanel, 'images/pref_tab_conversions.png') -add_panel("stores", _("Stores"), StoresPanel, 'images/pref_tab_stores.png') -add_panel("extensions", _("Extensions"), ExtensionsPanel, +add_panel("stores", gettext_lazy("Stores"), StoresPanel, + 'images/pref_tab_stores.png') +add_panel("extensions", gettext_lazy("Extensions"), ExtensionsPanel, 'images/pref_tab_extensions.png') class PreferencesWindow(widgetset.PreferencesWindow): def __init__(self): widgetset.PreferencesWindow.__init__(self, _("Preferences")) - self.panel_builders = [] + self.panel_builders = {} for name, title, image_name, panel_builder_class in _PANEL: panel_builder = panel_builder_class() - self.panel_builders.append(panel_builder) + self.panel_builders[name] = panel_builder panel = panel_builder.build_widget() alignment = widgetset.Alignment(xalign=0.5, yalign=0.5) alignment.set_padding(20, 20, 20, 20) @@ -1370,6 +1491,9 @@ self.finish_panels() + def get_panel(self, name): + return self.panel_builders[name] + def select_panel(self, selection): if selection is None: widgetset.PreferencesWindow.select_panel(self, 0) @@ -1380,17 +1504,17 @@ break def do_key_press(self, key, mods): - if key == 
menus.ESCAPE: + if key == keyboard.ESCAPE: self.close() return True return False def do_show(self): - for panel_builder in self.panel_builders: + for panel_builder in self.panel_builders.values(): panel_builder.on_window_open() def do_hide(self): - for panel_builder in self.panel_builders: + for panel_builder in self.panel_builders.values(): panel_builder.on_window_closed() _pref_window = None @@ -1398,6 +1522,7 @@ def show_window(selection=None): """Displays the preferences window.""" global _pref_window + if _pref_window is None: _pref_window = PreferencesWindow() _pref_window.select_panel(selection) @@ -1408,3 +1533,17 @@ def hide_window(): _pref_window.close() + +def enable_net_lookup_buttons(): + global _pref_window + if _pref_window is not None: + _pref_window.get_panel('general').enable_net_lookup_buttons() + +_net_lookup_counts = (0, 0) +def update_net_lookup_counts(net_lookup_count, total_count): + global _pref_window + global _net_lookup_counts + + _net_lookup_counts = (net_lookup_count, total_count) + if _pref_window is not None: + _pref_window.get_panel('general').update_net_lookup_counts() diff -Nru miro-4.0.4/lib/frontends/widgets/rundialog.py miro-6.0/lib/frontends/widgets/rundialog.py --- miro-4.0.4/lib/frontends/widgets/rundialog.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/rundialog.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,6 +29,7 @@ """Run dialogs from the backend thread.""" +from miro import app from miro import dialogs from miro.gtcache import gettext as _ from miro.frontends.widgets import widgetutil @@ -39,6 +40,8 @@ if dialog.__class__ in (dialogs.MessageBoxDialog, dialogs.ChoiceDialog, dialogs.ThreeChoiceDialog): runner = DialogRunner(dialog) + elif isinstance(dialog, dialogs.DatabaseErrorDialog): + runner = DatabaseErrorDialogRunner(dialog) elif isinstance(dialog, dialogs.HTTPAuthDialog): runner = HTTPAuthDialogRunner(dialog) elif isinstance(dialog, dialogs.TextEntryDialog): @@ -139,3 +142,10 @@ checked = self.checkbox.get_checked() text = self.entry.get_text() self.dialog.run_callback(self.dialog.buttons[response], checked, text) + +class DatabaseErrorDialogRunner(DialogRunner): + def __init__(self, dialog): + self.dialog = dialog + + def run(self): + app.db_error_handler.run_backend_dialog(self.dialog) diff -Nru miro-4.0.4/lib/frontends/widgets/searchcontroller.py miro-6.0/lib/frontends/widgets/searchcontroller.py --- miro-4.0.4/lib/frontends/widgets/searchcontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/searchcontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -134,6 +134,19 @@ type = u'search' id = u'search' + def __init__(self): + itemlistcontroller.SimpleItemListController.__init__(self) + self._started_handle = app.search_manager.connect('search-started', + self._on_search_started) + self._complete_handle = app.search_manager.connect('search-complete', + self._on_search_complete) + + def cleanup(self): + itemlistcontroller.SimpleItemListController.cleanup(self) + app.search_manager.disconnect(self._started_handle) + app.search_manager.disconnect(self._complete_handle) + + def build_widget(self): itemlistcontroller.SimpleItemListController.build_widget(self) scroller = widgetset.Scroller(False, True) @@ -152,17 +165,10 @@ # no results see (#16970) return app.search_manager.text == '' - def on_items_changed(self): - # Don't check for an empty list here. Since items don't get - # removed from the search feed, we don't need to do anything. 
- # Also, it results in a false positive just after the search - # starts when the items from the last search get removed - # (#11255) - pass - def make_titlebar(self): titlebar = itemlistwidgets.SearchListTitlebar() titlebar.connect('save-search', self._on_save_search) + titlebar.hide_album_view_button() return titlebar def get_saved_search_text(self): @@ -173,21 +179,10 @@ def _on_save_search(self, widget, search_text): engine = self.titlebar.get_engine() - app.search_manager.perform_search(engine, search_text) + # don't need to perform the search, just set the info for saving + app.search_manager.set_search_info(engine, search_text) app.search_manager.save_search() - def start_tracking(self): - itemlistcontroller.SimpleItemListController.start_tracking(self) - self._started_handle = app.search_manager.connect('search-started', - self._on_search_started) - self._complete_handle = app.search_manager.connect('search-complete', - self._on_search_complete) - - def stop_tracking(self): - itemlistcontroller.SimpleItemListController.stop_tracking(self) - app.search_manager.disconnect(self._started_handle) - app.search_manager.disconnect(self._complete_handle) - def _on_search_started(self, search_manager): self.titlebar.set_search_text(search_manager.text) self.titlebar.set_search_engine(search_manager.engine) diff -Nru miro-4.0.4/lib/frontends/widgets/search.py miro-6.0/lib/frontends/widgets/search.py --- miro-4.0.4/lib/frontends/widgets/search.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/search.py 2013-04-05 16:02:42.000000000 +0000 @@ -49,15 +49,19 @@ signals.SignalEmitter.__init__(self) self.create_signal('search-started') self.create_signal('search-complete') - self.engine = searchengines.get_last_engine().name + self.engine = u'all' self.text = '' self.searching = False + def set_initial_search_info(self, engine, text): + self.engine = engine + self.text = text + def set_search_info(self, engine, text): if not searchengines.get_engine_for_name(engine): - logging.warn('Manager asked to set engine to non-existent %s', + logging.info('Manager asked to set engine to non-existent %s. 
' + 'Probably engine has been removed.', engine) - self.perform_search(searchengines.get_last_engine().name, '') return self.engine = engine self.text = text diff -Nru miro-4.0.4/lib/frontends/widgets/segmented.py miro-6.0/lib/frontends/widgets/segmented.py --- miro-4.0.4/lib/frontends/widgets/segmented.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/segmented.py 2013-04-05 16:02:42.000000000 +0000 @@ -155,6 +155,7 @@ surface = widgetutil.ThreeImageSurface() surface.set_images(left, center, right) surface.draw(context, 0, 0, context.width) + return surface class TextButtonSegment(ButtonSegment): MARGIN = 12 @@ -169,11 +170,11 @@ return math.ceil(width) + (2 * self.MARGIN), 20 def draw(self, context, layout): - ButtonSegment.draw(self, context, layout) + surface = ButtonSegment.draw(self, context, layout) layout.set_text_color(self.TEXT_COLOR[self.active]) textbox = self._get_textbox(layout) _, height = textbox.get_size() - y = int((context.height - height) / 2.0) + y = int((surface.height - height) / 2.0) textbox.draw(context, self.MARGIN, y, context.width - (2 * self.MARGIN), context.height) def _get_textbox(self, layout): diff -Nru miro-4.0.4/lib/frontends/widgets/sharingcontroller.py miro-6.0/lib/frontends/widgets/sharingcontroller.py --- miro-4.0.4/lib/frontends/widgets/sharingcontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/sharingcontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -28,24 +28,21 @@ """playlist.py -- Handle displaying a playlist.""" +from miro import messages from miro.frontends.widgets import itemlistcontroller from miro.frontends.widgets import itemlistwidgets from miro.frontends.widgets import itemrenderer -from miro.frontends.widgets import itemtrack from miro.gtcache import gettext as _ -# The spinning progress bar while a user connects is done by the backend -# with messages sent to the frontend, the idea is the backend should know -# when it is a connect or not so let it handle that case. 
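[Editor's note] The searchcontroller.py hunk above moves the 'search-started'/'search-complete' signal hookup out of start_tracking()/stop_tracking() and into __init__(), with a matching cleanup() that releases the handles, so the connections live for the whole controller lifetime. Below is a minimal self-contained sketch of that connect/disconnect-handle pattern; SketchEmitter and SketchController are hypothetical stand-ins, not Miro classes.

class SketchEmitter(object):
    """Tiny signal emitter: connect() hands back a handle for disconnect()."""
    def __init__(self):
        self._callbacks = {}
        self._next_handle = 0

    def connect(self, name, callback):
        # return an opaque handle the caller must keep to disconnect later
        handle = self._next_handle
        self._next_handle += 1
        self._callbacks[handle] = (name, callback)
        return handle

    def disconnect(self, handle):
        del self._callbacks[handle]

    def emit(self, name):
        for signal_name, callback in list(self._callbacks.values()):
            if signal_name == name:
                callback(self)

class SketchController(object):
    def __init__(self, search_manager):
        # take the handles up front, as the new __init__() above does
        self.search_manager = search_manager
        self._started_handle = search_manager.connect(
            'search-started', self._on_search_started)
        self._complete_handle = search_manager.connect(
            'search-complete', self._on_search_complete)

    def cleanup(self):
        # release both handles so the emitter drops its references to us
        self.search_manager.disconnect(self._started_handle)
        self.search_manager.disconnect(self._complete_handle)

    def _on_search_started(self, manager):
        pass

    def _on_search_complete(self, manager):
        pass

manager = SketchEmitter()
controller = SketchController(manager)
manager.emit('search-started')   # reaches controller._on_search_started
controller.cleanup()             # after this, emits reach nothing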
-class SharingView(itemlistcontroller.SimpleItemListController, - itemlistcontroller.FilteredListMixin): - - def __init__(self, share): +class SharingController(itemlistcontroller.SimpleItemListController): + def __init__(self, share_info): self.type = u'sharing' - self.share = share - self.id = share.id + self.share_info = share_info + self.id = share_info.id itemlistcontroller.SimpleItemListController.__init__(self) - itemlistcontroller.FilteredListMixin.__init__(self) + # tell the backend that it should connect to the share and start + # sending updates + messages.TrackShare(share_info.share_id).send_to_backend() def make_drag_handler(self): return None @@ -53,7 +50,8 @@ def make_titlebar(self): titlebar = itemlistwidgets.SharingTitlebar() titlebar.connect('search-changed', self._on_search_changed) - titlebar.connect('toggle-filter', self.on_toggle_filter) + titlebar.connect('filter-clicked', self.on_filter_clicked) + titlebar.hide_album_view_button() return titlebar def build_renderer(self): @@ -62,9 +60,6 @@ def handle_delete(self): pass - def build_item_tracker(self): - return itemtrack.ItemListTracker.create(self.type, self.share) - def build_widget(self): itemlistcontroller.SimpleItemListController.build_widget(self) diff -Nru miro-4.0.4/lib/frontends/widgets/style.py miro-6.0/lib/frontends/widgets/style.py --- miro-4.0.4/lib/frontends/widgets/style.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/style.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,9 +29,12 @@ """Constants that define the look-and-feel.""" +import logging import math +from miro import app from miro import displaytext +from miro import util from miro.gtcache import gettext as _ from miro.frontends.widgets import cellpack from miro.frontends.widgets import imagepool @@ -291,7 +294,7 @@ return hotspot_info[0] # Renderers for the list view -class ListViewRendererText(widgetset.InfoListRendererText): +class ListViewRendererText(widgetset.ItemListRendererText): """Renderer for list view columns that are just plain text""" bold = False @@ -301,7 +304,7 @@ right_aligned = False def __init__(self): - widgetset.InfoListRendererText.__init__(self) + widgetset.ItemListRendererText.__init__(self) self.set_bold(self.bold) self.set_color(self.color) self.set_font_scale(self.font_size) @@ -316,28 +319,57 @@ attr_name = 'description_oneline' class FeedNameRenderer(ListViewRendererText): - attr_name = 'feed_name' + attr_name = 'parent_title' class DateRenderer(ListViewRendererText): - attr_name = 'display_date' + min_width = 80 + + def get_value(self, info): + return displaytext.date_slashes(info.release_date) class LengthRenderer(ListViewRendererText): - attr_name = 'display_duration_short' + def get_value(self, info): + return displaytext.short_time_string(info.duration) class ETARenderer(ListViewRendererText): right_aligned = True - attr_name = 'display_eta' + + def get_value(self, info): + return info.eta_text class TorrentDetailsRenderer(ListViewRendererText): - attr_name = 'display_torrent_details' + def get_value(self, info): + if not info.is_torrent: + return '' + + details = _( + "S: %(seeders)s | " + "L: %(leechers)s | " + "UR: %(up_rate)s | " + "UT: %(up_total)s | " + "DR: %(down_rate)s | " + "DT: %(down_total)s | " + "R: %(upload_ratio)s", + {"seeders": info.seeders, + "leechers": info.leechers, + "up_rate": info.upload_rate_text, + "up_total": info.upload_size_text, + "down_rate": info.download_rate_text, + "down_total": info.downloaded_size_text, + "upload_ratio": info.upload_ratio_text}) + return 
details class DownloadRateRenderer(ListViewRendererText): right_aligned = True - attr_name = 'display_rate' + + def get_value(self, info): + return info.download_rate_text class SizeRenderer(ListViewRendererText): right_aligned = True - attr_name = 'display_size' + min_width = 60 + def get_value(self, info): + return displaytext.size_string(info.size) class ArtistRenderer(ListViewRendererText): attr_name = 'artist' @@ -346,22 +378,28 @@ attr_name = 'album' class TrackRenderer(ListViewRendererText): - attr_name = 'display_track' + def get_value(self, info): + return displaytext.integer(info.track) class YearRenderer(ListViewRendererText): - attr_name = 'display_year' + def get_value(self, info): + return displaytext.integer(info.year) class GenreRenderer(ListViewRendererText): attr_name = 'genre' class DateAddedRenderer(ListViewRendererText): - attr_name = 'display_date_added' + min_width = 90 + def get_value(self, info): + return displaytext.date_slashes(info.date_added) class LastPlayedRenderer(ListViewRendererText): - attr_name = 'display_last_played' + def get_value(self, info): + return displaytext.date_slashes(info.last_played) class DRMRenderer(ListViewRendererText): - attr_name = 'display_drm' + def get_value(self, info): + return _("Locked") if info.has_drm else u"" class FileTypeRenderer(ListViewRendererText): attr_name = 'file_format' @@ -370,19 +408,29 @@ attr_name = 'show' class KindRenderer(ListViewRendererText): - attr_name = 'display_kind' + def get_value(self, info): + if info.kind == 'movie': + return _("Movie") + elif info.kind == 'show': + return _("Show") + elif info.kind == 'clip': + return _("Clip") + elif info.kind == 'podcast': + return _("Podcast") + else: + return None class PlaylistOrderRenderer(ListViewRendererText): """Displays the order an item is in a particular playlist. """ - def __init__(self, playlist_sorter): + def __init__(self, playlist_order): ListViewRendererText.__init__(self) - self.playlist_sorter = playlist_sorter + self.playlist_order = playlist_order def get_value(self, info): - return str(self.playlist_sorter.sort_key(info) + 1) + return str(self.playlist_order.item_position(info)) -class ListViewRenderer(widgetset.InfoListRenderer): +class ListViewRenderer(widgetset.ItemListRenderer): """Renderer for more complex list view columns. This class is useful for renderers that use the cellpack.Layout class. 
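[Editor's note] The style.py hunks above replace the old attr_name columns, which read a precomputed display_* string off each ItemInfo, with get_value() methods that format raw fields on demand via the displaytext helpers. A minimal sketch of the two column styles follows; SketchRendererText and the fake date_slashes helper are illustrative stand-ins, not Miro code.

from collections import namedtuple
import datetime

def date_slashes(when):
    # stand-in for miro.displaytext.date_slashes
    if when is None:
        return ''
    return when.strftime('%m/%d/%Y')

class SketchRendererText(object):
    """Base renderer: subclasses either set attr_name or override get_value()."""
    attr_name = None

    def get_value(self, info):
        # default: read a display-ready attribute straight off the info
        return getattr(info, self.attr_name)

class GenreColumn(SketchRendererText):
    # old style: the info object already carries display text
    attr_name = 'genre'

class DateColumn(SketchRendererText):
    # new style: format the raw value only when the cell is drawn,
    # so the backend no longer precomputes a display_date string
    def get_value(self, info):
        return date_slashes(info.release_date)

Info = namedtuple('Info', ['genre', 'release_date'])
info = Info(genre='Podcast', release_date=datetime.date(2013, 4, 5))
print(GenreColumn().get_value(info))   # -> Podcast
print(DateColumn().get_value(info))    # -> 04/05/2013

Formatting lazily in get_value() means only the rows actually rendered pay the formatting cost, instead of every item carrying precomputed display strings.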
@@ -441,7 +489,7 @@ # make a Layout Object layout = cellpack.Layout() # add the text - textbox = layout_manager.textbox(self.info.name) + textbox = layout_manager.textbox(self.info.title) textbox.set_wrap_style('truncated-char') # 4px here is half of ListView.COLUMN_PADDING - 2px for luck layout.add_text_line(textbox, 4, 0, width) @@ -467,8 +515,7 @@ def layout_all(self, layout_manager, width, height, selected): # add the button, if needed if self.should_show_download_button(): - layout = cellpack.Layout() - + layout = self.layout_text(layout_manager, width, height) button = self.make_button(layout_manager) button_x = width - button.get_size()[0] layout.add_image(button, button_x, 0, hotspot='download') @@ -476,8 +523,7 @@ layout.center_y(top=0, bottom=height) return layout - if (self.info.state in ('downloading', 'paused') and - self.info.download_info.state != 'pending'): + if self.info.is_download and not self.info.pending_manual_download: return self.layout_progress(layout_manager, width, height) else: return self.layout_text(layout_manager, width, height) @@ -493,16 +539,16 @@ return button def should_show_download_button(self): - nonlocal = self.info.device or self.info.remote - return ((not self.info.downloaded and - self.info.state not in ('downloading', 'paused')) or nonlocal) + can_sync = self.info.device or self.info.remote + can_download = not self.info.downloaded and not self.info.is_download + return can_sync or can_download def layout_progress(self, layout_manager, width, height): """Handle layout when we should display a progress bar """ layout = cellpack.Layout() # add left button - if self.info.state == 'downloading': + if not self.info.is_paused: left_button = 'pause' else: left_button = 'resume' @@ -549,19 +595,15 @@ text = displaytext.expiration_date_short( self.info.expiration_date) return (text, EXPIRING_TEXT_COLOR) - elif (self.info.download_info and - self.info.download_info.rate == 0): - if self.info.download_info.state == 'paused': - return (_('paused'), DOWNLOADING_COLOR) - elif self.info.download_info.state == 'pending': - return (_('queued'), DOWNLOADING_COLOR) - elif self.info.download_info.state == 'failed': - return (self.info.download_info.short_reason_failed, - DOWNLOADING_COLOR) - else: - return (self.info.download_info.startup_activity, - DOWNLOADING_COLOR) - elif not self.info.item_viewed: + elif self.info.is_paused: + return (_('paused'), DOWNLOADING_COLOR) + elif self.info.pending_manual_download: + return (_('queued'), DOWNLOADING_COLOR) + elif self.info.is_failed_download: + return (self.info.short_reason_failed, ERROR_COLOR) + elif self.info.is_download and self.info.rate is None: + return (self.info.startup_activity, DOWNLOADING_COLOR) + elif self.info.new: return (_('Newly Available'), AVAILABLE_COLOR) return ('', self.default_text_color) @@ -570,8 +612,7 @@ if self.info.expiration_date: button_name = 'keep' - elif (self.info.state == 'downloading' and - self.info.download_info.state == 'pending'): + elif self.info.pending_manual_download: button_name = 'cancel' else: return @@ -579,7 +620,7 @@ button_x = width - button.width # right-align layout.add_image(button, button_x, 0, hotspot=button_name) -class RatingRenderer(widgetset.InfoListRenderer): +class RatingRenderer(widgetset.ItemListRenderer): """Render ratings column This cell supports updating based on hover states and rates items based on @@ -594,7 +635,7 @@ ICON_COUNT = 5 def __init__(self): - widgetset.InfoListRenderer.__init__(self) + widgetset.ItemListRenderer.__init__(self) 
self.want_hover = True self.icon = {} # TODO: to support scaling, we need not to check min_height until after @@ -606,7 +647,9 @@ path = resources.path('images/star-%s.png' % state) self.icon[state] = imagepool.get_surface(path, (self.icon_width, self.icon_height)) - self.min_width = self.width = int(self.icon_width * self.ICON_COUNT) + self.min_width = self.width = int((self.icon_width + + self.ICON_HORIZONTAL_SPACING) * + self.ICON_COUNT) self.hover = None def hotspot_test(self, style, layout_manager, x, y, width, height): @@ -679,7 +722,7 @@ state = 'unset' return self.icon[state] -class StateCircleRenderer(widgetset.InfoListRenderer): +class StateCircleRenderer(widgetset.ItemListRenderer): """Renderer for the state circle column.""" # NOTE: we don't inherit from ListViewRenderer because we handle @@ -691,25 +734,11 @@ min_height = 9 def __init__(self): - widgetset.InfoListRenderer.__init__(self) + widgetset.ItemListRenderer.__init__(self) self.icon = {} - self.setup_size = (-1, -1) - - def setup_icons(self, width, height): - """Create icons that will fill our allocated area correctly. """ - if (width, height) == self.setup_size: - return - - icon_width = int(height / 2.0) - icon_height = int((icon_width / self.ICON_PROPORTIONS) + 0.5) - # FIXME: by the time min_width is set below, it doesn't matter --Kaz - self.width = self.min_width = icon_width - self.height = icon_height - icon_dimensions = (icon_width, icon_height) for state in StateCircleRenderer.ICON_STATES: path = resources.path('images/status-icon-%s.png' % state) - self.icon[state] = imagepool.get_surface(path, icon_dimensions) - self.setup_size = (width, height) + self.icon[state] = imagepool.get_surface(path) def get_size(self, style, layout_manager): return self.min_width, self.min_height @@ -718,34 +747,366 @@ return None def render(self, context, layout_manager, selected, hotspot, hover): - self.setup_icons(context.width, context.height) icon = self.calc_icon() + if icon is None: + return # center icon vertically and horizontally - x = int((context.width - self.width) / 2) - y = int((context.height - self.height) / 2) - if icon: - icon.draw(context, x, y, icon.width, icon.height) + x = (context.width - icon.width) // 2 + y = (context.height - icon.height) // 2 + icon.draw(context, x + 1, y, icon.width, icon.height) def calc_icon(self): """Get the icon we should show. :returns: ImageSurface to display """ - if self.info.state == 'downloading': + if self.info.is_download: return self.icon['downloading'] - elif self.info.is_playing: + elif app.playback_manager.is_playing_item(self.info): return self.icon['playing'] - elif self.info.state == 'newly-downloaded': - return self.icon['unplayed'] - elif (self.info.downloaded and self.info.is_playable and - not self.info.video_watched): - return self.icon['new'] - elif (not self.info.item_viewed and not self.info.expiration_date and - not self.info.is_external and not self.info.downloaded): + elif self.info.new: return self.icon['new'] + elif self.info.downloaded and not self.info.video_watched: + return self.icon['unplayed'] + else: + return None + +class _MultiRowAlbumRenderStrategy(object): + """Utility class that controls what we render in MultiRowAlbumRenderer + + We subclass for each of the different modes that we use (standard, feed, + videos, etc). + + This class is just used internally. 
+ """ + + def get_image_path(self, item_info, first_info): + """Get a path to the image we should draw.""" + raise NotImplementedError() + + def get_album(self, item_info, first_info): + """Get album name to render.""" + raise NotImplementedError() + + def get_artist(self, item_info, first_info): + """Get artist name to render.""" + raise NotImplementedError() + + def get_track_number(self, item_info, first_info): + """Track number to show""" + raise NotImplementedError() + +class _StandardRenderStrategy(_MultiRowAlbumRenderStrategy): + def get_image_path(self, item_info, first_info): + if first_info.cover_art_path is not None: + return first_info.cover_art_path + else: + return first_info.thumbnail + + def get_album(self, item_info, first_info): + return item_info.album + + def get_artist(self, item_info, first_info): + return item_info.artist + + def get_track_number(self, item_info, first_info): + if item_info.track is not None: + return item_info.track + else: + return '' + +class _FeedRenderStrategy(_MultiRowAlbumRenderStrategy): + def get_image_path(self, item_info, first_info): + try: + feed_info = widgetutil.get_feed_info(item_info.feed_id) + except KeyError: + return first_info.thumbnail + else: + return feed_info.thumbnail + + def get_album(self, item_info, first_info): + return '' + + def get_artist(self, item_info, first_info): + return item_info.parent_title + + def get_track_number(self, item_info, first_info): + return '' + +class _VideoRenderStrategy(_MultiRowAlbumRenderStrategy): + def get_image_path(self, item_info, first_info): + return first_info.thumbnail + + def get_album(self, item_info, first_info): + if item_info.show: + return item_info.show + elif item_info.parent_title: + return item_info.parent_title else: return None + def get_artist(self, item_info, first_info): + return None + + def get_track_number(self, item_info, first_info): + return '' + +class MultiRowAlbumRenderer(widgetset.ItemListRenderer): + """Renderer for album view.""" + + IGNORE_PADDING = True + DRAW_BACKGROUND = False + + IMAGE_MARGIN_TOP = 4 + IMAGE_MARGIN_BOTTOM = 3 + IMAGE_MARGIN_LEFT = 7 + IMAGE_MARGIN_RIGHT = 6 + + MIN_TEXT_WIDTH = 78 + TEXT_PADDING_RIGHT = 6 + TRACK_NUMBER_MARGIN_RIGHT = 13 + + BACKGROUND_COLOR = widgetutil.WHITE + TEXT_COLOR = widgetutil.BLACK + TRACK_TEXT_COLOR = widgetutil.css_to_color('#969696') + BOTTOM_LINE_COLOR = widgetutil.css_to_color('#dddddd') + FONT_SIZE = widgetutil.font_scale_from_osx_points(11) + + min_width = 260 + + def __init__(self): + widgetset.ItemListRenderer.__init__(self) + self._render_strategy = _StandardRenderStrategy() + self._setup_default_image_map() + + def _setup_default_image_map(self): + """Setup the _default_image_map attribute. + + _default_image_map maps the default images for things to a default + image that looks better in album view. 
+ """ + # check if we're using one of the default image files and switch to an + # album-view-specific default file in that case + mappings = [ + ('thumb-default-audio.png', 'album-view-default-audio.png'), + ('thumb-default-video.png', 'album-view-default-video.png'), + ('icon-podcast-small.png', 'album-view-default-podcast.png'), + ('icon-watched-folder.png', 'album-view-watched-folder.png'), + ] + self._default_image_map = {} + for src, dest in mappings: + src_path = resources.path('images/%s' % src) + dest_path = resources.path('images/%s' % dest) + self._default_image_map[src_path] = dest_path + + def get_image_path(self): + image_path = self._render_strategy.get_image_path( + self.info, self.get_first_info()) + if image_path in self._default_image_map: + return self._default_image_map[image_path] + else: + return image_path + + def get_album(self): + return self._render_strategy.get_album(self.info, + self.get_first_info()) + + def get_artist(self): + return self._render_strategy.get_artist(self.info, + self.get_first_info()) + + def get_track_number(self): + return self._render_strategy.get_track_number(self.info, + self.get_first_info()) + + def get_current_row(self): + return self.group_info[0] + + def get_total_rows(self): + return self.group_info[1] + + def get_first_info(self): + return self.group_info[2] + + def switch_mode(self, new_mode): + """Switch which mode we use to render the album art. + + Currently there are 3 modes: + + - 'standard' -- standard view of the data + - 'feed' -- use feed info instead of album info + - 'video' -- mode for the all videos tab + """ + if new_mode == 'standard': + self._render_strategy = _StandardRenderStrategy() + elif new_mode == 'feed': + self._render_strategy = _FeedRenderStrategy() + elif new_mode == 'video': + self._render_strategy = _VideoRenderStrategy() + else: + raise ValueError("Unknown mode: %s" % new_mode) + + def get_size(self, style, layout_manager): + # return 0 for height because we render to multiple columns. We let + # the other columns determine the row height + return self.min_width, 0 + + def hotspot_test(self, style, layout_manager, x, y, width, height): + return 'album-click' + + def render(self, context, layout_manager, selected, hotspot, hover): + if not self.sanity_check_before_render(context): + return + + # draw our background color behind everything. We need this in case + # there's transparency in our album art + context.set_color(self.BACKGROUND_COLOR) + context.rectangle(0, 0, context.width, context.height) + context.fill() + + self.calc_album_art_size(context) + self.render_album_art(context) + self.render_track_number(context, layout_manager) + self.render_album_or_artist(context, layout_manager) + + def sanity_check_before_render(self, context): + """Do some sanity checking before starting to render things. + + Returns True if we're okay to render, False if we should bail + """ + if self.group_info is None: + # we can't render if group_info isn't set + logging.warn("group_info is None in MultiRowAlbumRenderer") + return False + if context.height == 0: + # not sure how this would happen, but we need to avoid + # divide-by-zero errors if it does + logging.warn("row height is 0 in MultiRowAlbumRenderer") + return False + return True + + def calc_album_art_size(self, context): + """Calculate how big we are going to draw album art. + + This is currently big enough so it fits in 6 rows with the top/bottom + padding. 
""" + self.album_art_size = context.height * 6 + self.album_art_size -= (self.IMAGE_MARGIN_TOP + + self.IMAGE_MARGIN_BOTTOM) + + def make_album_art(self, context): + """Make an image to draw as album art. + + Returns ImageSurface to draw or None if we don't have anything + """ + if self.get_total_rows() < 6: + # don't draw album art if we have fewer than 6 items in the group + return None + + album_art_path = self.get_image_path() + if album_art_path is None: + return None + return imagepool.get_surface(album_art_path, + size=(self.album_art_size, self.album_art_size), + invalidator=util.mtime_invalidator( + album_art_path)) + + def render_album_art(self, context): + album_art = self.make_album_art(context) + if (album_art is not None and + self.cell_contains_album_art(context, album_art)): + self.render_album_art_slice(context, album_art) + + def cell_contains_album_art(self, context, album_art): + """Does this cell contain a portion of the album art? + """ + album_art_bottom = album_art.height + self.IMAGE_MARGIN_TOP + cell_top = self.get_current_row() * context.height + cell_bottom = cell_top + context.height + return (cell_bottom > self.IMAGE_MARGIN_TOP and + cell_top < album_art_bottom) + + def render_album_art_slice(self, context, image): + """Render the slice of the album art for this cell.""" + + if context.width < image.width: + # not enough width to draw + return + + # setup variables to track where we are copying from and to + + dest_x = self.IMAGE_MARGIN_LEFT + width = image.width + + dest_y = 0 + height = context.height + + src_x = 0 + src_y = self.get_current_row() * context.height - self.IMAGE_MARGIN_TOP + + if src_y < 0: + # The cell contains the top padding for our image. + # move dest_y and src_y down + dest_y -= src_y + src_y = 0 + # decrease height + height -= dest_y + src_y_bottom = src_y + height + if src_y_bottom > image.height: + # The cell contains the bottom padding for our image. + # decrease height + extra_space = src_y_bottom - image.height + height -= extra_space + # draw our image slice + if height > 0: + image.draw_rect(context, dest_x, dest_y, src_x, src_y, + width, height) + + def render_album_or_artist(self, context, layout_manager): + x = (self.album_art_size + self.IMAGE_MARGIN_LEFT + + self.IMAGE_MARGIN_RIGHT) + if self.get_current_row() == 0: + text = self.get_artist() + bold = True + elif self.get_current_row() == 1: + text = self.get_album() + bold = False + else: + return + + width = self.album_artist_text_end - x + if width < 10: + # don't try to render if we have a really small or negative + # amount of space + return + # setup a textbox for the text + layout_manager.set_font(self.FONT_SIZE, bold=bold) + layout_manager.set_text_color(self.TEXT_COLOR) + textbox = layout_manager.textbox(text) + # truncate the textbox to the area we have in a cell. 
+ textbox.set_wrap_style('truncated-char') + textbox.set_width(width) + # middle-align the text to line-up with the other cells + line_height = textbox.font.line_height() + y = (context.height - line_height) / 2.0 + # okay, ready to draw + textbox.draw(context, x, y, width, line_height) + + def render_track_number(self, context, layout_manager): + # setup a textbox for the text + layout_manager.set_font(self.FONT_SIZE) + layout_manager.set_text_color(self.TRACK_TEXT_COLOR) + textbox = layout_manager.textbox(str(self.get_track_number())) + # place the text on the right-side of the cell + text_width, text_height = textbox.get_size() + x = context.width - self.TEXT_PADDING_RIGHT - text_width + # middle-align the text to line-up with the other cells + y = (context.height - text_height) // 2 + # okay, ready to draw + textbox.draw(context, x, y, text_width, text_height) + self.album_artist_text_end = x - self.TEXT_PADDING_RIGHT + class ProgressBarColorSet(object): PROGRESS_BASE_TOP = (0.92, 0.53, 0.21) PROGRESS_BASE_BOTTOM = (0.90, 0.45, 0.08) @@ -902,9 +1263,5 @@ class ItemProgressBarDrawer(ProgressBarDrawer): def __init__(self, info): - ProgressBarDrawer.__init__(self, 0, ProgressBarColorSet) - if info.download_info and info.size > 0.0: - self.progress_ratio = (float(info.download_info.downloaded_size) / - info.size) - else: - self.progress_ratio = 0.0 + ProgressBarDrawer.__init__(self, info.download_progress, + ProgressBarColorSet) diff -Nru miro-4.0.4/lib/frontends/widgets/tabcontroller.py miro-6.0/lib/frontends/widgets/tabcontroller.py --- miro-4.0.4/lib/frontends/widgets/tabcontroller.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/tabcontroller.py 2013-04-05 16:02:42.000000000 +0000 @@ -225,13 +225,21 @@ lambda: app.sharing_manager.unregister_interest(self)) widgetset.VBox.__del__(self) - def build_header(self, text): + @classmethod + def build_header(klass, text): label = widgetset.Label(text) - label.set_size(self.HEADER_SIZE) - label.set_color(self.HEADER_COLOR) + label.set_size(klass.HEADER_SIZE) + label.set_color(klass.HEADER_COLOR) label.set_bold(True) return label + @classmethod + def build_text(klass, text): + label = widgetset.Label(text) + label.set_size(klass.TEXT_SIZE) + label.set_color(klass.TEXT_COLOR) + return label + def _build_daap_section(self, bottom): label = self.build_header(_("%(shortappname)s Sharing", self.trans_data)) @@ -239,12 +247,10 @@ bottom_pad=10)) # Note: "Miro iPad app" is the name of a piece of software-- # don't substitute Miro for %(appname)s here. - label = widgetset.Label( + label = self.build_text( _("%(shortappname)s can stream and download files to and from " "other %(shortappname)ss on your local network and to the " "Miro iPad app. It's awesome!", self.trans_data)) - label.set_size(self.TEXT_SIZE) - label.set_color(self.TEXT_COLOR) label.set_wrap(True) label.set_size_request(550, -1) bottom.pack_start(widgetutil.align_left(label, left_pad=20, @@ -335,14 +341,12 @@ label_line.pack_start(widgetutil.align_top(help_button)) bottom.pack_start(label_line) - label = widgetset.Label( + label = self.build_text( _("Connect the USB cable to sync your Android device with " "%(shortappname)s. Be sure to set your device to 'USB Mass " "Storage' mode in your device settings. 
Attach your digital " "camera, and convert your video files to be instantly " "web-ready.", self.trans_data)) - label.set_size(self.TEXT_SIZE) - label.set_color(self.TEXT_COLOR) label.set_size_request(400, -1) label.set_wrap(True) vbox.pack_start(widgetutil.align_left(label, left_pad=20, @@ -356,12 +360,10 @@ self.show_unknown.connect('toggled', self.show_all_devices_toggled) show_all_vbox.pack_start(self.show_unknown) padding = self.show_unknown.get_text_padding() - label = widgetset.Label( + label = self.build_text( _("Use this if your phone doesn't appear in %(shortappname)s when " "you connect it to the computer, or if you want to sync with an " "external drive.", self.trans_data)) - label.set_size(self.TEXT_SIZE) - label.set_color(self.TEXT_COLOR) label.set_size_request(370 - padding, -1) label.set_wrap(True) show_all_vbox.pack_start(widgetutil.pad(label, top=10, left=padding)) @@ -382,13 +384,11 @@ label = self.build_header(_("Miro on your iPad")) vbox.pack_start(widgetutil.align_left(label, left_pad=20, bottom_pad=10)) - label = widgetset.Label( + label = self.build_text( _("The gorgeous Miro iPad app lets you wirelessly stream music " "and videos from %(shortappname)s on your desktop to your iPad. " "You can also download songs and videos to your iPad and take " "them with you.", self.trans_data)) - label.set_size(self.TEXT_SIZE) - label.set_color(self.TEXT_COLOR) label.set_wrap(True) label.set_size_request(400, -1) vbox.pack_start(widgetutil.align_left(label, left_pad=20, @@ -406,11 +406,9 @@ label = self.build_header(_("Miro on Android")) vbox.pack_start(widgetutil.align_left(label, left_pad=20, bottom_pad=10)) - label = widgetset.Label( + label = self.build_text( _("We don't yet have a Miro app for Android, but you can stream " "to your device using other DAAP apps.")) - label.set_size(self.TEXT_SIZE) - label.set_color(self.TEXT_COLOR) label.set_wrap(True) label.set_size_request(550, -1) vbox.pack_start(widgetutil.align_left(label, left_pad=20, diff -Nru miro-4.0.4/lib/frontends/widgets/tableselection.py miro-6.0/lib/frontends/widgets/tableselection.py --- miro-4.0.4/lib/frontends/widgets/tableselection.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/tableselection.py 2013-04-05 16:02:42.000000000 +0000 @@ -31,7 +31,6 @@ the platform tableview modules provide the platform-specific methods used here. 
""" -from __future__ import with_statement # neccessary for python2.5 from contextlib import contextmanager import logging diff -Nru miro-4.0.4/lib/frontends/widgets/tablistdnd.py miro-6.0/lib/frontends/widgets/tablistdnd.py --- miro-4.0.4/lib/frontends/widgets/tablistdnd.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/tablistdnd.py 2013-04-05 16:02:42.000000000 +0000 @@ -270,16 +270,24 @@ def validate_drop(self, _widget, model, typ, _source_actions, parent, position): - if position == -1 and parent and typ in self.allowed_types(): - device = model[parent][0] - if not isinstance(device, messages.DeviceInfo): - # DAAP share - return widgetset.DRAG_ACTION_NONE - if device.mount and not getattr(device, 'fake', False): + if typ not in self.allowed_types() or not parent: + return widgetset.DRAG_ACTION_NONE + device = model[parent][0] + if not isinstance(device, messages.DeviceInfo): + return widgetset.DRAG_ACTION_NONE + if device.mount: + if position != -1: + return (widgetset.DRAG_ACTION_COPY, parent) + elif not getattr(device, 'fake', False): return widgetset.DRAG_ACTION_COPY + else: + return (widgetset.DRAG_ACTION_COPY, + model.parent_iter(parent)) return widgetset.DRAG_ACTION_NONE def accept_drop(self, _widget, model, _type, _source_actions, parent, _position, videos): device = model[parent][0] + if getattr(device, 'fake', False): + device = model[model.parent_iter(parent)][0] messages.DeviceSyncMedia(device, videos).send_to_backend() diff -Nru miro-4.0.4/lib/frontends/widgets/tablist.py miro-6.0/lib/frontends/widgets/tablist.py --- miro-4.0.4/lib/frontends/widgets/tablist.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/tablist.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,7 +29,7 @@ """Displays the list of tabs on the left-hand side of the app.""" -from __future__ import with_statement # python2.5 +import collections import itertools from hashlib import md5 try: @@ -47,12 +47,12 @@ from miro import errors from miro.gtcache import gettext as _ from miro.plat import resources -from miro.frontends.widgets import playback -from miro.frontends.widgets import style from miro.frontends.widgets import imagepool +from miro.frontends.widgets import keyboard +from miro.frontends.widgets import playback from miro.frontends.widgets import statictabs +from miro.frontends.widgets import style from miro.frontends.widgets import widgetutil -from miro.frontends.widgets import menus from miro.frontends.widgets.tablistdnd import (FeedListDragHandler, FeedListDropHandler, PlaylistListDragHandler, PlaylistListDropHandler, MediaTypeDropHandler, DeviceDropHandler) @@ -181,7 +181,7 @@ del self.updating_animations[id_] return self.view.pulse_updating_image(iter_) - timer_id = timer.add(0.1, self.pulse_updating_animation, id_) + timer_id = timer.add(0.5, self.pulse_updating_animation, id_) self.updating_animations[id_] = timer_id class TabBlinkerMixin(object): @@ -323,14 +323,14 @@ self.view.connect_weak('row-activated', self.on_row_activated) def on_key_press(self, view, key, mods): - if key == menus.DOWN_ARROW and len(mods) == 0: + if key == keyboard.DOWN_ARROW and len(mods) == 0: # Test if the user is trying to move down past the last row in the # table, if so, select the next tablist. 
if view.is_selected(_last_iter(view, view.model)): if self._move_to_next_tablist(): return True return False - elif key == menus.UP_ARROW and len(mods) == 0: + elif key == keyboard.UP_ARROW and len(mods) == 0: # Test if the user is trying to move up past the first row in the # table, if so, select the next tablist. if view.is_selected(view.model.first_iter()): @@ -612,9 +612,12 @@ self._after_change(True) def on_key_press(self, view, key, mods): - if key == menus.DELETE or key == menus.BKSPACE: + if key == keyboard.DELETE or key == keyboard.BKSPACE: self.on_delete_key_pressed() return True + elif key == keyboard.F5: + app.widgetapp.update_selected_feeds() + return True return TabList.on_key_press(self, view, key, mods) def on_delete_key_pressed(self): @@ -626,7 +629,7 @@ self.expand(self.info.id) self.view.model_changed() - def on_row_expanded_change(self, view, iter_, path, expanded): + def on_row_expanded_change(self, view, iter_, expanded): info = self.view.model[iter_][0] if info == self.info: message = messages.TabExpandedChange(self.type, expanded) @@ -673,7 +676,7 @@ self.emit('tab-name-changed', old_name, info.name) def remove(self, id_list): - deleted_ids = [] + deleted_ids = set(id_list) with self.removing(): for id_ in id_list: try: @@ -682,9 +685,9 @@ # child of a tab we already deleted continue # override default of nil - deleted_ids = self.forget_child_iters(iter_) + deleted_ids.update(self.forget_child_iters(iter_)) self.view.model.remove(iter_) - if set(deleted_ids + id_list).intersection(set(app.tabs.selected_ids)): + if deleted_ids.intersection(set(app.tabs.selected_ids)): # hack for 17653: on OS X, deleting the selected tab doesn't # send selection-changed - so if the tab(s) we've # deleted is selected, explicitly change the selection to @@ -695,16 +698,16 @@ def forget_child_iters(self, parent_iter): model = self.view.model iter_ = model.child_iter(parent_iter) - deleted_ids = [] + deleted_ids = set() while iter_ is not None: # No need to wrap these around a try...except. If we can get # the child iter (from above) then this must exist. If it doesn't # then there is some sort of internal inconsistency. id_ = model[iter_][0].id del self.iter_map[id_] - deleted_ids += self.forget_child_iters(iter_) + deleted_ids.update(self.forget_child_iters(iter_)) iter_ = self.view.model.remove(iter_) - deleted_ids.append(id_) + deleted_ids.add(id_) return deleted_ids def model_changed(self): @@ -728,6 +731,8 @@ class DeviceTabListHandler(object): def __init__(self, tablist): self.tablist = tablist + # map device ids to the fake tabs that we created for them. 
+ self.fake_tabs_created = {} def _fake_info(self, info, typ, name): new_data = { @@ -753,17 +758,29 @@ self._fake_info(info, 'audio', _('Music'))] def _add_fake_tabs(self, info): + if info.id in self.fake_tabs_created: + # fake tabs already added + return + + fake_tabs = self._get_fake_infos(info) + self.fake_tabs_created[info.id] = fake_tabs with self.tablist.adding(): - for fake in self._get_fake_infos(info): - HideableTabList.add(self.tablist, - fake, - info.id) + for fake in fake_tabs: + HideableTabList.add(self.tablist, fake, info.id) try: self.tablist.expand(info.id) except errors.WidgetActionError: pass # if the Connect Tab isn't open, we can't expand the tab - def add(self, info): + def _remove_fake_tabs(self, info): + if info.id not in self.fake_tabs_created: + # fake tabs already removed + return + fake_tabs = self.fake_tabs_created[info.id] + self.tablist.remove([fake_tab.id for fake_tab in fake_tabs]) + del self.fake_tabs_created[info.id] + + def add(self, info, parent_id): HideableTabList.add(self.tablist, info) if info.mount and not info.info.has_multiple_devices: self._add_fake_tabs(info) @@ -816,18 +833,130 @@ def on_hotspot_clicked(self, view, hotspot, iter_): if hotspot == 'eject-device': - info = view.model[iter_][0] - messages.DeviceEject(info).send_to_backend() + self.eject_device(view.model[iter_][0]) + + def eject_device(self, device_info): + # stop playback from the device + currently_playing = app.playback_manager.get_playing_item() + if currently_playing and currently_playing.device == device_info: + app.playback_manager.stop() + # navigate away from the audio/video tabs and remove them + self._remove_fake_tabs(device_info) + messages.DeviceEject(device_info).send_to_backend() + +class FakeSharingInfo(object): + """TabInfo that we use for the "fake" tabs under a share + + This includes the audio, video, podcast, and playlist tabs. + """ + def __init__(self, share_id, tab_name, label): + self.id = u"sharing-%s-%s" % (share_id, tab_name) + self.share_id = share_id + self.name = label + self.type = u'sharing-fake-playlist' + self.icon = widgetutil.make_surface("icon-%s" % tab_name) + self.active_icon = widgetutil.make_surface("icon-%s_active" % tab_name) class SharingTabListHandler(object): + """Handles all of the sharing tabs.""" def __init__(self, tablist): self.tablist = tablist + # map share ids to the fake tabs for them + self.fake_tabs_created = {} + # before the fake tabs are created, we can't add any playlist tabs. + # pending_playlist_tabs maps share ids to those playlist tabs. + self.pending_playlist_tabs = collections.defaultdict(list) + + def _make_fake_tabs(self, info): + return [ + FakeSharingInfo(info.share_id, u'video', _('Video')), + FakeSharingInfo(info.share_id, u'audio', _('Audio')), + FakeSharingInfo(info.share_id, u'playlist', _('Playlists')), + FakeSharingInfo(info.share_id, u'podcast', _('Podcasts')), + ] + + def _add_fake_tabs(self, info): + """Add the pseudo-tabs below the share tabs. + + This includes the audio, video, playlists, and podcasts tabs. 
+ """ + if info.share_id in self.fake_tabs_created: + # fake tabs already added + return + self.fake_tabs_created[info.share_id] = [] + with self.tablist.adding(): + for tab in self._make_fake_tabs(info): + self.tablist.add(tab, info.id) + self.fake_tabs_created[info.share_id].append(tab) + self._add_pending_playlist_tabs(info.share_id) + try: + self.tablist.expand(info.id) + except errors.WidgetActionError: + pass # if the Connect Tab isn't open, we can't expand the tab + + def _remove_fake_tabs(self, info): + if info.share_id not in self.fake_tabs_created: + # fake tabs already removed + return + fake_tabs = self.fake_tabs_created[info.share_id] + self.tablist.remove([fake_tab.id for fake_tab in fake_tabs]) + del self.fake_tabs_created[info.share_id] + + def _add_pending_playlist_tabs(self, share_id): + if share_id not in self.pending_playlist_tabs: + return + + pending_tabs = self.pending_playlist_tabs.pop(share_id) + with self.tablist.adding(): + for tab in pending_tabs: + self.add_playlist_tab(tab) + + def add(self, info, parent_id): + # Need to avoid calling ConnectList.add since that will result in an + # infinite loop. + if isinstance(info, messages.SharingInfo): + self.add_sharing_tab(info) + else: + self.add_playlist_tab(info) + + def add_sharing_tab(self, info): + HideableTabList.add(self.tablist, info) + self._handle_sharing_info_change(info) + + def add_playlist_tab(self, info): + if info.share_id in self.fake_tabs_created: + # fake tabs are created, we can add this info now + parent_id = info.share_id + if info.podcast: + parent_id = "sharing-%s-podcast" % (info.share_id,) + else: + parent_id = "sharing-%s-playlist" % (info.share_id,) + HideableTabList.add(self.tablist, info, parent_id) + else: + # fake tabs not created yet, wait to add the info + self.pending_playlist_tabs[info.share_id].append(info) + + def update(self, info): + # Need to avoid calling ConnectList.update since that will result in an + # infinite loop. + HideableTabList.update(self.tablist, info) + if isinstance(info, messages.SharingInfo): + self._handle_sharing_info_change(info) + + def _handle_sharing_info_change(self, info): + if info.is_updating: + self.tablist.start_updating(info.id) + else: + self.tablist.stop_updating(info.id) + if info.mount: + self._add_fake_tabs(info) + else: + self._remove_fake_tabs(info) def on_hotspot_clicked(self, view, hotspot, iter_): if hotspot == 'eject-device': # Don't track this tab anymore for music. info = view.model[iter_][0] - info.mount = False # We must stop the playback if we are playing from the same # share that we are ejecting from. 
host = info.host @@ -843,49 +972,42 @@ if typ == u'connect' and (info == selected_tabs[0] or getattr(selected_tabs[0], 'parent_id', None) == info.id): app.tabs.select_guide() - messages.SharingEject(info).send_to_backend() + messages.StopTrackingShare(info.share_id).send_to_backend() - def update(self, info): - if info.is_updating: - self.tablist.start_updating(info.id) + def init_info(self, info): + info.unwatched = info.available = 0 + if isinstance(info, messages.SharingInfo): + self.init_share_info(info) else: - self.tablist.stop_updating(info.id) - HideableTabList.update(self.tablist, info) + self.init_playlist_info(info) - def init_info(self, info): + def init_share_info(self, info): info.type = u'sharing' - info.unwatched = info.available = 0 - active = None - if info.is_folder and info.playlist_id is None: - thumb_path = resources.path('images/sharing.png') - # Checking the name instead of a supposedly unique id is ok for now - # because - elif info.playlist_id == u'video': - thumb_path = resources.path('images/icon-video.png') - active = resources.path('images/icon-video_active.png') - info.name = _('Video') - elif info.playlist_id == u'audio': - thumb_path = resources.path('images/icon-audio.png') - active = resources.path('images/icon-audio_active.png') - info.name = _('Music') - elif info.playlist_id == u'playlist': - thumb_path = resources.path('images/icon-playlist.png') - active = resources.path('images/icon-playlist_active.png') - info.name = _('Playlists') - elif info.playlist_id == u'podcast': - thumb_path = resources.path('images/icon-podcast.png') - active = resources.path('images/icon-podcast_active.png') - info.name = _('Podcasts') + info.icon = imagepool.get_surface( + resources.path('images/sharing.png')) + + def init_playlist_info(self, info): + info.type = u'sharing-playlist' + if info.podcast: + thumb_path = resources.path('images/icon-podcast-small.png') + active = resources.path('images/icon-podcast-small_active.png') else: - if info.podcast: - thumb_path = resources.path('images/icon-podcast-small.png') - active = resources.path('images/icon-podcast-small_active.png') - else: - thumb_path = resources.path('images/icon-playlist-small.png') - active = resources.path('images/icon-playlist-small_active.png') + thumb_path = resources.path('images/icon-playlist-small.png') + active = resources.path('images/icon-playlist-small_active.png') info.icon = imagepool.get_surface(thumb_path) - if active: - info.active_icon = imagepool.get_surface(active) + info.active_icon = imagepool.get_surface(active) + +class SharingPlaylistTabListHandler(object): + def __init__(self, tablist): + self.tablist = tablist + + def add(self, info, parent_id): + parent_id = info.share_id + if info.podcast: + parent_id = "sharing-%s-podcast" % (info.share_id,) + else: + parent_id = "sharing-%s-playlist" % (info.share_id,) + HideableTabList.add(self.tablist, info, parent_id) class ConnectList(TabUpdaterMixin, HideableTabList): name = _('Connect') @@ -900,18 +1022,21 @@ HideableTabList.__init__(self) TabUpdaterMixin.__init__(self) self._set_up = True # setup_list is never called? 
+ + sharing_tab_list_handler = SharingTabListHandler(self) self.info_class_map = { messages.DeviceInfo: DeviceTabListHandler(self), - messages.SharingInfo: SharingTabListHandler(self), + messages.SharingInfo: sharing_tab_list_handler, + messages.SharingPlaylistInfo: sharing_tab_list_handler, TabInfo: None, } self.view.connect_weak('hotspot-clicked', self.on_hotspot_clicked) self.view.set_drag_dest(DeviceDropHandler(self)) - def on_row_expanded_change(self, view, iter_, path, expanded): + def on_row_expanded_change(self, view, iter_, expanded): info = self.view.model[iter_][0] if info is self.info: - HideableTabList.on_row_expanded_change(self, view, iter_, path, + HideableTabList.on_row_expanded_change(self, view, iter_, expanded) def on_delete_key_pressed(self): @@ -924,24 +1049,26 @@ def on_hotspot_clicked(self, view, hotspot, iter_): info = self.view.model[iter_][0] - handler = self.info_class_map[type(info)] - return handler.on_hotspot_clicked(view, hotspot, iter_) + handler = self.info_class_map.get(type(info)) + if hasattr(handler, 'on_hotspot_clicked'): + return handler.on_hotspot_clicked(view, hotspot, iter_) def init_info(self, info): if info is self.info: return - handler = self.info_class_map[type(info)] - return handler.init_info(info) + handler = self.info_class_map.get(type(info)) + if hasattr(handler, 'init_info'): + handler.init_info(info) def add(self, info, parent_id=None): - handler = self.info_class_map[type(info)] + handler = self.info_class_map.get(type(info)) if hasattr(handler, 'add'): - handler.add(info) # device doesn't use the parent_id + handler.add(info, parent_id) # device doesn't use the parent_id else: HideableTabList.add(self, info, parent_id) def update(self, info): - handler = self.info_class_map[type(info)] + handler = self.info_class_map.get(type(info)) if hasattr(handler, 'update'): handler.update(info) else: diff -Nru miro-4.0.4/lib/frontends/widgets/videobox.py miro-6.0/lib/frontends/widgets/videobox.py --- miro-4.0.4/lib/frontends/widgets/videobox.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/videobox.py 2013-04-05 16:02:42.000000000 +0000 @@ -124,8 +124,8 @@ app.playback_manager.connect('will-stop', self.handle_stop) def on_info_change(self, obj, item_info): - self.item_name = item_info.name - self.feed_name = item_info.feed_name + self.item_name = item_info.title + self.parent_title = item_info.parent_title self.is_feed = not item_info.is_external self.album = item_info.album self.artist = item_info.artist @@ -144,12 +144,12 @@ def reset(self): self.item_name = "" - self.feed_name = self.album = self.artist = None + self.parent_title = self.album = self.artist = None self.is_audio = self.is_video = self.is_feed = None def get_details(self): - if self.feed_name and self.is_feed: - details = self.feed_name + if self.parent_title and self.is_feed: + details = self.parent_title elif self.is_audio: # non-feed audio ~= music album = self.album or _("Unknown Album") @@ -288,6 +288,8 @@ self.queue_redraw() def set_duration(self, duration): + if duration is None: + duration = 0 self.duration = duration self.queue_redraw() @@ -315,6 +317,8 @@ class ProgressSlider(widgetset.CustomSlider): def __init__(self): widgetset.CustomSlider.__init__(self) + # progress sliders always use the range [0, 1] + self.set_range(0, 1) self.set_can_focus(False) self.background_surface = widgetutil.ThreeImageSurface('playback_track') self.progress_surface = widgetutil.ThreeImageSurface('playback_track_progress') @@ -328,7 +332,7 @@ 
app.playback_manager.connect('will-play', self.handle_play) app.playback_manager.connect('will-stop', self.handle_stop) self.disable() - self.duration = 0 + self.playing = False def handle_progress(self, obj, elapsed, total): if elapsed is None or total is None: @@ -339,14 +343,20 @@ self.set_value(0) def handle_play(self, obj, duration): - self.duration = duration + self.playing = True + # This makes it so that the mousewheel scrolls exactly 5 seconds + if duration is None or duration < 5: + step_size = 1 + else: + step_size = 5.0 / duration + self.set_increments(step_size, step_size, step_size) self.enable() def handle_selecting(self, obj, item_info): self.disable() def handle_stop(self, obj): - self.duration = 0 + self.playing = False self.set_value(0) self.disable() @@ -360,7 +370,7 @@ return (60, 17) def slider_size(self): - return 1 + return self.progress_cursor.width def draw(self, context, layout): if not app.playback_manager.is_playing: @@ -379,15 +389,14 @@ background, progress, cursor = (self.background_surface, self.progress_surface, self.progress_cursor) - min, max = self.get_range() - scale = (self.get_value() - min) / (max - min) - cursor_pos = int(scale * (context.width - 18)) + cursor_pos = self.get_slider_pos() background.draw(context, 0, 1, context.width, context.height - 1) if cursor_pos: - progress.draw(context, 0, 1, cursor_pos + 9) + progress.draw(context, 0, 1, cursor_pos) - if self.duration > 0: - cursor.draw(context, cursor_pos, 0, cursor.width, cursor.height) + if self.playing: + cursor_left = cursor_pos - cursor.width // 2 + cursor.draw(context, cursor_left, 0, cursor.width, cursor.height) class ProgressTimeline(widgetset.Background): def __init__(self): @@ -399,13 +408,12 @@ self.time = ProgressTime() self.slider.connect('pressed', self.on_slider_pressed) self.slider.connect('moved', self.on_slider_moved) + self.slider.connect('changed', self.on_slider_moved) self.slider.connect('released', self.on_slider_released) self.remaining_time = ProgressTimeRemaining() self.remaining_time.connect('clicked', self.on_remaining_clicked) - self.active = widgetutil.ThreeImageSurface('progress_timeline') - self.inactive = widgetutil.ThreeImageSurface( - 'progress_timeline_inactive') + self.surface = widgetutil.ThreeImageSurface('progress_timeline') vbox = widgetset.VBox() vbox.pack_start(widgetutil.align_middle(self.info, top_pad=6)) @@ -428,30 +436,11 @@ def on_slider_released(self, slider): app.playback_manager.resume() - def set_duration(self, duration): - self.slider.set_range(0, duration) - self.slider.set_increments(5, min(20, duration / 20.0)) - self.remaining_time.set_duration(duration) - - def set_current_time(self, current_time): - self.slider.set_value(current_time) - self.time.set_current_time(current_time) - self.remaining_time.set_current_time(current_time) - def size_request(self, layout): return -1, 46 def draw(self, context, layout): - if self.get_window().is_active(): - surface = self.active - else: - surface = self.inactive - # XXX bz:17136 override the color: Windows things we are inactive - # embedded web browser is displayed. 
- surface = self.active - - surface.draw(context, 0, 0, context.width, 46) - + self.surface.draw(context, 0, 0, context.width, 46) class VolumeSlider(widgetset.CustomSlider): def __init__(self): @@ -538,28 +527,7 @@ self.selected_file = None def on_title_clicked(self, button): - if not self.selected_tab_list or not self.selected_file: - return - if app.playback_manager.is_playing and not ( - app.playback_manager.is_playing_audio or - app.playback_manager.detached_window): - # playing a video in the app, so don't bother - return - try: - tab_iter = self.selected_tab_list.iter_map[self.selected_tabs[0].id] - except KeyError: - #17495 - item may be from a tab that no longer exists - self.selected_tab_list = self.selected_tabs = None - return - app.tabs._select_from_tab_list(self.selected_tab_list.type, tab_iter) - display = app.display_manager.current_display - if hasattr(display, 'controller'): - controller = display.controller - controller.scroll_to_item(self.selected_file, manual=True, recenter=True) - else: - #17488 - GuideDisplay doesn't have a controller - logging.debug("current display doesn't have a controller - " - "can't switch to") + app.playback_manager.goto_currently_playing() def handle_new_selection(self, has_playable): self.controls.handle_new_selection(has_playable) diff -Nru miro-4.0.4/lib/frontends/widgets/widgetconst.py miro-6.0/lib/frontends/widgets/widgetconst.py --- miro-4.0.4/lib/frontends/widgets/widgetconst.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/widgetconst.py 2013-04-05 16:02:42.000000000 +0000 @@ -58,6 +58,7 @@ u'name': _('Name'), u'artist': _('Artist'), u'album': _('Album'), + u'multi-row-album': _('Album/Artist'), u'track': _('Track'), u'year': _('Year'), u'genre': _('Genre'), diff -Nru miro-4.0.4/lib/frontends/widgets/widgetsapi.py miro-6.0/lib/frontends/widgets/widgetsapi.py --- miro-4.0.4/lib/frontends/widgets/widgetsapi.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/widgetsapi.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,56 @@ +# Miro - an RSS based video player application +# Copyright (C) 2010, 2011 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. 
+ +"""miro.frontends.widgets.widgetsapi -- Widgets API for extensions. + +This module implements the extension API for doing tasks specific to the +widgets frontend +""" + +__all__ = [ + "APIVERSION", + "ExtensionItemFilter", + ] + +from miro.frontends.widgets import itemfilter + +# increase this by 1 every time the API changes +APIVERSION = 0 + +class ExtensionItemFilter(itemfilter.ItemFilter): + """Base class for item filters coming from extensions. + + Item filters are used to filter out items from item lists. They are + displayed as buttons on top of the list. + + Right now this class has the exact same interface as + itemfilter.ItemFilter. ItemFilter may change at any point. We'll try to + keep this one the same. + """ + pass diff -Nru miro-4.0.4/lib/frontends/widgets/widgetstatestore.py miro-6.0/lib/frontends/widgets/widgetstatestore.py --- miro-4.0.4/lib/frontends/widgets/widgetstatestore.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/widgetstatestore.py 2013-04-05 16:02:42.000000000 +0000 @@ -41,6 +41,7 @@ # supposed to be. The CUSTOM_VIEW has to be caught in widgetstatestore # and replaced with the proper view for the given situation. CUSTOM_VIEW = 2 + ALBUM_VIEW = 3 LIST_VIEW = 1 STANDARD_VIEW = 0 DEFAULT_VIEW_TYPE = { @@ -49,6 +50,7 @@ u'device-video': STANDARD_VIEW, u'downloading': STANDARD_VIEW, u'feed': CUSTOM_VIEW, + u'feed-folder': CUSTOM_VIEW, u'folder-contents': STANDARD_VIEW, u'music': LIST_VIEW, u'others': LIST_VIEW, @@ -57,17 +59,7 @@ u'sharing': LIST_VIEW, u'videos': STANDARD_VIEW, } - FILTER_VIEW_ALL = 0 - FILTER_UNWATCHED = 1 - FILTER_NONFEED = 2 - FILTER_DOWNLOADED = 4 - FILTER_VIEW_VIDEO = 8 - FILTER_VIEW_AUDIO = 16 - FILTER_VIEW_MOVIES = 32 - FILTER_VIEW_SHOWS = 64 - FILTER_VIEW_CLIPS = 128 - FILTER_VIEW_PODCASTS = 256 - DEFAULT_DISPLAY_FILTERS = FILTER_VIEW_ALL + DEFAULT_DISPLAY_FILTERS = ['all'] DEFAULT_COLUMN_WIDTHS = { u'album': 100, u'artist': 110, @@ -78,6 +70,7 @@ u'feed-name': 70, u'file-type': 70, u'genre': 65, + u'multi-row-album': 200, u'kind': 70, u'length': 60, u'name': 200, @@ -97,6 +90,7 @@ u'tab': u'feed-name', # all-feeds u'downloading': u'name', u'feed': u'-date', + u'feed-folder': u'-date', u'folder-contents': u'artist', u'music': u'artist', u'others': u'name', @@ -107,6 +101,8 @@ DEFAULT_SORT_COLUMN[u'device-audio'] = DEFAULT_SORT_COLUMN[u'music'] DEFAULT_SORT_COLUMN[u'device-video'] = DEFAULT_SORT_COLUMN[u'videos'] DEFAULT_SORT_COLUMN[u'sharing'] = DEFAULT_SORT_COLUMN[u'videos'] + # DEFAULT_COLUMNS stores the default columns when using list view for + # different display types. 
We tweak it in _calc_default_columns() DEFAULT_COLUMNS = { u'videos': [u'state', u'name', u'length', u'date-added', u'feed-name', @@ -124,6 +120,8 @@ u'size', u'date', u'status'], u'feed': [u'state', u'name', u'length', u'size', u'date', u'status'], + u'feed-folder': + [u'state', u'name', u'length', u'size', u'date', u'status'], u'search': [u'state', u'name', u'description', u'status', u'file-type', u'feed-name', u'date'], @@ -149,7 +147,7 @@ u'file-type']) AVAILABLE_COLUMNS['search'] |= set([u'size', u'rating']) AVAILABLE_COLUMNS['videos'] |= set([u'description', u'date', u'rating', - u'file-type', u'show', u'kind', u'status']) + u'file-type', u'show', u'kind', u'status', u'genre']) AVAILABLE_COLUMNS[u'device-audio'] = AVAILABLE_COLUMNS[u'music'].copy() AVAILABLE_COLUMNS[u'device-video'] = AVAILABLE_COLUMNS[u'videos'].copy() AVAILABLE_COLUMNS[u'feed'] = ((AVAILABLE_COLUMNS['music'] | @@ -165,7 +163,7 @@ # This needs to be handled here, though analogous properties (e.g. # NO_RESIZE_COLUMNS) are handled in widgetconst. #16783 refers to fixing the # WSS mess - these shouldn't be sets of strings anyway. - MANDATORY_SORTERS = frozenset([u'name']) + MANDATORY_SORTERS = frozenset([u'name', u'multi-row-album']) def __init__(self): self.displays = {} @@ -223,7 +221,7 @@ else: view = WidgetStateStore.DEFAULT_VIEW_TYPE[display_type] if view == WidgetStateStore.CUSTOM_VIEW: - if display_type == u'feed': + if display_type in (u'feed', u'feed-folder'): view = app.config.get(prefs.PODCASTS_DEFAULT_VIEW) else: app.widgetapp.handle_soft_failure("Getting default view", @@ -239,16 +237,22 @@ self._save_display_state(display_type, display_id) def get_filters(self, display_type, display_id): + """Get the active filters + + :returns: set of active filter strings + """ display = self._get_display(display_type, display_id) if display.active_filters is None: - return WidgetStateStore.DEFAULT_DISPLAY_FILTERS + return set(WidgetStateStore.DEFAULT_DISPLAY_FILTERS) return display.active_filters - def toggle_filters(self, display_type, display_id, filter_): - filters = self.get_filters(display_type, display_id) - filters = WidgetStateStore.toggle_filter(filters, filter_) + def set_filters(self, display_type, display_id, active_filters): + """Set the active filters + + active_filters should be a set of filter key strings. + """ display = self._get_display(display_type, display_id) - display.active_filters = filters + display.active_filters = active_filters self._save_display_state(display_type, display_id) def set_shuffle(self, display_type, display_id, shuffle): @@ -311,58 +315,81 @@ display.last_played_item_id = id_ self._save_display_state(display_type, display_id) - def get_sorts_enabled(self, display_type, display_id): - display = self._get_display(display_type, display_id) - columns = display.list_view_columns - if columns is None: - columns = WidgetStateStore.DEFAULT_COLUMNS[display_type] - available = WidgetStateStore.AVAILABLE_COLUMNS[display_type] - # If a column used to be disableable for a display but now is not, - # re-add it at the end of the list: - mandatory = available & WidgetStateStore.MANDATORY_SORTERS - columns.extend(mandatory.difference(columns)) - # If a column used to be enableable for a display but now is not, +# ViewState properties for list and album view + + def get_columns_enabled(self, display_type, display_id, view_type): + view = self._get_view(display_type, display_id, view_type) + # Copy the column list.
We may modify it in _add_manditory_columns() + # and that shouldn't change the source value. + if view.columns_enabled is not None: + columns = list(view.columns_enabled) + else: + columns = self._calc_default_columns(display_type, view_type) + available = WidgetStateStore.get_columns_available(display_type, + display_id, view_type) + self._add_manditory_columns(view_type, columns) + # If a column used to be enableable for a view but now is not, # filter it out: return [x for x in columns if x in available] - def set_sorts_enabled(self, display_type, display_id, enabled): - display = self._get_display(display_type, display_id) - display.list_view_columns = enabled - self._save_display_state(display_type, display_id) + def _calc_default_columns(self, display_type, view_type): + columns = list(WidgetStateStore.DEFAULT_COLUMNS[display_type]) + if view_type == self.get_album_view_type(): + # Remove columns that contain info in the album/artist column. + filter_out = (u'artist', u'album', u'track', u'feed-name') + columns = [n for n in columns if n not in filter_out] + return columns + + def _add_manditory_columns(self, view_type, columns): + """Add mandatory columns to the list of columns enabled.""" + # We currently handle name and multi-row-album. Add an assertion so + # that if MANDATORY_SORTERS changes without this code changing, we'll + # see a crash report. + + assert (WidgetStateStore.MANDATORY_SORTERS == + set((u'name', u'multi-row-album'))) + if u'name' not in columns: + columns.append(u'name') + if (view_type == self.get_album_view_type() and + u'multi-row-album' not in columns): + columns.insert(0, u'multi-row-album') - def toggle_sort(self, display_type, display_id, column): - columns = self.get_sorts_enabled(display_type, display_id) + def set_columns_enabled(self, display_type, display_id, view_type, + enabled): + view = self._get_view(display_type, display_id, view_type) + view.columns_enabled = enabled + self._save_view_state(display_type, display_id, view_type) + + def toggle_column_enabled(self, display_type, display_id, view_type, + column): + columns = self.get_columns_enabled(display_type, display_id, view_type) if column in columns: columns.remove(column) else: columns.append(column) - self.set_sorts_enabled(display_type, display_id, columns) - -# ViewState properties that are only valid for specific view_types: + self.set_columns_enabled(display_type, display_id, view_type, columns) def get_column_widths(self, display_type, display_id, view_type): - if WidgetStateStore.is_list_view(view_type): - display = self._get_display(display_type, display_id) - column_widths = display.list_view_widths or {} - columns = self.get_sorts_enabled(display_type, display_id) - for name in columns: - default = WidgetStateStore.DEFAULT_COLUMN_WIDTHS[name] - column_widths.setdefault(name, default) - return column_widths.copy() - else: - raise ValueError() + # fetch dict containing the widths from the DisplayInfo + view_info = self._get_view(display_type, display_id, view_type) + column_widths = view_info.column_widths + if column_widths is None: + column_widths = {} + # get widths for each enabled column + columns = self.get_columns_enabled(display_type, display_id, + view_type) + for name in columns: + default = WidgetStateStore.DEFAULT_COLUMN_WIDTHS[name] + column_widths.setdefault(name, default) + return column_widths.copy() def update_column_widths(self, display_type, display_id, view_type, widths): - if WidgetStateStore.is_list_view(view_type): - display = self._get_display(display_type,
display_id) - if display.list_view_widths is None: - display.list_view_widths = self.get_column_widths( - display_type, display_id, view_type) - display.list_view_widths.update(widths) - self._save_display_state(display_type, display_id) - else: - raise ValueError() + view_info = self._get_view(display_type, display_id, view_type) + if view_info.column_widths is None: + view_info.column_widths = {} + view_info.column_widths.update(widths) + self._save_view_state(display_type, display_id, view_type) # ViewState properties that are global to the whole frontend def get_item_details_expanded(self, view_type): @@ -405,8 +432,12 @@ # static properties of a display_type: @staticmethod - def get_columns_available(display_type): - return WidgetStateStore.AVAILABLE_COLUMNS[display_type] + def get_columns_available(display_type, display_id, view_type): + available = WidgetStateStore.AVAILABLE_COLUMNS[display_type].copy() + # copy the set, since we may modify it before returning it + if view_type == WidgetStateStore.get_album_view_type(): + available.add(u'multi-row-album') + return available # static properties of a view_type: @@ -418,79 +449,9 @@ def is_standard_view(view_type): return view_type == WidgetStateStore.STANDARD_VIEW -# manipulate a filter set: - - @staticmethod - def toggle_filter(filters, filter_): - if filter_ == WidgetStateStore.FILTER_VIEW_ALL: - return filter_ - elif filter_ in (WidgetStateStore.FILTER_VIEW_VIDEO, - WidgetStateStore.FILTER_VIEW_AUDIO): - exclude = ~(WidgetStateStore.FILTER_VIEW_VIDEO | - WidgetStateStore.FILTER_VIEW_AUDIO) - if not ((filters & WidgetStateStore.FILTER_DOWNLOADED) or - (filters & WidgetStateStore.FILTER_UNWATCHED)): - # only downloaded items have a file type - filters = filters | WidgetStateStore.FILTER_DOWNLOADED - return (filters & exclude) | filter_ - elif filter_ in (WidgetStateStore.FILTER_UNWATCHED, - WidgetStateStore.FILTER_DOWNLOADED): - exclude = ~(WidgetStateStore.FILTER_UNWATCHED | - WidgetStateStore.FILTER_DOWNLOADED) - return (filters & exclude) | filter_ - elif filter_ in (WidgetStateStore.FILTER_VIEW_MOVIES, - WidgetStateStore.FILTER_VIEW_SHOWS, - WidgetStateStore.FILTER_VIEW_CLIPS, - WidgetStateStore.FILTER_VIEW_PODCASTS): - exclude = ~(WidgetStateStore.FILTER_VIEW_MOVIES | - WidgetStateStore.FILTER_VIEW_SHOWS | - WidgetStateStore.FILTER_VIEW_CLIPS | - WidgetStateStore.FILTER_VIEW_PODCASTS) - return (filters & exclude) | filter_ - else: - return filters | filter_ - -# static properties of a filter combination: - - @staticmethod - def is_view_all_filter(filters): - return filters == WidgetStateStore.FILTER_VIEW_ALL - - @staticmethod - def is_view_video_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_VIDEO) - - @staticmethod - def is_view_audio_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_AUDIO) - @staticmethod - def is_view_movies_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_MOVIES) - - @staticmethod - def is_view_shows_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_SHOWS) - - @staticmethod - def is_view_clips_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_CLIPS) - - @staticmethod - def is_view_podcasts_filter(filters): - return bool(filters & WidgetStateStore.FILTER_VIEW_PODCASTS) - - @staticmethod - def has_unwatched_filter(filters): - return bool(filters & WidgetStateStore.FILTER_UNWATCHED) - - @staticmethod - def has_non_feed_filter(filters): - return bool(filters & WidgetStateStore.FILTER_NONFEED) - - @staticmethod 
- def has_downloaded_filter(filters): - return bool(filters & WidgetStateStore.FILTER_DOWNLOADED) + def is_album_view(view_type): + return view_type == WidgetStateStore.ALBUM_VIEW # static properties: @@ -530,44 +491,12 @@ def get_standard_view_type(): return WidgetStateStore.STANDARD_VIEW - # filters: - - @staticmethod - def get_view_all_filter(): - return WidgetStateStore.FILTER_VIEW_ALL - - @staticmethod - def get_view_video_filter(): - return WidgetStateStore.FILTER_VIEW_VIDEO - - @staticmethod - def get_view_audio_filter(): - return WidgetStateStore.FILTER_VIEW_AUDIO - - @staticmethod - def get_view_movies_filter(): - return WidgetStateStore.FILTER_VIEW_MOVIES - - @staticmethod - def get_view_shows_filter(): - return WidgetStateStore.FILTER_VIEW_SHOWS - - @staticmethod - def get_view_clips_filter(): - return WidgetStateStore.FILTER_VIEW_CLIPS - - @staticmethod - def get_view_podcasts_filter(): - return WidgetStateStore.FILTER_VIEW_PODCASTS - - @staticmethod - def get_unwatched_filter(): - return WidgetStateStore.FILTER_UNWATCHED - @staticmethod - def get_non_feed_filter(): - return WidgetStateStore.FILTER_NONFEED + def get_album_view_type(): + return WidgetStateStore.ALBUM_VIEW @staticmethod - def get_downloaded_filter(): - return WidgetStateStore.FILTER_DOWNLOADED + def get_all_view_types(): + return (WidgetStateStore.LIST_VIEW, + WidgetStateStore.STANDARD_VIEW, + WidgetStateStore.ALBUM_VIEW) diff -Nru miro-4.0.4/lib/frontends/widgets/widgetutil.py miro-6.0/lib/frontends/widgets/widgetutil.py --- miro-4.0.4/lib/frontends/widgets/widgetutil.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/frontends/widgets/widgetutil.py 2013-04-05 16:02:42.000000000 +0000 @@ -434,6 +434,7 @@ self.set_can_focus(False) self.set_cursor(widgetconst.CURSOR_POINTING_HAND) self.title = title + self.text_size = self.TEXT_SIZE self.surface = ThreeImageSurface(basename) self.surface_active = ThreeImageSurface(basename + '_active') self.surface_inactive = ThreeImageSurface(basename + '_inactive') @@ -442,12 +443,15 @@ self.title = text self.invalidate_size_request() + def set_text_size(self, size): + self.text_size = size + def _get_textbox(self, layout): if self.get_disabled(): layout.set_text_color(self.DISABLED_TEXT_COLOR) else: layout.set_text_color(self.TEXT_COLOR) - layout.set_font(self.TEXT_SIZE) + layout.set_font(self.text_size) return layout.textbox(self.title) def size_request(self, layout): @@ -462,7 +466,8 @@ else: surface = self.surface - surface.draw(context, 0, 0, context.width) + surface.draw(context, 0, int((context.height - surface.height) / 2), + context.width) textbox = self._get_textbox(layout) width, height = textbox.get_size() textbox.draw(context, int((context.width - width) / 2), diff -Nru miro-4.0.4/lib/gtcache.py miro-6.0/lib/gtcache.py --- miro-4.0.4/lib/gtcache.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/gtcache.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,7 +35,6 @@ import locale from miro import app from miro import prefs -import miro.plat.utils _gtcache = None _translation = None @@ -83,7 +82,11 @@ global _translation global codeset - if not miro.plat.utils.locale_initialized(): + # need to import here rather than at the module level to avoid a circular + # import -- plat.utils might import gtcache. + from miro.plat.utils import locale_initialized + + if not locale_initialized(): raise Exception, "locale not initialized" if languages is None: @@ -101,6 +104,12 @@ except locale.Error: print "gtcache.init: setlocale failed.
setting locale to 'C'" locale.setlocale(locale.LC_ALL, 'C') + try: + codeset = locale.getlocale()[1] + except (TypeError, ValueError): + print "gtcache.init: getlocale failed. Reinit with 'C'" + locale.setlocale(locale.LC_ALL, 'C') + codeset = locale.getlocale()[1] # bz:17713 - convert to str in utf-8 encoding before trying to use. if languages: @@ -113,8 +122,6 @@ codeset="UTF-8", fallback=True) - codeset = locale.getlocale()[1] - _gtcache = {} def declarify(text): @@ -134,6 +141,42 @@ return text.split("|")[-1] return text +class gettext_lazy(object): + def __init__(self, text, values=None): + self.text = text + self.values = values + + def __unicode__(self): + return gettext(self.text, self.values) + + def __str__(self): + return str(unicode(self)) + + def __repr__(self): + return repr(unicode(self)) + + def __nonzero__(self): + return bool(self.text) + + def __len__(self): + return len(unicode(self)) + + def __iter__(self): + return iter(unicode(self)) + + def __eq__(self, other): + return unicode(self) == other + + def __ne__(self, other): + return unicode(self) != other + + def __hash__(self): + return hash(unicode(self)) + + def __getattr__(self, attr): + return getattr(unicode(self), attr) + + def gettext(text, values=None): """Returns the translated form of the given text. If values are provided, expands the string with the given values. @@ -176,10 +219,10 @@ s = s % values return s - except (KeyError, ValueError): + except (KeyError, ValueError), e: import logging - logging.warn("gtcache.gettext: translation has bad formatting " - "characters. returning english form. '%s'", text) + logging.warn("gtcache.gettext: translation problem '%s'. " + "returning english form. '%s'", e, text) _gtcache[text] = text return text % values diff -Nru miro-4.0.4/lib/httpauthtools.py miro-6.0/lib/httpauthtools.py --- miro-4.0.4/lib/httpauthtools.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/httpauthtools.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,7 +34,10 @@ """ import base64 -import json +try: + import simplejson as json +except ImportError: + import json import logging import os import re diff -Nru miro-4.0.4/lib/httpclient.py miro-6.0/lib/httpclient.py --- miro-4.0.4/lib/httpclient.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/httpclient.py 2013-04-05 16:02:42.000000000 +0000 @@ -164,6 +164,11 @@ NetworkError.__init__(self, _('Invalid URL'), _('"%(url)s" is not a valid URL', {"url": url})) +class InvalidRedirect(NetworkError): + def __init__(self, url): + NetworkError.__init__(self, _('Invalid Redirect'), + _('"%(url)s" is not a valid redirect URL', {"url": url})) + class UnknownHostError(NetworkError): """A file: URL doesn't exist""" def __init__(self, host): @@ -203,10 +208,12 @@ """ def __init__(self, url, etag=None, modified=None, resume=False, - post_vars=None, post_files=None, write_file=None): + post_vars=None, post_files=None, write_file=None, + extra_headers=None): self.url = url self.etag = etag self.modified = modified + self.extra_headers = extra_headers self.resume = resume self.post_vars = post_vars self.post_files = post_files @@ -245,6 +252,8 @@ out_headers['etag'] = self.etag if self.modified is not None: out_headers['If-Modified-Since'] = self.modified + if self.extra_headers is not None: + out_headers.update(self.extra_headers) handle = self._init_handle() self._setup_post(handle, out_headers) @@ -466,15 +475,14 @@ elif self.options.write_file is not None: if not self.saw_head_success: # try a HEAD request first to see if the request will work. 
- # This avoids writing error responses to our file. It also - # avoids the issue of RESUME_FROM being applied to the error - # response. + # It avoids the issue of RESUME_FROM being applied to the + # error response. self.handle.setopt(pycurl.NOBODY, 1) self.trying_head_request = True else: self.handle.setopt(pycurl.URL, self.last_url) self._open_file() - self.handle.setopt(pycurl.WRITEDATA, self._filehandle) + self.handle.setopt(pycurl.WRITEFUNCTION, self._write_file) elif self.content_check_callback is not None: self.handle.setopt(pycurl.WRITEFUNCTION, self._call_content_check) else: @@ -485,6 +493,10 @@ self.handle.setopt(pycurl.VERBOSE, 1) self.handle.setopt(pycurl.DEBUGFUNCTION, self.debug_func) + def _write_file(self, buf): + if self.check_response_code(self.status_code): + self._filehandle.write(buf) + def _lookup_auth(self): """Lookup existing HTTP passwords to use. @@ -552,7 +564,7 @@ return False - def debug_func(self, type, msg): + def debug_func(self, typ, msg): type_map = { pycurl.INFOTYPE_HEADER_IN: 'header-in', pycurl.INFOTYPE_HEADER_OUT: 'header-out', @@ -560,7 +572,7 @@ pycurl.INFOTYPE_DATA_OUT: 'data-out', pycurl.INFOTYPE_TEXT: 'text', } - type_str = type_map.get(type, type) + type_str = type_map.get(typ, typ) logging.warn("libcurl debug (%s) %r", type_str, msg) @@ -589,6 +601,15 @@ self.status_code = None elif 'location' in self.headers: # doing a redirect, clear out the headers + redirect_url = self.headers['location'] + scheme, _, _, _ = download_utils.parse_url(redirect_url) + if not scheme: + logging.warn("%s: Non-absolute redirect URL: %s", + self.options.url, redirect_url) + elif scheme not in ('http', 'https'): + self.cancel(remove_file=True) + self.call_errback(InvalidRedirect(redirect_url)) + return self.headers = {} elif not self.headers_finished: curl_manager.call_after_perform(self.on_headers_finished) @@ -677,16 +698,28 @@ self.saw_head_success = True elif info['status'] == 401: self.handle_http_auth() - elif info['status'] in (405, 501): - if self.trying_head_request: - # server didn't like the HEAD request, so just try the GET - self._send_new_request() - self.saw_head_success = True - else: - self.call_errback(UnexpectedStatusCode(info['status'])) elif info['status'] == 407: self.handle_proxy_auth() - elif info['status'] >= 500 and info['status'] < 600: + elif self.trying_head_request: + # The response code wasn't what we expected, but we are doing + # a HEAD request. + # + # Servers have "inventive" strategies for dealing with HEAD + # requests. I don't really feel like handling them one by + # one since you never know what comes through, so just assume + # things are okay and let the actual download handler deal with + # failure. + # + # We handle this before all of the error handling to make sure + # we have a chance to catch this. 
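Together with the lines that follow in this hunk, the status dispatch ends up ordered: HTTP auth (401), proxy auth (407), a blanket retry-as-GET for any other failure of the exploratory HEAD request, and only then the possibly-temporary bucket for 5xx codes and Vimeo's 404s. A condensed, hedged model of that ordering (not the real method; 2xx handling is omitted):

def classify_response(status, trying_head_request, url):
    # condensed model of CurlTransfer's status dispatch, not the
    # real method
    if status == 401:
        return 'http-auth'
    if status == 407:
        return 'proxy-auth'
    if trying_head_request:
        # any other HEAD failure: assume things are okay and retry
        # the transfer as a regular GET
        return 'retry-as-get'
    if 500 <= status < 600 or (status == 404 and url.startswith(
            ('http://vimeo.com', 'http://www.vimeo.com'))):
        return 'possibly-temporary'   # see #19066 for the Vimeo case
    return 'error'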
+ self._send_new_request() + self.saw_head_success = True + elif ((info['status'] >= 500 and info['status'] < 600) or + (info['status'] == 404 and + self.last_url.startswith(('http://vimeo.com', + 'http://www.vimeo.com')))): + # 500 errors are hopefully temporary, as are 404s from Vimeo + # (#19066) logging.info("httpclient: possibly temporary http error: HTTP %s", info['status']) self.call_errback(PossiblyTemporaryError(info['status'])) @@ -774,13 +807,13 @@ def call_callback(self, info): self._cleanup_filehandle() - eventloop.add_idle(self.callback, 'curl transfer callback', - args=(info,)) + msg = 'curl transfer callback: %s' % (self.callback,) + eventloop.add_idle(self.callback, msg, args=(info,)) def call_errback(self, error): self._cleanup_filehandle() - eventloop.add_idle(self.errback, 'curl transfer errback', - args=(error,)) + msg = 'curl transfer errback: %s' % (self.errback,) + eventloop.add_idle(self.errback, msg, args=(error,)) def _cleanup_filehandle(self): if self._filehandle is not None: @@ -947,12 +980,12 @@ try: self.pop_transfer(handle).on_finished() except StandardError: - logging.stacktrace("Error calling on_finished()") + logging.warning("Error calling on_finished()", exc_info=True) for handle, code, message in errors: try: self.pop_transfer(handle).on_error(code, handle) except StandardError: - logging.stacktrace("Error calling on_error()") + logging.warning("Error calling on_error()", exc_info=True) def pop_transfer(self, handle): transfer = self.transfer_map.pop(handle) @@ -991,7 +1024,7 @@ def grab_url(url, callback, errback, header_callback=None, content_check_callback=None, write_file=None, etag=None, modified=None, default_mime_type=None, resume=False, post_vars=None, - post_files=None): + post_files=None, extra_headers=None): """Quick way to download a network resource grab_url is a simple interface to the HTTPClient class. 
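grab_url's new extra_headers argument rides through TransferOptions and is merged into the outgoing headers via out_headers.update() (see the TransferOptions hunk earlier in this file). A minimal caller sketch; the URL and header here are placeholders, purely illustrative:

from miro import httpclient

def on_success(info):
    print 'fetched %d bytes' % len(info['body'])

def on_error(error):
    print 'fetch failed: %s' % error

# placeholder URL and header
httpclient.grab_url('http://example.com/feed.xml', on_success, on_error,
                    extra_headers={'X-Client': 'miro'})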
@@ -1012,6 +1045,7 @@ :param post_vars: dictionary of variables to send as POST data :param post_files: files to send as POST data (see xhtmltools.multipart_encode for the format) + :param extra_headers: an optional dictionary of extra headers to send The callback will be passed a dictionary that contains all the HTTP headers, as well as the following keys: @@ -1035,7 +1069,7 @@ return _grab_file_url(url, callback, errback, default_mime_type) else: options = TransferOptions(url, etag, modified, resume, post_vars, - post_files, write_file) + post_files, write_file, extra_headers) transfer = CurlTransfer(options, callback, errback, header_callback, content_check_callback) transfer.start() diff -Nru miro-4.0.4/lib/iconcache.py miro-6.0/lib/iconcache.py --- miro-4.0.4/lib/iconcache.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/iconcache.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,9 +29,9 @@ import os import logging +import collections from miro import httpclient -from datastructures import Fifo from miro import eventloop from miro.database import DDBObject, ObjectNotFoundError from miro.download_utils import next_free_filename, get_file_url_path @@ -45,10 +45,15 @@ class IconCacheUpdater: def __init__(self): - self.idle = Fifo() - self.vital = Fifo() + self.idle = collections.deque() + self.vital = collections.deque() self.running_count = 0 self.in_shutdown = False + self.started = False + + def start_updates(self): + self.started = True + self.run_next_update() def request_update(self, item, is_vital=False): if is_vital: @@ -56,24 +61,27 @@ if (item.filename and fileutil.access(item.filename, os.R_OK) and item.url == item.dbItem.get_thumbnail_url()): is_vital = False - if self.running_count < RUNNING_MAX: + if self.started and self.running_count < RUNNING_MAX: eventloop.add_idle(item.request_icon, "Icon Request") self.running_count += 1 else: if is_vital: - self.vital.enqueue(item) + self.vital.append(item) else: - self.idle.enqueue(item) + self.idle.append(item) def update_finished(self): if self.in_shutdown: self.running_count -= 1 return + self.run_next_update() + + def run_next_update(self): if len(self.vital) > 0: - item = self.vital.dequeue() + item = self.vital.popleft() elif len(self.idle) > 0: - item = self.idle.dequeue() + item = self.idle.popleft() else: self.running_count -= 1 return @@ -82,16 +90,12 @@ @eventloop.as_idle def clear_vital(self): - self.vital = Fifo() + self.vital = collections.deque() @eventloop.as_idle def shutdown(self): self.in_shutdown = True -# FIXME - should create an IconCacheUpdater at startup, NOT at -# module import time. -icon_cache_updater = IconCacheUpdater() - class IconCache(DDBObject): def setup_new(self, dbItem): self.etag = None @@ -152,7 +156,7 @@ self.dbItem.confirm_db_thread() if self.removed: - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() return # Don't clear the cache on an error.
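With the Fifo class gone, IconCacheUpdater above is just two collections.deque queues plus a started gate: requests run immediately only once start_updates() has been called and fewer than RUNNING_MAX are in flight, and vital requests always drain before idle ones. A condensed standalone model of that queueing (callables stand in for item.request_icon; it mirrors the upstream structure but is not the real class):

import collections

RUNNING_MAX = 3  # placeholder; the real module defines its own cap

class MiniIconUpdater(object):
    """Condensed model of IconCacheUpdater's queueing (not the real class)."""

    def __init__(self):
        self.idle = collections.deque()
        self.vital = collections.deque()
        self.running_count = 0
        self.started = False

    def start_updates(self):
        # nothing runs until startup explicitly opens the gate
        self.started = True
        self.run_next_update()

    def request_update(self, job, is_vital=False):
        # run immediately only when started and below the concurrency cap
        if self.started and self.running_count < RUNNING_MAX:
            self.running_count += 1
            job()
        elif is_vital:
            self.vital.append(job)
        else:
            self.idle.append(job)

    def update_finished(self):
        self.run_next_update()

    def run_next_update(self):
        # the vital queue always drains before the idle queue; the
        # decrement pairs with the running slot that just freed up
        if self.vital:
            self.vital.popleft()()
        elif self.idle:
            self.idle.popleft()()
        else:
            self.running_count -= 1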
@@ -165,13 +169,13 @@ if self.needsUpdate: self.needsUpdate = False self.request_update(True) - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() def update_icon_cache(self, url, info): self.dbItem.confirm_db_thread() if self.removed: - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() return needs_save = False @@ -205,25 +209,32 @@ tmp_filename = self.filename + ".part" else: tmp_filename = os.path.join(cachedir, info["filename"]) + ".part" - tmp_filename, output = next_free_filename(tmp_filename) - output.write(info["body"]) output.close() except IOError: self.remove_file(tmp_filename) return - - if self.filename: - filename = self.filename - self.filename = None - self.remove_file(filename) + except ValueError: + logging.warn('update_icon_cache: next_free_filename failed ' + '#1, candidate = %r', tmp_filename) + return filename = unicode(info["filename"]) filename = unicode_to_filename(filename, cachedir) filename = os.path.join(cachedir, filename) - filename, fp = next_free_filename(filename) needs_save = True + try: + filename, fp = next_free_filename(filename) + except ValueError: + logging.warn('update_icon_cache: next_free_filename failed ' + '#2, candidate = %r', filename) + return + + if self.filename: + filename = self.filename + self.filename = None + self.remove_file(filename) # we need to move the file here--so we close the file # pointer and then move the file. @@ -256,17 +267,17 @@ if self.needsUpdate: self.needsUpdate = False self.request_update(True) - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() def request_icon(self): if self.removed: - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() return self.dbItem.confirm_db_thread() if self.updating: self.needsUpdate = True - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() return if hasattr(self.dbItem, "get_thumbnail_url"): @@ -277,7 +288,7 @@ # Only verify each icon once per run unless the url changes if (url == self.url and self.filename and fileutil.access(self.filename, os.R_OK)): - icon_cache_updater.update_finished() + app.icon_cache_updater.update_finished() return self.updating = True @@ -296,7 +307,7 @@ if self.removed: return - icon_cache_updater.request_update(self, is_vital=is_vital) + app.icon_cache_updater.request_update(self, is_vital=is_vital) def setup_restored(self): self.removed = False @@ -325,7 +336,8 @@ obj, obj.icon_cache_id) else: icon_cache.dbItem = obj - icon_cache.request_update() + if not icon_cache.is_valid(): + icon_cache.request_update() return icon_cache return IconCache(obj) diff -Nru miro-4.0.4/lib/importmedia.py miro-6.0/lib/importmedia.py --- miro-4.0.4/lib/importmedia.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/importmedia.py 2013-04-05 16:02:42.000000000 +0000 @@ -102,5 +102,5 @@ parser.parse(os.path.join(path, ITUNES_XML_FILE)) music_path = file_path_xlat(handler.music_path) return music_path - except (IOError, xml.sax.SAXParseException): + except (ValueError, IOError, xml.sax.SAXParseException): pass diff -Nru miro-4.0.4/lib/iteminfocache.py miro-6.0/lib/iteminfocache.py --- miro-4.0.4/lib/iteminfocache.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/iteminfocache.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,292 +0,0 @@ -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can 
redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. - -"""``miro.iteminfocache`` -- Cache ItemInfo objects to speed up TrackItem -calls. - -TrackItem calls result in the largest DB queries and therefore should be -optimized well. This module handles remembering ItemInfo objects, both while -Miro is running and between runs. This results in 2 speedups: - - 1) We don't have to rebuild the ItemInfo objects, which takes a sizeable - amount of time. - - 2) We can avoid building Item objects by just fetching the ids of the - result set, then returning the ItemInfos for those ids. (Actually we also - need to build IconCache objects and Feed objects in order to create - ItemInfos). - -The general strategy is to just dumbly pickle the data and if we notice any -errors, or if the DB version changes, throw away the cache and rebuild. We -use a lot of direct SQL queries in this code, borrowing app.db's cursor. This -is slightly naughty, but results in fast peformance. -""" - -import cPickle -import itertools -import logging - -from miro import app -from miro import dbupgradeprogress -from miro import eventloop -from miro import itemsource -from miro import models -from miro import schema -from miro import signals - -class ItemInfoCache(signals.SignalEmitter): - """ItemInfoCache stores the latest ItemInfo objects for each item - - ItemInfo objects take a relatively long time to create, and they also - require that the Item object be loaded from the database, which also is - costly. This object allows us to shortcut both of those steps. The main - use of this is quickly handling the TrackItems message. - - ItemInfoCache also provides signals to track when ItemInfos get - added/change/removed from the system - - Signals: - added (obj, item_info) -- an item info object was created - changed (obj, item_info) -- an item info object was updated - removed (obj, item_info) -- an item info object was removed - """ - - # how often should we save cache data to the DB? (in seconds) - SAVE_INTERVAL = 30 - VERSION_KEY = 'item_info_cache_db_version' - - def __init__(self): - signals.SignalEmitter.__init__(self) - self.create_signal('added') - self.create_signal('changed') - self.create_signal('removed') - self.id_to_info = None - self.loaded = False - - def load(self): - # call _reset_changes() first. 
This way if we throw an exception - # inside this method, we're at least ready to shutdown cleanly - # (see #17729) - self._reset_changes() - did_failsafe_load = False - try: - self._quick_load() - except (StandardError, cPickle.UnpicklingError), e: - logging.warn("Error loading item info cache: %s", e) - if self.id_to_info is None: - self._failsafe_load() - # the current data is suspect, delete it - app.db.cursor.execute("DELETE FROM item_info_cache") - did_failsafe_load = True - app.db.set_variable(self.VERSION_KEY, self.version()) - self._save_dc = None - if did_failsafe_load: - # Need to save the cache data we just created - self._infos_added = self.id_to_info.copy() - self.schedule_save_to_db() - self.loaded = True - - def version(self): - return "%s-%s" % (schema.VERSION, - itemsource.DatabaseItemSource.VERSION) - - def _info_to_blob(self, info): - return buffer(cPickle.dumps(info)) - - def _blob_to_info(self, blob): - info = cPickle.loads(str(blob)) - # Download stats are no longer valid, reset them - info.leechers = None - info.seeders = None - info.up_rate = None - info.down_rate = None - if info.download_info is not None: - info.download_info.rate = 0 - info.download_info.eta = 0 - return info - - def _quick_load(self): - """Load ItemInfos using the item_info_cache table - - This is much faster than _failsafe_load(), but could result in errors. - """ - saved_db_version = app.db.get_variable(self.VERSION_KEY) - if saved_db_version == self.version(): - quick_load_values = {} - app.db.cursor.execute("SELECT id, pickle FROM item_info_cache") - for row in app.db.cursor: - quick_load_values[row[0]] = self._blob_to_info(row[1]) - # double check that we have the right number of rows - if len(quick_load_values) == self._db_item_count(): - self.id_to_info = quick_load_values - - def _db_item_count(self): - app.db.cursor.execute("SELECT COUNT(*) from item") - return app.db.cursor.fetchone()[0] - - def _failsafe_load(self): - """Load ItemInfos using Item objects. - - This is much slower than _quick_load(), but more robust. - """ - - self.id_to_info = {} - - count = itertools.count(1) - total_count = self._db_item_count() - for item in models.Item.make_view(): - info = itemsource.DatabaseItemSource._item_info_for(item) - self.id_to_info[info.id] = info - dbupgradeprogress.infocache_progress(count.next(), total_count) - - def schedule_save_to_db(self): - if self._save_dc is None: - self._save_dc = eventloop.add_timeout(self.SAVE_INTERVAL, - self.save, 'save item info cache') - - def _reset_changes(self): - self._infos_added = {} - self._infos_changed = {} - self._infos_deleted = set() - - def save(self): - app.db.cursor.execute("BEGIN TRANSACTION") - try: - self._run_inserts() - self._run_updates() - self._run_deletes() - except StandardError: - app.db.cursor.execute("ROLLBACK TRANSACTION") - raise - else: - app.db.cursor.execute("COMMIT TRANSACTION") - self._reset_changes() - - def _run_inserts(self): - if not self._infos_added: - return - sql = "INSERT INTO item_info_cache (id, pickle) VALUES (?, ?)" - values = ((id, self._info_to_blob(info)) for (id, - info) in self._infos_added.iteritems()) - app.db.cursor.executemany(sql, values) - - def _run_updates(self): - if not self._infos_changed: - return - sql = "UPDATE item_info_cache SET PICKLE=? WHERE id=?" 
- values = ((self._info_to_blob(info), id) for (id, info) in - self._infos_changed.iteritems()) - app.db.cursor.executemany(sql, values) - - def _run_deletes(self): - if not self._infos_deleted: - return - id_list = ', '.join(str(id_) for id_ in self._infos_deleted) - app.db.cursor.execute("DELETE FROM item_info_cache " - "WHERE id IN (%s)" % id_list) - - def all_infos(self): - """Return all ItemInfo objects that in the database. - - This method is optimized to avoid constructing Item objects. - """ - return self.id_to_info.values() - - def get_info(self, id_): - """Get the ItemInfo for a given item id""" - try: - return self.id_to_info[id_] - except KeyError: - app.controller.failed_soft("getting item info", - "KeyError: %d" % id_, with_exception=True) - item = models.Item.get_by_id(id_) - info = itemsource.DatabaseItemSource._item_info_for(item) - self.id_to_info[id_] = info - return info - - def item_created(self, item): - info = itemsource.DatabaseItemSource._item_info_for(item) - self.id_to_info[item.id] = info - if not self.loaded: - # bail out here if the item was created while we were doing a - # failsafe load - return - self._infos_added[item.id] = info - self.schedule_save_to_db() - self.emit("added", info) - - def item_changed(self, item): - if not self.loaded: - # signal_change() called in Item.setup_restored(), while we were - # doing a failsafe load - return - if item.id not in self.id_to_info: - # signal_change() called inside setup_new(), just ignor it - return - info = itemsource.DatabaseItemSource._item_info_for(item) - self.id_to_info[item.id] = info - if item.id in self._infos_added: - # no need to update if we insert the new values - self._infos_added[item.id] = info - else: - self._infos_changed[item.id] = info - self.schedule_save_to_db() - self.emit("changed", info) - - def item_removed(self, item): - if not self.loaded: - # Item.remove() called in Item.setup_restored() while we were - # doing a failsafe load - del self.id_to_info[item.id] - return - try: - info = self.id_to_info.pop(item.id) - except KeyError: - # We are upgrading from a version without an info cache, and an - # item was expired, but it didn't exist in the cache before. - logging.info('Item %s removed but no corresponding info exists', - item.id) - return - - if item.id in self._infos_added: - del self._infos_added[item.id] - # no need to delete if we don't add the row in the 1st place - elif item.id in self._infos_changed: - # no need to change, since we're going to delete it - del self._infos_changed[item.id] - self._infos_deleted.add(item.id) - else: - self._infos_deleted.add(item.id) - self.schedule_save_to_db() - self.emit("removed", info) - -def create_sql(): - """Get the SQL needed to create the tables we need for the ItemInfo cache - """ - return "CREATE TABLE item_info_cache(id INTEGER PRIMARY KEY, pickle BLOB)" diff -Nru miro-4.0.4/lib/item.py miro-6.0/lib/item.py --- miro-4.0.4/lib/item.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/item.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,26 +30,31 @@ """``miro.item`` -- Holds ``Item`` class and related things. 
""" +import collections from datetime import datetime, timedelta import locale import os.path import traceback import logging import re +import shutil +import time +import urlparse from miro.gtcache import gettext as _ from miro.util import (check_u, returns_unicode, check_f, returns_filename, quote_unicode_url, stringify, get_first_video_enclosure, entity_replace) -from miro.plat.utils import filename_to_unicode, unicode_to_filename +from miro.plat.utils import (filename_to_unicode, unicode_to_filename, + utf8_to_filename) from miro.download_utils import (clean_filename, next_free_filename, next_free_directory) -from miro.database import (DDBObject, ObjectNotFoundError, - DatabaseConstraintError) from miro.databasehelper import make_simple_get_set from miro import app +from miro import database +from miro import httpclient from miro import iconcache from miro import databaselog from miro import downloader @@ -57,8 +62,8 @@ from miro import prefs from miro.plat import resources from miro import util -from miro import moviedata from miro import filetypes +from miro import messages from miro import searchengines from miro import fileutil from miro import signals @@ -68,15 +73,9 @@ _charset = locale.getpreferredencoding() -KNOWN_MIME_TYPES = (u'audio', u'video') -KNOWN_MIME_SUBTYPES = ( - u'mov', u'wmv', u'mp4', u'mp3', - u'mpg', u'mpeg', u'avi', u'x-flv', - u'x-msvideo', u'm4v', u'mkv', u'm2v', u'ogg' - ) -MIME_SUBSITUTIONS = { - u'QUICKTIME': u'MOV', -} +# We don't mdp_state as of version 5.0, but we need to set this for +# DeviceItems so that older versions can read the device DB +MDP_STATE_RAN = 1 def _check_for_image(path, element): """Given an element (which is really a dict), traverses @@ -119,12 +118,13 @@ 'enclosure_size': self._calc_enclosure_size(), 'enclosure_type': self._calc_enclosure_type(), 'enclosure_format': self._calc_enclosure_format(), - 'releaseDateObj': self._calc_release_date(), + 'release_date': self._calc_release_date(), } def update_item(self, item): for key, value in self.data.items(): setattr(item, key, value) + item.calc_title() def compare_to_item(self, item): for key, value in self.data.items(): @@ -208,16 +208,16 @@ try: link = link['href'] except KeyError: - return u"" + return None if link is None: - return u"" + return None if isinstance(link, unicode): return link try: return link.decode('ascii', 'replace') except UnicodeDecodeError: return link.decode('ascii', 'ignore') - return u"" + return None def _calc_payment_link(self): try: @@ -227,10 +227,10 @@ try: return self.entry.payment_url.decode('ascii','replace') except (AttributeError, UnicodeDecodeError): - return u"" + return None def _calc_comments_link(self): - return self.entry.get('comments', u"") + return self.entry.get('comments', None) def _calc_url(self): if (self.first_video_enclosure is not None and @@ -238,7 +238,7 @@ url = self.first_video_enclosure['url'].replace('+', '%20') return quote_unicode_url(url) else: - return u'' + return None def _calc_enclosure_size(self): enc = self.first_video_enclosure @@ -257,34 +257,17 @@ def _calc_enclosure_format(self): enclosure = self.first_video_enclosure - if enclosure: - try: - extension = enclosure['url'].split('.')[-1] - extension = extension.lower().encode('ascii', 'replace') - except (SystemExit, KeyboardInterrupt): - raise - except KeyError: - extension = u'' - # Hack for mp3s, "mpeg audio" isn't clear enough - if extension.lower() == u'mp3': - return u'.mp3' - if enclosure.get('type'): - enc = enclosure['type'].decode('ascii', 'replace') - if 
"/" in enc: - mtype, subtype = enc.split('/', 1) - mtype = mtype.lower() - if mtype in KNOWN_MIME_TYPES: - format = subtype.split(';')[0].upper() - if mtype == u'audio': - format += u' AUDIO' - if format.startswith(u'X-'): - format = format[2:] - return (u'.%s' % - MIME_SUBSITUTIONS.get(format, format).lower()) - - if extension in KNOWN_MIME_SUBTYPES: - return u'.%s' % extension - return None + if enclosure is None: + return None + if 'url' in enclosure: + filename = urlparse.urlparse(enclosure['url']).path + else: + filename = None + if enclosure.get('type'): + mime_type = enclosure['type'].decode('ascii', 'replace') + else: + mime_type = None + return filetypes.calc_file_format(filename, mime_type) def _calc_release_date(self): # FIXME - this is awful. need to handle site-specific things @@ -293,7 +276,8 @@ # if this is not a youtube url, then we try to use # updated_parsed from either the enclosure or the entry - if "youtube.com" not in self._calc_url(): + url = self._calc_url() + if url is not None and "youtube.com" not in url: try: release_date = self.first_video_enclosure.updated_parsed except AttributeError: @@ -333,21 +317,168 @@ 'entry_title': title, 'thumbnail_url': None, 'entry_description': description, - 'link': u'', - 'payment_link': u'', - 'comments_link': u'', + 'link': None, + 'payment_link': None, + 'comments_link': None, 'url': resources.url(filename), 'enclosure_size': None, 'enclosure_type': None, 'enclosure_format': self._calc_enclosure_format(), - 'releaseDateObj': datetime.min, + 'release_date': datetime.min, } -class CheckMediaError(StandardError): - """Error when trying to call read_metadata on an item.""" - pass +class _ItemsForPathCountTracker(object): + """Helps Item implement have_item_for_path + + This class tracks how many items we have for a given path. It's optimized + pretty agressively. We call it many times when importing new files. + """ + + def get_count(self, path): + try: + counts = self.count_for_paths + except AttributeError: + counts = self._init_counts_for_paths() + return counts[self._count_key(path)] + + def _init_counts_for_paths(self): + # Use a raw DB query for this one, since we want to be as fast as + # possible + counts = collections.defaultdict(int) + app.db.cursor.execute("SELECT filename, COUNT(*) " + "FROM item " + "WHERE filename IS NOT NULL " + "GROUP BY LOWER(filename)") + counts.update((filename.lower(), count) + for filename, count in app.db.cursor) + self.count_for_paths = counts + return counts + + def _count_key(self, path): + # normalize paths so that they match whats in the database, and work + # case-insensitively + return filename_to_unicode(path).lower() + + def reset(self): + try: + del self.count_for_paths + except AttributeError: + pass -class Item(DDBObject, iconcache.IconCacheOwnerMixin, metadata.Store): + def add_item(self, item): + if item.filename is None: + return + try: + self.count_for_paths[self._count_key(item.filename)] += 1 + except AttributeError: + return # counts not created yet we can just ignore + + def remove_item(self, item): + if item.filename is None: + return + try: + self.count_for_paths[self._count_key(item.filename)] -= 1 + except AttributeError: + return # counts not created yet we can just ignore + +class ItemChangeTracker(signals.SignalEmitter): + """Tracks changes to items and send the ItemChanges message.""" + def __init__(self): + signals.SignalEmitter.__init__(self) + self.create_signal('item-changes') + self.reset() + # databases changes get commited during the event-finished signal. 
We + # use connect_after() to send our changes directly after that. + eventloop.connect_after('event-finished', self.after_event_finished) + + def reset(self): + self.added = set() + self.changed = set() + self.removed = set() + self.changed_columns = set() + self.dlstats_changed = False + self.playlists_changed = False + + def after_event_finished(self, event_loop, success): + self.send_changes() + + def send_changes(self): + if self.has_changes(): + m = messages.ItemChanges(self.added, self.changed, self.removed, + self.changed_columns, + self.dlstats_changed, + self.playlists_changed) + m.send_to_frontend() + self.reset() + self.emit('item-changes', m) + + def has_changes(self): + return (self.added or self.changed or self.removed or + self.dlstats_changed or self.playlists_changed) + + def on_item_added(self, item): + self.added.add(item.id) + + def on_item_changed(self, item): + self.changed.add(item.id) + self.changed_columns.update(item.changed_attributes) + + def on_item_removed(self, item): + self.removed.add(item.id) + +class ItemBase(database.DDBObject): + """Base class for Item, DeviceItem, and SharingItem""" + + def after_setup_new(self): + self.__class__.change_tracker.on_item_added(self) + + def signal_change(self, needs_save=True, can_change_views=True): + self.__class__.change_tracker.on_item_changed(self) + database.DDBObject.signal_change(self, needs_save, can_change_views) + + def remove(self): + database.DDBObject.remove(self) + self.__class__.change_tracker.on_item_removed(self) + +class MetadataItemBase(ItemBase): + """Base class for items that use the MetadataManager system.""" + + def init_metadata_attributes(self): + """Initialize all metadata attributes to None.""" + for attr in metadata.attribute_names: + if attr == 'title': + attr = 'metadata_title' + setattr(self, attr, None) + + def calc_title(self): + """Set the title column + + The title column stores the official title for the item. This may come + from torrent data, file metadata, feed data, and/or our filename. + """ + if self.metadata_title: + title = self.metadata_title + elif self.torrent_title is not None: + title = self.torrent_title + elif self.entry_title is not None: + title = self.entry_title + elif self.filename: + title = filename_to_unicode(os.path.basename(self.filename)) + else: + title = _('no title') + if not hasattr(self, 'title') or title != self.title: + self.title = title + + def update_from_metadata(self, metadata_dict): + """Update our attributes from a metadata dictionary.""" + # change the name of title to be "metadata_title" + metadata_dict = metadata_dict.copy() + if 'title' in metadata_dict: + metadata_dict['metadata_title'] = metadata_dict.pop('title') + self._bulk_update_db_values(metadata_dict) + self.calc_title() + +class Item(MetadataItemBase, iconcache.IconCacheOwnerMixin): """An item corresponds to a single entry in a feed. It has a single url associated with it.
""" @@ -357,50 +488,62 @@ # tweaked by the unittests to make things easier _allow_nonexistent_paths = False - def setup_new(self, fp_values, linkNumber=0, feed_id=None, parent_id=None, - eligibleForAutoDownload=True, channel_title=None): - metadata.Store.setup_new(self) + def setup_new(self, fp_values, link_number=0, feed_id=None, parent_id=None, + eligible_for_autodownload=True, channel_title=None): + self.init_metadata_attributes() self.is_file_item = False + self.new = True self.feed_id = feed_id self.parent_id = parent_id - self.channelTitle = channel_title - self.isContainerItem = None - self.seen = False - self.autoDownloaded = False - self.pendingManualDL = False - self.downloadedTime = None - self.watchedTime = self.lastWatched = None - self.pendingReason = u"" + self.channel_title = channel_title + self.is_container_item = None + self.auto_downloaded = False + self.pending_manual_download = False + self.downloaded_time = None + self.watched_time = self.last_watched = None + self.pending_reason = u"" + entry_title = self.torrent_title = None + self.metadata_title = None + self.filename = None fp_values.update_item(self) self.expired = False self.keep = False - self.filename = None - self.eligibleForAutoDownload = eligibleForAutoDownload + self.eligible_for_autodownload = eligible_for_autodownload self.duration = None self.screenshot = None - self.resumeTime = 0 - self.channelTitle = None + self.resume_time = 0 + self.channel_title = None self.downloader_id = None self.was_downloaded = False self.subtitle_encoding = None self.setup_new_icon_cache() self.play_count = 0 self.skip_count = 0 + self.net_lookup_enabled = False + self.deleted = False + self.size = self.enclosure_size # Initalize FileItem attributes to None - self.deleted = self.shortFilename = self.offsetPath = None + self.short_filename = self.offset_path = None - # linkNumber is a hack to make sure that scraped items at the + # link_number is a hack to make sure that scraped items at the # top of a page show up before scraped items at the bottom of # a page. 0 is the topmost, 1 is the next, and so on - self.linkNumber = linkNumber - self.creationTime = datetime.now() + self.link_number = link_number + self.creation_time = datetime.now() self._look_for_downloader() + self._calc_parent_title() self.setup_common() self.split_item() def setup_restored(self): self.setup_common() self.setup_links() + if (self.filename is not None and + not self.deleted and + not app.local_metadata_manager.path_in_system(self.filename)): + logging.warn("Path for item not in MetadataManager (%s). " + "Adding it now." 
% (self.filename)) + app.local_metadata_manager.add_file(self.filename) def setup_common(self): self.selected = False @@ -408,45 +551,66 @@ self.expiring = None self.showMoreInfo = False self.playing = False + Item._path_count_tracker.add_item(self) - def after_setup_new(self): - app.item_info_cache.item_created(self) + def signal_change(self, needs_save=True, can_change_views=True): + if ('torrent_title' in self.changed_attributes or + 'metadata_title' in self.changed_attributes): + self.calc_title() + ItemBase.signal_change(self, needs_save, can_change_views) + + def playlists_changed(self, added=False): + """Called when the item gets added/removed from playlists.""" + Item.change_tracker.playlists_changed = True + if added: + self.keep = True + needs_save = True + else: + needs_save = False + self.signal_change(needs_save) - def signal_change(self, needs_save=True): - app.item_info_cache.item_changed(self) - DDBObject.signal_change(self, needs_save) + @staticmethod + def playlist_reordered(): + """Called when a playlist gets reordered.""" + Item.change_tracker.playlists_changed = True + + def download_stats_changed(self): + Item.change_tracker.dlstats_changed = True + # TODO: I don't think we need the signal_change() call here once we + # finish replacing the ViewTracker code. + self.signal_change(needs_save=False) @classmethod def auto_pending_view(cls): return cls.make_view('feed.autoDownloadable AND ' 'NOT item.was_downloaded AND ' - '(item.eligibleForAutoDownload OR feed.getEverything)', + '(item.eligible_for_autodownload OR feed.getEverything)', joins={'feed': 'item.feed_id=feed.id'}) @classmethod def manual_pending_view(cls): - return cls.make_view('pendingManualDL') + return cls.make_view('pending_manual_download') @classmethod def auto_downloads_view(cls): - return cls.make_view("item.autoDownloaded AND " + return cls.make_view("item.auto_downloaded AND " "rd.state in ('downloading', 'paused')", joins={'remote_downloader rd': 'item.downloader_id=rd.id'}) @classmethod def manual_downloads_view(cls): - return cls.make_view("NOT item.autoDownloaded AND " - "NOT item.pendingManualDL AND " + return cls.make_view("NOT item.auto_downloaded AND " + "NOT item.pending_manual_download AND " "rd.state in ('downloading', 'paused')", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @classmethod def download_tab_view(cls): - return cls.make_view("(item.pendingManualDL OR " + return cls.make_view("(item.pending_manual_download OR " "(rd.state in ('downloading', 'paused', 'uploading', " "'uploading-paused', 'offline') OR " "(rd.state == 'failed' AND " - "feed.origURL == 'dtv:manualFeed')) AND " + "feed.orig_url == 'dtv:manualFeed')) AND " "rd.main_item_id=item.id)", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id', 'feed': 'item.feed_id=feed.id'}) @@ -471,19 +635,20 @@ @classmethod def unwatched_downloaded_items(cls): - return cls.make_view("NOT item.seen AND " + return cls.make_view("item.watched_time IS NULL AND " "item.parent_id IS NULL AND " "rd.state in ('finished', 'uploading', 'uploading-paused')", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @classmethod def newly_downloaded_view(cls): - return cls.make_view("NOT item.seen AND " - "(item.file_type != 'other') AND " + return cls.make_view("item.watched_time IS NULL AND " + "(item.file_type in ('audio', 'video')) AND " "((is_file_item AND NOT deleted) OR " - "rd.state in ('finished', 'uploading', 'uploading-paused'))", + "(rd.main_item_id=item.id AND " + "rd.state in ('finished', 'uploading', 
'uploading-paused')))", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}, - order_by='downloadedTime DESC') + order_by='downloaded_time DESC') @classmethod def downloaded_view(cls): @@ -492,7 +657,7 @@ joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @classmethod - def incomplete_mdp_view(cls, limit=10): + def incomplete_mdp_view(cls): """Return up to limit local items that have not yet been examined with MDP; a file is considered examined even if we have decided to skip it. @@ -500,10 +665,9 @@ """ return cls.make_view("((is_file_item AND NOT deleted) OR " "(rd.state in ('finished', 'uploading', 'uploading-paused'))) " - "AND NOT isContainerItem " # match CMF short-circuit, just in case + "AND NOT is_container_item " # match CMF short-circuit, just in case "AND mdp_state IS NULL", # State.UNSEEN - joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}, - limit=limit) + joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @property def in_incomplete_mdp_view(self): @@ -516,7 +680,7 @@ """ if not self.id_exists(): return False - if self.isContainerItem: + if self.is_container_item: return False # FIXME: if possible we should use the actual incomplete_mdp_view with # an id=self.id constraint, but I don't see a straightforward way to do @@ -530,13 +694,13 @@ def unique_others_view(cls): return cls.make_view("item.file_type='other' AND " "((is_file_item AND NOT deleted) OR " - "(rd.maiN_item_id=item.id AND " + "(rd.main_item_id=item.id AND " "rd.state in ('finished', 'uploading', 'uploading-paused')))", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @classmethod def unique_new_video_view(cls, include_podcasts=False): - query = ("NOT item.seen AND " + query = ("item.watched_time IS NULL AND " "item.file_type='video' AND " "((is_file_item AND NOT deleted) OR " "(rd.main_item_id=item.id AND " @@ -544,14 +708,14 @@ joins = {'remote_downloader AS rd': 'item.downloader_id=rd.id'} if not include_podcasts: query = query + (" AND (feed_id IS NULL OR " - "feed.origURL == 'dtv:manualFeed' OR " + "feed.orig_url == 'dtv:manualFeed' OR " "is_file_item)") joins['feed'] = 'feed_id = feed.id' return cls.make_view(query, joins=joins) @classmethod def unique_new_audio_view(cls, include_podcasts=False): - query = ("NOT item.seen AND " + query = ("item.watched_time IS NULL AND " "item.file_type='audio' AND " "((is_file_item AND NOT deleted) OR " "(rd.main_item_id=item.id AND " @@ -559,7 +723,7 @@ joins = {'remote_downloader AS rd': 'item.downloader_id=rd.id'} if not include_podcasts: query = query + (" AND (feed_id IS NULL OR " - "feed.origURL == 'dtv:manualFeed' OR " + "feed.orig_url == 'dtv:manualFeed' OR " "is_file_item)") joins['feed'] = 'feed_id = feed.id' return cls.make_view(query, joins=joins) @@ -567,8 +731,8 @@ @classmethod def toplevel_view(cls): return cls.make_view('feed_id IS NOT NULL AND ' - "feed.origURL != 'dtv:manualFeed' AND " - "feed.origURL NOT LIKE 'dtv:search%'", + "feed.orig_url != 'dtv:manualFeed' AND " + "feed.orig_url NOT LIKE 'dtv:search%'", joins={'feed': 'item.feed_id = feed.id'}) @classmethod @@ -594,7 +758,8 @@ @classmethod def feed_downloaded_view(cls, feed_id): return cls.make_view("feed_id=? 
AND " - "rd.state in ('finished', 'uploading', 'uploading-paused')", + "(is_file_item OR rd.state in ('finished', 'uploading', " + "'uploading-paused'))", (feed_id,), joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @@ -608,25 +773,20 @@ @classmethod def feed_available_view(cls, feed_id): - return cls.make_view("feed_id=? AND NOT autoDownloaded " - "AND downloadedTime IS NULL AND " - "NOT is_file_item AND " # FileItems are not available - "feed.last_viewed <= item.creationTime", - (feed_id,), - joins={'feed': 'item.feed_id=feed.id'}) + return cls.make_view("feed_id=? AND new", (feed_id,)) @classmethod def feed_auto_pending_view(cls, feed_id): return cls.make_view('feed_id=? AND feed.autoDownloadable AND ' 'NOT item.was_downloaded AND ' - '(item.eligibleForAutoDownload OR feed.getEverything)', + '(item.eligible_for_autodownload OR feed.getEverything)', (feed_id,), joins={'feed': 'item.feed_id=feed.id'}) @classmethod def feed_unwatched_view(cls, feed_id): - return cls.make_view("feed_id=? AND not seen AND " - "file_type !='other' AND " + return cls.make_view("feed_id=? AND item.watched_time IS NULL AND " + "file_type in ('audio', 'video') AND " "(is_file_item OR rd.state in ('finished', 'uploading', " "'uploading-paused'))", (feed_id,), @@ -651,20 +811,20 @@ @classmethod def search_item_view(cls): - return cls.make_view("feed.origURL == 'dtv:search'", + return cls.make_view("feed.orig_url == 'dtv:search'", joins={'feed': 'item.feed_id=feed.id'}) @classmethod def watchable_video_view(cls, include_podcasts=False): - query = ("not isContainerItem AND " + query = ("not is_container_item AND " "(deleted IS NULL or not deleted) AND " "(is_file_item OR rd.main_item_id=item.id) AND " "item.file_type='video'") if not include_podcasts: query = query + (" AND (feed_id IS NULL OR " - "feed.origURL == 'dtv:manualFeed' OR " - "feed.origURL == 'dtv:searchDownloads' OR " - "feed.origURL == 'dtv:search' OR " + "feed.orig_url == 'dtv:manualFeed' OR " + "feed.orig_url == 'dtv:searchDownloads' OR " + "feed.orig_url == 'dtv:search' OR " "is_file_item)") return cls.make_view(query, joins={'feed': 'item.feed_id=feed.id', @@ -673,24 +833,24 @@ @classmethod def watchable_view(cls): return cls.make_view( - "not isContainerItem AND " + "not is_container_item AND " "(deleted IS NULL or not deleted) AND " - "(is_file_item OR rd.main_item_id=item.id) AND " + "(is_file_item OR rd.main_item_id=item.id) AND " "NOT item.file_type='other'", joins={'feed': 'item.feed_id=feed.id', 'remote_downloader as rd': 'item.downloader_id=rd.id'}) @classmethod def watchable_audio_view(cls, include_podcasts=False): - query = ("not isContainerItem AND " + query = ("not is_container_item AND " "(deleted IS NULL or not deleted) AND " "(is_file_item OR rd.main_item_id=item.id) AND " "item.file_type='audio'") if not include_podcasts: query = query + (" AND (feed_id IS NULL OR " - "feed.origURL == 'dtv:manualFeed' OR " - "feed.origURL == 'dtv:searchDownloads' OR " - "feed.origURL == 'dtv:search' OR " + "feed.orig_url == 'dtv:manualFeed' OR " + "feed.orig_url == 'dtv:searchDownloads' OR " + "feed.orig_url == 'dtv:search' OR " "is_file_item)") return cls.make_view(query, joins={'feed': 'item.feed_id=feed.id', @@ -707,15 +867,15 @@ @classmethod def feed_expiring_view(cls, feed_id, watched_before): - return cls.make_view("watchedTime is not NULL AND " - "watchedTime < ? AND feed_id = ? AND keep = 0", + return cls.make_view("watched_time is not NULL AND " + "watched_time < ? AND feed_id = ? 
AND keep = 0", (watched_before, feed_id), joins={'feed': 'item.feed_id=feed.id'}) @classmethod def latest_in_feed_view(cls, feed_id): return cls.make_view("feed_id=?", (feed_id,), - order_by='releaseDateObj DESC', limit=1) + order_by='release_date DESC', limit=1) @classmethod def media_children_view(cls, parent_id): @@ -724,7 +884,7 @@ @classmethod def containers_view(cls): - return cls.make_view("isContainerItem") + return cls.make_view("is_container_item") @classmethod def file_items_view(cls): @@ -742,23 +902,27 @@ @classmethod def recently_watched_view(cls): - return cls.make_view("file_type IN ('video', 'audio') AND lastWatched") + return cls.make_view("file_type IN ('video', 'audio') AND last_watched") @classmethod def recently_downloaded_view(cls): - return cls.make_view("NOT seen AND " + return cls.make_view("item.watched_time IS NULL AND " "item.parent_id IS NULL AND " - "NOT is_file_item AND downloadedTime AND " + "NOT is_file_item AND downloaded_time AND " "rd.state in ('finished', 'uploading', 'uploading-paused')", joins={'remote_downloader AS rd': 'item.downloader_id=rd.id'}) @classmethod - def update_folder_trackers(cls): + def update_folder_trackers(cls, db_info=None): """Update each view tracker that care's about the item's folder (both playlist and channel folders). """ - for tracker in app.view_tracker_manager.trackers_for_ddb_class(cls): + if db_info is None: + view_tracker_manager = app.db_info.view_tracker_manager + else: + view_tracker_manager = db_info.view_tracker_manager + for tracker in view_tracker_manager.trackers_for_ddb_class(cls): # bit of a hack here. We only need to update ViewTrackers # that care about the item's folder. This seems like a # safe way to check if that's true. @@ -766,14 +930,30 @@ tracker.check_all_objects() @classmethod + def items_with_path_view(cls, path): + return cls.make_view('LOWER(filename)=LOWER(?)', + (filename_to_unicode(path),)) + + @classmethod def downloader_view(cls, dler_id): return cls.make_view("downloader_id=?", (dler_id,)) + _path_count_tracker = _ItemsForPathCountTracker() + + @classmethod + def have_item_for_path(cls, path): + """Check if we have an item for a path. + + This method is optimized to avoid DB queries if at all possible. + """ + # NOTE: use Item here rather than cls, since FileItem and Item share + # the same _path_count_tracker. 
+ return Item._path_count_tracker.get_count(path) > 0 + def _look_for_downloader(self): self.set_downloader(downloader.lookup_downloader(self.get_url())) if self.has_downloader() and self.downloader.is_finished(): self.set_filename(self.downloader.get_filename()) - self.check_media_file() getSelected, setSelected = make_simple_get_set( u'selected', change_needs_save=False) @@ -786,9 +966,9 @@ """ filename_root = self.get_filename() if fileutil.isdir(filename_root): - return set(fileutil.miro_allfiles(filename_root)) + return (fn for fn in fileutil.miro_allfiles(filename_root)) else: - return set() + return [] def _make_new_children(self, paths): filename_root = self.get_filename() @@ -796,61 +976,91 @@ logging.error("Item._make_new_children: get_filename here is None") return for path in paths: + # XXX this assert is expensive due to stat() + assert os.path.isfile(path) assert path.startswith(filename_root) - offsetPath = path[len(filename_root):] - while offsetPath[0] in ('/', '\\'): - offsetPath = offsetPath[1:] - FileItem(path, parent_id=self.id, offsetPath=offsetPath) - def find_new_children(self): + @eventloop.idle_iterator + def find_new_children(self, callback=None): """If this item is a container item, walk through its - directory and find any new children. Returns True if it found - children and ran signal_change(). + directory and find any new children. You may specify a callback + which will be called at the end of the find_new_children() + operation as a sort of serializing step. Doing so + is entirely optional, on an as-needed basis. """ - if not self.isContainerItem: - return False + if not self.id_exists() or self.is_container_item == False: + return + if self.is_container_item: + skip = [c.get_filename() for c in self.get_children()] + else: + skip = [] if self.get_state() == 'downloading': # don't try to find videos that we're in the middle of # re-downloading - return False - child_paths = self._find_child_paths() - for child in self.get_children(): - child_paths.discard(child.get_filename()) - self._make_new_children(child_paths) - if child_paths: + return + dirty = False + this_pass = [] + start = time.time() + for path in self._find_child_paths(): + if path in skip: + continue + this_pass.append(path) + if time.time() - start > 0.3: + self.is_container_item = True + dirty = True + self._make_new_children(this_pass) + self.signal_change() + yield + if not self.id_exists(): + return + # Leave "skip" as is. The filesystem namespace changes + # asynchronously with respect to our operations and so whether we do + # it piecemeal or in one go is the same.
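The loop above is the time-slicing pattern that @eventloop.idle_iterator enables: do a bounded amount of work, then yield so the event loop can service other callbacks before resuming the generator. The same pattern, reduced to a self-contained sketch (the helper and its names are hypothetical):

import time

def process_in_slices(paths, handle_batch, slice_seconds=0.3):
    """Handle paths in batches, yielding between time slices."""
    batch = []
    start = time.time()
    for path in paths:
        batch.append(path)
        if time.time() - start > slice_seconds:
            handle_batch(batch)
            yield  # let the event loop run other callbacks
            batch = []
            start = time.time()
    if batch:
        handle_batch(batch)  # process the leftovers

An idle scheduler resumes the generator once per idle callback, which is the role eventloop.idle_iterator plays for find_new_children().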
+ start = time.time() + this_pass = [] + if this_pass: + # Do the leftovers + dirty = True + self.is_container_item = True + self._make_new_children(this_pass) + self.signal_change() - return True - return False + if callback: + callback(dirty) def split_item(self): - """returns True if it ran signal_change()""" - if self.isContainerItem is not None: - return self.find_new_children() + if self.is_container_item is not None: + if self.is_container_item: + self.find_new_children() + return if ((not isinstance(self, FileItem) and (self.downloader is None or not self.downloader.is_finished()))): - return False + return filename_root = self.get_filename() if filename_root is None: - return False + return if fileutil.isdir(filename_root): - child_paths = self._find_child_paths() - if len(child_paths) > 0: - self.isContainerItem = True - self._make_new_children(child_paths) - else: - if not self.get_feed_url().startswith ("dtv:directoryfeed"): - target_dir = app.config.get(prefs.NON_VIDEO_DIRECTORY) - if not filename_root.startswith(target_dir): - if isinstance(self, FileItem): - self.migrate (target_dir) - else: - self.downloader.migrate (target_dir) - self.isContainerItem = False + def complete(nonempty): + if not self.id_exists(): + return + if not nonempty: + if not self.get_feed_url().startswith("dtv:directoryfeed"): + target_dir = app.config.get(prefs.NON_VIDEO_DIRECTORY) + if not filename_root.startswith(target_dir): + if isinstance(self, FileItem): + self.migrate(target_dir) + else: + self.downloader.migrate(target_dir) + self.is_container_item = False + self.signal_change() + self.find_new_children(callback=complete) else: - self.isContainerItem = False + self.is_container_item = False self.signal_change() - return True def set_subtitle_encoding(self, encoding): if encoding is not None: @@ -863,13 +1073,45 @@ self.signal_change() def set_filename(self, filename): + Item._path_count_tracker.remove_item(self) self.filename = filename + try: + self.size = os.path.getsize(filename) + except EnvironmentError, e: + logging.warn("Item.set_filename(): error getting size: %s", e) + self.size = None + if not app.local_metadata_manager.path_in_system(filename): + metadata = app.local_metadata_manager.add_file(filename) + else: + metadata = app.local_metadata_manager.get_metadata(filename) + self.update_from_metadata(metadata) + Item._path_count_tracker.add_item(self) + + def file_moved(self, new_filename): + app.local_metadata_manager.file_moved(self.filename, new_filename) + self.set_filename(new_filename) + + def set_user_metadata(self, metadata_dict): + if not self.filename: + logging.warn("No file to set metadata for in " + "set_user_metadata") + return + if 'cover_art_path' in metadata_dict: + # ItemInfo calls the attribute "cover_art_path", but the metadata + # system just calls it cover art. Rename the key.
+ metadata_dict['cover_art'] = metadata_dict.pop('cover_art_path') + app.local_metadata_manager.set_user_data(self.filename, metadata_dict) + self.update_from_metadata(metadata_dict) + self.signal_change() + + def set_rating(self, rating): + self.rating = rating + self.signal_change() def matches_search(self, search_string): if search_string is None or search_string == '': return True - my_info = app.item_info_cache.get_info(self.id) - return search.item_matches(my_info, search_string) + return search.item_matches(self, search_string) def _remove_from_playlists(self): models.PlaylistItemMap.remove_item_from_playlists(self) @@ -879,70 +1121,45 @@ if self.feed_id is not None: try: obj = models.Feed.get_by_id(self.feed_id) - except ObjectNotFoundError: - raise DatabaseConstraintError( + except database.ObjectNotFoundError: + raise database.DatabaseConstraintError( "my feed (%s) is not in database" % self.feed_id) else: if not isinstance(obj, models.Feed): msg = "feed_id points to a %s instance" % obj.__class__ - raise DatabaseConstraintError(msg) + raise database.DatabaseConstraintError(msg) if self.has_parent(): try: obj = Item.get_by_id(self.parent_id) - except ObjectNotFoundError: - raise DatabaseConstraintError( + except database.ObjectNotFoundError: + raise database.DatabaseConstraintError( "my parent (%s) is not in database" % self.parent_id) else: if not isinstance(obj, Item): msg = "parent_id points to a %s instance" % obj.__class__ - raise DatabaseConstraintError(msg) - # If isContainerItem is None, we may be in the middle + raise database.DatabaseConstraintError(msg) + # If is_container_item is None, we may be in the middle # of building the children list. - if obj.isContainerItem is not None and not obj.isContainerItem: + if (obj.is_container_item is not None and + not obj.is_container_item): msg = "parent_id is not a containerItem" - raise DatabaseConstraintError(msg) + raise database.DatabaseConstraintError(msg) if self.parent_id is None and self.feed_id is None: - raise DatabaseConstraintError("feed_id and parent_id both None") + raise database.DatabaseConstraintError("feed_id and parent_id both None") if self.parent_id is not None and self.feed_id is not None: - raise DatabaseConstraintError( + raise database.DatabaseConstraintError( "feed_id and parent_id both not None") def on_signal_change(self): self.expiring = None - self._sync_title() if hasattr(self, "_state"): del self._state if hasattr(self, "_size"): del self._size - def _sync_title(self): - # for torrents that aren't from a feed, we use the filename - # as the title. - if ((self.is_external() - and self.has_downloader() - and self.downloader.get_type() == "bittorrent" - and self.downloader.get_state() == "downloading")): - filename = os.path.basename(self.downloader.get_filename()) - if self.title != filename: - # we skip set_title() since we're already in the DB - # thread/signal_change() - self.title = filename_to_unicode(filename) - def recalc_feed_counts(self): self.get_feed().recalc_counts() - def get_viewed(self): - """Returns True iff this item has never been viewed in the - interface. - - Note the difference between "viewed" and seen. - """ - try: - # optimizing by trying the cached feed - return self._feed.last_viewed >= self.creationTime - except AttributeError: - return self.get_feed().last_viewed >= self.creationTime - @returns_unicode def get_url(self): """Returns the URL associated with the first enclosure in the @@ -957,7 +1174,7 @@ This returns True when the item has a non-file URL. 
""" url = self.get_url() - return url != u'' and not url.startswith(u"file:") + return url is not None and not url.startswith(u"file:") def get_feed(self): """Returns the feed this item came from. @@ -985,27 +1202,48 @@ self._parent = self return self._parent + def get_parent_sort_key(self): + if self.has_parent(): + parent = self.get_parent() + # this key lets us sort by title, but also keep torrents with + # duplicate titles separate. + return (parent.get_title(), parent.id) + else: + return None + @returns_unicode def get_feed_url(self): - return self.get_feed().origURL + return self.get_feed().orig_url @returns_unicode def get_source(self): if self.feed_id is not None: - feed_ = self.get_feed() - if feed_.origURL != 'dtv:manualFeed': - # we do this manually so we don't pick up the name of a search - # query (#16044) - return feed_.userTitle or feed_.actualFeed.get_title() + return self.get_feed().get_title() if self.has_parent(): try: return self.get_parent().get_title() - except ObjectNotFoundError: + except database.ObjectNotFoundError: + return None + return None + + @returns_unicode + def get_source_for_search(self): + """Get the source name to match against for a search. + + This is the same as get_source(), except it doesn't include the feed + search terms for saved searches (see #16044) + """ + if self.feed_id is not None: + return self.get_feed().get_title_without_search_terms() + if self.has_parent(): + try: + return self.get_parent().get_title() + except database.ObjectNotFoundError: return None return None def get_children(self): - if self.isContainerItem: + if self.is_container_item: return Item.children_view(self.id) else: raise ValueError("%s is not a container item" % self) @@ -1017,7 +1255,7 @@ def is_playable(self): """Is this a playable item?""" - if self.isContainerItem: + if self.is_container_item: return Item.media_children_view(self.id).count() > 0 else: return self.file_type in ('audio', 'video') and not self.has_drm @@ -1029,7 +1267,8 @@ # _feed is created by get_feed which caches the result if hasattr(self, "_feed"): del self._feed - if self.isContainerItem: + self._calc_parent_title() + if self.is_container_item: for item in self.get_children(): if hasattr(item, "_feed"): del item._feed @@ -1039,36 +1278,41 @@ def expire(self): self.confirm_db_thread() self._remove_from_playlists() - self.resumeTime = 0 if self.is_external(): - if self.is_downloaded(): - if self.isContainerItem: - for item in self.get_children(): - item.make_deleted() - elif self.get_filename(): - FileItem(self.get_filename(), feed_id=self.feed_id, - parent_id=self.parent_id, deleted=True) - if self.has_downloader(): - self.downloader.set_delete_files(False) - self.remove() + self.delete_files_and_remove() else: - if self.isContainerItem: - # remove our children, since we're about to set - # isContainerItem to None + self.delete_files_and_expire() + self.recalc_feed_counts() + + def delete_files_and_remove(self): + if self.is_downloaded(): + if self.is_container_item: for item in self.get_children(): item.make_deleted() - item.remove() - self.delete_files() - self.delete_external_metadata() - self.expired = True - self.seen = self.keep = self.pendingManualDL = False - self.filename = None - self.mdp_state = moviedata.State.UNSEEN - self.file_type = self.watchedTime = self.lastWatched = None - self.duration = None - self.isContainerItem = None - self.signal_change() - self.recalc_feed_counts() + elif self.get_filename(): + FileItem(self.get_filename(), feed_id=self.feed_id, + 
parent_id=self.parent_id, deleted=True) + if self.has_downloader(): + self.downloader.set_delete_files(False) + self.remove() + + def delete_files_and_expire(self): + if self.is_container_item: + # remove our children, since we're about to set + # is_container_item to None + for item in self.get_children(): + item.make_deleted() + item.remove() + Item._path_count_tracker.remove_item(self) + self.delete_files() + self.resume_time = 0 + self.expired = True + self.keep = self.pending_manual_download = False + self.filename = None + self.file_type = self.watched_time = self.last_watched = None + self.duration = None + self.is_container_item = None + self.signal_change() def has_downloader(self): return self.downloader_id is not None and self.downloader is not None @@ -1089,19 +1333,19 @@ def stop_upload(self): if self.downloader: self.downloader.stop_upload() - if self.isContainerItem: + if self.is_container_item: self.children_signal_change() def pause_upload(self): if self.downloader: self.downloader.pause_upload() - if self.isContainerItem: + if self.is_container_item: self.children_signal_change() def start_upload(self): if self.downloader: self.downloader.start_upload() - if self.isContainerItem: + if self.is_container_item: self.children_signal_change() def get_expiration_time(self): @@ -1130,7 +1374,7 @@ """Returns the time this Item object was created - i.e. the associated file was added to our database """ - return self.creationTime + return self.creation_time def get_watched_time(self): """Returns the most recent watched time of this item or any @@ -1139,23 +1383,23 @@ Returns a datetime.datetime instance or None if the item and none of its children have been watched. """ - if not self.get_seen(): + if not self.get_watched(): return None - if self.isContainerItem and self.watchedTime == None: - self.watchedTime = datetime.min + if self.is_container_item and self.watched_time == None: + self.watched_time = datetime.min for item in self.get_children(): child_time = item.get_watched_time() if child_time is None: - self.watchedTime = None + self.watched_time = None return None - if child_time > self.watchedTime: - self.watchedTime = child_time + if child_time > self.watched_time: + self.watched_time = child_time self.signal_change() - return self.watchedTime + return self.watched_time def get_expiring(self): if self.expiring is None: - if not self.get_seen(): + if not self.get_watched(): self.expiring = False elif self.keep: self.expiring = False @@ -1170,69 +1414,90 @@ self.expiring = True return self.expiring - def get_seen(self): - """Returns true iff video has been seen. + def unset_new(self): + """Unsets the "new" attribute of an item. - Note the difference between "viewed" and "seen". + This should be done after: + The user, or the auto-downloader, downloads the item + The user has seen the item in a feed then switched away from it """ + self.new = False + self.signal_change() + + def get_watched(self): + """Check if the media file has been watched by the user """ self.confirm_db_thread() - return self.seen + return self.watched_time is not None + + def set_watched(self): + """Set that this item has been watched by the user - def mark_item_seen(self, mark_other_items=True): + If this item has been watched before, this sets last_watched to the + current time. If it hasn't been watched, this sets both watched_time + and last_watched. 
+ """ + self.last_watched = datetime.now() + if self.watched_time is None: + self.watched_time = self.last_watched + self.signal_change() + + def unset_watched(self): + """Act like this item has never been watched by the user.""" + self.resume_time = 0 + self.watched_time = self.last_watched = None + self.signal_change() + + def mark_watched(self, mark_other_items=True): """Marks the item as seen. """ self.confirm_db_thread() self.has_drm = False - if self.isContainerItem: + if self.is_container_item: for child in self.get_children(): - child.seen = True - child.signal_change() - if self.seen == False: - self.seen = True + child.set_watched() + was_watched = self.get_watched() + self.set_watched() + if not was_watched: + # need to do some extra work if this is the first time we're + # watching the video if self.subtitle_encoding is None: config_value = app.config.get(prefs.SUBTITLE_ENCODING) if config_value: self.subtitle_encoding = unicode(config_value) - if self.watchedTime is None: - self.watchedTime = datetime.now() - self.lastWatched = datetime.now() - self.signal_change() self.update_parent_seen() - if mark_other_items and self.downloader: - for item in self.downloader.item_list: - if item != self: - item.mark_item_seen(False) self.recalc_feed_counts() - else: - self.lastWatched = datetime.now() - self.signal_change() + + if mark_other_items and self.downloader: + for item in self.downloader.item_list: + if item != self: + item.mark_watched(False) def update_parent_seen(self): if self.parent_id: - unseen_children = self.make_view('parent_id=? AND NOT seen AND ' - "file_type in ('audio', 'video')", (self.parent_id,)) + unseen_children = self.make_view( + 'parent_id=? AND watched_time IS NULL AND ' + "file_type in ('audio', 'video')", (self.parent_id,)) new_seen = (unseen_children.count() == 0) parent = self.get_parent() - if parent.seen != new_seen: - parent.seen = new_seen - parent.signal_change() + if parent.get_watched() != new_seen: + if new_seen: + parent.set_watched() + else: + parent.unset_watched() - def mark_item_unseen(self, mark_other_items=True): + def mark_unwatched(self, mark_other_items=True): self.confirm_db_thread() - if self.isContainerItem: + if self.is_container_item: for item in self.get_children(): - item.seen = False - item.signal_change() - if self.seen: - self.seen = False - self.watchedTime = self.lastWatched = None - self.resumeTime = 0 - self.signal_change() + item.unset_watched() + + if self.get_watched(): + self.unset_watched() self.update_parent_seen() if mark_other_items and self.downloader: for item in self.downloader.item_list: if item != self: - item.mark_item_unseen(False) + item.mark_unwatched(False) self.recalc_feed_counts() # TODO: played/seen count updates need to trigger recalculation of auto @@ -1259,8 +1524,8 @@ def set_auto_downloaded(self, autodl=True): self.confirm_db_thread() - if autodl != self.autoDownloaded: - self.autoDownloaded = autodl + if autodl != self.auto_downloaded: + self.auto_downloaded = autodl self.signal_change() @eventloop.as_idle @@ -1273,33 +1538,37 @@ logging.exception("set_resume_time: not-saving! given non-int %s", position) return - if self.resumeTime != position: - self.resumeTime = position + if self.resume_time != position: + self.resume_time = position self.signal_change() def get_auto_downloaded(self): """Returns true iff item was auto downloaded. """ self.confirm_db_thread() - return self.autoDownloaded + return self.auto_downloaded def download(self, autodl=False): """Starts downloading the item. 
""" self.confirm_db_thread() manual_dl_count = Item.manual_downloads_view().count() - self.expired = self.keep = self.seen = False + self.expired = self.keep = False self.was_downloaded = True + self.new = False + self.watched_time = None if ((not autodl) and manual_dl_count >= app.config.get(prefs.MAX_MANUAL_DOWNLOADS)): - self.pendingManualDL = True - self.pendingReason = _("queued for download") + self.pending_manual_download = True + self.pending_reason = _("queued for download") self.signal_change() + if self.looks_like_torrent(): + self.update_title_from_torrent() return else: self.set_auto_downloaded(autodl) - self.pendingManualDL = False + self.pending_manual_download = False dler = downloader.get_downloader_for_item(self) if dler is not None: @@ -1322,7 +1591,7 @@ def is_pending_manual_download(self): self.confirm_db_thread() - return self.pendingManualDL + return self.pending_manual_download def cancel_auto_download(self): # FIXME - this is cheating and abusing the was_downloaded flag @@ -1337,7 +1606,7 @@ ufeed = self.get_feed() if ufeed.getEverything: return True - return self.eligibleForAutoDownload + return self.eligible_for_autodownload def is_pending_auto_download(self): return (self.get_feed().is_autodownloadable() and @@ -1355,18 +1624,24 @@ self.confirm_db_thread() if self.cover_art: path = self.cover_art - return resources.path(fileutil.expand_filename(path)) - elif self.icon_cache is not None and self.icon_cache.is_valid(): + path = resources.path(fileutil.expand_filename(path)) + if fileutil.exists(path): + return path + if self.icon_cache is not None and self.icon_cache.is_valid(): + # is_valid() verifies that the path exists path = self.icon_cache.get_filename() return resources.path(fileutil.expand_filename(path)) - elif self.screenshot: + if self.screenshot: path = self.screenshot - return resources.path(fileutil.expand_filename(path)) - elif self.isContainerItem: + path = resources.path(fileutil.expand_filename(path)) + if fileutil.exists(path): + return path + if self.is_container_item: return resources.path("images/thumb-default-folder.png") else: feed = self.get_feed() if feed.thumbnail_valid(): + # thumbnail_valid() also verifies the path exists return feed.get_thumbnail_path() elif (self.get_filename() and filetypes.is_audio_filename(self.get_filename())): @@ -1375,25 +1650,18 @@ return resources.path("images/thumb-default-video.png") def is_downloaded_torrent(self): - return (self.isContainerItem and self.has_downloader() and + return (self.is_container_item and self.has_downloader() and self.downloader.is_finished()) @returns_unicode def get_title(self): """Returns the title of the item. """ - stored = metadata.Store.get_title(self) - if stored: - return stored - if self.entry_title is not None: - return self.entry_title - if self.get_filename() is not None: - return filename_to_title(self.get_filename()) - return _('no title') + return self.title def set_channel_title(self, title): check_u(title) - self.channelTitle = title + self.channel_title = title self.signal_change() @returns_unicode @@ -1407,8 +1675,8 @@ return e.title else: return u'' - elif self.channelTitle: - return self.channelTitle + elif self.channel_title: + return self.channel_title else: return u'' @@ -1457,6 +1725,45 @@ else: return 'stopped' + def update_title_from_torrent(self): + """Try to update our title using torrent metadata. + + If this item is a torrent, then we will download the .torrent file and + use that to upate our title. 
If this is not a torrent, or there's an + error downloading the file, then nothing will change + """ + self._update_title_from_torrent_client = httpclient.grab_url( + self.get_url(), + self._update_title_from_torrent_callback, + self._update_title_from_torrent_errback, + header_callback=self._update_title_from_torrent_headers) + + def _update_title_from_torrent_headers(self, info): + if info['content-type'] != u'application/x-bittorrent': + logging.warn("wrong content-type %s in " + "update_title_from_torrent()", info['content-type']) + # data doesn't seem like a torrent, cancel the request + self._update_title_from_torrent_client.cancel() + self._update_title_from_torrent_client = None + + def _update_title_from_torrent_callback(self, info): + try: + title = util.get_name_from_torrent_metadata(info['body']) + except ValueError: + logging.exception("Error setting torrent name") + else: + self.set_torrent_title(title) + self._update_title_from_torrent_client = None + + def set_torrent_title(self, title): + self.torrent_title = title + self.signal_change() + + def _update_title_from_torrent_errback(self, error): + logging.warn("Error downloading torrent metainfo in " + "update_title_from_torrent(): %s", error) + self._update_title_from_torrent_client = None + def is_transferring(self): return (self.downloader and self.downloader.get_state() in (u'uploading', @@ -1468,7 +1775,7 @@ self.confirm_db_thread() if self.has_downloader(): self.set_downloader(None) - if self.isContainerItem: + if self.is_container_item: for item in self.get_children(): item.delete_files() self.delete_subtitle_files() @@ -1510,31 +1817,36 @@ # into the stopped state). if (self.downloader is None or self.downloader.get_state() in (u'failed', u'stopped')): - if self.pendingManualDL: + if self.pending_manual_download: self._state = u'downloading' elif self.expired: self._state = u'expired' - elif (self.get_viewed() or - (self.downloader and - self.downloader.get_state() in (u'failed', - u'stopped'))): - self._state = u'not-downloaded' - else: + elif self.new: self._state = u'new' + else: + self._state = u'not-downloaded' elif self.downloader.get_state() in (u'offline', u'paused'): - if self.pendingManualDL: + if self.pending_manual_download: self._state = u'downloading' else: self._state = u'paused' elif not self.downloader.is_finished(): self._state = u'downloading' - elif not self.get_seen(): + elif not self.get_watched(): self._state = u'newly-downloaded' elif self.get_expiring(): self._state = u'expiring' else: self._state = u'saved' + def _calc_parent_title(self): + if self.feed_id is not None: + self.parent_title = self.get_feed().get_title() + elif self.has_parent(): + self.parent_title = self.get_parent().get_title() + else: + self.parent_title = None + @returns_unicode def get_channel_category(self): """Get the category to use for the channel template. 
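util.get_name_from_torrent_metadata() is not shown in this diff; conceptually it bdecodes the downloaded metainfo and returns the name field of the info dictionary, raising ValueError on malformed data. A minimal standalone sketch of that idea (illustrative only, not Miro's implementation):

def bdecode(data, i=0):
    """Minimal bencode decoder; returns (value, next_index)."""
    c = data[i:i+1]
    if c == b'i':                        # integer: i<digits>e
        end = data.index(b'e', i)
        return int(data[i+1:end]), end + 1
    if c in (b'l', b'd'):                # list or dict
        items, i = [], i + 1
        while data[i:i+1] != b'e':
            value, i = bdecode(data, i)
            items.append(value)
        if c == b'l':
            return items, i + 1
        return dict(zip(items[::2], items[1::2])), i + 1
    colon = data.index(b':', i)          # string: <length>:<bytes>
    length = int(data[i:colon])
    start = colon + 1
    return data[start:start+length], start + length

def name_from_torrent(body):
    try:
        metainfo, _ = bdecode(body)
        return metainfo[b'info'][b'name'].decode('utf-8')
    except (ValueError, KeyError, IndexError, TypeError):
        raise ValueError("not a valid torrent metainfo file")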
@@ -1556,16 +1868,14 @@ """ self.confirm_db_thread() - if self.downloader is None or not self.downloader.is_finished(): - if not self.get_viewed(): - return u'new' + if self.new: + return u'new' + elif self.downloader is None or not self.downloader.is_finished(): if self.expired: return u'expired' else: return u'not-downloaded' - elif not self.get_seen(): - if not self.get_viewed(): - return u'new' + elif not self.get_watched(): return u'newly-downloaded' elif self.get_expiring(): return u'expiring' @@ -1601,36 +1911,7 @@ return util.format_size_for_user(self.get_size()) def get_size(self): - if not hasattr(self, "_size"): - self._size = self._get_size() - return self._size - - def _get_size(self): - """Returns the size of the item. We use the following methods - to get the size: - - 1. Physical size of a downloaded file - 2. HTTP content-length - 3. RSS enclosure tag value - """ - if self.is_downloaded(): - if self.get_filename() is None: - return 0 - try: - fname = self.get_filename() - return os.path.getsize(fname) - except OSError: - return 0 - elif self.has_downloader(): - size = self.downloader.get_total_size() - if size == -1: - logging.debug("downloader could not get total size for item") - return 0 - return size - else: - if self.enclosure_size is not None: - return self.enclosure_size - return 0 + return self.size def download_progress(self): """Returns the download progress in absolute percentage [0.0 - @@ -1642,20 +1923,11 @@ else: size = self.get_size() dled = self.downloader.get_current_size() - if size == 0: + if size == 0 or size is None or dled is None: return 0 else: return (100.0*dled) / size - @returns_unicode - def get_startup_activity(self): - if self.pendingManualDL: - return self.pendingReason - elif self.downloader: - return self.downloader.get_startup_activity() - else: - return _("starting up...") - def get_pub_date_parsed(self): """Returns the published date of the item as a datetime object. """ @@ -1665,7 +1937,7 @@ """Returns the date this video was released or when it was published. """ - return self.releaseDateObj + return self.release_date def get_duration_value(self): """Returns the length of the video in seconds. 
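The next hunk replaces get_format()'s inline MIME parsing with a call to filetypes.calc_file_format(). That helper's body is not part of this diff; presumably it prefers the filename's extension and falls back to the downloader's content-type, roughly mirroring the removed logic in the hunk that follows (an assumption-level sketch, not the real miro.filetypes implementation):

import os

def calc_file_format(filename, content_type):
    """Sketch: derive a short display format such as u'.mp3'."""
    if filename:
        ext = os.path.splitext(filename)[1]
        if ext:
            return unicode(ext.lower())
    if content_type and '/' in content_type:
        mtype, subtype = content_type.lower().split('/', 1)
        if mtype in ('audio', 'video'):
            fmt = subtype.split(';')[0]
            if fmt.startswith('x-'):
                fmt = fmt[2:]
            return u'.%s' % fmt
    return None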
@@ -1686,19 +1958,8 @@ return self.enclosure_format if self.downloader: - if ((self.downloader.contentType - and "/" in self.downloader.contentType)): - mtype, subtype = self.downloader.contentType.split('/', 1) - mtype = mtype.lower() - if mtype in KNOWN_MIME_TYPES: - format_ = subtype.split(';')[0].upper() - if mtype == u'audio': - format_ += u' AUDIO' - if format_.startswith(u'X-'): - format_ = format_[2:] - return (u'.%s' % - MIME_SUBSITUTIONS.get(format_, format_).lower()) - + return filetypes.calc_file_format(self.filename, + self.downloader.content_type) if empty_for_unknown: return u"" return u"unknown" @@ -1738,19 +1999,19 @@ def update_from_feed_parser_values(self, fp_values): fp_values.update_item(self) - self.icon_cache.request_update() + if self.icon_cache.filename is None: + self.icon_cache.request_update() self.signal_change() def on_download_finished(self): """Called when the download for this item finishes.""" self.confirm_db_thread() - self.downloadedTime = datetime.now() + self.downloaded_time = datetime.now() self.set_filename(self.downloader.get_filename()) self.split_item() self.signal_change() self._replace_file_items() - self.check_media_file() signals.system.download_complete(self) for other in Item.make_view('downloader_id IS NULL AND url=?', @@ -1758,76 +2019,10 @@ other.set_downloader(self.downloader) self.recalc_feed_counts() - def check_media_file(self): - """Begin metadata extraction for this item; runs mutagen synchonously, - if applicable, and then adds the item to mdp's queue. - """ - if self.isContainerItem: - self.file_type = u'other' - self.signal_change() - return # this is OK because incomplete_mdp_view knows about it - # NOTE: it is very important (#7993) that there is no way to leave this - # method without either: - # - calling moviedata.movie_data_updater.request_update(self) - # - calling _handle_invalid_media_file - try: - self._check_media_file() - except IOError, e: - # shouldn't generally happen, but probably something we have no - # control over; likely another process has moved or deleted our file - logging.warn("check_media_file failed: %s", e) - self._handle_invalid_media_file() - except CheckMediaError, e: - # filename is None - this should never happen - app.controller.failed_soft("check_media_file", str(e), True) - self._handle_invalid_media_file() - else: - moviedata.movie_data_updater.request_update(self) - if self.file_type is None: - # if this is not overridden by movie_data_updater, - # neither mutagen nor MDP could identify it - self.file_type = u'other' - self.signal_change() - - def _handle_invalid_media_file(self): - """Failed to process a file in check_media_file; when this happens we: - - inform the metadata_progress_updater that we're done with the item - - make sure we don't try to process it again - - check whether the file no longer exists - """ - # call path_processed since the movie data program won't run on us - path = self.get_filename() - if path is not None: - app.metadata_progress_updater.path_processed(path) - # Set our state to SKIPPED so we don't request another update. - self.mdp_state = moviedata.State.SKIPPED - self.file_type = u'other' - self.signal_change() - # The file may no longer be around. Call check_delete() in an - # idle callback to handle this. We don't want to call - # check_delete() now because it's not safe if we're called inside - # setup_new(). 
See #17344 - _deleted_file_checker.schedule_check(self) - - def _check_media_file(self): - """Does the work for check_media_file() - - :raises: CheckMediaError if we aren't able to check it - """ - # NOTE: it is very important (#7993) that there is no way to leave this - # method without either: - # - calling moviedata.movie_data_updater.request_update(self) - # - changing this item so that not self.in_incomplete_mdp_view - filename = self.get_filename() - if filename is None: - raise CheckMediaError("item has no filename") - self.file_type = filetypes.item_file_type_for_filename(filename) - self.read_metadata() - def on_downloader_migrated(self, old_filename, new_filename): - self.set_filename(new_filename) + self.file_moved(new_filename) self.signal_change() - if self.isContainerItem: + if self.is_container_item: self.migrate_children(self.get_filename()) self._replace_file_items() @@ -1855,6 +2050,7 @@ new_downloader.add_item(self) else: self.downloader_id = None + self.download_stats_changed() self.signal_change() def save(self, always_signal=False): @@ -1870,11 +2066,11 @@ return self.filename def is_video_file(self): - return (self.isContainerItem != True + return (self.is_container_item != True and filetypes.is_video_filename(self.get_filename())) def is_audio_file(self): - return (self.isContainerItem != True + return (self.is_container_item != True and filetypes.is_audio_filename(self.get_filename())) def is_external(self): @@ -1885,32 +2081,20 @@ and self.get_feed_url() == 'dtv:manualFeed') def migrate_children(self, newdir): - if self.isContainerItem: + if self.is_container_item: for item in self.get_children(): item.migrate(newdir) - def delete_external_metadata(self): - if self.screenshot: - try: - fileutil.remove(self.screenshot) - except StandardError: - pass - self.screenshot = None - self.delete_cover_art() - def remove(self): + Item._path_count_tracker.remove_item(self) if self.has_downloader(): self.set_downloader(None) self.remove_icon_cache() - self.delete_external_metadata() - if self.isContainerItem: + if self.is_container_item: for item in self.get_children(): item.remove() self._remove_from_playlists() - DDBObject.remove(self) - # need to call this after DDBObject.remove(), so that the item info is - # there for ItemInfoFetcher to see. - app.item_info_cache.item_removed(self) + MetadataItemBase.remove(self) def setup_links(self): self.split_item() @@ -1919,14 +2103,6 @@ # deleted, so we were removed as well. (#11979) return _deleted_file_checker.schedule_check(self) - if self.screenshot and not fileutil.exists(self.screenshot): - logging.warn("file disappeared: %s", self.screenshot) - self.screenshot = None - self.signal_change() - if self.cover_art and not fileutil.exists(self.cover_art): - logging.warn("file disappeared: %s", self.cover_art) - self.cover_art = None - self.signal_change() def check_deleted(self): """Check whether the item's file has been deleted outside of miro. 
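schedule_check() defers the existence test to DeletedFileChecker (whose body appears later in this diff) so that the check runs from an idle callback rather than inside setup_new(), per the #17344 note above. The general schedule-then-check-later pattern, as a standalone sketch with hypothetical names:

class DeferredChecker(object):
    """Sketch: collect items, then verify their files in one idle callback."""
    def __init__(self, add_idle):
        self.add_idle = add_idle  # e.g. something like eventloop.add_idle
        self.pending = set()
        self.scheduled = False

    def schedule_check(self, item):
        self.pending.add(item)
        if not self.scheduled:
            self.scheduled = True
            self.add_idle(self.run_checks, 'check deleted files')

    def run_checks(self):
        self.scheduled = False
        items, self.pending = self.pending, set()
        for item in items:
            # check_deleted() expires the item if its file is gone
            item.check_deleted()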
@@ -1937,7 +2113,7 @@ """ if not self.id_exists(): return True - if (self.isContainerItem is not None and + if (self.is_container_item is not None and not fileutil.exists(self.get_filename()) and not self._allow_nonexistent_paths): self.expire() @@ -1948,9 +2124,12 @@ try: return self._downloader except AttributeError: - dler = downloader.get_existing_downloader(self) - if dler is not None: - dler.add_item(self) + if self.downloader_id is None: + dler = None + else: + dler = downloader.get_existing_downloader(self) + if dler is not None: + dler.add_item(self) self._downloader = dler return dler downloader = property(_get_downloader) @@ -1993,31 +2172,31 @@ """An Item that exists as a local file """ def setup_new(self, filename, feed_id=None, parent_id=None, - offsetPath=None, deleted=False, fp_values=None, + offset_path=None, deleted=False, fp_values=None, channel_title=None, mark_seen=False): if fp_values is None: fp_values = fp_values_for_file(filename) Item.setup_new(self, fp_values, feed_id=feed_id, parent_id=parent_id, - eligibleForAutoDownload=False, channel_title=channel_title) + eligible_for_autodownload=False, channel_title=channel_title) self.is_file_item = True check_f(filename) filename = fileutil.abspath(filename) self.set_filename(filename) self.set_release_date() self.deleted = deleted - self.offsetPath = offsetPath - self.shortFilename = clean_filename(os.path.basename(self.filename)) + self.offset_path = offset_path + self.short_filename = clean_filename(os.path.basename(self.filename)) self.was_downloaded = False if mark_seen: - self.watchedTime = datetime.now() - self.seen = True + self.watched_time = datetime.now() if not fileutil.isdir(self.filename): # If our file isn't a directory, then we know we are definitely # not a container item. Note that the opposite isn't true in the # case where we are a directory with only 1 file inside. - self.isContainerItem = False - if not self.deleted: - self.check_media_file() + self.is_container_item = False + # FileItems are never considered new. The new flag only really makes + # sense inside a feed. + self.new = False self.split_item() # FileItem downloaders are always None @@ -2027,7 +2206,7 @@ def get_state(self): if self.deleted: return u"expired" - elif self.get_seen(): + elif self.get_watched(): return u"saved" else: return u"newly-downloaded" @@ -2058,7 +2237,7 @@ self.confirm_db_thread() if self.deleted: return u'expired' - elif not self.get_seen(): + elif not self.get_watched(): return u'newly-downloaded' if self.parent_id and self.get_parent().get_expiring(): @@ -2072,12 +2251,14 @@ def show_save_button(self): return False - def get_viewed(self): - return True - def is_external(self): return self.parent_id is None + def _look_for_downloader(self): + # we don't need a database query to know that there's no downloader + # for us. + return + def expire(self): self.confirm_db_thread() if self.has_parent(): @@ -2085,7 +2266,7 @@ # already deleted. 
try: old_parent = self.get_parent() - except ObjectNotFoundError: + except database.ObjectNotFoundError: old_parent = None else: old_parent = None @@ -2103,24 +2284,35 @@ self.make_deleted() if old_parent is not None and old_parent.get_children().count() == 0: old_parent.expire() + if app.local_metadata_manager.path_in_system(self.filename): + app.local_metadata_manager.remove_file(self.filename) + + def remove(self): + if app.local_metadata_manager.path_in_system(self.filename): + app.local_metadata_manager.remove_file(self.filename) + Item.remove(self) def make_deleted(self): - self.delete_external_metadata() + if app.local_metadata_manager.path_in_system(self.filename): + app.local_metadata_manager.remove_file(self.filename) self._remove_from_playlists() - self.downloadedTime = None + self.downloaded_time = None # Move to the manual feed, since from Miro's point of view the file is # no longer part of a feed, or torrent container. self.parent_id = None self.feed_id = models.Feed.get_manual_feed().id self.deleted = True - self.mdp_state = moviedata.State.UNSEEN + self._calc_parent_title() self.signal_change() def make_undeleted(self): self.deleted = False - self.mdp_state = moviedata.State.UNSEEN - self.check_media_file() self.signal_change() + if not app.local_metadata_manager.path_in_system(self.filename): + app.local_metadata_manager.add_file(self.filename) + else: + logging.warn("Item.make_undeleted: path exists in " + "MetadataManager (%r)" % self.filename) def delete_files(self): if self.has_parent(): @@ -2142,38 +2334,34 @@ def download(self, autodl=False): self.make_undeleted() - def set_filename(self, filename): - Item.set_filename(self, filename) - def set_release_date(self): try: - self.releaseDateObj = datetime.fromtimestamp( + self.release_date = datetime.fromtimestamp( fileutil.getmtime(self.filename)) except (OSError, ValueError): logging.warn("Error setting release date:\n%s", traceback.format_exc()) - self.releaseDateObj = datetime.now() + self.release_date = datetime.now() def get_release_date(self): if self.parent_id: - return self.get_parent().releaseDateObj + return self.get_parent().release_date else: - return self.releaseDateObj + return self.release_date def migrate(self, newdir): self.confirm_db_thread() if self.parent_id: parent = self.get_parent() - self.filename = os.path.join(parent.get_filename(), - self.offsetPath) - self.signal_change() + self.file_moved(os.path.join(parent.get_filename(), + self.offset_path)) return - if self.shortFilename is None: + if self.short_filename is None: logging.warn("""\ -can't migrate download because we don't have a shortFilename! +can't migrate download because we don't have a short_filename! filename was %s""", stringify(self.filename)) return - new_filename = os.path.join(newdir, self.shortFilename) + new_filename = os.path.join(newdir, self.short_filename) if self.filename == new_filename: return if fileutil.exists(self.filename): @@ -2181,31 +2369,35 @@ # start to migrate. This helps ensure that the destination we're # migrating too is not already taken. src = self.filename - if fileutil.isdir(src): - new_filename = next_free_directory(new_filename) - fp = None + try: + is_dir = fileutil.isdir(src) + if is_dir: + new_filename = next_free_directory(new_filename) + fp = None + else: + new_filename, fp = next_free_filename(new_filename) + fp.close() # clean up if we called next_free_filename() + except ValueError: + func = 'next_free_directory' if is_dir else 'next_free_filename' + logging.warn('migrate_file: %s failed. 
Filename %r ' + 'candidate %r', func, src, new_filename) else: - new_filename, fp = next_free_filename(new_filename) - def callback(): - self.filename = new_filename - self.signal_change() - fileutil.migrate_file(src, new_filename, callback) - if fp is not None: - fp.close() # clean up if we called next_free_filename() + def callback(): + self.file_moved(new_filename) + fileutil.migrate_file(src, new_filename, callback) elif fileutil.exists(new_filename): - self.filename = new_filename - self.signal_change() + self.file_moved(new_filename) self.migrate_children(newdir) def setup_links(self): - if self.shortFilename is None: + if self.short_filename is None: if self.parent_id is None: - self.shortFilename = clean_filename( + self.short_filename = clean_filename( os.path.basename(self.filename)) else: parent_file = self.get_parent().get_filename() if self.filename.startswith(parent_file): - self.shortFilename = clean_filename( + self.short_filename = clean_filename( self.filename[len(parent_file):]) else: logging.warn("%s is not a subdirectory of %s", @@ -2228,40 +2420,6 @@ def fp_values_for_file(filename, title=None, description=None): return FileFeedParserValues(filename, title, description) -def update_incomplete_movie_data(): - IncompleteMovieDataUpdator() - # this will stay around because it connects to the movie data updater's - # signal. Once it disconnects from the signal, we clean it up - -class IncompleteMovieDataUpdator(object): - """Finds local Items that have not been examined by MDP, and queues them. - """ - BATCH_SIZE = 10 - def __init__(self): - self.done = False - self.handle = moviedata.movie_data_updater.connect('queue-empty', - self.on_queue_empty) - self.do_some_updates() - - def do_some_updates(self): - """Update some incomplete files, or set done=True if there are none. - - Mutagen runs as part of the item creation process, so we need only check - whether MDP has examined a file here. - """ - items_queued = 0 - for item in Item.incomplete_mdp_view(limit=self.BATCH_SIZE): - item.check_media_file() - items_queued += 1 - self.done = items_queued < self.BATCH_SIZE - - def on_queue_empty(self, movie_data_updator): - if self.done: - movie_data_updator.disconnect(self.handle) - else: - eventloop.add_idle(self.do_some_updates, - 'update incomplete movie data') - class DeletedFileChecker(object): """Utility class that manages calling Item.check_deleted(). @@ -2321,6 +2479,300 @@ if self.items_to_check: self._ensure_run_checks_scheduled() +class DeviceItemChangeTracker(object): + """Track changes to DeviceItems and send the DeviceItemChanges message. 
+ """ + def __init__(self): + self.reset() + eventloop.connect_after('event-finished', self.after_event_finished) + + def reset(self): + self.added = collections.defaultdict(set) + self.changed = collections.defaultdict(set) + self.removed = collections.defaultdict(set) + self.changed_columns = collections.defaultdict(set) + self.changed_devices = set() + + def after_event_finished(self, event_loop, success): + self.send_changes() + + def send_changes(self): + for device_id in self.changed_devices: + m = messages.DeviceItemChanges(device_id, + self.added[device_id], + self.changed[device_id], + self.removed[device_id], + self.changed_columns[device_id]) + m.send_to_frontend() + self.reset() + + def on_item_added(self, item): + device_id = item.device_id + self.added[device_id].add(item.id) + self.changed_devices.add(device_id) + + def on_item_changed(self, item): + device_id = item.device_id + self.changed[device_id].add(item.id) + self.changed_columns[device_id].update(item.changed_attributes) + self.changed_devices.add(device_id) + + def on_item_removed(self, item): + device_id = item.device_id + self.removed[device_id].add(item.id) + self.changed_devices.add(device_id) + +class DeviceItem(MetadataItemBase): + """ + An item which lives on a device. There's a separate, per-device sqlite + database database. + """ + def __init__(self, *args, **kwargs): + # Normally we don't override DDBObject.__init__(), but this time we do + # for a couple reasons: + # - We need to set device_id + # - We want to make creating a new DeviceItem less awkward. Since + # the device is the first argument, we don't require the caller to + # also pass the DBInfo object, which is redundant. + if 'restored_data' not in kwargs: + # creating a new DeviceItem. We can get db_info from the device + # passed in + device = args[0] + kwargs['db_info'] = device.db_info + # set device_id + self.device_id = kwargs['db_info'].device_id + MetadataItemBase.__init__(self, *args, **kwargs) + + def setup_new(self, device, filename, sync_info=None, auto_sync=False): + """Create a new DeviceItem. + + :param device: DeviceInfo for the device + :param filename: path to the file, relative to the device mount + :param sync_info: ItemInfo that this was synced from + :param auto_sync: Was this item auto-synced? 
+ """ + self.init_metadata_attributes() + self.filename = filename + self.auto_sync = auto_sync + if sync_info is None: + self.watched_time = None + self.last_watched = None + self.creation_time = None + self.subtitle_encoding = None + self.release_date = None + self.parent_title = None + self.feed_url = None + self.license = None + self.rss_id = None + self.entry_title = None + self.torrent_title = None + self.entry_description = None + self.permalink = None + self.payment_link = None + self.comments_link = None + self.url = None + self.enclosure_size = None + self.enclosure_type = None + self.enclosure_format = None + self.resume_time = 0 + self.play_count = 0 + self.skip_count = 0 + local_path = None + else: + self.watched_time = sync_info.watched_time + self.last_watched = sync_info.last_watched + self.creation_time = sync_info.date_added + self.subtitle_encoding = sync_info.subtitle_encoding + self.release_date = sync_info.release_date + self.parent_title = sync_info.parent_title + self.feed_url = sync_info.feed_url + self.license = sync_info.license + self.rss_id = sync_info.rss_id + self.entry_title = sync_info.entry_title + self.torrent_title = sync_info.torrent_title + self.entry_description = sync_info.entry_description + self.permalink = sync_info.permalink + self.payment_link = sync_info.payment_link + self.comments_link = sync_info.comments_link + self.url = sync_info.url + self.enclosure_size = sync_info.enclosure_size + self.enclosure_type = sync_info.mime_type + self.enclosure_format = sync_info.enclosure_format + self.resume_time = sync_info.resume_time + self.play_count = sync_info.play_count + self.skip_count = sync_info.skip_count + local_path = sync_info.filename + + initial_metadata = device.metadata_manager.add_file(filename, + local_path) + self.update_from_metadata(initial_metadata) + # At this point, net_lookup_enabled is always False for DeviceItems + self.net_lookup_enabled = False + + # fill in missing data from the filesystem + fullpath = os.path.join(device.mount, self.filename) + self.size = os.path.getsize(fullpath) + self.calc_title() + if self.release_date is None or self.creation_time is None: + ctime = datetime.fromtimestamp(fileutil.getctime(fullpath)) + if self.release_date is None: + self.release_date = ctime + if self.creation_time is None: + self.creation_time = ctime + + @classmethod + def select_paths(cls, db_info): + """Select all paths that are present in the database. + + :returns: list of paths, relative to the device mount + """ + return cls.select(['filename'], db_info=db_info) + + @classmethod + def get_by_path(cls, path, db_info): + """Get a deviceItem for a given path.""" + view = cls.make_view('LOWER(filename)=LOWER(?)', + (filename_to_unicode(path),), + db_info=db_info) + return view.get_singleton() + + @classmethod + def items_for_paths(cls, path_list, db_info): + """Get all items for a list of paths. + + :returns: dict mapping lower-case paths to DeviceItems. There will be + one entry for each item in path_list that exists in the database. + """ + + path_list = [filename_to_unicode(p) for p in path_list] + path_map = {} + # It's possible for there to be more than 999 items in path_list. 
+ # Split up the query to avoid SQLite's host parameters limit + for paths in util.split_values_for_sqlite(path_list): + placeholders = ', '.join('LOWER(?)' for i in xrange(len(paths))) + view = cls.make_view('LOWER(filename) IN (%s)' % placeholders, + paths, db_info=db_info) + for i in view: + path_map[i.filename.lower()] = i + return path_map + + @classmethod + def auto_sync_view(cls, db_info): + return cls.make_view('auto_sync', db_info=db_info) + + @classmethod + def get_by_url(cls, url, db_info): + return cls.make_view('url=?', (url,), db_info=db_info).get_singleton() + + @classmethod + def item_exists(cls, item_info, db_info): + """Check if a DeviceItem has already been created for an ItemInfo.""" + + # Item URL is a sure way to match + if cls.make_view('url=?', (item_info.url,), db_info=db_info).count() > 0: + return True + # If a bunch of qualities are the same, we'll call it close + # enough + if cls.make_view('title=? AND description=? AND size=? AND ' + 'duration=?', (item_info.title, item_info.description, + item_info.size, item_info.duration_ms), + db_info=db_info).count() > 0: + return True + return False + + def delete_and_remove(self, device): + fullpath = os.path.join(device.mount, self.filename) + try: + fileutil.delete(fullpath) + except EnvironmentError: + logging.warn("DeviceItem.delete_and_remove: Error removing %s", + fullpath) + # FIXME: should also delete cover art and screenshot files + self.remove(device) + + def remove(self, device): + if device.metadata_manager.path_in_system(self.filename): + device.metadata_manager.remove_file(self.filename) + MetadataItemBase.remove(self) + +class SharingItemChangeTracker(object): + """Track changes to SharingItem and send the SharingItemChanges message. + """ + def __init__(self): + self.reset() + eventloop.connect_after('event-finished', self.after_event_finished) + + def reset(self): + self.added = collections.defaultdict(set) + self.changed = collections.defaultdict(set) + self.removed = collections.defaultdict(set) + self.changed_columns = collections.defaultdict(set) + self.changed_playlists = set() + self.changed_shares = set() + + def after_event_finished(self, eventloop, success): + self.send_changes() + + def send_changes(self): + # TODO: implement this + for share_id in self.changed_shares: + msg = messages.SharingItemChanges( + share_id, + self.added[share_id], + self.changed[share_id], + self.removed[share_id], + self.changed_columns[share_id], + share_id in self.changed_playlists) + msg.send_to_frontend() + self.reset() + + def playlist_changed(self, share_id): + self.changed_playlists.add(share_id) + self.changed_shares.add(share_id) + + def on_item_added(self, item): + self.added[item.share_id].add(item.id) + self.changed_shares.add(item.share_id) + + def on_item_changed(self, item): + self.changed[item.share_id].add(item.id) + self.changed_columns[item.share_id].update(item.changed_attributes) + self.changed_shares.add(item.share_id) + + def on_item_removed(self, item): + self.removed[item.share_id].add(item.id) + self.changed_shares.add(item.share_id) + +class SharingItem(ItemBase): + """Item on a DAAP share.""" + def __init__(self, share, *args, **kwargs): + self.share_id = share.id + kwargs['db_info'] = share.db_info + ItemBase.__init__(self, *args, **kwargs) + + def setup_new(self, daap_id, **kwargs): + self.daap_id = daap_id + self.file_format = self.duration = self.size = self.artist = None + self.album_artist = self.album = self.year = self.genre = None + self.track = self.kind = self.show = 
self.season_number = None + self.episode_id = self.episode_number = None + self.description = None + self.parent_title = None + self.__dict__.update(kwargs) + + def setup_restored(self): + # we should never call setup_restored() since the databases are always + # created fresh + app.controller.failed_soft("creating sharing item", + "setup_restored() called") + self.remove() + + @classmethod + def get_by_daap_id(cls, daap_id, db_info=None): + view = cls.make_view('daap_id=?', (daap_id,), db_info=db_info) + return view.get_singleton() + +_deleted_file_checker = None def setup_deleted_checker(): global _deleted_file_checker @@ -2330,17 +2782,17 @@ _deleted_file_checker.start_checks() def fix_non_container_parents(): - """Make sure all items referenced by parent_id have isContainerItem set + """Make sure all items referenced by parent_id have is_container_item set Bug #12906 has a database where this was not so. """ - where_sql = ("(isContainerItem = 0 OR isContainerItem IS NULL) AND " + where_sql = ("(is_container_item = 0 OR is_container_item IS NULL) AND " "id IN (SELECT parent_id FROM item)") for item in Item.make_view(where_sql): - logging.warn("parent_id points to %s but isContainerItem == %r. " - "Setting isContainerItem to True", item.id, - item.isContainerItem) - item.isContainerItem = True + logging.warn("parent_id points to %s but is_container_item == %r. " - "Setting is_container_item to True", item.id, + "Setting is_container_item to True", item.id, + item.is_container_item) + item.is_container_item = True item.signal_change() def move_orphaned_items(): @@ -2365,3 +2817,44 @@ if parentless_items: databaselog.info("Moved items to manual feed because their parent was " "gone: %s", ', '.join(parentless_items)) + +def setup_metadata_manager(cover_art_dir=None, screenshot_dir=None): + """Setup the MetadataManager for Items and FileItems.""" + if cover_art_dir is None: + cover_art_dir = app.config.get(prefs.COVER_ART_DIRECTORY) + if screenshot_dir is None: + icon_cache_dir = app.config.get(prefs.ICON_CACHE_DIRECTORY) + screenshot_dir = os.path.join(icon_cache_dir, 'extracted') + app.local_metadata_manager = metadata.LibraryMetadataManager( + cover_art_dir, screenshot_dir) + app.local_metadata_manager.connect('new-metadata', on_new_metadata) + +def setup_change_tracker(): + Item.change_tracker = ItemChangeTracker() + DeviceItem.change_tracker = DeviceItemChangeTracker() + SharingItem.change_tracker = SharingItemChangeTracker() + +def on_new_metadata(metadata_manager, new_metadata): + # Get all items that have changed using one query. This is much faster + # than calling items_with_path_view() for each path. + path_map = collections.defaultdict(list) + all_paths = [filename_to_unicode(p) for p in new_metadata.keys()] + # It's possible for there to be more than 999 items in all_paths. Split + # up the query to avoid SQLite's host parameters limit + for paths in util.split_values_for_sqlite(all_paths): + placeholders = ', '.join('LOWER(?)' for i in xrange(len(paths))) + view = Item.make_view('LOWER(filename) IN (%s)' % placeholders, paths) + for i in view: + path_map[i.filename].append(i) + + for path, metadata in new_metadata.iteritems(): + for item in path_map[path]: + # optimize signal_change. An item will only change views because + # of new metadata if it changes file type. + can_change_views = (metadata.get('file_type') != item.file_type) + item.update_from_metadata(metadata) + item.signal_change(can_change_views=can_change_views) + +def update_incomplete_metadata(): + """Restart metadata updates for our items.
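Both DeviceItem.items_for_paths() and on_new_metadata() above batch their IN-clauses through util.split_values_for_sqlite(), because SQLite rejects statements with more than 999 host parameters by default. The helper itself is defined outside this diff; a minimal sketch of the assumed behavior, reusing the 950-value batch size that the removed ManualItemTracker code in messagehandler.py (further below) used for the same reason:

def split_values_for_sqlite(value_list, chunk_size=950):
    # Sketch only: the real helper lives in miro/util.py, outside this
    # diff.  Yield slices small enough that one "?" placeholder per
    # value stays under SQLite's 999 host-parameter default.
    for pos in xrange(0, len(value_list), chunk_size):
        yield value_list[pos:pos + chunk_size]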
""" + app.local_metadata_manager.restart_incomplete() diff -Nru miro-4.0.4/lib/itemsource.py miro-6.0/lib/itemsource.py --- miro-4.0.4/lib/itemsource.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/itemsource.py 2013-04-05 16:02:42.000000000 +0000 @@ -32,33 +32,10 @@ from miro import app from miro import database -from miro.devices import DeviceItem from miro import item from miro import messages -from miro import signals - -class ItemSource(signals.SignalEmitter): - """ - This represents a list of audio/video items. When changes occur, various - signals are sent to allow listeners to update their view of this source. - """ - def __init__(self): - signals.SignalEmitter.__init__(self, 'added', 'changed', 'removed') - - # Methods to implement for sources - def fetch_all(self): - """ - Returns a list of ItemInfo objects representing all the A/V items this - source knows about. - """ - raise NotImplementedError - - def unlink(self): - """ - Do any cleanup for when this source is disappearing. - """ - pass - +from miro import metadata +from miro import models class ItemHandler(object): """ @@ -133,138 +110,6 @@ """ logging.warn("%s: not handling delete", self) -class DatabaseItemSource(ItemSource): - """ - An ItemSource which pulls its data from the database, along with - ItemInfoCache. - """ - # bump this whenever you change the ItemInfo class, or change one of the - # functions that ItemInfo uses to get it's attributes (for example - # Item.get_description()). - VERSION = 34 - - def __init__(self, view): - ItemSource.__init__(self) - self.view = view - self.view.fetcher = database.IDOnlyFetcher() - self.tracker = self.view.make_tracker() - self.tracker.connect('added', self._on_tracker_added) - self.tracker.connect('changed', self._on_tracker_changed) - self.tracker.connect('removed', self._on_tracker_removed) - - @staticmethod - def _item_info_for(item): - info = { - 'feed_id': item.feed_id, - 'feed_name': item.get_source(), - 'feed_url': item.get_feed_url(), - 'state': item.get_state(), - 'release_date': item.get_release_date(), - 'size': item.get_size(), - 'duration': item.get_duration_value(), - 'resume_time': item.resumeTime, - 'permalink': item.get_link(), - 'commentslink': item.get_comments_link(), - 'payment_link': item.get_payment_link(), - 'has_shareable_url': item.has_shareable_url(), - 'can_be_saved': item.show_save_button(), - 'pending_manual_dl': item.is_pending_manual_download(), - 'pending_auto_dl': item.is_pending_auto_download(), - 'item_viewed': item.get_viewed(), - 'downloaded': item.is_downloaded(), - 'is_external': item.is_external(), - 'video_watched': item.get_seen(), - 'video_path': item.get_filename(), - 'thumbnail': item.get_thumbnail(), - 'thumbnail_url': item.get_thumbnail_url(), - 'file_format': item.get_format(), - 'license': item.get_license(), - 'file_url': item.get_url(), - 'is_container_item': item.isContainerItem, - 'is_file_item': item.is_file_item, - 'is_playable': item.is_playable(), - 'file_type': item.file_type, - 'subtitle_encoding': item.subtitle_encoding, - 'media_type_checked': item.media_type_checked, - 'seeding_status': item.torrent_seeding_status(), - 'mime_type': item.enclosure_type, - 'date_added': item.get_creation_time(), - 'last_played': item.get_watched_time(), - 'last_watched': item.lastWatched, - 'downloaded_time': item.downloadedTime, - 'children': [], - 'expiration_date': None, - 'download_info': None, - 'leechers': None, - 'seeders': None, - 'up_rate': None, - 'down_rate': None, - 'up_total': None, - 'down_total': None, - 
'up_down_ratio': 0.0, - 'remote': False, - 'device': None, - 'source_type': 'database', - 'play_count': item.play_count, - 'skip_count': item.skip_count, - 'auto_rating': item.get_auto_rating(), - 'is_playing': item.is_playing(), - } - info.update(item.get_iteminfo_metadata()) - if item.isContainerItem: - info['children'] = [DatabaseItemSource._item_info_for(i) for i in - item.get_children()] - if not item.keep and not item.is_external(): - info['expiration_date'] = item.get_expiration_time() - - if item.downloader: - info['download_info'] = messages.DownloadInfo(item.downloader) - elif info['state'] == 'downloading': - info['download_info'] = messages.PendingDownloadInfo() - - ## Torrent-specific stuff - if item.looks_like_torrent() and hasattr(item.downloader, 'status'): - status = item.downloader.status - if item.is_transferring(): - # gettorrentdetails only - info['leechers'] = status.get('leechers', 0) - info['seeders'] = status.get('seeders', 0) - info['connections'] = status.get('connections', 0) - info['up_rate'] = status.get('upRate', 0) - info['down_rate'] = status.get('rate', 0) - - # gettorrentdetailsfinished & gettorrentdetails - info['up_total'] = status.get('uploaded', 0) - info['down_total'] = status.get('currentSize', 0) - if info['down_total'] > 0: - info['up_down_ratio'] = (float(info['up_total']) / - info['down_total']) - - return messages.ItemInfo(item.id, **info) - - def fetch_all(self): - return [self._get_info(id_) for id_ in self.view] - - def _get_info(self, id_): - return app.item_info_cache.get_info(id_) - - def _on_tracker_added(self, tracker, id_): - self.emit("added", self._get_info(id_)) - - def _on_tracker_changed(self, tracker, id_): - self.emit("changed", self._get_info(id_)) - - def _on_tracker_removed(self, tracker, id_): - self.emit("removed", id_) - - def unlink(self): - self.tracker.unlink() - - @staticmethod - def get_by_id(id_): - # XXX should this be part of the ItemSource API? - return app.item_info_cache.get_info(id_) - class DatabaseItemHandler(ItemHandler): def mark_watched(self, info): """ @@ -273,7 +118,7 @@ """ try: item_ = item.Item.get_by_id(info.id) - item_.mark_item_seen() + item_.mark_watched() except database.ObjectNotFoundError: logging.warning("mark_watched: can't find item by id %s" % info.id) @@ -284,7 +129,7 @@ """ try: item_ = item.Item.get_by_id(info.id) - item_.mark_item_unseen() + item_.mark_unwatched() except database.ObjectNotFoundError: logging.warning("mark_unwatched: can't find item by id %s" % ( info.id,)) @@ -380,152 +225,6 @@ finally: app.bulk_sql_manager.finish() -class SharingItemSource(ItemSource): - """ - An ItemSource which pulls data from a remote media share. - XXX should we use the database somehow so that the OS can decide - XXX can decide to write this data to secondary storage if it feels - XXX like it? 
- """ - def __init__(self, tracker, playlist_id=None): - ItemSource.__init__(self) - self.tracker = tracker - self.playlist_id = playlist_id - self.include_podcasts = True - self.signal_handles = [ - self.tracker.connect('added', self._on_tracker_added), - self.tracker.connect('changed', self._on_tracker_changed), - self.tracker.connect('removed', self._on_tracker_removed), - ] - self.info_cache = tracker.info_cache - - def _item_info_for(self, item): - info = dict( - item_source=self, - source_type='sharing', - feed_id = item.feed_id, - feed_name = None, - feed_url = None, - state = u'saved', - release_date = item.get_release_date(), - size = item.size, - duration = item.duration, - resume_time = 0, - permalink = item.permalink, - commentslink = item.comments_link, - payment_link = item.payment_link, - has_shareable_url = bool(item.url), - can_be_saved = False, - pending_manual_dl = False, - pending_auto_dl = False, - expiration_date = None, - item_viewed = True, - downloaded = True, - is_external = False, - video_watched = True, - video_path = item.get_filename(), - thumbnail = item.get_thumbnail(), - thumbnail_url = item.thumbnail_url or u'', - file_format = item.file_format, - license = item.license, - file_url = item.url or u'', - is_container_item = False, - is_file_item = False, - is_playable = True, - children = [], - file_type = item.file_type, - subtitle_encoding = item.subtitle_encoding, - seeding_status = None, - media_type_checked = True, - mime_type = item.enclosure_type, - artist = item.artist, - auto_rating = None, - date_added = item.get_creation_time(), - last_played = item.get_creation_time(), - download_info = None, - device = None, - remote = True, - leechers = None, - seeders = None, - up_rate = None, - down_rate = None, - up_total = None, - down_total = None, - up_down_ratio = 0, - play_count=0, - skip_count=0, - host=item.host, - port=item.port, - is_playing=False) - info.update(item.get_iteminfo_metadata()) - return messages.ItemInfo(item.id, **info) - - def _ensure_info(self, obj): - if not isinstance(obj, messages.ItemInfo): - self.info_cache[obj.id] = info = self._item_info_for(obj) - return info - else: - return obj - - def is_podcast(self, item): - try: - return item.kind == 'podcast' - except AttributeError: - pass - return False - - def _on_tracker_added(self, tracker, playlist, item): - if self.playlist_id == playlist: - if (self.include_podcasts or - not self.include_podcasts and not self.is_podcast(item)): - self.emit("added", self._ensure_info(item)) - - def _on_tracker_changed(self, tracker, playlist, item): - if self.playlist_id == playlist: - if (self.include_podcasts or - not self.include_podcasts and not self.is_podcast(item)): - self.emit("changed", self._ensure_info(item)) - - def _on_tracker_removed(self, tracker, playlist, item): - # Only nuke if we are removing the item from the library. - if playlist == None: - try: - del self.info_cache[item.id] - except KeyError: - pass - if playlist == self.playlist_id: - if (self.include_podcasts or - not self.include_podcasts and not self.is_podcast(item)): - self.emit("removed", item.id) - - def fetch_all(self): - # Always call _ensure_info() on the item when fetching. - # - # This is a problem for shared items because the database is not - # ready until some amount of time after the share is first accessed. - # The sharing API is designed to always either returns 0 items or - # a fully populated media listing. In the former case the upper layer - # (i.e. this one) is notified via the 'added' signal. 
- # - # But this ItemSource object is a transient object that only exists - # for the lifetime a display is active. If the 'added' signal was - # called and the tab has switched away and hence is no longer active, - # then it won't be in the ItemInfo cache. - # - # _ensure_info() ensures that we either fetch something from the - # cache or if it does not exist then create a new copy so this should - # be okay. - return [self._ensure_info(item) for item in - self.tracker.get_items(playlist_id=self.playlist_id) - if self.include_podcasts or - (not self.include_podcasts and - not self.is_podcast(item))] - - def unlink(self): - for handle in self.signal_handles: - self.tracker.disconnect(handle) - self.signal_handles = [] - class SharingItemHandler(ItemHandler): def set_is_playing(self, info, is_playing): """ @@ -542,192 +241,17 @@ info.is_playing = is_playing info.item_source.emit("changed", info) -class DeviceItemSource(ItemSource): - """ - An ItemSource which pulls its data from a device's JSON database. - """ - def __init__(self, device): - ItemSource.__init__(self) - self.device = device - self.info_cache = app.device_manager.info_cache[device.mount] - self.type = device.id.rsplit('-', 1)[1] - self.signal_handles = [ - device.database.connect('item-added', self._on_device_added), - device.database.connect('item-changed', self._on_device_changed), - device.database.connect('item-removed', self._on_device_removed), - ] - - def _ensure_info(self, item): - if not isinstance(item, messages.ItemInfo): - info = self.info_cache[item.id] = self._item_info_for(item) - return info - else: - return item - - def _on_device_added(self, database, item): - if item.file_type != self.type: - return # don't care about other types of items - self.emit("added", self._ensure_info(item)) - - def _on_device_changed(self, database, item): - existed = was_type = False - if item.id in self.info_cache: - existed = True - was_type = ( - self.info_cache[item.id].file_type == self.type) - is_type = (item.file_type == self.type) - if existed: - if was_type and not is_type: - # type changed alway from this source - self.emit('removed', item.id) - return - elif is_type and not was_type: - # added to this source - self.emit('added', self._ensure_info(item)) - return - if is_type: - if existed: - self.emit("changed", self._ensure_info(item)) - else: - self.emit('added', self._ensure_info(item)) - - def _on_device_removed(self, database, item): - was_type = False - if item.id in self.info_cache: - was_type = ( - self.info_cache[item.id].file_type == self.type) - if item.file_type == self.type or was_type: - self.emit("removed", item.id) - self.info_cache.pop(item.video_path, None) - - def _item_info_for(self, item): - if item.duration is None: - duration = None - else: - duration = item.duration / 1000 - info = dict( - source_type='device', - feed_id = item.feed_id, - feed_name = (item.feed_name is None and item.feed_name or - self.device.name), - feed_url = None, - state = u'saved', - release_date = item.get_release_date(), - size = item.size, - duration = duration, - resume_time = 0, - permalink = item.permalink, - commentslink = item.comments_link, - payment_link = item.payment_link, - has_shareable_url = (item.url and - not item.url.startswith('file://')), - can_be_saved = False, - pending_manual_dl = False, - pending_auto_dl = False, - expiration_date = None, - item_viewed = True, - downloaded = True, - is_external = False, - video_watched = True, - media_type_checked = item.media_type_checked, - video_path = 
item.get_filename(), - thumbnail = item.get_thumbnail(), - thumbnail_url = item.thumbnail_url or u'', - file_format = item.file_format, - license = item.license, - file_url = item.url or u'', - is_container_item = False, - is_file_item = False, - is_playable = True, - children = [], - file_type = item.file_type, - subtitle_encoding = item.subtitle_encoding, - seeding_status = None, - mime_type = item.enclosure_type, - artist = item.artist, - date_added = item.get_creation_time(), - last_played = item.get_creation_time(), - download_info = None, - device = item.device, - remote = False, - leechers = None, - seeders = None, - up_rate = None, - down_rate = None, - up_total = None, - down_total = None, - up_down_ratio = 0, - play_count=0, - skip_count=0, - auto_rating=0, - is_playing=item.is_playing) - info.update(item.get_iteminfo_metadata()) - return messages.ItemInfo(item.id, **info) - - def fetch_all(self): - # avoid lookups - info_cache = self.info_cache - type_ = self.type - device = self.device - _item_info_for = self._item_info_for - if type_ not in self.device.database: - # race: we can get here before clean_database() sets us up - return [] - data = self.device.database[type_] - - def _cache(id_): - if id_ in info_cache: - return info_cache[id_] - else: - info = info_cache[id_] = _item_info_for( - DeviceItem( - video_path=id_, - file_type=type_, - device=device, - **data[id_])) - return info - - def _all_videos(): - for id_ in list(data.keys()): - try: - yield _cache(id_) - except (OSError, IOError): # couldn't find the file - pass - - return list(_all_videos()) - - def unlink(self): - for handle in self.signal_handles: - self.device.database.disconnect(handle) - self.signal_handles = [] - class DeviceItemHandler(ItemHandler): def delete(self, info): - device = info.device - try: - if os.path.exists(info.video_path): - os.unlink(info.video_path) - except (OSError, IOError): - # we can still fail to delete an item, log the error - logging.warn('failed to delete %r', info.video_path, - exc_info=True) - else: - del device.database[info.file_type][info.id] - if info.thumbnail and info.thumbnail.startswith(device.mount): - try: - os.unlink(info.thumbnail) - except (OSError, IOError): - pass # ignore errors - if info.cover_art and info.cover_art.startswith(device.mount): - try: - os.unlink(info.cover_art) - except (OSError, IOError): - pass # ignore errors - device.database.emit('item-removed', info) + device = info.device_info + device_item = models.DeviceItem.get_by_id(info.id, + db_info=device.db_info) + device_item.delete_and_remove(device) + device.remaining += device_item.size def bulk_delete(self, info_list): # calculate all the devices involved - all_devices = set(info.device for info in info_list) + all_devices = set(info.device_info for info in info_list) # set bulk mode, delete, then unset bulk mode for device in all_devices: device.database.set_bulk_mode(True) @@ -736,6 +260,8 @@ self.delete(info) finally: for device in all_devices: + message = messages.DeviceChanged(device) + message.send_to_frontend() device.database.set_bulk_mode(False) def set_is_playing(self, info, is_playing): @@ -746,10 +272,10 @@ if info.is_playing != is_playing: # modifying the ItemInfo in-place messes up the Tracker's # object-changed logic, so make a copy - info_cache = app.device_manager.info_cache[info.device.mount] + info_cache = app.device_manager.info_cache[info.device_info.mount] info = info_cache[info.id] = messages.ItemInfo( info.id, **info.__dict__) - database = info.device.database + 
database = info.device_info.database info.is_playing = is_playing database[info.file_type][info.id][u'is_playing'] = is_playing database.emit('item-changed', info) diff -Nru miro-4.0.4/lib/libdaap/libdaap.py miro-6.0/lib/libdaap/libdaap.py --- miro-4.0.4/lib/libdaap/libdaap.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/libdaap/libdaap.py 2013-04-05 16:02:42.000000000 +0000 @@ -43,7 +43,6 @@ import SocketServer import threading import httplib -from httplib import _CS_IDLE, _CS_REQ_STARTED, _CS_REQ_SENT, _UNKNOWN import gzip try: from cStringIO import StringIO @@ -382,7 +381,7 @@ seekend = 0 rc = DAAP_PARTIAL_CONTENT generation = threading.current_thread().generation - file_obj = self.server.backend.get_file(item_id, generation, ext, + file_obj, hint = self.server.backend.get_file(item_id, generation, ext, self.get_session(), self.get_request_path, offset=seekpos, chunk=chunk) @@ -390,7 +389,7 @@ return (DAAP_FILENOTFOUND, [], extra_headers) self.log_message('daap server: streaming with filobj %s', file_obj) # Return a special response, the encode_reponse() will handle correctly - return (rc, [(file_obj, seekpos, seekend)], extra_headers) + return (rc, [(file_obj, hint, seekpos, seekend)], extra_headers) def get_request_path(self, itemid, enclosure): # XXX @@ -863,6 +862,9 @@ # We've been disconnected, or server gave incorrect response? except (IOError, ValueError): self.disconnect() + except AttributeError: + logging.debug('AttributeError caught; probably during shutdown. ' + 'Ignoring.') # Generic check for http response. ValueError() on unexpected response. def check_reply(self, response, http_code=httplib.OK, callback=None, diff -Nru miro-4.0.4/lib/libdaap/pybonjour.py miro-6.0/lib/libdaap/pybonjour.py --- miro-4.0.4/lib/libdaap/pybonjour.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/libdaap/pybonjour.py 2013-04-05 16:02:42.000000000 +0000 @@ -246,7 +246,7 @@ -class BonjourError(Exception): +class BonjourError(StandardError): """ @@ -288,8 +288,8 @@ def __init__(self, errorCode): self.errorCode = errorCode - Exception.__init__(self, - (errorCode, self._errmsg.get(errorCode, 'unknown'))) + StandardError.__init__(self, + (errorCode, self._errmsg.get(errorCode, 'unknown'))) diff -Nru miro-4.0.4/lib/libdaap/subr.py miro-6.0/lib/libdaap/subr.py --- miro-4.0.4/lib/libdaap/subr.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/libdaap/subr.py 2013-04-05 16:02:42.000000000 +0000 @@ -103,7 +103,10 @@ """ DEFAULT_CHUNK_SIZE = 128 * 1024 - def __init__(self, file_obj, start=0, end=0, chunksize=DEFAULT_CHUNK_SIZE): + def __init__(self, file_obj, hint, start=0, end=0, + chunksize=DEFAULT_CHUNK_SIZE): + hint = os.path.basename(hint) if hint else '' + self.file_hint = hint self.chunksize = chunksize self.file_obj = file_obj self.end = end @@ -154,6 +157,9 @@ headers = [] if self.rangetext: headers.append(('Content-Range', self.get_rangetext())) + if self.file_hint: + headers.append(('Content-disposition', + 'attachment; filename=%s' % self.file_hint)) return headers def get_rangetext(self): @@ -317,8 +323,8 @@ except ValueError: # This is probably a file. Just pass up to the # caller and let the caller deal with it. 
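The libdaap changes above thread a filename hint through the streaming path: backend.get_file() now returns a (file_obj, hint) pair instead of a bare file object, and ChunkedStreamObj advertises the hint's basename in a Content-disposition header. A backend honoring the new signature might look like this sketch (the class and its lookup helper are illustrative, not part of the diff):

class ExampleDaapBackend(object):
    # Illustrative only; shows the (file_obj, hint) shape that the request
    # handler above unpacks as "file_obj, hint = ...get_file(...)".
    def get_file(self, item_id, generation, ext, session, request_path,
                 offset=0, chunk=None):
        path = self.path_for_item_id(item_id)  # hypothetical lookup helper
        if path is None:
            return None, None  # handler then replies DAAP_FILENOTFOUND
        file_obj = open(path, 'rb')
        if offset:
            file_obj.seek(offset)
        # os.path.basename(hint) becomes the "Content-disposition:
        # attachment; filename=..." header via ChunkedStreamObj.get_headers().
        return file_obj, path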
- [(file_obj, start, end)] = reply - blob = ChunkedStreamObj(file_obj, start, end) + [(file_obj, hint, start, end)] = reply + blob = ChunkedStreamObj(file_obj, hint, start, end) return blob def split_url_path(urlpath): diff -Nru miro-4.0.4/lib/messagehandler.py miro-6.0/lib/messagehandler.py --- miro-4.0.4/lib/messagehandler.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/messagehandler.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,6 +30,7 @@ """``miro.messagehandler``` -- Backend message handler """ +import copy import logging import time import os @@ -38,6 +39,7 @@ from miro import autoupdate from miro import database from miro import devices +from miro import conversions from miro import downloader from miro import eventloop from miro import feed @@ -53,6 +55,7 @@ from miro import subscription from miro import tabs from miro import opml +from miro.data.item import fetch_item_infos from miro.widgetstate import DisplayState, ViewState, GlobalState from miro.feed import Feed, lookup_feed from miro.gtcache import gettext as _ @@ -64,8 +67,7 @@ import shutil class ViewTracker(object): - """Handles tracking views for TrackGuides, TrackChannels, TrackPlaylist and - TrackItems. + """Handles tracking views for TrackGuides, TrackChannels, TrackPlaylists. """ type = None info_factory = None @@ -337,304 +339,6 @@ info_list = self._make_added_list(feed.Feed.watched_folder_view()) messages.WatchedFolderList(info_list).send_to_frontend() -class SourceTrackerBase(ViewTracker): - # we only deal with ItemInfo objects, so we don't need to create anything - info_factory = lambda self, info: info - - def __init__(self): - ViewTracker.__init__(self) - self.sent_initial_list = False - - def get_sources(self): - return [self.source] - - def add_callbacks(self): - for source in self.get_sources(): - source.connect('added', self.on_object_added) - source.connect('changed', self.on_object_changed) - source.connect('removed', self.on_object_id_removed) - self.trackers.append(source) - - def send_initial_list(self): - infos = [] - for source in self.trackers: - infos.extend(source.fetch_all()) - self._last_sent_info.update([(info.id, info) for info in infos]) - messages.ItemList(self.type, self.id, infos).send_to_frontend() - self.sent_initial_list = True - - def make_changed_message(self, added, changed, removed): - return messages.ItemsChanged(self.type, self.id, added, changed, - removed) - - def send_messages(self): - ViewTracker.send_messages(self) - -class DatabaseSourceTrackerBase(SourceTrackerBase): - - def get_sources(self): - return [itemsource.DatabaseItemSource(view) for view in - self.get_object_views()] - - def get_object_views(self): - return [self.view] - -class AllFeedsItemTracker(DatabaseSourceTrackerBase): - type = u'feed' - def __init__(self, id): - self.view = item.Item.toplevel_view() - self.id = id - DatabaseSourceTrackerBase.__init__(self) - -class FeedItemTracker(DatabaseSourceTrackerBase): - type = u'feed' - def __init__(self, feed): - self.view = feed.visible_items - self.id = feed.id - DatabaseSourceTrackerBase.__init__(self) - -class FeedFolderItemTracker(DatabaseSourceTrackerBase): - type = u'feed' - def __init__(self, folder): - self.view = item.Item.visible_folder_view(folder.id) - self.id = folder.id - DatabaseSourceTrackerBase.__init__(self) - -class PlaylistItemTracker(DatabaseSourceTrackerBase): - type = u'playlist' - def __init__(self, playlist): - self.view = item.Item.playlist_view(playlist.id) - self.id = playlist.id - DatabaseSourceTrackerBase.__init__(self) - -class 
PlaylistFolderItemTracker(DatabaseSourceTrackerBase): - type = u'playlist' - def __init__(self, playlist): - self.view = item.Item.playlist_folder_view(playlist.id) - self.id = playlist.id - DatabaseSourceTrackerBase.__init__(self) - -class ManualItemTracker(DatabaseSourceTrackerBase): - type = u'manual' - - def __init__(self, id_, info_list): - self.id = id_ - # SQLite can only handle 999 variables at once. If there are more ids - # than that, we need to split things up (#12020) - self.views = [] - self.id_list = [info.id for info in info_list] - for pos in xrange(0, len(self.id_list), 950): - bite_sized_list = self.id_list[pos:pos+950] - place_holders = ', '.join('?' for i in xrange( - len(bite_sized_list))) - self.views.append(item.Item.make_view( - 'id in (%s)' % place_holders, tuple(bite_sized_list))) - DatabaseSourceTrackerBase.__init__(self) - # set _last_sent_info to the values that we received. We can then use - # that to figure out which ones are out of date in send_initial_list() - self._last_sent_info.update([(info.id, info) for info in info_list]) - - def send_initial_list(self): - infos = [] - for source in self.trackers: - infos.extend(source.fetch_all()) - # _last_sent_info was set with the infos we received, figure out of - # any of those weren't up to date. - changed = self._make_changed_list(infos) - removed_set = set(self.id_list) - set(i.id for i in infos) - removed = self._make_removed_list(removed_set) - if changed or removed: - messages.ItemsChanged(self.type, self.id, [], changed, - removed).send_to_frontend() - self.sent_initial_list = True - - def get_object_views(self): - return self.views - -class DownloadingItemsTracker(DatabaseSourceTrackerBase): - type = u'downloading' - id = u'downloading' - def __init__(self): - self.view = item.Item.download_tab_view() - DatabaseSourceTrackerBase.__init__(self) - -# We don't use the preferenced tracker superclass here because we want -# to do it manually - have both audio and video prefs to take care of here. -class SharingBackendItemsTracker(DatabaseSourceTrackerBase): - type = u'sharing-backend' - def __init__(self, id_): - # NOTE: dodgy: we want to keep the existing api but we need a different - # database query for playlist and feed. So ... track this info in - # the identifier and then chuck it away when we are done. - if id_ is None: - # All items. Type is ignored in this case. - self.id = None - self.view = item.Item.watchable_view() - else: - id_, podcast = id_ - typ = 'feed' if podcast else 'playlist' - self.id = id_ - if typ == 'playlist': - self.view = item.Item.playlist_view(self.id) - elif typ == 'feed': - self.view = item.Item.feed_view(self.id) - else: - # Future expansion? 
- logging.debug('SharingBackendTracker: unrecognized type %s', - typ) - DatabaseSourceTrackerBase.__init__(self) - -class PreferencedItemsTracker(DatabaseSourceTrackerBase): - - def __init__(self): - self.view = self.view_func(app.config.get(self.pref)) - app.backend_config_watcher.connect_weak('changed', - self.on_config_changed) - DatabaseSourceTrackerBase.__init__(self) - - def on_config_changed(self, obj, key, value): - if key == self.pref.key: - self.view = self.view_func(value) - for source in self.trackers: - source.disconnect_all() - self.trackers = [] - self.add_callbacks() - self.send_initial_list() - -class VideoItemsTracker(PreferencedItemsTracker): - type = u'videos' - id = u'videos' - pref = prefs.SHOW_PODCASTS_IN_VIDEO - view_func = item.Item.watchable_video_view - -class AudioItemsTracker(PreferencedItemsTracker): - type = u'music' - id = u'music' - pref = prefs.SHOW_PODCASTS_IN_MUSIC - view_func = item.Item.watchable_audio_view - -class OtherItemsTracker(DatabaseSourceTrackerBase): - type = u'others' - id = u'others' - def __init__(self): - self.view = item.Item.watchable_other_view() - DatabaseSourceTrackerBase.__init__(self) - -class SearchItemsTracker(DatabaseSourceTrackerBase): - type = u'search' - id = u'search' - def __init__(self): - self.view = item.Item.search_item_view() - DatabaseSourceTrackerBase.__init__(self) - -class FolderItemsTracker(DatabaseSourceTrackerBase): - type = u'folder-contents' - def __init__(self, folder_id): - self.view = item.Item.folder_contents_view(folder_id) - self.id = folder_id - DatabaseSourceTrackerBase.__init__(self) - -class GuideSidebarTracker(DatabaseSourceTrackerBase): - type = u'guide-sidebar' - id = None - - def __init__(self): - DatabaseSourceTrackerBase.__init__(self) - - def get_object_views(self): - return [ - item.Item.recently_watched_view(), - item.Item.recently_downloaded_view()] - -class SharingItemTracker(SourceTrackerBase): - type = u'sharing' - def __init__(self, share): - share_id = share.tracker_id - self.id = share - self.tracker = app.sharing_tracker.get_tracker(share_id) - prefdict = dict(video=prefs.SHOW_PODCASTS_IN_VIDEO, - audio=prefs.SHOW_PODCASTS_IN_MUSIC) - self.source = itemsource.SharingItemSource(self.tracker, - share.playlist_id) - try: - self.pref = prefdict[share.playlist_id] - self.prefvalue = app.config.get(self.pref) - self.source.include_podcasts = self.prefvalue - except KeyError: - self.pref = self.prefvalue = None - app.backend_config_watcher.connect_weak('changed', - self.on_config_changed) - SourceTrackerBase.__init__(self) - - def on_config_changed(self, obj, key, value): - if self.pref and key == self.pref.key: - self.prefvalue = value - self.source.include_podcasts = self.prefvalue - for source in self.trackers: - source.disconnect_all() - self.trackers = [] - self.add_callbacks() - self.send_initial_list() - - def _make_changed_list(self, changed): - retval = [] - for obj in changed: - info = self.info_factory(obj) - retval.append(info) - self._last_sent_info[obj.id] = info - return retval - -class DeviceItemTracker(SourceTrackerBase): - type = u'device' - def __init__(self, device): - self.device = self.id = device - self.source = itemsource.DeviceItemSource(device) - - SourceTrackerBase.__init__(self) - - -def make_item_tracker(message): - if message.type == 'downloading': - return DownloadingItemsTracker() - elif message.type == 'videos': - return VideoItemsTracker() - elif message.type == 'music': - return AudioItemsTracker() - elif message.type == 'others': - return 
OtherItemsTracker() - elif message.type == 'search': - return SearchItemsTracker() - elif message.type == 'folder-contents': - return FolderItemsTracker(message.id) - elif message.type == 'feed': - if message.id == u'%s-base-tab' % u'feed': - return AllFeedsItemTracker(message.id) - try: - feed_ = feed.Feed.get_by_id(message.id) - return FeedItemTracker(feed_) - except database.ObjectNotFoundError: - folder = ChannelFolder.get_by_id(message.id) - return FeedFolderItemTracker(folder) - elif message.type == 'playlist': - try: - playlist = SavedPlaylist.get_by_id(message.id) - return PlaylistItemTracker(playlist) - except database.ObjectNotFoundError: - playlist = PlaylistFolder.get_by_id(message.id) - return PlaylistFolderItemTracker(playlist) - elif message.type == 'manual': - return ManualItemTracker(message.id, message.infos_to_track) - elif message.type == 'device': - return DeviceItemTracker(message.id) - elif message.type == 'sharing': - return SharingItemTracker(message.id) - elif message.type == 'sharing-backend': - return SharingBackendItemsTracker(message.id) - elif message.type == 'guide-sidebar': - return GuideSidebarTracker() - else: - logging.warn("Unknown TrackItems type: %s", message.type) - class CountTracker(object): """Tracks downloads count or new videos count""" def __init__(self): @@ -751,7 +455,6 @@ self.new_video_count_tracker = None self.new_audio_count_tracker = None self.unwatched_count_tracker = None - self.item_trackers = {} search_feed = Feed.get_search_feed() search_feed.connect('update-finished', self._search_update_finished) @@ -837,17 +540,11 @@ self.playlist_tracker.unlink() self.playlist_tracker = None - def handle_track_sharing(self, message): - app.sharing_tracker.start_tracking() - - def handle_stop_tracking_sharing(self, message): - pass - - def handle_sharing_eject(self, message): - app.sharing_tracker.eject(message.share.id) + def handle_track_share(self, message): + app.sharing_tracker.track_share(message.share_id) - def handle_track_devices(self, message): - app.device_tracker.start_tracking() + def handle_stop_tracking_share(self, message): + app.sharing_tracker.stop_tracking_share(message.share_id) def handle_mark_feed_seen(self, message): try: @@ -902,7 +599,7 @@ except database.ObjectNotFoundError: logging.warn("SetItemMediaType: Item not found -- %s", id_) continue - item_.set_file_type(message.media_type) + item_.set_user_metadata({'file_type': message.media_type}) def handle_set_feed_expire(self, message): channel_info = message.channel_info @@ -978,9 +675,9 @@ obj.set_title(message.new_name) def handle_play_all_unwatched(self, message): - item_infos = itemsource.DatabaseItemSource( - item.Item.newly_downloaded_view()).fetch_all() - messages.PlayMovie(item_infos).send_to_frontend() + item_ids = [i.id for i in item.Item.newly_downloaded_view()] + item_infos = app.db.fetch_item_infos(item_ids) + messages.PlayMovies(item_infos).send_to_frontend() def handle_tab_expanded_change(self, message): tab_view = HideableTab.make_view('type=?', (message.type,)) @@ -1005,7 +702,7 @@ try: feed_ = feed.Feed.get_by_id(message.id) except database.ObjectNotFoundError: - logging.warn("feed not found: %s" % id) + logging.warn("feed not found: %s" % message.id) else: feed_.update() @@ -1013,7 +710,7 @@ try: f = ChannelFolder.get_by_id(message.id) except database.ObjectNotFoundError: - logging.warn("folder not found: %s" % id) + logging.warn("folder not found: %s" % message.id) else: for feed in f.get_children_view(): feed.schedule_update_events(0) @@ -1271,11 
+968,11 @@ logging.warn("AddVideosToPlaylist: Playlist not found -- %s", message.playlist_id) return - for id in message.video_ids: + for id_ in message.video_ids: try: - item_ = item.Item.get_by_id(id) + item_ = item.Item.get_by_id(id_) except database.ObjectNotFoundError: - logging.warn("AddVideosToPlaylist: Item not found -- %s", id) + logging.warn("AddVideosToPlaylist: Item not found -- %s", id_) continue if not item_.is_downloaded(): logging.warn("AddVideosToPlaylist: Item not downloaded (%s)", @@ -1334,36 +1031,6 @@ # make sure the item list is a tuple, so it can be hashed. return (message.type, tuple(message.id)) - def handle_track_items(self, message): - key = self.item_tracker_key(message) - if key not in self.item_trackers: - try: - item_tracker = make_item_tracker(message) - except database.ObjectNotFoundError: - logging.warn("TrackItems called for deleted object (%s %s)", - message.type, message.id) - return - if item_tracker is None: - # message type was wrong - return - self.item_trackers[key] = item_tracker - else: - item_tracker = self.item_trackers[key] - item_tracker.send_initial_list() - - def handle_track_items_manually(self, message): - # handle_track_items can handle this message too - self.handle_track_items(message) - - def handle_stop_tracking_items(self, message): - key = self.item_tracker_key(message) - try: - item_tracker = self.item_trackers.pop(key) - except KeyError: - logging.warn("Item tracker not found (id: %s)", message.id) - else: - item_tracker.unlink() - def handle_cancel_auto_download(self, message): try: item_ = item.Item.get_by_id(message.id) @@ -1391,6 +1058,7 @@ def handle_pause_all_downloads(self, message): """Pauses all downloading and uploading items""" + app.download_state_manager.set_bulk_mode() for item_ in item.Item.downloading_view(): if item_.is_uploading(): item_.pause_upload() @@ -1407,6 +1075,7 @@ def handle_resume_all_downloads(self, message): """Resumes downloading and uploading items""" + app.download_state_manager.set_bulk_mode() for item_ in item.Item.paused_view(): if item_.is_uploading_paused(): item_.start_upload() @@ -1422,6 +1091,7 @@ item_.resume() def handle_cancel_all_downloads(self, message): + app.download_state_manager.set_bulk_mode() for item_ in item.Item.download_tab_view(): if item_.is_uploading() or item_.is_uploading_paused(): item_.stop_upload() @@ -1470,6 +1140,23 @@ else: item_.save() + def handle_set_media_kind(self, message): + item_infos = message.item_infos + kind = message.kind + logging.debug('KIND = %s', kind) + items = [] + for i in item_infos: + try: + items.append(item.Item.get_by_id(i.id)) + except database.ObjectNotFoundError: + logging.warn("SetMediaKind: Item not found -- %s", i.id) + app.bulk_sql_manager.start() + try: + for i in items: + i.set_user_metadata({u'kind': kind}) + finally: + app.bulk_sql_manager.finish() + def handle_save_item_as(self, message): try: item_ = item.Item.get_by_id(message.id) @@ -1484,15 +1171,19 @@ try: if fileutil.samefile(item_.get_filename(), message.filename): return # saving over the same file - except (IOError, OSError): - # FIXME - return an error to the frontend? + except EnvironmentError: + # just try to write the file anyways pass try: shutil.copyfile(item_.get_filename(), message.filename) - except IOError: - # FIXME - we should pass the error back to the frontend - pass + except EnvironmentError, e: + # XXX is there a more useful error message we can show the user? 
+ messages.ShowWarning( + _("Error saving file"), + _("There was an error saving %(filename)s: %(error)s", { + 'filename': message.filename, + 'error': e.strerror})).send_to_frontend() def handle_remove_video_entries(self, message): items = [] @@ -1519,14 +1210,6 @@ for handler, info_list in infos_per_handler.iteritems(): handler.bulk_delete(info_list) - def handle_rename_video(self, message): - try: - item_ = item.Item.get_by_id(message.id) - except database.ObjectNotFoundError: - logging.warn("RenameVideo: Item not found -- %s", message.id) - else: - item_.set_title(message.new_name) - def handle_edit_items(self, message): changes = message.change_dict for id_ in message.item_ids: @@ -1535,7 +1218,10 @@ except database.ObjectNotFoundError: logging.warn("EditItems: Item not found -- %s", id_) continue - item_.set_metadata_from_iteminfo(changes) + # ItemInfo uses "name", but the metadata system uses "title" now + if 'name' in changes: + changes['title'] = changes.pop('name') + item_.set_user_metadata(changes) def handle_revert_feed_title(self, message): try: @@ -1649,6 +1335,8 @@ last_progress_time = 0 title = _('Migrating Files') messages.ProgressDialogStart(title).send_to_frontend() + app.local_metadata_manager.will_move_files([d.get_filename() for d in + to_migrate]) for i, download in enumerate(to_migrate): current_time = time.time() if current_time > last_progress_time + 0.5: @@ -1696,9 +1384,6 @@ info = message.display_info state = self._get_display_state(info.key) state.selected_view = info.selected_view - state.active_filters = info.active_filters - state.list_view_columns = info.list_view_columns - state.list_view_widths = info.list_view_widths state.shuffle = info.shuffle state.repeat = info.repeat state.selection = info.selection @@ -1709,12 +1394,17 @@ # don't save device/share items, since they might not be there next # time state.last_played_item_id = None + # shallow-copy attributes that store lists, dicts, and sets so + # that changing the DisplayInfo doesn't change the database object + state.active_filters = copy.copy(info.active_filters) state.signal_change() def handle_save_view_state(self, message): info = message.view_info state = self._get_view_state(info.key) state.scroll_position = info.scroll_position + state.columns_enabled = copy.copy(info.columns_enabled) + state.column_widths = copy.copy(info.column_widths) state.signal_change() def handle_save_global_state(self, message): @@ -1774,91 +1464,76 @@ def handle_change_device_sync_setting(self, message): db = message.device.database - this_sync = db[u'sync'].setdefault(message.file_type, {}) - this_sync[message.setting] = message.value + if message.file_type: + this_sync = db[u'sync'].setdefault(message.file_type, {}) + else: + this_sync = db[u'sync'] + if not isinstance(message.setting, (tuple, list)): + settings = [message.setting] + else: + settings = list(message.setting) + while len(settings) > 1: + this_sync = this_sync.setdefault(settings.pop(0), {}) + this_sync[settings[0]] = message.value def handle_change_device_setting(self, message): - device = message.device - device.database.setdefault(u'settings', {}) - device.database[u'settings'][message.setting] = message.value - if message.setting == 'name': - device.name = message.value - # need to send a changed message - message = messages.TabsChanged('connect', [], [device], []) - message.send_to_frontend() - message = messages.DeviceChanged(device) - message.send_to_frontend() + app.device_manager.change_setting(message.device, message.setting, + 
message.value) def handle_device_eject(self, message): - currently_playing = app.playback_manager.get_playing_item() - if currently_playing and getattr(currently_playing, 'device', None): - if currently_playing.device.mount == message.device.mount: - messages.StopPlaying().send_to_frontend() - # give the stop a chance to close the files - eventloop.add_timeout(0.1, self.handle_device_eject, - 'ejecting device', - args=(message,)) - return - devices.write_database(message.device.database, message.device.mount) - app.device_tracker.eject(message.device) - - @staticmethod - def _get_sync_items_for_message(message): - sync = message.device.database[u'sync'] - views = [] - infos = set() - if sync.setdefault(u'podcasts', {}).get(u'enabled', False): - for url in sync[u'podcasts'].setdefault(u'items', []): - feed_ = lookup_feed(url) - if feed_ is not None: - if sync[u'podcasts'].get(u'all', True): - view = feed_.downloaded_items - else: - view = feed_.unwatched_items - views.append(view) - - if sync.setdefault(u'playlists', {}).get(u'enabled', False): - for name in sync[u'playlists'].setdefault(u'items', []): - try: - playlist_ = SavedPlaylist.get_by_title(name) - except database.ObjectNotFoundError: - continue - views.append(item.Item.playlist_view(playlist_.id)) - - for view in views: - source = itemsource.DatabaseItemSource(view) - try: - infos.update( - [info for info in source.fetch_all() - if not message.device.database.item_exists(info)]) - finally: - source.unlink() - return infos + app.device_manager.eject_device(message.device) def handle_query_sync_information(self, message): - infos = self._get_sync_items_for_message(message) - count = sum(1 for info in infos - if info.file_type in ('video', 'audio')) - message = messages.CurrentSyncInformation(message.device, - count) - message.send_to_frontend() + dsm = app.device_manager.get_sync_for_device(message.device) + dsm.query_sync_information() def handle_device_sync_feeds(self, message): - infos = self._get_sync_items_for_message(message) - if infos: - dsm = app.device_manager.get_sync_for_device(message.device) - dsm.add_items(infos) + dsm = app.device_manager.get_sync_for_device(message.device) + if hasattr(dsm, 'last_sync_info'): + infos, expired, count, size = dsm.last_sync_info + del dsm.last_sync_info + else: + infos, expired = dsm.get_sync_items(dsm.max_sync_size()) + count, size = dsm.get_sync_size(infos, expired) + + if size > dsm.max_sync_size(): + return + + if infos or expired: + if expired: + dsm.expire_items(expired) + without_auto = dsm.max_sync_size(include_auto=False) + if size > without_auto: + dsm.expire_auto_items(size - without_auto) + if infos: + dsm.start() + dsm.add_items(infos) + if size < without_auto: + remaining = dsm.max_sync_size() - size + auto_items = dsm.get_auto_items(remaining) + if auto_items: + dsm.add_items(auto_items, auto_sync=True) + else: + message = messages.CurrentSyncInformation(message.device, + 0, 0) + message.send_to_frontend() def handle_device_sync_media(self, message): try: - item_infos = [itemsource.DatabaseItemSource.get_by_id(id_) - for id_ in message.item_ids] + item_infos = fetch_item_infos(app.db.connection, + message.item_ids) except database.ObjectNotFoundError: logging.warn("HandleDeviceSyncMedia: Items not found -- %s", message.item_ids) return dsm = app.device_manager.get_sync_for_device(message.device) + count, size = dsm.get_sync_size(item_infos) + + if size > dsm.max_sync_size(): + return + + dsm.start() dsm.add_items(item_infos) def handle_cancel_device_sync(self, message): 
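handle_change_device_sync_setting() above now accepts either a single setting name or a tuple/list naming a path into the nested sync dict; the while loop descends with setdefault() and deliberately stops one key short, so the final key is assigned rather than descended into. A standalone illustration of that walk (the key path here is hypothetical):

sync = {}                                    # stands in for db[u'sync']
settings = [u'podcasts', u'items', u'all']   # hypothetical key path
value = True

this_sync = sync
while len(settings) > 1:
    this_sync = this_sync.setdefault(settings.pop(0), {})
this_sync[settings[0]] = value

assert sync == {u'podcasts': {u'items': {u'all': True}}}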
@@ -1873,18 +1548,17 @@ for item_info in message.item_infos: # notes: # For sharing item the URL is encoded directory into the path. - url = item_info.video_path.urlize().decode('utf-8', 'replace') + url = item_info.filename.urlize().decode('utf-8', 'replace') file_format = '.' + item_info.file_format try: # So many to choose from ... let's just pick the first one. content_type = filetypes.EXT_MIMETYPES_MAP[file_format][0] except KeyError: content_type = 'audio/unknown' - additional = dict() - keys = (('name', 'title'), ('description', 'description')) - for src_key, dst_key in keys: - value = getattr(item_info, src_key) - additional[dst_key] = value + additional = { + 'title': item_info.title, + 'description': item_info.description, + } entry = _build_entry(url, content_type, additional=additional) download_video(entry) @@ -1894,22 +1568,22 @@ for item_info in message.item_infos: final_path = os.path.join(video_directory, - os.path.basename(item_info.video_path)) + os.path.basename(item_info.filename)) view = item.Item.make_view('is_file_item AND filename=?', (filename_to_unicode(final_path),)) if view.count(): # file already exists continue - shutil.copyfile(item_info.video_path, final_path) + shutil.copyfile(item_info.filename, final_path) fp_values = item.fp_values_for_file(final_path, - item_info.name, + item_info.title, item_info.description) fi = item.FileItem(final_path, feed_id=manual_feed.id, fp_values=fp_values) - fi.releaseDateObj = item_info.release_date + fi.release_date = item_info.release_date fi.duration = item_info.duration fi.permalink = item_info.permalink - if item_info.commentslink: - fi.comments_link = item_info.commentslink + if item_info.comments_link: + fi.comments_link = item_info.comments_link if item_info.payment_link: fi.payment_link = item_info.payment_link #fi.screenshot = item_info.thumbnail @@ -1917,14 +1591,70 @@ fi.thumbnail_url = item_info.thumbnail_url fi.file_format = item_info.file_format fi.license = item_info.license - if item_info.file_url: - fi.url = item_info.file_url fi.mime_type = item_info.mime_type - fi.creationTime = item_info.date_added - fi.title_tag = item_info.title_tag + fi.creation_time = item_info.date_added fi.artist = item_info.artist fi.album = item_info.album fi.track = item_info.track fi.year = item_info.year fi.genre = item_info.genre fi.signal_change() + + def handle_clog_backend(self, message): + logging.debug('handle_clog_backend: Backend snoozing for %d seconds. ' + 'ZZZZZZ.', message.n) + time.sleep(message.n) + logging.debug('handle_clog_backend: Backend out of snooze. 
Yawn!') + + def handle_force_feedparser_processing(self, message): + # For all our RSS feeds, force an update + for f in feed.Feed.make_view(): + if isinstance(f.actualFeed, feed.RSSFeedImpl): + f.actualFeed.etag = f.actualFeed.modified = None + f.actualFeed.signal_change() + f.update() + elif isinstance(f.actualFeed, feed.RSSMultiFeedBase): + f.actualFeed.etag = {} + f.actualFeed.modified = {} + f.actualFeed.signal_change() + f.update() + + def handle_force_dbsave_error(self, message): + app.db.simulate_db_save_error() + + def handle_force_device_dbsave_error(self, message): + app.device_manager.force_db_save_error(message.device_info) + + def handle_set_net_lookup_enabled(self, message): + paths = set() + if message.item_ids is None: + app.local_metadata_manager.set_net_lookup_enabled_for_all( + message.enabled) + return + + for item_id in message.item_ids: + try: + i = item.Item.get_by_id(item_id) + except database.ObjectNotFoundError: + logging.warn("handle_set_net_lookup_enabled: id not found: %s", + item_id) + else: + paths.add(i.get_filename()) + # Remove any None values in case an item didn't have a path + paths.discard(None) + app.local_metadata_manager.set_net_lookup_enabled(paths, + message.enabled) + + def handle_remove_echonest_data(self, message): + paths = set() + for item_id in message.item_ids: + try: + i = item.Item.get_by_id(item_id) + except database.ObjectNotFoundError: + logging.warn("handle_remove_echonest_data: id not found: %s", + item_id) + else: + paths.add(i.get_filename()) + # Remove any None values in case an item didn't have a path + paths.discard(None) + app.local_metadata_manager.remove_echonest_data(paths) diff -Nru miro-4.0.4/lib/messages.py miro-6.0/lib/messages.py --- miro-4.0.4/lib/messages.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/messages.py 2013-04-05 16:02:42.000000000 +0000 @@ -39,14 +39,15 @@ This module defines the messages that are passed between the two threads. """ +import copy import logging from miro.gtcache import gettext as _ -from miro.folder import ChannelFolder, PlaylistFolder from miro.messagetools import Message, MessageHandler from miro.plat import resources from miro import app from miro import displaytext +from miro import models from miro import guide from miro import search from miro import prefs @@ -128,52 +129,6 @@ """ pass -class TrackItems(BackendMessage): - """Begin tracking items for a feed - - After this message is sent, the backend will send back a ItemList message, - then it will send ItemsChanged messages for items in the feed. - - type is the type of object that we are tracking items for. It can be one - of the following: - - * feed -- Items in a feed - * playlist -- Items in a playlist - * new -- Items that haven't been watched - * downloading -- Items being downloaded - * library -- All items - - id should be the id of a feed/playlist. For new, downloading and library - it is ignored. - """ - def __init__(self, typ, id_): - self.type = typ - self.id = id_ - -class TrackItemsManually(BackendMessage): - """Track a manually specified list of items. - - TrackItemsManually can only be used to track database items. - - No ItemList message will be sent, since the sender is providing the inital - list of items. Instead, if the infos_to_track is out of date of date, - then an ItemsChanged message will be sent with the changes. - - ItemsChanged messages will have "manual" as the type and will use the id - specified in the constructed.
- """ - def __init__(self, id_, infos_to_track): - self.id = id_ - self.infos_to_track = infos_to_track - self.type = 'manual' - -class StopTrackingItems(BackendMessage): - """Stop tracking items for a feed. - """ - def __init__(self, typ, id_): - self.type = typ - self.id = id_ - class TrackDownloadCount(BackendMessage): """Start tracking the number of downloading items. After this message is received the backend will send a corresponding DownloadCountChanged @@ -265,15 +220,17 @@ """ pass -class TrackSharing(BackendMessage): - """Start tracking media shares. +class TrackShare(BackendMessage): + """Start tracking a media share """ - pass + def __init__(self, share_id): + self.share_id = share_id -class TrackDevices(BackendMessage): - """Start tracking devices. +class StopTrackingShare(BackendMessage): + """Stop tracking a media share """ - pass + def __init__(self, share_id): + self.share_id = share_id class SetFeedExpire(BackendMessage): """Sets the expiration for a feed. @@ -646,6 +603,13 @@ def __repr__(self): return BackendMessage.__repr__(self) + (", id: %s" % self.id) +class SetMediaKind(BackendMessage): + """Set the media kind of the list of items to be the specified kind. + """ + def __init__(self, item_infos, kind): + self.item_infos = item_infos + self.kind = kind + class SaveItemAs(BackendMessage): """Saves an item in the dark clutches of Miro to somewhere else. """ @@ -893,7 +857,46 @@ self.info = info self.rating = rating +class SetNetLookupEnabled(BackendMessage): + """Enable/disable internet lookups for a set of items. """ + def __init__(self, item_ids, enabled): + """Create a new message + + :param item_ids: list of item ids or None for all current items + :param boolean enabled: enable/disable flag + """ + self.item_ids = item_ids + self.enabled = enabled + +class ClogBackend(BackendMessage): + """Dev message: intentionally clog the backend for a specified number of + seconds. + """ + def __init__(self, n=0): + self.n = n + +class ForceFeedparserProcessing(BackendMessage): + """Force the backend to do a bunch of feedparser updates + """ + pass + +class ForceDBSaveError(BackendMessage): + """Simulate an error running an INSERT/UPDATE statement on the main DB. + """ + +class ForceDeviceDBSaveError(BackendMessage): + """Simulate an error running an INSERT/UPDATE statement on a device DB. + """ + def __init__(self, device_info): + self.device_info = device_info + # Frontend Messages +class DownloaderSyncCommandComplete(FrontendMessage): + """Tell the frontend that the pause/resume all command are complete, + so that we only sort once. This saves time sorting and also prevents + UI clog when items are updated and gets sorted one by one. + """ + pass class JettisonTabs(FrontendMessage): """Tell the frontend to remove certain sidebar tabs from its model. 
Done @@ -917,10 +920,11 @@ def __init__(self, share): self.share = share -class DeviceEjectFailed(FrontendMessage): - """Tell the frontend that the device eject failed.""" - def __init__(self, device): - self.device = device +class ShowWarning(FrontendMessage): + """Tell the frontend to show a warning.""" + def __init__(self, title, description): + self.title = title + self.description = description class FrontendQuit(FrontendMessage): """The frontend should exit.""" @@ -1012,7 +1016,7 @@ self.search_term = channel_obj.searchTerm else: self.search_term = None - if not isinstance(channel_obj, ChannelFolder): + if not isinstance(channel_obj, models.ChannelFolder): self.has_original_title = channel_obj.has_original_title() self.is_updating = channel_obj.is_updating() self.parent_id = channel_obj.folder_id @@ -1054,7 +1058,7 @@ def __init__(self, playlist_obj): self.name = playlist_obj.get_title() self.id = playlist_obj.id - self.is_folder = isinstance(playlist_obj, PlaylistFolder) + self.is_folder = isinstance(playlist_obj, models.PlaylistFolder) if self.is_folder: self.parent_id = None else: @@ -1087,218 +1091,6 @@ def __repr__(self): return '' % (self.id, self.name) -class ItemInfo(object): - """Tracks the state of an item - - :param name: name of the item - :param id: object id - :param source: the ItemSource this ItemInfo was generated from - :param feed_id: id for the items feed - :param feed_name: name of the feed item is attached to - :param feed_url: URL of the feed item is attached to - :param description: longer description for the item (HTML) - :param state: see Item.get_state() - :param release_date: datetime object when the item was published - :param size: size of the item in bytes - :param duration: length of the video in seconds - :param resume_time: time at which playback should restart - :param permalink: URL to a permalink to the item (or None) - :param commentslink: URL to a comments page for the item (or None) - :param payment_link: URL of the payment page associated with the item - (or empty string) - :param has_shareable_url: does this item have a shareable URL? - :param can_be_saved: is this an expiring downloaded item? - :param downloaded: has the item been downloaded? - :param is_external: is this item external (true) or from a channel - (false)? - :param expiration_date: datetime object for when the item will expire - (or None) - :param item_viewed: has the user ever seen the item? - :param video_watched: has the user watched the video for the item? - :param video_path: the file path to the video for this item (or None) - :param file_type: type of the downloaded file (video/audio/other) - :param subtitle_encoding: encoding for subtitle display - :param media_type_checked: has the movie data util checked file_type? - :param seeding_status: Torrent seeding status ('seeding', 'stopped', - or None) - :param thumbnail: path to the thumbnail for this file - :param thumbnail_url: URL for the item's thumbnail (or None) - :param file_format: User-facing format description. Possibly the - file type, pulled from the mime_type, or more - generic, like "audio" - :param remote: is this item from a media share or local? - :param host: machine hosting the item, only valid if remote is set - :param port: port to connect to for item, only valid if remote is set - :param license: this file's license, if known. 
- :param mime_type: mime-type of the enclosure that would be downloaded - :param album_artist: the album artist of the album - :param artist: the primary artist of the track - :param album: the track's album of origin - :param track: the track number within the album - :param year: the track's year of release - :param genre: the track's genre - :param rating: the user's rating of the track - :param date_added: when the item became part of the user's db - :param last_played: the date/time the item was last played - :param file_url: URL of the enclosure that would be downloaded - :param download_info: DownloadInfo object containing info about the - download (or None) - :param is_container_item: whether or not this item is actually a - collection of files as opposed to an - individual item - :param children: for container items the children of the item. - :param is_playable: is this item a audio/video file, or a container that - contains audio/video files inside. - :param is_playing: Whether item is the currently playing (or paused) item - :param leechers: (Torrent only) number of leeching clients - :param seeders: (Torrent only) number of seeding clients - :param up_rate: (Torrent only) how fast we're uploading data - :param down_rate: (Torrent only) how fast we're downloading data - :param up_total: (Torrent only) total amount we've uploaded - :param down_total: (Torrent only) total amount we've downloaded - :param up_down_ratio: (Torrent only) ratio of uploaded to downloaded - :param has_drm: True/False if known; None if unknown (usually means no) - """ - - html_stripper = util.HTMLStripper() - - def __repr__(self): - return "" % self.id - - def __getstate__(self): - d = self.__dict__.copy() - d['device'] = None - del d['description_stripped'] - del d['search_terms'] - return d - - def __setstate__(self, d): - self.__dict__.update(d) - self.description_stripped = ItemInfo.html_stripper.strip( - self.description) - self.search_terms = search.calc_search_terms(self) - - def __init__(self, id_, **kwargs): - self.id = id_ - - self.__dict__.update(kwargs) # we're just a thin wrapper around some - # data - - # stuff we can calculate from other attributes - if not hasattr(self, 'description_stripped'): - self.description_stripped = ItemInfo.html_stripper.strip( - self.description) - if not hasattr(self, 'search_terms'): - self.search_terms = search.calc_search_terms(self) - self.name_sort_key = util.name_sort_key(self.name) - self.album_sort_key = util.name_sort_key(self.album) - self.artist_sort_key = util.name_sort_key(self.artist) - if self.album_artist: - self.album_artist_sort_key = util.name_sort_key(self.album_artist) - else: - self.album_artist_sort_key = self.artist_sort_key - # pre-calculate things that get displayed in list view - self.description_oneline = ( - self.description_stripped[0].replace('\n', '$')) - self.display_date = displaytext.date_slashes(self.release_date) - self.display_duration = displaytext.duration(self.duration) - self.display_duration_short = displaytext.short_time_string( - self.duration) - self.display_size = displaytext.size_string(self.size) - self.display_date_added = displaytext.date_slashes(self.date_added) - self.display_last_played = displaytext.date_slashes(self.last_played) - self.display_track = displaytext.integer(self.track) - self.display_year = displaytext.integer(self.year) - self.display_torrent_details = self.calc_torrent_details() - self.display_drm = self.has_drm and _("Locked") or u"" - # FIXME: display_kind changes here need also be 
applied in itemedit - if self.kind == 'movie': - self.display_kind = _("Movie") - elif self.kind == 'show': - self.display_kind = _("Show") - elif self.kind == 'clip': - self.display_kind = _("Clip") - elif self.kind == 'podcast': - self.display_kind = _("Podcast") - else: - self.display_kind = None - - if self.state == 'downloading': - dl_info = self.download_info - if dl_info.eta > 0: - self.display_eta = displaytext.time_string(dl_info.eta) - else: - self.display_eta = '' - self.display_rate = displaytext.download_rate(dl_info.rate) - else: - self.display_rate = self.display_eta = '' - - def calc_torrent_details(self): - if not self.download_info or not self.download_info.torrent: - return '' - - details = _( - "S: %(seeders)s | " - "L: %(leechers)s | " - "UR: %(up_rate)s | " - "UT: %(up_total)s | " - "DR: %(down_rate)s | " - "DT: %(down_total)s | " - "R: %(ratio).2f", - {"seeders": self.seeders, - "leechers": self.leechers, - "up_rate": self.up_rate, - "up_total": self.up_total, - "down_rate": self.down_rate, - "down_total": self.down_total, - "ratio": self.up_down_ratio}) - return details - -class DownloadInfo(object): - """Tracks the download state of an item. - - :param downloaded_size: bytes downloaded - :param rate: current download rate, in bytes per second - :param state: one of ``downloading``, ``uploading``, ``finished``, - ``failed`` or ``paused``. ``uploading`` is for - torrents only. It means that we've finished - downloading the torrent and are now seeding it. - :param eta: Estimated seconds before the download is finished - :param startup_activity: The current stage of starting up - :param finished: True if the item has finished downloading - :param torrent: Is this a Torrent download? - """ - def __init__(self, downloader): - self.downloaded_size = downloader.get_current_size() - self.total_size = downloader.get_total_size() - self.rate = downloader.get_rate() - self.state = downloader.get_state() - self.startup_activity = downloader.get_startup_activity() - self.finished = downloader.is_finished() - self.torrent = (downloader.get_type() == 'bittorrent') - if self.state == 'failed': - self.reason_failed = downloader.get_reason_failed() - self.short_reason_failed = downloader.get_short_reason_failed() - else: - self.reason_failed = u"" - self.short_reason_failed = u"" - self.eta = downloader.get_eta() - -class PendingDownloadInfo(DownloadInfo): - """DownloadInfo object for pending downloads (downloads queued, - but not started because we've reached some limit) - """ - def __init__(self): - self.downloaded_size = 0 - self.rate = 0 - self.state = 'pending' - self.startup_activity = _('queued for download') - self.finished = False - self.torrent = False - self.reason_failed = u"" - self.short_reason_failed = u"" - self.eta = 0 - class WatchedFolderInfo(object): """Tracks the state of a watched folder. 
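DownloadInfo above reads every downloader field once, in __init__, so the frontend can render download state without ever calling back into backend objects. A small self-contained sketch of the same snapshot pattern; FakeDownloader and DownloadSnapshot are hypothetical stand-ins:

class FakeDownloader(object):
    # stand-in for the real downloader object, which lives on the backend
    def get_current_size(self): return 300 * 1024
    def get_total_size(self): return 1024 * 1024
    def get_rate(self): return 2048
    def get_state(self): return 'downloading'
    def get_eta(self): return 350

class DownloadSnapshot(object):
    """Plain-data copy of a downloader's state, safe to hand to a UI."""
    def __init__(self, downloader):
        # read everything once, up front; after this the snapshot never
        # touches the backend object again
        self.downloaded_size = downloader.get_current_size()
        self.total_size = downloader.get_total_size()
        self.rate = downloader.get_rate()
        self.state = downloader.get_state()
        self.eta = downloader.get_eta()

snap = DownloadSnapshot(FakeDownloader())
print('%s, eta %ds' % (snap.state, snap.eta))   # downloading, eta 350s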
@@ -1419,39 +1211,63 @@ '(%d added, %d changed, %d removed)>') % (self.type, len(self.added), len(self.changed), len(self.removed)) -class ItemList(FrontendMessage): - """Sends the frontend the initial list of items for a feed +class ItemChanges(FrontendMessage): + """Sent to the frontend when items change - :param type: type of object being tracked (same as in TrackItems) - :param id: id of the object being tracked (same as in TrackItems) - :param items: list of ItemInfo objects + :attribute added: set ids for added items + :attribute changed: set ids for changed items + :attribute removed: set ids for removed items + :attribute changed_columns: set columns that were changed (the union of + changes for all items) + :attribute dlstats_changed: Did we get new download stats? + :attribute playlists_changed: Did items get added/removed from playlists? + """ + def __init__(self, added, changed, removed, changed_columns, + dlstats_changed, playlists_changed): + self.added = frozenset(added) + self.changed = frozenset(changed) + self.removed = frozenset(removed) + self.changed_columns = frozenset(changed_columns) + self.dlstats_changed = dlstats_changed + self.playlists_changed = playlists_changed + +class DeviceItemChanges(FrontendMessage): + """Sent to the frontend when items change on a device + + :attribute device_id: id for the device + :attribute added: set ids for added items + :attribute changed: set ids for changed items + :attribute removed: set ids for removed items + :attribute changed_columns: set columns that were changed (the union of + changes for all items) """ - def __init__(self, typ, id_, item_infos): - self.type = typ - self.id = id_ - self.items = item_infos + def __init__(self, device_id, added, changed, removed, changed_columns): + self.device_id = device_id + self.added = added + self.changed = changed + self.removed = removed + self.changed_columns = changed_columns -class ItemsChanged(FrontendMessage): - """Informs the frontend that the items in a feed have changed. +class SharingItemChanges(FrontendMessage): + """Sent to the frontend when items change on a share - :param type: type of object being tracked (same as in TrackItems) - :param id: id of the object being tracked (same as in TrackItems) - :param added: list containing an ItemInfo object for each added item. - The order will be the order they were added. - :param changed: set containing an ItemInfo for each changed item. - :param removed: set containing ids for each item that was removed - """ - def __init__(self, typ, id_, added, changed, removed): - self.type = typ - self.id = id_ + :attribute share_id: id for the share + :attribute added: set ids for added items + :attribute changed: set ids for changed items + :attribute removed: set ids for removed items + :attribute changed_columns: set columns that were changed (the union of + changes for all items) + :attribute changed_playlists: True if any playlists on the share have + had their contents changed. + """ + def __init__(self, share_id, added, changed, removed, changed_columns, + changed_playlists): + self.share_id = share_id self.added = added self.changed = changed self.removed = removed - - def __str__(self): - return ('<ItemsChanged type: %s, id: %s (%d added, %d changed, %d removed)>') % (self.type, self.id, - len(self.added), len(self.changed), len(self.removed)) + self.changed_columns = changed_columns + self.changed_playlists = changed_playlists class WatchedFolderList(FrontendMessage): """Sends the frontend the initial list of watched folders.
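ItemChanges above sends one global delta instead of per-feed lists, and changed_columns is the union across all changed items, so the frontend can cheaply decide whether a visible column could have moved. A toy illustration of building such a delta:

# Toy illustration of an ItemChanges-style delta: per-item column
# changes are unioned so a single frozenset answers "could anything
# I display (say, a sorted column) have changed?"
changes_by_item = {
    101: set(['title']),
    102: set(['duration', 'size']),
}
added = frozenset()
removed = frozenset([99])
changed = frozenset(changes_by_item.keys())
changed_columns = frozenset().union(*changes_by_item.values())
print(sorted(changed_columns))   # ['duration', 'size', 'title']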
@@ -1521,6 +1337,18 @@ def __init__(self, count): self.count = count +class ConverterList(FrontendMessage): + """Sends the list of converters to the frontend + + :attribute converters: list of converter groups. Each group will contain + a sublist of (identifier, name) tuples. + """ + def __init__(self, converter_list): + self.converters = [] + for name, converters in converter_list: + self.converters.append([(info.identifier, info.name) + for info in converters]) + class ConversionTaskInfo(object): """Tracks the state of an conversion task. @@ -1551,10 +1379,11 @@ self.log_path = task.log_path self.progress = task.progress self.error = task.error - self.item_name = task.item_info.name + self.item_name = task.item_info.title self.item_thumbnail = task.item_info.thumbnail self.eta = task.get_eta() self.target = task.get_display_name() + self.output_size_guess = task.get_output_size_guess() class ConversionTasksList(FrontendMessage): """Send the current list of running and pending conversion tasks to the @@ -1600,28 +1429,26 @@ class SharingInfo(object): """Tracks the state of an extent share.""" - def __init__(self, share_id, tracker_id, name, host, port, parent_id=None, - playlist_id=None, podcast=False, has_children=False): - # We need to create a unique identifier for indexing. Fortunately - # this may be non-numeric. We just combine the name, host, port - # as our index. - self.id = share_id - self.tracker_id = tracker_id - self.name = name - self.host = host - self.port = port + def __init__(self, share): + self.id = 'sharing-%s' % (share.id,) + self.share_id = share.id + self.sqlite_path = share.db_path + self.name = share.name + self.host = share.host + self.port = share.port self.share_available = False self.stale_callback = None self.mount = False - self.podcast = podcast self.is_updating = False + +class SharingPlaylistInfo(object): + """Tracks the state a playlist on a share.""" + def __init__(self, share_id, name, playlist_id, podcast): + self.share_id = share_id + self.id = u'sharing-%s-%s' % (share_id, playlist_id) + self.name = name + self.podcast = podcast self.playlist_id = playlist_id - if parent_id is not None: - self.is_folder = has_children - self.parent_id = parent_id - else: - self.parent_id = None - self.is_folder = True class SharingEject(BackendMessage): """Tells the backend that the user has requested the share be disconnected. @@ -1632,13 +1459,18 @@ class DeviceInfo(object): """Tracks the state of an attached device. """ - def __init__(self, id_, device_info, mount, database, size, remaining): + def __init__(self, id_, device_info, mount, sqlite_path, database, db_info, + metadata_manager, size, remaining, read_only): self.id = id_ self.mount = mount + self.sqlite_path = sqlite_path self.database = database + self.db_info = db_info + self.metadata_manager = metadata_manager self.size = size self.remaining = remaining self.info = device_info + self.read_only = read_only self.name = database.get('settings', {}).get('name', device_info.name) def __repr__(self): @@ -1653,9 +1485,10 @@ class CurrentSyncInformation(FrontendMessage): """Informs the frontend of what the current sync would look like. """ - def __init__(self, device, count): + def __init__(self, device, count, size): self.device = device self.count = count + self.size = size class DeviceSyncChanged(FrontendMessage): """Informs the frontend that the status of a device sync has changed. 
This @@ -1674,8 +1507,8 @@ self.title = title self.desc = desc -class PlayMovie(FrontendMessage): - """Starts playing a specific movie. +class PlayMovies(FrontendMessage): + """Play a list of files """ def __init__(self, item_infos): self.item_infos = item_infos @@ -1732,12 +1565,14 @@ self.key = key if display is not None: self.selected_view = display.selected_view - self.active_filters = display.active_filters self.shuffle = display.shuffle self.repeat = display.repeat self.selection = display.selection self.sort_state = display.sort_state self.last_played_item_id = display.last_played_item_id + # shallow-copy attributes that store lists, dicts, and sets so + # that changing the database object doesn't change the DisplayInfo + self.active_filters = copy.copy(display.active_filters) else: self.selected_view = None self.active_filters = None @@ -1746,14 +1581,6 @@ self.selection = None self.sort_state = None self.last_played_item_id = None - if display is not None and display.list_view_columns is not None: - self.list_view_columns = display.list_view_columns[:] - else: - self.list_view_columns = None - if display is not None and display.list_view_widths is not None: - self.list_view_widths = display.list_view_widths.copy() - else: - self.list_view_widths = None class GlobalInfo(object): """Contains the properties that are global to the widgets frontend @@ -1770,8 +1597,14 @@ self.key = key if view is not None: self.scroll_position = view.scroll_position + # shallow-copy attributes that store lists, dicts, and sets so + # that changing the database object doesn't change the DisplayInfo + self.columns_enabled = copy.copy(view.columns_enabled) + self.column_widths = copy.copy(view.column_widths) else: self.scroll_position = None + self.columns_enabled = None + self.column_widths = None class OpenInExternalBrowser(FrontendMessage): """Opens the specified url in an external browser. @@ -1804,8 +1637,19 @@ pass class MetadataProgressUpdate(FrontendMessage): - def __init__(self, target, remaining, eta, total): + def __init__(self, target, finished, finished_local, eta, total): self.target = target - self.remaining = remaining + self.finished = finished + self.finished_local = finished_local self.eta = eta self.total = total + +class SetNetLookupEnabledFinished(FrontendMessage): + """The backend has processed the SetNetLookupEnabled message.""" + pass + +class NetLookupCounts(FrontendMessage): + """Update the frontend on how many items we're running net lookups for.""" + def __init__(self, net_lookup_count, total_count): + self.net_lookup_count = net_lookup_count + self.total_count = total_count diff -Nru miro-4.0.4/lib/metadataprogress.py miro-6.0/lib/metadataprogress.py --- miro-4.0.4/lib/metadataprogress.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/metadataprogress.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,140 +0,0 @@ -# Miro - an RSS based video player application -# Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011 -# Participatory Culture Foundation -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA -# -# In addition, as a special exception, the copyright holders give -# permission to link the code of portions of this program with the OpenSSL -# library. -# -# You must obey the GNU General Public License in all respects for all of -# the code used other than OpenSSL. If you modify file(s) with this -# exception, you may extend this exception to your version of the file(s), -# but you are not obligated to do so. If you do not wish to do so, delete -# this exception statement from your version. If you delete this exception -# statement from all source files in the program, then also delete it here. - -"""``miro.metadataprogress`` -- Send the frontend metadata progress updates -""" - -import logging - -from miro import eventloop -from miro import filetypes -from miro import messages - -class MetadataProgressUpdater(object): - """Send the frontend progress updates for extracting metadata. - - - This class gets put in app.metadata_progress_updater. It should be used - for operations that create lots of FileItems. - - To use this class, call will_process_path() before creating any of the - FileItems for all the paths you will add. Then the moviedata.py calls - path_processed() once all of the metadata processing is done. - """ - def __init__(self): - # maps target type -> counts - self.total = {} - self.remaining = {} - # maps path -> target key - self.path_to_target = {} - # targets that we need to update the frontend about - self.targets_to_update = set() - # handle for our delayed callback - self.timeout = None - # by default, wait half a second before sending the progess update to - # the frontend - self.message_interval = 0.5 - - def _guess_mediatype(self, path): - """Guess the mediatype of a file. Needs to be quick, as it's executed - by the requesting thread in request_update(), and nothing will break - if it isn't always accurate - so just checks filename. - """ - if filetypes.is_video_filename(path): - mediatype = 'video' - elif filetypes.is_audio_filename(path): - mediatype = 'audio' - else: - mediatype = 'other' - return mediatype - - def _calc_target(self, path, device): - """Calculate the target to use for our messages. """ - - mediatype = self._guess_mediatype(path) - if device: - return (u'device', '%s-%s' % (device.id, mediatype)) - elif mediatype in ('audio', 'video'): - return (u'library', mediatype) - else: # mediatype 'other' - return None - - def will_process_path(self, path, device=None): - """Call we've started processing metadata for a file - - This method should only be called from the backend thread. - """ - if path in self.path_to_target: - # hmm, we already are storing path in our system. Log a warning - # and don't count it - logging.warn("MetadataProgressUpdate.will_process_path() called " - "for path %s that we already counted for %s", path, - self.path_to_target[path]) - return - target = self._calc_target(path, device) - if target is None: - return - - self.path_to_target[path] = target - self.total.setdefault(target, 0) - self.total[target] += 1 - self.remaining.setdefault(target, 0) - self.remaining[target] += 1 - self._schedule_update(target) - - def path_processed(self, path): - """Call we've finished all processing for a file. - - This method should only be called from the backend thread. 
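The updater never sends one message per file; the _schedule_update() and _send_updates() pair below coalesces everything that changed in the last half second into a single MetadataProgressUpdate. A standalone sketch of that coalescing trick, with threading.Timer standing in for eventloop.add_timeout:

import threading
import time

class ProgressBatcher(object):
    """Coalesce many per-file events into one update per interval."""
    MESSAGE_INTERVAL = 0.5

    def __init__(self, send_func):
        self.send_func = send_func
        self.dirty_targets = set()
        self.timer = None
        self.lock = threading.Lock()

    def schedule_update(self, target):
        with self.lock:
            self.dirty_targets.add(target)
            if self.timer is None:
                # first event since the last flush: arm the timer
                self.timer = threading.Timer(self.MESSAGE_INTERVAL,
                                             self._flush)
                self.timer.start()

    def _flush(self):
        with self.lock:
            targets = self.dirty_targets
            self.dirty_targets = set()
            self.timer = None
        for target in targets:
            self.send_func(target)

def send(target):
    print('progress update for %r' % (target,))

batcher = ProgressBatcher(send)
for i in range(100):
    batcher.schedule_update(('library', 'audio'))  # coalesces to one send
time.sleep(0.6)   # let the timer fire before the script exits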
- """ - try: - target = self.path_to_target.pop(path) - except KeyError: - # will_process_path wasn't called, just ignore - return - - self.remaining[target] -= 1 - if not self.remaining[target]: - # finished extracting all data, reset the total - self.total[target] = 0 - self._schedule_update(target) - - def _schedule_update(self, target): - self.targets_to_update.add(target) - if self.timeout is None: - self.timeout = eventloop.add_timeout(self.message_interval, - self._send_updates, "update metadata progress") - - def _send_updates(self): - for target in self.targets_to_update: - update = messages.MetadataProgressUpdate(target, - self.remaining[target], None, self.total[target]) - update.send_to_frontend() - self.targets_to_update = set() - self.timeout = None diff -Nru miro-4.0.4/lib/metadata.py miro-6.0/lib/metadata.py --- miro-4.0.4/lib/metadata.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/metadata.py 2013-04-05 16:02:42.000000000 +0000 @@ -29,239 +29,1890 @@ """``miro.metadata`` -- Handle metadata properties for *Items. Generally the frontend cares about these and the backend doesn't. + +Module properties: + attribute_names -- set of attribute names for metadata """ +import collections +import contextlib import logging -import fileutil import os.path +import time -from miro.util import returns_unicode -from miro import coverart +from miro import app +from miro import clock +from miro import database +from miro import echonest +from miro import eventloop from miro import filetags from miro import filetypes +from miro import fileutil +from miro import messages +from miro import net +from miro import prefs +from miro import signals +from miro import workerprocess +from miro.plat.utils import (filename_to_unicode, + get_enmfp_executable_info) + +attribute_names = set([ + 'file_type', 'duration', 'album', 'album_artist', 'album_tracks', + 'artist', 'cover_art', 'screenshot', 'has_drm', 'genre', + 'title', 'track', 'year', 'description', 'rating', 'show', 'episode_id', + 'episode_number', 'season_number', 'kind', 'net_lookup_enabled', +]) + +class MetadataStatus(database.DDBObject): + """Stores the status of different metadata extractors for a file + + For each metadata extractor (mutagen, movie data program, echonest, etc), + we store if that extractor has run yet, has failed, is being skipped, etc. + """ + + # NOTE: this class uses the database cache to store MetadataStatus objects + # for quick access. The category is "metadata", the key is the path to + # the metadata and the value is the MetadataStatus object. + # + # If the value is None, that means there's a MetadataStatus object in the + # database, but we haven't loaded it yet. 
+ + # constants for the *_status columns + STATUS_NOT_RUN = u'N' + STATUS_COMPLETE = u'C' + STATUS_FAILURE = u'F' + STATUS_TEMPORARY_FAILURE = u'T' + STATUS_SKIP = u'S' + + FINISHED_STATUS_VERSION = 1 + + _source_name_to_status_column = { + u'mutagen': 'mutagen_status', + u'movie-data': 'moviedata_status', + u'echonest': 'echonest_status', + } + + def setup_new(self, path, net_lookup_enabled): + self.path = path + self.net_lookup_enabled = net_lookup_enabled + self.file_type = u'other' + self.mutagen_status = self.STATUS_NOT_RUN + self.moviedata_status = self.STATUS_NOT_RUN + if self.net_lookup_enabled: + self.echonest_status = self.STATUS_NOT_RUN + else: + self.echonest_status = self.STATUS_SKIP + self.mutagen_thinks_drm = False + self.echonest_id = None + self.max_entry_priority = -1 + # current processor tracks what processor we should be running for + # this status. We don't save it to the database. + self.current_processor = u'mutagen' + # finished_status tracks if we are done running metadata processors on + # an item. finished status is: + # - 0 if we haven't finished running metadata on it. + # - A positive version code once we are done. This version code + # increases as we add more metadata processors. + # This hopefully is allows us to track if metadata processing is + # finished, even as the database schema changes between miro versions. + self.finished_status = 0 + self._add_to_cache() + + def setup_restored(self): + self._set_current_processor(update_finished_status=False) + self.db_info.db.cache.set('metadata', self.path, self) + + def copy_status(self, other_status): + """Copy values from another metadata status object.""" + for name, field in app.db.schema_fields(MetadataStatus): + # don't copy id or path for obvious resons. Don't copy + # net_lookup_enabled because we don't want the other status's + # value to overwrite ours. The main reason is that for device + # items, when we copy the local item's metadata, we don't want to + # set net_lookup_enabled to True. + if name not in ('id', 'path', 'net_lookup_enabled'): + setattr(self, name, getattr(other_status, name)) + # also copy current_processor, which doesn't get stored in the DB and + # thus isn't returned by schema_fields() + self.current_processor = other_status.current_processor + self.signal_change() -class Source(object): - """Object with readable metadata properties.""" + @classmethod + def get_by_path(cls, path, db_info=None): + """Get an object by its path attribute. - def get_iteminfo_metadata(self): - # until MDP has run, has_drm is very uncertain; by letting it be True in - # the backend but False in the frontend while waiting for MDP, we keep - # is_playable False but don't show "DRM Locked" until we're sure. - has_drm = self.has_drm and self.mdp_state is not None - return dict( - name = self.get_title(), - title_tag = self.title_tag, - description = self.get_description(), - album = self.album, - album_artist = self.album_artist, - artist = self.artist, - track = self.track, - album_tracks = self.album_tracks, - year = self.year, - genre = self.genre, - rating = self.rating, - cover_art = self.cover_art, - has_drm = has_drm, - show = self.show, - episode_id = self.episode_id, - episode_number = self.episode_number, - season_number = self.season_number, - kind = self.kind, - metadata_version = self.metadata_version, - mdp_state = self.mdp_state, - ) + We use the DatabaseObjectCache to cache status by path, so this method + is fairly quick. 
+ """ + if db_info is None: + db_info = app.db_info - def setup_new(self): - self.title = u"" - self.title_tag = None - self.description = u"" - self.album = None - self.album_artist = None - self.artist = None - self.track = None - self.album_tracks = None - self.year = None - self.genre = None - self.rating = None - self.cover_art = None - self.has_drm = None - self.file_type = None - self.show = None - self.episode_id = None - self.episode_number = None - self.season_number = None - self.kind = None - self.metadata_version = 0 - self.mdp_state = None # moviedata.State.UNSEEN - - @property - def media_type_checked(self): - """This was previously tracked as a real property; it's used by - ItemInfo. Provided for compatibility with the previous API. - """ - return self.file_type is not None - - @returns_unicode - def get_title(self): - if self.title: - return self.title - else: - return self.title_tag if self.title_tag else u'' - - @returns_unicode - def get_description(self): - return self.description - - def read_metadata(self): - # always mark the file as seen - self.metadata_version = filetags.METADATA_VERSION - - if self.file_type == u'other': - return - - path = self.get_filename() - rv = filetags.read_metadata(path) - if not rv: - return - - mediatype, duration, metadata, cover_art = rv - self.file_type = mediatype - # FIXME: duration isn't actually a attribute of metadata.Source. - # This currently works because Item and Device item are the only - # classes that call read_metadata(), and they both define duration - # the same way. - # - # But this is pretty fragile. We should probably refactor - # duration to be an attribute of metadata.Source. - self.duration = duration - self.cover_art = cover_art - self.album = metadata.get('album', None) - self.album_artist = metadata.get('album_artist', None) - self.artist = metadata.get('artist', None) - self.title_tag = metadata.get('title', None) - self.track = metadata.get('track', None) - self.year = metadata.get('year', None) - self.genre = metadata.get('genre', None) - self.has_drm = metadata.get('drm', False) - - # 16346#c26 - run MDP for all OGG files in case they're videos - extension = os.path.splitext(path)[1].lower() - # oga is the only ogg-ish extension guaranteed to be audio - if extension.startswith('.og') and extension != '.oga': - # None because we need is_playable to be False until MDP has - # determined the real file type, or newly-downloaded videos will - # always play as audio; MDP always looks at file_type=None files - self.file_type = None - -def metadata_setter(attribute, type_=None): - def set_metadata(self, value, _bulk=False): - if value is not None and type_ is not None: - # None is always an acceptable value for metadata properties - value = type_(value) - if not _bulk: - self.confirm_db_thread() - setattr(self, attribute, value) - if not _bulk: - self.signal_change() - self.write_back((attribute,)) - return set_metadata + try: + cache_value = db_info.db.cache.get('metadata', path) + except KeyError: + cache_value = None + + if cache_value is not None: + return cache_value + else: + view = cls.make_view('path=?', (filename_to_unicode(path),), + db_info=db_info) + return view.get_singleton() + + @classmethod + def paths_for_album(cls, album, db_info=None): + rows = cls.select(['path',], + 'id IN ' + '(SELECT status_id FROM metadata p WHERE ' + 'album=? 
AND priority=' + '(SELECT MAX(priority) FROM metadata c ' + 'WHERE p.status_id=c.status_id AND ' + 'NOT disabled AND album IS NOT NULL))', + (album,), db_info=db_info) + return [r[0] for r in rows] + + @classmethod + def net_lookup_enabled_view(cls, net_lookup_enabled, db_info=None): + return cls.make_view('net_lookup_enabled=?', + (net_lookup_enabled,), + db_info=db_info) + + @classmethod + def failed_temporary_view(cls, db_info=None): + return cls.make_view('echonest_status=?', + (cls.STATUS_TEMPORARY_FAILURE,), + db_info=db_info) + + def _add_to_cache(self): + if self.db_info.db.cache.key_exists('metadata', self.path): + # duplicate path. Lets let sqlite raise the error when we try to + # insert things. + logging.warn("self.path already in cache (%s)", self.path) + return + self.db_info.db.cache.set('metadata', self.path, self) + + def insert_into_db_failed(self): + self.db_info.db.cache.remove('metadata', self.path) + + def remove(self): + self.db_info.db.cache.remove('metadata', self.path) + database.DDBObject.remove(self) + + def get_has_drm(self): + """Does this media file have DRM preventing us from playing it? + + has_drm is True when all of these are True + - mutagen thinks the object has DRM + - movie data failed to open the file, or we're not going to run movie + data (this is only true for items created before the MetadataManager + existed) + """ + return (self.mutagen_thinks_drm and + self.moviedata_status in + (MetadataStatus.STATUS_FAILURE or MetadataStatus.STATUS_SKIP)) + + def _set_status_column(self, source_name, value): + column_name = self._source_name_to_status_column[source_name] + setattr(self, column_name, value) + + def need_metadata_for_source(self, source_name): + column_name = self._source_name_to_status_column[source_name] + return getattr(self, column_name) == self.STATUS_NOT_RUN -class Store(Source): - """Object with read/write metadata properties.""" + def update_after_success(self, entry, result): + """Update after we succussfully extracted some metadata - set_title = metadata_setter('title', unicode) - set_title_tag = metadata_setter('title_tag', unicode) - set_description = metadata_setter('description', unicode) - set_album = metadata_setter('album', unicode) - set_album_artist = metadata_setter('album_artist', unicode) - set_artist = metadata_setter('artist', unicode) - set_track = metadata_setter('track', int) - set_album_tracks = metadata_setter('album_tracks') - set_year = metadata_setter('year', int) - set_genre = metadata_setter('genre', unicode) - set_rating = metadata_setter('rating', int) - set_file_type = metadata_setter('file_type', unicode) - set_has_drm = metadata_setter('has_drm', bool) - set_show = metadata_setter('show', unicode) - set_episode_id = metadata_setter('episode_id', unicode) - set_episode_number = metadata_setter('episode_number', int) - set_season_number = metadata_setter('season_number', int) - set_kind = metadata_setter('kind', unicode) - set_metadata_version = metadata_setter('metadata_version', int) - set_mdp_state = metadata_setter('mdp_state', int) - - def set_cover_art(self, new_file, _bulk=False): - """Set new cover art. Deletes any old cover art. - - Creates a copy of the image in our cover art directory. 
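One detail worth flagging in get_has_drm() above: Python evaluates the parenthesized "or" before the "in" test, so membership is only ever checked against STATUS_FAILURE; a tuple expresses the two-value test the docstring describes. A standalone version of the intended rule:

STATUS_COMPLETE = u'C'
STATUS_FAILURE = u'F'
STATUS_SKIP = u'S'

def has_drm(mutagen_thinks_drm, moviedata_status):
    # locked only when mutagen flagged DRM and movie data failed or was
    # skipped; a comma (tuple), not "or", expresses the two-value test
    return (mutagen_thinks_drm and
            moviedata_status in (STATUS_FAILURE, STATUS_SKIP))

print(has_drm(True, STATUS_FAILURE))    # True
print(has_drm(True, STATUS_COMPLETE))   # False: movie data could read it
print(has_drm(False, STATUS_SKIP))      # False: mutagen saw no DRM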
- """ - if not _bulk: - self.confirm_db_thread() - if new_file: - new_cover = coverart.Image.from_file(new_file, self.get_filename()) - self.delete_cover_art() - if new_file: - self.cover_art = new_cover - if not _bulk: + :param entry: MetadataEntry for the new data + :param result: dictionary from the processor + """ + self._set_status_column(entry.source, self.STATUS_COMPLETE) + if (entry.priority >= self.max_entry_priority and + entry.file_type is not None): + self.file_type = entry.file_type + if entry.source == 'mutagen': + if self._should_skip_movie_data(entry): + self.moviedata_status = self.STATUS_SKIP + thinks_drm = entry.drm if entry.drm is not None else False + self.mutagen_thinks_drm = thinks_drm + elif entry.source == 'movie-data': + if entry.file_type != u'audio': + self.echonest_status = self.STATUS_SKIP + elif entry.source == 'echonest' and 'echonest_id' in result: + self.echonest_id = result['echonest_id'] + self.max_entry_priority = max(self.max_entry_priority, entry.priority) + self._set_current_processor() + self.signal_change() + + def set_echonest_id(self, echonest_id): + self.echonest_id = echonest_id + self.signal_change() + + def retry_echonest(self): + if self.echonest_status == self.STATUS_TEMPORARY_FAILURE: + self.echonest_status = self.STATUS_NOT_RUN self.signal_change() - self.write_back(('cover_art',)) + else: + logging.warn("MetadataEntry.retry_echonest() called, but " + "echonest_status is %r", self.echonest_status) + + def _should_skip_movie_data(self, entry): + my_ext = os.path.splitext(self.path)[1].lower() + # we should skip movie data if: + # - we're sure the file is audio (mutagen misreports .ogg videos as + # audio) + # - mutagen was able to get the duration for the file + # - mutagen doesn't think the file has DRM + if ((my_ext == '.oga' or not my_ext.startswith('.og')) and + entry.file_type == 'audio' and + entry.duration is not None and + not entry.drm): + return True + # We should also skip it if mutagen couldn't identify the file and the + # extension indicates it's a non-media file + if entry.file_type is None and filetypes.is_other_filename(self.path): + return True + return False + + def update_after_error(self, source_name, error): + """Update after we failed to extract some metadata. + + Returns the new status column value + """ + if source_name == 'echonest' and isinstance(error, net.NetworkError): + new_status = self.STATUS_TEMPORARY_FAILURE + else: + new_status = self.STATUS_FAILURE + self._set_status_column(source_name, new_status) + if (source_name == u'movie-data' and + (self.mutagen_status == self.STATUS_FAILURE or + self.file_type != u'audio')): + # if moviedata failed and mutagen either thought the file was + # video, or it couldn't read it, then don't + # bother sending it to echonest. We don't want to run the codegen + # program. 
+ self.echonest_status = self.STATUS_SKIP + if new_status != self.STATUS_TEMPORARY_FAILURE: + self._set_current_processor() + self.signal_change() + return new_status + + def _set_current_processor(self, update_finished_status=True): + """Calculate and set the current_processor attribute """ + # check what the next processor we should run is + if self.mutagen_status == MetadataStatus.STATUS_NOT_RUN: + self.current_processor = u'mutagen' + elif self.moviedata_status == MetadataStatus.STATUS_NOT_RUN: + self.current_processor = u'movie-data' + elif self.echonest_status == MetadataStatus.STATUS_NOT_RUN: + self.current_processor = u'echonest' + else: + self.current_processor = None + if (update_finished_status and + self.FINISHED_STATUS_VERSION > self.finished_status): + self.finished_status = self.FINISHED_STATUS_VERSION + + def set_net_lookup_enabled(self, enabled): + self.net_lookup_enabled = enabled + if (enabled and + self.echonest_status == self.STATUS_SKIP + and self.file_type == u'audio'): + self.echonest_status = self.STATUS_NOT_RUN + elif not enabled and self.echonest_status == self.STATUS_NOT_RUN: + self.echonest_status = self.STATUS_SKIP + self._set_current_processor() + self.signal_change() + + def rename(self, new_path): + """Change the path for this object.""" + self.db_info.db.cache.remove('metadata', self.path) + self.db_info.db.cache.set('metadata', new_path, self) + self.path = new_path + self.signal_change() + + @classmethod + def was_running_select(cls, columns, db_info=None): + return cls.select(columns, 'finished_status < ?', + values=(cls.FINISHED_STATUS_VERSION,), + db_info=db_info) + +class MetadataEntry(database.DDBObject): + """Stores metadata from a single source. + + Each metadata extractor (mutagen, movie data, echonest, etc), we create a + MetadataEntry object for each path that it got metadata from. + """ + + # stores the priorities for each source type + source_priority_map = { + 'old-item': 10, + 'mutagen': 20, + 'movie-data': 30, + 'echonest': 40, + 'user-data': 50, + } + + # metadata columns stores the set of column names that store actual + # metadata (as opposed to things like source and path) + metadata_columns = attribute_names.copy() + # has_drm is a tricky column. In MetadataEntry objects it's just called + # 'drm'. Then MetadataManager calculates has_drm based on a variety of + # factors. + metadata_columns.discard('has_drm') + metadata_columns.add('drm') + # net_lookup_enabled is proveded by the metadata_status table, not the + # actual metadata tables + metadata_columns.discard('net_lookup_enabled') + + def setup_new(self, status, source, data): + self.status_id = status.id + self.source = source + self.priority = MetadataEntry.source_priority_map[source] + self.disabled = False + # set all metadata to None by default + for name in self.metadata_columns: + setattr(self, name, None) + self.__dict__.update(data) + if source != 'user-data': + # we only save cover_art for user-data. Other sources save the + # cover art using a per-album filename. 
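The source_priority_map above is what makes merging deterministic: entries are applied in ascending priority, so user-data beats echonest, which beats mutagen. A hypothetical standalone merge showing the effect:

SOURCE_PRIORITY = {'old-item': 10, 'mutagen': 20, 'movie-data': 30,
                   'echonest': 40, 'user-data': 50}

def merge_metadata(entries):
    """entries: iterable of (source, metadata_dict) pairs in any order."""
    merged = {}
    for source, data in sorted(entries,
                               key=lambda entry: SOURCE_PRIORITY[entry[0]]):
        for key, value in data.items():
            if value is not None:
                merged[key] = value   # higher-priority source overwrites
    return merged

result = merge_metadata([('user-data', {'title': 'My Title'}),
                         ('mutagen', {'title': 'tag title',
                                      'album': 'Tag Album'})])
print(sorted(result.items()))
# [('album', 'Tag Album'), ('title', 'My Title')]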
+ self.cover_art = None + + def update_metadata(self, new_data): + """Update the values for this object.""" + for name in self.metadata_columns: + if name in new_data: + setattr(self, name, new_data[name]) + self.signal_change() + + def get_metadata(self): + """Get the metadata stored in this object as a dict.""" + rv = {} + for name in self.metadata_columns: + value = getattr(self, name) + if value is not None: + rv[name] = value + return rv + + def rename(self, new_path): + """Change the path for this object.""" + self.path = new_path + self.signal_change() + + @classmethod + def metadata_for_status(cls, status, db_info=None): + return cls.make_view('status_id=? AND NOT disabled', + (status.id,), + order_by='priority ASC', + db_info=db_info) + + @classmethod + def get_entry(cls, source, status, db_info=None): + view = cls.make_view('source=? AND status_id=?', + (source, status.id), + db_info=db_info) + return view.get_singleton() + + @classmethod + def incomplete_echonest_view(cls, db_info=None): + # if echonest didn't find the song, then title will be NULL if + # 7digital didn't find the album, then album will be NULL. If either + # of those are true, then we want to retry re-querying + return cls.make_view('source="echonest" AND ' + '(album IS NULL or title IS NULL)', + db_info=None) + + @classmethod + def set_disabled(cls, source, status, disabled, db_info=None): + """Set/Unset the disabled flag for metadata entry. + + :returns: True if there was an entry to change + """ + try: + entry = cls.get_entry(source, status, db_info=None) + except database.ObjectNotFoundError: + return False + else: + entry.disabled = disabled + entry.signal_change() + return True + +class _MetadataProcessor(signals.SignalEmitter): + """Base class for processors that handle getting metadata somehow. + + Responsible for: + - starting the extraction processes + - queueing messages after we reach a limit + - handling callbacks and errbacks + + Attributes: + + source_name -- Name to identify the source name. This should match the + Metadata.source attribute + + Signals: + + - task-complete(path, result) -- we successfully extracted metadata + - task-error(path, error) -- we failed to extract metadata + """ + def __init__(self, source_name): + signals.SignalEmitter.__init__(self) + self.create_signal('task-complete') + self.create_signal('task-error') + self.source_name = source_name + + def remove_tasks_for_paths(self, paths): + """Cancel any pending tasks for paths + + _MetadataProcessors should make their best attempt to stop the task, + but since they are all using threads and/or different processes, + there's always a chance that the task will still be processed + """ + pass + +class _TaskProcessor(_MetadataProcessor): + """Handle sending tasks to the worker process. 
""" + + def __init__(self, source_name, limit): + _MetadataProcessor.__init__(self, source_name) + self.limit = limit + # map source paths to tasks + self._active_tasks = {} + self._pending_tasks = {} + + def task_count(self): + return len(self._active_tasks) + len(self._pending_tasks) + + def add_task(self, task): + if len(self._active_tasks) < self.limit: + self._send_task(task) + else: + self._pending_tasks[task.source_path] = task + + def _send_task(self, task): + self._active_tasks[task.source_path] = task + workerprocess.send(task, self._callback, self._errback) + + def remove_task_for_path(self, path): + self.remove_tasks_for_paths([path]) + + def remove_tasks_for_paths(self, paths): + for path in paths: + try: + del self._active_tasks[path] + except KeyError: + # task isn't in our system, maybe it's pending? + try: + del self._pending_tasks[path] + except KeyError: + pass + + while len(self._active_tasks) < self.limit and self._pending_tasks: + path, task = self._pending_tasks.popitem() + self._send_task(task) + + def _callback(self, task, result): + if task.source_path not in self._active_tasks: + logging.debug("%s done but already removed: %r", self.source_name, + task.source_path) + return + logging.debug("%s done: %r", self.source_name, task.source_path) + self._check_for_none_values(result) + self.emit('task-complete', task.source_path, result) + self.remove_task_for_path(task.source_path) + + def _check_for_none_values(self, result): + """Check that result dicts don't have keys for None values.""" + # FIXME: we shouldn't need this function, metadata extractors should + # only send keys for values that it actually has, not None values. + for key, value in result.items(): + if value is None: + app.controller.failed_soft('_check_for_none_values', + '%s has None Value' % key, + with_exception=False) + del result[key] + + def _errback(self, task, error): + logging.warn("Error running %s for %r: %s", task, task.source_path, + error) + self.emit('task-error', task.source_path, error) + self.remove_task_for_path(task.source_path) + +class _EchonestQueue(object): + """Queue for echonest tasks. + + _EchonestQueue is a modified FIFO. Each queue item is stored as a path + + optional additional data. + """ + def __init__(self): + self.queue = collections.deque() + + def add(self, path, *extra_data): + """Add a path to the queue. + + *extra_data can be used to store related data to the path. It will be + returned along with the path in pop() + """ + self.queue.append((path, extra_data)) + + def pop(self): + """Pop a path from the active queue. + + If any positional arguments were passed in to add(), then return the + tuple (path, extra_data1, extra_data2, ...). If not, just return + path. + + :raises IndexError: no path to pop + """ + path, extra_data = self.queue.popleft() + if extra_data: + return (path,) + extra_data + else: + return path + + def remove_paths(self, path_set): + """Remove all paths that are in a set.""" + + def filter_removed(item): + return item[0] not in path_set + new_items = filter(filter_removed, self.queue) + self.queue.clear() + self.queue.extend(new_items) + + def __len__(self): + """Get the number of items in the queue that are not disabled because + of the config value. 
+ """ + return len(self.queue) + +class _EchonestProcessor(_MetadataProcessor): + """Processor runs echonest queries + + Currently we use ENMF to generate codes, but we may switch to echoprint in + the future + + _EchonestProcessor stops calling the codegen processor once a certain + buffer of codes to be sent to echonest is built up. + """ + + # Cooldown time for our codegen process + CODEGEN_COOLDOWN_TIME = 5.0 + + # constants that control pausing after we get a bunch of HTTP errors. + # These settings mean that if we get 3 errors in 5 minutes, then we will + # pause. + PAUSE_AFTER_HTTP_ERROR_COUNT = 3 + PAUSE_AFTER_HTTP_ERROR_TIMEOUT = 60 * 5 + + # NOTE: _EchonestProcessor dosen't inherity from _TaskProcessor because it + # handles it's work using httpclient rather than making tasks and sending + # them to workerprocess. + + def __init__(self, code_buffer_size, cover_art_dir): + _MetadataProcessor.__init__(self, u'echonest') + self._code_buffer_size = code_buffer_size + self._cover_art_dir = cover_art_dir + # We create 3 queues to handle items at various stages of the process. + # - _metadata_fetch_queue holds paths that we need to fetch the + # metadata for. It's the first queue that paths go to + # - _codegen_queue contains paths that we need to run the codegen + # executable on + # - _echonest_queue contains paths that we need to query echonest for + self._metadata_fetch_queue = _EchonestQueue() + self._codegen_queue = _EchonestQueue() + self._echonest_queue = _EchonestQueue() + self._running_codegen = False + self._querying_echonest = False + self._codegen_info = get_enmfp_executable_info() + self._codegen_cooldown_end = 0 + self._codegen_cooldown_caller = eventloop.DelayedFunctionCaller( + self._process_queue) + self._metadata_for_path = {} + self._paths_in_system = set() + self._http_error_times = collections.deque() + self._waiting_from_http_errors = False + + def add_path(self, path, metadata_fetcher): + """Add a path to the system. + + metadata_fetcher is used to get the metadata for that path to send to + echonest. We use a callable object that returns the metadata instead + of a straight dict because we want to fetch things lazily. + metadata_fetcher should raise a KeyError if the path is not in the + metadata system when its called. + + :param path: path to add + :param metadata_fetcher: callable that will return a metadata dict + """ + if path in self._paths_in_system: + logging.warn("_EchonestProcessor.add_path: attempt to add " + "duplicate path: %r", path) + return + self._paths_in_system.add(path) + self._metadata_fetch_queue.add(path, metadata_fetcher) + self._process_queue() + + def should_skip_codegen(self, metadata): + """Determine if a file can skip the code generator. + + This is true when we have enough metadata to send to echonest. + """ + # This check is actually pretty easy. If we have a title, then + # there's a good chance for a match. If we don't then there's no + # chance. 
+ return 'title' in metadata + + def _run_codegen(self, path): + echonest.exec_codegen(self._codegen_info, path, self._codegen_callback, + self._codegen_errback) + self._running_codegen = True + + def _codegen_callback(self, path, code): + if path in self._paths_in_system: + self._echonest_queue.add(path, code) + else: + logging.warn("_EchonestProcessor._codegen_callback called for " + "path not in system: %r", path) + self._codegen_finished() + + def _codegen_errback(self, path, error): + logging.warn("Error running echonest codegen for %r (%s)" % + (path, error)) + self.emit('task-error', path, error) + del self._metadata_for_path[path] + self._paths_in_system.discard(path) + self._codegen_finished() + + def _query_echonest(self, path, code): + if path not in self._paths_in_system: + logging.warn("_EchonestProcessor._query_echonest() called for " + "path not in system: %r", path) + return + version = 3.15 # change to 4.11 for echoprint + metadata = self._metadata_for_path.pop(path) + echonest.query_echonest(path, self._cover_art_dir, code, version, + metadata, self._echonest_callback, + self._echonest_errback) + self._querying_echonest = True + + def _echonest_callback(self, path, metadata): + if path in self._paths_in_system: + logging.debug("Got echonest data for %s:\n%s", path, metadata) + self._paths_in_system.discard(path) + self.emit('task-complete', path, metadata) + else: + logging.warn("_EchonestProcessor._echonest_callback called for " + "path not in system: %r", path) + self._querying_echonest = False + self._process_queue() + + def _echonest_errback(self, path, error): + logging.warn("Error running echonest for %s (%s)" % (path, error)) + self._paths_in_system.discard(path) + self._querying_echonest = False + if isinstance(error, net.NetworkError): + self._http_error_times.append(clock.clock()) + if (len(self._http_error_times) > + self.PAUSE_AFTER_HTTP_ERROR_COUNT): + self._http_error_times.popleft() + self.emit('task-error', path, error) + self._process_queue() + + def _process_queue(self): + if self._should_process_metadata_fetch_queue(): + self._process_metadata_fetch_queue() + if self._should_process_echonest_queue(): + self._process_echonest_queue() + if self._should_process_codegen_queue(): + self._run_codegen(self._codegen_queue.pop()) + + def _should_process_metadata_fetch_queue(self): + # Try not to fetch metadata more quickly than we need to. Only do it + # if the other queues waiting for us + return (self._metadata_fetch_queue and + len(self._codegen_queue) + len(self._echonest_queue) < 10) + + def _should_process_codegen_queue(self): + if not (self._codegen_queue and + not self._running_codegen and + len(self._echonest_queue) < self._code_buffer_size): + return False + cooldown_left = self._codegen_cooldown_end - clock.clock() + if cooldown_left > 0: + self._codegen_cooldown_caller.call_after_timeout(cooldown_left) + return False + return True + + def _should_process_echonest_queue(self): + return (self._echonest_queue and not self._querying_echonest and + not self._waiting_from_http_errors) + + def _process_echonest_queue(self): + if not self._should_pause_from_http_errors(): + self._query_echonest(*self._echonest_queue.pop()) + else: + # we've gotten too many HTTP errors recently and are backing + # off sending new requests. 
Add a timeout and try again then + if not self._waiting_from_http_errors: + name = 'restart echonest queue after http error' + eventloop.add_timeout(self.PAUSE_AFTER_HTTP_ERROR_TIMEOUT, + self._restart_after_http_errors, name) + self._waiting_from_http_errors = True + + def _process_metadata_fetch_queue(self): + while self._metadata_fetch_queue: + path, metadata_fetcher = self._metadata_fetch_queue.pop() + try: + metadata = metadata_fetcher() + except StandardError: + # log exceptions and try the next item in the queue. + logging.warn("_process_metadata_fetch_queue: metadata_fetcher " + "raised exception (path: %r, fetcher: %s)", + path, metadata_fetcher, exc_info=True) + continue + else: + self._metadata_for_path[path] = metadata + if not self.should_skip_codegen(metadata): + self._codegen_queue.add(path) + else: + self._echonest_queue.add(path, None) + return + + def _codegen_finished(self): + self._running_codegen = False + self._codegen_cooldown_end = (clock.clock() + + self.CODEGEN_COOLDOWN_TIME) + self._process_queue() + + def _should_pause_from_http_errors(self): + """Have we seen enough HTTP errors recently that we should pause + running the queue? + """ + return ((len(self._http_error_times) == + self.PAUSE_AFTER_HTTP_ERROR_COUNT) and + self._http_error_times[0] > clock.clock() - + self.PAUSE_AFTER_HTTP_ERROR_TIMEOUT) + + def _restart_after_http_errors(self): + self._waiting_from_http_errors = False + self._process_queue() + + def task_count(self): + return (len(self._metadata_fetch_queue) + + len(self._codegen_queue) + + len(self._echonest_queue)) + + def remove_tasks_for_paths(self, paths): + path_set = set(paths) + self._metadata_fetch_queue.remove_paths(path_set) + self._echonest_queue.remove_paths(path_set) + self._codegen_queue.remove_paths(path_set) + self._paths_in_system -= path_set + for path in path_set.intersection(self._metadata_for_path.keys()): + del self._metadata_for_path[path] + # since we may have deleted active paths, process the new ones + self._process_queue() + +class ProgressCountTracker(object): + """Helps MetadataManager keep track of counts for MetadataProgressUpdate + + ProgressCountTracker counts the total number of items that need metadata + processing, the number of items finished, and the number of items that + have finished mutagen/movie-data but still need internet metadata. Once + all items are finished, then the counts reset. + """ + + def __init__(self): + self.reset() + + def reset(self): + """Reset the counts.""" + self.all_files = set() + self.finished_local = set() + self.finished = set() + self.all_sets = (self.all_files, self.finished_local, self.finished) + + def get_count_info(self): + """Get the current count info. 
+ + This method gets three counts: + - total count: total number of files to be processed + - finished_local count: number of files that have finished + mutagen/moviedata, but not echonest + - finished_count count: number of files that have finished all + processing + + :returns: the tuple (total, finished_local, finished_count) + """ + return (len(self.all_files), + len(self.finished_local), + len(self.finished)) + + def file_started(self, path, initial_metadata): + """Add a file to the counts.""" + self.all_files.add(path) + + def file_net_lookup_restarted(self, path): + self.file_started(path, {}) + self.file_finished_local_processing(path) + + def file_updated(self, path, new_metadata): + """Call this as we get new metadata.""" + # subclasses can deal with this, but we don't + pass + + def file_finished_local_processing(self, path): + """Remove a file from our counts.""" + if path not in self.all_files: + logging.warn("file_finished_local_processing called for a file " + "that we're not tracking: %s", path) + return + self.finished_local.add(path) + + def file_finished(self, path): + """Remove a file from our counts.""" + if path not in self.all_files: + logging.warn("file_finished called for a file that we're " + "not tracking: %s", path) + return + self.finished.add(path) + self.finished_local.add(path) + if len(self.finished) == len(self.all_files): + self.reset() + + def file_moved(self, old_path, new_path): + """Handle a file changing names.""" + if old_path not in self.all_files: + logging.warn("file_moved called for a file that we're " + "not tracking: %s", old_path) + return + for count_set in self.all_sets: + if old_path in count_set: + count_set.remove(old_path) + count_set.add(new_path) + + def remove_file(self, path): + """Remove a file from the counts. + + This is different than finishing the file, since this will lower the + total count, rather than increase the finished count. + """ + for count_set in self.all_sets: + count_set.discard(path) + if len(self.finished) == len(self.all_files): + self.reset() + +class LibraryProgressCountTracker(object): + """Tracks progress counts for the library tabs. + + This has the same API as ProgressCountTracker for file tracking (the + functions file_started, file_updated, file_finished, etc), but + get_count_info() is different because it keeps separate counts for the + video and audio tabs, based on the file_type for each path. + """ + def __init__(self): + self.trackers = collections.defaultdict(ProgressCountTracker) + self.file_types = {} + + def get_count_info(self, file_type): + """Get the count info for the audio, video, or other tabs + + :param file_type: file type to get the count info for + :returns: the tuple (total, finished_local, finished_count) + """ + return self.trackers[file_type].get_count_info() + + def file_started(self, path, initial_metadata): + file_type = initial_metadata.get('file_type', u'other') + self.trackers[file_type].file_started(path, initial_metadata) + self.file_types[path] = file_type + + def file_net_lookup_restarted(self, path): + # assume that the file is audio, since we're only do internet lookups + # for those files + file_type = u'audio' + # start the file, then immediately move it to the net lookup stage. 
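ProgressCountTracker above resets itself once everything started has finished, so the next batch of files begins with a fresh progress bar. A tiny standalone model of that auto-reset rule:

class CountTracker(object):
    """Tiny model of ProgressCountTracker's auto-reset behavior."""
    def __init__(self):
        self.all_files = set()
        self.finished = set()

    def file_started(self, path):
        self.all_files.add(path)

    def file_finished(self, path):
        self.finished.add(path)
        if len(self.finished) == len(self.all_files):
            # everything started has now finished: clear the counts so
            # the next batch starts a fresh progress bar
            self.all_files.clear()
            self.finished.clear()

t = CountTracker()
t.file_started('a.mp3')
t.file_started('b.mp3')
t.file_finished('a.mp3')
print('%d total, %d done' % (len(t.all_files), len(t.finished)))  # 2, 1
t.file_finished('b.mp3')
print('%d total, %d done' % (len(t.all_files), len(t.finished)))  # 0, 0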
+        tracker = self.trackers[file_type]
+        tracker.file_started(path, {})
+        tracker.file_finished_local_processing(path)
+        self.file_types[path] = file_type
+
+    def file_moved(self, old_path, new_path):
+        try:
+            tracker = self._get_tracker_for_path(old_path)
+        except KeyError:
+            logging.warn("_get_tracker_for_path raised KeyError in "
+                         "file_moved() old: %s new: %s", old_path, new_path)
+        else:
+            file_type = self.file_types.pop(old_path)
+            tracker.file_moved(old_path, new_path)
+            self.file_types[new_path] = file_type
+
+    def file_finished(self, path):
+        try:
+            tracker = self._get_tracker_for_path(path)
+        except KeyError:
+            logging.warn("_get_tracker_for_path raised KeyError in "
+                         "file_finished (%s)", path)
+        else:
+            tracker.file_finished(path)
+
+    def file_finished_local_processing(self, path):
+        try:
+            tracker = self._get_tracker_for_path(path)
+        except KeyError:
+            logging.warn("_get_tracker_for_path raised KeyError in "
+                         "file_finished_local_processing (%s)", path)
+        else:
+            tracker.file_finished_local_processing(path)
+
+    def _get_tracker_for_path(self, path):
+        """Get the ProgressCountTracker for a file.
+
+        :returns: the ProgressCountTracker for the file's current type
+        :raises KeyError: path not in our file_types dict
+        """
+        return self.trackers[self.file_types[path]]
-    def delete_cover_art(self):
-        """Delete the cover art file and unset cover_art."""
+    def file_updated(self, path, metadata):
+        if 'file_type' not in metadata:
+            # no new file type, we don't have to change anything
+            return
+        new_file_type = metadata['file_type']
         try:
-            fileutil.remove(self.cover_art)
-        except (OSError, TypeError):
-            pass
-        self.cover_art = None
-
-    def setup_new(self):
-        Source.setup_new(self)
-        self._deferred_update = {}
-
-    def set_metadata_from_iteminfo(self, changes, _deferrable=True):
-        self.confirm_db_thread()
-        for field, value in changes.iteritems():
-            Store.ITEM_INFO_TO_ITEM[field](self, value, _bulk=True)
-        self.signal_change()
-        self.write_back(changes.keys())
-
-    def write_back(self, _changed):
-        """Write back metadata changes to the original source, if supported. If
-        this method fails because the item is playing, it should add the changed
-        fields to _deferred_update.
-        """
-        # not implemented yet
-        #logging.debug("%s can't write back changes", self.__class__.__name__)
-
-    def set_is_playing(self, playing):
-        """Hook so that we can defer updating an item's data if we can't change
-        it while it's playing.
- """ - if not playing and self._deferred_update: - self.set_metadata_from_iteminfo(self._deferred_update, _deferrable=False) - self._deferred_update = {} - super(Store, self).set_is_playing(playing) - - ITEM_INFO_TO_ITEM = dict( - name = set_title, - title_tag = set_title_tag, - description = set_description, - album = set_album, - album_artist = set_album_artist, - artist = set_artist, - track = set_track, - album_tracks = set_album_tracks, - year = set_year, - genre = set_genre, - rating = set_rating, - file_type = set_file_type, - cover_art = set_cover_art, - has_drm = set_has_drm, - show = set_show, - episode_id = set_episode_id, - episode_number = set_episode_number, - season_number = set_season_number, - kind = set_kind, - metadata_version = set_metadata_version, - mdp_state = set_mdp_state, - ) + old_file_type = self.file_types[path] + except KeyError: + logging.warn("file_updated: couldn't lookup file type for: %s", + path) + return + if old_file_type == new_file_type: + return + old_tracker = self.trackers[old_file_type] + new_tracker = self.trackers[new_file_type] + + if path not in old_tracker.all_files: + logging.warn("file_changed_type called for a file we're not " + "tracking: %s", path) + return + + new_tracker.file_started(path, metadata) + if path in old_tracker.finished: + new_tracker.file_finished(path) + elif path in old_tracker.finished_local: + new_tracker.file_finished_local_processing(path) + + old_tracker.remove_file(path) + self.file_types[path] = new_file_type + +class _ProcessingCountTracker(object): + """Helps MetadataManager keep track of counts for MetadataProgressUpdate + + For each file type, this class tracks the number of files that we're + still getting metadata for. + """ + def __init__(self): + # map file type to the count for that type + self.counts = collections.defaultdict(int) + # map file type to the total for that type. The total should track + # the number of file_started() calls, without ever going down. Once + # the counts goes down to zero, it should be reset + self.totals = collections.defaultdict(int) + # map paths to the file type we currently think they are + self.file_types = {} + + def get_count(self, file_type): + return self.counts[file_type] + + def get_total(self, file_type): + return self.totals[file_type] + + def file_started(self, path, metadata=None): + """Add a path to our counts. + + metadata is an optional dict of metadata for that file. + """ + if metadata is not None: + file_type = metadata['file_type'] + else: + file_type = filetypes.item_file_type_for_filename(path) + self.file_types[path] = file_type + self.counts[file_type] += 1 + self.totals[file_type] += 1 + + def check_file_type(self, path, metadata): + """Check we have the right file type for a path. + + Call this whenever the metadata changes for a path. + """ + new_file_type = metadata['file_type'] + try: + old_file_type = self.file_types[path] + except KeyError: + # not a big deal, we probably finished the file at the same time + # as the metadata update. 
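+            # (file_finished() pops the path from file_types, so a metadata
+            # update that races with it can legitimately miss here)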
+            return
+        if new_file_type != old_file_type:
+            self.counts[old_file_type] -= 1
+            self.counts[new_file_type] += 1
+            # change total value too, since the original guess was wrong
+            self.totals[old_file_type] -= 1
+            self.totals[new_file_type] += 1
+            self.file_types[path] = new_file_type
+
+    def file_finished(self, path):
+        """Remove a file from our counts."""
+        try:
+            file_type = self.file_types.pop(path)
+        except KeyError:
+            # Not tracking this path, just ignore
+            return
+        self.counts[file_type] -= 1
+        if self.counts[file_type] == 0:
+            self.totals[file_type] = 0
+
+    def file_moved(self, old_path, new_path):
+        """Change the name for a file."""
+        try:
+            self.file_types[new_path] = self.file_types.pop(old_path)
+        except KeyError:
+            # not tracking this path, just ignore
+            return
+
+    def file_being_processed(self, path):
+        return path in self.file_types
+
+class MetadataManagerBase(signals.SignalEmitter):
+    """Extract and track metadata for files.
+
+    This class is responsible for:
+    - creating/updating MetadataStatus and MetadataEntry objects
+    - invoking mutagen, moviedata, echonest, and other extractors
+    - combining all the metadata we have for a path into a single dict.
+
+    Signals:
+
+    - new-metadata(dict) -- We have new metadata for files.  dict is a
+                            dictionary mapping paths to the new metadata.
+                            Note: the new metadata only contains changed
+                            values, not the entire metadata dict.
+    """
+
+    # how long to wait before emitting the new-metadata signal.  Shorter
+    # times mean more responsiveness; longer times allow us to bulk update
+    # many items at once.
+    UPDATE_INTERVAL = 1.0
+    RETRY_TEMPORARY_INTERVAL = 3600
+    # how often to re-try net lookups that have failed
+    NET_LOOKUP_RETRY_INTERVAL = 60 * 60 * 24 * 7   # 1 week
+
+    def __init__(self, cover_art_dir, screenshot_dir, db_info=None):
+        signals.SignalEmitter.__init__(self)
+        if db_info is None:
+            self.db_info = app.db_info
+        else:
+            self.db_info = db_info
+        self.closed = False
+        self.create_signal('new-metadata')
+        self.cover_art_dir = cover_art_dir
+        self.screenshot_dir = screenshot_dir
+        self.echonest_cover_art_dir = os.path.join(cover_art_dir, 'echonest')
+        self.mutagen_processor = _TaskProcessor(u'mutagen', 100)
+        self.moviedata_processor = _TaskProcessor(u'movie-data', 100)
+        self.echonest_processor = _EchonestProcessor(
+            5, self.echonest_cover_art_dir)
+        self.pending_mutagen_tasks = []
+        self.bulk_add_count = 0
+        self.metadata_processors = [
+            self.mutagen_processor,
+            self.moviedata_processor,
+            self.echonest_processor,
+        ]
+        for processor in self.metadata_processors:
+            processor.connect("task-complete", self._on_task_complete)
+            processor.connect("task-error", self._on_task_error)
+        self.count_tracker = self.make_count_tracker()
+        self._send_net_lookup_counts_caller = eventloop.DelayedFunctionCaller(
+            self._send_net_lookup_counts)
+        # List of (processor, path, metadata) tuples for metadata since the
+        # last run_updates() call
+        self.metadata_finished = []
+        # List of (processor, path) tuples for failed metadata since the last
+        # run_updates() call
+        self.metadata_errors = []
+        self._reset_new_metadata()
+        self._run_update_caller = eventloop.DelayedFunctionCaller(
+            self.run_updates)
+        self._retry_temporary_failure_caller = \
+            eventloop.DelayedFunctionCaller(self.retry_temporary_failures)
+        self._calc_incomplete()
+        self._retry_net_lookup_caller = \
+            eventloop.DelayedFunctionCaller(self.retry_net_lookup)
+        self._retry_net_lookup_entries = {}
+        self._setup_path_placeholders()
+        self._setup_net_lookup_count()
+        # send the initial NetLookupCounts message
+        self._send_net_lookup_counts()
+
+    def _reset_new_metadata(self):
+        self.new_metadata = collections.defaultdict(dict)
+
+    def check_image_directories(self, log_warnings=False):
+        """Check that our echonest and screenshot directories exist
+
+        If they don't, we will try to create them.
+
+        This method should be called often so that we recover quickly.  The
+        current policy is to call it before adding a task that might need to
+        write to the directories.
+
+        :param log_warnings: should we print errors out to the log file?
+        """
+        directories = (
+            self.cover_art_dir,
+            self.echonest_cover_art_dir,
+            self.screenshot_dir,
+        )
+        for path in directories:
+            if not fileutil.exists(path):
+                try:
+                    fileutil.makedirs(path)
+                except EnvironmentError, e:
+                    if log_warnings:
+                        logging.warn("MetadataManager: error creating: %s "
+                                     "(%s)", path, e)
+
+    def _setup_path_placeholders(self):
+        """Add None values to the cache for all MetadataStatus objects
+
+        This makes path_in_system() work since it checks if the key exists.
+        However, we don't actually want to load the objects yet, since this is
+        called pretty early in the startup process.
+        """
+        rows = MetadataStatus.select(["path"], db_info=self.db_info)
+        for row in rows:
+            self.db_info.db.cache.set('metadata', row[0], None)
+        # also set up total_count here, since it's convenient.  total_count
+        # tracks the total number of paths in the system
+        self.total_count = len(rows)
+
+    def _setup_net_lookup_count(self):
+        """Set up net_lookup_count
+
+        net_lookup_count tracks the number of paths in the system with
+        net_lookup_enabled=True.
+        """
+        cursor = self.db_info.db.cursor
+        cursor.execute("SELECT COUNT(1) "
+                       "FROM metadata_status "
+                       "WHERE net_lookup_enabled=1")
+        self.net_lookup_count = cursor.fetchone()[0]
+
+    @contextlib.contextmanager
+    def bulk_add(self):
+        """Context manager to use when adding lots of files
+
+        While this context manager is active, we will delay calling mutagen.
+        bulk_add() contexts can be nested; we will delay processing metadata
+        until the last one finishes.
+
+        Example:
+
+        >>> with metadata_manager.bulk_add():
+        ...     add_lots_of_videos()
+        ...     add_lots_of_videos()
+        >>> # at this point mutagen calls will start
+        """
+        # initialize context
+        self.bulk_add_count += 1
+        yield
+        # cleanup context
+        self.bulk_add_count -= 1
+        if not self.in_bulk_add():
+            self._send_pending_mutagen_tasks()
+
+    def in_bulk_add(self):
+        return self.bulk_add_count != 0
+
+    def _send_pending_mutagen_tasks(self):
+        for task in self.pending_mutagen_tasks:
+            self.mutagen_processor.add_task(task)
+        self.pending_mutagen_tasks = []
+
+    def _translate_path(self, path):
+        """Translate a path value from the db to a filesystem path.
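+
+        The base implementation is the identity function; subclasses may
+        override it, as DeviceMetadataManager below does to join paths onto
+        the device mount.  Roughly (the mount point is illustrative):
+
+        >>> os.path.join('/media/device', 'Music/song.mp3')
+        '/media/device/Music/song.mp3'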
+        """
+        return path
+
+    def _untranslate_path(self, path):
+        """Reverse the work of _translate_path."""
+        return path
+
+    def add_file(self, path, local_path=None):
+        """Add a new file to the metadata system
+
+        :param path: path to the file
+        :param local_path: path to a local file to get initial metadata for
+        :returns: initial metadata for the file
+        :raises ValueError: path is already in the system
+        """
+        if self.closed:
+            raise ValueError("%r added to closed MetadataManager" % path)
+        if self.path_in_system(path):
+            raise ValueError("%r already added" % path)
+
+        status = MetadataStatus(path, self.net_lookup_enabled_default(),
+                                db_info=self.db_info)
+        if status.net_lookup_enabled:
+            self.net_lookup_count += 1
+        self.total_count += 1
+        initial_metadata = self._get_metadata_from_filename(path)
+        initial_metadata['net_lookup_enabled'] = status.net_lookup_enabled
+        if local_path is not None:
+            local_status = MetadataStatus.get_by_path(local_path)
+            status.copy_status(MetadataStatus.get_by_path(local_path))
+            for entry in MetadataEntry.metadata_for_status(local_status):
+                entry_metadata = entry.get_metadata()
+                initial_metadata.update(entry_metadata)
+                MetadataEntry(status, entry.source, entry_metadata,
+                              db_info=self.db_info)
+        if status.current_processor is not None:
+            self.count_tracker.file_started(path, initial_metadata)
+            self.run_next_processor(status)
+        self._run_update_caller.call_after_timeout(self.UPDATE_INTERVAL)
+        self._send_net_lookup_counts_caller.call_when_idle()
+        return initial_metadata
+
+    def net_lookup_enabled_default(self):
+        """net_lookup_enabled value for new MetadataStatus objects."""
+        return app.config.get(prefs.NET_LOOKUP_BY_DEFAULT)
+
+    def path_in_system(self, path):
+        """Test if a path is in the metadata system."""
+        return self.db_info.db.cache.key_exists('metadata', path)
+
+    def worker_task_count(self):
+        return (self.mutagen_processor.task_count() +
+                self.moviedata_processor.task_count() +
+                self.echonest_processor.task_count())
+
+    def _cancel_processing_paths(self, paths):
+        paths = [self._translate_path(p) for p in paths]
+        workerprocess.cancel_tasks_for_files(paths)
+        for processor in self.metadata_processors:
+            processor.remove_tasks_for_paths(paths)
+
+    def remove_file(self, path):
+        """Remove a file from the metadata system.
+
+        This is basically equivalent to calling remove_files([path]), except
+        that it doesn't start the bulk_sql_manager.
+        """
+        paths = [path]
+        self._remove_files(paths)
+
+    def remove_files(self, paths):
+        """Remove files from the metadata system.
+
+        All queued mutagen and movie data calls will be canceled.
+
+        :param paths: paths to remove
+        :raises KeyError: path not in the metadata system
+        """
+        app.bulk_sql_manager.start()
+        try:
+            self._remove_files(paths)
+        finally:
+            app.bulk_sql_manager.finish()
+
+    def close(self):
+        """
+        Close the MetadataManager.  Cancel anything in progress, and don't
+        allow new requests.
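+
+        A minimal shutdown sketch (the manager object name is an assumption):
+
+        >>> manager.run_updates()  # flush any pending metadata updates
+        >>> manager.close()        # cancel in-flight tasks
+        >>> manager.closed
+        True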
+ """ + if self.closed: # already closed + return + self.closed = True + paths = [r[0] for r in + MetadataStatus.select(['path'], db_info=self.db_info)] + self._cancel_processing_paths(paths) + + def _remove_files(self, paths): + """Does the work for remove_file and remove_files""" + self._cancel_processing_paths(paths) + for path in paths: + try: + status = self._get_status_for_path(path) + except KeyError: + logging.warn("MetadataManager._remove_files: KeyError " + "getting status for %r", path) + continue + if status.net_lookup_enabled: + self.net_lookup_count -= 1 + self.total_count -= 1 + for entry in MetadataEntry.metadata_for_status(status, + self.db_info): + if entry.screenshot is not None: + self.remove_screenshot(entry.screenshot) + entry.remove() + status.remove() + if status.current_processor is not None: + self.count_tracker.file_finished(path) + self._run_update_caller.call_after_timeout(self.UPDATE_INTERVAL) + self._send_net_lookup_counts_caller.call_when_idle() + + def remove_screenshot(self, screenshot): + fileutil.delete(screenshot) + + def will_move_files(self, paths): + """Prepare for files to be moved + + All queued mutagen and movie data calls will be put on hold until + file_moved() is called. + + :param paths: list of paths that will be moved + """ + self._cancel_processing_paths(paths) + + def file_moved(self, old_path, new_path): + """Call this after a file has been moved to a new location. + + Queued mutagen and movie data calls will be restarted. + + :param move_info: list of (old_path, new_path) tuples + """ + if self.closed: + raise ValueError("%r moved to %r on closed MetadataManager" % ( + old_path, new_path)) + + try: + status = self._get_status_for_path(old_path) + except KeyError: + logging.warn("_process_files_moved: %s not in DB", old_path) + return + if self.db_info.db.cache.key_exists('metadata', new_path): + # There's already an entry for the new status. What to do + # here? Let's use the new one + logging.warn("_process_files_moved: already an object for " + "%s (old path: %s)" % (new_path, status.path)) + self.count_tracker.file_finished(status.path) + self.remove_file(status.path) + return + + status.rename(new_path) + if status.mutagen_status == MetadataStatus.STATUS_NOT_RUN: + self._run_mutagen(new_path) + elif status.moviedata_status == MetadataStatus.STATUS_NOT_RUN: + self._run_movie_data(new_path) + self.count_tracker.file_moved(old_path, new_path) + + def get_metadata(self, path): + """Get metadata for a path + + :param path: path to the file + :returns: dict of metadata + :raises KeyError: path not in the metadata system + """ + status = self._get_status_for_path(path) + + metadata = self._get_metadata_from_filename(path) + for entry in MetadataEntry.metadata_for_status(status, self.db_info): + entry_metadata = entry.get_metadata() + metadata.update(entry_metadata) + metadata['has_drm'] = status.get_has_drm() + metadata['net_lookup_enabled'] = status.net_lookup_enabled + self._add_cover_art(metadata) + return metadata + + def refresh_metadata_for_paths(self, paths): + """Send the new-metadata signal with the full metadata for paths. + + The metadata dicts will include None values to indicate metadata we + don't have, unlike normal. This means that we can erase metadata + from items if it is no longer present. 
+ """ + + new_metadata = {} + for p in paths: + # make sure we include None values + metadata = dict((name, None) for name in attribute_names) + metadata.update(self.get_metadata(p)) + new_metadata[p] = metadata + self.emit("new-metadata", new_metadata) + + def _add_cover_art(self, metadata): + """Add the cover art path to a metadata dict """ + + # if the user hasn't explicitly set the cover art for an item, get it + # using the album. + if 'album' in metadata and 'cover_art' not in metadata: + filename = filetags.calc_cover_art_filename(metadata['album']) + mutagen_path = os.path.join(self.cover_art_dir, filename) + echonest_path = os.path.join(self.cover_art_dir, 'echonest', + filename) + if os.path.exists(echonest_path): + metadata['cover_art'] = echonest_path + elif os.path.exists(mutagen_path): + metadata['cover_art'] = mutagen_path + + def set_user_data(self, path, user_data): + """Update metadata based on user-inputted data + + :raises KeyError: path not in the metadata system + """ + if self.closed: + raise ValueError( + "%r called set_user_data on closed MetadataManager" % path) + # make sure that our MetadataStatus object exists + status = self._get_status_for_path(path) + try: + # try to update the current entry + current_entry = MetadataEntry.get_entry(u'user-data', status, + self.db_info) + current_entry.update_metadata(user_data) + except database.ObjectNotFoundError: + # make a new entry if none exists + MetadataEntry(status, u'user-data', user_data, db_info=self.db_info) + + def set_net_lookup_enabled(self, paths, enabled): + """Set if we should do an internet lookup for a list of paths + + :param paths: paths to change or None to change it for all entries + :param enabled: should we do internet lookups for paths? + """ + paths_to_refresh = [] + paths_to_cancel = [] + paths_to_start = [] + to_change = [] + + if paths is not None: + for path in paths: + try: + status = MetadataStatus.get_by_path(path, self.db_info) + if status.net_lookup_enabled != enabled: + to_change.append(status) + except database.ObjectNotFoundError: + logging.warn("set_net_lookup_enabled() " + "path not in system: %s", path) + else: + view = MetadataStatus.net_lookup_enabled_view(not enabled, + self.db_info) + to_change = list(view) + + app.bulk_sql_manager.start() + try: + for status in to_change: + old_current_processor = status.current_processor + status.set_net_lookup_enabled(enabled) + if MetadataEntry.set_disabled('echonest', status, not enabled, + self.db_info): + paths_to_refresh.append(status.path) + # Changing the net_lookup value may mean we have to send the + # path through echonest + if (old_current_processor is None and + status.current_processor == 'echonest'): + paths_to_start.append(status.path) + elif (status.current_processor is None and + old_current_processor == 'echonest'): + paths_to_cancel.append(status.path) + finally: + app.bulk_sql_manager.finish() + + if paths_to_cancel: + self.echonest_processor.remove_tasks_for_paths(paths_to_cancel) + + for path in paths_to_start: + # get_metadata() is sometimes more accurate than + # _get_metadata_from_filename() but slower. Let's go for speed. 
+            metadata = self._get_metadata_from_filename(path)
+            self.count_tracker.file_started(path, metadata)
+            self._run_echonest(path)
+
+        if paths_to_refresh:
+            self.refresh_metadata_for_paths(paths_to_refresh)
+
+        if enabled:
+            self.net_lookup_count += len(to_change)
+        else:
+            self.net_lookup_count -= len(to_change)
+        # call _send_net_lookup_counts() immediately because we want the
+        # frontend to update the counts before it re-enables the buttons.
+        self._send_net_lookup_counts_caller.call_now()
+        self._send_progress_updates()
+
+    def set_net_lookup_enabled_for_all(self, enabled):
+        """Set if we should do an internet lookup for all current paths"""
+        self.set_net_lookup_enabled(None, enabled)
+        messages.SetNetLookupEnabledFinished().send_to_frontend()
+
+    def _send_net_lookup_counts(self):
+        m = messages.NetLookupCounts(self.net_lookup_count, self.total_count)
+        m.send_to_frontend()
+
+    def _calc_incomplete(self):
+        """Figure out which metadata status objects we should restart.
+
+        We have to call this method on startup, but we don't want to start
+        doing any work until restart_incomplete() is called.  So we just save
+        the IDs of the rows to restart.
+        """
+        results = MetadataStatus.was_running_select(['id'], self.db_info)
+        self.restart_ids = [row[0] for row in results]
+
+    def restart_incomplete(self):
+        """Restart extractors for files with incomplete metadata
+
+        This method queues calls to mutagen, movie data, etc.
+        """
+        for id_ in self.restart_ids:
+            try:
+                status = MetadataStatus.get_by_id(id_, self.db_info)
+            except database.ObjectNotFoundError:
+                continue  # just ignore deleted objects
+            self.run_next_processor(status)
+            # get_metadata() is sometimes more accurate than
+            # _get_metadata_from_filename() but slower.  Let's go for speed.
+            metadata = self._get_metadata_from_filename(status.path)
+            self.count_tracker.file_started(status.path, metadata)
+
+        del self.restart_ids
+        self._run_update_caller.call_after_timeout(self.UPDATE_INTERVAL)
+
+    def schedule_retry_net_lookup(self):
+        last_refetch = app.config.get(prefs.LAST_RETRY_NET_LOOKUP)
+        if not last_refetch:
+            # we've never done a refetch; do it in 10 minutes
+            timeout = 600
+        else:
+            timeout = (last_refetch + self.NET_LOOKUP_RETRY_INTERVAL -
+                       time.time())
+        self._retry_net_lookup_caller.call_after_timeout(timeout)
+
+    def retry_net_lookup(self):
+        """Re-download incomplete data from internet sources.
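+
+        This is scheduled by schedule_retry_net_lookup() above; a sketch of
+        the cadence (the interval comes from NET_LOOKUP_RETRY_INTERVAL):
+
+        >>> manager.schedule_retry_net_lookup()
+        >>> # retry_net_lookup() fires roughly one week after the last
+        >>> # successful run, or in 10 minutes if it has never run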
""" + logging.info("Retrying incomplete internet lookups") + self._retry_net_lookup_caller.cancel_call() + for entry in MetadataEntry.incomplete_echonest_view(self.db_info): + try: + status = MetadataStatus.get_by_id(entry.status_id, + self.db_info) + except database.ObjectNotFoundError: + logging.warn("retry_net_lookup: MetadataStatus not found: %i", + entry.status_id) + continue + # check that we aren't already running metadata lookups for the + # file + if status.current_processor is not None: + logging.warn("retry_net_lookup: current_processor is %s", + status.current_processor) + continue + if not status.net_lookup_enabled: + continue + if status.path in self._retry_net_lookup_entries: + # already retrying + logging.info("retry_net_lookup: already retrying %r", + status.path) + continue + self.count_tracker.file_net_lookup_restarted(status.path) + self._run_echonest(status.path, status.echonest_id) + self._retry_net_lookup_entries[status.path] = entry + + app.config.set(prefs.LAST_RETRY_NET_LOOKUP, int(time.time())) + + def retry_temporary_failures(self): + app.bulk_sql_manager.start() + try: + for status in MetadataStatus.failed_temporary_view(self.db_info): + status.retry_echonest() + self.run_next_processor(status) + finally: + app.bulk_sql_manager.finish() + + def _get_status_for_path(self, path): + """Get a MetadataStatus object for a given path.""" + try: + return MetadataStatus.get_by_path(path, self.db_info) + except database.ObjectNotFoundError: + raise KeyError(path) + + def _run_mutagen(self, path): + """Run mutagen on a path.""" + self.check_image_directories() + path = self._translate_path(path) + task = workerprocess.MutagenTask(path, self.cover_art_dir) + if not self.in_bulk_add(): + self.mutagen_processor.add_task(task) + else: + self.pending_mutagen_tasks.append(task) + + def _run_movie_data(self, path): + """Run the movie data program on a path.""" + self.check_image_directories() + path = self._translate_path(path) + task = workerprocess.MovieDataProgramTask(path, self.screenshot_dir) + self.moviedata_processor.add_task(task) + + def _run_echonest(self, path, echonest_id=None): + """Run echonest and other internet queries on a path.""" + self.check_image_directories() + def metadata_fetcher(): + metadata = self.get_metadata(path) + # make sure to get metadata that we just created but haven't saved + # yet because we're doing a bulk insert + if path in self.new_metadata: + metadata.update(self.new_metadata[path]) + + # we only send a subset of the metadata to echonest and some of + # the key names are different + echonest_metadata = {} + for key in ('title', 'artist', 'album', 'duration'): + try: + echonest_metadata[key] = metadata[key] + except KeyError: + pass + if echonest_id: + echonest_metadata['echonest_id'] = echonest_id + return echonest_metadata + self.echonest_processor.add_path(self._translate_path(path), + metadata_fetcher) + + def _on_task_complete(self, processor, path, result): + path = self._untranslate_path(path) + self.metadata_finished.append((processor, path, result)) + self._run_update_caller.call_after_timeout(self.UPDATE_INTERVAL) + + def _on_task_error(self, processor, path, error): + path = self._untranslate_path(path) + self.metadata_errors.append((processor, path, error)) + self._run_update_caller.call_after_timeout(self.UPDATE_INTERVAL) + + def _get_metadata_from_filename(self, path): + """Get metadata that we know from a filename alone.""" + return { + 'file_type': filetypes.item_file_type_for_filename(path), + } + + def 
run_updates(self):
+        """Run any pending metadata updates.
+
+        As we get metadata in from extractors, we store it up and send one big
+        update at a time.  Normally this is scheduled using a timeout, but we
+        also need to call it at shutdown to flush the pending updates.
+        """
+        if self.closed:
+            return
+        # Should this be inside an idle iterator?  It definitely runs slowly
+        # when we're running mutagen on a music library, but I think that's to
+        # be expected.  It seems fast enough in other cases to me - BDK
+        new_metadata_copy = self.new_metadata
+        app.bulk_sql_manager.start()
+        try:
+            self._process_metadata_finished()
+            self._process_metadata_errors()
+            self.emit('new-metadata', self.new_metadata)
+        finally:
+            self._reset_new_metadata()
+            try:
+                app.bulk_sql_manager.finish()
+            except StandardError, e:
+                new_metadata_debug_string = '\n'.join(
+                    '%s: %s' % (os.path.basename(k), v)
+                    for k, v in new_metadata_copy.items())
+                logging.warn("Error adding new metadata: %s. new_metadata\n%s",
+                             e, new_metadata_debug_string)
+                raise
+        self._send_progress_updates()
+
+    def _process_metadata_finished(self):
+        for (processor, path, result) in self.metadata_finished:
+            try:
+                status = MetadataStatus.get_by_path(path, self.db_info)
+            except database.ObjectNotFoundError:
+                logging.warn("_process_metadata_finished -- path removed: %r",
+                             path)
+                continue
+            if path not in self._retry_net_lookup_entries:
+                self._process_metadata_result(status, processor, path, result)
+            else:
+                retry_entry = self._retry_net_lookup_entries.pop(path)
+                if retry_entry.id_exists():
+                    self._process_metadata_result_net_retry(status,
+                                                            retry_entry, path,
+                                                            result)
+                else:
+                    logging.warn("_process_metadata_finished -- entry "
+                                 "removed while retrying net lookup: %r",
+                                 path)
+
+        self.metadata_finished = []
+
+    def _process_metadata_result(self, status, processor, path, result):
+        if not status.need_metadata_for_source(processor.source_name):
+            logging.warn("_process_metadata_finished -- got duplicate "
+                         "metadata for %s (source: %s)", path,
+                         processor.source_name)
+            return
+        self._make_new_metadata_entry(status, processor, path, result)
+        self.count_tracker.file_updated(path, result)
+        self.run_next_processor(status)
+        if status.current_processor == u'echonest':
+            self.count_tracker.file_finished_local_processing(status.path)
+
+    def _process_metadata_result_net_retry(self, status, entry, path, result):
+        if status.echonest_id is None and 'echonest_id' in result:
+            status.set_echonest_id(result['echonest_id'])
+        entry.update_metadata(result)
+        self.count_tracker.file_finished(path)
+        self.new_metadata[path].update(result)
+
+    def _make_new_metadata_entry(self, status, processor, path, result):
+        # pop off created_cover_art, that's for us not the MetadataEntry
+        created_cover_art = result.pop('created_cover_art', False)
+        entry = MetadataEntry(status, processor.source_name, result,
+                              db_info=self.db_info)
+        if entry.priority >= status.max_entry_priority:
+            # If this entry is going to overwrite all other metadata, then
+            # we don't have to call get_metadata().  Just send the new
+            # values.
+ can_skip_get_metadata = True + else: + can_skip_get_metadata = False + status.update_after_success(entry, result) + if can_skip_get_metadata: + self.new_metadata[path].update(result) + else: + self.new_metadata[path] = self.get_metadata(path) + # add cover-art-path for other items in the same album + if created_cover_art and 'album' in self.new_metadata[path]: + album = self.new_metadata[path]['album'] + cover_art = self.new_metadata[path]['cover_art'] + for path in MetadataStatus.paths_for_album(album, self.db_info): + self.new_metadata[path]['cover_art'] = cover_art + + def _process_metadata_errors(self): + for (processor, path, error) in self.metadata_errors: + try: + status = MetadataStatus.get_by_path(path, self.db_info) + except database.ObjectNotFoundError: + logging.warn("_process_metadata_finished -- path removed: %s", + path) + continue + processor_status = status.update_after_error( + processor.source_name, error) + if processor_status != status.STATUS_TEMPORARY_FAILURE: + self.run_next_processor(status) + if status.current_processor == u'echonest': + self.count_tracker.file_finished_local_processing(status.path) + # we only have new metadata if the error means we can set the + # has_drm flag now + if processor is self.moviedata_processor and status.get_has_drm(): + self.new_metadata[path].update({'has_drm': True}) + if processor_status == status.STATUS_TEMPORARY_FAILURE: + self._retry_temporary_failure_caller.call_after_timeout( + self.RETRY_TEMPORARY_INTERVAL) + self.metadata_errors = [] + + def run_next_processor(self, status): + """Called after both success and failure of a metadata processor + """ + # check what the next processor we should run is + if status.current_processor == u'mutagen': + self._run_mutagen(status.path) + elif status.current_processor == u'movie-data': + self._run_movie_data(status.path) + elif status.current_processor == u'echonest': + self._run_echonest(status.path) + else: + self.count_tracker.file_finished(status.path) + + def _send_progress_updates(self): + for file_type in (u'audio', u'video'): + target = (u'library', file_type) + count_info = self.count_tracker.get_count_info(file_type) + total, finished_local, finished = count_info + eta = None + msg = messages.MetadataProgressUpdate(target, finished, + finished_local, eta, total) + msg.send_to_frontend() + +class LibraryMetadataManager(MetadataManagerBase): + """MetadataManager for the user's audio/video library.""" + + def make_count_tracker(self): + return LibraryProgressCountTracker() + +class DeviceMetadataManager(MetadataManagerBase): + """MetadataManager for devices.""" + + def __init__(self, db_info, device_id, mount): + cover_art_dir = os.path.join(mount, '.miro', 'cover-art') + screenshot_dir = os.path.join(mount, '.miro', 'screenshots') + MetadataManagerBase.__init__(self, cover_art_dir, screenshot_dir, + db_info) + self.mount = mount + self.device_id = device_id + # FIXME: should we wait to restart incomplete metadata? + self.restart_incomplete() + + def net_lookup_enabled_default(self): + """For devices we always want net_lookup_enabled to be False. + + See #18788. + """ + return False + + def set_net_lookup_enabled(self, paths, enabled): + # net_lookup_enabled should be False for device items and never + # change. 
Log a warning if we call set_net_lookup_enabled
+        logging.warn("DeviceMetadataManager.set_net_lookup_enabled() called")
+        return
+
+    def make_count_tracker(self):
+        # for devices we just use a simple count tracker
+        return ProgressCountTracker()
+
+    def get_metadata(self, path):
+        metadata = MetadataManagerBase.get_metadata(self, path)
+        # device items expect cover art and screenshots to be relative to
+        # the device mount
+        for key in ('cover_art', 'screenshot'):
+            if key in metadata:
+                metadata[key] = self._untranslate_path(metadata[key])
+        return metadata
+
+    def _translate_path(self, path):
+        """Translate a path value from the db to a filesystem path.
+        """
+        return os.path.join(self.mount, path)
+
+    def _untranslate_path(self, path):
+        """Reverse the work of _translate_path: make a filesystem path
+        relative to the device mount.
+        """
+        if path.startswith(self.mount):
+            return os.path.relpath(path, self.mount)
+        else:
+            raise ValueError("%s is not relative to %s" % (path, self.mount))
+
+    def _send_progress_updates(self):
+        target = (u'device', self.device_id)
+        count_info = self.count_tracker.get_count_info()
+        total, finished_local, finished = count_info
+        eta = None
+        msg = messages.MetadataProgressUpdate(target, finished,
+                                              finished_local, eta, total)
+        msg.send_to_frontend()
+
+    def _send_net_lookup_counts(self):
+        # This isn't supported for devices yet
+        pass
+
+def remove_invalid_device_metadata(device):
+    """Remove Metadata objects that don't correspond to DeviceItems.
+
+    If we have a path in the metadata_status table, but not in the device_item
+    table, then we get an error when trying to add the device item (see
+    #19847).  So remove the metadata status objects.
+    """
+    where = 'path not in (SELECT filename FROM device_item)'
+    for status in MetadataStatus.make_view(where, db_info=device.db_info):
+        logging.warn("removing invalid metadata status (%s, %s)", device.mount,
+                     status.path)
+        status.remove()
diff -Nru miro-4.0.4/lib/models.py miro-6.0/lib/models.py
--- miro-4.0.4/lib/models.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/models.py 2013-04-05 16:02:42.000000000 +0000
@@ -43,8 +43,9 @@
     global SavedSearchFeedImpl, ScraperFeedImpl, SearchFeedImpl
     global DirectoryWatchFeedImpl, DirectoryFeedImpl, SearchDownloadsFeedImpl
     global ManualFeedImpl, ChannelFolder, PlaylistFolder
-    global PlaylistFolderItemMap, ChannelGuide, Item, FileItem, IconCache
-    global SavedPlaylist, PlaylistItemMap, TabOrder, ThemeHistory
+    global PlaylistFolderItemMap, ChannelGuide
+    global Item, FileItem, DeviceItem, SharingItem
+    global IconCache, SavedPlaylist, PlaylistItemMap, TabOrder, ThemeHistory
     global messages

     from miro.downloader import RemoteDownloader
@@ -54,7 +55,7 @@
     from miro.folder import ChannelFolder, PlaylistFolder, \
         PlaylistFolderItemMap
     from miro.guide import ChannelGuide
-    from miro.item import Item, FileItem
+    from miro.item import Item, FileItem, DeviceItem, SharingItem
     from miro.iconcache import IconCache
     from miro.playlist import SavedPlaylist, PlaylistItemMap
     from miro.tabs import TabOrder
diff -Nru miro-4.0.4/lib/moviedata.py miro-6.0/lib/moviedata.py
--- miro-4.0.4/lib/moviedata.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/moviedata.py 2013-04-05 16:02:42.000000000 +0000
@@ -27,321 +27,66 @@
 # this exception statement from your version. If you delete this exception
 # statement from all source files in the program, then also delete it here.
-from miro.eventloop import as_idle import os.path -import re -import subprocess -import tempfile -import time -import traceback -import threading -import Queue -import logging -from contextlib import contextmanager - -from miro import app -from miro import prefs -from miro import signals -from miro import util + +from miro import download_utils from miro import fileutil -from miro.plat.utils import (movie_data_program_info, - thread_body) -from miro.errors import Shutdown - -# Time in seconds that we wait for the utility to execute. If it goes -# longer than this, we assume it's hung and kill it. -MOVIE_DATA_UTIL_TIMEOUT = 30 - -# Time to sleep while we're polling the external movie command -SLEEP_DELAY = 0.1 - -DURATION_RE = re.compile("Miro-Movie-Data-Length: (\d+)") -TYPE_RE = re.compile("Miro-Movie-Data-Type: (audio|video|other)") -THUMBNAIL_SUCCESS_RE = re.compile("Miro-Movie-Data-Thumbnail: Success") -TRY_AGAIN_RE = re.compile("Miro-Try-Again: True") - -class State(object): - """Enum for tracking what we've looked at. - - None indicates that we haven't looked at the file at all; - non-true values indicate that we haven't run MDP. - """ - UNSEEN = None - SKIPPED = 0 - RAN = 1 - FAILED = 2 - -class MovieDataInfo(object): - """Little utility class to keep track of data associated with each - movie. This is: - - * The item. - * The path to the video. - * Path to the thumbnail we're trying to make. - * List of commands that we're trying to run, and their environments. +from miro.plat.utils import run_media_metadata_extractor + +def convert_mdp_result(source_path, screenshot, result): + """Convert the movie data program result for the metadata manager """ - def __init__(self, item): - self.item = item - self.video_path = item.get_filename() - self.thumbnail_path = self._make_thumbnail_path() - self._program_info = None - - def _make_thumbnail_path(self): - # add a random string to the filename to ensure it's unique. - # Two videos can have the same basename if they're in - # different directories. 
- video_base = os.path.basename(self.video_path) - filename = '%s.%s.png' % (video_base, util.random_string(5)) - return os.path.join(self.image_directory('extracted'), filename) - - @property - def program_info(self): - if not self._program_info: - self._program_info = self._calc_program_info() - return self._program_info - - def _calc_program_info(self): - videopath = fileutil.expand_filename(self.video_path) - thumbnailpath = fileutil.expand_filename(self.thumbnail_path) - command_line, env = movie_data_program_info(videopath, thumbnailpath) - return command_line, env - - @classmethod - def image_directory(cls, subdir): - dir_ = os.path.join(app.config.get(prefs.ICON_CACHE_DIRECTORY), subdir) - try: - fileutil.makedirs(dir_) - except OSError: - pass - return dir_ - -class ProcessHung(StandardError): pass - -class MovieDataUpdater(signals.SignalEmitter): - def __init__ (self): - signals.SignalEmitter.__init__(self, 'begin-loop', 'end-loop', - 'queue-empty') - self.in_shutdown = False - self.in_progress = set() - self.queue = Queue.Queue() - self.thread = None - - def start_thread(self): - self.thread = threading.Thread(name='Movie Data Thread', - target=thread_body, - args=[self.thread_loop]) - self.thread.setDaemon(True) - self.thread.start() - - def process_with_movie_data_program(self, mdi): - command_line, env = mdi.program_info - try: - stdout = self.run_movie_data_program(command_line, env) - except StandardError: - # check whether it's actually a Shutdown error, then raise - if self.in_shutdown: - raise Shutdown - raise - - if TRY_AGAIN_RE.search(stdout): - # FIXME: we should try again at some point, but right now we just - # ignore this - pass - - duration = self.parse_duration(stdout) - if os.path.splitext(mdi.video_path)[1] == '.flv': - # bug #17266. if the extension is .flv, we ignore the mediatype - # we just got from the movie data program. this is - # specifically for .flv files which the movie data - # extractors have a hard time with. - mediatype = u'video' - else: - mediatype = self.parse_type(stdout) - screenshot = self.parse_screenshot(stdout, mdi) - - logging.debug("moviedata: mdp %s %s %s %s", duration, screenshot, - mediatype, mdi.video_path) - return duration, screenshot, mediatype - - @contextmanager - def looping(self): - """Simple contextmanager to ensure that whatever happens in a - thread_loop, we signal begin/end properly. - """ - self.emit('begin-loop') - try: - yield - finally: - self.emit('end-loop') - - def thread_loop(self): - try: - while not self.in_shutdown: - with self.looping(): - self.process_item() - except Shutdown: - pass - - def process_item(self): - try: - mdi = self.queue.get(block=False) - except Queue.Empty: - self.emit('queue-empty') - mdi = self.queue.get(block=True) - # IMPORTANT: once we have popped an MDI off the queue, its mdp_state - # *must* be set (by update_finished or update_failed) unless we shut - # down before we could process it - if mdi is None: - raise Shutdown - try: - results = self.process_with_movie_data_program(mdi) - except ProcessHung: - self.update_failed(mdi.item) - # this kind of error is expected; just a warning - logging.warning("Movie data process hung, killing it. 
File was: %r", - mdi.video_path) - except StandardError: - self.update_failed(mdi.item) - signals.system.failed_exn( - "When running external movie data program for %r" % - mdi.video_path) - else: - self.update_finished(mdi.item, *results) - - def run_movie_data_program(self, command_line, env): - start_time = time.time() - # create tempfiles to catch output for the movie data program. Using - # a pipe fails if the movie data program outputs enough to fill up the - # buffers (see #17059) - movie_data_stdout = tempfile.TemporaryFile() - movie_data_stderr = tempfile.TemporaryFile() - pipe = subprocess.Popen(command_line, stdout=movie_data_stdout, - stdin=subprocess.PIPE, stderr=movie_data_stderr, env=env, - startupinfo=util.no_console_startupinfo()) - # close stdin since we won't write to it. - pipe.stdin.close() - while pipe.poll() is None and not self.in_shutdown: - time.sleep(SLEEP_DELAY) - if time.time() - start_time > MOVIE_DATA_UTIL_TIMEOUT: - logging.warning("Movie data process hung, killing it") - self.kill_process(pipe) - raise ProcessHung - - if self.in_shutdown: - if pipe.poll() is None: - logging.warning("Movie data process running after shutdown, " - "killing it") - self.kill_process(pipe) - raise Shutdown - # FIXME: should we do anything with stderr? - movie_data_stdout.seek(0) - return movie_data_stdout.read() - - def kill_process(self, pipe): - try: - pipe.kill() - pipe.wait() - except OSError: - logging.warning("Error trying to kill the movie data process:\n%s", - traceback.format_exc()) - else: - logging.warning("Movie data process killed") - - def parse_duration(self, stdout): - duration_match = DURATION_RE.search(stdout) - if duration_match: - return int(duration_match.group(1)) - else: - return None - - def parse_type(self, stdout): - type_match = TYPE_RE.search(stdout) - if type_match: - return unicode(type_match.group(1)) - else: - return None - - def parse_screenshot(self, stdout, mdi): - if (THUMBNAIL_SUCCESS_RE.search(stdout) and - fileutil.exists(mdi.thumbnail_path)): - return mdi.thumbnail_path - else: - return None - - @as_idle - def update_failed(self, item): - self.in_progress.remove(item.id) - if item.id_exists(): - item.mdp_state = State.FAILED - if item.has_drm: - #17442#c7, part2: if mutagen called it potentially DRM'd and we - # couldn't read it, we consider it DRM'd; files that we consider - # DRM'd initially go in "Other" - item.file_type = u'other' - item.signal_change() - - @as_idle - def update_finished(self, item, duration, screenshot, mediatype): - if hasattr(app, 'metadata_progress_updater'): # hack for unittests - app.metadata_progress_updater.path_processed(item.get_filename()) - self.in_progress.remove(item.id) - if item.id_exists(): - item.mdp_state = State.RAN - item.screenshot = screenshot - if duration is not None: - item.duration = duration - if duration != -1: - # if mutagen thought it might have DRM but we got a - # duration, override mutagen's guess - item.has_drm = False - if item.has_drm: - #17442#c7, part2: if mutagen called it potentially DRM'd and we - # couldn't read it, we consider it DRM'd; files that we consider - # DRM'd initially go in "Other" - item.file_type = u'other' - elif mediatype is not None: - item.file_type = mediatype - item.signal_change() - - def update_skipped(self, item): - item.mdp_state = State.SKIPPED - item.signal_change() - - def request_update(self, item): - if (hasattr(app, 'in_unit_tests') and - not hasattr(app, 'testing_mdp')): - # kludge for skipping MDP in non-MDP unittests - return - if 
self.in_shutdown:
-            return
-        if item.id in self.in_progress:
-            logging.warn("Not adding in-progess item (%s)", item.id)
-            return
-
-        if self._should_process_item(item):
-            self.in_progress.add(item.id)
-            self.queue.put(MovieDataInfo(item))
-        else:
-            self.update_skipped(item)
-            app.metadata_progress_updater.path_processed(item.get_filename())
-
-    def _should_process_item(self, item):
-        if item.has_drm:
-            # mutagen can only identify files that *might* have drm, so we
-            # always need to check that
-            return True
-        # Only run the movie data program for video items, audio items that we
-        # don't know the duration for, or items that do not have "other"
-        # filenames that mutagen could not determine type for.
-        return (item.file_type == u'video' or
-                (item.file_type == u'audio' and item.duration is None) or
-                item.file_type is None)
-
-    def shutdown(self):
-        self.in_shutdown = True
-        # wake up our thread
-        self.queue.put(None)
-        if self.thread is not None:
-            self.thread.join()
+    converted_result = { 'source_path': source_path }
+    file_type, duration, success = result
+
+    if duration >= 0:
+        converted_result['duration'] = duration
+
+    # Return a file_type only if the duration is > 0.  Otherwise it may be a
+    # false identification (#18840).  Also, if moviedata reports other, that's
+    # a sign that it doesn't know what the file type is.  Just leave out the
+    # file_type key so that we fallback to the mutagen guess.
+    if file_type != "other" and (duration not in (0, None)):
+        # Make sure file_type is unicode, or else database validation will
+        # fail on insert!
+        converted_result['file_type'] = unicode(file_type)
+
+    if os.path.splitext(source_path)[1] == '.flv':
+        # bug #17266.  if the extension is .flv, we ignore the file type
+        # we just got from the movie data program.  this is
+        # specifically for .flv files which the movie data
+        # extractors have a hard time with.
+        converted_result['file_type'] = u'video'
+
+    if (converted_result.get('file_type') == 'video' and success and
+        fileutil.exists(screenshot)):
+        converted_result['screenshot'] = screenshot
+    return converted_result
-movie_data_updater = MovieDataUpdater()
+def _make_screenshot_path(source_path, image_directory):
+    """Get a unique path to put a screenshot at
+
+    This function creates a unique path to put a screenshot file at.
+
+    :param source_path: path to the input video file
+    :param image_directory: directory to put screenshots in
+    """
+    filename = os.path.basename(source_path) + ".png"
+    path = os.path.join(image_directory, filename)
+    # next_free_filename() reserves the path by returning an open file
+    # handle along with it; we pass the bare path to the movie data process,
+    # and process_file() closes the handle once the program has written to it
+    return download_utils.next_free_filename(path)
+
+def process_file(source_path, image_directory):
+    """Send a file to the movie data program.
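+
+    A usage sketch (paths are illustrative):
+
+    >>> info = process_file('/videos/clip.flv', '/tmp/screenshots')
+    >>> info['file_type']  # .flv is always treated as video, see #17266
+    u'video'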
+
+    :param source_path: path to the file to process
+    :param image_directory: directory to put screenshot files
+    :returns: dictionary with metadata info
+    """
+    screenshot, fp = _make_screenshot_path(source_path, image_directory)
+    result = run_media_metadata_extractor(source_path, screenshot)
+    # we can close the file now, since MDP has written to it
+    fp.close()
+    return convert_mdp_result(source_path, screenshot, result)
diff -Nru miro-4.0.4/lib/net.py miro-6.0/lib/net.py
--- miro-4.0.4/lib/net.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/net.py 2013-04-05 16:02:42.000000000 +0000
@@ -65,7 +65,7 @@
 def convert_to_ssl(sock):
     return socket.ssl(sock)

-class NetworkError(Exception):
+class NetworkError(StandardError):
     """Base class for all errors that will be passed to errbacks from get_url
     and friends. NetworkErrors can be display in 2 ways:
diff -Nru miro-4.0.4/lib/ngrams.c miro-6.0/lib/ngrams.c
--- miro-4.0.4/lib/ngrams.c 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/ngrams.c 2013-04-05 16:02:42.000000000 +0000
@@ -61,8 +61,6 @@
 static PyObject *breakup_word(PyObject *self, PyObject *args)
 {
     PyObject* source_string;
-    PyObject* iter;
-    PyObject* item;
     PyObject* ngram_list;
     long min, max;
     Py_ssize_t n;
diff -Nru miro-4.0.4/lib/player.py miro-6.0/lib/player.py
--- miro-4.0.4/lib/player.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/player.py 2013-04-05 16:02:42.000000000 +0000
@@ -27,6 +27,8 @@
 # this exception statement from your version. If you delete this exception
 # statement from all source files in the program, then also delete it here.

+import logging
+
 from miro import signals
 from miro import messages

@@ -47,25 +49,53 @@
         messages.MarkItemWatched(self.item_info).send_to_backend()
         self.emit('cant-play')

-    def skip_forward(self):
-        current = self.get_elapsed_playback_time()
-        duration = self.get_total_playback_time()
-        if current is None or duration is None:
-            return
-        pos = min(duration, current + 30.0)
+    def _seek(self, seek_to_func, args):
+        if args is None:
+            return False
+        pos, duration = seek_to_func(*args)
+        if duration <= 0:
+            logging.warning('_seek: duration = %s', duration)
+            return None
         self.seek_to(pos / duration)
+        return True

-    def skip_backward(self):
+    def _skip_args(self):
         current = self.get_elapsed_playback_time()
         duration = self.get_total_playback_time()
         if current is None or duration is None:
-            return
-        pos = max(0, current - 15.0)
-        self.seek_to(pos / duration)
+            logging.warning('cannot skip: current = %s duration = %s',
+                            current, duration)
+            return None
+        return (current, duration)
+
+    def _resume_at_args(self, resume_time):
+        duration = self.get_total_playback_time()
+        if duration is None:
+            logging.warn('_resume_at_args: duration is None')
+            return None
+        return (resume_time, duration)
+
+    def _skip_forward_func(self, current, duration):
+        return (min(duration, current + 30.0), duration)
+
+    def _skip_backward_func(self, current, duration):
+        return (max(0, current - 15.0), duration)
+
+    def _resume_at_func(self, resume_time, duration):
+        return (resume_time, duration)
+
+    def skip_forward(self):
+        args = self._skip_args()
+        self._seek(self._skip_forward_func, args)
+
+    def skip_backward(self):
+        args = self._skip_args()
+        self._seek(self._skip_backward_func, args)

     def play_from_time(self, resume_time=0):
-        self.seek_to(resume_time / self.get_total_playback_time())
-        self.play()
+        args = self._resume_at_args(resume_time)
+        if self._seek(self._resume_at_func, args):
+            self.play()

     def set_item(self, item_info, success_callback,
error_callback): raise NotImplementedError() diff -Nru miro-4.0.4/lib/playlist.py miro-6.0/lib/playlist.py --- miro-4.0.4/lib/playlist.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/playlist.py 2013-04-05 16:02:42.000000000 +0000 @@ -82,7 +82,7 @@ """Add a new item to end of the playlist. """ self.MapClass.add_item_id(self.id, item_id) item = models.Item.get_by_id(item_id) - item.save(always_signal=True) + item.playlists_changed(added=True) folder = self.get_folder() if folder is not None: @@ -101,7 +101,7 @@ folder.remove_id(item_id) if signal_change: item = models.Item.get_by_id(item_id) - item.signal_change(needs_save=False) + item.playlists_changed() def add_item(self, item): """Add an item to the end of the playlist""" @@ -125,6 +125,7 @@ (self.id, item_id)).get_singleton() map_.position = i map_.signal_change() + models.Item.playlist_reordered() class SavedPlaylist(database.DDBObject, PlaylistMixin): """An ordered list of videos that the user has saved. diff -Nru miro-4.0.4/lib/prefs.py miro-6.0/lib/prefs.py --- miro-4.0.4/lib/prefs.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/prefs.py 2013-04-05 16:02:42.000000000 +0000 @@ -62,6 +62,14 @@ return self.key != other.key # These are normal user preferences. +DONATE_PAYMENT_URL_TEMPLATE = Pref(key='donatePaymentURLTemplate', default='http://www.getmiro.com/give/?s=m%d', platformSpecific=False) +DONATE_URL_TEMPLATE = Pref(key='donateURLTemplate', default='http://getmiro.com/give/m%s/', platformSpecific=False) +DONATE_ASK1 = Pref(key='donateAsk1', default=10, platformSpecific=False) +DONATE_ASK2 = Pref(key='donateAsk2', default=50, platformSpecific=False) +DONATE_ASK3 = Pref(key='donateAsk3', default=100, platformSpecific=False) +DONATE_NOTHANKS = Pref(key='donateNoThanks', default=0, platformSpecific=False) +LAST_DONATE_TIME = Pref(key='lastDonateTime', default=0, platformSpecific=False) +DONATE_COUNTER = Pref(key='donateCounter', default=10, platformSpecific=False) MAIN_WINDOW_FRAME = Pref(key='mainWindowFrame', default=None, platformSpecific=False) LEFT_VIEW_SIZE = Pref(key='leftViewSize', default=None, platformSpecific=False) RIGHT_VIEW_SIZE = Pref(key='rightViewSize', default=None, platformSpecific=False) @@ -85,7 +93,9 @@ UPLOAD_RATIO = Pref(key='uploadRatio', default=2.0, platformSpecific=False) LIMIT_UPLOAD_RATIO = Pref(key='limitUploadRatio', default=False, platformSpecific=False) STARTUP_TASKS_DONE = Pref(key='startupTasksDone', default=False, platformSpecific=False) -SINGLE_VIDEO_PLAYBACK_MODE = Pref(key='singleVideoPlaybackMode', default=False, platformSpecific=False) +CONTINUOUS_VIDEO_PLAYBACK_MODE = Pref(key='continuousVideoPlaybackMode', default=True, platformSpecific=False) +CONTINUOUS_MUSIC_PLAYBACK_MODE = Pref(key='continuousMusicPlaybackMode', default=True, platformSpecific=False) +CONTINUOUS_PODCAST_PLAYBACK_MODE = Pref(key='continuousPodcastPlaybackMode', default=True, platformSpecific=False) PLAY_DETACHED = Pref(key='detachedPlaybackMode', default=False, platformSpecific=False) DETACHED_WINDOW_FRAME = Pref(key='detachedWindowFrame', default=None, platformSpecific=False) RESUME_VIDEOS_MODE = Pref(key='resumeVideosMode', default=True, platformSpecific=False) @@ -116,11 +126,16 @@ SHARE_VIDEO = Pref(key='ShareVideo', default=True, platformSpecific=False) SHARE_AUDIO = Pref(key='ShareAudio', default=True, platformSpecific=False) SHARE_FEED = Pref(key='ShareFeed', default=True, platformSpecific=False) -MUSIC_TAB_CLICKED = Pref(key='musicTabClicked', default=False, platformSpecific=False) +# the 
musicTabClicked key was used before miro 5.0.  It's been changed because
+# we want to pop up the dialog for users who ran 4.0.x and let them know about
+# internet lookups.
+MUSIC_TAB_CLICKED = Pref(key='musicTabClicked2', default=False, platformSpecific=False)
 SHOW_PODCASTS_IN_VIDEO = Pref(key='showPodcastsInVideo', default=True, platformSpecific=False)
 SHOW_PODCASTS_IN_MUSIC = Pref(key='showPodcastsInMusic', default=False, platformSpecific=False)
 REMEMBER_LAST_DISPLAY = Pref(key='rememberLastDisplay', default=False, platformSpecific=False)
 PODCASTS_DEFAULT_VIEW = Pref(key='podcastsDefaultView', default=0, platformSpecific=False)
+# metadata
+LAST_RETRY_NET_LOOKUP = Pref(key='lastRetryNetLookup', default=0, platformSpecific=False)

 # This doesn't need to be defined on the platform, but it can be overridden there if the platform wants to.
 SHOW_ERROR_DIALOG = Pref(key='showErrorDialog', default=True, platformSpecific=True)
@@ -149,11 +164,11 @@
     u'http://www.miroguide.com/share')
 default_autoupdate = get_from_environ(
     'DTV_AUTOUPDATE_URL',
-    u'http://www.participatoryculture.org/democracy-appcast.xml')
+    u'http://miro-updates.participatoryculture.org/democracy-appcast.xml')
 default_autoupdate_beta = get_from_environ(
     'DTV_AUTOUPDATE_BETA_URL',
-    u'http://www.participatoryculture.org/democracy-appcast-beta.xml')
+    u'http://miro-updates.participatoryculture.org/democracy-appcast-beta.xml')

 CHANNEL_GUIDE_URL = Pref(key='ChannelGuideURL',
                          default=default_guide,
@@ -192,6 +207,9 @@
                  platformSpecific=False)
 PLANET_URL = Pref(key='PlanetURL', default=u"http://planet.getmiro.com/",
                   platformSpecific=False)
+# TODO: should be set to False by default
+NET_LOOKUP_BY_DEFAULT = Pref(key='UseInternetLookupForNew', default=False,
+                             platformSpecific=False)

 # These can be safely ignored on platforms without minimize to tray
 MINIMIZE_TO_TRAY = \
@@ -219,6 +237,8 @@
     Pref(key='LogPathname', default=None, platformSpecific=True)
 DOWNLOADER_LOG_PATHNAME = \
     Pref(key='DownloaderLogPathname', default=None, platformSpecific=True)
+HELPER_LOG_PATHNAME = \
+    Pref(key='HelperLogPathname', default=None, platformSpecific=True)
 GETTEXT_PATHNAME = \
     Pref(key='GetTextPathname', default=None, platformSpecific=True)
 ENABLED_EXTENSIONS = \
diff -Nru miro-4.0.4/lib/schema.py miro-6.0/lib/schema.py
--- miro-4.0.4/lib/schema.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/schema.py 2013-04-05 16:02:42.000000000 +0000
@@ -52,7 +52,7 @@
 from types import NoneType
 from miro.plat.utils import PlatformFilenameType

-class ValidationError(Exception):
+class ValidationError(StandardError):
     """Error thrown when we try to save invalid data."""
     pass

@@ -174,6 +174,28 @@
         super(SchemaSimpleItem, self).validate(data)
         self.validateTypes(data, [int, long, bool, str, unicode])

+class SchemaStringSet(SchemaItem):
+    """Stores a set of strings.
+
+    This is stored in the database as a long string separated by a
+    delimiter (by default ":").
+    """
+
+    def __init__(self, noneOk=False, delimiter=':'):
+        SchemaItem.__init__(self, noneOk)
+        self.delimiter = delimiter
+
+    def validate(self, data):
+        if data is None:
+            super(SchemaStringSet, self).validate(data)
+            return
+        self.validateType(data, set)
+        for obj in data:
+            self.validateType(obj, unicode)
+            if self.delimiter in obj:
+                raise ValidationError("%r contains the delimiter (%s)" %
+                                      (data, self.delimiter))
+
 class SchemaReprContainer(SchemaItem):
     """SchemaItem saved using repr() to save nested lists, dicts and tuples
     that store simple types.
The look is similar to JSON, but @@ -284,34 +306,6 @@ raise ValidationError("value %r (key: %r) has the wrong type: %s" % (value, key, type(value))) -class SchemaStatusContainer(SchemaReprContainer): - """Version of SchemaReprContainer that stores the status dict for - RemoteDownloaders. It allows some values to be byte strings - rather than unicode objects. - """ - - filename_fields = ('channelName', 'shortFilename', 'filename') - - def validate(self, data): - binary_fields = self._binary_fields() - self.validateType(data, dict) - for key, value in data.items(): - self.validateTypes(key, [bool, int, long, float, unicode, - str, NoneType, datetime.datetime, - time.struct_time], "%s: %s" % (key, value)) - if key not in binary_fields: - self.validateTypes(value, [bool, int, long, float, unicode, - NoneType, datetime.datetime, - time.struct_time], "%s: %s" % (key, value)) - else: - self.validateType(value, str) - - def _binary_fields(self): - rv = ('metainfo',) - if PlatformFilenameType != unicode: - rv += self.filename_fields - return rv - class SchemaObject(SchemaItem): """SchemaObject type.""" def __init__(self, klass, noneOk=False): @@ -343,6 +337,7 @@ return cls.klass indexes = () + unique_indexes = () class MultiClassObjectSchema(ObjectSchema): """ObjectSchema where rows will be restored to different python @@ -355,6 +350,21 @@ should use to restore that row. """ +class NoObjectSchema(object): + """Schema for a table that's not associated with a DDBObject class + """ + + @classmethod + def ddb_object_classes(cls): + return () + + @classmethod + def get_ddb_class(cls, restored_data): + raise NotImplementedError("get_ddb_class() shouldn't be called") + + indexes = () + unique_indexes = () + from miro.database import DDBObject from miro.databaselog import DBLogEntry from miro.downloader import RemoteDownloader @@ -366,8 +376,9 @@ from miro.folder import (HideableTab, ChannelFolder, PlaylistFolder, PlaylistFolderItemMap) from miro.guide import ChannelGuide -from miro.item import Item, FileItem +from miro.item import Item, FileItem, DeviceItem, SharingItem from miro.iconcache import IconCache +from miro.metadata import MetadataStatus, MetadataEntry from miro.playlist import SavedPlaylist, PlaylistItemMap from miro.tabs import TabOrder from miro.theme import ThemeHistory @@ -406,54 +417,54 @@ fields = DDBObjectSchema.fields + [ ('is_file_item', SchemaBool()), + ('new', SchemaBool()), + ('title', SchemaString(noneOk=True)), ('feed_id', SchemaInt(noneOk=True)), ('downloader_id', SchemaInt(noneOk=True)), ('parent_id', SchemaInt(noneOk=True)), - ('seen', SchemaBool()), - ('autoDownloaded', SchemaBool()), - ('pendingManualDL', SchemaBool()), - ('pendingReason', SchemaString()), + ('parent_title', SchemaString(noneOk=True)), + ('auto_downloaded', SchemaBool()), + ('pending_manual_download', SchemaBool()), + ('pending_reason', SchemaString()), ('expired', SchemaBool()), ('keep', SchemaBool()), - ('creationTime', SchemaDateTime()), - ('linkNumber', SchemaInt(noneOk=True)), + ('creation_time', SchemaDateTime()), + ('link_number', SchemaInt(noneOk=True)), ('icon_cache_id', SchemaInt(noneOk=True)), - ('downloadedTime', SchemaDateTime(noneOk=True)), - ('watchedTime', SchemaDateTime(noneOk=True)), - ('lastWatched', SchemaDateTime(noneOk=True)), + ('downloaded_time', SchemaDateTime(noneOk=True)), + ('watched_time', SchemaDateTime(noneOk=True)), + ('last_watched', SchemaDateTime(noneOk=True)), ('subtitle_encoding', SchemaString(noneOk=True)), - ('isContainerItem', SchemaBool(noneOk=True)), - 
('releaseDateObj', SchemaDateTime()), - ('eligibleForAutoDownload', SchemaBool()), + ('is_container_item', SchemaBool(noneOk=True)), + ('release_date', SchemaDateTime()), + ('eligible_for_autodownload', SchemaBool()), ('duration', SchemaInt(noneOk=True)), ('screenshot', SchemaFilename(noneOk=True)), - ('resumeTime', SchemaInt()), - ('channelTitle', SchemaString(noneOk=True)), + ('resume_time', SchemaInt()), + ('channel_title', SchemaString(noneOk=True)), ('license', SchemaString(noneOk=True)), ('rss_id', SchemaString(noneOk=True)), ('thumbnail_url', SchemaURL(noneOk=True)), ('entry_title', SchemaString(noneOk=True)), + ('torrent_title', SchemaString(noneOk=True)), ('entry_description', SchemaString(noneOk=False)), - ('link', SchemaURL(noneOk=False)), - ('payment_link', SchemaURL(noneOk=False)), - ('comments_link', SchemaURL(noneOk=False)), - ('url', SchemaURL(noneOk=False)), + ('link', SchemaURL(noneOk=True)), + ('payment_link', SchemaURL(noneOk=True)), + ('comments_link', SchemaURL(noneOk=True)), + ('url', SchemaURL(noneOk=True)), ('enclosure_size', SchemaInt(noneOk=True)), ('enclosure_type', SchemaString(noneOk=True)), ('enclosure_format', SchemaString(noneOk=True)), ('was_downloaded', SchemaBool()), ('filename', SchemaFilename(noneOk=True)), - ('deleted', SchemaBool(noneOk=True)), - ('shortFilename', SchemaFilename(noneOk=True)), - ('offsetPath', SchemaFilename(noneOk=True)), + ('deleted', SchemaBool()), + ('short_filename', SchemaFilename(noneOk=True)), + ('offset_path', SchemaFilename(noneOk=True)), ('play_count', SchemaInt()), ('skip_count', SchemaInt()), - ('cover_art', SchemaFilename(noneOk=True)), - ('mdp_state', SchemaInt(noneOk=True)), + ('size', SchemaInt(noneOk=True)), # metadata: - ('metadata_version', SchemaInt()), - ('title', SchemaString(noneOk=True)), - ('title_tag', SchemaString(noneOk=True)), + ('cover_art', SchemaFilename(noneOk=True)), ('description', SchemaString(noneOk=True)), ('album', SchemaString(noneOk=True)), ('album_artist', SchemaString(noneOk=True)), @@ -470,6 +481,8 @@ ('episode_number', SchemaInt(noneOk=True)), ('season_number', SchemaInt(noneOk=True)), ('kind', SchemaString(noneOk=True)), + ('net_lookup_enabled', SchemaBool()), + ('metadata_title', SchemaString(noneOk=True)), ] indexes = ( @@ -479,19 +492,130 @@ ('item_downloader', ('downloader_id',)), ('item_feed_downloader', ('feed_id', 'downloader_id',)), ('item_file_type', ('file_type',)), + ('item_filename', ('filename',)), + ) + +class DeviceItemSchema(ObjectSchema): + """Schema for items on devices. 
This only gets used for device databases + """ + klass = DeviceItem + table_name = 'device_item' + + fields = DDBObjectSchema.fields + [ + ('title', SchemaString()), + ('creation_time', SchemaDateTime()), + ('watched_time', SchemaDateTime(noneOk=True)), + ('last_watched', SchemaDateTime(noneOk=True)), + ('subtitle_encoding', SchemaString(noneOk=True)), + ('release_date', SchemaDateTime(noneOk=True)), + ('parent_title', SchemaString(noneOk=True)), + ('feed_url', SchemaString(noneOk=True)), + ('license', SchemaString(noneOk=True)), + ('rss_id', SchemaString(noneOk=True)), + ('entry_title', SchemaString(noneOk=True)), + ('torrent_title', SchemaString(noneOk=True)), + ('entry_description', SchemaString(noneOk=True)), + ('permalink', SchemaURL(noneOk=True)), + ('payment_link', SchemaURL(noneOk=True)), + ('comments_link', SchemaURL(noneOk=True)), + ('url', SchemaURL(noneOk=True)), + ('size', SchemaInt()), + ('enclosure_size', SchemaInt(noneOk=True)), + ('enclosure_type', SchemaString(noneOk=True)), + ('enclosure_format', SchemaString(noneOk=True)), + ('filename', SchemaFilename(noneOk=True)), + ('resume_time', SchemaInt()), + ('play_count', SchemaInt()), + ('skip_count', SchemaInt()), + ('auto_sync', SchemaBool()), + # metadata: + ('screenshot', SchemaFilename(noneOk=True)), + ('duration', SchemaInt(noneOk=True)), + ('cover_art', SchemaFilename(noneOk=True)), + ('description', SchemaString(noneOk=True)), + ('album', SchemaString(noneOk=True)), + ('album_artist', SchemaString(noneOk=True)), + ('artist', SchemaString(noneOk=True)), + ('track', SchemaInt(noneOk=True)), + ('album_tracks', SchemaInt(noneOk=True)), + ('year', SchemaInt(noneOk=True)), + ('genre', SchemaString(noneOk=True)), + ('rating', SchemaInt(noneOk=True)), + ('file_type', SchemaString(noneOk=True)), + ('has_drm', SchemaBool(noneOk=True)), + ('show', SchemaString(noneOk=True)), + ('episode_id', SchemaString(noneOk=True)), + ('episode_number', SchemaInt(noneOk=True)), + ('season_number', SchemaInt(noneOk=True)), + ('kind', SchemaString(noneOk=True)), + ('net_lookup_enabled', SchemaBool()), + ('metadata_title', SchemaString(noneOk=True)), + ] + +class SharingItemSchema(ObjectSchema): + """Schema for items on shares. This only gets used for sharing databases + """ + klass = SharingItem + table_name = 'sharing_item' + + fields = DDBObjectSchema.fields + [ + ('daap_id', SchemaInt()), + ('video_path', SchemaString()), + ('title', SchemaString()), + ('description', SchemaString(noneOk=True)), + ('parent_title', SchemaString(noneOk=True)), + ('file_type', SchemaString()), + ('file_format', SchemaString(noneOk=True)), + ('duration', SchemaInt(noneOk=True)), + ('size', SchemaInt(noneOk=True)), + ('artist', SchemaString(noneOk=True)), + ('album_artist', SchemaString(noneOk=True)), + ('album', SchemaString(noneOk=True)), + ('year', SchemaInt(noneOk=True)), + ('genre', SchemaString(noneOk=True)), + ('track', SchemaInt(noneOk=True)), + ('kind', SchemaString(noneOk=True)), + ('show', SchemaString(noneOk=True)), + ('season_number', SchemaInt(noneOk=True)), + ('episode_id', SchemaString(noneOk=True)), + ('episode_number', SchemaInt(noneOk=True)), + ('host', SchemaString()), + ('port', SchemaInt()), + ('address', SchemaString()), + ] + + unique_indexes = ( + ('sharing_item_daap_id', ('daap_id',)), + ) + +class SharingItemPlaylistMapSchema(NoObjectSchema): + """Schema for the playlist item map on shares. 
+ + This only gets used for sharing databases + """ + table_name = 'sharing_item_playlist_map' + + fields = DDBObjectSchema.fields + [ + ('playlist_id', SchemaInt()), + ('item_id', SchemaInt()), + ] + + unique_indexes = ( + ('sharing_item_playlist_map_unique', ('playlist_id', 'item_id')), ) class FeedSchema(DDBObjectSchema): klass = Feed table_name = 'feed' fields = DDBObjectSchema.fields + [ - ('origURL', SchemaURL()), + ('orig_url', SchemaURL()), ('baseTitle', SchemaString(noneOk=True)), ('errorState', SchemaBool()), ('loading', SchemaBool()), ('feed_impl_id', SchemaInt()), ('icon_cache_id', SchemaInt(noneOk=True)), ('folder_id', SchemaInt(noneOk=True)), + ('thumbnail_path', SchemaFilename(noneOk=True)), ('searchTerm', SchemaString(noneOk=True)), ('userTitle', SchemaString(noneOk=True)), ('autoDownloadable', SchemaBool()), @@ -500,12 +624,15 @@ ('maxOldItems', SchemaInt(noneOk=True)), ('fallBehind', SchemaInt()), ('expire', SchemaString()), - ('expireTime', SchemaTimeDelta(noneOk=True)), + ('expire_timedelta', SchemaTimeDelta(noneOk=True)), ('section', SchemaString()), # not used anymore ('visible', SchemaBool()), - ('last_viewed', SchemaDateTime()), ] + indexes = ( + ('feed_impl_key', ('feed_impl_id',)), + ) + class FeedImplSchema(DDBObjectSchema): klass = FeedImpl table_name = 'feed_impl' @@ -594,16 +721,35 @@ table_name = 'remote_downloader' fields = DDBObjectSchema.fields + [ ('url', SchemaURL()), - ('origURL', SchemaURL()), + ('orig_url', SchemaURL()), ('dlid', SchemaString()), - ('contentType', SchemaString(noneOk=True)), - ('channelName', SchemaFilename(noneOk=True)), - ('status', SchemaStatusContainer()), + ('content_type', SchemaString(noneOk=True)), + ('channel_name', SchemaFilename(noneOk=True)), ('metainfo', SchemaBinary(noneOk=True)), ('manualUpload', SchemaBool()), ('state', SchemaString()), ('main_item_id', SchemaInt(noneOk=True)), ('child_deleted', SchemaBool()), + ('total_size', SchemaInt(noneOk=True)), + ('current_size', SchemaInt()), + ('start_time', SchemaInt(noneOk=True)), + ('end_time', SchemaInt(noneOk=True)), + ('short_filename', SchemaFilename(noneOk=True)), + ('filename', SchemaFilename(noneOk=True)), + ('reason_failed', SchemaString(noneOk=True)), + ('short_reason_failed', SchemaString(noneOk=True)), + ('type', SchemaString(noneOk=True)), + ('retry_time', SchemaDateTime(noneOk=True)), + ('retry_count', SchemaInt(noneOk=True)), + ('upload_size', SchemaInt(noneOk=True)), + ('info_hash', SchemaString(noneOk=True)), + ('eta', SchemaInt(noneOk=True)), + ('rate', SchemaInt(noneOk=True)), + ('upload_rate', SchemaInt(noneOk=True)), + ('activity', SchemaString(noneOk=True)), + ('seeders', SchemaInt(noneOk=True)), + ('leechers', SchemaInt(noneOk=True)), + ('connections', SchemaInt(noneOk=True)), ] indexes = ( @@ -660,6 +806,10 @@ ('position', SchemaInt()), ] + indexes = ( + ('playlist_item_map_item_id', ('item_id',)), + ) + class PlaylistFolderItemMapSchema(DDBObjectSchema): klass = PlaylistFolderItemMap table_name = 'playlist_folder_item_map' @@ -670,6 +820,10 @@ ('count', SchemaInt()), ] + indexes = ( + ('playlist_folder_item_map_item_id', ('item_id',)), + ) + class TabOrderSchema(DDBObjectSchema): klass = TabOrder table_name = 'taborder_order' @@ -720,9 +874,7 @@ ('type', SchemaString()), ('id_', SchemaString()), ('selected_view', SchemaInt(noneOk=True)), - ('active_filters', SchemaInt(noneOk=True)), - ('list_view_columns', SchemaList(SchemaString(), noneOk=True)), - ('list_view_widths', SchemaDict(SchemaString(), SchemaInt(), noneOk=True)), + ('active_filters', 
SchemaStringSet(noneOk=True)), ('shuffle', SchemaBool(noneOk=True)), ('repeat', SchemaInt(noneOk=True)), ('selection', SchemaList(SchemaMultiValue(), noneOk=True)), @@ -734,14 +886,6 @@ ('display_state_display', ('type', 'id_')), ) - @staticmethod - def handle_malformed_list_view_columns(value): - return None - - @staticmethod - def handle_malformed_list_view_widths(value): - return None - class GlobalStateSchema(DDBObjectSchema): klass = GlobalState table_name = 'global_state' @@ -768,6 +912,8 @@ ('display_id', SchemaString()), ('view_type', SchemaInt()), ('scroll_position', SchemaTuple(SchemaInt(), SchemaInt(), noneOk=True)), + ('columns_enabled', SchemaList(SchemaString(), noneOk=True)), + ('column_widths', SchemaDict(SchemaString(), SchemaInt(), noneOk=True)), ] indexes = ( @@ -782,7 +928,78 @@ def handle_malformed_selection(value): return None -VERSION = 160 + @staticmethod + def handle_malformed_columns_enabled(value): + return None + + @staticmethod + def handle_malformed_column_widths(value): + return None + +class MetadataStatusSchema(DDBObjectSchema): + klass = MetadataStatus + table_name = 'metadata_status' + fields = DDBObjectSchema.fields + [ + ('path', SchemaFilename()), + ('file_type', SchemaString()), + ('finished_status', SchemaInt()), + ('mutagen_status', SchemaString()), + ('moviedata_status', SchemaString()), + ('echonest_status', SchemaString()), + ('echonest_id', SchemaString(noneOk=True)), + ('net_lookup_enabled', SchemaBool()), + ('mutagen_thinks_drm', SchemaBool()), + ('max_entry_priority', SchemaInt()), + ] + + indexes = ( + ('metadata_finished', ('finished_status',)), + ) + + unique_indexes = ( + ('metadata_path', ('path',)), + ) + +class MetadataEntrySchema(DDBObjectSchema): + klass = MetadataEntry + table_name = 'metadata' + fields = DDBObjectSchema.fields + [ + ('status_id', SchemaInt()), + ('source', SchemaString()), + ('priority', SchemaInt()), + ('file_type', SchemaString(noneOk=True)), + ('duration', SchemaInt(noneOk=True)), + ('album', SchemaString(noneOk=True)), + ('album_artist', SchemaString(noneOk=True)), + ('album_tracks', SchemaInt(noneOk=True)), + ('artist', SchemaString(noneOk=True)), + ('screenshot', SchemaFilename(noneOk=True)), + ('cover_art', SchemaFilename(noneOk=True)), + ('drm', SchemaBool(noneOk=True)), + ('genre', SchemaString(noneOk=True)), + ('title', SchemaString(noneOk=True)), + ('track', SchemaInt(noneOk=True)), + ('year', SchemaInt(noneOk=True)), + ('description', SchemaString(noneOk=True)), + ('rating', SchemaInt(noneOk=True)), + ('show', SchemaString(noneOk=True)), + ('episode_id', SchemaString(noneOk=True)), + ('episode_number', SchemaInt(noneOk=True)), + ('season_number', SchemaInt(noneOk=True)), + ('kind', SchemaString(noneOk=True)), + ('disabled', SchemaBool()), + ] + + indexes = ( + ('metadata_entry_status', ('status_id',)), + ) + + unique_indexes = ( + ('metadata_entry_status_and_source', ('status_id', 'source')), + ) + +VERSION = 201 + object_schemas = [ IconCacheSchema, ItemSchema, FeedSchema, FeedImplSchema, RSSFeedImplSchema, SavedSearchFeedImplSchema, @@ -793,5 +1010,17 @@ PlaylistSchema, HideableTabSchema, ChannelFolderSchema, PlaylistFolderSchema, PlaylistItemMapSchema, PlaylistFolderItemMapSchema, TabOrderSchema, ThemeHistorySchema, DisplayStateSchema, GlobalStateSchema, - DBLogEntrySchema, ViewStateSchema, + DBLogEntrySchema, ViewStateSchema, MetadataStatusSchema, + MetadataEntrySchema +] + +device_object_schemas = [ + MetadataEntrySchema, + MetadataStatusSchema, + DeviceItemSchema, +] + +sharing_object_schemas = [ 
+ SharingItemSchema, + SharingItemPlaylistMapSchema, ] diff -Nru miro-4.0.4/lib/schemav79.py miro-6.0/lib/schemav79.py --- miro-4.0.4/lib/schemav79.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/schemav79.py 2013-04-05 16:02:42.000000000 +0000 @@ -55,7 +55,7 @@ from types import NoneType from miro.plat.utils import PlatformFilenameType -class ValidationError(Exception): +class ValidationError(StandardError): """Error thrown when we try to save invalid data.""" pass diff -Nru miro-4.0.4/lib/searchengines.py miro-6.0/lib/searchengines.py --- miro-4.0.4/lib/searchengines.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/searchengines.py 2013-04-05 16:02:42.000000000 +0000 @@ -42,7 +42,7 @@ import logging from miro.gtcache import gettext as _ -class IntentionalCrash(Exception): +class IntentionalCrash(StandardError): pass class SearchEngineInfo: @@ -81,11 +81,7 @@ def __repr__(self): return "" % (self.name, self.title) -_engines = [] - -def _delete_engines(): - global _engines - _engines = [] +_engines = None def _search_for_search_engines(dir_): """Returns a dict of search engine -> search engine xml file for @@ -157,6 +153,7 @@ # FIXME - lock this down more warn(filename, "Exception parsing file") +from miro.util import DebuggingTimer def create_engines(): """Creates all the search engines specified in the ``resources/searchengines/`` directory and the theme searchengines @@ -164,7 +161,7 @@ engine. """ global _engines - _delete_engines() + _engines = [] engines = _search_for_search_engines(resources.path("searchengines")) engines_dir = os.path.join( app.config.get(prefs.SUPPORT_DIRECTORY), "searchengines") @@ -221,6 +218,8 @@ return [engine.get_request_url(query, filter_adult_contents, limit) \ for engine in _engines if engine.name != u'all'] + if _engines is None: + create_engines() for engine in _engines: if engine.name == engine_name: url = engine.get_request_url(query, filter_adult_contents, limit) @@ -230,6 +229,8 @@ def get_search_engines(): """Returns the list of :class:`SearchEngineInfo` instances. 
""" + if _engines is None: + create_engines() return list(_engines) def get_engine_for_name(name): diff -Nru miro-4.0.4/lib/search.py miro-6.0/lib/search.py --- miro-4.0.4/lib/search.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/search.py 2013-04-05 16:02:42.000000000 +0000 @@ -141,22 +141,32 @@ return ngrams.breakup_list(item_info.search_terms, NGRAM_MIN, NGRAM_MAX) -def item_matches(item_info, search_text): +def item_matches(item, search_text): """Test if a single ItemInfo matches a search - :param item_info: ItemInfo to test + :param item: Item to test :param search_text: search_text to search with :returns: True if the item matches the search string """ parsed_search = _get_boolean_search(search_text) - item_ngrams = _ngrams_for_item(item_info) + + match_against = [item.title, item.description, item.entry_description] + match_against.append(item.artist) + match_against.append(item.album) + match_against.append(item.genre) + match_against.append(item.get_source_for_search()) + if item.filename: + filename = os.path.basename(item.filename) + match_against.append(filename_to_unicode(filename)) + match_against_text = (' '.join(term.lower() for term in match_against + if term is not None)) for term in parsed_search.positive_terms: - if not set(_ngrams_for_term(term)).issubset(item_ngrams): + if term not in match_against_text: return False for term in parsed_search.negative_terms: - if set(_ngrams_for_term(term)).issubset(item_ngrams): + if term in match_against_text: return False return True @@ -258,3 +268,19 @@ for term in negative_terms: matching_ids.difference_update(self._term_search(term)) return matching_ids + + match_against = [item_info.name, item_info.description] + if item_info.artist is not None: + match_against.append(item_info.artist) + if item_info.album is not None: + match_against.append(item_info.album) + if item_info.genre is not None: + match_against.append(item_info.genre) + if item_info.feed_name is not None: + match_against.append(item_info.feed_name) + if item_info.download_info and item_info.download_info.torrent: + match_against.append(u'torrent') + if item_info.video_path: + filename = os.path.basename(item_info.video_path) + match_against.append(filename_to_unicode(filename)) + return (' '.join(match_against)).lower() diff -Nru miro-4.0.4/lib/sharing.py miro-6.0/lib/sharing.py --- miro-4.0.4/lib/sharing.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/sharing.py 2013-04-05 16:02:42.000000000 +0000 @@ -43,18 +43,22 @@ from miro.gtcache import gettext as _ from miro import app +from miro import database from miro import eventloop from miro import messages -from miro import playlist -from miro import feed +from miro import models from miro import prefs from miro import signals from miro import filetypes from miro import fileutil from miro import util +from miro import schema +from miro import storedatabase from miro import transcode from miro import metadata -from miro.item import Item +from miro.data import mappings +from miro.data import itemtrack +from miro.item import Item, SharingItem from miro.fileobject import FilenameType from miro.util import returns_filename @@ -75,6 +79,8 @@ 'org.participatoryculture.miro.itemkind,' + 'com.apple.itunes.mediakind') +DAAP_PODCAST_KEY = 'com.apple.itunes.is-podcast-playlist' + supported_filetypes = filetypes.VIDEO_EXTENSIONS + filetypes.AUDIO_EXTENSIONS # Conversion factor between our local duration (10th of a second) @@ -86,63 +92,11 @@ MIRO_ITEMKIND_SHOW = (1 << 2) MIRO_ITEMKIND_CLIP = (1 << 3) 
-miro_itemkind_mapping = { - 'movie': MIRO_ITEMKIND_MOVIE, - 'show': MIRO_ITEMKIND_SHOW, - 'clip': MIRO_ITEMKIND_CLIP, - 'podcast': MIRO_ITEMKIND_PODCAST -} - miro_itemkind_rmapping = { - MIRO_ITEMKIND_MOVIE: 'movie', - MIRO_ITEMKIND_SHOW: 'show', - MIRO_ITEMKIND_CLIP: 'clip', - MIRO_ITEMKIND_PODCAST: 'podcast' -} - -# XXX The daap mapping from the daap to the attribute is different from the -# reverse mapping, because we use daap_mapping to import items from remote -# side and we use daap_rmapping to create an export list. But, when -# we import and create SharingItem, the attribut needs to be 'title'. But -# when we export, we receive ItemInfo(), which uses 'name'. -daap_mapping = { - 'daap.songformat': 'file_format', - 'com.apple.itunes.mediakind': 'file_type', - 'dmap.itemid': 'id', - 'dmap.itemname': 'title', - 'daap.songtime': 'duration', - 'daap.songsize': 'size', - 'daap.songartist': 'artist', - 'daap.songalbumartist': 'album_artist', - 'daap.songalbum': 'album', - 'daap.songyear': 'year', - 'daap.songgenre': 'genre', - 'daap.songtracknumber': 'track', - 'org.participatoryculture.miro.itemkind': 'kind', - 'com.apple.itunes.series-name': 'show', - 'com.apple.itunes.season-num': 'season_number', - 'com.apple.itunes.episode-num-str': 'episode_id', - 'com.apple.itunes.episode-sort': 'episode_number' -} - -daap_rmapping = { - 'file_format': 'daap.songformat', - 'file_type': 'com.apple.itunes.mediakind', - 'id': 'dmap.itemid', - 'name': 'dmap.itemname', - 'duration': 'daap.songtime', - 'size': 'daap.songsize', - 'artist': 'daap.songartist', - 'album_artist': 'daap.songalbumartist', - 'album': 'daap.songalbum', - 'year': 'daap.songyear', - 'genre': 'daap.songgenre', - 'track': 'daap.songtracknumber', - 'kind': 'org.participatoryculture.miro.itemkind', - 'show': 'com.apple.itunes.series-name', - 'season_number': 'com.apple.itunes.season-num', - 'episode_id': 'com.apple.itunes.episode-num-str', - 'episode_number': 'com.apple.itunes.episode-sort' + MIRO_ITEMKIND_MOVIE: u'movie', + MIRO_ITEMKIND_SHOW: u'show', + MIRO_ITEMKIND_CLIP: u'clip', + MIRO_ITEMKIND_PODCAST: u'podcast' } # Windows Python does not have inet_ntop(). Sigh. Fallback to this one, @@ -158,107 +112,126 @@ ip)) raise ValueError('unknown address family %d' % af) -class SharingItem(metadata.Source): - """ - An item which lives on a remote share. - """ - def __init__(self, **kwargs): - for required in ('video_path', 'id', 'file_type', 'host', 'port'): - if required not in kwargs: - raise TypeError('SharingItem must be given a "%s" argument' - % required) - self.file_format = self.size = None - self.release_date = self.feed_name = self.feed_id = None - self.keep = True - self.isContainerItem = False - self.url = self.payment_link = None - self.comments_link = self.permalink = self.file_url = None - self.license = self.downloader = None - self.duration = self.screenshot = self.thumbnail_url = None - self.resumeTime = 0 - self.description = u'' - self.subtitle_encoding = self.enclosure_type = None - self.metadata_version = 0 - self.file_type = None - self.creation_time = None - - metadata.Source.setup_new(self) - - self.__dict__.update(kwargs) - - self.video_path = FilenameType(self.video_path) - if self.title is None: - self.title = _("Unknown") - # Do we care about file_format? 
- if self.file_format is None: - pass - if self.size is None: - self.size = 0 - if self.release_date is None or self.creation_time is None: - now = time.time() - if self.release_date is None: - self.release_date = now - if self.creation_time is None: - self.creation_time = now - if self.duration is None: # -1 is unknown - self.duration = 0 - - @staticmethod - def id_exists(): - return True +class Share(object): + """Backend object that tracks data for an active DAAP share.""" + _used_db_paths = set() + + def __init__(self, share_id, name, host, port): + self.id = share_id + self.name = name + self.host = host + self.port = port + self.db_path, self.db = self.find_unused_db() + self.db_info = database.DBInfo(self.db) + self.__class__._used_db_paths.add(self.db_path) + self.tracker = None + # SharingInfo object for this share. We use this to send updates to + # the frontend when things change. + self.info = None + + def destroy(self): + if self.db is not None: + self.db.close() + if self.db_path: + fileutil.delete(self.db_path) + self.db = self.db_info = self.db_path = None + + def is_closed(self): + return self.db is None + + def find_unused_db(self): + """Find a DB path for our share that's not being used. + + This method will ensure that no 2 Share objects share the same DB + path, but it will try to delete and then reuse paths that were created by + previous miro instances. + """ + for candidate in self.generate_db_paths(): + if os.path.exists(candidate): + try: + os.remove(candidate) + except EnvironmentError, e: + logging.warn("Share.find_unused_db " + "error removing %s (%s)" % (candidate, e)) + continue + return candidate, self.make_new_database(candidate) + raise AssertionError("Couldn't find an unused path " + "for Share") + + @classmethod + def generate_db_paths(cls): + """Iterate through potential paths for a sharing db. + """ + support_dir = app.config.get(prefs.SUPPORT_DIRECTORY) + for i in xrange(300): + candidate = os.path.join(support_dir, 'sharing-db-%s' % i) + if candidate in cls._used_db_paths: + continue + yield candidate - def get_release_date(self): - try: - return datetime.fromtimestamp(self.release_date) - except ValueError: - logging.warn('SharingItem: release date time %s invalid' % - self.release_date) - return datetime.now() + @classmethod + def cleanup_old_databases(cls): + """Remove any databases left by old miro processes.""" + for path in cls.generate_db_paths(): + if os.path.exists(path): + try: + os.remove(path) + except EnvironmentError: + logging.warn("Share.cleanup_old_databases(): error " + "removing %s" % path) + + def make_new_database(self, path): + return storedatabase.SharingLiveStorage( + path, self.name, schema.sharing_object_schemas) - def get_creation_time(self): - try: - return datetime.fromtimestamp(self.creation_time) - except ValueError: - logging.warn('SharingItem: creation time %s invalid' % - self.creation_time) - return datetime.now() - - @returns_filename - def get_filename(self): - # For daap, sent it to be the same as http as it is basically - # http with a different port. - def daap_handler(path, host, port): - return 'http://%s:%s%s' % (host, port, path) - fn = FilenameType(self.video_path) - fn.set_urlize_handler(daap_handler, [self.address, self.port]) - return fn - - def get_url(self): - return self.url or u'' - - @returns_filename - def get_thumbnail(self): - # What about cover art?
- if self.file_type == 'audio': - return resources.path("images/thumb-default-audio.png") - else: - return resources.path("images/thumb-default-video.png") + def start_tracking(self): """Start tracking items on this share. - def _migrate_thumbnail(self): - # This should not ever do anything useful. We don't have a backing - # database to safe this stuff. - pass + This will create a SharingItemTrackerImpl that connects to the share + using a separate thread. Call stop_tracking() to end the tracking. + """ + if self.tracker is None: + self.tracker = SharingItemTrackerImpl(self) - def drm_description(self): - if self.has_drm: - return _("Locked") - else: - return u"" + def stop_tracking(self): + if self.tracker is not None: + self.tracker.client_disconnect() + self.tracker = None + self.reset_database() + if self.info: + self.info.is_updating = False + self.info.mount = False + self.send_tabs_changed() + + def reset_database(self): + SharingItem.delete(db_info=self.db_info) + self.db.forget_all_objects() + self.db.cache.clear_all() + + def set_info(self, info): + """Set the SharingInfo to use to send updates for.""" + # FIXME: we probably shouldn't be modifying the SharingInfo directly + # here (#19689) + self.info = info + + def update_started(self): + # FIXME: we probably shouldn't be modifying the SharingInfo directly + # here (#19689) + if self.info: + self.info.is_updating = True + self.send_tabs_changed() + + def update_finished(self, success=True): + # FIXME: we probably shouldn't be modifying the SharingInfo directly + # here (#19689) + if self.info: + self.info.mount = success + self.info.is_updating = False + self.send_tabs_changed() - def remove(self, save=True): - # This should never do anything useful, we don't have a backing - # database. Yet. - pass + def send_tabs_changed(self): + message = messages.TabsChanged('connect', [], [self.info], []) + message.send_to_frontend() class SharingTracker(object): """The sharing tracker is responsible for listening for available music @@ -275,12 +248,17 @@ def __init__(self): self.name_to_id_map = dict() self.trackers = dict() + self.shares = dict() + # FIXME: we probably can remove this dict as part of #19689. At the + # least, we should give it a name that better distinguishes it from + # shares self.available_shares = dict() self.r, self.w = util.make_dummy_socket_pair() self.paused = True self.event = threading.Event() libdaap.register_meta('org.participatoryculture.miro.itemkind', 'miKD', libdaap.DMAP_TYPE_UBYTE) + Share.cleanup_old_databases() def mdns_callback(self, added, fullname, host, port): eventloop.add_urgent_call(self.mdns_callback_backend, "mdns callback", @@ -322,17 +300,23 @@ 'DAAP test connect') def mdns_callback_backend(self, added, fullname, host, port): + # SAFE: the shared name should be unique. (Or else you could not + # identify the resource). if fullname == app.sharing_manager.name: return - # Need to come up with a unique ID for the share. We want to use the - # name only since that's supposed to be unique, but can't because - # the name may change, and the id is used throughout to identify - # the item tracker, and we don't want to change the id mid-way. - # We can't use the hostname or port directly also because - # on removal avahi can't do a name query so we have no hostname, - # or port information! So, we have a name to id map. + # Need to come up with a unique ID for the share and that's a bit + # tricky.
We need the id to: + # - Be uniquely determined by the host/port which is the one thing + # that stays the same throughout the share. The fullname can + # change. + # - Be accessible by the current name of the share, this is the only + # info we have during avahi removal + # + # We take the hash of the host and the port to get the id, then map + # the last-known name to it. We force the hash to be positive, since + # other ids are always positive. if added: - share_id = (host, port) + share_id = abs(hash((host, port))) self.name_to_id_map[fullname] = share_id else: try: @@ -397,8 +381,13 @@ [info], []) message.send_to_frontend() return - info = messages.SharingInfo(share_id, share_id, - fullname, host, port) + share = Share(share_id, fullname, host, port) + info = messages.SharingInfo(share) + share.set_info(info) + self.shares[share_id] = share + # FIXME: We should probably only store the Share object and + # create new SharingInfo objects when we want to send updates + # to the frontend (see #19689) info.connect_uuid = uuid.uuid4() self.available_shares[share_id] = info self.try_to_add(share_id, fullname, host, port, @@ -411,6 +400,7 @@ if not share_id in self.trackers.keys(): victim = self.available_shares[share_id] del self.available_shares[share_id] + self.destroy_share(share_id) # Only tell the frontend if the share's been tested because # otherwise the TabsChanged() message wouldn't have arrived. if victim.connect_uuid is None: @@ -420,19 +410,24 @@ # here? Let's add a timeout of 2 secs, if no added message # comes in, assume it's gone bye... share_info = self.available_shares[share_id] - share = self.trackers[share_id] - if share.share != share_info: + tracker_share_info = self.trackers[share_id].share.info + if tracker_share_info != share_info: logging.error('Share disconn error: share info != share') dc = eventloop.add_timeout(2, self.remove_timeout_callback, "share tab removal timeout callback", args=(share_id, share_info)) # Cancel pending callback is there is one. - if share.share.stale_callback: - share.share.stale_callback.cancel() - share.share.stale_callback = dc + if tracker_share_info.stale_callback: + tracker_share_info.stale_callback.cancel() + tracker_share_info.stale_callback = dc + + def destroy_share(self, share_id): + self.shares[share_id].destroy() + del self.shares[share_id] def remove_timeout_callback(self, share_id, share_info): del self.available_shares[share_id] + self.destroy_share(share_id) messages.SharingDisappeared(share_info).send_to_frontend() def server_thread(self): @@ -503,24 +498,19 @@ name='mDNS Browser Thread') self.thread.start() - def eject(self, share_id): + def track_share(self, share_id): try: - tracker = self.trackers[share_id] + self.shares[share_id].start_tracking() except KeyError: - pass - else: - del self.trackers[share_id] - tracker.client_disconnect() + logging.warn("SharingTracker.track_share: " + "Unknown share_id: %s", share_id) - def get_tracker(self, share_id): + def stop_tracking_share(self, share_id): try: - return self.trackers[share_id] + self.shares[share_id].stop_tracking() except KeyError: - logging.debug('sharing: creating new tracker') - share = self.available_shares[share_id] - self.trackers[share_id] = SharingItemTrackerImpl(share) - return self.trackers[share_id] - + logging.warn("SharingTracker.stop_tracking_share: " + "Unknown share_id: %s", share_id)
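The id scheme described in mdns_callback_backend() above can be shown standalone; the service name, host, and port below are made-up values, not anything miro ships:

    # A share's id is abs(hash((host, port))): host/port are the stable
    # part of a share, and abs() keeps ids positive like all other ids.
    # name_to_id_map exists because avahi removal only reports the name.
    name_to_id_map = {}

    def share_added(fullname, host, port):
        share_id = abs(hash((host, port)))
        name_to_id_map[fullname] = share_id
        return share_id

    def share_removed(fullname):
        return name_to_id_map.pop(fullname)

    share_id = share_added(u'Living Room', u'192.168.1.5', 3689)
    assert share_removed(u'Living Room') == share_id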
def stop_tracking(self): # What to do in case of socket error here? self.w.send(SharingTracker.CMD_QUIT) @@ -538,6 +528,157 @@ # What to do in case of socket error here? self.w.send(SharingTracker.CMD_RESUME) +class _ClientUpdateResult(object): + """Stores the results of a client update. + + One issue we must deal with is that we only want to access the daap client + in the thread made for it. However, we want to create SharingItems in the + backend thread. + + This class helps that by calling all the daap client methods that we need + to inside the daap client thread, then allows us to access the data from + the backend thread. + + Attributes: + items - dictionary tracking items that have been added/updated. Maps + item ids to dicts of item data + item_paths - dictionary mapping item ids to their paths for + added/updated items + deleted_items - list of item ids for deleted items + playlists - dictionary tracking the playlists that have been + added/updated. Maps playlist ids to dicts of playlist + data + deleted_playlists - list of playlist ids for deleted playlists + playlist_items - dictionary tracking items added/updated in playlists. + Maps playlist ids to list of item ids + playlist_deleted_items - dictionary tracking items deleted from + playlists. Maps playlist ids to a list of + item ids. + """ + def __init__(self, client, update=False): + self.update = update + self.items = {} + self.item_paths = {} + self.deleted_items = [] + self.playlists = {} + self.deleted_playlists = [] + self.playlist_items = {} + self.playlist_deleted_items = {} + + self.fetch_from_client(client) + + def strip_nuls_from_data(self, data_list): + """Strip nul characters from items/playlist data + + :param data_list: list of dicts containing playlist/item data. For + each string value of each dict nuls will be removed + """ + for data in data_list: + for key, value in data.items(): + if isinstance(value, str): + data[key] = value.replace('\x00', '') + + def fetch_from_client(self, client): + if not self.update: + self.check_database_exists(client) + self.fetch_playlists(client) + self.fetch_items(client) + for daap_id in self.playlists.keys(): + self.fetch_playlist_items(client, daap_id) + + def check_database_exists(self, client): + if not client.databases(update=self.update): + raise IOError('Cannot get database') + + def fetch_playlists(self, client): + self.playlists, self.deleted_playlists = client.playlists( + update=self.update) + if self.playlists is None: + raise IOError('Cannot get playlist') + # Clean the playlist: remove NUL characters. + self.strip_nuls_from_data(self.playlists.values()) + # Only keep playlists that are not the base playlist. We don't + # explicitly show base playlist. + for daap_id, data in self.playlists.items(): + if data.get('daap.baseplaylist', False): + del self.playlists[daap_id] + + def fetch_items(self, client): + self.items, self.deleted_items = client.items( + meta=DAAP_META, + update=self.update) + if self.items is None: + raise ValueError('Cannot find items in base playlist') + + self.strip_nuls_from_data(self.items.values()) + for daap_id, item_data in self.items.items(): + self.item_paths[daap_id] = client.daap_get_file_request( + daap_id, item_data['daap.songformat']) + + def fetch_playlist_items(self, client, playlist_key): + items, deleted = client.items(playlist_id=playlist_key, + meta=DAAP_META, update=self.update) + if items is None: + raise ValueError('Cannot find items for playlist %d' % playlist_key) + self.playlist_items[playlist_key] = items.keys() + self.playlist_deleted_items[playlist_key] = deleted +
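As the docstring above explains, all libdaap calls stay on the client thread and the backend thread only reads the plain dicts and lists left behind. A minimal sketch of that hand-off pattern, with a stand-in result object rather than a real DAAP client:

    import threading

    class FakeResult(object):
        # Stand-in for _ClientUpdateResult: all fetching happens inside
        # the client thread; afterwards only plain data is read elsewhere.
        def __init__(self, client):
            self.items = {1: {'dmap.itemname': 'a song'}}
            self.deleted_items = []

    holder = []
    t = threading.Thread(target=lambda: holder.append(FakeResult(None)))
    t.start()
    t.join()
    result = holder[0]  # safe to read: the worker thread has exited
    assert result.items[1]['dmap.itemname'] == 'a song'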
+class _ClientPlaylistTracker(object): + """Tracks playlist data from the DAAP client for SharingItemTrackerImpl + + Attributes: + playlist_data - maps DAAP ids to the latest playlist data for them + playlist_items - maps DAAP playlist ids to sets of DAAP item ids + """ + def __init__(self): + self.playlist_data = {} + self.playlist_items = {} + + def update(self, result): + """Update data + + :param result: _ClientUpdateResult + """ + for playlist_id, playlist_data in result.playlists.items(): + if playlist_id not in self.playlist_data: + self.playlist_items[playlist_id] = set() + self.playlist_data[playlist_id] = playlist_data + for playlist_id in result.deleted_playlists: + del self.playlist_data[playlist_id] + del self.playlist_items[playlist_id] + for playlist_id, item_ids in result.playlist_items.items(): + self.playlist_items[playlist_id].update(item_ids) + for playlist_id, item_ids in result.playlist_deleted_items.items(): + self.playlist_items[playlist_id].difference_update(item_ids) + + def current_playlists(self): + """Get the playlists that should be visible. """ + return dict((id_, data) + for id_, data in self.playlist_data.items() + if self.playlist_items.get(id_) and + self.playlist_data_valid(data)) + + def playlist_data_valid(self, playlist_data): + return (playlist_data.get('dmap.itemid') and + playlist_data.get('dmap.itemname')) + + def items_in_podcasts(self): + """Get the set of item ids in any podcast playlist.""" + rv = set() + for daap_id, playlist_data in self.playlist_data.items(): + if playlist_data.get(DAAP_PODCAST_KEY): + rv.update(self.playlist_items[daap_id]) + return rv + + def items_in_playlists(self): + """Get the set of item ids in any non-podcast playlist.""" + rv = set() + for daap_id, playlist_data in self.playlist_data.items(): + if not playlist_data.get(DAAP_PODCAST_KEY): + rv.update(self.playlist_items[daap_id]) + return rv +
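A worked example of the set bookkeeping _ClientPlaylistTracker performs above, with made-up ids: playlist membership grows via set.update() and shrinks via set.difference_update(), and a playlist emptied this way drops out of a current_playlists()-style filter because an empty set is falsy:

    playlist_items = {}

    # first update: playlist 10 arrives with two items
    playlist_items.setdefault(10, set()).update([100, 101])
    # a later update deletes one of those items from the playlist
    playlist_items[10].difference_update([100])
    assert playlist_items[10] == set([101])

    # removing the last item leaves an empty (falsy) set, so the
    # playlist would no longer be reported as visible
    playlist_items[10].difference_update([101])
    visible = dict((k, v) for k, v in playlist_items.items() if v)
    assert visible == {}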
# Synchronization issues: this code is a bit sneaky, so here is an explanation # of how it works. When you click on a share tab in the frontend, the # display (the item list controller) starts tracking the items. It does @@ -558,28 +699,51 @@ # it finds the appropriate tracker and calls handle_item_list. Either it is # already populated, or if connection is still in process will return empty # list until the connection success callback is called. -class SharingItemTrackerImpl(signals.SignalEmitter): - """This is the backend for the SharingItemTracker the messagehandler file. This backend class allows the item tracker to be persistent even as the user switches across different tabs in the sidebar, until the disconnect button is clicked. +class SharingItemTrackerImpl(object): + """Handle the backend work to track a single share + + SharingItemTrackerImpl creates a thread to connect and monitor the DAAP + client. As we get changes from the DAAP server, we update the database in + the backend thread. + + This backend is persistent as the user switches across different tabs in + the sidebar, until the disconnect button is clicked. + """ - type = u'sharing' - fake_playlists = ('video', 'audio', 'playlist', 'podcast') + + # Maps DAAP keys to SharingItem attribute names + daap_mapping = { + 'daap.songformat': 'file_format', + 'com.apple.itunes.mediakind': 'file_type', + 'dmap.itemid': 'daap_id', + 'dmap.itemname': 'title', + 'daap.songtime': 'duration', + 'daap.songsize': 'size', + 'daap.songartist': 'artist', + 'daap.songalbumartist': 'album_artist', + 'daap.songalbum': 'album', + 'daap.songyear': 'year', + 'daap.songgenre': 'genre', + 'daap.songtracknumber': 'track', + 'org.participatoryculture.miro.itemkind': 'kind', + 'com.apple.itunes.series-name': 'show', + 'com.apple.itunes.season-num': 'season_number', + 'com.apple.itunes.episode-num-str': 'episode_id', + 'com.apple.itunes.episode-sort': 'episode_number' + } def __init__(self, share): - signals.SignalEmitter.__init__(self) - for sig in 'added', 'changed', 'removed': - self.create_signal(sig) self.client = None self.share = share - self.items = dict() + self.playlist_item_map = mappings.SharingItemPlaylistMap( + share.db_info.db.connection) + self.current_item_ids = set() + self.current_playlist_ids = set() + self.playlist_tracker = _ClientPlaylistTracker() self.info_cache = dict() - self.playlists = dict() - self.base_playlist = None # Temporary - self.share.is_updating = True - message = messages.TabsChanged('connect', [], [self.share], []) - message.send_to_frontend() + self.share.update_started() + self.start_thread() + + def start_thread(self): name = self.share.name host = self.share.host port = self.share.port @@ -609,66 +773,84 @@ eventloop.add_idle(func, name, args=args) return succeeded + def run_client_connect(self): + return self.run(self.client_connect, self.client_connect_callback, + self.client_connect_error_callback) + + def run_client_update(self): + return self.run(self.client_update, self.client_update_callback, + self.client_update_error_callback) + def runloop(self): - success = self.run(self.client_connect, self.client_connect_callback, - self.client_connect_error_callback) + success = self.run_client_connect() # If server does not support update, then we short circuit since # the loop becomes useless. There is nothing wait for being updated. logging.debug('UPDATE SUPPORTED = %s', self.client.supports_update) if not success or not self.client.supports_update: return while True: - success = self.run(self.client_update, self.client_update_callback, - self.client_update_error_callback) + success = self.run_client_update() if not success: break - def sharing_item(self, rawitem): - kwargs = dict() + def convert_raw_sharing_item(self, rawitem, result): + """Convert raw data from libdaap to the attributes of SharingItem + """ + item_data = dict() for k in rawitem.keys(): try: - key = daap_mapping[k] + key = self.daap_mapping[k] except KeyError: # Got something back we don't really care about.
continue - kwargs[key] = rawitem[k] + item_data[key] = rawitem[k] if isinstance(rawitem[k], str): - kwargs[key] = kwargs[key].decode('utf-8') + item_data[key] = item_data[key].decode('utf-8') try: - kwargs['kind'] = miro_itemkind_rmapping[kwargs['kind']] + item_data['kind'] = miro_itemkind_rmapping[item_data['kind']] except KeyError: pass # Fix this up. file_type = u'audio' # fallback try: - if kwargs['file_type'] == libdaap.DAAP_MEDIAKIND_AUDIO: + if item_data['file_type'] == libdaap.DAAP_MEDIAKIND_AUDIO: file_type = u'audio' - if kwargs['file_type'] in [libdaap.DAAP_MEDIAKIND_TV, - libdaap.DAAP_MEDIAKIND_MOVIE, - libdaap.DAAP_MEDIAKIND_VIDEO - ]: + if item_data['file_type'] in [libdaap.DAAP_MEDIAKIND_TV, + libdaap.DAAP_MEDIAKIND_MOVIE, + libdaap.DAAP_MEDIAKIND_VIDEO + ]: file_type = u'video' except KeyError: # Whoups. Server didn't send one over? Assume default. pass - kwargs['file_type'] = file_type - kwargs['video_path'] = self.client.daap_get_file_request( - kwargs['id'], - kwargs['file_format']) - kwargs['host'] = self.client.host + item_data['file_type'] = file_type + item_data['video_path'] = self.get_item_path(result, + item_data['daap_id']) + item_data['file_type'] = file_type + return item_data + + def get_item_path(self, result, daap_id): + return unicode(result.item_paths[daap_id]) + + def make_sharing_item(self, rawitem, result): + kwargs = self.convert_raw_sharing_item(rawitem, result) + kwargs['host'] = unicode(self.client.host) kwargs['port'] = self.client.port - kwargs['address'] = self.address - kwargs['file_type'] = file_type + kwargs['address'] = unicode(self.address) + return SharingItem(self.share, **kwargs) - # Duration: daap uses millisecond, so we need to scale it. - if kwargs['duration'] is not None: - kwargs['duration'] /= DURATION_SCALE + def get_sharing_item(self, daap_id): + return SharingItem.get_by_daap_id(daap_id, db_info=self.share.db_info) - sharing_item = SharingItem(**kwargs) - return sharing_item + def make_playlist_sharing_info(self, daap_id, playlist_data): + return messages.SharingPlaylistInfo( + self.share.id, + playlist_data['dmap.itemname'], + daap_id, + playlist_data.get(DAAP_PODCAST_KEY, False)) def client_disconnect(self): client = self.client @@ -679,19 +861,22 @@ 'DAAP client connect') def client_disconnect_error_callback(self, unused): - self.client_disconnect_callback_common(unused) + self.client_disconnect_callback_common() def client_disconnect_callback(self, unused): - self.client_disconnect_callback_common(unused) + self.client_disconnect_callback_common() - def client_disconnect_callback_common(self, unused): - tab_ids = [p.id for p in self.playlists.itervalues() - if self.items[p.playlist_id] or - p.playlist_id in SharingItemTrackerImpl.fake_playlists] - message = messages.TabsChanged('connect', [], [], tab_ids) + def client_disconnect_callback_common(self): + message = messages.TabsChanged('connect', [], [], + list(self.current_playlist_ids)) message.send_to_frontend() def client_connect(self): + self.make_client() + result = _ClientUpdateResult(self.client) + return result + + def make_client(self): name = self.share.name host = self.share.host port = self.share.port @@ -707,386 +892,116 @@ # Lousy Windows and Python API. address, port = self.client.conn.sock.getpeername() self.address = address - return self.setup_items() - - # See use of self.client in client_update(). 
- def setup_items(self, update=False): - name = self.share.name - host = self.share.host - port = self.share.port - try: - client = self.client - except AttributeError: - # Doesn't matter what exception it is, just raise to call error - # callback - raise - # From this point on ... if use of client is invalid (because socket - # is closed or something it should raise an error and we can bail - # out that way, and call the error callback. - if not client.databases(update=update): - raise IOError('Cannot get database') - deleted_items = dict() - playlists, deleted_playlists = client.playlists(update=update) - if playlists is None: - raise IOError('Cannot get playlist') - returned_playlists = dict() - video_tab_id = unicode(md5(repr((name, - host, - port, u'video'))).hexdigest()) - audio_tab_id = unicode(md5(repr((name, - host, - port, u'audio'))).hexdigest()) - playlist_tab_id = unicode(md5(repr((name, - host, - port, u'playlist'))).hexdigest()) - podcast_tab_id = unicode(md5(repr((name, - host, - port, u'podcast'))).hexdigest()) - for k in playlists.keys(): - # Clean the playlist: remove NUL characters. - for k_ in playlists[k]: - if isinstance(playlists[k][k_], str): - tmp = playlists[k][k_] - playlists[k][k_] = tmp.replace('\x00', '') - - is_base_playlist = None - if playlists[k].has_key('daap.baseplaylist'): - is_base_playlist = playlists[k]['daap.baseplaylist'] - if is_base_playlist: - if not update and self.base_playlist: - logging.debug('WARNING: more than one base playlist found') - if update and self.base_playlist != k: - logging.debug('WARNING: base playlistid changed in update') - self.base_playlist = k - # This isn't the playlist id of the remote share, this is the - # playlist id we use internally. - # XXX is there anything better we can do than repr()? - if not is_base_playlist: - # XXX only add playlist if it not base playlist. We don't - # explicitly show base playlist. - tab_id = unicode(md5(repr((name, - host, - port, k))).hexdigest()) - try: - key = 'com.apple.itunes.is-podcast-playlist' - podcast = playlists[k][key] - except KeyError: - podcast = False - if podcast: - parent_id = podcast_tab_id - else: - parent_id = playlist_tab_id - info = messages.SharingInfo(tab_id, - self.share.id, - playlists[k]['dmap.itemname'], - host, - port, - parent_id=parent_id, - playlist_id=k, - podcast=podcast) - returned_playlists[k] = info - # These are fake: so we only want to insert these once. - if not update: - video_info = messages.SharingInfo(video_tab_id, - self.share.id, - u'video', - host, - port, - parent_id=self.share.id, - playlist_id=u'video') - audio_info = messages.SharingInfo(audio_tab_id, - self.share.id, - u'audio', - host, - port, - parent_id=self.share.id, - playlist_id=u'audio') - playlist_folder_info = messages.SharingInfo(playlist_tab_id, - self.share.id, - u'playlist', - host, - port, - parent_id=self.share.id, - playlist_id=u'playlist', - has_children=True) - podcast_folder_info = messages.SharingInfo(podcast_tab_id, - self.share.id, - u'podcast', - host, - port, - parent_id=self.share.id, - playlist_id=u'podcast', - has_children=True) - returned_playlists['video'] = video_info - returned_playlists['audio'] = audio_info - returned_playlists['playlist'] = playlist_folder_info - returned_playlists['podcast'] = podcast_folder_info - - # Maybe we have looped through here without a base playlist. Then - # the server is broken? 
- if not self.base_playlist: - raise ValueError('Cannot find base playlist') - - items, deleted = client.items(playlist_id=self.base_playlist, - meta=DAAP_META, update=update) - if items is None: - raise ValueError('Cannot find items in base playlist') - - deleted_items[self.base_playlist] = deleted - # Make sure that we ditch stuff from the in-house video, music, - # playlist and podcast tabs too. - # - # The callback will filter out irrelevant items, so we can just - # add these as we please, it's easier that way to than figure out - # the exact set. - for p in SharingItemTrackerImpl.fake_playlists: - deleted_items[p] = deleted - - itemdict = dict() - returned_playlist_items = dict() - returned_items = dict() - video_items = dict() - audio_items = dict() - sharing_item_meth = self.sharing_item - for itemkey in items.keys(): - # Clean it of NUL - for k in items[itemkey]: - if isinstance(items[itemkey][k], str): - tmp = items[itemkey][k] - items[itemkey][k] = tmp.replace('\x00', '') - item = sharing_item_meth(items[itemkey]) - itemdict[itemkey] = item - returned_items[itemkey] = item - if item.file_type == u'video': - video_items[itemkey] = item - elif item.file_type == u'audio': - audio_items[itemkey] = item - else: - logging.warn('item file type unrecognized %s', item.file_type) - returned_playlist_items[u'video'] = video_items - returned_playlist_items[u'audio'] = audio_items - returned_playlist_items[self.base_playlist] = returned_items - - # Have to save the items from the base playlist first, because - # Rhythmbox will get lazy and only send the ids around (expecting - # us to already to have the data, I guess). - playlist_items = dict() - podcast_items = dict() - for k in playlists.keys(): - if k == self.base_playlist: - continue - returned_items = dict() - items, deleted = client.items(playlist_id=k, meta=DAAP_META, - update=update) - if items is None: - raise ValueError('Cannot find items for playlist %d' % k) - deleted_items[k] = deleted - for itemkey in items.keys(): - item = itemdict[itemkey] - returned_items[itemkey] = itemdict[itemkey] - try: - key = 'com.apple.itunes.is-podcast-playlist' - if playlists[k].has_key(key) and playlists[k][key]: - podcast_items[itemkey] = item - else: - playlist_items[itemkey] = item - except KeyError: - pass - returned_playlist_items[k] = returned_items - - returned_playlist_items['podcast'] = podcast_items - returned_playlist_items['playlist'] = playlist_items - - # We don't append these items directly to the object and let - # the success callback to do it to prevent race. - return (returned_playlist_items, returned_playlists, - deleted_playlists, deleted_items) - - # If we are disconnecting, then, disconnect() sets the self.client - # to None before actually running the client.disconnect() routine. - # So, usage of self.client should be: - # - # try: - # client = self.client - # except AttributeError: - # # Handle error here. 
- # pass - # else: - # client.blah() def client_update(self): logging.debug('CLIENT UPDATE') - try: - client = self.client - except AttributeError: - # Doesn't matter what exception it is, just raise to call error - # callback - raise - client.update() - return self.setup_items(update=True) + self.client.update() + result = _ClientUpdateResult(self.client, update=True) + return result - def client_update_callback(self, args): + def client_update_callback(self, result): logging.debug('CLIENT UPDATE CALLBACK') - deleted = [] - changed = [] - added = [] - (returned_items, returned_playlists, - deleted_playlists, deleted_items) = args + if self.share.is_closed(): + logging.warn("client_update_callback: database is closed") + return + self.update_sharing_items(result) + self.update_playlists(result) - # First: delete old junk. But only delete from sidebar if the - # thing was there in the first place, which basically means if there - # was a non-empty number of items in the list. - for k in deleted_playlists: - if k == self.base_playlist: - logging.debug('client_update_callback: remote asked us to ' - 'delete base playlist, ignoring') - continue - try: - playlist_id = self.playlists[k].id - del self.playlists[k] - if self.items[k]: - deleted.append(playlist_id) - except KeyError: - pass + def client_update_error_callback(self, unused): + if self.share.is_closed(): + logging.stacktrace("client_update_error_callback: " + "database is closed") + return + self.client_connect_update_error_callback(unused, update=True) - # Added/update changed playlist. But not necessarily the added/changed - # list to send, because we don't know whether this list is empty - # at this point. Defer until we process the items, we will fix up. - for k, v in returned_playlists.iteritems(): - if self.playlists.has_key(k): - changed.append(v) + # NB: this runs in the eventloop (backend) thread. + def client_connect_callback(self, result): + # ignore deleted items for the first run + if self.share.is_closed(): + logging.warn("client_connect_callback: database is closed") + return + result.deleted_items = [] + result.deleted_playlists = [] + result.playlist_deleted_items = {} + self.update_sharing_items(result) + self.update_playlists(result) + self.share.update_finished() + + def update_sharing_items(self, result): + """Create or update SharingItems on the database. + + :param new_item_data: _ClientUpdateResult + """ + for daap_id, item_data in result.items.items(): + if daap_id not in self.current_item_ids: + self.make_sharing_item(item_data, result) + self.current_item_ids.add(daap_id) else: - added.append(v) - self.items[k] = dict() - self.playlists[k] = v - - # Keep tabs on whether existing playlist is empty or not - old_empty_playlists = set([k for k in self.items if not self.items[k]]) - old_valid_playlists = set([k for k in self.items if self.items[k]]) - - # Process deleted items. - for k, item_ids in deleted_items.iteritems(): + sharing_item = self.get_sharing_item(daap_id) + new_data = self.convert_raw_sharing_item(item_data, result) + for key, value in new_data.items(): + setattr(sharing_item, key, value) + sharing_item.signal_change() + for item_id in result.deleted_items: try: - playlist_items = self.items[k] - except KeyError: - # Huh what? Sent us something that we don't have anymore. - logging.debug('Playlist %s already deleted', k) - continue - # Base playlist == None, so munge it up. Could probably just - # use the base playlist id and skip this trouble! 
-            playlist_id = None if k == self.base_playlist else k
-            for item_id in item_ids:
-                try:
-                    item = playlist_items[item_id]
-                    del playlist_items[item_id]
-                    self.emit('removed', playlist_id, item)
-                except KeyError:
-                    pass
+                sharing_item = SharingItem.get_by_daap_id(
+                    item_id, db_info=self.share.db_info)
+            except database.ObjectNotFoundError:
+                logging.warn("SharingItemTrackerImpl.update_sharing_items: "
+                             "deleted item not found: %s", item_id)
+            else:
+                sharing_item.remove()

-        # Now, process returned items that have been added/changed.
-        for k, updated_playlist in returned_items.iteritems():
-            try:
-                playlist_items = self.items[k]
-            except KeyError:
-                # Huh?  We asked for a playlist that didn't exist?
-                logging.debug('CANNOT ACCESS self.items[%s]', k)
-                continue
-            playlist_id = None if k == self.base_playlist else k
-            for key, value in updated_playlist.iteritems():
-                sig = 'changed' if playlist_items.has_key(key) else 'added'
-                self.emit(sig, playlist_id, value)
-                playlist_items[key] = value
+    def update_playlists(self, result):
+        added = []
+        # We always send the share as changed since we're updating its
+        # contents.
+        changed = []
+        removed = []

-        # Keep tabs on current state of playlists
-        valid_playlists = set([k for k in self.items if self.items[k]])
+        old_playlist_items = {}
+        for daap_id, item_ids in self.playlist_tracker.playlist_items.items():
+            old_playlist_items[daap_id] = item_ids.copy()
+
+        self.playlist_tracker.update(result)
+        # update the playlist item map
+        playlist_items_changed = False
+        new_playlist_items = self.playlist_tracker.playlist_items
+        for playlist_id in old_playlist_items:
+            if playlist_id not in new_playlist_items:
+                self.playlist_item_map.remove_playlist(playlist_id)
+                playlist_items_changed = True
+        for playlist_id, item_ids in new_playlist_items.items():
+            if item_ids != old_playlist_items.get(playlist_id):
+                self.playlist_item_map.set_playlist_items(playlist_id,
+                                                          item_ids)
+                playlist_items_changed = True
+
+        current_playlists = self.playlist_tracker.current_playlists()
+        # check for added/changed playlists
+        for daap_id, playlist_data in current_playlists.items():
+            if daap_id not in self.current_playlist_ids:
+                added.append(
+                    self.make_playlist_sharing_info(daap_id, playlist_data))
+                self.current_playlist_ids.add(daap_id)
+            elif daap_id in result.playlists:
+                changed.append(
+                    self.make_playlist_sharing_info(daap_id, playlist_data))
+        # check for removed playlists
+        removed.extend(self.current_playlist_ids -
+                       set(current_playlists.keys()))
+        self.current_playlist_ids = set(current_playlists.keys())
+        if playlist_items_changed or added or changed or removed:
+            SharingItem.change_tracker.playlist_changed(self.share.id)
+            self.update_fake_playlists()

-        # Filter out empty stuff.  Re-add previously empty stuff that now
-        # has stuff (in changed).
-        #
-        # Algorithm: check added.  If empty, filter out.
-        # Check changed.  If empty, and previously not empty, append to
-        # deleted.  If previously empty and now not empty, move from changed
-        # to added.  If previously empty and now also empty, ditch it from the
-        # changed lists.
-        added = [a for a in added if self.items[a.playlist_id]]
-        to_remove = []
-        for c in changed:
-            if (c.playlist_id == self.base_playlist or
-                c.playlist_id in SharingItemTrackerImpl.fake_playlists):
-                continue
-            if not self.items[c.playlist_id]:
-                # We need to remove it.
-                if c.playlist_id in old_valid_playlists:
-                    # Transitioned from valid to empty.  Append to deleted.
- logging.debug('%s transitioned to empty', c.name) - deleted.append(c.id) - to_remove.append(c) - elif c.playlist_id in old_empty_playlists: - # Was empty, is still empty. Ditch from changed list. - logging.debug('%s was empty, still empty', c.name) - to_remove.append(c) - - # Ditch stuff in the changed list - for r in to_remove: - while True: - try: - changed.remove(r) - except ValueError: - break - - # Was empty and now not empty. Intersection of previously empty - # playlists and playlists are now not empty. Add to added list, - # and if it exists in the changed list, ditch it (because we should - # use add as it did not exist before). - added_ids = list(old_empty_playlists.intersection(valid_playlists)) - for added_id in added_ids: - if added_id == self.base_playlist: - continue - if added_id in SharingItemTrackerImpl.fake_playlists: - continue - added.append(self.playlists[added_id]) - try: - changed.remove(self.playlists[added_id]) - except ValueError: - logging.debug('empty to non-empty transition playlist %s ' - 'not in changed list', added_id) - - # Finally, update the tabs. Use set() to filter out the duplicates. - message = messages.TabsChanged('connect', set(added), set(changed), - set(deleted)) + message = messages.TabsChanged('connect', added, changed, removed) message.send_to_frontend() - def client_update_error_callback(self, unused): - self.client_connect_update_error_callback(unused, update=True) - - # NB: this runs in the eventloop (backend) thread. - def client_connect_callback(self, args): - # Just ignore any deleted items on first connect - they shouldn't - # be there. - returned_items, returned_playlists, _, _ = args - self.items = returned_items - self.playlists = returned_playlists - # Send a list of all the items to the main sharing tab. Only add - # those that are part of the base playlist. - for item in self.items[self.base_playlist].itervalues(): - self.emit('added', None, item) - # Once all the items are added then send display mounted and remove - # the progress indicator. - self.share.mount = True - self.share.is_updating = False - # Only show non-empty stuff, but make sure that we always display - # the top level podcast tabs. This filtering is a bit of work, but - # even if we have hundreds of playlists, it won't be that bad, and - # we only need to do this once. - playlists = [p for p in self.playlists.itervalues() - if self.items[p.playlist_id] and - p.playlist_id not in SharingItemTrackerImpl.fake_playlists] - # Append the ersatz playlists at the front. - for i, v in enumerate(SharingItemTrackerImpl.fake_playlists): - playlists.insert(i, self.playlists[v]) - message = messages.TabsChanged('connect', playlists, - [self.share], []) - message.send_to_frontend() + def update_fake_playlists(self): + self.playlist_item_map.set_playlist_items( + u'podcast', self.playlist_tracker.items_in_podcasts()) + self.playlist_item_map.set_playlist_items( + u'playlist', self.playlist_tracker.items_in_playlists()) def client_connect_error_callback(self, unused): self.client_connect_update_error_callback(unused) @@ -1099,442 +1014,494 @@ # happened while we were in the middle of an update(). 
            return
         if not update:
-            self.share.is_updating = False
-            message = messages.TabsChanged('connect', [], [self.share], [])
-            message.send_to_frontend()
-            if not self.share.stale_callback:
-                app.sharing_tracker.eject(self.share.id)
+            self.share.update_finished(success=False)
+            if not self.share.info.stale_callback:
+                app.sharing_tracker.stop_tracking_share(self.share.id)
             messages.SharingConnectFailed(self.share).send_to_frontend()

-    def get_items(self, playlist_id=None):
-        # NB: keep this in a try/except construct because this could be
-        # called before the connection actually has succeeded.
-        try:
-            if playlist_id is None:
-                return self.items[self.base_playlist].values()
-            else:
-                return self.items[playlist_id].values()
-        except KeyError:
-            logging.error('Cannot get playlist, was looking for %s',
-                          playlist_id)
-            return []
-
-class SharingManagerBackend(object):
-    """SharingManagerBackend is the bridge between pydaap and Miro.  It
-    pushes Miro media items to pydaap so pydaap can serve them to the outside
-    world."""
-
-    type = u'sharing-backend'
-    id = u'sharing-backend'
+class _SharedDataSet(object):
+    """Tracks the items/playlists/feeds we're sharing to others.
+    This object is used by SharingManagerBackend to track the database objects
+    it's sharing.  It needs to be thread-safe since it gets updated in the
+    backend thread and accessed in the server thread for our share.
+    """
     SHARE_AUDIO = libdaap.DAAP_MEDIAKIND_AUDIO
     SHARE_VIDEO = libdaap.DAAP_MEDIAKIND_VIDEO
     SHARE_FEED = 0x4 # XXX

+    # Maps ItemInfo attribute names to DAAP keys.
+    daap_mapping = {
+        'id': 'dmap.itemid',
+        'title': 'dmap.itemname',
+        'duration': 'daap.songtime',
+        'size': 'daap.songsize',
+        'artist': 'daap.songartist',
+        'album_artist': 'daap.songalbumartist',
+        'album': 'daap.songalbum',
+        'year': 'daap.songyear',
+        'genre': 'daap.songgenre',
+        'track': 'daap.songtracknumber',
+        'show': 'com.apple.itunes.series-name',
+        'season_number': 'com.apple.itunes.season-num',
+        'episode_id': 'com.apple.itunes.episode-num-str',
+        'episode_number': 'com.apple.itunes.episode-sort'
+    }
+
+    # Map values for ItemInfo.kind to DAAP values
+    miro_itemkind_mapping = {
+        'movie': MIRO_ITEMKIND_MOVIE,
+        'show': MIRO_ITEMKIND_SHOW,
+        'clip': MIRO_ITEMKIND_CLIP,
+        'podcast': MIRO_ITEMKIND_PODCAST
+    }
+
     def __init__(self):
+        # our lock must be acquired before accessing any of our data.  Our
+        # condition gets signaled when changes occur.
+        self.lock = threading.RLock()
+        self.condition = threading.Condition(self.lock)
+        # current revision number
         self.revision = 1
-        self.share_types = []
+        # map DAAP ids to dicts of item data
+        self.daap_items = dict()
+        # map DAAP ids to dicts of playlist data
+        self.daap_playlists = dict()
+        # map DAAP playlist ids to sets of items in that playlist
+        self.playlist_item_map = dict() # Playlist -> item mapping
+        # map DAAP playlist ids to sets of items that have been removed from
+        # that playlist.
+        self.deleted_item_map = dict() # Playlist -> deleted item mapping
+        # signal handle and trackers that we create in start_tracking()
+        self.config_handle = None
+        self.item_tracker = None
+        self.playlist_tracker = None
+        self.feed_tracker = None
+        self.after_event_finished_handle = None
+        self.item_changes_handle = None
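The __init__ above wires together the pieces that make _SharedDataSet thread-safe: one RLock shared with a Condition, and a monotonically increasing revision counter. Stripped of Miro's trackers, the pattern reduces to the sketch below; RevisionedStore and its names are illustrative, not Miro code, and nothing beyond the standard library is assumed.

    import threading

    class RevisionedStore(object):
        """Sketch of the revision/condition pattern used above."""
        def __init__(self):
            self.lock = threading.RLock()
            self.condition = threading.Condition(self.lock)
            self.revision = 1
            self.data = {}

        def write(self, key, value):
            # writer thread: mutate, bump the revision, wake readers
            with self.lock:
                self.data[key] = value
                self.revision += 1
                self.condition.notify_all()

        def wait_for_newer(self, old_revision, timeout=1.0):
            # reader thread: wait() drops the lock while sleeping and
            # reacquires it before the revision is re-checked; the real
            # get_revision() below also polls the client socket so it
            # can give up early instead of looping forever
            with self.lock:
                while self.revision == old_revision:
                    self.condition.wait(timeout)
                return self.revision

Sharing a single lock between the data and the condition is what lets a reader sleep without blocking writers and still re-check the revision atomically when it wakes.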
+        # store SavedPlaylist/Feed objects that have been
+        # added/changed/removed.  We save up the changes, then process them
+        # all at once when the eventloop emits "event-finished".
+        self.playlists_changed = set()
+        self.playlists_removed = set()
+
+    def _deleted_item(self, daap_id):
+        """Make a dict for a deleted playlist or item."""
+        return {
+            'revision': self.revision,
+            'valid': False,
+            'dmap.itemid': daap_id,
+        }
+
+    def start_tracking(self):
+        with self.lock:
+            # make trackers
+            self.calc_share_types()
+            self.config_handle = app.backend_config_watcher.connect('changed',
+                                     self.on_config_changed)
+            # setup initial data
+            self.start_tracking_items()
+            self.start_tracking_playlists()
+            if _SharedDataSet.SHARE_FEED in self.share_types:
+                self.start_tracking_feeds()
+            self.after_event_finished_handle = eventloop.connect_after(
+                'event-finished', self.after_event_finished)
+            self.item_changes_handle = models.Item.change_tracker.connect(
+                'item-changes', self.on_item_changes)
+
+    def stop_tracking(self):
+        if self.config_handle is not None:
+            app.backend_config_watcher.disconnect(self.config_handle)
+            self.config_handle = None
+        if self.item_tracker is not None:
+            self.item_tracker.destroy()
+            self.item_tracker = None
+        if self.playlist_tracker is not None:
+            self.playlist_tracker.unlink()
+            self.playlist_tracker = None
+        if self.feed_tracker is not None:
+            self.feed_tracker.unlink()
+            self.feed_tracker = None
+        if self.after_event_finished_handle:
+            eventloop.disconnect(self.after_event_finished_handle)
+            self.after_event_finished_handle = None
+        if self.item_changes_handle:
+            models.Item.change_tracker.disconnect(self.item_changes_handle)
+            self.item_changes_handle = None
+
+    def calc_share_types(self):
+        self.share_types = set()
         if app.config.get(prefs.SHARE_AUDIO):
-            self.share_types += [SharingManagerBackend.SHARE_AUDIO]
+            self.share_types.add(_SharedDataSet.SHARE_AUDIO)
         if app.config.get(prefs.SHARE_VIDEO):
-            self.share_types += [SharingManagerBackend.SHARE_VIDEO]
+            self.share_types.add(_SharedDataSet.SHARE_VIDEO)
         if app.config.get(prefs.SHARE_FEED):
-            self.share_types += [SharingManagerBackend.SHARE_FEED]
-
-        self.item_lock = threading.Lock()
-        self.revision_cv = threading.Condition(self.item_lock)
+            self.share_types.add(_SharedDataSet.SHARE_FEED)
+
+    def start_tracking_items(self):
+        query = self._make_item_tracker_query()
+        self.item_tracker = itemtrack.BackendItemTracker(query)
+        for item_info in self.item_tracker.get_items():
+            self.make_daap_item(item_info)
+        self.item_tracker.connect('items-changed', self.on_items_changed)
+
+    def start_tracking_playlists(self):
+        view = models.SavedPlaylist.make_view()
+        for playlist in view:
+            self.make_daap_playlist(playlist)
+        self.playlist_tracker = view.make_tracker()
+        self.playlist_tracker.connect('added', self.on_playlist_added)
+        self.playlist_tracker.connect('changed', self.on_playlist_changed)
+        self.playlist_tracker.connect('removed', self.on_playlist_removed)
+
+    def start_tracking_feeds(self):
+        if self.feed_tracker is None:
+            view = models.Feed.visible_view()
+            for feed in view:
+                self.make_daap_playlist(feed)
+            self.feed_tracker = view.make_tracker()
+            self.feed_tracker.connect('added', self.on_playlist_added)
+            self.feed_tracker.connect('changed', self.on_playlist_changed)
+            self.feed_tracker.connect('removed', self.on_playlist_removed)
+
+    def stop_tracking_feeds(self):
+        if self.feed_tracker is not None:
+            self.feed_tracker.unlink()
+            self.feed_tracker = None
+            # Remove all feeds from our lists
+            for feed in models.Feed.visible_view():
+                self.daap_playlists[feed.id] = self._deleted_item(feed.id)
+
+    def on_items_changed(self, tracker, added, changed, removed):
+        with self.lock:
+            self.revision += 1
+            for 
item_info in added + changed: + self.make_daap_item(item_info) + for item_id in removed: + self.daap_items[item_id] = self._deleted_item(item_id) + self.condition.notify_all() + + def on_playlist_added(self, tracker, playlist_or_feed): + self.playlists_changed.add(playlist_or_feed) + + def on_playlist_changed(self, tracker, playlist_or_feed): + self.playlists_changed.add(playlist_or_feed) + + def on_playlist_removed(self, tracker, playlist_or_feed): + self.playlists_removed.add(playlist_or_feed) + + def after_event_finished(self, eventloop, success): + if not (self.playlists_changed or self.playlists_removed): + return + with self.lock: + self.revision += 1 + for obj in self.playlists_changed: + self.make_daap_playlist(obj) + for obj in self.playlists_removed: + self.daap_playlists[obj.id] = self._deleted_item(obj.id) + self.playlists_changed = set() + self.playlists_removed = set() + self.condition.notify_all() + + def on_item_changes(self, tracker, message): + if 'feed_id' in message.changed_columns: + # items have changed feeds, regenerate the item lists + for feed in models.Feed.visible_view(): + self.make_daap_playlist(feed) + if message.playlists_changed: + # items have been added/removed from playlists, + # regenerate the item lists + for playlist in models.SavedPlaylist.make_view(): + self.make_daap_playlist(playlist) + + def _make_item_tracker_query(self): + query = itemtrack.ItemTrackerQuery() + # we can do this simply when SHARE_AUDIO and SHARE_VIDEO are selected + if (_SharedDataSet.SHARE_AUDIO in self.share_types and + _SharedDataSet.SHARE_VIDEO in self.share_types): + query.add_complex_condition( + ['file_type'], 'item.file_type IN ("audio", "video")') + return query + + # No matter what, we only care about audio/video items + query.add_complex_condition(['file_type'], + 'item.file_type IN ("audio", "video")') + + # Based on the preferences, we OR together various other conditions + extra_sql_parts = [] + extra_sql_columns = set() + if _SharedDataSet.SHARE_AUDIO in self.share_types: + extra_sql_parts.append('file_type = "audio"') + extra_sql_columns.add('file_type') + if _SharedDataSet.SHARE_VIDEO in self.share_types: + extra_sql_parts.append('file_type = "video"') + extra_sql_columns.add('file_type') + if _SharedDataSet.SHARE_FEED in self.share_types: + extra_sql_parts.append('feed.visible') + extra_sql_columns.add('feed.visible') + # items in playlists are always included + extra_sql_parts.append('playlist_item_map.playlist_id IS NOT NULL') + extra_sql_columns.add('playlist_item_map.playlist_id') + # OR together all the parts into a complex condition + sql = ' OR '.join('(%s)' % part for part in extra_sql_parts) + query.add_complex_condition(extra_sql_columns, sql) + return query + + def make_daap_item(self, item_info): + daap_item = dict() + # Set attributes in daap_mapping + for attr_name, key_name in self.daap_mapping.items(): + value = getattr(item_info, attr_name) + # Fixup the year, etc being -1. XXX should read the daap + # type then determine what to do. + if value == -1: + value = 0 + # Fixup: these are stored as string? + if key_name in ('daap.songtracknumber', + 'daap.songyear'): + if value is not None: + value = int(value) + # Fixup the duration: need to convert to millisecond. 
+ elif key_name == 'daap.songtime': + if value: + value *= DURATION_SCALE + else: + value = 0 + daap_item[key_name] = value + # add attributes that need to be calculated + self._calc_item_kind(item_info, daap_item) + self._calc_item_format_mediakind(item_info, daap_item) + self._calc_item_paths(item_info, daap_item) + # add attributes for keys needed by libdaap, but aren't part of DAAP + # itself + daap_item['dmap.containeritemid'] = item_info.id + daap_item['revision'] = self.revision + daap_item['valid'] = True + # Convert unicode to utf-8 + for key, value in daap_item.items(): + if isinstance(value, unicode): + daap_item[key] = value.encode('utf-8') + # store the data + self.daap_items[item_info.id] = daap_item + + # XXX TEMPORARY: should this item be podcast? We won't need this when + # the item type's metadata is completely accurate and won't lie to us. + def _item_from_podcast(self, item_info): + feed_url = item_info.feed_url + if feed_url is None: + logging.warn("_item_from_podcast: feed_url is None for %s", + item_info.title) + return False + ersatz_feeds = ['dtv:manualFeed', 'dtv:searchDownloads', 'dtv:search'] + is_feed = not any([feed_url.startswith(x) for x in ersatz_feeds]) + return item_info.feed_id and is_feed and not item_info.is_file_item + + def _calc_item_kind(self, item_info, daap_item): + """Calculate the value for org.participatoryculture.miro.itemkind + + :param item_info: ItemInfo to get the value from + :param daap_item: dict of DAAP data to set the value for + """ + key = 'org.participatoryculture.miro.itemkind' + if self._item_from_podcast(item_info): + daap_item[key] = MIRO_ITEMKIND_PODCAST + if item_info.kind: + try: + daap_item[key] = self.miro_itemkind_mapping[item_info.kind] + except KeyError: + logging.warn("Error looking up item kind: %s", item_info.kind) + + def _calc_item_format_mediakind(self, item_info, daap_item): + """Calculate the DAAP values for com.apple.itunes.mediakind and + daap.songformat + + :param item_info: ItemInfo to get the values from + :param daap_item: dict of DAAP data to set the values for + """ + + mediakind_key = 'com.apple.itunes.mediakind' + songformat_key = 'daap.songformat' + + # Fixup the enclosure format. This is hardcoded to mp4, + # as iTunes requires this. Other clients seem to be able to sniff + # out the container. We can change it if that's no longer true. + # Fixup the media kind: XXX what about u'other'? 
+        enclosure = item_info.file_format
+        if enclosure not in supported_filetypes:
+            nam, ext = os.path.splitext(item_info.filename)
+            if ext in supported_filetypes:
+                enclosure = ext
+        if enclosure:
+            songformat = enclosure[1:]
+        else:
+            songformat = None
+
+        if item_info.file_type == u'video':
+            daap_item[mediakind_key] = libdaap.DAAP_MEDIAKIND_VIDEO
+            if not songformat:
+                songformat = 'mp4'
+            daap_item[songformat_key] = songformat
+        else:
+            daap_item[mediakind_key] = libdaap.DAAP_MEDIAKIND_AUDIO
+            if not songformat:
+                songformat = 'mp3'
+            daap_item[songformat_key] = songformat
+
+    def _calc_item_paths(self, item_info, daap_item):
+        """Calculate the DAAP values for path and cover_art
+
+        :param item_info: ItemInfo to get the values from
+        :param daap_item: dict of DAAP data to set the values for
+        """
+        daap_item['path'] = item_info.filename
+        defaults = (resources.path('images/thumb-default-audio.png'),
+                    resources.path('images/thumb-default-video.png'))
+        if item_info.thumbnail not in defaults:
+            daap_item['cover_art'] = item_info.thumbnail
+        else:
+            daap_item['cover_art'] = ''
+
+    def make_daap_playlist(self, playlist_or_feed):
+        if isinstance(playlist_or_feed, models.SavedPlaylist):
+            view = models.PlaylistItemMap.playlist_view(playlist_or_feed.id)
+            item_ids = set(pim.item_id for pim in view)
+            is_podcast = False
+        else:
+            item_ids = set(i.id for i in playlist_or_feed.downloaded_items)
+            is_podcast = True
+
+        daap_item = {
+            'dmap.itemid': playlist_or_feed.id,
+            'dmap.persistentid': playlist_or_feed.id,
+            'dmap.itemname': playlist_or_feed.get_title(),
+            'dmap.itemcount': len(item_ids),
+            'dmap.parentcontainerid': 0,
+            'revision': self.revision,
+            'valid': True,
+        }
+        if is_podcast:
+            daap_item[DAAP_PODCAST_KEY] = 1
+        self.daap_playlists[playlist_or_feed.id] = daap_item
+        self.playlist_item_map[playlist_or_feed.id] = item_ids
+
+    def on_config_changed(self, obj, key, value):
+        watched_keys = [prefs.SHARE_AUDIO.key, prefs.SHARE_VIDEO.key,
+                        prefs.SHARE_FEED.key]
+        if key not in watched_keys:
+            return
+        with self.lock:
+            old_share_types = self.share_types
+            self.calc_share_types()
+            changed = self.share_types.symmetric_difference(old_share_types)
+            if changed:
+                self.revision += 1
+                # If SHARE_FEED changes, we need to start/stop tracking feeds
+                if self.SHARE_FEED in changed:
+                    if self.SHARE_FEED in self.share_types:
+                        self.start_tracking_feeds()
+                    else:
+                        self.stop_tracking_feeds()
+                # If SHARE_AUDIO/SHARE_VIDEO changes we need to recalculate
+                # which items are in the main list
+                if changed.intersection([self.SHARE_AUDIO, self.SHARE_VIDEO]):
+                    query = self._make_item_tracker_query()
+                    self.item_tracker.change_query(query)
+                self.condition.notify_all()
+
+    def get_item(self, item_id):
+        with self.lock:
+            return self.daap_items[item_id]
+
+    def get_items(self, playlist_id):
+        with self.lock:
+            if playlist_id is None:
+                return self.daap_items.copy()
+            else:
+                items_dict = dict()
+                for id_ in self.playlist_item_map[playlist_id]:
+                    try:
+                        items_dict[id_] = self.daap_items[id_]
+                    except KeyError:
+                        logging.warn("Error looking up DAAP item: %s", id_)
+                return items_dict
+
+    def get_playlists(self):
+        with self.lock:
+            return self.daap_playlists.copy()
+
+    def get_revision(self, old_revision, request_socket):
+        with self.lock:
+            while self.revision == old_revision:
+                # release our lock and wait for some changes.
+                self.condition.wait(1.0)
+                # If we reached the timeout, check if request_socket is
+                # closed.
+ if self.revision == old_revision: + # we aren't expecting any data from the other side, so if + # the socket is available for reading, that means that + # either a) it's been closed, or b) the other side is + # sending us some garbage. Either way, don't wait on our + # condition for longer + r, w, x = select.select([request_socket], [], [], 0) + if r: + break + return self.revision + +class SharingManagerBackend(object): + """Implement a DAAP server using pydaap + + SharingManagerBackend pushes Miro media items to pydaap so pydaap can + serve them to the outside world. + """ + + type = u'sharing-backend' + id = u'sharing-backend' + + def __init__(self): + self.data_set = _SharedDataSet() self.transcode_lock = threading.Lock() self.transcode = dict() - # XXX daapplaylist should be hidden from view. - self.daapitems = dict() # DAAP format XXX - index via the items - self.daap_playlists = dict() # Playlist, in daap format - self.playlist_item_map = dict() # Playlist -> item mapping - self.deleted_item_map = dict() # Playlist -> deleted item mapping self.in_shutdown = False - self.config_handle = app.backend_config_watcher.connect('changed', - self.on_config_changed) # Reserved for future use: you can register new sharing protocols here. def register_protos(self, proto): pass - # Note: this can be called more than once, if you change your podcast - # configuration to show/hide podcast items! What we do here is, - # ditch the old list re-create new one with the updated information. - # This is a complete list send and not a diff like handle_items_changed() - # is. But make sure at the same time that the old deleted stuff is marked - # as such. - def handle_item_list(self, message): - with self.item_lock: - self.update_revision() - item_ids = [item.id for item in message.items] - if message.id is not None: - self.daap_playlists[message.id]['revision'] = self.revision - self.playlist_item_map[message.id] = item_ids - self.deleted_item_map[message.id] = [] - # Update the revision of these items, so they will match - # when the playlist items are fetched. - for item_id in item_ids: - try: - self.daapitems[item_id]['revision'] = self.revision - except KeyError: - # This non-downloaded podcast item? I think what - # we want to do here is set it as a podcast item - # but disable the items that are not yet available. - # - # Requires work to update the watchable view to include - # stuff from the individual feeds. - pass - else: - deleted = [item_id for item_id in self.daapitems if - item_id not in item_ids] - self.make_item_dict(message.items) - for d in deleted: - self.daapitems[d] = self.deleted_item() - - def handle_items_changed(self, message): - # If items are changed, overwrite with a recreated entry. This - # might not be necessary, as currently this change can be due to an - # item being moved out of, and then into, a playlist. Also, based on - # message.id, change the playlists accordingly. 
- with self.item_lock: - self.update_revision() - for itemid in message.removed: - try: - if message.id is not None: - revision = self.revision - self.daap_playlists[message.id]['revision'] = revision - self.playlist_item_map[message.id].remove(itemid) - self.deleted_item_map[message.id].append(itemid) - except KeyError: - pass - try: - if message.id is None: - self.daapitems[itemid] = self.deleted_item() - except KeyError: - pass - if message.id is not None: - item_ids = [item.id for item in message.added] - self.daap_playlists[message.id]['revision'] = self.revision - # If they have been previously removed, unmark deleted. - for i in item_ids: - try: - self.deleted_item_map[message.id].remove(i) - except ValueError: - pass - self.playlist_item_map[message.id] += item_ids - - # Only make or modify an item if it is for main library. - # Otherwise, we just re-create an item when all that's changed - # is the contents of the playlist. - if message.id is None: - self.make_item_dict(message.added) - self.make_item_dict(message.changed) - else: - # Simply update the item's revision. - # XXX Feed sharing: catch KeyError because item may not - # be downloaded (and hence not in watchable list). - # Catch changed as when feed items get added they - # do not get added to the main list. Catch added, when newly - # available podcasts come into view. - for x in message.added: - try: - self.daapitems[x.id]['revision'] = self.revision - except KeyError: - pass - for x in message.changed: - try: - self.daapitems[x.id]['revision'] = self.revision - except KeyError: - pass - - def deleted_item(self): - return dict(revision=self.revision, valid=False) - - # At this point: item_lock acquired - def update_revision(self, directed=None): - self.revision += 1 - self.directed = directed - self.revision_cv.notify_all() - - def make_daap_playlists(self, items, typ): - for item in items: - itemprop = dict() - for attr in daap_rmapping.keys(): - daap_string = daap_rmapping[attr] - itemprop[daap_string] = getattr(item, attr, None) - # XXX Pants. We use this for the initial population when - # we pass in DB objects and then later on we also use this - # when they are in fact tab infos. In the raw DBObject we - # use title, and in the tab infos we use name. But in the - # DBObject 'name' is valid too! - # - # Blargh! - if daap_string == 'dmap.itemname': - itemprop[daap_string] = getattr(item, 'title', None) - if itemprop[daap_string] is None: - itemprop[daap_string] = getattr(item, 'name', None) - if isinstance(itemprop[daap_string], unicode): - itemprop[daap_string] = ( - itemprop[daap_string].encode('utf-8')) - daap_string = 'dmap.itemcount' - if daap_string == 'dmap.itemcount': - # At this point, the item list has not been fully populated - # yet. Therefore, it may not be possible to run - # get_items() and getting the count attribute. Instead we - # use the playlist_item_map. - if typ == 'playlist': - tmp = [y for y in - playlist.PlaylistItemMap.playlist_view(item.id)] - elif typ == 'feed': - tmp = [y for y in Item.feed_view(item.id)] - else: - # whoups, sorry mate! 
- raise ValueError('unknown playlist variant type %s' % typ) - count = len(tmp) - itemprop[daap_string] = count - daap_string = 'dmap.parentcontainerid' - if daap_string == 'dmap.parentcontainerid': - itemprop[daap_string] = 0 - #attributes.append(('mpco', 0)) # Parent container ID - #attributes.append(('mimc', count)) # Item count - #self.daap_playlists[x.id] = attributes - daap_string = 'dmap.persistentid' - if daap_string == 'dmap.persistentid': - itemprop[daap_string] = item.id - - itemprop['podcast'] = typ == 'feed' - # XXX - if itemprop['podcast']: - itemprop['com.apple.itunes.is-podcast-playlist'] = True - - # piece de resistance - itemprop['revision'] = self.revision - itemprop['valid'] = True - - self.daap_playlists[item.id] = itemprop - - def handle_feed_added(self, obj, added): - added = [a for a in added if not a.url or - (a.url and not a.url.startswith('dtv:'))] - self.handle_playlist_added(obj, added, typ='feed') - - def handle_feed_changed(self, obj, changed): - changed = [c for c in changed if not c.url or - (c.url and not c.url.startswith('dtv:'))] - self.handle_playlist_changed(obj, changed, typ='feed') - - def handle_feed_removed(self, obj, removed): - # Can't actually filter out removed - it is a list of ids. But no - # matter as we just ignore it if we can't find it in our tracked - # playlists. - self.handle_playlist_removed(obj, removed, typ='feed') - - def handle_playlist_added(self, obj, added, typ='playlist'): - playlists = [x for x in added if not x.is_folder] - - def _handle_playlist_added(): - with self.item_lock: - self.update_revision() - self.make_daap_playlists(playlists, typ) - for p in playlists: - # no need to update the revision here: already done in - # make_daap_playlists. - self.playlist_item_map[p.id] = [] - self.deleted_item_map[p.id] = [] - app.info_updater.item_list_callbacks.add(self.type, - p.id, - self.handle_item_list) - app.info_updater.item_changed_callbacks.add(self.type, - p.id, - self.handle_items_changed) - id_ = (p.id, typ == 'feed') - messages.TrackItems(self.type, id_).send_to_backend() - - eventloop.add_urgent_call(lambda: _handle_playlist_added(), - "SharingManagerBackend: playlist added") - - def handle_playlist_changed(self, obj, changed, typ='playlist'): - def _handle_playlist_changed(): - with self.item_lock: - self.update_revision() - # We could just overwrite everything without actually deleting - # the object. A missing key means it's a folder, and we skip - # over it. - playlist = [] - for x in changed: - if self.daap_playlists.has_key(x.id): - #self.daap_playlists[x.id] = self.deleted_item() - del self.daap_playlists[x.id] - playlist.append(x) - self.make_daap_playlists(playlist, typ) - - eventloop.add_urgent_call(lambda: _handle_playlist_changed(), - "SharingManagerBackend: playlist changed") - - - def handle_playlist_removed(self, obj, removed, typ='playlist'): - def _handle_playlist_removed(): - with self.item_lock: - self.update_revision() - for x in removed: - # Missing key means it's a folder and we skip over it. 
- if self.daap_playlists.has_key(x): - self.daap_playlists[x] = self.deleted_item() - #del self.daap_playlists[x] - try: - del self.playlist_item_map[x] - except KeyError: - logging.debug('sharing: cannot delete ' - 'playlist_item_map id = %d', x) - try: - del self.deleted_item_map[x] - except KeyError: - logging.debug('sharing: cannot delete ' - 'deleted_item_map id = %d', x) - messages.StopTrackingItems(self.type, - x).send_to_backend() - app.info_updater.item_list_callbacks.remove(self.type, - x, - self.handle_item_list) - app.info_updater.item_changed_callbacks.remove( - self.type, - x, - self.handle_items_changed) - - eventloop.add_urgent_call(lambda: _handle_playlist_removed(), - "SharingManagerBackend: playlist removed") - - # XXX I think we can probably do away with this one, since the item list - # callbacks will end up populating this anyway? - def populate_playlists(self): - with self.item_lock: - self.update_revision() - # First, playlists. - playlists = playlist.SavedPlaylist.make_view() - # Grab feeds. We like the feeds, but don't grab fake ersatz stuff. - feeds = [f for f in feed.Feed.make_view() if not f.origURL or - (f.origURL and not f.origURL.startswith('dtv:'))] - playlist_ids = [p.id for p in playlists] - feed_ids = [f.id for f in feeds] - self.make_daap_playlists(playlist.SavedPlaylist.make_view(), - 'playlist') - # et tu, feed. But we basically handle it the same way. - self.make_daap_playlists(feeds, 'feed') - # Now, build the playlists. - for playlist_id in self.daap_playlists.keys(): - # revision for playlist already created in make_daap_playlist - if playlist_id in playlist_ids: - self.playlist_item_map[playlist_id] = [x.item_id - for x in playlist.PlaylistItemMap.playlist_view( - playlist_id)] - elif playlist_id in feed_ids: - self.playlist_item_map[playlist_id] = [x.id - for x in Item.feed_view(playlist_id)] - else: - logging.error('playlist id %s not valid', playlist_id) - continue - self.deleted_item_map[playlist_id] = [] - def start_tracking(self): - self.populate_playlists() - # Track items that do not belong in any playlist. Do this first - # so we pick up all items in the media library. - app.info_updater.item_list_callbacks.add(self.type, None, - self.handle_item_list) - app.info_updater.item_changed_callbacks.add(self.type, None, - self.handle_items_changed) - messages.TrackItems(self.type, None).send_to_backend() - - # Now, for the specific playlists. 
-        for playlist_id in self.daap_playlists:
-            app.info_updater.item_list_callbacks.add(self.type, playlist_id,
-                                                     self.handle_item_list)
-            app.info_updater.item_changed_callbacks.add(self.type, playlist_id,
-                                                     self.handle_items_changed)
-            id_ = (playlist_id, self.daap_playlists[playlist_id]['podcast'])
-            messages.TrackItems(self.type, id_).send_to_backend()
-
-        app.info_updater.connect('playlists-added',
-                                 self.handle_playlist_added)
-        app.info_updater.connect('playlists-changed',
-                                 self.handle_playlist_changed)
-        app.info_updater.connect('playlists-removed',
-                                 self.handle_playlist_removed)
-
-        app.info_updater.connect('feeds-added',
-                                 self.handle_feed_added)
-        app.info_updater.connect('feeds-changed',
-                                 self.handle_feed_changed)
-        app.info_updater.connect('feeds-removed',
-                                 self.handle_feed_removed)
+        self.data_set.start_tracking()

     def stop_tracking(self):
-        for playlist_id in self.daap_playlists:
-            messages.StopTrackingItems(self.type,
-                                       playlist_id).send_to_backend()
-            app.info_updater.item_list_callbacks.remove(self.type, playlist_id,
-                                                        self.handle_item_list)
-            app.info_updater.item_changed_callbacks.remove(self.type,
-                                                    playlist_id,
-                                                    self.handle_items_changed)
-        messages.StopTrackingItems(self.type, self.id).send_to_backend()
-        app.info_updater.item_list_callbacks.remove(self.type, None,
-                                                    self.handle_item_list)
-        app.info_updater.item_changed_callbacks.remove(self.type, None,
-                                                    self.handle_items_changed)
-
-        app.info_updater.disconnect(self.handle_playlist_added)
-        app.info_updater.disconnect(self.handle_playlist_changed)
-        app.info_updater.disconnect(self.handle_playlist_removed)
-
-        app.info_updater.disconnect(self.handle_feed_added)
-        app.info_updater.disconnect(self.handle_feed_changed)
-        app.info_updater.disconnect(self.handle_feed_removed)
-
-    def watcher(self, session, request):
-        while True:
-            try:
-                r, w, x = select.select([request], [], [])
-                # Unlock the revision by bumping it
-                with self.item_lock:
-                    logging.debug('WAKEUP %s', session)
-                    self.update_revision(directed=session)
-                break
-            except select.error, (err, errstring):
-                if err == errno.EINTR:
-                    continue
-            except StandardError, err:
-                raise ValueError('watcher: unknown error during select')
+        self.data_set.stop_tracking()

     def get_revision(self, session, old_revision, request):
-        self.revision_cv.acquire()
-        while self.revision == old_revision:
-            t = threading.Thread(target=self.watcher, args=(session, request))
-            t.daemon = True
-            t.start()
-            self.revision_cv.wait()
-            # If we really did an update or if the wakeup was directed at us
-            # (because we are quitting or something) then release the lock
-            # and return the revision
-            if self.directed is None or self.directed == session:
-                break
-            # update revision and then wait again
-            old_revision = self.revision
-        self.revision_cv.release()
-        return self.revision
+        """Block until there is a new revision.
+
+        If the request socket is closed while we are waiting for a new
+        revision, then this method should return the old revision.
+
+        :param session: session id
+        :param old_revision: old revision id.  Return when we get an
+          item/playlist update with a newer revision than this.
+        :param request: socket handle to the client
+
+        :returns: newest revision number
+        """
+        return self.data_set.get_revision(old_revision, request)

     def get_file(self, itemid, generation, ext, session, request_path_func,
                  offset=0, chunk=None):
+        """Get a file to serve
+
+        :returns: (fileobj, filename_hint) tuple
+        """
+        # FIXME: the above docstring could really use some more details.
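As the get_revision() docstring above describes, the server side of a DAAP update is effectively a long poll: the client blocks until the share's contents change, then refetches at the new revision. A hypothetical caller-side loop, using only the documented behaviour; poll_updates and refetch are illustrative names, not Miro or libdaap APIs.

    def poll_updates(backend, session, request_socket, refetch):
        revision = 1
        while True:
            new_revision = backend.get_revision(session, revision,
                                                request_socket)
            if new_revision == revision:
                # per the docstring, an unchanged revision means the
                # request socket closed while we were waiting
                break
            revision = new_revision
            refetch(revision)  # re-pull items/playlists at this revision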
+ file_obj = None - # Get a copy of the item under the lock ... if the underlying item - # is going away then we'll deal with it later on. only care about - # the reference being valid (?) - with self.item_lock: - try: - daapitem = self.daapitems[itemid] - except KeyError: - return None + no_file = (None, None) + # Get a copy of the item and use that. If the item gets deleted in a + # different thread while we're running the code below, then we'll deal + # with it later on. + daapitem = self.data_set.get_item(itemid) path = daapitem['path'] if ext in ('ts', 'm3u8'): # If we are requesting a playlist, this basically means that @@ -1543,7 +1510,7 @@ need_create = False with self.transcode_lock: if self.in_shutdown: - return None + return no_file try: transcode_obj = self.transcode[session] if transcode_obj.itemid != itemid: @@ -1555,7 +1522,7 @@ if generation < transcode_obj.generation: logging.debug('item %s transcode out of order', itemid) - return None + return no_file if chunk is not None and transcode_obj.isseek(chunk): need_create = True old_transcode_obj = transcode_obj @@ -1610,184 +1577,39 @@ except OSError: if file_obj: file_obj.close() - return file_obj + return file_obj, os.path.basename(path) def get_playlists(self): - returned = dict() - with self.item_lock: - for p in self.daap_playlists: - pl = self.daap_playlists[p] - send_podcast = ( - SharingManagerBackend.SHARE_FEED in self.share_types) - if (not pl['valid'] or not pl['podcast'] or - (pl['podcast'] and send_podcast)): - returned[p] = pl - else: - returned[p] = self.deleted_item() - return returned + """Get the current list of playlists - def on_config_changed(self, obj, key, value): - keys = [prefs.SHARE_AUDIO.key, prefs.SHARE_VIDEO.key, - prefs.SHARE_FEED.key] - if key in keys: - with self.item_lock: - share_types_orig = self.share_types - self.share_types = [] - if app.config.get(prefs.SHARE_AUDIO): - self.share_types += [SharingManagerBackend.SHARE_AUDIO] - if app.config.get(prefs.SHARE_VIDEO): - self.share_types += [SharingManagerBackend.SHARE_VIDEO] - if app.config.get(prefs.SHARE_FEED): - self.share_types += [SharingManagerBackend.SHARE_FEED] - # Just by enabling and disabing this, the selection of items - # available to a user could have changed. We are a bit lazy - # here and just use a hammer to update everything without - # working out what needs to be updated. - if share_types_orig != self.share_types: - self.update_revision() - for p in self.daap_playlists: - self.daap_playlists[p]['revision'] = self.revision - for i in self.daapitems: - self.daapitems[i]['revision'] = self.revision - - # XXX TEMPORARY: should this item be podcast? We won't need this when - # the item type's metadata is completely accurate and won't lie to us. - def item_from_podcast(self, item): - feed_url = item.feed_url - ersatz_feeds = ['dtv:manualFeed', 'dtv:searchDownloads', 'dtv:search'] - is_feed = not any([feed_url.startswith(x) for x in ersatz_feeds]) - return item.feed_id and is_feed and not item.is_file_item + This should return a dict mapping DAAP playlist ids to dicts of + playlist data. 
Each dict should contain: + - dmap.itemid -> DAAP id + - dmap.persistentid -> DAAP id + - dmap.itemname -> title + - dmap.itemcount -> number of items in the playlist + - dmap.parentcontainerid -> DAAP id of the parent playlist + (currently always 0) + - revision -> revision this item was last updated + - valid -> False if the item has been deleted + """ + return self.data_set.get_playlists() def get_items(self, playlist_id=None): - # Easy: just return - with self.item_lock: - items = dict() - if not playlist_id: - for k in self.daapitems.keys(): - item = self.daapitems[k] - valid = item['valid'] - if valid: - mk = item['com.apple.itunes.mediakind'] - ik = item['org.participatoryculture.miro.itemkind'] - podcast = ik and (ik & MIRO_ITEMKIND_PODCAST) - include_if_podcast = (podcast and - SharingManagerBackend.SHARE_FEED in self.share_types) - if (not valid or - mk in self.share_types and - (not podcast or include_if_podcast)): - items[k] = item - else: - items[k] = self.deleted_item() - return items - # XXX Somehow cache this? - playlist = dict() - if self.playlist_item_map.has_key(playlist_id): - for x in self.daapitems.keys(): - item = self.daapitems[x] - valid = item['valid'] - if valid: - mk = item['com.apple.itunes.mediakind'] - ik = item['org.participatoryculture.miro.itemkind'] - podcast = ik and (ik & MIRO_ITEMKIND_PODCAST) - include_if_podcast = (podcast and - SharingManagerBackend.SHARE_FEED in self.share_types) - if (x in self.playlist_item_map[playlist_id] and - (not valid or - mk in self.share_types and - (not podcast or include_if_podcast))): - playlist[x] = item - else: - playlist[x] = self.deleted_item() - return playlist - - def make_item_dict(self, items): - # See the daap_rmapping/daap_mapping for a list of mappings that - # we do. - for item in items: - itemprop = dict() - for attr in daap_rmapping.keys(): - daap_string = daap_rmapping[attr] - itemprop[daap_string] = getattr(item, attr, None) - if isinstance(itemprop[daap_string], unicode): - itemprop[daap_string] = ( - itemprop[daap_string].encode('utf-8')) - # Fixup the year, etc being -1. XXX should read the daap - # type then determine what to do. - if itemprop[daap_string] == -1: - itemprop[daap_string] = 0 - # Fixup: these are stored as string? - if daap_string in ('daap.songtracknumber', - 'daap.songyear'): - if itemprop[daap_string] is not None: - itemprop[daap_string] = int(itemprop[daap_string]) - # Fixup the duration: need to convert to millisecond. - if daap_string == 'daap.songtime': - if itemprop[daap_string]: - itemprop[daap_string] *= DURATION_SCALE - else: - itemprop[daap_string] = 0 - # Fixup the enclosure format. This is hardcoded to mp4, - # as iTunes requires this. Other clients seem to be able to sniff - # out the container. We can change it if that's no longer true. - # Fixup the media kind: XXX what about u'other'? - enclosure = item.file_format - if enclosure not in supported_filetypes: - nam, ext = os.path.splitext(item.video_path) - if ext in supported_filetypes: - enclosure = ext - - # If this should be considered an item from a podcast feed then - # mark it as such. But allow for manual overriding by the user, - # as per what was set in the metadata. 
-        if self.item_from_podcast(item):
-            key = 'org.participatoryculture.miro.itemkind'
-            itemprop[key] = MIRO_ITEMKIND_PODCAST
-        try:
-            key = 'org.participatoryculture.miro.itemkind'
-            kind = itemprop[key]
-            if kind:
-                itemprop[key] = miro_itemkind_mapping[kind]
-        except KeyError:
-            pass
-        if itemprop['com.apple.itunes.mediakind'] == u'video':
-            itemprop['com.apple.itunes.mediakind'] = (
-                libdaap.DAAP_MEDIAKIND_VIDEO)
-            if not enclosure:
-                enclosure = '.mp4'
-            enclosure = enclosure[1:]
-            itemprop['daap.songformat'] = enclosure
-        else:
-            itemprop['com.apple.itunes.mediakind'] = (
-                libdaap.DAAP_MEDIAKIND_AUDIO)
-            if not enclosure:
-                enclosure = '.mp3'
-            enclosure = enclosure[1:]
-            itemprop['daap.songformat'] = enclosure
-        # Normally our strings are fixed up above, but then we re-pull
-        # this out of the input data structure, so have to re-convert.
-        if isinstance(itemprop['daap.songformat'], unicode):
-            tmp = itemprop['daap.songformat'].encode('utf-8')
-            itemprop['daap.songformat'] = tmp
-
-        # don't forget to set the path..
-        # ok: it is ignored since this is not valid dmap/daap const.
-        itemprop['path'] = item.video_path
-        defaults = (resources.path('images/thumb-default-audio.png'),
-                    resources.path('images/thumb-default-video.png'))
-        if item.thumbnail not in defaults:
-            itemprop['cover_art'] = item.thumbnail
-        else:
-            itemprop['cover_art'] = ''
-
-        # HACK: the rmapping dict doesn't work because we can't
-        # double up the key.
-        itemprop['dmap.containeritemid'] = itemprop['dmap.itemid']
+        """Get the current list of items

-        # piece de resistance: tack on the revision.
-        itemprop['revision'] = self.revision
-        itemprop['valid'] = True
-
-        self.daapitems[item.id] = itemprop
+        This should return a dict mapping DAAP item ids to dicts of item data.
+        Each dict should contain:
+        - A value for each key in daap_mapping
+        - path -> file path of the item
+        - cover_art -> thumbnail path for the item
+        - revision -> revision this item was last updated
+        - valid -> False if the item has been deleted
+
+        :param playlist_id: playlist to fetch items from, or None to fetch all
+          items.
+        """
+        return self.data_set.get_items(playlist_id)

     def finished_callback(self, session):
         # Like shutdown but only shuts down one of the sessions.  No need to
@@ -1870,13 +1692,25 @@
         self.sharing = False
         self.discoverable = False
         self.name = ''
-        self.mdns_present = libdaap.mdns_init()
+        self.mdns_init_result = None
         self.reload_done_event = threading.Event()
         self.mdns_callback = None
         self.sharing_frontend_volatile = False
         self.sharing_frontend_callbacks = dict()
         self.callback_handle = app.backend_config_watcher.connect('changed',
                                    self.on_config_changed)
+
+    def init_mdns(self):
+        if self.mdns_init_result is None:
+            self.mdns_init_result = libdaap.mdns_init()
+
+    @property
+    def mdns_present(self):
+        self.init_mdns()
+        return self.mdns_init_result
+
+    def startup(self):
+        self.init_mdns()
         # Create the sharing server backend that keeps track of all the list
         # of items available.  Don't know whether we can just query it on the
         # fly, maybe that's a better idea.
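The init_mdns()/mdns_present pair above turns what used to be an eager libdaap.mdns_init() call in __init__ into a lazy, cached probe. The same shape as a standalone sketch; LazyProbe is illustrative and not part of Miro or libdaap.

    class LazyProbe(object):
        """Run an expensive probe on first access and cache the result."""
        def __init__(self, probe_func):
            self.probe_func = probe_func
            self.cached = None

        @property
        def value(self):
            # NB: like mdns_init_result above, a probe that returns False
            # is indistinguishable from "never probed", so a failed probe
            # is simply retried on the next access
            if self.cached is None:
                self.cached = self.probe_func()
            return self.cached

Deferring the probe keeps construction cheap; the cost is that the first reader of mdns_present pays for the mdns initialization.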
diff -Nru miro-4.0.4/lib/signals.py miro-6.0/lib/signals.py
--- miro-4.0.4/lib/signals.py 2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/signals.py 2013-04-05 16:02:42.000000000 +0000
@@ -39,7 +39,7 @@

 from miro import crashreport

-class NestedSignalError(Exception):
+class NestedSignalError(StandardError):
     pass

 class WeakMethodReference:
@@ -98,11 +98,63 @@
     def is_dead(self):
         return self.ref() is None

+class CallbackSet(object):
+    """Stores callbacks connected to a signal for SignalEmitter."""
+    def __init__(self):
+        self.callbacks = {}
+        self.callbacks_after = {}
+        self.callbacks_before = {}
+
+    def add_callback(self, id_, callback):
+        self.callbacks[id_] = callback
+
+    def add_callback_after(self, id_, callback):
+        self.callbacks_after[id_] = callback
+
+    def add_callback_before(self, id_, callback):
+        self.callbacks_before[id_] = callback
+
+    def remove_callback(self, id_):
+        if id_ in self.callbacks:
+            del self.callbacks[id_]
+        elif id_ in self.callbacks_after:
+            del self.callbacks_after[id_]
+        elif id_ in self.callbacks_before:
+            del self.callbacks_before[id_]
+        else:
+            logging.warning(
+                "disconnect called but callback_handle not in the callback")
+
+    def all_callbacks(self):
+        """Get a list of all Callback objects stored.
+
+        The list will contain callbacks added with add_callback_before(),
+        then callbacks added with add_callback(), then callbacks added
+        with add_callback_after().
+        """
+        return (self.callbacks_before.values() +
+                self.callbacks.values() +
+                self.callbacks_after.values())
+
+    def clear_old_weak_references(self):
+        """Remove any dead WeakCallbacks."""
+        all_dicts = (self.callbacks,
+                     self.callbacks_after,
+                     self.callbacks_before)
+        for callback_dict in all_dicts:
+            for id_, callback in callback_dict.items():
+                if callback.is_dead():
+                    del callback_dict[id_]
+
+    def __len__(self):
+        return (len(self.callbacks) + len(self.callbacks_after) +
+                len(self.callbacks_before))
+
 class SignalEmitter(object):
     def __init__(self, *signal_names):
         self.signal_callbacks = {}
         self.id_generator = itertools.count()
         self._currently_emitting = set()
+        self._okay_to_nest = set()
         self._frozen = False
         for name in signal_names:
             self.create_signal(name)
@@ -113,8 +165,12 @@
     def thaw_signals(self):
         self._frozen = False

-    def create_signal(self, name):
-        self.signal_callbacks[name] = {}
+    def create_signal(self, name, okay_to_nest=False):
+        if name in self.signal_callbacks:
+            raise KeyError("%s was already created" % name)
+        self.signal_callbacks[name] = CallbackSet()
+        if okay_to_nest:
+            self._okay_to_nest.add(name)

     def get_callbacks(self, signal_name):
         try:
@@ -123,7 +179,7 @@
             raise KeyError("Signal: %s doesn't exist" % signal_name)

     def _check_already_connected(self, name, func):
-        for callback in self.get_callbacks(name).values():
+        for callback in self.get_callbacks(name).all_callbacks():
             if callback.compare_function(func):
                 raise ValueError("signal %s already connected to %s" %
                                  (name, func))
@@ -138,7 +194,31 @@
         self._check_already_connected(name, func)
         id_ = self.id_generator.next()
         callbacks = self.get_callbacks(name)
-        callbacks[id_] = Callback(func, extra_args)
+        callbacks.add_callback(id_, Callback(func, extra_args))
+        return (name, id_)
+
+    def connect_after(self, name, func, *extra_args):
+        """Like connect(), but run the handler later
+
+        When a signal is fired, we first run the handlers connected with
+        connect_before(), then connect(), then connect_after()
+        """
+        self._check_already_connected(name, func)
+        id_ = self.id_generator.next()
+        callbacks = self.get_callbacks(name)
+        callbacks.add_callback_after(id_, 
Callback(func, extra_args)) + return (name, id_) + + def connect_before(self, name, func, *extra_args): + """Like connect(), but run the handler before others + + When a signal is fired, we first run the handlers connected with + connect_before(), then connect(), then connect_after() + """ + self._check_already_connected(name, func) + id_ = self.id_generator.next() + callbacks = self.get_callbacks(name) + callbacks.add_callback_before(id_, Callback(func, extra_args)) return (name, id_) def connect_weak(self, name, method, *extra_args): @@ -154,7 +234,7 @@ raise TypeError("connect_weak must be called with object methods") id_ = self.id_generator.next() callbacks = self.get_callbacks(name) - callbacks[id_] = WeakCallback(method, extra_args) + callbacks.add_callback(id_, WeakCallback(method, extra_args)) return (name, id_) def disconnect(self, callback_handle): @@ -162,23 +242,20 @@ connect() or connect_weak(). """ callbacks = self.get_callbacks(callback_handle[0]) - if callback_handle[1] in callbacks: - del callbacks[callback_handle[1]] - else: - logging.warning( - "disconnect called but callback_handle not in the callback") + callbacks.remove_callback(callback_handle[1]) def disconnect_all(self): for signal in self.signal_callbacks: - self.signal_callbacks[signal] = {} + self.signal_callbacks[signal] = CallbackSet() def emit(self, name, *args): if self._frozen: return - if name in self._currently_emitting: - raise NestedSignalError("Can't emit %s while handling %s" % - (name, name)) - self._currently_emitting.add(name) + if name not in self._okay_to_nest: + if name in self._currently_emitting: + raise NestedSignalError("Can't emit %s while handling %s" % + (name, name)) + self._currently_emitting.add(name) try: callback_returned_true = self._run_signal(name, args) finally: @@ -196,17 +273,15 @@ if self_callback(*args): callback_returned_true = True if not callback_returned_true: - for callback in self.get_callbacks(name).values(): + for callback in self.get_callbacks(name).all_callbacks(): if callback.invoke(self, args): callback_returned_true = True break return callback_returned_true def clear_old_weak_references(self): - for callback_map in self.signal_callbacks.values(): - for id_ in callback_map.keys(): - if callback_map[id_].is_dead(): - del callback_map[id_] + for callback_set in self.signal_callbacks.values(): + callback_set.clear_old_weak_references() class SystemSignals(SignalEmitter): """System wide signals for Miro. These can be accessed from the singleton diff -Nru miro-4.0.4/lib/singleclick.py miro-6.0/lib/singleclick.py --- miro-4.0.4/lib/singleclick.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/singleclick.py 2013-04-05 16:02:42.000000000 +0000 @@ -40,6 +40,7 @@ from miro import app from miro import amazon +from miro import emusic from miro import dialogs from miro import item from miro import feed @@ -209,7 +210,6 @@ """We need to figure out if the URL is a external video link, or a link to a feed. 
""" - print 'callback for', url, headers, content_type if check_url_exists(url): return @@ -244,13 +244,15 @@ else: handle_unknown_callback(url) - if metadata and 'mime_type' in metadata: + if metadata and metadata.get('mime_type'): # we've already got the mime type, don't do another call callback(None, metadata['mime_type']) elif is_magnet_uri(url): callback(None, 'application/x-magnet') elif amazon.is_amazon_url(url): amazon.download_file(url, handle_unknown_callback) + elif emusic.is_emusic_url(url): + emusic.download_file(url, handle_unknown_callback) else: httpclient.grab_headers(url, callback, errback) diff -Nru miro-4.0.4/lib/startfrontend.py miro-6.0/lib/startfrontend.py --- miro-4.0.4/lib/startfrontend.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/startfrontend.py 2013-04-05 16:02:42.000000000 +0000 @@ -47,9 +47,12 @@ import string import logging +import threading from miro import app from miro import startup +from miro import threadcheck +from miro import util def load_frontend(globals_, locals_, frontend): try: @@ -63,6 +66,7 @@ return None def run_application(frontend, props_to_set, theme): + app.startup_timer = util.DebuggingTimer() startup.initialize(theme) set_properties(props_to_set) @@ -84,6 +88,8 @@ break else: raise ValueError("Cannot load frontend: %s" % frontend) + threadcheck.set_ui_thread(threading.currentThread()) + app.frontend_name = frontend application.run_application() def set_properties(props): diff -Nru miro-4.0.4/lib/startup.py miro-6.0/lib/startup.py --- miro-4.0.4/lib/startup.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/startup.py 2013-04-05 16:02:42.000000000 +0000 @@ -31,7 +31,7 @@ In general, frontends should do the following to handle startup. FIXME - - (optional) call startup.install_movies_gone_handler() + - (optional) call startup.install_movies_directory_gone_handler() - Call startup.initialize() - Wait for either the 'startup-success', or 'startup-failure' signal """ @@ -45,6 +45,7 @@ import threading import time +from miro import api from miro import app from miro import autodler from miro import autoupdate @@ -56,6 +57,7 @@ from miro import databaseupgrade from miro import dbupgradeprogress from miro import dialogs +from miro import donate from miro import downloader from miro import eventloop from miro import fileutil @@ -65,35 +67,31 @@ from miro import iconcache from miro import item from miro import itemsource -from miro import iteminfocache from miro import feed from miro import folder from miro import messages from miro import messagehandler -from miro import metadataprogress from miro import models -from miro import moviedata from miro import playlist from miro import prefs import miro.plat.resources -from miro.plat.utils import setup_logging -from miro.plat import config as platformcfg +from miro.plat.utils import setup_logging, filename_to_unicode from miro import tabs from miro import theme +from miro import threadcheck from miro import util from miro import searchengines from miro import storedatabase from miro import conversions from miro import devices from miro import sharing -from miro import transcode from miro import workerprocess from miro.plat import devicetracker DEBUG_DB_MEM_USAGE = False mem_usage_test_event = threading.Event() -class StartupError(Exception): +class StartupError(StandardError): def __init__(self, summary, description): self.summary = summary self.description = description @@ -111,9 +109,7 @@ else: m = messages.FrontendQuit() m.send_to_frontend() - except (SystemExit, KeyboardInterrupt): 
- raise - except Exception, exc: + except StandardError, exc: # we do this so that we only kick up the database error # if it's a database-related exception AND the app has a # db attribute @@ -165,31 +161,8 @@ m.send_to_frontend() return wrapped -def _movies_directory_gone_handler(callback): - """Default _movies_directory_gone_handler. The frontend should - override this using the ``install_movies_directory_gone_handler`` - function. - """ - logging.error("Movies directory is gone -- no handler installed!") - eventloop.add_urgent_call(callback, "continuing startup") - -def install_movies_directory_gone_handler(callback): - global _movies_directory_gone_handler - _movies_directory_gone_handler = callback - -def _first_time_handler(callback): - """Default _first_time_handler. The frontend should override this - using the ``install_first_time_handler`` function. - """ - logging.error("First time -- no handler installed.") - eventloop.add_urgent_call(callback, "continuing startup") - -def install_first_time_handler(callback): - global _first_time_handler - _first_time_handler = callback - def setup_global_feed(url, *args, **kwargs): - view = feed.Feed.make_view('origURL=?', (url,)) + view = feed.Feed.make_view('orig_url=?', (url,)) view_count = view.count() if view_count == 0: logging.info("Spawning global feed %s", url) @@ -215,7 +188,8 @@ app.debugmode = False # this is platform specific - setup_logging() + setup_logging(app.config.get(prefs.LOG_PATHNAME), + main_process=True) # this is portable general util.setup_logging() app.controller = controller.Controller() @@ -242,7 +216,7 @@ logging.info("Builder: %s", app.config.get(prefs.BUILD_MACHINE)) logging.info("Build Time: %s", app.config.get(prefs.BUILD_TIME)) logging.info("Debugmode: %s", app.debugmode) - eventloop.connect('thread-started', finish_startup) + eventloop.connect('thread-started', startup_for_frontend) logging.info("Reading HTTP Password list") httpauth.init() httpauth.restore_from_file() @@ -264,8 +238,13 @@ app.extension_manager.load_extensions() @startup_function -def finish_startup(obj, thread): - database.set_thread(thread) +def startup_for_frontend(obj, thread): + """Run the startup code needed to get the frontend started + + This function should be kept as small as possible to ensure good startup + times. 
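Every step below runs under the @startup_function decorator defined earlier in this hunk, which turns an uncaught StandardError into a report to the frontend instead of a dead event loop. A condensed, standalone sketch of that pattern (plain logging stands in for the messages.StartupFailure plumbing):

    import functools
    import logging

    def startup_function(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            try:
                func(*args, **kwargs)
            except StandardError:
                # the real decorator builds a frontend message here,
                # with special handling for database-related errors
                logging.exception("aborting startup")
        return wrapped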
+ """ + threadcheck.set_eventloop_thread(thread) logging.info("Installing deleted file checker...") item.setup_deleted_checker() logging.info("Restoring database...") @@ -288,6 +267,7 @@ except storedatabase.UpgradeError: raise StartupError(None, None) database.initialize() + downloader.reset_download_stats() end = time.time() logging.timing("Database upgrade time: %.3f", end - start) if app.db.startup_version != app.db.current_version: @@ -299,19 +279,11 @@ util.db_mem_usage_test() mem_usage_test_event.set() - # MetadataProgressUpdater needs to be installed before ItemInfoCache, - # since ItemInfoCache may create items if it uses failsafe mode - app.metadata_progress_updater = metadataprogress.MetadataProgressUpdater() - app.item_info_cache = iteminfocache.ItemInfoCache() - app.item_info_cache.load() dbupgradeprogress.upgrade_end() - logging.info("Loading video converters...") - conversions.conversion_manager.startup() - app.device_manager = devices.DeviceManager() - app.device_tracker = devicetracker.DeviceTracker() + app.startup_timer.log_time("after db upgrade") - searchengines.create_engines() + app.icon_cache_updater = iconcache.IconCacheUpdater() setup_global_feeds() # call fix_database_inconsistencies() ASAP after the manual feed is set up fix_database_inconsistencies() @@ -320,15 +292,13 @@ logging.info("setup theme...") setup_theme() install_message_handler() - itemsource.setup_handlers() - downloader.init_controller() - # Call this late, after the message handlers have been installed. - app.sharing_tracker = sharing.SharingTracker() app.sharing_manager = sharing.SharingManager() - app.transcode_manager = transcode.TranscodeManager() + app.download_state_manager = downloader.DownloadStateManager() + item.setup_change_tracker() + item.setup_metadata_manager() - eventloop.add_urgent_call(check_firsttime, "check first time") + _startup_checker.run_checks() def fix_database_inconsistencies(): item.fix_non_container_parents() @@ -336,101 +306,214 @@ playlist.fix_missing_item_ids() folder.fix_playlist_missing_item_ids() -@startup_function -def check_firsttime(): - """Run the first time wizard if need be. - """ - callback = lambda: eventloop.add_urgent_call(check_movies_gone, - "check movies gone") - if is_first_time(): - logging.info("First time run -- calling handler.") - _first_time_handler(callback) - return +class StartupChecker(object): + """Handles various checks at startup. - eventloop.add_urgent_call(check_movies_gone, "check movies gone") + This class handles the first-time startup check and the movies directory + gone check. -@startup_function -def check_movies_gone(): - """Checks to see if the movies directory is gone. + This code is a bit weird because of the interplay between the frontend and + the backend. The checks run in the backend, but if they fail then the + frontend needs to prompt the user to ask them what to do. Also, neither + side is totally started up at this point. """ + def run_checks(self): + self.check_firsttime() - # callback is what the frontend will call if the user asks us to continue - callback = lambda: eventloop.add_urgent_call(fix_movies_gone, - "fix movies gone") + @startup_function + def check_firsttime(self): + """Run the first time wizard if need be. 
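The threadcheck calls above (set_ui_thread() in startfrontend.py, set_eventloop_thread() here) pin each subsystem to its owning thread. The module's internals are not part of this patch; a minimal sketch of how such a checker is typically built, with confirm_eventloop_thread() as an assumed helper name:

    import threading

    _eventloop_thread = None

    def set_eventloop_thread(thread):
        global _eventloop_thread
        _eventloop_thread = thread

    def confirm_eventloop_thread():
        # hypothetical assertion helper: raise when called off-thread
        if threading.currentThread() is not _eventloop_thread:
            raise AssertionError("not on the eventloop thread")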
+ """ + callback = lambda: eventloop.add_urgent_call(self.check_movies_gone, + "check movies gone") + if is_first_time(): + logging.info("First time run -- calling handler.") + self.first_time_handler(callback) + return - movies_dir = fileutil.expand_filename(app.config.get( - prefs.MOVIES_DIRECTORY)) + self.check_movies_gone() - # if the directory doesn't exist, create it. - if (not os.path.exists(movies_dir) and - should_create_movies_directory(movies_dir)): - try: - fileutil.makedirs(movies_dir) - except OSError: - logging.info( - "Movies directory can't be created -- calling handler") - # FIXME - this isn't technically correct, but it's probably - # close enough that a user can fix the issue and Miro can - # run happily. - _movies_directory_gone_handler(callback) + def first_time_handler(callback): + """Default handler for first-time startup + + install_first_time_handler() replaces this method with the + frontend-specific one. + """ + logging.error("First time -- no handler installed.") + eventloop.add_urgent_call(callback, "continuing startup") + + @startup_function + def check_movies_gone(self, check_unmounted=True): + """Checks to see if the movies directory is gone. + """ + + movies_dir = fileutil.expand_filename(app.config.get( + prefs.MOVIES_DIRECTORY)) + movies_dir = filename_to_unicode(movies_dir) + + # if the directory doesn't exist, create it. + if (not os.path.exists(movies_dir) and + should_create_movies_directory(movies_dir)): + try: + fileutil.makedirs(movies_dir) + except OSError: + logging.info("Movies directory can't be created -- calling handler") + # FIXME - this isn't technically correct, but it's probably + # close enough that a user can fix the issue and Miro can + # run happily. + msg = _("Permissions error: %(appname)s couldn't " + "create the folder.", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + self.movies_directory_gone_handler(msg, movies_dir) + return + + # make sure the directory is writeable + if not os.access(movies_dir, os.W_OK): + logging.info("Can't write to movies directory -- calling handler") + msg = _("Permissions error: %(appname)s can't " + "write to the folder.", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + self.movies_directory_gone_handler(msg, movies_dir) return - # make sure the directory is writeable - if not os.access(movies_dir, os.W_OK): - _movies_directory_gone_handler(callback) - return + # make sure that the directory is populated if we've downloaded stuff to + # it + if check_unmounted and check_movies_directory_unmounted(): + logging.info("Movies directory is gone -- calling handler.") + msg = _("The folder contains no files: " + "is it on a drive that's disconnected?") + self.movies_directory_gone_handler(msg, movies_dir, + allow_continue=True) + return - # make sure that the directory is populated if we've downloaded stuff to - # it - if is_movies_directory_gone(): - logging.info("Movies directory is gone -- calling handler.") - _movies_directory_gone_handler(callback) - return + self.all_checks_done() - eventloop.add_urgent_call(finish_backend_startup, "reconnect downloaders") + def movies_directory_gone_handler(self, message, movies_dir, + allow_continue=False): + """Default movies_directory_gone_handler. + + This method simply quits when the movies directory is gone. + install_movies_directory_gone_handler() replaces this method with the + frontend-specific one. 
+ + present them with the following options: + - quit + - change movies directory + - continue with current directory (if allow_continue is True) + + After the user picks, the frontend should call either + app.controller.shutdown() or startup.fix_movies_gone() + """ + logging.error("Movies directory is gone -- no handler installed!") + app.controller.shutdown() + + @startup_function + def fix_movies_gone(self, new_movies_directory): + """Called by the movies directory gone handler to fix the issue. + + :param new_movies_directory: new path for the movies directory, or + None if we should continue with the current directory. + """ + if new_movies_directory is not None: + app.config.set(prefs.MOVIES_DIRECTORY, new_movies_directory) + # do another check to make sure the selected directory works. Here we + # skip the unmounted check, since it's not exact and the user is + # giving us a directory. + self.check_movies_gone(check_unmounted=False) + + def all_checks_done(self): + # Uncomment the next line to test startup error handling + # raise StartupError("Test Error", "Startup Failed") + app.startup_timer.log_time("sending StartupSuccess()") + messages.StartupSuccess().send_to_frontend() -@startup_function -def fix_movies_gone(): - app.config.set(prefs.MOVIES_DIRECTORY, platformcfg.get( - prefs.MOVIES_DIRECTORY)) - eventloop.add_urgent_call(finish_backend_startup, "reconnect downloaders") +_startup_checker = StartupChecker() -@startup_function -def finish_backend_startup(): - """Last bit of startup required before we load the frontend. """ - # Uncomment the next line to test startup error handling - # raise StartupError("Test Error", "Startup Failed") - reconnect_downloaders() - guide.download_guides() - feed.remove_orphaned_feed_impls() - messages.StartupSuccess().send_to_frontend() +def install_movies_directory_gone_handler(callback): + """Install a function to handle the movies directory being gone + + The frontend should call this method and pass it a callback to handle this + situation. The signature is (message, movies_dir, allow_continue=False). + The callback should present the user with the following options: + - quit + - change movies directory + - continue with current directory (if allow_continue is True) + + After the user picks, the callback should call either + app.controller.shutdown() or startup.fix_movies_gone() + """ + _startup_checker.movies_directory_gone_handler = callback + +def install_first_time_handler(callback): + """Install a function to handle first-time startup + + If the frontend wants, it can pass a callback that shows a dialog to the + user on first-time startup. The function will be passed a single argument + which is a callback function to call once the dialog completes. + """ + _startup_checker.first_time_handler = callback + +def fix_movies_gone(new_movies_directory): + """Called by the movies directory gone handler to fix the issue. + + :param new_movies_directory: new path for the movies directory, or None if + we should continue with the current directory. + """ + eventloop.add_urgent_call(_startup_checker.fix_movies_gone, + "fix movies gone", + args=(new_movies_directory,)) @eventloop.idle_iterator def on_frontend_started(): """Perform startup actions that should happen after the frontend is already up and running. + + This function happens using an idle iterator. Before/after code that + could take a while to run, we yield to other eventloop callbacks. 
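A standalone sketch of that idle-iterator pattern: the generator performs one chunk of deferred startup work per resume, and the event loop pulls from it only when it has nothing more urgent to do (the step names here are invented for illustration):

    import logging

    def deferred_startup(steps):
        for name, step in steps:
            logging.info("startup step: %s", name)
            step()
            yield None  # hand control back to the event loop

    it = deferred_startup([("expire items", lambda: None),
                           ("check updates", lambda: None)])
    for _ in it:
        pass  # a real event loop resumes this between other callbacks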
""" + conversions.conversion_manager.startup() + + app.sharing_tracker = sharing.SharingTracker() + app.sharing_manager.startup() + app.sharing_tracker.start_tracking() + + app.device_manager = devices.DeviceManager() + app.device_tracker = devicetracker.DeviceTracker() + app.device_tracker.start_tracking() + + reconnect_downloaders() + guide.download_guides() + feed.remove_orphaned_feed_impls() + + app.download_state_manager.init_controller() + itemsource.setup_handlers() + if app.frontend_name == 'widgets': + app.donate_manager = donate.DonateManager() + else: + logging.warn("frontend is %s, not starting DonateManager()", + app.frontend_name) + logging.info("Starting auto downloader...") autodler.start_downloader() + app.icon_cache_updater.start_updates() yield None feed.expire_items() yield None - moviedata.movie_data_updater.start_thread() - yield None commandline.startup() yield None autoupdate.check_for_updates() yield None + app.local_metadata_manager.schedule_retry_net_lookup() # Delay running high CPU/IO operations for a bit - eventloop.add_timeout(5, downloader.startup_downloader, + eventloop.add_timeout(5, app.download_state_manager.startup_downloader, "start downloader daemon") eventloop.add_timeout(10, workerprocess.startup, "start worker process") eventloop.add_timeout(20, item.start_deleted_checker, "start checking deleted items") eventloop.add_timeout(30, feed.start_updates, "start feed updates") - eventloop.add_timeout(60, item.update_incomplete_movie_data, - "update movie data") + eventloop.add_timeout(60, item.update_incomplete_metadata, + "update metadata data") eventloop.add_timeout(90, clear_icon_cache_orphans, "clear orphans") def setup_global_feeds(): @@ -470,16 +553,16 @@ def should_create_movies_directory(path): """Figure out if we should create the movies directory if it's missing.""" - if sys.platform == 'darwin' and path.startswith("/Volumes/"): - # Hack to fix #17826. Don't try to create new directories in the - # mount points on OS X. - return False - return True - -def is_movies_directory_gone(): - """Checks to see if the MOVIES_DIRECTORY exists. - - Returns True if yes, False if no. + # We should only do this if the directory is the default directory. This + # avoids trying to create files on unmonted filesystems (#17826) + return path == app.config.get_platform_default(prefs.MOVIES_DIRECTORY) + +def check_movies_directory_unmounted(): + """Checks to see MOVIES_DIRECTORY has been unmounted. + + Our hueristic is to check if there are any files in the directory. If + it's totally empty, and we think that we should have a downloaded file in + it, then we return True. 
""" movies_dir = fileutil.expand_filename(app.config.get( prefs.MOVIES_DIRECTORY)) @@ -594,6 +677,7 @@ downloader_.remove() manualItems = item.Item.feed_view(feed.Feed.get_manual_feed().get_id()) for item_ in manualItems: - if item_.downloader is None and item_.__class__ == item.Item: + if (item_.__class__ == item.Item and not item_.has_downloader() and + not item_.pending_manual_download): logging.warn("removing cancelled external torrent: %s", item_) item_.remove() diff -Nru miro-4.0.4/lib/storedatabase.py miro-6.0/lib/storedatabase.py --- miro-4.0.4/lib/storedatabase.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/storedatabase.py 2013-04-05 16:02:42.000000000 +0000 @@ -71,15 +71,17 @@ from miro import dialogs from miro import eventloop from miro import fileutil -from miro import iteminfocache from miro import messages from miro import schema +from miro import signals from miro import prefs from miro import util +from miro.data import fulltextsearch +from miro.data import item from miro.gtcache import gettext as _ from miro.plat.utils import PlatformFilenameType, filename_to_unicode -class UpgradeError(Exception): +class UpgradeError(StandardError): """While upgrading the database, we ran out of disk space.""" pass @@ -97,40 +99,316 @@ schema.SchemaURL: 'text', schema.SchemaInt: 'integer', schema.SchemaDateTime: 'timestamp', - schema.SchemaTimeDelta: 'pythonrepr', + schema.SchemaTimeDelta: 'text', schema.SchemaReprContainer: 'pythonrepr', schema.SchemaTuple: 'pythonrepr', schema.SchemaDict: 'pythonrepr', schema.SchemaList: 'pythonrepr', - schema.SchemaStatusContainer: 'pythonrepr', schema.SchemaFilename: 'text', + schema.SchemaStringSet: 'text', } VERSION_KEY = "Democracy Version" -def split_values_for_sqlite(value_list): - """Split a list of values into chunks that SQL can handle. +class DatabaseObjectCache(object): + """Handles caching objects for a database. - The cursor.execute() method can only handle 999 values at once, this - method splits long lists into chunks where each chunk has is safe to feed - to sqlite. + This class implements a generic caching system for DDBObjects. Other + components can use it reduce the number of database queries they run. """ - CHUNK_SIZE = 990 # use 990 just to be on the safe side. - for start in xrange(0, len(value_list), CHUNK_SIZE): - yield value_list[start:start+CHUNK_SIZE] + def __init__(self): + # map (category, cache_key) to objects + self._objects = {} + + def set(self, category, cache_key, obj): + """Add an object to the cache + + category is an arbitrary name used to separate different caches. Each + component that uses DatabaseObjectCache should use a different + category. 
+ + :param category: unique string + :param key: key to retrieve the object with + :param obj: object to add + """ + self._objects[(category, cache_key)] = obj + + def get(self, category, cache_key): + """Get an object from the cache + + :param category: category from set + :param key: key from set + :returns: object passed in with set + :raises KeyError: object not in cache + """ + return self._objects[(category, cache_key)] + + def key_exists(self, category, cache_key): + """Test if an object is in the cache + + :param category: category from set + :param key: key from set + :returns: if an object is present with that key + """ + return (category, cache_key) in self._objects + + def remove(self, category, cache_key): + """Remove an object from the cache + + :param category: category from set + :param key: key from set + :raises KeyError: object not in cache + """ + del self._objects[(category, cache_key)] + + def clear(self, category): + """Clear all objects in a category. + + :param category: category to clear + """ + for key in self._objects.keys(): + if key[0] == category: + del self._objects[key] + + def clear_all(self): + """Clear all objects in the cache""" + self._objects = {} + +class LiveStorageErrorHandler(object): + """Handle database errors for LiveStorage. + """ + + ( ACTION_QUIT, ACTION_SUBMIT_REPORT, ACTION_START_FRESH, ACTION_RETRY, + ACTION_USE_TEMPORARY, ACTION_RERAISE, ) = range(6) + + def handle_load_error(self): + """Handle an error loading the database. + + When LiveStorage hits a load error, it always deletes the database and + starts fresh. The only thing to to here is inform the user + """ + title = _("%(appname)s database corrupt.", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + description = _( + "Your %(appname)s database is corrupt. It will be " + "backed up in your Miro database directory and a new " + "database will be created now.", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + dialogs.MessageBoxDialog(title, description).run_blocking() + + def handle_open_error(self): + """Handle an error opening a new database + + This method should return one of the following: + - ACTION_RERAISE -- Just re-raise the error + - ACTION_USE_TEMPORARY -- Use an in-memory database for now and try to + save the database to disk every so often. + """ + return self.ACTION_RERAISE + + def handle_upgrade_error(self): + """Handle an error upgrading the database. + Returns one of the class attribute constants: + - ACTION_QUIT -- close miro immediately + - ACTION_SUBMIT_REPORT -- send a crash report, then close + - ACTION_START_FRESH -- start with a fresh database + """ + title = _("%(appname)s database upgrade failed", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + description = _( + "We're sorry, %(appname)s was unable to upgrade your database " + "due to errors.\n\n" + "Check to see if your disk is full. 
If it is full, then quit " + "%(appname)s, free up some space, and start %(appname)s " + "again.\n\n" + "If your disk is not full, help us understand the problem by " + "reporting a bug to our crash database.\n\n" + "Finally, you can start fresh and your damaged database will be " + "removed, but you will have to re-add your podcasts and media " + "files.", {"appname": app.config.get(prefs.SHORT_APP_NAME)} + ) + d = dialogs.ThreeChoiceDialog(title, description, + dialogs.BUTTON_QUIT, dialogs.BUTTON_SUBMIT_REPORT, + dialogs.BUTTON_START_FRESH) + choice = d.run_blocking() + if choice == dialogs.BUTTON_START_FRESH: + return self.ACTION_START_FRESH + elif choice == dialogs.BUTTON_SUBMIT_REPORT: + return self.ACTION_SUBMIT_REPORT + else: + return self.ACTION_QUIT -class LiveStorage: + def handle_save_error(self, error_text, integrity_check_passed): + """Handle an error when trying to save the database. + + Returns one of the class attribute constants: + - ACTION_QUIT -- close miro immediately + - ACTION_RETRY -- try running the statement again + - ACTION_USE_TEMPORARY -- start fresh using a temporary database + """ + + title = _("%(appname)s database save failed", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + description = _( + "%(appname)s was unable to save its database.\n\n" + "If your disk is full, we suggest freeing up some space and " + "retrying. If your disk is not full, it's possible that " + "retrying will work.\n\n" + "If retrying did not work, please quit %(appname)s and restart. " + "Recent changes may be lost.\n\n" + "If you see this error often while downloading, we suggest " + "you reduce the number of simultaneous downloads in the Options " + "dialog in the Download tab.\n\n" + "Error: %(error_text)s\n\n", + {"appname": app.config.get(prefs.SHORT_APP_NAME), + "error_text": error_text} + ) + d = dialogs.DatabaseErrorDialog(title, description) + if d.run_blocking() == dialogs.BUTTON_RETRY: + return self.ACTION_RETRY + else: + return self.ACTION_QUIT + + def handle_save_succeeded(self): + """Handle a successful save after retrying + + This will only be called if handle_save_error return ACTION_RETRY. + """ + + title = _("%(appname)s database save succeeded", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + description = _("The database has been successfully saved. " + "It is now safe to quit without losing any data.") + dialogs.MessageBoxDialog(title, description).run() + +class DeviceLiveStorageErrorHandler(LiveStorageErrorHandler): + """Handle database errors for LiveStorage on a device. + """ + def __init__(self, name): + self.name = name + + def handle_open_error(self): + return self.ACTION_USE_TEMPORARY + + def handle_load_error(self): + title = _("database for device %(name)s corrupt.", + {'name' : self.name}) + description = _( + "The %(appname)s database on your device is corrupt and a " + "new one will be created.", + {"appname": app.config.get(prefs.SHORT_APP_NAME)}) + dialogs.MessageBoxDialog(title, description).run_blocking() + + def handle_upgrade_error(self): + self.handle_load_error() + return self.ACTION_START_FRESH + + def handle_save_error(self, error_text, integrity_check_passed): + if not integrity_check_passed: + # If the database is corrupt, just start over + logging.warn("Database for %s is corrupt. 
Using temporary " + "database", self.name) + return self.ACTION_USE_TEMPORARY + + + title = _("Database save failed for device %(name)s.", + {'name' : self.name}) + description = _( + "%(appname)s was unable to save its database on %(device)s.\n\n" + "If your device is full, we suggest freeing up some space and " + "retrying. If your disk is not full, it's possible that " + "retrying will work.\n\n" + "If retrying does not work select start fresh to reset the " + "database to a new one.", { + "appname": app.config.get(prefs.SHORT_APP_NAME), + "device": self.name, + }) + d = dialogs.ChoiceDialog(title, description, + dialogs.BUTTON_RETRY, dialogs.BUTTON_START_FRESH) + if d.run_blocking() == dialogs.BUTTON_START_FRESH: + # we return ACTION_USE_TEMPORARY because we will create a + # temporary database to start fresh, since it's very likely that + # loading a new database will fail. With a temporary database we + # will try to save it to the disk every so often anyways. + return self.ACTION_USE_TEMPORARY + else: + return self.ACTION_RETRY + + def handle_save_succeeded(self): + title = _("Device database save succeeded") + description = _("The database for %(device)s has been successfully " + "saved. ", {'device': self.name}) + dialogs.MessageBoxDialog(title, description).run() + +class SharingLiveStorageErrorHandler(LiveStorageErrorHandler): + """Handle database errors for LiveStorage on for a share. + + We always create a new database for shares, so there shouldn't be any + errors. If there are, we always start fresh + """ + def __init__(self, name): + self.name = name + + def handle_open_error(self): + return self.ACTION_RERAISE + + def handle_load_error(self): + return + + def handle_upgrade_error(self): + return self.ACTION_START_FRESH + + def handle_save_error(self, error_text, integrity_check_passed): + # FIXME: we should handle this. + # + # We shouldn't ever get save errors, so I think the best way to deal + # with it is simply throw up an error dialog and remove the share tab + raise NotImplementedError() + + def handle_save_succeeded(self): + pass + +class LiveStorage(signals.SignalEmitter): """Handles the storage of DDBObjects. This class does basically two things: - 1. Loads the initial object list (and runs database upgrades) - 2. Handles updating the database based on changes to DDBObjects. + - Loads the initial object list (and runs database upgrades) + - Handles updating the database based on changes to DDBObjects. + + Attributes: + + - cache -- DatabaseObjectCache object + + Signals: + + - transaction-finished(success) -- We committed or rolled back a + transaction """ - def __init__(self, path=None, object_schemas=None, schema_version=None): + def __init__(self, path=None, error_handler=None, preallocate=None, + object_schemas=None, schema_version=None, + start_in_temp_mode=False): + """Create a LiveStorage for a database + + :param path: path to the database (or ":memory:") + :param error_handler: LiveStorageErrorHandler to use + :param preallocate: Ensure that approximately at least this much space + is allocated for the database file + :param object_schemas: list of schemas to use. Defaults to + schema.object_schemas + :param schema_version: current version of the schema for upgrading + purposes. Defaults to schema.VERSION. 
+ :param start_in_temp_mode: True if this database should start in
+ temporary mode (running in memory, but
+ checking if it can write to the disk)
+ """
+ signals.SignalEmitter.__init__(self)
+ self.create_signal("transaction-finished")
if path is None:
path = app.config.get(prefs.SQLITE_PATHNAME)
+ if error_handler is None:
+ error_handler = LiveStorageErrorHandler()
if object_schemas is None:
object_schemas = schema.object_schemas
if schema_version is None:
@@ -148,9 +426,12 @@
except AttributeError:
logging.info("sqlite3 has no version attribute.")

- db_existed = os.path.exists(path)
+ self.temp_mode = False
+ self.preallocate = preallocate
+ self.error_handler = error_handler
+ self.cache = DatabaseObjectCache()
self.raise_load_errors = False # only gets set in unittests
- self._dc = None
+ self.force_directory_creation = True # False for device databases
self._query_times = {}
self.path = path
self._quitting_from_operational_error = False
@@ -173,48 +454,201 @@
self._schema_column_map[oschema, name] = schema_item
self._converter = SQLiteConverter()

- self.open_connection()
+ self.open_connection(start_in_temp_mode=start_in_temp_mode)

- if not db_existed:
+ self.created_new = self._calc_created_new()
+ if self.created_new:
self._init_database()
+ if self.preallocate:
+ self._preallocate_space()

- def open_connection(self, path=None):
+ def open_connection(self, path=None, start_in_temp_mode=False):
if path is None:
path = self.path
- logging.info("opening database %s", path)
- self.connection = sqlite3.connect(path,
- isolation_level=None,
- detect_types=sqlite3.PARSE_DECLTYPES)
+ if start_in_temp_mode:
+ self._switch_to_temp_mode()
+ else:
+ self._ensure_database_directory_exists(path)
+ logging.info("opening database %s", path)
+ try:
+ self.connection = sqlite3.connect(path,
+ isolation_level=None,
+ detect_types=sqlite3.PARSE_DECLTYPES)
+ except sqlite3.DatabaseError, e:
+ logging.warn("Error opening sqlite database: %s", e)
+ action = self.error_handler.handle_open_error()
+ if action == LiveStorageErrorHandler.ACTION_RERAISE:
+ raise
+ elif action == LiveStorageErrorHandler.ACTION_USE_TEMPORARY:
+ logging.warn("Error opening database %s. Opening an "
+ "in-memory database instead", path)
+ self._switch_to_temp_mode()
+ else:
+ logging.warn("Bad return value for handle_open_error: %s",
+ action)
+ raise
+ self.cursor = self.connection.cursor()
+ if path != ':memory:' and not self.temp_mode:
+ self._switch_to_wal_mode()
+
+ def _switch_to_wal_mode(self):
+ """Switch to write-ahead logging mode for our connection
+
+ WAL mode allows for better concurrency between readers and writers and
+ is generally faster than other modes. See:
+ http://www.sqlite.org/wal.html
+ """
try:
- self.cursor.execute("PRAGMA journal_mode=PERSIST");
+ self.cursor.execute("PRAGMA journal_mode=wal");
except sqlite3.DatabaseError:
- msg = "Error running 'PRAGMA journal_mode=PERSIST'"
- self._show_corrupt_db_dialog()
- self._handle_load_error(msg)
+ msg = "Error running 'PRAGMA journal_mode=wal'"
+ self.error_handler.handle_load_error()
+ self._handle_load_error(msg, init_schema=False)
# rerun the command with our fresh database
- self.cursor.execute("PRAGMA journal_mode=PERSIST");
+ self.cursor.execute("PRAGMA journal_mode=wal");
+ # check that we actually successfully switched to wal mode
+ actual_mode = self.cursor.fetchall()[0][0]
+ if actual_mode != u'wal' and not hasattr(app, 'in_unit_tests'):
+ logging.warn("PRAGMA journal_mode=wal didn't change the "
+ "mode. 
journal_mode=%s", actual_mode) + + def _ensure_database_directory_exists(self, path): + if not self.force_directory_creation: + return + if path != ':memory:' and not os.path.exists(os.path.dirname(path)): + os.makedirs(os.path.dirname(path)) + + def _switch_to_temp_mode(self): + """Switch to temporary mode. + + In temporary mode, we use an in-memory database and try to save it to + disk every 5 minutes. Temporary mode is used to handle errors when + trying to open a database file. + """ + self.connection = sqlite3.connect(':memory:', + isolation_level=None, + detect_types=sqlite3.PARSE_DECLTYPES) + self.temp_mode = True + eventloop.add_timeout(300, + self._try_save_temp_to_disk, + "write in-memory sqlite database to disk") + + def _try_save_temp_to_disk(self): + if not self.temp_mode: # already fixed, move along + return + try: + self._change_path(self.path) + except StandardError, e: + logging.warn("_try_save_temp_to_disk failed: %s (path: %s)", e, + self.path, exc_info=True) + eventloop.add_timeout(300, + self._try_save_temp_to_disk, + "write in-memory sqlite database to disk") + else: + logging.warn("Sucessfully wrote database to %s. Changes " + "will now be saved as normal.", self.path) - def close(self, ignore_vacuum_error=True): - logging.info("closing database") - if self._dc: - self._dc.cancel() - self._dc = None + def _copy_data_to_path(self, new_path): + """Copy the contents of our database to a new file. """ self.finish_transaction() + # add the database at new_path to our current connection + self._ensure_database_directory_exists(new_path) + # delete any data currently at new_path + if os.path.exists(new_path): + os.remove(new_path) + self.cursor.execute("ATTACH ? as newdb", + (filename_to_unicode(new_path),)) + self.cursor.execute("BEGIN TRANSACTION") + try: + self._copy_data_to_newdb() + finally: + self.cursor.execute("COMMIT TRANSACTION") + self.cursor.execute("DETACH newdb") + + def _copy_data_to_newdb(self): + # copy current schema + self.cursor.execute("SELECT name, sql FROM main.sqlite_master " + "WHERE type='table'") + def should_recreate_table(table_name): + if (table_name.endswith("fts_content") or + table_name.endswith("fts_segments") or + table_name.endswith("fts_stat") or + table_name.endswith("fts_docsize") or + table_name.endswith("fts_segdir")): + # these tables are auto-generated by the fts4 code + return False + return True + table_info = [(table, sql) for (table, sql) in self.cursor.fetchall() + if should_recreate_table(table)] - # the unittests run in memory and vacuum causes a segfault if - # the db is in memory. - if self.path != ":memory:" and self.connection and self.cursor: - logging.info("Vacuuming the db before shutting down.") - try: - self.cursor.execute("vacuum") - except sqlite3.DatabaseError, sdbe: - if ignore_vacuum_error: - msg = "... Vacuuming failed with DatabaseError: %s" - logging.info(msg, sdbe) - else: - raise + for table, sql in table_info: + sql = sql.replace("TABLE %s" % table, + "TABLE newdb.%s" % table) + self.cursor.execute(sql) + self.cursor.execute("SELECT name, sql FROM sqlite_master " + "WHERE type='index' AND tbl_name=?", + (table,)) + for index, sql in self.cursor.fetchall(): + if index.startswith('sqlite_autoindex'): + continue + sql = sql.replace("INDEX %s" % index, + "INDEX newdb.%s" % index) + self.cursor.execute(sql) + # preallocate space now. 
We want to fail fast if the disk is full + if self.preallocate: + self._preallocate_space(db_name='newdb') + + # copy data + for table, sql in table_info: + self.cursor.execute("INSERT INTO newdb.%s SELECT * FROM main.%s" % + (table, table)) + + # create triggers + self.cursor.execute("SELECT name, sql FROM sqlite_master " + "WHERE type='trigger'") + for (name, sql,) in self.cursor.fetchall(): + self.cursor.execute(sql.replace(name, "newdb." + name)) + + def _change_path(self, new_path): + """Change the path of our database. + + This method copies the entire current database to new_path, then opens + a connection to it. + """ + self._copy_data_to_path(new_path) + # Looks like everything worked. Change to using a connection to the + # new database + self.path = new_path self.connection.close() + self.open_connection() + self.temp_mode = False + + def check_integrity(self): + """Run an integrity check on our database + + :returns True if the integrity check passed. + """ + + try: + self.cursor.execute("PRAGMA integrity_check") + return self.cursor.fetchall() == [ + ('ok',), + ] + except sqlite3.DatabaseError: + logging.warn("error running PRAGMA integrity_check: %s", + exc_info=True) + return False + + def close(self): + if self.connection is not None: + logging.info("closing database") + self.finish_transaction() + self.connection.close() + self.connection = None + + def is_closed(self): + return self.connection is None def get_backup_directory(self): """This returns the backup directory path. @@ -241,52 +675,43 @@ self.get_backup_directory(), LiveStorage.backup_filename_prefix + "*")) - def upgrade_database(self): - """Run any database upgrades that haven't been run.""" + def upgrade_database(self, context='main'): + """Run any database upgrades that haven't been run. + + :param context: context for the upgrade, either "main" for the main + database or "device" for the device database. + """ try: - self._upgrade_database() - except (KeyError, SystemError, - databaseupgrade.DatabaseTooNewError): - raise - except Exception, e: + self._upgrade_database(context) + except StandardError, e: logging.exception('error when upgrading database: %s', e) self._handle_upgrade_error() def _backup_failed_upgrade_db(self): save_name = self._find_unused_db_name(self.path, "failed_upgrade_database") path = os.path.join(os.path.dirname(self.path), save_name) - shutil.copyfile(self.path, path) + self._copy_data_to_path(path) logging.warn("upgrade failed. Backing up database to %s", path) def _handle_upgrade_error(self): + # commit any unsaved changes that the upgrade was in the process of + # making + self.cursor.execute("COMMIT TRANSACTION") self._backup_failed_upgrade_db() - title = _("%(appname)s database upgrade failed", - {"appname": app.config.get(prefs.SHORT_APP_NAME)}) - description = _( - "We're sorry, %(appname)s was unable to upgrade your database " - "due to errors.\n\n" - "Check to see if your disk is full. 
If it is full, then quit " - "%(appname)s, free up some space, and start %(appname)s " - "again.\n\n" - "If your disk is not full, help us understand the problem by " - "reporting a bug to our crash database.\n\n" - "Finally, you can start fresh and your damaged database will be " - "removed, but you will have to re-add your podcasts and media " - "files.", {"appname": app.config.get(prefs.SHORT_APP_NAME)} - ) - d = dialogs.ThreeChoiceDialog(title, description, - dialogs.BUTTON_QUIT, dialogs.BUTTON_SUBMIT_REPORT, - dialogs.BUTTON_START_FRESH) - choice = d.run_blocking() - if choice == dialogs.BUTTON_START_FRESH: + action = self.error_handler.handle_upgrade_error() + if action == LiveStorageErrorHandler.ACTION_START_FRESH: self._handle_load_error("Error upgrading database") - self.startup_version = self.current_version = self._get_version() - elif choice == dialogs.BUTTON_SUBMIT_REPORT: + self.startup_version = self.current_version = self.get_version() + elif action == LiveStorageErrorHandler.ACTION_SUBMIT_REPORT: report = crashreport.format_crash_report("Upgrading Database", exc_info=sys.exc_info(), details=None) raise UpgradeErrorSendCrashReport(report) - else: + elif action == LiveStorageErrorHandler.ACTION_QUIT: raise UpgradeError() + else: + logging.warn("Bad return value for handle_upgrade_error: %s", + action) + raise def _change_database_file(self, ver): """Switches the sqlitedb file that we have open @@ -301,22 +726,21 @@ :param ver: the current version (as string) """ logging.info("database path: %s", self.path) - # close database - self.close(ignore_vacuum_error=False) # copy the db to a backup file for posterity target_path = self.get_backup_directory() save_name = self._find_unused_db_name( target_path, "%s_%s" % (LiveStorage.backup_filename_prefix, ver)) - shutil.copyfile(self.path, os.path.join(target_path, save_name)) + self._copy_data_to_path(os.path.join(target_path, save_name)) # copy the db to the file we're going to operate on target_path = os.path.dirname(self.path) save_name = self._find_unused_db_name( target_path, "upgrading_database_%s" % ver) - shutil.copyfile(self.path, os.path.join(target_path, save_name)) + self._copy_data_to_path(os.path.join(target_path, save_name)) self._changed_db_path = os.path.join(target_path, save_name) + self.connection.close() self.open_connection(self._changed_db_path) def _change_database_file_back(self): @@ -327,13 +751,19 @@ database we were using to the normal place, and switches our sqlite connection to use that file """ - self.close(ignore_vacuum_error=False) + # _changed_db_path uses the default journal mode instead of WAL mode, + # so we can do a simple move here instead of using + # _copy_data_to_path() + self.connection.close() shutil.move(self._changed_db_path, self.path) self.open_connection() del self._changed_db_path - def _upgrade_database(self): - self.startup_version = current_version = self._get_version() + def show_upgrade_progress(self): + return True + + def _upgrade_database(self, context): + self.startup_version = current_version = self.get_version() if current_version > self._schema_version: msg = _("Database was created by a newer version of %(appname)s " @@ -346,13 +776,16 @@ self._upgrade_20_database() # need to pull the variable again here because # _upgrade_20_database will have done an upgrade - dbupgradeprogress.doing_new_style_upgrade() - current_version = self._get_version() + if self.show_upgrade_progress(): + dbupgradeprogress.doing_new_style_upgrade() + current_version = self.get_version() 
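The pre-upgrade backups and _change_database_file() above both go through the ATTACH-based copy in _copy_data_to_path(). The core of that technique in isolation (table name and target path invented for the example; the real code first removes any existing file at the target):

    import sqlite3

    src = sqlite3.connect(':memory:')
    src.execute("CREATE TABLE stuff (id INTEGER PRIMARY KEY, title TEXT)")
    src.execute("INSERT INTO stuff VALUES (1, 'hello')")
    cur = src.cursor()
    cur.execute("ATTACH '/tmp/backup.db' AS newdb")
    # CREATE TABLE ... AS SELECT copies rows but not indexes or
    # triggers, which is why _copy_data_to_newdb() replays the original
    # CREATE statements from sqlite_master instead
    cur.execute("CREATE TABLE newdb.stuff AS SELECT * FROM main.stuff")
    cur.execute("DETACH newdb")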
self._change_database_file(current_version) databaseupgrade.new_style_upgrade(self.cursor, current_version, - self._schema_version) - self._set_version() + self._schema_version, + context, + self.show_upgrade_progress()) + self.set_version() self._change_database_file_back() self.current_version = self._schema_version @@ -360,7 +793,7 @@ self.cursor.execute("SELECT COUNT(*) FROM sqlite_master " "WHERE type='table' and name = 'dtv_objects'") if self.cursor.fetchone()[0] > 0: - current_version = self._get_version() + current_version = self.get_version() if current_version >= 80: # we have a dtv_objects table, but we also have a database # that's been converted to the new-style. What happened was @@ -372,12 +805,14 @@ else: # Need to update an old-style database self._change_database_file("pre80") - dbupgradeprogress.doing_20_upgrade() + if self.show_upgrade_progress(): + dbupgradeprogress.doing_20_upgrade() if util.chatter: logging.info("converting pre 2.1 database") - convert20database.convert(self.cursor) - self._set_version(80) + convert20database.convert(self.cursor, + self.show_upgrade_progress()) + self.set_version(80) self._change_database_file_back() def get_variable(self, name): @@ -388,31 +823,64 @@ raise KeyError(name) return cPickle.loads(str(row[0])) - def set_variable(self, name, value): + def set_variable(self, name, value, db_name='main'): # we only store one variable and it's easier to deal with if we store # it using ASCII-base protocol. db_value = buffer(cPickle.dumps(value, 0)) - self.cursor.execute("REPLACE INTO dtv_variables " - "(name, serialized_value) VALUES (?,?)", (name, db_value)) + self.execute("REPLACE INTO %s.dtv_variables " + "(name, serialized_value) VALUES (?,?)" % db_name, + (name, db_value), + is_update=True) + self.finish_transaction() + + def unset_variable(self, name, db_name='main'): + self.execute("DELETE FROM %s.dtv_variables " + "WHERE name=?" % db_name, + (name,), is_update=True) + self.finish_transaction() def _create_variables_table(self): self.cursor.execute("""CREATE TABLE dtv_variables( name TEXT PRIMARY KEY NOT NULL, serialized_value BLOB NOT NULL);""") + def simulate_db_save_error(self): + """Simulate trying to save something to the database and getting an + Operational Error. + """ + # The below code is fairly dirty, but it's only executed from a devel + # menu item, so it should be okay + # Make it so that the next attempt (and only that attempt) to execute + # a query results in an error. 
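The dtv_variables table used by get_variable()/set_variable() above is a tiny pickled key/value store; VERSION_KEY ("Democracy Version") is its most important entry. A self-contained round trip in the same style:

    import cPickle
    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute("CREATE TABLE dtv_variables ("
                 "name TEXT PRIMARY KEY NOT NULL, "
                 "serialized_value BLOB NOT NULL)")
    # protocol 0 keeps the pickle ASCII, as set_variable() does
    blob = buffer(cPickle.dumps(200, 0))
    conn.execute("REPLACE INTO dtv_variables VALUES (?, ?)",
                 ('Democracy Version', blob))
    row = conn.execute("SELECT serialized_value FROM dtv_variables "
                       "WHERE name=?", ('Democracy Version',)).fetchone()
    assert cPickle.loads(str(row[0])) == 200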
+ old_time_execute = self._time_execute + def time_execute_intercept(*args, **kwargs): + self._time_execute = old_time_execute + raise sqlite3.DatabaseError() + self._time_execute = time_execute_intercept + # force the db to execute sql + self.execute("REPLACE INTO dtv_variables " + "(name, serialized_value) VALUES (?,?)", + ('simulate_db_save_error', 1), is_update=True) + def remember_object(self, obj): - self._object_map[obj.id] = obj - self._ids_loaded.add(obj.id) + key = (obj.id, obj.db_info.db.table_name(obj.__class__)) + self._object_map[key] = obj + self._ids_loaded.add(key) def forget_object(self, obj): + key = (obj.id, obj.db_info.db.table_name(obj.__class__)) try: - del self._object_map[obj.id] + del self._object_map[key] except KeyError: details = ('storedatabase.forget_object: ' 'key error in forget_object: %s (obj: %s)' % (obj.id, obj)) logging.error(details) - self._ids_loaded.discard(obj.id) + self._ids_loaded.discard(key) + + def forget_all_objects(self): + self._object_map = {} + self._ids_loaded = set() def _insert_sql_for_schema(self, obj_schema): return "INSERT INTO %s (%s) VALUES(%s)" % (obj_schema.table_name, @@ -425,9 +893,8 @@ value = getattr(obj, name) try: schema_item.validate(value) - except schema.ValidationError: - if util.chatter: - logging.warn("error validating %s for %s", name, obj) + except schema.ValidationError, e: + logging.warn("error validating %s for %s (%s)", name, obj, e) raise values.append(self._converter.to_sql(obj_schema, name, schema_item, value)) @@ -439,7 +906,7 @@ obj_schema = self._schema_map[obj.__class__] values = self._values_for_obj(obj_schema, obj) sql = self._insert_sql_for_schema(obj_schema) - self._execute(sql, values, is_update=True) + self.execute(sql, values, is_update=True) obj.reset_changed_attributes() def bulk_insert(self, objects): @@ -457,7 +924,7 @@ raise ValueError("Incompatible types for bulk insert") value_list.append(self._values_for_obj(obj_schema, obj)) sql = self._insert_sql_for_schema(obj_schema) - self._execute(sql, value_list, is_update=True, many=True) + self.execute(sql, value_list, is_update=True, many=True) for obj in objects: obj.reset_changed_attributes() @@ -476,8 +943,7 @@ try: schema_item.validate(value) except schema.ValidationError: - if util.chatter: - logging.warn("error validating %s for %s", name, obj) + logging.warn("error validating %s for %s", name, obj) raise values.append(self._converter.to_sql(obj_schema, name, schema_item, value)) @@ -485,7 +951,7 @@ if values: sql = "UPDATE %s SET %s WHERE id=%s" % (obj_schema.table_name, ', '.join(setters), obj.id) - self._execute(sql, values, is_update=True) + self.execute(sql, values, is_update=True) if (self.cursor.rowcount != 1 and not self._quitting_from_operational_error): if self.cursor.rowcount == 0: @@ -501,7 +967,7 @@ schema = self._schema_map[obj.__class__] sql = "DELETE FROM %s WHERE id=?" % (schema.table_name) - self._execute(sql, (obj.id,), is_update=True) + self.execute(sql, (obj.id,), is_update=True) self.forget_object(obj) def bulk_remove(self, objects): @@ -519,21 +985,19 @@ raise ValueError("Incompatible types for bulk remove") # we can only feed sqlite so many variables at once, send it chunks of # 900 ids at once - for objects_chunk in split_values_for_sqlite(objects): + for objects_chunk in util.split_values_for_sqlite(objects): commas = ','.join('?' 
for x in xrange(len(objects_chunk))) sql = "DELETE FROM %s WHERE id IN (%s)" % (obj_schema.table_name, commas) - self._execute(sql, [o.id for o in objects_chunk], is_update=True) + self.execute(sql, [o.id for o in objects_chunk], is_update=True) for obj in objects: self.forget_object(obj) def get_last_id(self): try: return self._get_last_id() - except databaseupgrade.DatabaseTooNewError: - raise except StandardError: - self._show_corrupt_db_dialog() + self.error_handler.handle_load_error() self._handle_load_error("Error calculating last id") return self._get_last_id() @@ -544,21 +1008,27 @@ max_id = max(max_id, self.cursor.fetchone()[0]) return max_id - def get_obj_by_id(self, id_): + def get_obj_by_id(self, id_, klass): """Get a particular DDBObject. This will throw a KeyError if id is not in the database, or if the object for id has not been loaded yet. """ - return self._object_map[id_] + return self._object_map[(id_, self.table_name(klass))] - def id_alive(self, id_): - """Check if an id_ is exists and is loaded in the databes.""" - return id_ in self._object_map + def id_alive(self, id_, klass): + """Check if an id exists and is loaded in the database.""" + return (id_, self.table_name(klass)) in self._object_map + + def fetch_item_infos(self, item_ids): + return item.fetch_item_infos(self.connection, item_ids) def table_name(self, klass): return self._schema_map[klass].table_name + def schema_fields(self, klass): + return self._schema_map[klass].fields + def object_from_class_table(self, obj, klass): return self._schema_map[klass] is self._schema_map[obj.__class__] @@ -576,29 +1046,20 @@ sql.write(" LIMIT %s" % limit) return sql.getvalue() - def query(self, klass, where, values=None, order_by=None, joins=None, - limit=None): - schema = self._schema_map[klass] - id_list = list(self.query_ids(schema.table_name, where, values, - order_by, joins, - limit)) - if self.ensure_objects_loaded(klass, id_list): - # sometimes objects will call remove() in setup_restored(). - # We need to filter those out. - id_list = [id_ for id_ in id_list if id_ in self._object_map] - for id_ in id_list: - yield self._object_map[id_] - - def ensure_objects_loaded(self, klass, id_list): + def ensure_objects_loaded(self, klass, id_list, db_info): """Ensure that a list of ids are loaded into memory. :returns: True iff we needed to load objects """ - unrestored_ids = set(id_list).difference(self._ids_loaded) + table_name = self.table_name(klass) + unrestored_ids = [] + for id_ in id_list: + if (id_, table_name) not in self._ids_loaded: + unrestored_ids.append(id_) if unrestored_ids: # restore any objects that we don't already have in memory. 
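util.split_values_for_sqlite(), used by bulk_remove() above and by the restore loop below, works around SQLite's 999-bound-parameter limit. The original helper this patch moves into util looked like this (chunk size parameterized here for illustration):

    def split_values_for_sqlite(value_list, chunk_size=990):
        # cursor.execute() tops out at 999 bound values, so long id
        # lists are fed to the database in slices
        for start in range(0, len(value_list), chunk_size):
            yield value_list[start:start + chunk_size]

    # e.g. a chunked bulk delete:
    #   for chunk in split_values_for_sqlite(ids):
    #       marks = ','.join('?' for _ in chunk)
    #       cursor.execute("DELETE FROM item WHERE id IN (%s)" % marks,
    #                      chunk)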
schema = self._schema_map[klass] - self._restore_objects(schema, unrestored_ids) + self._restore_objects(schema, unrestored_ids, db_info) return True return False @@ -611,14 +1072,14 @@ self.cursor.execute(sql.getvalue(), values) return (row[0] for row in self.cursor.fetchall()) - def _restore_objects(self, schema, id_set): + def _restore_objects(self, schema, id_set, db_info): column_names = ['%s.%s' % (schema.table_name, f[0]) for f in schema.fields] # we can only feed sqlite so many variables at once, send it chunks of # 900 ids at once id_list = tuple(id_set) - for id_list_chunk in split_values_for_sqlite(id_list): + for id_list_chunk in util.split_values_for_sqlite(id_list): sql = StringIO() sql.write("SELECT %s " % (', '.join(column_names),)) sql.write("FROM %s WHERE id IN (%s)" % (schema.table_name, @@ -626,9 +1087,9 @@ self.cursor.execute(sql.getvalue(), id_list_chunk) for row in self.cursor.fetchall(): - self._restore_object_from_row(schema, row) + self._restore_object_from_row(schema, row, db_info) - def _restore_object_from_row(self, schema, db_row): + def _restore_object_from_row(self, schema, db_row, db_info): restored_data = {} columns_to_update = [] values_to_update = [] @@ -638,6 +1099,7 @@ value = self._converter.from_sql(schema, name, schema_item, value) except StandardError: + logging.exception('self._converter.from_sql failed.') handler = self._converter.get_malformed_data_handler(schema, name, schema_item, value) if handler is None: @@ -660,9 +1122,9 @@ setters = ['%s=?' % c for c in columns_to_update] sql = "UPDATE %s SET %s WHERE id=%s" % (schema.table_name, ', '.join(setters), restored_data['id']) - self._execute(sql, values_to_update) + self.execute(sql, values_to_update) klass = schema.get_ddb_class(restored_data) - return klass(restored_data=restored_data) + return klass(restored_data=restored_data, db_info=db_info) def persistent_object_count(self): return len(self._object_map) @@ -673,7 +1135,7 @@ sql.write('SELECT COUNT(*) ') sql.write(self._get_query_bottom(table_name, where, joins, None, limit)) - return self._execute(sql.getvalue(), values)[0][0] + return self.execute(sql.getvalue(), values)[0][0] def delete(self, klass, where, values): schema = self._schema_map[klass] @@ -681,7 +1143,7 @@ sql.write('DELETE FROM %s' % schema.table_name) if where is not None: sql.write('\nWHERE %s' % where) - self._execute(sql.getvalue(), values, is_update=True) + self.execute(sql.getvalue(), values, is_update=True) def select(self, klass, columns, where, values, joins=None, limit=None, convert=True): @@ -690,7 +1152,7 @@ sql.write('SELECT %s ' % ', '.join(columns)) sql.write(self._get_query_bottom(schema.table_name, where, joins, None, limit)) - results = self._execute(sql.getvalue(), values) + results = self.execute(sql.getvalue(), values) if not convert: return results schema_items = [self._schema_column_map[schema, c] for c in columns] @@ -716,8 +1178,19 @@ else: self.cursor.execute("ROLLBACK TRANSACTION") self._statements_in_transaction = [] + self.emit("transaction-finished", commit) + + def execute(self, sql, values=None, is_update=False, many=False): + """Execute an sql statement and return the results. + + :param sql: sql to execute + :param values: positional arguments for the sql statement + :param is_update: is this an update rather than a select? + :param many: use the execute_many() method instead of execute(). + values should be a list of argument tuples if this is true. + :returns: list of result rows, or None if the statement is an update. 
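As the body of execute() below shows, every update statement is appended to _statements_in_transaction so that _try_rerunning_transaction() can replay the whole transaction after a rollback. That bookkeeping, reduced to a standalone sketch:

    class TransactionJournal(object):
        def __init__(self, cursor):
            self.cursor = cursor
            self.statements = []

        def execute(self, sql, values=()):
            # remember the statement before running it, so a failed
            # transaction can be rolled back and replayed verbatim
            self.statements.append((sql, values))
            self.cursor.execute(sql, values)

        def replay(self):
            for sql, values in self.statements:
                self.cursor.execute(sql, values)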
+ """ - def _execute(self, sql, values, is_update=False, many=False): if is_update and self._quitting_from_operational_error: # We want to avoid updating the database at this point. return @@ -728,33 +1201,23 @@ if values is None: values = () - failed = False if is_update: self._statements_in_transaction.append((sql, values, many)) try: self._time_execute(sql, values, many) - except sqlite3.OperationalError, e: - self._log_error(sql, values, many) - failed = True + except sqlite3.DatabaseError, e: + self._log_error(sql, values, many, e) if is_update: self._current_select_statement = None else: # Make sure we re-run our SELECT statement so that the call to # fetchall() at the end of this method works. (#12885) self._current_select_statement = (sql, values, many) - self._handle_operational_error(e) - if self._quitting_from_operational_error and not is_update: - # This is a very bad state to be in because code calling - # us expects a return value. I think the best we can do - # is re-raise the exception (BDK) - raise + self._handle_operational_error(e, is_update) + except StandardError, e: + self._log_error(sql, values, many, e) + raise - if failed and not self._quitting_from_operational_error: - title = _("%(appname)s database save succeeded", - {"appname": app.config.get(prefs.SHORT_APP_NAME)}) - description = _("The database has been successfully saved. " - "It is now safe to quit without losing any data.") - dialogs.MessageBoxDialog(title, description).run() if is_update: return None else: @@ -769,16 +1232,14 @@ end = time.time() self._check_time(sql, end-start) - def _log_error(self, sql, values, many): + def _log_error(self, sql, values, many, e): # printing the traceback here in whole rather than doing # a logging.exception which seems to show the traceback # up to the try/except handler. - logging.exception("OperationalError\n" - "statement: %s\n\n" - "values: %s\n\n" - "many: %s\n\n" - "full stack:\n%s\n", sql, values, many, - "".join(traceback.format_stack())) + logging.error("%s while executing SQL\n" + "statement: %s\n\n" + "values: %s\n\n" + "many: %s\n\n", e, sql, values, many, exc_info=True) def _try_rerunning_transaction(self): if self._statements_in_transaction: @@ -791,52 +1252,64 @@ for (sql, values, many) in to_run: try: self._time_execute(sql, values, many) - except sqlite3.OperationalError: - self._log_error(sql, values, many) + except sqlite3.DatabaseError, e: + self._log_error(sql, values, many, e) return False return True - def _handle_operational_error(self, e): + def _handle_operational_error(self, e, is_update): if self._quitting_from_operational_error: return + succeeded = False while True: # try to rollback our old transaction if SQLite hasn't done it # automatically try: self.cursor.execute("ROLLBACK TRANSACTION") - except sqlite3.OperationalError: + except sqlite3.DatabaseError: pass - self._show_save_error_dialog(str(e)) - if self._quitting_from_operational_error: - return + retry = self._handle_query_error(str(e)) + if not retry: + break if self._try_rerunning_transaction(): + succeeded = True break - def _show_save_error_dialog(self, error_text): - title = _("%(appname)s database save failed", - {"appname": app.config.get(prefs.SHORT_APP_NAME)}) - description = _( - "%(appname)s was unable to save its database.\n\n" - "If your disk is full, we suggest freeing up some space and " - "retrying. If your disk is not full, it's possible that " - "retrying will work.\n\n" - "If retrying did not work, please quit %(appname)s and restart. 
" - "Recent changes may be lost.\n\n" - "If you see this error often while downloading, we suggest " - "you reduce the number of simultaneous downloads in the Options " - "dialog in the Download tab.\n\n" - "Error: %(error_text)s\n\n", - {"appname": app.config.get(prefs.SHORT_APP_NAME), - "error_text": error_text} - ) - d = dialogs.ChoiceDialog(title, description, - dialogs.BUTTON_RETRY, dialogs.BUTTON_QUIT) - choice = d.run_blocking() - if choice == dialogs.BUTTON_QUIT: + if not succeeded and not is_update: + logging.warn("re-raising SQL error because it was not an update") + # This is a very bad state to be in because code calling + # us expects a return value. I think the best we can do + # is re-raise the exception (BDK) + raise + + if succeeded: + self.error_handler.handle_save_succeeded() + + def _handle_query_error(self, error_text): + """Handle an error running an SQL query. + + :returns: True if we should try to re-run the query + """ + integrity_check_passed = self.check_integrity() + action = self.error_handler.handle_save_error(error_text, + integrity_check_passed) + if action == LiveStorageErrorHandler.ACTION_QUIT: self._quitting_from_operational_error = True - messages.FrontendQuit().send_to_frontend() - else: + return False + elif action == LiveStorageErrorHandler.ACTION_RETRY: logging.warn("Re-running SQL statement") + return True + elif action == LiveStorageErrorHandler.ACTION_USE_TEMPORARY: + self._switch_to_temp_mode() + # reset _statements_in_transaction. The data for the old DB is + # now lost + self._statements_in_transaction = [] + self.cursor = self.connection.cursor() + self._init_database() + return False + else: + logging.warn("Bad return value for handle_save_error: %s", action) + raise def _check_time(self, sql, query_time): SINGLE_QUERY_LIMIT = 0.5 @@ -862,62 +1335,113 @@ logging.timing('query cumulatively slow: %0.2f ' '(%0.03f): %s', cumulative, query_time, sql) + def _calc_created_new(self): + """Decide if the database that we just opened is new.""" + self.cursor.execute("SELECT COUNT(*) FROM sqlite_master " + "WHERE type='table'") + return self.cursor.fetchone()[0] == 0 + def _init_database(self): """Create a new empty database.""" for schema in self._object_schemas: + type_specs = [self._create_sql_for_column(name, schema_item) + for (name, schema_item) in schema.fields] self.cursor.execute("CREATE TABLE %s (%s)" % - (schema.table_name, self._calc_sqlite_types(schema))) + (schema.table_name, ', '.join(type_specs))) for name, columns in schema.indexes: self.cursor.execute("CREATE INDEX %s ON %s (%s)" % (name, schema.table_name, ', '.join(columns))) + for name, columns in schema.unique_indexes: + self.cursor.execute("CREATE UNIQUE INDEX %s ON %s (%s)" % + (name, schema.table_name, ', '.join(columns))) self._create_variables_table() - self.cursor.execute(iteminfocache.create_sql()) - self._set_version() + self.set_version() + self.setup_fulltext_search() + + def setup_fulltext_search(self): + fulltextsearch.setup_fulltext_search(self.connection) + + def _get_size_info(self): + """Get info about the database size - def _get_version(self): + :returns: (page_size, page_count, freelist_count) tuple or None if + there's an error getting the size info + """ + rv = [] + for name in ('page_size', 'page_count', 'freelist_count'): + sql = 'PRAGMA %s' % name + self.cursor.execute(sql) + row = self.cursor.fetchone() + if row is None: + # not sure why this happens, but it does #18633 + logging.warn("_get_size_info(): error running %s", sql) + return None + 
rv.append(row[0]) + return rv + + def _preallocate_space(self, db_name='main'): + if db_name == 'main': + size_info = self._get_size_info() + if size_info is None: + logging.warn("_get_size_info() returned None. Not " + "preallocating space for: %s", self.path) + return + page_size, page_count, freelist_count = size_info + current_size = page_size * (page_count + freelist_count) + else: + # HACK: We can't get size counts for attached databases so we just + # assume that the database is empty of content + current_size = 0 + size = self.preallocate - current_size + if size > 0: + # make a row that's big enough so that our database will be + # approximately preallocate bytes large + self.cursor.execute("REPLACE INTO %s.dtv_variables " + "(name, serialized_value) " + "VALUES ('preallocate', zeroblob(%s))" % + (db_name, size)) + # delete the row, sqlite will keep the space allocated until the + # VACUUM command. And we won't ever send a VACUUM. + self.cursor.execute("DELETE FROM %s.dtv_variables " + "WHERE name='preallocate'" % (db_name,)) + + def get_version(self): return self.get_variable(VERSION_KEY) - def _set_version(self, version=None): + def set_version(self, version=None, db_name='main'): """Set the database version to the current schema version.""" if version is None: version = self._schema_version - self.set_variable(VERSION_KEY, version) + self.set_variable(VERSION_KEY, version, db_name) - def _calc_sqlite_types(self, object_schema): - """What datatype should we use for the attributes of an object schema? + def get_sqlite_type(self, item_schema): + """Get sqlite type to use for a schema item_schema """ - types = [] - for name, schema_item in object_schema.fields: - typ = _sqlite_type_map[schema_item.__class__] - if name != 'id': - types.append('%s %s' % (name, typ)) - else: - types.append('%s %s PRIMARY KEY' % (name, typ)) - return ', '.join(types) + return _sqlite_type_map[item_schema.__class__] - def reset_database(self): + def _create_sql_for_column(self, column_name, item_schema): + typ = self.get_sqlite_type(item_schema) + if column_name != 'id': + return '%s %s' % (column_name, typ) + else: + return '%s %s PRIMARY KEY' % (column_name, typ) + + def reset_database(self, init_schema=True): """Saves the current database then starts fresh with an empty database. + + :param init_schema: should we create tables for our schema? """ self.connection.close() self.save_invalid_db() self.open_connection() - self._init_database() - - def _show_corrupt_db_dialog(self): - title = _("%(appname)s database corrupt.", - {"appname": app.config.get(prefs.SHORT_APP_NAME)}) - description = _( - "Your %(appname)s database is corrupt. It will be " - "backed up in your Miro database directory and a new " - "database will be created now.", - {"appname": app.config.get(prefs.SHORT_APP_NAME)}) - dialogs.MessageBoxDialog(title, description).run_blocking() + if init_schema: + self._init_database() - def _handle_load_error(self, message): + def _handle_load_error(self, message, init_schema=True): """Handle errors happening when we try to load the database. Our basic strategy is to log the error, save the current database then start fresh with an empty database.
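A minimal standalone sketch of the zeroblob trick that _preallocate_space() above relies on (the file name and byte count here are made up for illustration; Miro itself interpolates the attached database name into the statement):

    import sqlite3

    conn = sqlite3.connect("/tmp/prealloc-demo.db")
    conn.execute("CREATE TABLE IF NOT EXISTS dtv_variables "
                 "(name TEXT PRIMARY KEY, serialized_value BLOB)")
    # Writing a large zero-filled blob forces sqlite to grow the file.
    conn.execute("REPLACE INTO dtv_variables (name, serialized_value) "
                 "VALUES ('preallocate', zeroblob(?))", (10 * 1024 * 1024,))
    # Deleting the row only moves its pages to the freelist; without a
    # VACUUM the file keeps its size, so the space stays reserved.
    conn.execute("DELETE FROM dtv_variables WHERE name = 'preallocate'")
    conn.commit()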
@@ -926,7 +1450,7 @@ raise if util.chatter: logging.exception(message) - self.reset_database() + self.reset_database(init_schema) def save_invalid_db(self): target_path = os.path.dirname(self.path) @@ -942,48 +1466,78 @@ save_name = "%s.%d" % (org_save_name, i) return save_name +class DeviceLiveStorage(LiveStorage): + """Version of LiveStorage used for a device.""" + def setup_fulltext_search(self): + fulltextsearch.setup_fulltext_search(self.connection, 'device_item') + + def show_upgrade_progress(self): + return False + +class SharingLiveStorage(LiveStorage): + """Version of LiveStorage used for a share.""" + + def __init__(self, path, share_name, object_schemas): + error_handler = SharingLiveStorageErrorHandler(share_name) + if os.path.exists(path): + raise ValueError("SharingLiveStorage should only be created with " + "a non-existent path") + LiveStorage.__init__(self, path, error_handler, + object_schemas=object_schemas) + + def open_connection(self, path=None, start_in_temp_mode=False): + LiveStorage.open_connection(self, path, start_in_temp_mode) + # execute a bunch of PRAGMA statements that make things faster at the + # expense of reliability in case of a crash. Since we open a new DB + # every time, there's no risk. + self.cursor.execute("PRAGMA synchronous=OFF") + self.cursor.execute("PRAGMA temp_store=MEMORY") + self.cursor.execute("PRAGMA journal_mode=MEMORY") + + def setup_fulltext_search(self): + fulltextsearch.setup_fulltext_search(self.connection, 'sharing_item', + path_column='video_path', + has_entry_description=False) + class SQLiteConverter(object): def __init__(self): - self._to_sql_converters = {} - self._from_sql_converters = {} + self._to_sql_converters = { + schema.SchemaBinary: self._binary_to_sql, + schema.SchemaFilename: self._filename_to_sql, + schema.SchemaStringSet: self._string_set_to_sql, + schema.SchemaTimeDelta: self._timedelta_to_sql, + } + + self._from_sql_converters = { + schema.SchemaBool: self._bool_from_sql, + schema.SchemaBinary: self._binary_from_sql, + schema.SchemaFilename: self._filename_from_sql, + schema.SchemaStringSet: self._string_set_from_sql, + schema.SchemaTimeDelta: self._timedelta_from_sql, + } - repr_types = (schema.SchemaTimeDelta, - schema.SchemaReprContainer, + repr_types = ( schema.SchemaReprContainer, schema.SchemaTuple, schema.SchemaDict, schema.SchemaList, ) for schema_class in repr_types: - self._to_sql_converters[schema_class] = repr - self._from_sql_converters[schema_class] = self._convert_repr - self._to_sql_converters[schema.SchemaStatusContainer] = \ - self._convert_status_to_sql - self._from_sql_converters[schema.SchemaStatusContainer] = \ - self._convert_status - # bools get stored as integers in sqlite - self._from_sql_converters[schema.SchemaBool] = bool - # filenames are always stored in sqlite as unicode - if PlatformFilenameType != unicode: - self._to_sql_converters[schema.SchemaFilename] = filename_to_unicode - self._from_sql_converters[schema.SchemaFilename] = \ - self._unicode_to_filename - # make sure SchemaBinary is always restored as a byte-string - self._to_sql_converters[schema.SchemaBinary] = buffer - self._from_sql_converters[schema.SchemaBinary] = self._convert_binary + self._to_sql_converters[schema_class] = self._repr_to_sql + self._from_sql_converters[schema_class] = self._repr_from_sql def to_sql(self, schema, name, schema_item, value): if value is None: return None converter = self._to_sql_converters.get(schema_item.__class__, self._null_convert) - return converter(value) + return converter(value,
schema_item) def from_sql(self, schema, name, schema_item, value): if value is None: return None converter = self._from_sql_converters.get(schema_item.__class__, self._null_convert) - return converter(value) + return converter(value, schema_item) def get_malformed_data_handler(self, schema, name, schema_item, value): handler_name = 'handle_malformed_%s' % name @@ -993,12 +1547,25 @@ return None def _unicode_to_filename(self, value): - return value.encode('utf-8') + # reverses filename_to_unicode(). We can't use the platform + # unicode_to_filename() because that also cleans out the filename. + # This code is not very good and should be replaced as part of #13182 + if value is not None and PlatformFilenameType != unicode: + return value.encode('utf-8') + else: + return value - def _null_convert(self, value): + def _null_convert(self, value, schema_item): return value - def _convert_binary(self, value): + def _bool_from_sql(self, value, schema_item): + # bools are stored as integers in the DB. + return bool(value) + + def _binary_to_sql(self, value, schema_item): + return buffer(value) + + def _binary_from_sql(self, value, schema_item): if isinstance(value, unicode): return value.encode('utf-8') elif isinstance(value, buffer): @@ -1006,26 +1573,30 @@ else: raise TypeError("Unknown type in _convert_binary") - def _convert_repr(self, value): + def _filename_from_sql(self, value, schema_item): + return self._unicode_to_filename(value) + + def _filename_to_sql(self, value, schema_item): + return filename_to_unicode(value) + + def _repr_to_sql(self, value, schema_item): + return repr(value) + + def _repr_from_sql(self, value, schema_item): return eval(value, __builtins__, {'datetime': datetime, 'time': _TIME_MODULE_SHADOW}) - def _convert_status(self, repr_value): - status_dict = self._convert_repr(repr_value) - filename_fields = schema.SchemaStatusContainer.filename_fields - for key in filename_fields: - value = status_dict.get(key) - if value is not None and PlatformFilenameType != unicode: - status_dict[key] = self._unicode_to_filename(value) - return status_dict - - def _convert_status_to_sql(self, status_dict): - to_save = status_dict.copy() - filename_fields = schema.SchemaStatusContainer.filename_fields - for key in filename_fields: - value = to_save.get(key) - if value is not None: - to_save[key] = filename_to_unicode(value) - return repr(to_save) + def _string_set_to_sql(self, value, schema_item): + return schema_item.delimiter.join(value) + + def _string_set_from_sql(self, value, schema_item): + return set(value.split(schema_item.delimiter)) + + def _timedelta_to_sql(self, value, schema_item): + return ':'.join((str(value.days), str(value.seconds), + str(value.microseconds))) + + def _timedelta_from_sql(self, value, schema_item): + return datetime.timedelta(*(int(c) for c in value.split(":"))) class TimeModuleShadow: """In Python 2.6, time.struct_time is a named tuple and evals poorly, diff -Nru miro-4.0.4/lib/subprocessmanager.py miro-6.0/lib/subprocessmanager.py --- miro-4.0.4/lib/subprocessmanager.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/subprocessmanager.py 2013-04-05 16:02:42.000000000 +0000 @@ -38,6 +38,7 @@ with the command line and env from plat.utils.miro_helper_program_info().
""" +import ctypes import cPickle as pickle import logging import os @@ -45,10 +46,12 @@ import subprocess import sys import threading +import trapcall import warnings import Queue from miro import app +from miro import clock from miro import config from miro import prefs from miro import crashreport @@ -57,7 +60,8 @@ from miro import messagetools from miro import trapcall from miro import util -from miro.plat.utils import miro_helper_program_info, initialize_locale +from miro.plat import utils +from miro.plat.popen import Popen def _on_windows(): """Test if we are unfortunate enough to be running in windows.""" @@ -101,6 +105,8 @@ def send_to_process(self): try: + # handler gets set in SubprocessManager.__init__() when it calls + # install_handler() handler = self.handler except AttributeError: logging.warn("No handler for %s" % self) @@ -109,8 +115,9 @@ class StartupInfo(SubprocessMessage): """Data needed to bootstrap the subprocess.""" - def __init__(self, config_dict): + def __init__(self, config_dict, in_unit_tests): self.config_dict = config_dict + self.in_unit_tests = in_unit_tests class HandlerInfo(SubprocessMessage): """Describes how to build a SubprocessHandler object.""" @@ -137,12 +144,25 @@ self.report = report self.soft_fail = soft_fail -def _send_subprocess_error_for_exception(soft_fail=True): +def send_subprocess_error_for_exception(soft_fail=True): + """Send a SubprocessError message after a caught exception. + + This method creates a crash report using sys.exc_info(). So it should + only be called after an exception is caught. + + :param soft_fail: trigger a soft failure in the main process + """ exc_info = sys.exc_info() - report = '---- subprocess stack ---- ' + report = '---- subprocess stack ----\n' report += crashreport.format_stack_report('in subprocess', exc_info) - report += '-------------------------- ' + report += '--------------------------' + if _on_windows(): + report += "\nGetLastError(): %s" % ctypes.GetLastError() + report += '\n--------------------------' + SubprocessError(report, soft_fail=soft_fail).send_to_main_process() + if logging_setup: + logging.warn("Sending crash report to main process:\n%s", report) class SubprocessHandler(messagetools.MessageHandler): """Handle messages inside a spawned subprocess @@ -163,7 +183,14 @@ try: method(message) except StandardError: - _send_subprocess_error_for_exception() + send_subprocess_error_for_exception() + + def get_task_from_queue(self, queue): + """Get the next task from a Queue object. + + This method should block until a task is ready. + """ + return queue.get() # NOTE: we use "on_" prefix to distinguish these from messages def on_startup(self): @@ -184,12 +211,34 @@ called to handle events in the process lifecycle, like startup and shutdown. """ + def __init__(self): + messagetools.MessageHandler.__init__(self) + # (handler_method, message) tuples to call in the event loop thread + self.handler_queue = Queue.Queue() + self.safe_to_skip_add_idle = False def call_handler(self, method, message): # this executes in the thread reading from the subprocess pipe. Move # things into the backend thread. - name = 'handle subprocess message: %s' % message - eventloop.add_idle(method, name, args=(message,)) + self.handler_queue.put((method, message)) + if not self.safe_to_skip_add_idle: + # we can skip calling add_idle() again if we put objects on the + # queue before process_handler_queue() starts getting from it in + # the event loop thread. 
+ self.safe_to_skip_add_idle = True + eventloop.add_idle(self.process_handler_queue, + 'process handler queue') + + def process_handler_queue(self): + # this executes in the event loop thread. Here's where we should call + # handler methods + + # Before we get anything from our queue, we must set + # safe_to_skip_add_idle to False + self.safe_to_skip_add_idle = False + while not self.handler_queue.empty(): + (method, message) = self.handler_queue.get() + trapcall.trap_call('processing handler method', method, message) def on_startup(self): """Called after the subprocess starts up.""" @@ -212,7 +261,7 @@ class LoadError(StandardError): """Exception for corrupt data when reading from a pipe.""" -SIZEOF_LONG = struct.calcsize("L") +SIZEOF_LONG = struct.calcsize("Q") def _read_bytes_from_pipe(pipe, length): """Read size bytes from a pipe. @@ -226,10 +275,11 @@ """ data = [] while length > 0: - data = pipe.read(length) - if data == '': + d = pipe.read(length) + if d == '': break - length -= len(data) + length -= len(d) + data.append(d) return ''.join(data) def _load_obj(pipe): @@ -246,7 +296,7 @@ if len(size_data) < SIZEOF_LONG: raise LoadError("EOF reached while reading size field " "(read %s bytes)" % len(size_data)) - size = struct.unpack("L", size_data)[0] + size = struct.unpack("Q", size_data)[0] pickle_data = _read_bytes_from_pipe(pipe, size) if len(pickle_data) < size: raise LoadError("EOF reached while reading pickle data " @@ -259,7 +309,7 @@ raise LoadError("Pickle data references unimportable module") except StandardError, e: # log this exception for easier debugging. - _send_subprocess_error_for_exception() + send_subprocess_error_for_exception() raise LoadError("Unknown error in pickle.loads: %s" % e) def _dump_obj(obj, pipe): @@ -270,7 +320,7 @@ """ pickle_data = pickle.dumps(obj) - size_data = struct.pack("L", len(pickle_data)) + size_data = struct.pack("Q", len(pickle_data)) # NOTE: We do a blocking write here. This should be fine, since on both # sides we have a thread dedicated to just reading from the pipe and # pushing the data into a Queue. However, there's some chance that the @@ -289,7 +339,7 @@ """ def __init__(self, message_base_class, responder, handler_class, - handler_args=None): + handler_args=None, restart_delay=60): """Create a new SubprocessManager. This method prepares the subprocess to run. Use start() to start it @@ -302,6 +352,9 @@ handler_class and handler_args are used to build the SubprocessHandler inside the subprocess + + restart_delay controls how quickly we restart crashed subprocesses. + We will not start more than 1 process per restart_delay seconds.
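+
+        For example, with the default restart_delay of 60, a subprocess
+        that crashes 10 seconds after starting is restarted roughly 50
+        seconds later, while one that crashes after 90 seconds is
+        restarted immediately.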
""" if handler_args is None: handler_args = () @@ -310,8 +363,11 @@ self.handler_class = handler_class self.handler_args = handler_args self.is_running = False + self.sent_quit = False self.process = None self.thread = None + self.start_time = 0 + self.restart_delay = restart_delay # Process management @@ -339,27 +395,21 @@ self.thread.start() # work is all done, do some finishing touches self.is_running = True + self.sent_quit = False self._send_startup_info() trapcall.trap_call("subprocess startup", self.responder.on_startup) def _start_subprocess(self): - cmd_line, env = miro_helper_program_info() + cmd_line, env = utils.miro_helper_program_info() kwargs = { "stdout": subprocess.PIPE, "stdin": subprocess.PIPE, - "startupinfo": util.no_console_startupinfo(), + "stderr": open(os.devnull, 'wb'), "env": env, + "close_fds": True } - if _on_windows(): - # normally we just clone stderr for the subprocess, but on windows - # this doesn't work. So we use a pipe that we immediately close - kwargs["stderr"] = subprocess.PIPE - else: - kwargs["stderr"] = None - kwargs["close_fds"] = True - process = subprocess.Popen(cmd_line, **kwargs) - if _on_windows(): - process.stderr.close() + process = Popen(cmd_line, **kwargs) + self.start_time = clock.clock() return process def shutdown(self, timeout=1.0): @@ -378,39 +428,77 @@ # If things go right, the process will quit, then our thread will # quit. Wait for a clean shutdown self.thread.join(timeout) - # If things didn't shutdown, then force them to quit - if self.process.returncode is None: - self.process.terminate() + # If things didn't shutdown, then force them to quit. Let's not + # bother with SIGTERM since that really also would be an abnormal + # exit as far as the child is concerned. + try: + self.process.kill() + except OSError, e: + # Error on kill. Just log an error and move on. Nothing + # much we can do here anyway. + logging.exception('worker subprocess kill failed') self._cleanup_process() def _on_thread_quit(self, thread): """Handle our thread exiting.""" - # Igoner this call if it was queued from while we were in the middle + # Ignore this call if it was queued from while we were in the middle # of shutdown(). if not self.is_running: return if thread is not self.thread: - app.controller.failed_soft('handling subprocess', - '_on_thread_quit called by an old thread') + # If we have lost the race between the cleanup on shutdown + # it should be safe to ignore. + # + # This can happen when the process does not immediately shut down + # because the worker process is still processing pending jobs + # and the quit message was not processed in time and so the + # subprocess was forcibly terminated. When that happens + # _cleanup_process() is called which resets the thread attribute + # to None immediately, but _on_thread_quit() is only run some + # time after that (when we notice the pipe to the subprocess's + # close we add _on_thread_quit() to the idle loop). + # + # So if the self.thread attribute is None then it means we are done + # and so things are all good. 
+ if self.thread is not None and thread.quit_type != thread.QUIT_NORMAL: + msg = ('_on_thread_quit called by an old thread ' + 'self.thread: %s thread: %s quit_type: %s' % + (self.thread.name, thread.name, thread.quit_type)) + app.controller.failed_soft('handling subprocess', msg) return - if self.thread.quit_type == self.thread.QUIT_NORMAL: + if (self.thread.quit_type == self.thread.QUIT_NORMAL and + self.sent_quit): self._cleanup_process() else: - logging.warn("Restarting failed subprocess (reason: %s)", - self.thread.quit_type) + logging.warn("Subprocess quit unexpectedly (quit_type: %s, " + "sent_quit: %s). Will restart subprocess", + self.thread.quit_type, self.sent_quit) # NOTE: should we enforce some sort of cool-down time before # restarting the subprocess? - self._restart() - - def _restart(self): - # close our stream to the subprocess - self.process.stdin.close() - # unset our attributes for the process that just quit. This protects - # us in case _start() fails for some reason. - self._cleanup_process() + time_since_start = clock.clock() - self.start_time + delay_time = self.restart_delay - time_since_start + if delay_time <= 0: + logging.warn("Subprocess died after %0.1f seconds. " + "Restarting", time_since_start) + self.restart() + else: + logging.warn("Subprocess died in %0.1f seconds, waiting " + "%0.1f to restart", time_since_start, delay_time) + eventloop.add_timeout(delay_time, self.restart, + 'restart failed subprocess') + + def restart(self, clean=False): + if clean: + self.shutdown() + else: + # close our stream to the subprocess + self.process.stdin.close() + # unset our attributes for the process that just quit. This protects + # us in case _start() fails for some reason. + self._cleanup_process() # restart ourselves self._start() trapcall.trap_call("subprocess restart", self.responder.on_restart) @@ -442,9 +530,11 @@ def send_quit(self): """Ask the subprocess to shutdown.""" self.send_message(None) + self.sent_quit = True def _send_startup_info(self): - self.send_message(StartupInfo(self._get_config_dict())) + self.send_message(StartupInfo(self._get_config_dict(), + hasattr(app, 'in_unit_tests'))) self.send_message(HandlerInfo(self.handler_class, self.handler_args)) def _get_config_dict(self): @@ -531,10 +621,16 @@ def subprocess_main(): """Run loop inside the subprocess.""" - # make sure that we are using binary mode for stdout + global logging_setup + logging_setup = False + if _on_windows(): + # On windows, both STDIN and STDOUT get opened as text mode. This + # can cause all kinds of weirdness when reading from our pipes. + # (See #17804). Change the mode to binary for both streams. import msvcrt msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) + msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) # unset stdin and stdout so that we don't accidentally print to them stdin = sys.stdin stdout = sys.stdout @@ -545,9 +641,11 @@ except Exception, e: # error reading our initial messages. Try to log a warning, then # quit.
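A sketch of the framing these startup messages arrive in, mirroring the struct.pack("Q", ...) code in _dump_obj()/_load_obj() above (write_framed is a made-up name, not part of the module):

    import cPickle as pickle
    import struct

    def write_framed(pipe, obj):
        # fixed 8-byte size header ("Q"), then the pickle payload
        payload = pickle.dumps(obj)
        pipe.write(struct.pack("Q", len(payload)))
        pipe.write(payload)
        pipe.flush()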
- _send_subprocess_error_for_exception() + + send_subprocess_error_for_exception() _finish_subprocess_message_stream(stdout) raise # reraise so that miro_helper.py returns a non-zero exit code + logging.info("_subprocess_setup() finished") # startup thread to process stdin queue = Queue.Queue() thread = threading.Thread(target=_subprocess_pipe_thread, args=(stdin, @@ -555,13 +653,16 @@ thread.daemon = False thread.start() # run our message loop + logging.info("starting message loop") handler.on_startup() try: while True: - msg = queue.get() + msg = handler.get_task_from_queue(queue) if msg is None: break handler.handle(msg) + except StandardError: + send_subprocess_error_for_exception() finally: handler.on_shutdown() # send None to signal that we are about to quit @@ -589,6 +690,7 @@ :raises IOError: low-level error while reading from the pipe :raises LoadError: data read was corrupted """ + global logging_setup # disable warnings so we don't get too much junk on stderr warnings.filterwarnings("ignore") # setup MessageHandler for messages going to the main process @@ -599,10 +701,15 @@ if not isinstance(msg, StartupInfo): raise LoadError("first message must a StartupInfo obj") # setup some basic modules like config and gtcache - initialize_locale() + utils.initialize_locale() config.load(config.ManualConfig()) app.config.set_dictionary(msg.config_dict) gtcache.init() + if not msg.in_unit_tests: + utils.setup_logging(app.config.get(prefs.HELPER_LOG_PATHNAME)) + util.setup_logging() + logging_setup = True + logging.info("Logging Started") # setup our handler msg = _load_obj(stdin) if not isinstance(msg, HandlerInfo): @@ -611,7 +718,7 @@ return msg.handler_class(*msg.handler_args) except StandardError, e: # log this exception for easier debugging. - _send_subprocess_error_for_exception() + send_subprocess_error_for_exception() raise LoadError("Exception while constructing handler: %s" % e) def _subprocess_pipe_thread(stdin, queue): @@ -635,16 +742,20 @@ """Handles messages by writing them to a pipe This is used in the subprocess to send messages back to the main process - over it's stdout pipe. + over it's stdout pipe + + It's safe for multiple threads in the subprocess to use this at once """ def __init__(self, fileobj): self.fileobj = fileobj + self.lock = threading.Lock() def handle(self, msg): try: - _dump_obj(msg, self.fileobj) + with self.lock: + _dump_obj(msg, self.fileobj) except pickle.PickleError: - _send_subprocess_error_for_exception() + send_subprocess_error_for_exception() # NOTE: we don't handle IOError here because what can we do about # that? Just let it propagate up to the top and which should cause us # to shutdown. diff -Nru miro-4.0.4/lib/tabs.py miro-6.0/lib/tabs.py --- miro-4.0.4/lib/tabs.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/tabs.py 2013-04-05 16:02:42.000000000 +0000 @@ -30,6 +30,7 @@ """``miro.tabs`` -- Holds the TabOrder DDBObject. 
""" +from miro import app from miro import database from miro import guide from miro import feed @@ -80,7 +81,6 @@ def _get_tab_views(self): if self.type == u'site': tab_views = (guide.ChannelGuide.site_view(),) -# tab_views = (guide.ChannelGuide.visible_view(),) elif self.type == u'channel': tab_views = (feed.Feed.visible_view(), folder.ChannelFolder.make_view()) @@ -211,7 +211,12 @@ def _on_remove_tab(self, tracker, obj): if obj.id in self.id_to_tab: del self.id_to_tab[obj.id] - self.tab_ids.remove(obj.id) + try: + self.tab_ids.remove(obj.id) + except ValueError: + app.controller.failed_soft('TabOrder._on_remove_tab', + 'when removing %i from %r order' % ( + obj.id, self.type)) self.signal_change() def reorder(self, newOrder): diff -Nru miro-4.0.4/lib/test/conversionstest.py miro-6.0/lib/test/conversionstest.py --- miro-4.0.4/lib/test/conversionstest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/conversionstest.py 2013-04-05 16:02:42.000000000 +0000 @@ -129,38 +129,6 @@ finally: f.close() - -class MockFFMpeg2TheoraConversionTask(conversions.FFMpeg2TheoraConversionTask): - def __init__(self): - # not calling superclass init because it does a bunch of - # stuff we don't want to deal with mocking. - - # instead, initialize the bits we're testing - self.error = None - self.progress = 0 - self.duration = None - - def _log_progress(self, line): - pass - - def _notify_progress(self): - pass - -class FFMpeg2TheoraConversionTaskTest(MiroTestCase): - def test_ffmpeg2theora_mp4_to_oggtheora(self): - f = open(os.path.join(DATA, "ffmpeg2theora.mp4.oggtheora.txt"), "r") - try: - lines = conversions.line_reader(f) - mock = MockFFMpeg2TheoraConversionTask() - mock.process_output(lines) - - # no errors and progress equals 1.0 - self.assertEquals(mock.error, None) - self.assertEquals(mock.progress, 1.0) - self.assertEquals(mock.duration, 368) - finally: - f.close() - class ConversionInfoTest(MiroTestCase): def get_output_file(self, filepath): filename = os.path.basename(filepath) diff -Nru miro-4.0.4/lib/test/databaseerrortest.py miro-6.0/lib/test/databaseerrortest.py --- miro-4.0.4/lib/test/databaseerrortest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/databaseerrortest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,328 @@ +import sqlite3 + +from miro import app +from miro import dialogs +from miro.data import dberrors +from miro.data import item +from miro.data import itemtrack +from miro.test import mock +from miro.test import testobjects +from miro.test.framework import MiroTestCase + +class DBErrorTest(MiroTestCase): + """Test database error handling.""" + + def setUp(self): + MiroTestCase.setUp(self) + self.frontend = mock.Mock() + self.run_choice_dialog = self.frontend.run_choice_dialog + self.db_error_handler = dberrors.DBErrorHandler(self.frontend) + + def check_run_dialog_scheduled(self, title, description, thread, + reset_mock=True): + call_on_ui_thread = self.frontend.call_on_ui_thread + self.assertEquals(call_on_ui_thread.call_count, 1) + func = call_on_ui_thread.call_args[0][0] + args = call_on_ui_thread.call_args[0][1:] + self.assertEquals(func, self.db_error_handler._run_dialog) + self.assertEquals(args, (title, description, thread)) + if reset_mock: + call_on_ui_thread.reset_mock() + return args + + def check_run_dialog_not_scheduled(self): + self.assertEquals(self.frontend.call_on_ui_thread.call_count, 0) + + def run_dialog(self, title, description, retry_callback=None): + self.db_error_handler.run_dialog(title, description, retry_callback) + # 
run_dialog should schedule the dialog to be run later using + # call_on_ui_thread() + args = self.check_run_dialog_scheduled(title, description, 'ui thread') + # call _run_dialog to simulate showing the dialog + self.db_error_handler._run_dialog(*args) + + def run_backend_dialog(self, dialog): + self.db_error_handler.run_backend_dialog(dialog) + # run_dialog should schedule the dialog to be run later using + # call_on_ui_thread() + call_on_ui_thread = self.frontend.call_on_ui_thread + self.assertEquals(call_on_ui_thread.call_count, 1) + func = call_on_ui_thread.call_args[0][0] + args = call_on_ui_thread.call_args[0][1:] + call_on_ui_thread.reset_mock() + self.assertEquals(func, self.db_error_handler._run_dialog) + self.assertEquals(args, (dialog.title, dialog.description, + 'eventloop thread')) + # call _run_dialog to simulate showing the dialog + self.db_error_handler._run_dialog(*args) + + def test_frontend_error_handling(self): + self.run_choice_dialog.return_value = dialogs.BUTTON_RETRY + # the frontend calls run_dialog() when it sees an error + retry_callback = mock.Mock() + self.run_dialog('test 1', 'test 2', retry_callback) + # run_dialog() should pop up a choice dialog + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(self.run_choice_dialog.call_args[0], + ('test 1', 'test 2', + [dialogs.BUTTON_RETRY, dialogs.BUTTON_QUIT])) + self.assertEquals(self.run_choice_dialog.call_args[1], {}) + # since RETRY was chosen, the retry callback should be called + self.assertEquals(retry_callback.call_count, 1) + # try again with QUIT chosen. In that case, the retry callback + # shouldn't be called + retry_callback.reset_mock() + self.run_choice_dialog.return_value = dialogs.BUTTON_QUIT + self.run_dialog('test 1', 'test 2', retry_callback) + self.assertEquals(retry_callback.call_count, 0) + + def test_backend_error_handling(self): + # when the backend sees an error, it should send the + # DatabaseErrorDialog to the frontend and the frontend should call + # DBErrorHandler.run_backend_dialog(). This test is testing what + # happens when run_backend_dialog() is called. + self.run_choice_dialog.return_value = dialogs.BUTTON_RETRY + dialog = mock.Mock(title='test 1', description='test 2') + self.run_backend_dialog(dialog) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(dialog.run_callback.call_count, 1) + self.assertEquals(dialog.run_callback.call_args, + ((dialogs.BUTTON_RETRY,), {})) + + def test_backend_then_frontend_errors(self): + retry_callback = mock.Mock() + def run_choice_dialog(title, description, buttons): + # while inside the choice dialog for the backend, we trigger + # another error from the frontend. 
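+        # (This exercises re-entrancy: DBErrorHandler has to fold the second
+        # error into the dialog that is already up instead of popping another
+        # one, which the call_count == 1 assertion below verifies.)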
+ self.run_dialog('test 1', 'test 2', retry_callback) + return dialogs.BUTTON_RETRY + self.run_choice_dialog.side_effect = run_choice_dialog + dialog = mock.Mock(title='test 1', description='test 2') + self.run_backend_dialog(dialog) + # even though we saw 2 errors, only 1 dialog should be shown + self.assertEquals(self.run_choice_dialog.call_count, 1) + # since RETRY was chosen for the dialog, both backend and frontend + # should see that + self.assertEquals(dialog.run_callback.call_args[0][0], + dialogs.BUTTON_RETRY) + self.assertEquals(retry_callback.call_count, 1) + + def test_frontend_then_backend_errors(self): + retry_callback = mock.Mock() + dialog = mock.Mock(title='test 1', description='test 2') + def run_choice_dialog(title, description, buttons): + # while inside the choice dialog for the frontend error, we + # trigger another error from the backend. + self.run_backend_dialog(dialog) + return dialogs.BUTTON_RETRY + self.run_choice_dialog.side_effect = run_choice_dialog + self.run_dialog('test 1', 'test 2', retry_callback) + # even though we saw 2 errors, only 1 dialog should be shown + self.assertEquals(self.run_choice_dialog.call_count, 1) + # since RETRY was chosen for the dialog, both backend and frontend + # should see that + self.assertEquals(dialog.run_callback.call_args[0][0], + dialogs.BUTTON_RETRY) + self.assertEquals(retry_callback.call_count, 1) + + def test_nested_frontend_errors(self): + retry_callback = mock.Mock() + def run_choice_dialog(title, description, buttons): + # simulate several other errors while running the dialog + self.run_dialog('test 1', 'test 2', retry_callback) + self.run_dialog('test 1', 'test 2', retry_callback) + self.run_dialog('test 1', 'test 2', retry_callback) + return dialogs.BUTTON_RETRY + self.run_choice_dialog.side_effect = run_choice_dialog + self.run_dialog('test 1', 'test 2', retry_callback) + # even though we saw 4 errors, only 1 dialog should be shown + self.assertEquals(self.run_choice_dialog.call_count, 1) + # the retry_callback should be called for each run_dialog() call. + self.assertEquals(retry_callback.call_count, 4) + + def test_reuse_retry(self): + # if we get an error on one thread and the user responds with RETRY, + # then we should reuse that response if another thread sees an error.
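+        # (The reused answer only covers errors that arrive while the first
+        # response is being applied; a later, independent error pops a fresh
+        # dialog, as the second run_dialog() call below checks.)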
+ self.run_choice_dialog.return_value = dialogs.BUTTON_RETRY + dialog = mock.Mock(title='test 1', description='test 2') + # handle a backend error + self.run_backend_dialog(dialog) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(dialog.run_callback.call_args[0][0], + dialogs.BUTTON_RETRY) + # handle a frontend error, we should reuse the RETRY response + retry_callback = mock.Mock() + self.run_dialog('test 1', 'test 2', retry_callback) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(retry_callback.call_count, 1) + # handle another frontend error; this time we shouldn't reuse the + # RETRY response + self.run_dialog('test 1', 'test 2', retry_callback) + self.assertEquals(self.run_choice_dialog.call_count, 2) + self.assertEquals(retry_callback.call_count, 2) + + def test_reuse_quit(self): + # if the user replies with QUIT, then we should always return QUIT + # for future errors + self.run_choice_dialog.return_value = dialogs.BUTTON_QUIT + dialog = mock.Mock(title='test 1', description='test 2') + # handle a backend error + self.run_backend_dialog(dialog) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(dialog.run_callback.call_args[0][0], + dialogs.BUTTON_QUIT) + # for future errors, we should assume the user still wants to quit + + # try a bunch of frontend errors + retry_callback = mock.Mock() + self.run_dialog('test 1', 'test 2', retry_callback) + self.run_dialog('test 1', 'test 2', retry_callback) + self.run_dialog('test 1', 'test 2', retry_callback) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(retry_callback.call_count, 0) + # try a bunch of backend errors + self.run_backend_dialog(dialog) + self.run_backend_dialog(dialog) + self.run_backend_dialog(dialog) + self.assertEquals(self.run_choice_dialog.call_count, 1) + self.assertEquals(dialog.run_callback.call_args_list, [ + ((dialogs.BUTTON_QUIT,), {}), + ((dialogs.BUTTON_QUIT,), {}), + ((dialogs.BUTTON_QUIT,), {}), + ((dialogs.BUTTON_QUIT,), {}) + ]) + + def test_quit(self): + # Check that we call Frontend.quit() + self.run_choice_dialog.return_value = dialogs.BUTTON_QUIT + dialog = mock.Mock(title='test 1', description='test 2') + self.run_backend_dialog(dialog) + self.assertEquals(self.frontend.quit.call_count, 1) + # another error shouldn't result in 2 quit calls + self.run_choice_dialog.return_value = dialogs.BUTTON_QUIT + dialog = mock.Mock(title='test 1', description='test 2') + self.run_backend_dialog(dialog) + self.assertEquals(self.frontend.quit.call_count, 1) + + def test_error_in_retry_callback(self): + self.run_choice_dialog.return_value = dialogs.BUTTON_RETRY + # the frontend calls run_dialog() when it sees an error + mock_retry_callback = mock.Mock() + def retry_callback(): + # the first time this one is called, we simulate another error + # happening + if mock_retry_callback.call_count == 1: + self.db_error_handler.run_dialog('test 1', 'test 2', + mock_retry_callback) + mock_retry_callback.side_effect = retry_callback + self.run_dialog('test 1', 'test 2', mock_retry_callback) + # the first run through retry_callback resulted in an error. We + # should have a new dialog scheduled to pop up. We shouldn't call + # retry_callback() again yet, nor have actually popped up the dialog. + self.assertEquals(self.run_choice_dialog.call_count, 1) + args = self.check_run_dialog_scheduled('test 1', 'test 2', 'ui thread') + self.assertEquals(mock_retry_callback.call_count, 1) + # Run the dialog again.
The second time through our retry callback + # won't have an error + self.db_error_handler._run_dialog(*args) + self.assertEquals(self.run_choice_dialog.call_count, 2) + self.assertEquals(mock_retry_callback.call_count, 2) + self.check_run_dialog_not_scheduled() + +class TestItemTrackErrors(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.init_data_package() + self.idle_scheduler = mock.Mock() + self.feed, self.items = testobjects.make_feed_with_items(10) + app.db.finish_transaction() + + def make_tracker(self): + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.feed.id) + query.set_order_by(['release_date']) + item_tracker = itemtrack.ItemTracker(self.idle_scheduler, query, + item.ItemSource()) + self.list_changed_callback = mock.Mock() + self.items_changed_callback = mock.Mock() + item_tracker.connect('list-changed', self.list_changed_callback) + item_tracker.connect('items-changed', self.items_changed_callback) + return item_tracker + + def force_db_error(self): + def execute_that_fails(*args, **kwargs): + raise sqlite3.DatabaseError("Test Error") + mock_execute = mock.Mock(side_effect=execute_that_fails) + return mock.patch('miro.data.connectionpool.Connection.execute', + mock_execute) + + def fetch_item_infos(self): + return item.fetch_item_infos(app.db, [i.id for i in self.items]) + + def test_error_fetching_list(self): + with self.allow_warnings(): + with self.force_db_error(): + tracker = self.make_tracker() + self.assertEquals(app.db_error_handler.run_dialog.call_count, 1) + # since there was an error while fetching the initial item list, + # get_items() should return an empty list + self.assertEquals(tracker.get_items(), []) + # when the retry callback is called, we should send the list-changed + # callback with the correct data + retry_callback = app.db_error_handler.run_dialog.call_args[0][2] + self.assertNotEquals(retry_callback, None) + self.assertEquals(self.list_changed_callback.call_count, 0) + retry_callback() + self.assertEquals(self.list_changed_callback.call_count, 1) + # get_items() should return the correct items now + self.assertSameSet(tracker.get_items(), self.fetch_item_infos()) + + def test_error_fetching_rows(self): + tracker = self.make_tracker() + with self.allow_warnings(): + with self.force_db_error(): + # call get_row() a bunch of times. On GTK I think we can get + # nested errors while waiting for the dialog response.
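+                # (Each get_row() call below hits the patched execute() and
+                # fails; instead of raising into frontend code the tracker
+                # degrades the row to a DBErrorItemInfo placeholder, as the
+                # assertions below check.)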
+ rv1 = tracker.get_row(0) + rv2 = tracker.get_row(1) + rv3 = tracker.get_row(2) + self.assertEquals(rv1.__class__, item.DBErrorItemInfo) + self.assertEquals(rv2.__class__, item.DBErrorItemInfo) + self.assertEquals(rv3.__class__, item.DBErrorItemInfo) + self.assertSameSet(tracker.get_items(), + [item.DBErrorItemInfo(item_obj.id) + for item_obj in self.items]) + self.assertEquals(app.db_error_handler.run_dialog.call_count, 1) + # when the retry callback is called, we should send the list-changed + # callback with the correct data + retry_callback = app.db_error_handler.run_dialog.call_args[0][2] + self.assertNotEquals(retry_callback, None) + self.assertEquals(self.list_changed_callback.call_count, 0) + retry_callback() + self.assertEquals(self.list_changed_callback.call_count, 1) + # get_items() should return the correct items now + self.assertSameSet(tracker.get_items(), self.fetch_item_infos()) + + def test_error_has_playables(self): + tracker = self.make_tracker() + with self.allow_warnings(): + with self.force_db_error(): + retval = tracker.has_playables() + self.assertEquals(app.db_error_handler.run_dialog.call_count, 1) + # on db errors, has_playables() should return False + self.assertEquals(retval, False) + # We don't need a retry callback for this. + retry_callback = app.db_error_handler.run_dialog.call_args[0][2] + self.assertNotEquals(retry_callback, None) + + def test_error_get_playable_ids(self): + tracker = self.make_tracker() + with self.allow_warnings(): + with self.force_db_error(): + retval = tracker.get_playable_ids() + self.assertEquals(app.db_error_handler.run_dialog.call_count, 1) + # on db errors, get_playable_ids() should return an empty list + self.assertEquals(retval, []) + # We don't need a retry callback for this. + retry_callback = app.db_error_handler.run_dialog.call_args[0][2] + self.assertNotEquals(retry_callback, None) diff -Nru miro-4.0.4/lib/test/databasesanitytest.py miro-6.0/lib/test/databasesanitytest.py --- miro-4.0.4/lib/test/databasesanitytest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/databasesanitytest.py 2013-04-05 16:02:42.000000000 +0000 @@ -40,7 +40,7 @@ def test_phantom_feed_checking(self): f = feed.Feed(u"http://feed.uk") i = item.Item(item.FeedParserValues({}), feed_id=f.id) - i2 = item.FileItem(FilenameType('/foo/bar.txt'), feed_id=f.id) + i2 = item.FileItem(self.make_temp_path('.txt'), feed_id=f.id) self.check_object_list_fails_test([i]) self.check_fix_if_possible([i, i2], []) self.check_object_list_passes_test([i, f]) diff -Nru miro-4.0.4/lib/test/databasetest.py miro-6.0/lib/test/databasetest.py --- miro-4.0.4/lib/test/databasetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/databasetest.py 2013-04-05 16:02:42.000000000 +0000 @@ -140,7 +140,7 @@ self.feed2.set_title(u"booya") self.i3.signal_change() self.i2.remove() - self.i1.set_title(u"new title") + self.i1.mark_item_skipped() app.bulk_sql_manager.finish() self.assertEquals(self.add_callbacks, [self.i3]) self.assertEquals(self.remove_callbacks, [self.i2]) @@ -154,14 +154,6 @@ self.assertEquals(self.remove_callbacks, []) self.assertEquals(self.change_callbacks, []) - def test_reset(self): - database.setup_managers() - self.feed2.set_title(u"booya") - self.feed.revert_title() - self.assertEquals(self.add_callbacks, []) - self.assertEquals(self.remove_callbacks, []) - self.assertEquals(self.change_callbacks, []) - def test_check_all_item_not_loaded(self): tracker = self.view.make_tracker() self.clear_ddb_object_cache() @@ -205,7 +197,8 @@ def 
test_remove_in_setup_new(self): self.assertEquals(TestDDBObject.make_view().count(), 0) - TestDDBObject(self, remove=True) + with self.allow_warnings(): + TestDDBObject(self, remove=True) self.assertEquals(TestDDBObject.make_view().count(), 0) def test_test_attribute_track(self): diff -Nru miro-4.0.4/lib/test/datastructurestest.py miro-6.0/lib/test/datastructurestest.py --- miro-4.0.4/lib/test/datastructurestest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/datastructurestest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,38 +0,0 @@ -from miro import datastructures - -from miro.test.framework import MiroTestCase - -class FifoTestCase(MiroTestCase): - def test_fifo(self): - fifo = datastructures.Fifo() - self.assertEquals(len(fifo), 0) - self.assertRaises(ValueError, fifo.dequeue) - - fifo.enqueue(1) - fifo.enqueue(2) - fifo.enqueue(3) - - self.assertEquals(len(fifo), 3) - self.assertEquals(fifo.dequeue(), 1) - self.assertEquals(fifo.dequeue(), 2) - self.assertEquals(fifo.dequeue(), 3) - self.assertRaises(ValueError, fifo.dequeue) - - fifo.enqueue(1) - fifo.enqueue(2) - self.assertEquals(fifo.dequeue(), 1) - self.assertEquals(len(fifo), 1) - fifo.enqueue(3) - fifo.enqueue(4) - self.assertEquals(len(fifo), 3) - self.assertEquals(fifo.dequeue(), 2) - fifo.enqueue(5) - self.assertEquals(len(fifo), 3) - self.assertEquals(fifo.dequeue(), 3) - self.assertEquals(fifo.dequeue(), 4) - self.assertEquals(fifo.dequeue(), 5) - self.assertEquals(len(fifo), 0) - - def test_empty(self): - fifo = datastructures.Fifo() - self.assertRaises(ValueError, fifo.dequeue) diff -Nru miro-4.0.4/lib/test/devicestest.py miro-6.0/lib/test/devicestest.py --- miro-4.0.4/lib/test/devicestest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/devicestest.py 2013-04-05 16:02:42.000000000 +0000 @@ -28,13 +28,34 @@ # statement from all source files in the program, then also delete it here. 
import os -import json +try: + import simplejson as json +except ImportError: + import json + +import datetime +import shutil +import sqlite3 from miro.gtcache import gettext as _ -from miro.plat.utils import PlatformFilenameType -from miro.test.framework import MiroTestCase - +from miro.plat.utils import (PlatformFilenameType, unicode_to_filename, + utf8_to_filename) +from miro.test.framework import MiroTestCase, EventLoopTest +from miro.test import mock + +from miro import app +from miro import database +from miro import devicedatabaseupgrade from miro import devices +from miro import item +from miro import messages +from miro import metadata +from miro import models +from miro import schema +from miro import storedatabase +from miro.data.item import fetch_item_infos +from miro.plat import resources +from miro.test import testobjects class DeviceManagerTest(MiroTestCase): def build_config_file(self, filename, data): @@ -128,7 +149,8 @@ devices = [target1] """) dm = devices.DeviceManager() - dm.load_devices(os.path.join(self.tempdir, "*.py")) + with self.allow_warnings(): + dm.load_devices(os.path.join(self.tempdir, "*.py")) self.assertRaises(KeyError, dm.get_device, "Target1") self.assertRaises(KeyError, dm.get_device_by_id, 0, 0) @@ -232,22 +254,22 @@ ddb = devices.load_database(self.tempdir) self.assertEqual(dict(ddb), data) - self.assertEqual(len(ddb.get_callbacks('changed')), 1) + self.assertNotEqual(ddb.write_manager, None) def test_load_database_missing(self): ddb = devices.load_database(self.tempdir) self.assertEqual(dict(ddb), {}) - self.assertEqual(len(ddb.get_callbacks('changed')), 1) + self.assertNotEqual(ddb.write_manager, None) def test_load_database_error(self): os.makedirs(os.path.join(self.tempdir, '.miro')) with open(os.path.join(self.tempdir, '.miro', 'json'), 'w') as f: f.write('NOT JSON DATA') - ddb = devices.load_database(self.tempdir) + with self.allow_warnings(): + ddb = devices.load_database(self.tempdir) self.assertEqual(dict(ddb), {}) - self.assertEqual(len(ddb.get_callbacks('changed')), 1) - + self.assertNotEqual(ddb.write_manager, None) def test_write_database(self): data = {u'a': 2, @@ -257,6 +279,65 @@ new_data = json.load(f) self.assertEqual(data, new_data) +class ScanDeviceForFilesTest(EventLoopTest): + def setUp(self): + EventLoopTest.setUp(self) + self.setup_device() + self.add_fake_media_files_to_device() + self.setup_fake_device_manager() + + def tearDown(self): + del app.device_manager + EventLoopTest.tearDown(self) + + def setup_device(self): + self.device = testobjects.make_mock_device() + + def setup_fake_device_manager(self): + app.device_manager = mock.Mock() + app.device_manager.running = True + app.device_manager._is_hidden.return_value = False + + def add_fake_media_files_to_device(self): + self.device_item_filenames = [ + unicode_to_filename(f.decode('utf-8')) for f in + ['foo.mp3', 'bar.mp3', 'foo.avi', 'bar.ogg'] + ] + for filename in self.device_item_filenames: + path = os.path.join(self.device.mount, filename) + with open(path, 'w') as f: + f.write("fake-data") + + def check_device_items(self, correct_paths): + device_items = item.DeviceItem.make_view(db_info=self.device.db_info) + device_item_paths = [di.filename for di in device_items] + self.assertSameSet(device_item_paths, correct_paths) + + def run_scan_device_for_files(self): + devices.scan_device_for_files(self.device) + self.runPendingIdles() + + def test_scan_device_for_files(self): + self.run_scan_device_for_files() + self.check_device_items(self.device_item_filenames) + + def 
test_removed_files(self): + self.run_scan_device_for_files() + self.check_device_items(self.device_item_filenames) + # remove a couple files + for i in xrange(2): + filename = self.device_item_filenames.pop() + os.remove(os.path.join(self.device.mount, filename)) + # run scan_device_for_files again, it should remove the items that are + # no longer present + self.run_scan_device_for_files() + self.check_device_items(self.device_item_filenames) + + def test_skip_read_only(self): + self.device.read_only = True + self.run_scan_device_for_files() + self.check_device_items([]) + class GlobSetTest(MiroTestCase): def test_globset_regular_match(self): @@ -291,3 +372,465 @@ self.assertTrue(gs & set('bc')) self.assertTrue(gs & set(['bc', 'c'])) self.assertFalse(gs & set('cd')) + + def test_globset_and_plain(self): + gs = devices.GlobSet('ab') + self.assertTrue(gs & set('a')) + self.assertTrue(gs & set('b')) + self.assertTrue(gs & set('ab')) + self.assertTrue(gs & set('bc')) + self.assertFalse(gs & set('cd')) + +class DeviceDatabaseTest(MiroTestCase): + """Test sqlite databases on devices.""" + def setUp(self): + MiroTestCase.setUp(self) + self.device = testobjects.make_mock_device(no_database=True) + + def open_database(self): + testobjects.setup_mock_device_database(self.device) + + def test_open(self): + self.open_database() + self.assertEquals(self.device.db_info.db.__class__, + storedatabase.DeviceLiveStorage) + self.assertEquals(self.device.db_info.db.error_handler.__class__, + storedatabase.DeviceLiveStorageErrorHandler) + + def test_reload(self): + self.open_database() + testobjects.make_device_items(self.device, 'foo.mp3', 'bar.mp3') + # close, then reopen the database + self.device.db_info.db.finish_transaction() + self.open_database() + # test that the database is still intact by checking the + # metadata_status table + cursor = self.device.db_info.db.cursor + cursor.execute("SELECT path FROM metadata_status") + paths = [r[0] for r in cursor.fetchall()] + self.assertSameSet(paths, ['foo.mp3', 'bar.mp3']) + + @mock.patch('miro.dialogs.MessageBoxDialog.run_blocking') + def test_load_error(self, mock_dialog_run): + # Test an error loading the device database + def mock_get_last_id(livestorage_self): + if not self.faked_get_last_id_error: + self.faked_get_last_id_error = True + raise sqlite3.DatabaseError("Error") + else: + return 0 + self.faked_get_last_id_error = False + self.patch_function('miro.storedatabase.LiveStorage._get_last_id', + mock_get_last_id) + self.open_database() + # check that we displayed an error dialog + mock_dialog_run.assert_called_once_with() + # check that our corrupt database logic ran + dir_contents = os.listdir(os.path.join(self.device.mount, '.miro')) + self.assert_('corrupt_database' in dir_contents) + + def test_save_error(self): + # FIXME: what should we do if we have an error saving to the device?
+ pass + +class DeviceUpgradeTest(MiroTestCase): + """Test upgrading data from a JSON db from an old version of Miro.""" + + def setUp(self): + MiroTestCase.setUp(self) + # setup a device object + self.device = testobjects.make_mock_device(no_database=True) + + def setup_json_db(self, path): + # setup a device database + json_path = resources.path(path) + self.device.db = devices.DeviceDatabase(json.load(open(json_path))) + + def test_upgrade_from_4x(self): + # Test the upgrade from devices with just a JSON database + self.setup_json_db('testdata/device-dbs/4.x-json') + self.check_json_import(self.device.db[u'audio']) + + def check_json_import(self, device_data): + """Check that we successfully imported the sqlite data.""" + sqlite_db = devices.load_sqlite_database(self.device.mount, 1024) + sqlite_db.cursor.execute("SELECT album from metadata") + db_info = database.DeviceDBInfo(sqlite_db, self.device.id) + importer = devicedatabaseupgrade.OldItemImporter(sqlite_db, + self.device.mount, + self.device.db) + importer.import_metadata() + metadata_manager = devices.make_metadata_manager(self.device.mount, + db_info, + self.device.id) + importer.import_device_items(metadata_manager) + + for path, item_data in device_data.items(): + # fill in data that's implicit with the dict + item_data['file_type'] = u'audio' + item_data['video_path'] = path + filename = utf8_to_filename(path.encode('utf-8')) + self.check_migrated_status(filename, db_info) + self.check_migrated_entries(filename, item_data, db_info) + self.check_migrated_device_item(filename, item_data, db_info) + # check that the title tag was deleted + self.assert_(not hasattr(item, 'title_tag')) + + def check_migrated_status(self, filename, device_db_info): + # check the MetadataStatus. For all items, we should be in the movie + # data stage. + status = metadata.MetadataStatus.get_by_path(filename, device_db_info) + self.assertEquals(status.current_processor, u'movie-data') + self.assertEquals(status.mutagen_status, status.STATUS_SKIP) + self.assertEquals(status.moviedata_status, status.STATUS_NOT_RUN) + self.assertEquals(status.echonest_status, status.STATUS_SKIP) + self.assertEquals(status.net_lookup_enabled, False) + + def check_migrated_entries(self, filename, item_data, device_db_info): + status = metadata.MetadataStatus.get_by_path(filename, device_db_info) + entries = metadata.MetadataEntry.metadata_for_status(status, + device_db_info) + entries = list(entries) + self.assertEquals(len(entries), 1) + entry = entries[0] + self.assertEquals(entry.source, 'old-item') + + columns_to_check = entry.metadata_columns.copy() + # handle drm specially + self.assertEquals(entry.drm, False) + columns_to_check.discard('drm') + + for name in columns_to_check: + device_value = item_data.get(name) + if device_value == '': + device_value = None + if getattr(entry, name) != device_value: + raise AssertionError( + "Error migrating %s (old: %s new: %s)" % + (name, device_value, getattr(entry, name))) + + def check_migrated_device_item(self, filename, item_data, device_db_info): + device_item = item.DeviceItem.get_by_path(filename, device_db_info) + for name, field in schema.DeviceItemSchema.fields: + if name == 'filename': + # need to special-case this one, since filename is not stored + # in the item_data dict + self.assertEquals(device_item.filename, filename) + continue + elif name == 'id': + continue # this column doesn't get migrated + elif name == 'net_lookup_enabled': + # this column should always be False. 
It dosen't get migrated + # from the old device item + self.assertEquals(device_item.net_lookup_enabled, False) + continue + old_value = item_data.get(name) + if (isinstance(field, schema.SchemaDateTime) and + old_value is not None): + old_value = datetime.datetime.fromtimestamp(old_value) + new_value = getattr(device_item, name) + if new_value != old_value: + raise AssertionError("Error converting field %s " + "(old: %r, new: %r)" % (name, old_value, + new_value)) + + def test_upgrade_from_5x(self): + # Test the upgrade from devices from Miro 5.x. These have an sqlite + # database, but only metadata on it, not the device_item table + self.setup_json_db('testdata/device-dbs/5.x-json') + device_sqlite = os.path.join(self.device.mount, '.miro', 'sqlite') + shutil.copyfile(resources.path('testdata/5.x-device-database.sqlite'), + device_sqlite) + self.check_json_import(self.device.db[u'audio']) + + def test_upgrade_from_5x_with_new_data(self): + # Test a tricky case, we upgraded a device database for miro 5.x then + # added new data to it. When the data in the sqlite database doesn't + # match the data in the JSON database we should prefer the data from + # sqlite. + self.setup_json_db('testdata/device-dbs/5.x-json') + new_album = u'New Album Title' + path = os.path.join(self.device.mount, '.miro', 'sqlite') + shutil.copyfile(resources.path('testdata/5.x-device-database.sqlite'), + path) + # tweak the sqlite database to simulate new data in it + connection = sqlite3.connect(path) + connection.execute("UPDATE metadata SET album=? ", (new_album,)) + connection.commit() + connection.close() + # do the same thing to the data that we are use to check the migrated + # items + device_data = self.device.db[u'audio'].copy() + for dct in device_data.values(): + dct['album'] = new_album + # check that the import code gets the value from the sqlite database, + # not the JSON one + self.check_json_import(device_data) + + +class DeviceSyncManagerTest(EventLoopTest): + def setUp(self): + EventLoopTest.setUp(self) + self.setup_feed() + self.setup_playlist() + self.setup_device() + + def setup_device(self): + self.device = testobjects.make_mock_device() + self.sync = self.device.database[u'sync'] = devices.DeviceDatabase() + self.sync[u'podcasts'] = devices.DeviceDatabase() + self.sync[u'playlists'] = devices.DeviceDatabase() + self.sync[u'podcasts'][u'all'] = True + self.sync[u'podcasts'][u'enabled'] = True + self.sync[u'podcasts'][u'expire'] = True + self.sync[u'podcasts'][u'items'] = [self.feed.url] + self.sync[u'playlists'][u'enabled'] = True + self.sync[u'playlists'][u'items'] = [self.playlist.title] + + def setup_feed(self): + self.feed, items = testobjects.make_feed_with_items( + 10, file_items=True, prefix='feed') + for i in items[:5]: + i.mark_watched() + i.signal_change() + self.feed_items = items + self.feed_unwatched_items = items[5:] + + def setup_playlist(self): + self.manual_feed = testobjects.make_manual_feed() + items = testobjects.add_items_to_feed(self.manual_feed, + 10, + file_items=True, + prefix='playlist-') + self.playlist = models.SavedPlaylist(u'playlist', + [i.id for i in items]) + self.playlist_items = items + + def check_get_sync_items(self, correct_items, correct_expired=None): + dsm = app.device_manager.get_sync_for_device(self.device) + items, expired = dsm.get_sync_items() + self.assertSameSet([i.id for i in items], + [i.id for i in correct_items]) + if correct_expired is None: + correct_expired = [] + # correct_expired are Item objects and the get_sync_items() returns + # 
+
+    def check_device_items(self, correct_items):
+        # check which DeviceItems we've created
+        view = models.DeviceItem.make_view(db_info=self.device.db_info)
+        self.assertSameSet([i.url for i in view],
+                           [i.url for i in correct_items])
+
+    def test_get_sync_items(self):
+        # Test that get_sync_items() picks the items to sync correctly
+        self.check_get_sync_items(self.feed_items + self.playlist_items)
+
+        self.sync[u'podcasts'][u'all'] = False
+        self.check_get_sync_items(self.feed_unwatched_items +
+                                  self.playlist_items)
+        self.sync[u'podcasts'][u'enabled'] = False
+        self.check_get_sync_items(self.playlist_items)
+        self.sync[u'playlists'][u'enabled'] = False
+        self.check_get_sync_items([])
+
+    def add_sync_items(self):
+        """Call get_sync_items() and feed the results to add_items().
+
+        This will sync all potential items to the device.
+
+        :returns: list of ItemInfos synced
+        """
+
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        infos, expired = dsm.get_sync_items()
+        dsm.start()
+        dsm.add_items(infos)
+        self.runPendingIdles()
+        return infos
+
+    def test_add_items(self):
+        # Test add_items()
+        self.check_device_items([])
+        infos = self.add_sync_items()
+        self.check_device_items(infos)
+
+    def test_get_sync_items_expired(self):
+        # Test that get_sync_items() calculates expired items correctly
+        self.add_sync_items()
+        for i in self.feed_items:
+            os.remove(i.filename)
+            i.expire()
+        self.check_get_sync_items([], self.feed_items)
+
+    def test_expire_items(self):
+        # Test expiring items
+
+        infos = self.add_sync_items()
+        self.check_device_items(infos)
+        # remove all items in our feed
+        for i in self.feed_items:
+            os.remove(i.filename)
+            i.expire()
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        # get_sync_items() should return the corresponding items on our
+        # device for the expired items
+        infos, expired = dsm.get_sync_items()
+        self.assertSameSet([i.url for i in self.feed_items],
+                           [i.url for i in expired])
+        # test sending the items through expire_items()
+        dsm.expire_items(expired)
+        self.check_device_items(self.playlist_items)
+
+    def set_feed_item_file_sizes(self, size):
+        for i in self.feed_items:
+            i.size = size
+            i.signal_change()
+
+    def set_playlist_item_file_size(self, size):
+        for i in self.playlist_items:
+            i.size = size
+            i.signal_change()
+
+    def setup_auto_fill_settings(self, feed_space, playlist_space):
+        self.sync[u'auto_fill'] = True
+        self.sync[u'auto_fill_settings'] = {
+            u'recent_music': 0.0,
+            u'random_music': 0.0,
+            u'most_played_music': 0.0,
+            u'new_playlists': playlist_space,
+            u'recent_podcasts': feed_space,
+        }
+
+    def check_get_auto_items(self, dsm, size, correct_feed_count,
+                             correct_playlist_count):
+        feed_item_count = 0
+        playlist_item_count = 0
+        for item_info in dsm.get_auto_items(size):
+            if item_info.feed_id == self.feed.id:
+                feed_item_count += 1
+            elif item_info.feed_id == self.manual_feed.id:
+                playlist_item_count += 1
+        self.assertEquals(feed_item_count, correct_feed_count)
+        self.assertEquals(playlist_item_count, correct_playlist_count)
+
+    def test_get_auto_items_auto_fill_off(self):
+        # With auto_fill off, we shouldn't get any items
+        self.set_feed_item_file_sizes(10)
+        self.set_playlist_item_file_size(10)
+        self.sync[u'auto_fill'] = False
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        self.check_get_auto_items(dsm, 1000000000, 0, 0)
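+
+    # Worked numbers for the two auto-fill tests below, assuming
+    # get_auto_items() splits its byte budget using the fractions from
+    # auto_fill_settings:
+    #   test_get_auto_items: 200 * 0.5 = 100 bytes per bucket, so
+    #     100 / 20 bytes per feed item     -> 5 feed items
+    #     100 / 10 bytes per playlist item -> 10 playlist items
+    #   test_get_auto_items_doesnt_half_fill_playlist: 100 * 0.5 = 50 bytes
+    #     only fits half the playlist, so the playlist bucket stays empty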
+
+    def test_get_auto_items(self):
+        # Test get_auto_items()
+        self.set_feed_item_file_sizes(20)
+        self.set_playlist_item_file_size(10)
+        # Allocate 100 bytes to both our playlist items and our feed items.
+        # This should be enough for the entire playlist and 1/2 of the feed
+        # items
+        self.setup_auto_fill_settings(feed_space=0.5, playlist_space=0.5)
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        self.check_get_auto_items(dsm, 200, 5, 10)
+
+    def test_get_auto_items_doesnt_half_fill_playlist(self):
+        # Test that get_auto_items() will only return items for a playlist
+        # if it can fill the entire playlist
+        self.set_feed_item_file_sizes(10)
+        self.set_playlist_item_file_size(10)
+        # When we allocate 50 bytes to each, we can only sync the feed items
+        self.setup_auto_fill_settings(feed_space=0.5, playlist_space=0.5)
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        self.check_get_auto_items(dsm, 100, 5, 0)
+        # When we allocate 100, we can sync both
+        self.setup_auto_fill_settings(feed_space=0.5, playlist_space=0.5)
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        self.check_get_auto_items(dsm, 200, 10, 10)
+
+    def test_auto_sync(self):
+        # Test that add_items() sets the auto_sync flag correctly
+
+        # setup some auto-sync settings
+        self.set_feed_item_file_sizes(10)
+        self.set_playlist_item_file_size(10)
+        self.setup_auto_fill_settings(feed_space=1.0, playlist_space=0.0)
+        dsm = app.device_manager.get_sync_for_device(self.device)
+        # get our sync items; they should all be from our feed
+        auto_sync_items = dsm.get_auto_items(10000)
+        for item in auto_sync_items:
+            self.assertEquals(item.feed_id, self.feed.id)
+        # sync some items
+        playlist_items = fetch_item_infos(app.db.connection,
+                                          [i.id for i in self.playlist_items])
+        dsm.start()
+        dsm.add_items(playlist_items)
+        dsm.add_items(auto_sync_items, auto_sync=True)
+        self.runPendingIdles()
+        # check that the device items got created and that auto_sync is set
+        # correctly
+        db_info = self.device.db_info
+        for item in self.playlist_items:
+            device_item = models.DeviceItem.get_by_url(item.url,
+                                                       db_info=db_info)
+            self.assertEquals(device_item.auto_sync, False)
+        for item in self.feed_items:
+            device_item = models.DeviceItem.get_by_url(item.url,
+                                                       db_info=db_info)
+            self.assertEquals(device_item.auto_sync, True)
+        # check auto_sync_view()
+        auto_sync_items = models.DeviceItem.auto_sync_view(db_info=db_info)
+        self.assertSameSet(set(i.title for i in self.feed_items),
+                           set(i.title for i in auto_sync_items))
+
+    def test_run_conversion(self):
+        # FIXME: Should write this one
+        pass
+
+class DeviceItemTest(MiroTestCase):
+    """Tests for the DeviceItem class."""
+    def setUp(self):
+        MiroTestCase.setUp(self)
+        self.device = testobjects.make_mock_device()
+        self.item_paths = ['foo.mp3', 'bar.mp3']
+        self.device_items = testobjects.make_device_items(self.device,
+                                                          *self.item_paths)
+
+    def test_select_paths(self):
+        # Test that select_paths returns the correct results and the correct
+        # types
+        result = item.DeviceItem.select_paths(self.device.db_info)
+        paths = [row[0] for row in result]
+        for path in paths:
+            self.assertEqual(type(path), PlatformFilenameType)
+        self.assertSameSet(paths, self.item_paths)
+
+    def test_path_sql_arguments(self):
+        # Check that DeviceItem converts paths to unicode when sending values
+        # to sqlite and does case-insensitive comparisons
+        path = PlatformFilenameType('Foo.mp3')
+        mock_make_view = self.patch_for_test(
+            'miro.item.DeviceItem.make_view', autospec=False)
+
+        # test get_by_path
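+        # Both lookups below are expected to wrap the path comparison in
+        # LOWER() on the SQL side and pass the path itself as unicode, so
+        # 'Foo.mp3' and 'foo.mp3' resolve to the same row.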
item.DeviceItem.get_by_path(path, self.device.db_info) + self.assertEquals(mock_make_view.call_count, 1) + sql, params = mock_make_view.call_args[0] + self.assertEquals(type(params[0]), unicode) + if 'LOWER' not in sql: + raise AssertionError("this doesn't look like lower case " + "comparison: %s " % sql) + self.assertEquals(params[0], 'Foo.mp3') + self.assertEquals(type(mock_make_view.call_args[0][1][0]), unicode) + mock_make_view.reset_mock() + + # test_items_for_paths + mock_make_view.return_value = [mock.Mock(filename='foo.mp3')] + item.DeviceItem.items_for_paths([path], self.device.db_info) + sql, params = mock_make_view.call_args[0] + self.assertEquals(type(params[0]), unicode) + if 'LOWER' not in sql: + raise AssertionError("this doesn't look like lower case " + "comparison: %s " % sql) + self.assertEquals(params[0], 'Foo.mp3') diff -Nru miro-4.0.4/lib/test/downloadertest.py miro-6.0/lib/test/downloadertest.py --- miro-4.0.4/lib/test/downloadertest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/downloadertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -5,67 +5,133 @@ from miro import eventloop from miro import models from miro import prefs -from miro.test.framework import EventLoopTest, uses_httpclient +from miro.dl_daemon import command +from miro.plat import resources +from miro.test import testobjects +from miro.test.framework import MiroTestCase -class DownloaderTest(EventLoopTest): +class DownloaderTest(MiroTestCase): """Test feeds that download things. """ - def setup_state(self): - self.url = u'http://pculture.org/feeds_test/unittest-feed-1.rss' - self.feed = models.Feed(self.url) - self.log_file = os.path.join(self.tempdir, 'miro-download-unit-tests') - app.config.set(prefs.DOWNLOADER_LOG_PATHNAME, self.log_file) - self.movies_dir = os.path.join(self.tempdir, 'movies-dir') - if not os.path.exists(self.movies_dir): - os.makedirs(self.movies_dir) - app.config.set(prefs.MOVIES_DIRECTORY, self.movies_dir) - - # initialize and start the downloader after fixing the MOVIES_DIRECTORY - downloader.init_controller() - downloader.startup_downloader() - - def tearDown(self): - downloader.shutdown_downloader( - lambda: self.stopEventLoop(abnormal=False)) - self.runEventLoop() - downloader.daemon_starter = None - EventLoopTest.tearDown(self) - - def run_eventloop_until_items(self): - tracker = self.feed.items.make_tracker() - tracker.connect('added', lambda view, obj: eventloop.shutdown()) - try: - self.runEventLoop() - finally: - tracker.unlink() - - def run_eventloop_until_download(self): - tracker = self.feed.downloaded_items.make_tracker() - tracker.connect('added', lambda view, obj: eventloop.shutdown()) - try: - self.runEventLoop() - finally: - tracker.unlink() - - def download_item(self): - self.feed.update() - self.run_eventloop_until_items() - self.assertEquals(self.feed.items.count(), 1) - i = list(self.feed.items)[0] - i.download() - self.run_eventloop_until_download() + def setUp(self): + MiroTestCase.setUp(self) + self.mock_grab_headers = self.patch_for_test( + 'miro.httpclient.grab_headers') + self.mock_try_scraping_url = self.patch_for_test( + 'miro.flashscraper.try_scraping_url') + self.mock_send = self.patch_for_test( + 'miro.dl_daemon.command.Command.send') + self.feed = testobjects.make_feed() + self.url = u'http://example.com/my-video.mp4' + self.item = testobjects.make_item(self.feed, u'my item', url=self.url) + self.downloading_path = os.path.join(self.tempdir, + 'Incomplete Downloads', + 'download.mp4') + self.final_path = os.path.join(self.tempdir, 
'download.mp4') + + def start_download(self): + self.item.download() + self.dlid = self.item.downloader.dlid + self.run_content_type_check() + self.run_flash_scrape() + self.run_daemon_commands() + + def run_content_type_check(self): + self.assertEquals(self.mock_grab_headers.call_count, 1) + url, callback, errback = self.mock_grab_headers.call_args[0] + self.assertEquals(url, self.url) + self.mock_grab_headers.reset_mock() + callback({ + 'status': 200, + 'updated-url': self.url, + 'content-type': 'video/mp4' + }) + + def run_flash_scrape(self): + self.assertEquals(self.mock_try_scraping_url.call_count, 1) + url, callback = self.mock_try_scraping_url.call_args[0] + self.assertEquals(url, self.url) + self.mock_try_scraping_url.reset_mock() + callback(self.url) + + def run_daemon_commands(self): + app.download_state_manager.send_updates() + self.assertEquals(self.mock_send.call_count, 1) + cmd = self.mock_send.call_args[0][0] + self.assertEquals(type(cmd), command.DownloaderBatchCommand) + arg = cmd.args[0] + self.assertEquals(arg.keys(), [self.dlid]) + self.assertEquals(arg[self.dlid][0], + command.DownloaderBatchCommand.RESUME) + self.assertEquals(arg[self.dlid][1]['url'], self.url) + + def update_status(self, download_progress, elapsed_time): + # define some arbitrary constants + total_size = 100000 + start_time = 1000 + # calculate values based on download_progress/elapsed_time + current_size = int(total_size * download_progress) + rate = current_size / elapsed_time + eta = int((total_size - current_size) / rate) + if download_progress < 1.0: + state = u'downloading' + end_time = None + filename = self.downloading_path + else: + end_time = start_time + elapsed_time + state = u'finished' + filename = self.final_path + + downloader.RemoteDownloader.update_status({ + 'dlid': self.dlid, + 'url': self.url, + 'state': state, + 'total_size': total_size, + 'current_size': current_size, + 'eta': eta, + 'rate': rate, + 'upload_size': 0, + 'filename': filename, + 'start_time': start_time, + 'end_time': end_time, + 'short_filename': 'download.mp4', + 'reason_failed': None, + 'short_reason_failed': None, + 'type': None, + 'retry_time': None, + 'retry_count': None, + }, cmd_done=True) + + def check_download_in_progress(self): + self.assertEquals(self.item.downloader.get_state(), u'downloading') + self.assertEquals(self.item.get_state(), u'downloading') + + def check_download_finished(self): + self.assertEquals(self.item.downloader.get_state(), u'finished') + self.assertEquals(self.item.get_state(), u'newly-downloaded') + + def run_download(self): + self.start_download() + self.check_download_in_progress() + self.update_status(0.3, 10) + self.check_download_in_progress() + self.update_status(0.5, 20) + self.check_download_in_progress() + self.update_status(0.9, 30) + self.check_download_in_progress() + with open(self.final_path, 'w') as f: + f.write("bogus data") + self.update_status(1.0, 40) + self.check_download_finished() - @uses_httpclient def test_download(self): - self.setup_state() - self.download_item() + self.run_download() - @uses_httpclient def test_delete(self): - self.setup_state() - self.download_item() - self.assertEquals(self.feed.items.count(), 1) - list(self.feed.items)[0].expire() + self.run_download() + self.assertEquals(self.feed.downloaded_items.count(), 1) + self.item.expire() + self.assertEquals(self.feed.downloaded_items.count(), 0) ## def test_resume(self): ## # FIXME - implement this diff -Nru miro-4.0.4/lib/test/extensiontest.py miro-6.0/lib/test/extensiontest.py --- 
miro-4.0.4/lib/test/extensiontest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/extensiontest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,170 @@ +import ConfigParser +import logging + +from miro import api +from miro import app + +from miro.test.framework import MiroTestCase +from miro.test import mock + +# define stubs to allow us to use this module as an extension module +def unload(): + pass + +ext_context = None +def load(context): + global ext_context + ext_context = context + +class ExtensionTestBase(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.reset_ext_storage_manager() + + def tearDown(self): + self.reset_ext_storage_manager() + MiroTestCase.tearDown(self) + + def reset_ext_storage_manager(self): + global ext_context + ext_context = None + + def make_extension_config(self): + """Generate a SafeConfigParser object to use as a test extension.""" + config = ConfigParser.SafeConfigParser() + config.add_section('extension') + config.set('extension', 'name', 'Unittest Extension') + config.set('extension', 'version', 'core') + config.set('extension', 'enabled_by_default', 'False') + config.set('extension', 'module', 'miro.test.extensiontest') + return config + + def create_extension(self): + """Write a .miroext file using a SafeConfigParser object.""" + config = self.make_extension_config() + self.ext_path, fp = self.make_temp_path_fileobj('.miroext') + config.write(fp) + fp.close() + app.extension_manager.load_extensions() + + def load_extension(self): + ext = app.extension_manager.get_extension_by_name( + "Unittest Extension") + app.extension_manager.import_extension(ext) + app.extension_manager.load_extension(ext) + self.storage_manager = ext_context.storage_manager + + def unload_extension(self): + ext = app.extension_manager.get_extension_by_name( + "Unittest Extension") + app.extension_manager.unload_extension(ext) + +class ExtensionStorageTest(ExtensionTestBase): + # test extensions storing data + def setUp(self): + ExtensionTestBase.setUp(self) + # load our extension + self.create_extension() + self.load_extension() + + def check_simple_set(self, key, value): + self.storage_manager.set_value(key, value) + stored_value = self.storage_manager.get_value(key) + if stored_value != value: + raise AssertionError("Error storing %s: set %r, got %r" % (key, + value, stored_value)) + + def test_simple_store(self): + self.check_simple_set('a', 'foo') + self.check_simple_set('b', 200) + self.check_simple_set('c', 3.0) + # try some unicode values + self.check_simple_set(u'd', [1, 2, 'three']) + self.check_simple_set(u'e\u03a0', {'key': 'value'}) + # try clearing a value + self.storage_manager.clear_value("a") + self.assertRaises(KeyError, self.storage_manager.get_value, 'a') + # test key_exists() + self.assertEquals(self.storage_manager.key_exists('b'), True) + self.assertEquals(self.storage_manager.key_exists('c'), True) + self.assertEquals(self.storage_manager.key_exists('z'), False) + self.assertEquals(self.storage_manager.key_exists('a'), False) + + def test_sqlite_store(self): + # test that the sqlite connection works + conn = self.storage_manager.get_sqlite_connection() + # all we need to test is if we have a real sqlite connection. 
Let's
+        # assume that if we can run a few SQL commands, we're good
+        cursor = conn.cursor()
+        cursor.execute("CREATE TABLE foo(a, b)")
+        cursor.execute("INSERT INTO foo (a, b) VALUES (?, ?)", (1, 'two'))
+        cursor.execute("INSERT INTO foo (a, b) VALUES (?, ?)", (3, 'four'))
+        cursor.execute("SELECT a, b FROM foo ORDER BY a ASC")
+        self.assertEquals(cursor.fetchall(), [(1, 'two'), (3, 'four')])
+
+class ExtensionHookTest(ExtensionTestBase):
+    def setUp(self):
+        ExtensionTestBase.setUp(self)
+        # Make a Mock object to use as a hook function.  Nest it inside
+        # another Mock object to test the hook parser better
+        global hook_holder
+        hook_holder = mock.Mock()
+        hook_holder.hook_func = mock.Mock()
+        self.mock_hook = hook_holder.hook_func
+        # make our extension
+        self.create_extension()
+
+    def make_extension_config(self):
+        config = ExtensionTestBase.make_extension_config(self)
+        # add hooks
+        config.add_section('hooks')
+        config.set('hooks', 'test_hook',
+                   'miro.test.extensiontest:hook_holder.hook_func')
+        return config
+
+    def test_hook_invoke(self):
+        # test calling hook functions
+        self.load_extension()
+        # setup our mock function to return a value
+        self.mock_hook.return_value = 123
+        # invoke the hook
+        results1 = api.hook_invoke('test_hook', 1, 2, foo=3)
+        results2 = api.hook_invoke('test_hook', 4, 5, bar=6)
+        # check that the function was called and the results are correct
+        self.assertEquals(self.mock_hook.call_count, 2)
+        self.assertEquals(self.mock_hook.call_args_list[0],
+                          ((1, 2), {'foo': 3}))
+        self.assertEquals(self.mock_hook.call_args_list[1],
+                          ((4, 5), {'bar': 6}))
+        self.assertEquals(results1, [123])
+        self.assertEquals(results2, [123])
+
+    def test_hook_exception(self):
+        # test hook functions raising exceptions
+        self.load_extension()
+        self.log_filter.reset_records()
+        # setup our mock function to throw an error
+        self.mock_hook.side_effect = ValueError("Bad Value")
+        # invoke the hook
+        with self.allow_warnings():
+            results = api.hook_invoke('test_hook')
+        # check that the error isn't included in the results and that we
+        # logged the exception
+        self.log_filter.check_record_count(1)
+        self.log_filter.check_record_level(logging.ERROR)
+        self.assertEquals(results, [])
+
+    def test_unloaded_extension(self):
+        # check that unloaded extensions don't provide hooks
+        # before we load our extension, the hook shouldn't be registered
+        # invoking the hook shouldn't do anything now
+        results = api.hook_invoke('test_hook')
+        self.assertEquals(self.mock_hook.call_count, 0)
+        self.assertEquals(results, [])
+        # if we load, then unload our extension, the hook shouldn't be
+        # registered
+        self.load_extension()
+        self.unload_extensions()
+        results = api.hook_invoke('test_hook')
+        self.assertEquals(self.mock_hook.call_count, 0)
+        self.assertEquals(results, [])
diff -Nru miro-4.0.4/lib/test/fastresumetest.py miro-6.0/lib/test/fastresumetest.py
--- miro-4.0.4/lib/test/fastresumetest.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/test/fastresumetest.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,52 @@
+import errno
+import sys
+import os
+import tempfile
+import shutil
+
+from miro import prefs
+from miro import app
+from miro.test.framework import MiroTestCase
+from miro.dl_daemon.download import (save_fast_resume_data,
+                                     load_fast_resume_data,
+                                     generate_fast_resume_filename)
+
+FAKE_INFO_HASH = 'PINKPASTA'
+FAKE_RESUME_DATA = 'BEER'
+
+class FastResumeTest(MiroTestCase):
+    # test_resume_data: Test easy load/store.
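+    # Taken together, these tests pin down the helpers' contract:
+    # save_fast_resume_data(info_hash, data) writes data under
+    # generate_fast_resume_filename(info_hash), load_fast_resume_data()
+    # reads it back (returning None on any error), and a failed save
+    # removes the stale file instead of leaving it behind.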
+    def test_resume_data(self):
+        save_fast_resume_data(FAKE_INFO_HASH, FAKE_RESUME_DATA)
+        data = load_fast_resume_data(FAKE_INFO_HASH)
+        self.assertEquals(FAKE_RESUME_DATA, data)
+
+    # Precreate the file and lock it down so the file open fails.
+    def test_save_fast_resume_data_bad(self):
+        # Grab the filename that will be used
+        filename = generate_fast_resume_filename(FAKE_INFO_HASH)
+        # Create the file
+        os.makedirs(os.path.dirname(filename))
+        f = open(filename, 'wb')
+        f.close()
+        os.chmod(filename, 0)
+        with self.allow_warnings():
+            save_fast_resume_data(FAKE_INFO_HASH, FAKE_RESUME_DATA)
+        # We did not lock down the directory, so check that
+        # save_fast_resume_data nuked the file for us.
+        self.assertFalse(os.path.exists(filename))
+
+    # Try to load an unreadable file so the load fails.
+    def test_load_fast_resume_data_bad(self):
+        # Grab the filename that will be used
+        filename = generate_fast_resume_filename(FAKE_INFO_HASH)
+        # Create the file
+        os.makedirs(os.path.dirname(filename))
+        f = open(filename, 'wb')
+        f.close()
+        old_mode = os.stat(filename).st_mode
+        os.chmod(filename, 0)
+        with self.allow_warnings():
+            data = load_fast_resume_data(FAKE_INFO_HASH)
+        self.assertEquals(data, None)
+        os.chmod(filename, old_mode)
diff -Nru miro-4.0.4/lib/test/feedparsertest.py miro-6.0/lib/test/feedparsertest.py
--- miro-4.0.4/lib/test/feedparsertest.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/test/feedparsertest.py	2013-04-05 16:02:42.000000000 +0000
@@ -5,7 +5,7 @@
 from miro import feedparserutil
 from miro.item import FeedParserValues
 from miro.plat import resources
-from miro.test.framework import MiroTestCase
+from miro.test.framework import MiroTestCase, dynamic_test
 
 FPTESTINPUT = resources.path("testdata/feedparsertests/feeds")
 FPTESTOUTPUT = resources.path("testdata/feedparsertests/output")
@@ -68,6 +68,7 @@
         self.assertEqual(a.equal(d), False)
         self.assertEqual(d.equal(a), False)
 
+@dynamic_test(expected_cases=9)
 class FeedParserTest(MiroTestCase):
     def eq_output(self, str1, str2):
         # we do this to allow the strings to match on windows where
@@ -108,16 +109,15 @@
         # this should kick up a KeyError and NOT a TypeError
         self.assertRaises(KeyError, lambda: d['url'])
 
-# this creates a separate test method in FeedParserTest for each test
-# in the FPTESTINPUT directory.  makes it easier to debug tests that
-# go awry, makes it easier to see test progress from the command line
-# (lots of little tests rather than one big test), and increases the
-# test count appropriately.
-def _test_closure(mem): - def _actual_test(self): - d = _parse_feed(mem) + @classmethod + def generate_tests(cls): + for path in os.listdir(FPTESTINPUT): + yield (path,) + + def dynamic_test_case(self, path): + d = _parse_feed(path) d = feedparserutil.convert_datetime(d) - fp = open(os.path.join(FPTESTOUTPUT, "%s.output" % mem), "r") + fp = open(os.path.join(FPTESTOUTPUT, "%s.output" % path), "r") output = fp.read() fp.close() if 'entries' in d: @@ -126,12 +126,6 @@ d = d['bozo_exception'] self.eq_output(pprint.pformat(d), output) - return _actual_test - -for mem in os.listdir(FPTESTINPUT): - setattr(FeedParserTest, 'test_%s' % mem.replace(".", ""), - _test_closure(mem)) - class FeedParserValuesTest(unittest.TestCase): def test_empty(self): fpv = FeedParserValues({}) @@ -140,10 +134,10 @@ self.assertEquals(fpv.data["entry_title"], None) self.assertEquals(fpv.data["thumbnail_url"], None) self.assertEquals(fpv.data["entry_description"], u"") - self.assertEquals(fpv.data["link"], u"") - self.assertEquals(fpv.data["payment_link"], u"") - self.assertEquals(fpv.data["comments_link"], u"") - self.assertEquals(fpv.data["url"], u"") + self.assertEquals(fpv.data["link"], None) + self.assertEquals(fpv.data["payment_link"], None) + self.assertEquals(fpv.data["comments_link"], None) + self.assertEquals(fpv.data["url"], None) self.assertEquals(fpv.data["enclosure_size"], None) self.assertEquals(fpv.data["enclosure_type"], None) self.assertEquals(fpv.data["enclosure_format"], None) diff -Nru miro-4.0.4/lib/test/feedtest.py miro-6.0/lib/test/feedtest.py --- miro-4.0.4/lib/test/feedtest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/feedtest.py 2013-04-05 16:02:42.000000000 +0000 @@ -203,68 +203,6 @@ self.assertEqual(len(items), 1) my_feed.remove() -class MultiFeedExpireTest(FeedTestCase): - def write_files(self, subfeed_count, feed_item_count): - all_urls = [] - self.filenames = [] - - content = self.make_feed_content(feed_item_count) - for i in xrange(subfeed_count): - filename = self.make_temp_path() - open(filename, 'wb').write(content) - all_urls.append(u"file://%s" % filename) - self.filenames.append(filename) - - self.url = u'dtv:multi:' + ','.join(all_urls) + "," + 'testquery' - - def rewrite_files(self, feed_item_count): - content = self.make_feed_content(feed_item_count) - for filename in self.filenames: - open(filename, 'wb').write(content) - - def make_feed_content(self, entry_count): - # make a feed with a new item and parse it - items = [] - counter = 0 - - items.append(""" - - - Downhill Battle Pics - http://downhillbattle.org/ - Downhill Battle is a non-profit organization working to \ -support participatory culture and build a fairer music industry. - Wed, 16 Mar 2005 12:03:42 EST -""") - - for x in range(entry_count): - counter += 1 - items.append("""\ - - Bumper Sticker - guid-%s - - I'm a musician and I support filesharing. 
- -""" % (counter, counter)) - - items.append(""" - -""") - return "".join(items) - - def test_multi_feed_expire(self): - # test what happens when a RSSMultiFeed has feeds that - # reference the same item, and they are truncated at the same - # time (#11756) - - self.write_files(5, 10) # 5 feeds containing 10 identical items - self.feed = self.make_feed() - app.config.set(prefs.TRUNCATE_CHANNEL_AFTER_X_ITEMS, 4) - app.config.set(prefs.MAX_OLD_ITEMS_DEFAULT, 5) - self.rewrite_files(1) # now only 5 items in each feed - self.update_feed(self.feed) - class EnclosureFeedTestCase(FeedTestCase): def setUp(self): FeedTestCase.setUp(self) @@ -523,12 +461,5 @@ self.save_then_restore_db() self.assertEquals(self.item.get_rss_id(), None) - def test_change_title(self): - entry = self.parsed_feed.entries[0] - self.item.set_title(u"new title") - self.save_then_restore_db() - self.assertEquals(self.item.get_title(), "new title") - - if __name__ == "__main__": unittest.main() diff -Nru miro-4.0.4/lib/test/fileobjecttest.py miro-6.0/lib/test/fileobjecttest.py --- miro-4.0.4/lib/test/fileobjecttest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/fileobjecttest.py 2013-04-05 16:02:42.000000000 +0000 @@ -4,7 +4,7 @@ from miro.test.framework import MiroTestCase from miro import fileobject -class Test_url_encode_dict(MiroTestCase): +class FileObjectTest(MiroTestCase): def test_type(self): filename = fileobject.FilenameType("/foo/bar") if sys.platform == 'win32': diff -Nru miro-4.0.4/lib/test/filetagstest.py miro-6.0/lib/test/filetagstest.py --- miro-4.0.4/lib/test/filetagstest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/filetagstest.py 2013-04-05 16:02:42.000000000 +0000 @@ -2,87 +2,105 @@ writing - to be implemented) of metadata tags. """ -from miro.test.framework import MiroTestCase +try: + import simplejson as json +except ImportError: + import json -from os import path +from miro.test.framework import MiroTestCase, dynamic_test -from miro.plat import resources -from miro.filetags import read_metadata - - -# NOTE: moviedatatest has expanded to make this redundant, but there's some data -# here that's not there yet. TODO: merge useful parts of this into moviedatatest -# and scrap this module. 
+import shutil +from os import path, stat +from miro.plat import resources +from miro.plat.utils import PlatformFilenameType +from miro.filetags import calc_cover_art_filename, process_file -# FIXME: cover art detection is currently tested, but content is not - +@dynamic_test(expected_cases=8) class FileTagsTest(MiroTestCase): - def setUp(self): - MiroTestCase.setUp(self) + # mp3-2.mp3: + # FIXME: losing data - TPE2="Chicago Public Media" - def assert_file_data(self, test, - mediatype='*', duration='*', data='*', cover_art='*'): - if data != '*': - data = dict((unicode(key), value) for key, value in data.iteritems()) - filename = resources.path(path.join('testdata', 'metadata', test)) - expected = (mediatype, duration, data, cover_art) - for observed, expected in zip(read_metadata(filename, True), expected): - if expected != '*': - self.assertEquals(observed, expected) - - def test_video_with_ogg_extension(self): - self.assert_file_data('theora_with_ogg_extension.ogg', mediatype='video') - - def test_mp3(self): - mp3_0 = dict( - album=u'Increase The Dosage', - artist=u'Revolution Void', - genre=u'Blues', - title=u'Invisible Walls', - track=1, - ) - self.assert_file_data('mp3-0.mp3', 'audio', 1055, mp3_0, None) - mp3_1 = dict( - album=u'The Heart EP', - artist=u'Ckz', - title=u'Race Lieu', - track=2, - year=2008, - ) - self.assert_file_data('mp3-1.mp3', 'audio', 1055, mp3_1, None) - mp3_2 = dict( - # FIXME: losing data - TPE2="Chicago Public Media" - artist=u'This American Life', # TPE1 - genre=u'Podcast', - title=u'#426: Tough Room 2011', - year=2011, - ) - self.assert_file_data('mp3-2.mp3', 'audio', 1066, mp3_2, None) - - def test_mp4(self): - mp4_0 = dict( - title=u'Africa: Cash for Climate Change?', - ) - self.assert_file_data('mp4-0.mp4', 'video', 312308, mp4_0, None) - - def test_m4v_drm(self): - m4v = dict( - # FIXME: losing data - CPRT='\xa9 2002 Discovery Communications Inc.' - # FIXME: losing data - DESC='When it comes to sorting out some'... - # FIXME: losing data - LDES='When it comes to sorting out some'... - # FIXME: we should probably not include an album_artist field when - # FIXME: we should probably not include an album_artist field when - # its origin is the same field as artist - # FIXME: losing data - TVSH='The Most Extreme' - # FIXME: losing data - TVNN='Animal Planet' - album=u'The Most Extreme, Season 1', - album_artist=u'The Most Extreme', - artist=u'The Most Extreme', - drm=True, - genre=u'Nonfiction', - title=u'Thinkers', - track=10, - year=2000, - ) - self.assert_file_data('drm.m4v', 'video', 2668832, m4v, True) + # drm.m4v: + # FIXME: losing data - CPRT='\xa9 2002 Discovery Communications Inc.' + # FIXME: losing data - DESC='When it comes to sorting out some'... + # FIXME: losing data - LDES='When it comes to sorting out some'... 
+
+    @classmethod
+    def generate_tests(cls):
+        results_path = resources.path(path.join('testdata', 'filetags.json'))
+        return json.load(open(results_path)).iteritems()
+
+    def dynamic_test_case(self, filename, expected):
+        # make all keys unicode
+        #expected = dict((unicode(key), value)
+                        #for key, value in expected.iteritems())
+        filename = resources.path(path.join('testdata', 'metadata', filename))
+        results = process_file(filename, self.tempdir)
+        # cover art needs to be handled specially
+        cover_art = expected.pop('cover_art')
+        if cover_art:
+            # cover art should be stored using the album name as its filename
+            correct_path = path.join(self.tempdir, results['album'])
+            self.assertEquals(results.pop('cover_art'), correct_path)
+            self.assertEquals(results.pop('created_cover_art'), True)
+        else:
+            self.assert_('cover_art' not in results)
+        if 'duration' in expected:
+            expected_duration = expected.pop('duration')
+            result_duration = results.pop('duration')
+            self.assertClose(result_duration, expected_duration)
+
+        # for the rest, we just compare the dicts
+        self.assertDictEquals(results, expected)
+
+    def test_shared_cover_art(self):
+        # test what happens when 2 files with cover art share the same album.
+        # In this case the first one we process should create the cover art
+        # file and the next one should just skip cover art processing.
+        src_path = resources.path(path.join('testdata', 'metadata',
+                                            'drm.m4v'))
+        dest_paths = []
+        for x in range(3):
+            new_filename = 'drm-%s.m4v' % x
+            dest_path = path.join(self.tempdir, new_filename)
+            shutil.copyfile(src_path, dest_path)
+            dest_paths.append(dest_path)
+
+        # process the first file
+        result_1 = process_file(dest_paths[0], self.tempdir)
+        self.assertEquals(result_1['cover_art'],
+                          path.join(self.tempdir, result_1['album']))
+        self.assert_(path.exists(result_1['cover_art']))
+        org_mtime = stat(result_1['cover_art']).st_mtime
+
+        # process the rest, they should fill in the cover_art value, but
+        # not rewrite the file
+        for dup_path in dest_paths[1:]:
+            results = process_file(dup_path, self.tempdir)
+            self.assertEquals(results['cover_art'],
+                              result_1['cover_art'])
+            self.assert_(path.exists(results['cover_art']))
+            self.assertEquals(stat(results['cover_art']).st_mtime,
+                              org_mtime)
+
+@dynamic_test()
+class TestCalcCoverArtFilename(MiroTestCase):
+    @classmethod
+    def generate_tests(cls):
+        return [
+            (u'Simple Album Name', 'Simple Album Name'),
+            (u'Bad/File\0Parts<>:"\\|?*',
+             'Bad%2FFile%00Parts%3C%3E%3A%22%5C%7C%3F%2A'),
+            (u'Extended Chars\xf3', 'Extended Chars%C3%B3'),
+        ]
+
+    def dynamic_test_case(self, album_name, correct_filename):
+        self.assertEquals(calc_cover_art_filename(album_name),
+                          correct_filename)
+        self.assert_(isinstance(calc_cover_art_filename(album_name),
+                                PlatformFilenameType))
diff -Nru miro-4.0.4/lib/test/filetypestest.py miro-6.0/lib/test/filetypestest.py
--- miro-4.0.4/lib/test/filetypestest.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/test/filetypestest.py	2013-04-05 16:02:42.000000000 +0000
@@ -54,9 +54,9 @@
         self.assertEquals(filetypes.is_subtitle_filename(test), True)
 
     def test_item_file_type_for_filename(self):
-        for test in (("foo", None),
+        for test in (("foo", u'other'),
                      ("foo.flv", u'video'),
-                     ("", None),
+                     ("", u'other'),
                      ("foo.ogg", u'audio'),
                      ("foo.jpg", u'other'),
                      ("foo.foo.mp3", u'audio')):
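The filetypes hunk above makes item_file_type_for_filename() total: filenames with unknown or missing extensions now classify as u'other' rather than None. A minimal sketch of the behavior the updated test expects; the extension sets here are illustrative stand-ins, not miro's actual lists:

    import os.path

    VIDEO_EXTENSIONS = set([u'.flv', u'.mp4', u'.m4v'])  # illustrative subset
    AUDIO_EXTENSIONS = set([u'.mp3', u'.ogg'])           # illustrative subset

    def item_file_type_for_filename(filename):
        # Unknown or empty extensions fall through to u'other' (was None).
        ext = os.path.splitext(filename)[1].lower()
        if ext in VIDEO_EXTENSIONS:
            return u'video'
        elif ext in AUDIO_EXTENSIONS:
            return u'audio'
        return u'other'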
diff -Nru miro-4.0.4/lib/test/flashscrapertest.py miro-6.0/lib/test/flashscrapertest.py --- miro-4.0.4/lib/test/flashscrapertest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/flashscrapertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -24,8 +24,8 @@ FlashScraperBase.setUp(self) self._response = None - def scrape_callback(self, new_url, contentType=None, title=None): - self._response = (new_url, contentType, title) + def scrape_callback(self, new_url, content_type=None, title=None): + self._response = (new_url, content_type, title) self.stopEventLoop(abnormal=False) @uses_httpclient @@ -35,3 +35,34 @@ self.scrape_callback) self.run_event_loop() # print self._response + +class VimeoScraper(FlashScraperBase): + def setUp(self): + FlashScraperBase.setUp(self) + self._response = None + + def scrape_callback(self, new_url, content_type=None, title=None): + self._response = (new_url, content_type, title) + self.stopEventLoop(abnormal=False) + + @uses_httpclient + def test_scrape(self): + flashscraper.try_scraping_url( + u'http://vimeo.com/42231616', + self.scrape_callback) + self.run_event_loop() + self.assertNotEqual(self._response, None) + self.assertNotEqual(self._response[0], None) + self.assertEqual(type(self._response[1]), unicode) + self.assertEqual(self._response[1], u'video/mp4') + + @uses_httpclient + def test_scrape_moogaloop(self): + flashscraper.try_scraping_url( + u'http://vimeo.com/moogaloop.swf?clip_id=42231616', + self.scrape_callback) + self.run_event_loop() + self.assertNotEqual(self._response, None) + self.assertNotEqual(self._response[0], None) + self.assertEqual(type(self._response[1]), unicode) + self.assertEqual(self._response[1], u'video/mp4') diff -Nru miro-4.0.4/lib/test/framework.py miro-6.0/lib/test/framework.py --- miro-4.0.4/lib/test/framework.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/framework.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,33 +1,47 @@ +import datetime +import contextlib import os import logging +import random import unittest import tempfile import threading import shutil import functools +from miro import api from miro import app from miro import config +from miro import data from miro import database +from miro import devices from miro import eventloop +from miro import extensionmanager from miro import feed -from miro import feedparserutil from miro import downloader from miro import httpauth from miro import httpclient +from miro import iconcache from miro import item -from miro import iteminfocache -from miro import moviedata +from miro import itemsource +from miro import messages from miro import util from miro import prefs +from miro import schema from miro import searchengines +from miro import sharing from miro import signals from miro import storedatabase +from miro import threadcheck from time import sleep from miro import models from miro import workerprocess +from miro.data import itemtrack +from miro.fileobject import FilenameType +from miro.test import mock from miro.test import testhttpserver +from miro.test import testobjects util.setup_logging() @@ -46,6 +60,14 @@ import sys +class MatchAny(object): + """Object that matches anything. + + Useful for creating a wildcard when calling Mock.assert_called_with(). 
+ """ + def __eq__(self, other): + return True + VALID_PLATFORMS = ['linux', 'win32', 'osx'] PLATFORM_MAP = { 'osx': 'osx', @@ -136,7 +158,7 @@ else: return identity -class HadToStopEventLoop(Exception): +class HadToStopEventLoop(StandardError): pass class DummyMainFrame: @@ -180,6 +202,8 @@ def failed_soft(self, when, details, with_exception=False): # FIXME: should have some way to make this turn into an exception if not self.failed_soft_okay: + print "failed_soft called in DummyController" + print details raise AssertionError("failed_soft called in DummyController") self.failed_soft_count += 1 @@ -195,7 +219,7 @@ # if there's already a curl_manager, then this is probably # being called in a nested context, so this iteration is not # in charge of starting and stopping the httpclient - if httpclient.curl_manager: + if not isinstance(httpclient.curl_manager, mock.Mock): return fun(*args, **kwargs) httpclient.start_thread() @@ -214,20 +238,28 @@ if name.startswith("test"): class_dict[name] = decorator(getattr(cls, name)) +class UnexpectedLogError(Exception): + # Note: this purposely doesn't subclass StandardError, since we don't want + # code to catch this one + pass + class LogFilter(logging.Filter): """Log filter that turns logging messages into exceptions.""" def __init__(self): - self.exception_level = logging.CRITICAL + self.exception_level = logging.WARN self.records = [] + self.raised_error = False def set_exception_level(self, level): """Set the min logging level where we should throw an exception""" self.exception_level = level def filter(self, record): - if record.levelno >= self.exception_level: - raise AssertionError("Unexpected logging: %s" % record) + if record.levelno >= self.exception_level and not self.raised_error: + self.raised_error = True + raise UnexpectedLogError("Unexpected logging: %s" % + logging.Formatter().format(record)) else: self.records.append(record) return False @@ -242,17 +274,6 @@ for rec in self.records: assert rec.levelno == level -class FakeMetadataProgressUpdater(object): - def __init__(self): - self.paths_processed = set() - - def path_processed(self, path): - self.paths_processed.add(path) - - def will_process_path(self, path): - # we should test this, but for now it's just a stub - pass - class MiroTestCase(unittest.TestCase): def setUp(self): self.setup_log_filter() @@ -262,26 +283,35 @@ self.setup_downloader_log() models.initialize() app.in_unit_tests = True + app.device_manager = devices.DeviceManager() + models.Item._path_count_tracker.reset() + testobjects.test_started(self) # Tweak Item to allow us to make up fake paths for FileItems models.Item._allow_nonexistent_paths = True # setup the deleted file checker item.setup_deleted_checker() item.start_deleted_checker() - # setup movie data stuff - self.metadata_progress_updater = FakeMetadataProgressUpdater() - app.metadata_progress_updater = self.metadata_progress_updater - moviedata.movie_data_updater = moviedata.MovieDataUpdater() # Skip worker proccess for feedparser feed._RUN_FEED_PARSER_INLINE = True + signals.system.connect('new-dialog', self.handle_new_dialog) # reload config and initialize it to temprary config.load_temporary() + self.setup_config_watcher() self.platform = app.config.get(prefs.APP_PLATFORM) - database.set_thread(threading.currentThread()) - database.setup_managers() + self.set_temp_support_directory() + # setup icon_cache_updater + app.icon_cache_updater = iconcache.IconCacheUpdater() + # for the unittests, both the database code and any UI code should run + # in the 
main thread. + threadcheck.set_eventloop_thread(threading.currentThread()) + threadcheck.set_ui_thread(threading.currentThread()) self.raise_db_load_errors = True app.db = None + self.allow_db_upgrade_error_dialog = False self.reload_database() - self.setup_new_item_info_cache() + self.setup_dummy_message_handlers() + self.setup_dummy_curl_manager() + item.setup_metadata_manager(self.tempdir) searchengines._engines = [ searchengines.SearchEngineInfo(u"all", u"Search All", u"", -1) ] @@ -295,23 +325,55 @@ httpauth.init() # reset any logging records from our setUp call() self.log_filter.reset_records() + # create an extension manager that searches our tempdir for extensions + # NOTE: this doesn't actually load any extensions, since the directory + # is currently empty. If you want to use the ExtensionManager you + # need to put a .miroext file in the tempdir then call + # app.extension_manager.load_extension() + app.extension_manager = extensionmanager.ExtensionManager( + [self.tempdir], []) + # Create a download state object (but don't start the downloader + # for the individual test unless necessary. In this case we override + # the class to run the downloader). + app.download_state_manager = downloader.DownloadStateManager() + self.mock_dldaemon = mock.Mock() + downloader.RemoteDownloader.dldaemon = self.mock_dldaemon + self.mock_patchers = [] + + def setup_config_watcher(self): + app.backend_config_watcher = config.ConfigWatcher( + lambda func, *args: func(*args)) + + def set_temp_support_directory(self): + self.sandbox_support_directory = os.path.join(self.tempdir, 'support') + if not os.path.exists(self.sandbox_support_directory): + os.makedirs(self.sandbox_support_directory) + app.config.set(prefs.SUPPORT_DIRECTORY, self.sandbox_support_directory) def on_windows(self): return self.platform == "windows" def tearDown(self): + testobjects.test_stopped(self) + for patcher in self.mock_patchers: + patcher.stop() + self.destroy_connection_pools() # shutdown workerprocess if we started it for some reason. workerprocess.shutdown() + workerprocess._subprocess_manager = \ + workerprocess.WorkerSubprocessManager() + workerprocess._miro_task_queue.reset() self.reset_log_filter() signals.system.disconnect_all() util.chatter = True self.stop_http_server() - del app.metadata_progress_updater + + # unload extensions + self.unload_extensions() # Remove any leftover database app.db.close() app.db = None - database.setup_managers() # Remove anything that may have been accidentally queued up eventloop._eventloop = eventloop.EventLoop() @@ -319,6 +381,57 @@ # Remove tempdir shutil.rmtree(self.tempdir, onerror=self._on_rmtree_error) + def destroy_connection_pools(self): + connection_pools = app.connection_pools + if connection_pools is not None: + for pool in connection_pools.get_all_pools(): + pool.destroy() + app.connection_pools = None + + def handle_new_dialog(self, obj, dialog): + """Handle the new-dialog signal + + Subclasses must implement this if they expect to see a dialog. + """ + raise AssertionError("Unexpected dialog: %s" % dialog) + + def patch_function(self, function_name, new_function): + """Use Mock to replace an existing function for a single test. + + function_name should be in the form "full.module.name.object". For + example "miro.startup.startup" + + This can also be used on a class object in order to return a different + object, if we only use class objects as factory functions. 
+ + :param function_name: name of the function to patch + :param new_function: function object to replace it with + :returns: Mock object used to patch + """ + mock_object = self.patch_for_test(function_name) + mock_object.side_effect = new_function + return mock_object + + def patch_for_test(self, object_name, autospec=True): + """Use Mock to replace a function/class/object with a mock object. + + We will unpatch the object during teardown + + :param object_name: name of the object to patch + :param autospec: autospec parameter to pass to mock.patch. Generally + this should be True, except for class methods. + :returns: Mock object used to patch + """ + patcher = mock.patch(object_name, autospec=autospec) + mock_object = patcher.start() + self.mock_patchers.append(patcher) + return mock_object + + def unload_extensions(self): + for ext in app.extension_manager.extensions: + if ext.loaded: + app.extension_manager.unload_extension(ext) + def setup_log_filter(self): """Make a LogFilter that will turn loggings into exceptions.""" logger = logging.getLogger() @@ -328,6 +441,15 @@ self.log_filter = LogFilter() logger.addFilter(self.log_filter) + @contextlib.contextmanager + def allow_warnings(self): + """Context manager to allow log warnings go through without triggering + a unittest error + """ + self.log_filter.set_exception_level(logging.CRITICAL) + yield + self.log_filter.set_exception_level(logging.WARN) + def reset_log_filter(self): logger = logging.getLogger() for old_filter in logger.filters: @@ -336,6 +458,9 @@ # tearDown call. logger.setLevel(logging.ERROR) + def log_messages(self): + return '\n'.join(m.getMessage() for m in self.log_filter.records) + def _on_rmtree_error(self, func, path, excinfo): global FILES_TO_CLEAN_UP FILES_TO_CLEAN_UP.append(path) @@ -384,9 +509,26 @@ self.httpserver.stop() self.httpserver = None - def setup_new_item_info_cache(self): - app.item_info_cache = iteminfocache.ItemInfoCache() - app.item_info_cache.load() + def setup_dummy_message_handlers(self): + messages.FrontendMessage.handler = mock.Mock() + messages.BackendMessage.handler = mock.Mock() + + def setup_dummy_curl_manager(self): + httpclient.curl_manager = mock.Mock() + + def get_backend_messages(self, reset_mock=True): + msg_list = [args[0] for args, kwargs in + messages.BackendMessage.handler.handle.call_args_list] + if reset_mock: + messages.BackendMessage.handler.handle.reset_mock() + return msg_list + + def get_frontend_messages(self, reset_mock=True): + msg_list = [args[0] for args, kwargs in + messages.FrontendMessage.handler.handle.call_args_list] + if reset_mock: + messages.FrontendMessage.handler.handle.reset_mock() + return msg_list def reset_failed_soft_count(self): app.controller.failed_soft_count = 0 @@ -394,22 +536,45 @@ def check_failed_soft_count(self, count): self.assertEquals(app.controller.failed_soft_count, count) - def reload_database(self, path=':memory:', schema_version=None, - object_schemas=None, upgrade=True): + def reload_database(self, path=':memory:', upgrade=True, **kwargs): self.shutdown_database() - self.setup_new_database(path, schema_version, object_schemas) + self.setup_new_database(path, **kwargs) if upgrade: - app.db.upgrade_database() - database.update_last_id() + if self.allow_db_upgrade_error_dialog: + # this means that exceptions in the upgrade will be sent to a + # dialog box. Be careful with this, if you don't handle the + # dialog, then the unit tests will hang. 
+                app.db.upgrade_database(context='main')
+            else:
+                # normal case: use _upgrade_database() because we want
+                # exceptions to keep propagating
+                app.db._upgrade_database(context='main')
+        item.setup_change_tracker()
+        database.initialize()
+
+    def init_data_package(self):
+        """Initialize the data package.
+
+        The data package is used by the frontend to get data.
+
+        Note: Since data uses a different connection than the backend system
+        (storedatabase and friends), we need to create an on-disk database.
+        """
+        self.db_path = self.make_temp_path(".sqlite")
+        if os.path.exists(self.db_path):
+            os.unlink(self.db_path)
+        self.reload_database(FilenameType(self.db_path))
+        data.init(self.db_path)
+        # use a mock object for the database error handler
+        app.db_error_handler = mock.Mock()
 
     def clear_ddb_object_cache(self):
         app.db._ids_loaded = set()
         app.db._object_map = {}
+        app.db.cache = storedatabase.DatabaseObjectCache()
 
-    def setup_new_database(self, path, schema_version, object_schemas):
-        app.db = storedatabase.LiveStorage(path,
-                                           schema_version=schema_version,
-                                           object_schemas=object_schemas)
+    def setup_new_database(self, path, **kwargs):
+        app.db = storedatabase.LiveStorage(path, **kwargs)
         app.db.raise_load_errors = self.raise_db_load_errors
 
     def allow_db_load_errors(self, allow):
@@ -424,8 +589,9 @@
 
     def reload_object(self, obj):
         # force an object to be reloaded from the database.
-        del app.db._object_map[obj.id]
-        app.db._ids_loaded.remove(obj.id)
+        key = (obj.id, app.db.table_name(obj.__class__))
+        del app.db._object_map[key]
+        app.db._ids_loaded.remove(key)
         return obj.__class__.get_by_id(obj.id)
 
     def handle_error(self, obj, report):
@@ -446,8 +612,22 @@
         self.assertSameSet(dict1.keys(), dict2.keys())
         for k in dict1:
             if not dict1[k] == dict2[k]:
-                raise AssertionError("Values differ for key %s: %s -- %s",
-                                     k, dict1[k], dict2[k])
+                raise AssertionError("Values differ for key %r: %r -- %r" %
+                                     (k, dict1[k], dict2[k]))
+
+    def assertClose(self, value1, value2, tolerance=0.1):
+        """Assert that 2 values are near each other.
+
+        :param value1: value to compare
+        :param value2: value to compare
+        :param tolerance: how different the two can be
+        """
+
+        difference = abs(value1 - value2)
+        relative_difference = difference / max(abs(value1), abs(value2))
+        if relative_difference > tolerance:
+            raise AssertionError("Difference too big: %s, %s" % (value1,
+                                                                 value2))
 
 class EventLoopTest(MiroTestCase):
     def setUp(self):
@@ -494,6 +674,19 @@
         finally:
             eventloop.thread_pool_quit()
 
+    def run_idles_for_this_loop(self):
+        idle_queue = eventloop._eventloop.idle_queue
+        urgent_queue = eventloop._eventloop.urgent_queue
+        while idle_queue.has_pending_idle() or urgent_queue.has_pending_idle():
+            if urgent_queue.has_pending_idle():
+                urgent_queue.process_idles()
+            if idle_queue.has_pending_idle():
+                idle_queue.process_next_idle()
+        # make sure that idles scheduled for the next loop run as well, but
+        # don't do this inside the while loop.
+        eventloop._eventloop._add_idles_for_next_loop()
+
+
     def run_pending_timeouts(self):
         scheduler = eventloop._eventloop.scheduler
         while scheduler.has_pending_timeout():
@@ -534,3 +727,49 @@
             downloader.shutdown_downloader(eventloop.shutdown)
             self.runEventLoop()
         EventLoopTest.tearDown(self)
+
+def dynamic_test(expected_cases=None):
+    """Class decorator for tests that use external test cases.
This creates a + separate test method for each case; this makes it easier to debug tests that + go awry, makes it easier to see test progress from the command line (lots of + little tests rather than one big test), and increases the test count + appropriately:: + + class ExampleDynamicTest(object): + @classmethod + def generate_tests(cls): + # Should return an iterable of test cases. Each test case is an + # iterable of arguments to pass to the dynamic_test_case implementation. + # + # Test names will be created by stripping non-alphanumeric chars out of + # the value of the first arg in each test case, so the first arg should be + # a string that can be used to identify the test uniquely. + raise NotImplementedError + + def dynamic_test_case(self, *args): + # This will be run once for each value produced by setup_tests. The + # iterables of values returned by generate_tests will be passed as + # arguments. + raise NotImplementedError + """ + + def _generate_closure(cls, args): + return lambda self: cls.dynamic_test_case(self, *args) + + def wrap_class(cls): + generated_cases = 0 + for test_args in cls.generate_tests(): + test_name = ''.join(x for x in + test_args[0].encode('ascii', 'ignore') + if x.isalnum()) + setattr(cls, 'test_%s_dyn' % test_name, _generate_closure(cls, test_args)) + generated_cases += 1 + + if expected_cases is not None: + assert generated_cases == expected_cases, ( + "generated test count %d not equal to expected count %d for" + " %s; if you have just added a test, update expected_cases" + " (in the dynamic_test args); if not, there are test cases" + " missing" % (generated_cases, expected_cases, cls.__name__)) + return cls + return wrap_class diff -Nru miro-4.0.4/lib/test/gtcachetest.py miro-6.0/lib/test/gtcachetest.py --- miro-4.0.4/lib/test/gtcachetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/gtcachetest.py 2013-04-05 16:02:42.000000000 +0000 @@ -45,6 +45,21 @@ logging.getLogger().setLevel(self.oldlevel) @make_french + def test_gettext_lazy(self): + ok = gtcache.gettext_lazy("OK") + channels = gtcache.gettext_lazy("Channels") + self.assertEqual(ok, u'Valider') + self.assertEqual(u'%s' % ok, u'Valider') + self.assertEqual(channels, u'Cha\xeenes') + self.assertEqual(u'%s' % channels, u'Cha\xeenes') + gtcache.init(languages=['en'], + localedir=resources.path("testdata/locale")) + self.assertEqual(ok, u'OK') + self.assertEqual(u'%s' % ok, u'OK') + self.assertEqual(channels, u'Channels') + self.assertEqual(u'%s' % channels, u'Channels') + + @make_french def test_gettext(self): self.assertEqual(gtcache.gettext("OK"), u'Valider') self.assertEqual(gtcache.gettext("Channels"), u'Cha\xeenes') diff -Nru miro-4.0.4/lib/test/httpclienttest.py miro-6.0/lib/test/httpclienttest.py --- miro-4.0.4/lib/test/httpclienttest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/httpclienttest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,9 +1,12 @@ import functools import os +import logging import pycurl import pickle from cStringIO import StringIO +from miro import app +from miro import prefs from miro import dialogs from miro import eventloop from miro import httpauth @@ -483,21 +486,23 @@ class HTTPAuthTest(HTTPClientTestBase): def setUp(self): HTTPClientTestBase.setUp(self) - self.callback_handle = None self.setup_cancel() self.dialogs_seen = 0 self.dialog_callback = None + self.answer = None def setup_answer(self, username, password, button=dialogs.BUTTON_OK): - def handler(obj, dialog): - self.dialogs_seen += 1 - if self.dialog_callback: - 
self.dialog_callback() - dialog.run_callback(button, unicode(username), - unicode(password)) - if self.callback_handle: - signals.system.disconnect(self.callback_handle) - self.callback_handle = signals.system.connect('new-dialog', handler) + self.answer = (unicode(username), unicode(password), button) + + def handle_new_dialog(self, obj, dialog): + if self.answer is None: + HTTPClientTestBase.handle_new_dialog(self, obj, dialog) + + self.dialogs_seen += 1 + if self.dialog_callback: + self.dialog_callback() + username, password, button = self.answer + dialog.run_callback(button, username, password) def setup_cancel(self): self.setup_answer('', '', dialogs.BUTTON_CANCEL) @@ -606,13 +611,17 @@ self.check_auth_canceled() self.setup_answer("wronguser", "wrongpass") - self.grab_url(self.httpserver.build_url('digest-protected/index.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/index.txt')) self.check_auth_errback_called() @uses_httpclient def test_digest_auth_correct(self): self.setup_answer("user", "password") - self.grab_url(self.httpserver.build_url('digest-protected/index.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/index.txt')) self.assertEquals(self.dialogs_seen, 1) @uses_httpclient @@ -629,20 +638,29 @@ @uses_httpclient def test_digest_auth_memory(self): self.setup_answer("user", "password") - self.grab_url(self.httpserver.build_url('digest-protected/index.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/index.txt')) self.assertEquals(self.dialogs_seen, 1) # We shouldn't see another dialog for the same URL - self.grab_url(self.httpserver.build_url('digest-protected/index.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/index.txt')) self.assertEquals(self.dialogs_seen, 1) # ditto for ones in the same directory - self.grab_url(self.httpserver.build_url('digest-protected/index2.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/index2.txt')) self.assertEquals(self.dialogs_seen, 1) # Even for ones in a subdirectory - self.grab_url(self.httpserver.build_url( - 'digest-protected/foo/index2.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected/foo/index2.txt')) self.assertEquals(self.dialogs_seen, 1) # Or even for ones outside the directory (only true for digest auth) - self.grab_url(self.httpserver.build_url('digest-protected2/index.txt')) + with self.allow_warnings(): + self.grab_url( + self.httpserver.build_url('digest-protected2/index.txt')) self.assertEquals(self.dialogs_seen, 1) @uses_httpclient @@ -714,7 +732,8 @@ self.expecting_errback = True # test when schemes that aren't "basic" or "digest" - self.grab_url(self.httpserver.build_url('invalid-auth/badscheme')) + with self.allow_warnings(): + self.grab_url(self.httpserver.build_url('invalid-auth/badscheme')) self.assert_(isinstance(self.grab_url_error, httpclient.AuthorizationFailed)) # when we get invalid auth headers, we shouldn't pop up dialogs for @@ -722,19 +741,22 @@ self.assertEquals(self.dialogs_seen, 0) # test auth header without realm - self.grab_url(self.httpserver.build_url('invalid-auth/norealm')) + with self.allow_warnings(): + self.grab_url(self.httpserver.build_url('invalid-auth/norealm')) self.assert_(isinstance(self.grab_url_error, httpclient.AuthorizationFailed)) self.assertEquals(self.dialogs_seen, 0) # test 
completely garbled auth header
- self.grab_url(self.httpserver.build_url('invalid-auth/garbled'))
+ with self.allow_warnings():
+ self.grab_url(self.httpserver.build_url('invalid-auth/garbled'))
self.assert_(isinstance(self.grab_url_error, httpclient.AuthorizationFailed)) self.assertEquals(self.dialogs_seen, 0) # test auth header with no data
- self.grab_url(self.httpserver.build_url('invalid-auth/'))
+ with self.allow_warnings():
+ self.grab_url(self.httpserver.build_url('invalid-auth/'))
self.assert_(isinstance(self.grab_url_error, httpclient.AuthorizationFailed)) self.assertEquals(self.dialogs_seen, 0)
@@ -748,10 +770,9 @@ def setUp(self): self.dialogs_seen = 0
- signals.system.connect('new-dialog', self.handle_dialog)
EventLoopTest.setUp(self)
- def handle_dialog(self, obj, dialog):
+ def handle_new_dialog(self, obj, dialog):
self.dialogs_seen += 1 dialog.run_callback(dialogs.BUTTON_OK, u'user', u'password')
@@ -978,8 +999,15 @@ def test_connect_error(self): self.grab_url('http://255.255.255.255/') self.check_errback_called()
- self.assert_(isinstance(self.grab_url_error,
- httpclient.ConnectionError))
+ # The http proxy always replies with something (probably a 40x or
+ # 50x message), so we should not check against this as it will
+ # be unreliable when the proxy is active.
+ if app.config.get(prefs.HTTP_PROXY_ACTIVE):
+ logging.info('Proxy active: skipping specific connection error '
+ 'check')
+ else:
+ self.assert_(isinstance(self.grab_url_error,
+ httpclient.ConnectionError))
@uses_httpclient def test_closed_connection_error(self):
@@ -991,16 +1019,22 @@ httpclient.ServerClosedConnection)) @uses_httpclient
- def test_404_error(self):
+ def test_404_error(self, write_file=None):
self.expecting_errback = True url = self.httpserver.build_url('badfile.txt')
- self.grab_url(url)
+ self.grab_url(url, write_file=write_file)
self.assert_(isinstance(self.grab_url_error, httpclient.UnexpectedStatusCode)) # FIXME: It'd be nice if we could check a HTTP code rather than a # static message.
self.assertEquals(self.grab_url_error.friendlyDescription, _("File not found")) + if write_file: + self.assertEquals(open(write_file).read(), '') + + def test_error_nofile(self): + write_file = self.make_temp_path(".txt") + self.test_404_error(write_file=write_file) @uses_mock_httpclient def test_bad_domain_name(self): @@ -1029,7 +1063,8 @@ options = httpclient.TransferOptions("http://example.com/") bogus_transfer = httpclient.CurlTransfer(options, self.grab_url_callback, self.grab_url_errback) - bogus_transfer.on_error(123456, BogusLibcurlHandle()) + with self.allow_warnings(): + bogus_transfer.on_error(123456, BogusLibcurlHandle()) self.runPendingIdles() # Check that we saw a NetworkError and that the description strings @@ -1038,5 +1073,22 @@ self.assert_(isinstance(self.grab_url_error, httpclient.NetworkError)) self.assert_(isinstance(self.grab_url_error.longDescription, unicode)) - self.assert_(isinstance(self.grab_url_error.friendlyDescription, - unicode)) + self.assert_(isinstance(self.grab_url_error.friendlyDescription, unicode)) + +class LimitProtocolTest(HTTPClientTestBase): + #test that we limit the protocols to HTTP and HTTPS + @uses_httpclient + def test_url_scheme(self): + self.expecting_errback = True + self.grab_url('ftp://ben@example.com/') + self.check_errback_called() + self.assert_(isinstance(self.grab_url_error, httpclient.MalformedURL)) + + @uses_httpclient + def test_redirect(self): + self.expecting_errback = True + self.httpserver.custom_redirect_url('ftp://ben@example.com/') + self.grab_url(self.httpserver.build_url('custom-redirect')) + self.check_errback_called() + self.assert_(isinstance(self.grab_url_error, + httpclient.InvalidRedirect)) diff -Nru miro-4.0.4/lib/test/httpdownloadertest.py miro-6.0/lib/test/httpdownloadertest.py --- miro-4.0.4/lib/test/httpdownloadertest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/httpdownloadertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -91,14 +91,14 @@ self.downloader = TestingDownloader(self, self.download_url, "ID1") def stopOnData(): if (self.downloader.state == 'downloading' and - self.downloader.currentSize == 10000): + self.downloader.current_size == 10000): self.downloader.stop(False) self.stopEventLoop(False) self.downloader.statusCallback = stopOnData self.httpserver.pause_after(10000) self.runEventLoop() self.assertEquals(self.downloader.state, 'stopped') - self.assertEquals(self.downloader.currentSize, 0) + self.assertEquals(self.downloader.current_size, 0) self.wait_for_libcurl_manager() self.assert_(not os.path.exists(self.downloader.filename)) self.assertEquals(self.countConnections(), 0) @@ -108,22 +108,22 @@ self.downloader.statusCallback = self.stopOnFinished self.httpserver.pause_after(-1) self.runEventLoop() - self.assertEquals(self.downloader.currentSize, self.download_size) - self.assertEquals(self.downloader.totalSize, self.download_size) + self.assertEquals(self.downloader.current_size, self.download_size) + self.assertEquals(self.downloader.total_size, self.download_size) @uses_httpclient def test_pause(self): self.downloader = TestingDownloader(self, self.download_url, "ID1") def pauseOnData(): if (self.downloader.state == 'downloading' and - self.downloader.currentSize == 10000): + self.downloader.current_size == 10000): self.downloader.pause() self.stopEventLoop(False) self.downloader.statusCallback = pauseOnData self.httpserver.pause_after(10000) self.runEventLoop() self.assertEquals(self.downloader.state, 'paused') - self.assertEquals(self.downloader.currentSize, 10000) + 
self.assertEquals(self.downloader.current_size, 10000) self.assert_(os.path.exists(self.downloader.filename)) self.assertEquals(self.countConnections(), 0) def restart():
@@ -132,28 +132,28 @@ self.downloader.statusCallback = self.stopOnFinished self.httpserver.pause_after(-1) self.runEventLoop()
- self.assertEquals(self.downloader.currentSize, self.download_size)
- self.assertEquals(self.downloader.totalSize, self.download_size)
+ self.assertEquals(self.downloader.current_size, self.download_size)
+ self.assertEquals(self.downloader.total_size, self.download_size)
@uses_httpclient def test_restore(self): self.downloader = TestingDownloader(self, self.download_url, "ID1") def pauseInMiddle(): if (self.downloader.state == 'downloading' and
- self.downloader.currentSize == 10000):
+ self.downloader.current_size == 10000):
self.downloader.pause() self.stopEventLoop(False) self.downloader.statusCallback = pauseInMiddle self.httpserver.pause_after(10000) self.runEventLoop() self.assertEquals(self.downloader.state, 'paused')
- self.assertEquals(self.downloader.currentSize, 10000)
+ self.assertEquals(self.downloader.current_size, 10000)
restore = self.downloader.lastStatus.copy() restore['state'] = 'downloading' download._downloads = {} self.httpserver.pause_after(-1) self.downloader2 = TestingDownloader(self, restore=restore)
- restoreSize = restore['currentSize']
+ restoreSize = restore['current_size']
self.restarted = False def start_new_download_intercept(): self.restarted = True
diff -Nru miro-4.0.4/lib/test/idleiteratetest.py miro-6.0/lib/test/idleiteratetest.py
--- miro-4.0.4/lib/test/idleiteratetest.py 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/test/idleiteratetest.py 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,43 @@
+from miro import eventloop
+from miro.test.framework import EventLoopTest
+
+class IdleIterateTest(EventLoopTest):
+ """Test eventloop.idle_iterate() and the @idle_iterator decorator.
+ """
+ def setUp(self):
+ self.current_value = None
+ EventLoopTest.setUp(self)
+
+ def check_idle_iterator(self, *values):
+ """Check the progress of our idle iterator.
+
+ values should be the correct value at each step of the processing.
+ """
+ # we shouldn't start processing until the eventloop starts running
+ self.assertEquals(self.current_value, None)
+ # Each time through an iteration of the event loop, we should run one
+ # step of the idle iterator.
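For orientation, this is the API shape the tests below exercise; a sketch of how application code might drive it, where process_feeds and feed.update() are hypothetical names::

    from miro import eventloop

    @eventloop.idle_iterator
    def process_feeds(feeds):
        # one trip through the event loop runs one step, so a long job
        # never blocks the UI between yields
        for feed in feeds:
            feed.update()
            yield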
+ for v in values: + self.run_idles_for_this_loop() + self.assertEquals(self.current_value, v) + + def test_idle_iterate(self): + # test using eventloop.idle_iterate() + def foo(start, stop, step): + for x in xrange(start, stop, step): + self.current_value = x + yield + + eventloop.idle_iterate(foo, "test idle iterator", + args=(10, 20, 2)) + self.check_idle_iterator(10, 12, 14, 16, 18) + + def test_decorator(self): + # test using the @idle_iterator decorator method + @eventloop.idle_iterator + def foo(): + for x in xrange(5): + self.current_value = x + yield + foo() + self.check_idle_iterator(0, 1, 2, 3, 4) diff -Nru miro-4.0.4/lib/test/infolisttest.py miro-6.0/lib/test/infolisttest.py --- miro-4.0.4/lib/test/infolisttest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/infolisttest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,428 +0,0 @@ -import itertools -import weakref - -from miro.test.framework import (MiroTestCase, skip_for_platforms, - only_on_platforms) -from miro import infolist - -class FakeInfo(object): - def __init__(self, name, id_=None): - if id_ is None: - id_ = FakeInfo.counter.next() - self.id = id_ - self.name = name - - def __eq__(self, other): - return (type(self) == type(other) and - self.name == other.name) - - def __str__(self): - return repr(self) - - def __repr__(self): - return "FakeInfo(%r, %s)" % (self.name, self.id) - -class InfoListTestBase(MiroTestCase): - def setUp(self): - MiroTestCase.setUp(self) - FakeInfo.counter = itertools.count() - self.infolist = self.build_infolist() - self.sorter = self.sort_key_func - self.reverse = False - self.correct_infos = [] - - def build_infolist(self): - return infolist.InfoList(self.sort_key_func, False) - - def sort_key_func(self, info): - return info.name.lower() - - def sort_info_list(self, info_list): - if self.sorter is not None: - info_list.sort(key=self.sorter, reverse=self.reverse) - - def make_infos(self, *names): - return [FakeInfo(n) for n in names] - - def check_info_list(self, check_against): - if self.sorter is None: - self.assertEquals(self.infolist.info_list(), check_against) - else: - # allow for variations in the order that still match the sort - self.assertSameSet(self.infolist.info_list(), check_against) - self.assertEquals([self.sorter(i) for i in - self.infolist.info_list()], [self.sorter(i) for i in - check_against]) - - # test index_of_id(), get_prev_info() and get_next_info() - list_of_infos = self.infolist.info_list() - for i, info in enumerate(list_of_infos): - self.assertEquals(self.infolist.index_of_id(info.id), i) - if i > 0: - self.assertEquals(self.infolist.get_prev_info(info.id), - list_of_infos[i-1]) - else: - self.assertEquals(self.infolist.get_prev_info(info.id), None) - if i < len(list_of_infos) - 1: - self.assertEquals(self.infolist.get_next_info(info.id), - list_of_infos[i+1]) - else: - self.assertEquals(self.infolist.get_next_info(info.id), None) - - # test get_first_info() - if list_of_infos: - self.assertEquals(self.infolist.get_first_info(), - list_of_infos[0]) - else: - self.assertEquals(self.infolist.get_first_info(), None) - - # test get_last_info() - if list_of_infos: - self.assertEquals(self.infolist.get_last_info(), - list_of_infos[-1]) - else: - self.assertEquals(self.infolist.get_last_info(), None) - - self.infolist._sanity_check() - - def check_insert(self, infos): - self.correct_infos.extend(infos) - self.sort_info_list(self.correct_infos) - self.infolist.add_infos(infos) - self.check_info_list(self.correct_infos) - - def find_info_index(self, id_): - 
filtered_list = filter(lambda i: i.id ==id_, self.correct_infos) - if len(filtered_list) == 0: - raise ValueError("no info with id %s", id_) - if len(filtered_list) > 1: - raise ValueError("multiple infos with id %s", id_) - return self.correct_infos.index(filtered_list[0]) - - def check_update(self, *args, **kwargs): - """Update the list and check it. args should be in the format id, - name, id2, name2, ... - """ - to_update = [] - resort = bool(kwargs.get('resort')) - for i in xrange(0, len(args), 2): - info = FakeInfo(args[i+1], args[i]) - idx = self.find_info_index(info.id) - self.correct_infos[idx] = info - to_update.append(info) - if resort: - self.sort_info_list(self.correct_infos) - self.infolist.update_infos(to_update, resort=resort) - self.check_info_list(self.correct_infos) - - def check_remove(self, *id_list): - for i in reversed(range(len(self.correct_infos))): - if self.correct_infos[i].id in id_list: - del self.correct_infos[i] - self.infolist.remove_ids(id_list) - self.check_info_list(self.correct_infos) - - def check_update_sort(self, new_sorter, reverse=False): - self.sorter = new_sorter - self.reverse = reverse - if new_sorter is not None: - self.correct_infos.sort(key=new_sorter, reverse=reverse) - self.infolist.change_sort(new_sorter, reverse) - self.check_info_list(self.correct_infos) - -class InfoListDataTest(InfoListTestBase): - def check_list(self, *names): - self.assertEquals(list(names), - [i.name for i in self.infolist.info_list()]) - - def test_insert(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.check_insert(self.make_infos('n', 'p', 'r')) - # inserting an info twice should result in value error - self.assertRaises(ValueError, self.infolist.add_infos, - self.correct_infos[0:1]) - self.check_info_list(self.correct_infos) - # inserting if even 1 info is not new then to changes should be made - info2 = FakeInfo('non-dup') - self.assertRaises(ValueError, self.infolist.add_infos, - [info2, self.correct_infos[-1]]) - self.check_info_list(self.correct_infos) - # check reversed order - self.check_update_sort(self.sorter, reverse=True) - self.check_insert(self.make_infos('a', 'z', 'd')) - - def test_insert_in_order(self): - # ordered inserts is a possible edge case - self.check_insert(self.make_infos('a', 'b', 'c', 'd')) - - def test_insert_in_reversed_order(self): - # reversed order inserts is another possible edge case - self.check_insert(self.make_infos('d', 'c', 'b', 'a')) - - def test_remove(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.check_remove(0, 2) - # check removing node that's already been removed - self.assertRaises(KeyError, self.infolist.remove_ids, [0]) - self.check_info_list(self.correct_infos) - # check removing node that was never in the list - self.assertRaises(KeyError, self.infolist.remove_ids, [200]) - self.check_info_list(self.correct_infos) - # check removing with one node in the list and one out - # nothing node with id==1 shouldn't be removed in this case - self.assertRaises(KeyError, self.infolist.remove_ids, [1, 0]) - self.check_info_list(self.correct_infos) - - def test_update(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.check_update(0, 'ZZZ', 3, 'ABC') - # check info not in list raises KeyError - new_info = FakeInfo("bar") - self.assertRaises(KeyError, self.infolist.update_infos, [new_info], - True) - self.check_info_list(self.correct_infos) - # check no changes if any info is not in the list - self.assertRaises(KeyError, self.infolist.update_infos, - [new_info, 
self.correct_infos[2]], True) - self.check_info_list(self.correct_infos) - - def test_update_resort(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.check_update(0, 'ZXY', 3, 'abc', resort=True) - # check reversed order - self.check_update_sort(self.sorter, reverse=True) - self.check_update(1, 'aaa', 2, 'ZZZ', resort=True) - - def test_non_integer_id(self): - infos = self.make_infos('m', 'i', 'r', 'o', 'p', 'c', 'f') - for i in infos: - i.id = i.name # id is the initial name of the info - self.check_insert(infos[:4]) - self.check_update('m', 'ZZZ', 'r', 'ABC') - self.check_remove('m', 'i') - - def test_new_sort_order(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.check_update_sort(lambda info: info.id) - # None shouldn't be allowed - self.assertRaises(ValueError, self.infolist.change_sort, None) - -class InfoListMemoryTest(InfoListTestBase): - def test_objects_released(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - # make weakrefs to all data in the list - info_refs = [] - for info in self.correct_infos: - info_refs.append(weakref.ref(self.infolist.get_info(info.id))) - # try to do as many operations as possible on the list - self.check_update(0, 'ZZZ', 1, '123', 2, 'pcf', resort=True) - self.check_remove(0, 2) - self.infolist.remove_all() - # drop all our references and check that the objects are now deleted - del self.correct_infos - del info - for wr in info_refs: - self.assertEquals(wr(), None) - - def test_objects_released_insert_exception(self): - # test the edge case where we make some nodes, then see an exception - # in add_infos() - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - # prepare a batch of infos to add, except 1 is a duplicate which - # should raise an exception - new_infos = self.make_infos('a', 'c', 'd') - new_infos[-1].id = 3 - # make weakrefs to all data in the list - info_refs = [] - for info in self.correct_infos + new_infos: - info_refs.append(weakref.ref(info)) - # try to do as many operations as possible on the list - self.assertRaises(ValueError, self.infolist.add_infos, new_infos) - self.infolist.remove_all() - # drop all our references and check that the objects are now deleted - del new_infos - del self.correct_infos - del info - for wr in info_refs: - self.assertEquals(wr(), None) - -class InfoListFeaturesTest(InfoListTestBase): - def test_attrs(self): - self.check_insert(self.make_infos('m', 'i', 'r', 'o')) - self.infolist.set_attr(0, 'miro', 'foo') - self.assertEquals(self.infolist.get_attr(0, 'miro'), 'foo') - self.assertRaises(KeyError, self.infolist.get_attr, 0, 'miro2') - self.infolist.unset_attr(0, 'miro') - self.assertRaises(KeyError, self.infolist.get_attr, 0, 'miro') - # test second unset is okay - self.infolist.unset_attr(0, 'miro') - -@skip_for_platforms('osx') -class InfoListGTKTest(InfoListDataTest): - # Test the same things as in InfoListTest, but check using GTK's classes. - # Also, check that GTK signal handlers work. - - def setUp(self): - InfoListDataTest.setUp(self) - self.signals_seen = [] - # import gtk inside the function because it will fail on OS X - import gtk - self.treeview = gtk.TreeView() - self.infolist.add_to_tableview(self.treeview) - # track what infos should be visible when we handle callbacks - self.tracked_infos = [] - self.signal_error = False # did we get an exception in our signals? 
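A note on the signal_error flag just defined: pygtk prints and swallows exceptions raised inside signal callbacks instead of propagating them, so a failed assertion in a handler would not fail the test on its own. Each handler in this (now deleted) class therefore follows this pattern, condensed here with check_model() standing in for the real assertions::

    def on_row_deleted(self, obj, path):
        try:
            self.check_model(path)    # assertions about the model
        except Exception:
            # re-checked after the test action, where raising works
            self.signal_error = True
            raise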
- gtk_model = self.treeview.get_model() - gtk_model.connect('row-inserted', self.on_row_inserted) - gtk_model.connect('row-deleted', self.on_row_deleted) - gtk_model.connect('row-changed', self.on_row_changed) - gtk_model.connect('rows-reordered', self.on_rows_reordered) - - def check_info_list(self, info_list): - # check GTK-specific data structures - gtk_model = self.treeview.get_model() - self.assertEquals(len(info_list), len(gtk_model)) - it = gtk_model.get_iter_first() - for x in xrange(len(info_list)): - check_path = (x,) - self.assertEquals(gtk_model.get_path(it), check_path) - iter_for_path = gtk_model.get_iter(check_path) - self.assertEquals(self.infolist.row_for_iter(iter_for_path), - self.infolist.row_for_iter(it)) - # check that iter_for_id gives a correct iter. This is a bit - # weird, because iters don't do all that much directly. To check, - # make sure that the iter is associated with the correct path. - info = self.infolist.row_for_iter(it)[0] - iter_to_check = self.infolist.iter_for_id(info.id) - self.assertEquals(gtk_model.get_path(iter_to_check), check_path) - # prepare next loop - it = gtk_model.iter_next(it) - - def on_row_inserted(self, obj, path, it): - try: - # check that that our current model reflects the insert - info, attrs = self.infolist.row_for_iter(it) - if self.sorter is not None: - self.tracked_infos.append(info) - self.sort_info_list(self.tracked_infos) - else: - self.tracked_infos.insert(path[0], info) - self.check_info_list(self.tracked_infos) - # check that path is correct - possible_paths = [(i,) for i in xrange(len(self.tracked_infos)) - if self.tracked_infos[i] == info] - self.assert_(path in possible_paths) - except Exception: - # Exceptions in signal handlers won't actually halt the test, we - # have to do that manually - self.signal_error = True - raise - - def on_row_changed(self, obj, path, it): - try: - # check path points to the correct info - self.assertEquals(len(path), 1) - info = self.tracked_infos[path[0]] - model_info, attrs = self.infolist.row_for_iter(it) - self.assertEquals(info.id, model_info.id) - # update tracked_infos to reflect the change - self.tracked_infos[path[0]] = model_info - except Exception: - # Exceptions in signal handlers won't actually halt the test, we - # have to do that manually - self.signal_error = True - raise - - def on_row_deleted(self, obj, path): - try: - # check that the model reflects the change - self.assertEquals(len(path), 1) - del self.tracked_infos[path[0]] - self.check_info_list(self.tracked_infos) - except Exception: - # Exceptions in signal handlers won't actually halt the test, we - # have to do that manually - self.signal_error = True - raise - - def on_rows_reordered(self, obj, path, it, new_order): - try: - # path and iter should always be empty, since we aren't a tree - self.assertEquals(it, None) - self.assertEquals(path, ()) - # check new_order. 
- # NOTE: tracked_infos contains our updates, but is in the old - # order at this point - correct_new_order = [0 for i in xrange(len(self.tracked_infos))] - for old_index, info in enumerate(self.tracked_infos): - new_index = self.find_info_index(info.id) - correct_new_order[new_index] = old_index - # FIXME: new_order is not available in python - # new_order == correct_new_order - - # update tracked_infos to reflect the change - self.tracked_infos = self.infolist.info_list() - except Exception: - # Exceptions in signal handlers won't actually halt the test, we - # have to do that manually - self.signal_error = True - raise - - def check_insert(self, *args, **kwargs): - self.tracked_infos = self.correct_infos[:] - InfoListDataTest.check_insert(self, *args, **kwargs) - if self.signal_error: - raise AssertionError("assertion failure in signal callback") - self.check_info_list(self.tracked_infos) - - def check_update(self, *args, **kwargs): - self.tracked_infos = self.correct_infos[:] - InfoListDataTest.check_update(self, *args, **kwargs) - if self.signal_error: - raise AssertionError("assertion failure in signal callback") - self.check_info_list(self.tracked_infos) - - def check_remove(self, *args, **kwargs): - self.tracked_infos = self.correct_infos[:] - InfoListDataTest.check_remove(self, *args, **kwargs) - if self.signal_error: - raise AssertionError("assertion failure in signal callback") - self.check_info_list(self.tracked_infos) - - def check_update_sort(self, *args, **kwargs): - self.tracked_infos = self.correct_infos[:] - InfoListDataTest.check_update_sort(self, *args, **kwargs) - if self.signal_error: - raise AssertionError("assertion failure in signal callback") - self.check_info_list(self.tracked_infos) - -@only_on_platforms('osx') -class InfoListCocoaTest(InfoListDataTest): - # Test the same things as in InfoListTest, but check using Cocoa's classes - - def setUp(self): - from miro.plat.frontends.widgets import tablemodel - - InfoListDataTest.setUp(self) - source = tablemodel.MiroInfoListDataSource.alloc() - self.data_source = source.initWithModel_(self.infolist) - - def build_infolist(self): - from miro.plat.frontends.widgets import tablemodel - return tablemodel.InfoListModel(self.sort_key_func, False) - - def check_info_list(self, info_list): - # Note we just pass in a None for tableviews, the InfoList data source - # doesn't use it. - rows = self.data_source.numberOfRowsInTableView_(None) - data_source_rows = [] - for i in xrange(rows): - info, attrs = self.infolist.row_for_iter(i) - self.assertEquals((info, attrs), - self.data_source.tableView_objectValueForTableColumn_row_( - None, 0, i)) - data_source_rows.append(info) - # check that iter_for_id gives the correct iter. 
On OS X, this is - # just the index of the row - self.assertEquals(self.infolist.iter_for_id(info.id), i) - InfoListDataTest.check_info_list(self, data_source_rows) diff -Nru miro-4.0.4/lib/test/__init__.py miro-6.0/lib/test/__init__.py --- miro-4.0.4/lib/test/__init__.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/__init__.py 2013-04-05 16:02:42.000000000 +0000 @@ -49,7 +49,6 @@ from miro.test.devicestest import * from miro.test.flashscrapertest import * from miro.test.unicodetest import * -from miro.test.datastructurestest import * from miro.test.schematest import * from miro.test.storedatabasetest import * from miro.test.databasesanitytest import * @@ -74,15 +73,23 @@ from miro.test.itemtest import * from miro.test.filetypestest import * from miro.test.cellpacktest import * -from miro.test.searchtest import * -from miro.test.infolisttest import * from miro.test.fileobjecttest import * +from miro.test.fastresumetest import * from miro.test.widgetstateconstantstest import * from miro.test.metadatatest import * from miro.test.tableselectiontest import * from miro.test.filetagstest import * from miro.test.watchedfoldertest import * from miro.test.subprocesstest import * +from miro.test.itemfiltertest import * +from miro.test.extensiontest import * +from miro.test.idleiteratetest import * +from miro.test.itemtracktest import * +from miro.test.itemlisttest import * +from miro.test.itemrenderertest import * +from miro.test.sharingtest import * +from miro.test.databaseerrortest import * +from miro.test.playbacktest import * # platform specific tests @@ -91,11 +98,9 @@ if app.config.get(prefs.APP_PLATFORM) == "linux": from miro.test.gtcachetest import * from miro.test.downloadertest import * - from miro.test.moviedatatest import * else: framework.skipped_tests.append("miro.test.gtcachetest tests: not linux") framework.skipped_tests.append("miro.test.downloadertest tests: not linux") - framework.skipped_tests.append("miro.test.moviedatatest tests: not linux") if app.config.get(prefs.APP_PLATFORM) == "osx": from miro.test.osxsparkletest import * diff -Nru miro-4.0.4/lib/test/itemfiltertest.py miro-6.0/lib/test/itemfiltertest.py --- miro-4.0.4/lib/test/itemfiltertest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/itemfiltertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,53 @@ +from miro.frontends.widgets import itemfilter + +from miro.test.framework import MiroTestCase + +class FakeItemInfo(object): + # really simple item info. This is just enough to pass it to a couple + # filters + def __init__(self, file_type, downloaded): + self.file_type = file_type + if downloaded: + self.video_path = '/fake/filename' + else: + self.video_path = None + +class ItemFilterTest(MiroTestCase): + def check_active_filters(self, filter_set, *correct_filters): + self.assertEquals(filter_set.active_filters, + set(correct_filters)) + # check that all filter keys are unicode + for filter in filter_set.active_filters: + self.assertEquals(type(filter), unicode) + + def test_filter_selection(self): + # Test simple cases of changing filters + filter_set = itemfilter.ItemFilterSet() + # all should be the default + self.check_active_filters(filter_set, 'all') + # change to unplayed + filter_set.select('unplayed') + self.check_active_filters(filter_set, 'unplayed') + # change back to all + filter_set.select('all') + self.check_active_filters(filter_set, 'all') + + def test_podcast_filters_selection(self): + # Test the filters for the podcasts tab. 
+ filter_set = itemfilter.ItemFilterSet()
+ filter_set.select('all')
+ self.check_active_filters(filter_set, 'all')
+ # selecting video should auto-select downloaded by default
+ filter_set.select('video')
+ self.check_active_filters(filter_set, 'video', 'downloaded')
+ # but if unplayed is already selected, it should be left alone
+ filter_set.select('all')
+ filter_set.select('unplayed')
+ filter_set.select('video')
+ self.check_active_filters(filter_set, 'video', 'unplayed')
+ # if we select unplayed again nothing should change
+ filter_set.select('unplayed')
+ self.check_active_filters(filter_set, 'video', 'unplayed')
+ # if we select downloaded, then unplayed should unselect
+ filter_set.select('downloaded')
+ self.check_active_filters(filter_set, 'video', 'downloaded')
diff -Nru miro-4.0.4/lib/test/itemlisttest.py miro-6.0/lib/test/itemlisttest.py
--- miro-4.0.4/lib/test/itemlisttest.py 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/test/itemlisttest.py 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,294 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
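Before the ItemList tests start, it may help to restate the selection rule the itemfilter tests above pin down. This sketch is an illustration of that rule only, not Miro's actual ItemFilterSet implementation::

    def select(self, key):
        if key == u'video':
            # 'video' needs a companion filter: keep 'unplayed' if the
            # user already has it, otherwise default to 'downloaded'
            companion = (u'unplayed' if u'unplayed' in self.active_filters
                         else u'downloaded')
            self.active_filters = set([u'video', companion])
        elif (key in (u'unplayed', u'downloaded') and
              u'video' in self.active_filters):
            # switching companions keeps the 'video' half selected
            self.active_filters = set([u'video', key])
        else:
            self.active_filters = set([key])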
+ +"""itemlisttest -- Test ItemList""" + +import gc +import itertools +import random +import string +import weakref + +from miro import app +from miro import messages +from miro import models +from miro import util +from miro.frontends.widgets import itemlist +from miro.frontends.widgets import itemsort +from miro.test import mock, testobjects +from miro.test.framework import MiroTestCase + +class ItemListTest(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.init_data_package() + self.feed = models.Feed(u'http://example.com/feed.rss') + self.items = [testobjects.make_item(self.feed, u'item-%s' % i) + for i in xrange(10)] + app.db.finish_transaction() + self.item_list = itemlist.ItemList('feed', self.feed.id) + self.items_changed_handler = mock.Mock() + self.list_changed_handler = mock.Mock() + self.item_list.connect("items-changed", self.items_changed_handler) + self.item_list.connect("list-changed", self.list_changed_handler) + + def refresh_item_list(self): + app.db.finish_transaction() + self.item_list._refetch_id_list() + + def check_list_changed_signal(self): + self.assertEquals(self.list_changed_handler.call_count, 1) + self.list_changed_handler.reset_mock() + + def test_filter(self): + # make some of our items podcasts so we can use the PodcastFilter + podcast_count = 0 + for i in xrange(0, len(self.items), 2): + self.items[i].kind = u'podcast' + self.items[i].signal_change() + podcast_count += 1 + app.db.finish_transaction() + # set the filter + self.item_list.set_filters(['podcasts']) + self.check_list_changed_signal() + self.assertEquals(len(self.item_list), podcast_count) + for i in xrange(podcast_count): + item = self.item_list.get_row(i) + self.assertEquals(item.kind, u'podcast') + + def check_sort_order(self, correct_item_order): + correct_ids = [i.id for i in correct_item_order] + ids_from_item_list = [ self.item_list.get_row(i).id + for i in range(len(self.item_list)) ] + self.assertEquals(correct_ids, ids_from_item_list) + + def test_sort(self): + # give each items a random title + for i in self.items: + i.title = u''.join(random.choice(string.letters) for i in range(5)) + self.items.sort(key=lambda i: i.title) + for i in self.items: + i.signal_change() + app.db.finish_transaction() + # test that the default sort is release date + self.items.sort(key=lambda i: i.release_date) + self.check_sort_order(self.items) + # test reversing a sort + self.item_list.set_sort(itemsort.DateSort(False)) + self.items.sort(key=lambda i: i.release_date, reverse=True) + self.check_sort_order(self.items) + + def test_name_sort(self): + # Test that name sort does a natural sort on the items, and removes + # "the" / "a" from the begining + self.items[0].title = u'Podcast Item A' + self.items[1].title = u'The Podcast Item B' + self.items[2].title = u'A Podcast Item C' + self.items[3].title = u'SeriesItem9' + self.items[4].title = u'SeriesItem10' + self.items[5].title = u'SeriesItem11' + for i in self.items[:6]: + i.signal_change() + app.db.finish_transaction() + # test that the default sort is release date + self.item_list.set_sort(itemsort.TitleSort(True)) + self.items.sort(key=lambda i: util.name_sort_key(i.title)) + self.check_sort_order(self.items) + + def test_attrs(self): + id1 = self.items[0].id + id2 = self.items[-1].id + id3 = self.items[1].id + # test setting attributes + self.item_list.set_attr(id1, 'key', 'value') + self.item_list.set_attr(id2, 'key', 'value2') + self.assertEquals(self.item_list.get_attr(id1, 'key'), 'value') + 
self.assertEquals(self.item_list.get_attr(id2, 'key'), 'value2') + # test missing attributes + self.assertEquals(self.item_list.get_attr(id1, 'otherkey'), None) + self.assertEquals(self.item_list.get_attr(id3, 'key', 123), 123) + # test changing attributes + self.item_list.set_attr(id1, 'key', 'new-value') + self.assertEquals(self.item_list.get_attr(id1, 'key'), 'new-value') + # test that if an item is removed, the attributes stay around + self.items[4].remove() + self.refresh_item_list() + self.assertEquals(len(self.item_list), len(self.items) - 1) + self.assertEquals(self.item_list.get_attr(id1, 'key'), 'new-value') + self.assertEquals(self.item_list.get_attr(id2, 'key'), 'value2') + # test unsetting attributes + self.item_list.unset_attr(id1, 'key') + self.assertEquals(self.item_list.get_attr(id1, 'key'), None) + # test that a second unset is okay + self.item_list.unset_attr(id1, 'key') + + def check_group_info(self, grouping_func): + items = [self.item_list.get_row(i) + for i in xrange(len(self.item_list))] + row_counter = itertools.count() + for key, group in itertools.groupby(items, grouping_func): + group_list = list(group) + for i, item in enumerate(group_list): + correct_group_info = (i, len(group_list), group_list[0]) + group_info = self.item_list.get_group_info(row_counter.next()) + self.assertEquals(group_info, correct_group_info) + + def test_grouping(self): + # change all item titles so they start and end with 'a' or 'b' + first_letter = itertools.cycle(['a', 'a', 'b']) + last_letter = itertools.cycle(['b', 'a', 'b']) + for item in self.items: + item.title = u''.join((first_letter.next(), + random.choice(string.letters), + last_letter.next())) + item.signal_change() + self.refresh_item_list() + # check that get_group_info() raises a ValueError before a grouping + # func is set + for i in xrange(len(self.item_list)): + self.assertRaises(ValueError, self.item_list.get_group_info, i) + self.assertEquals(self.item_list.get_grouping(), None) + # test setting a grouping function + def first_letter_grouping(item): + return item.title[0] + self.item_list.set_grouping(first_letter_grouping) + self.check_group_info(first_letter_grouping) + # test changing a grouping function + def last_letter_grouping(info): + return info.title[-1] + self.item_list.set_grouping(last_letter_grouping) + self.check_group_info(last_letter_grouping) + # test grouping is correct after changing the sort + self.item_list.set_sort(itemsort.TitleSort()) + self.check_group_info(last_letter_grouping) + + def test_grouping_change_first_item(self): + # test item changes with a grouping set + + # Split the items into 2 groups + list_items = self.item_list.get_items() + first_group_count = len(list_items) // 2 + first_group = [i.id for i in list_items[0:first_group_count]] + def group_func(item): + return item.id in first_group + self.item_list.set_grouping(group_func) + for i in range(first_group_count): + group_info = self.item_list.get_group_info(i) + self.assertEquals(group_info[0], i) + self.assertEquals(group_info[1], first_group_count) + self.assertEquals(group_info[2], list_items[0]) + # try changing the first item in the list, make sure that the + # group_info changes based on that. 
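To make the (index_in_group, group_size, first_info) tuples checked above concrete: with a "first letter of title" grouping and sorted rows titled axb, aya, bzc, the groups are [axb, aya] and [bzc], so::

    item_list.get_group_info(0)  ->  (0, 2, <info for axb>)
    item_list.get_group_info(1)  ->  (1, 2, <info for axb>)
    item_list.get_group_info(2)  ->  (0, 1, <info for bzc>)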
+ for item in self.items: + if item.id == list_items[0].id: + item.title = u'new-title' + item.signal_change() + break + app.db.finish_transaction() + msg = messages.ItemChanges(set(), set([list_items[0].id]), set(), + set(['title']), False, False) + self.item_list.on_item_changes(msg) + self.assertEquals(self.item_list.get_row(0).title, u'new-title') + for i in range(first_group_count): + group_info = self.item_list.get_group_info(i) + self.assertEquals(group_info[2].title, u'new-title') + + def test_grouping_returns_none(self): + # If the grouping function returns None, then the item should never be + # part of a group + + # Split the items into 2 groups + list_items = self.item_list.get_items() + group_count = len(list_items) // 2 + grouped_items = [i.id for i in list_items[0:group_count]] + def group_func(item): + if item.id in grouped_items: + return 123 + else: + return None + self.item_list.set_grouping(group_func) + for i in range(group_count): + group_info = self.item_list.get_group_info(i) + self.assertEquals(group_info[0], i) + self.assertEquals(group_info[1], group_count) + self.assertEquals(group_info[2], list_items[0]) + for i in range(group_count, len(list_items)): + group_info = self.item_list.get_group_info(i) + self.assertEquals(group_info[0], 0) + self.assertEquals(group_info[1], 1) + self.assertEquals(group_info[2], list_items[i]) + +class TestItemListPool(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.init_data_package() + self.feed = models.Feed(u'http://example.com/feed.rss') + self.items = [testobjects.make_item(self.feed, u'item-%s' % i) + for i in xrange(10)] + app.db.finish_transaction() + self.pool = itemlist.ItemListPool() + app.item_tracker_updater = itemlist.ItemTrackerUpdater() + self.item_list = self.pool.get('feed', self.feed.id) + self.item_list2 = self.pool.get('feed', self.feed.id + 1) + + def tearDown(self): + del app.item_tracker_updater + MiroTestCase.tearDown(self) + + def test_reuse(self): + # test that we re-use ItemList objects rather than creating multiples. + dup_item_list = self.pool.get('feed', self.feed.id) + if dup_item_list is not self.item_list: + raise AssertionError("Didn't re-use item list") + non_dup_item_list = self.pool.get('feed', -1) + if (non_dup_item_list is self.item_list or + non_dup_item_list is self.item_list2): + raise AssertionError("Re-used item list when we shouldn't have") + + def test_on_item_changes(self): + # Test that calling on_item_changes on the ItemListPool calls it on + # all lists inside that pool. + self.item_list.on_item_changes = mock.Mock() + self.item_list2.on_item_changes = mock.Mock() + fake_message = mock.Mock() + app.item_tracker_updater.on_item_changes(fake_message) + self.item_list.on_item_changes.assert_called_once_with(fake_message) + self.item_list2.on_item_changes.assert_called_once_with(fake_message) + + def test_release(self): + # Test that we actually remove objects from the pool once there are no + # more references to them. 
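The behaviour this test pins down amounts to a refcounted cache; a rough sketch of the idea, not the actual ItemListPool code::

    class RefcountedPool(object):
        def __init__(self):
            self.entries = {}  # (type, id) -> [item_list, refcount]

        def get(self, type_, id_):
            key = (type_, id_)
            if key in self.entries:
                self.entries[key][1] += 1
                return self.entries[key][0]
            item_list = ItemList(type_, id_)  # the class tested above
            self.entries[key] = [item_list, 1]
            return item_list

        def release(self, item_list):
            for key, entry in self.entries.items():
                if entry[0] is item_list:
                    entry[1] -= 1
                    if entry[1] == 0:
                        del self.entries[key]
                    return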
+ self.pool.release(self.item_list)
+ self.assertSameSet(self.pool.all_item_lists, [self.item_list2])
+ # try it with 2 references
+ dup_item_list2 = self.pool.get('feed', self.feed.id + 1)
+ self.pool.release(self.item_list2)
+ self.assertSameSet(self.pool.all_item_lists, [self.item_list2])
+ self.pool.release(dup_item_list2)
+ self.assertSameSet(self.pool.all_item_lists, [])
diff -Nru miro-4.0.4/lib/test/itemrenderertest.py miro-6.0/lib/test/itemrenderertest.py
--- miro-4.0.4/lib/test/itemrenderertest.py 1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/lib/test/itemrenderertest.py 2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,115 @@
+# Miro - an RSS based video player application
+# Copyright (C) 2012
+# Participatory Culture Foundation
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+#
+# In addition, as a special exception, the copyright holders give
+# permission to link the code of portions of this program with the OpenSSL
+# library.
+#
+# You must obey the GNU General Public License in all respects for all of
+# the code used other than OpenSSL. If you modify file(s) with this
+# exception, you may extend this exception to your version of the file(s),
+# but you are not obligated to do so. If you do not wish to do so, delete
+# this exception statement from your version. If you delete this exception
+# statement from all source files in the program, then also delete it here.
+
+"""itemrenderertest.py -- Test rendering items."""
+
+from miro import app
+from miro import downloader
+from miro import models
+from miro.data import item
+from miro.frontends.widgets import itemrenderer
+from miro.test import mock
+from miro.test import testobjects
+from miro.test.framework import MiroTestCase, skip_for_platforms
+
+@skip_for_platforms('osx')
+class ItemRendererTest(MiroTestCase):
+ def setUp(self):
+ MiroTestCase.setUp(self)
+ self.renderer = itemrenderer.ItemRenderer()
+ self.feed = models.Feed(u'http://example.com/feed.rss')
+ self.item = testobjects.make_item(self.feed, u'item')
+ self.manual_feed = models.Feed(u'dtv:manualFeed',
+ initiallyAutoDownloadable=False)
+ self.file_item = models.FileItem(self.make_temp_path(),
+ self.manual_feed.id)
+ app.saved_items = set()
+ app.playback_manager = mock.Mock()
+ app.playback_manager.item_resume_policy.return_value = False
+
+ def _get_item(self, item_id):
+ item_list = item.fetch_item_infos(app.db.connection, [item_id])
+ return item_list[0]
+
+ def check_render(self, item):
+ """Check that ItemRenderer can successfully render a row.
+
+ NOTE: we don't actually check the correctness of the render, just that
+ it doesn't crash.
+ """ + self.renderer.attrs = {} + self.renderer.info = self._get_item(item.id) + context = mock.Mock() + layout_manager = mock.Mock() + hotspot = hover = None + context.width = self.renderer.MIN_WIDTH + context.height = self.renderer.HEIGHT + mock_textbox = layout_manager.textbox.return_value + mock_textbox.font.line_height.return_value = 16 + mock_textbox.get_size.return_value = (100, 16) + layout_manager.current_font.line_height.return_value = 16 + layout_manager.current_font.ascent.return_value = 12 + for selected in (False, True): + self.renderer.render(context, layout_manager, selected, hotspot, + hover) + + def test_undownloaded(self): + self.check_render(self.item) + + def test_downloading(self): + self.item.download() + fake_status = { + 'current_size': 100, + 'total_size': None, + 'state': u'downloading', + 'rate': 100, + 'eta': None, + 'type': 'HTTP', + 'dlid': self.item.downloader.dlid, + } + downloader.RemoteDownloader.update_status(fake_status) + self.check_render(self.item) + + def test_downloaded(self): + self.item.download() + fake_status = { + 'current_size': 100, + 'total_size': 100, + 'state': u'finished', + 'rate': 0, + 'eta': 0, + 'type': 'HTTP', + 'dlid': self.item.downloader.dlid, + 'filename': self.make_temp_path() + } + downloader.RemoteDownloader.update_status(fake_status) + self.check_render(self.item) + + def test_file_item(self): + self.check_render(self.file_item) diff -Nru miro-4.0.4/lib/test/itemtest.py miro-6.0/lib/test/itemtest.py --- miro-4.0.4/lib/test/itemtest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/itemtest.py 2013-04-05 16:02:42.000000000 +0000 @@ -6,24 +6,28 @@ from miro import app from miro import prefs from miro.feed import Feed -from miro.item import Item, FileItem, FeedParserValues +from miro.item import Item, FileItem, FeedParserValues, on_new_metadata from miro.fileobject import FilenameType from miro.downloader import RemoteDownloader -from miro.test.framework import MiroTestCase +from miro.test import mock, testobjects +from miro.test.framework import MiroTestCase, EventLoopTest from miro.singleclick import _build_entry +from miro.plat.utils import unicode_to_filename def fp_values_for_url(url, additional=None): return FeedParserValues(_build_entry(url, 'video/x-unknown', additional)) -class ContainerItemTest(MiroTestCase): +class ContainerItemTest(EventLoopTest): def setUp(self): - MiroTestCase.setUp(self) + EventLoopTest.setUp(self) self.feed = Feed(u'dtv:manualFeed', initiallyAutoDownloadable=False) self.mytempdir = FilenameType(tempfile.mkdtemp(dir=self.tempdir)) self._make_fake_item("pcf.avi") self._make_fake_item("dean.avi") self._make_fake_item("npr.txt") self.container_item = FileItem(self.mytempdir, self.feed.id) + # Give the iterators some time to run + self.process_idles() for child in self.container_item.get_children(): if child.filename.endswith("avi"): child.file_type = u'video' @@ -33,7 +37,7 @@ def tearDown(self): shutil.rmtree(self.mytempdir, ignore_errors=True) - MiroTestCase.tearDown(self) + EventLoopTest.tearDown(self) def _make_fake_item(self, filename): f = open(os.path.join(self.mytempdir, filename), 'wb') @@ -41,23 +45,23 @@ f.close() class ItemSeenTest(ContainerItemTest): - def test_seen_attribute(self): - # parents should be consider "seen" when all of their - # audio/video children are marked seen. + def test_get_watched_attribute(self): + # parents should be consider watched when all of their + # audio/video children are marked watched. 
children = list(self.container_item.get_children()) media_children = [i for i in children if i.is_playable()] other_children = [i for i in children if not i.is_playable()] self.assertEquals(len(media_children), 2) self.assertEquals(len(other_children), 1) - self.assert_(not self.container_item.seen) - media_children[0].mark_item_seen() - self.assert_(not self.container_item.seen) - media_children[1].mark_item_seen() - self.assert_(self.container_item.seen) - media_children[1].mark_item_unseen() - self.assert_(not self.container_item.seen) - media_children[1].mark_item_seen() - self.assert_(self.container_item.seen) + self.assert_(not self.container_item.get_watched()) + media_children[0].mark_watched() + self.assert_(not self.container_item.get_watched()) + media_children[1].mark_watched() + self.assert_(self.container_item.get_watched()) + media_children[1].mark_unwatched() + self.assert_(not self.container_item.get_watched()) + media_children[1].mark_watched() + self.assert_(self.container_item.get_watched()) class ChildRemoveTest(ContainerItemTest): def test_expire_all_children(self): @@ -96,7 +100,7 @@ feed_id=f1.id) f1.set_expiration(u'never', 0) - i1.watchedTime = i2.watchedTime = datetime.now() + i1.watched_time = i2.watched_time = datetime.now() for obj in (f1, i1, i2): obj.signal_change() @@ -114,8 +118,8 @@ f2.set_expiration(u'system', 0) # system default is 6 days as set in setUp, so i3 should expire, # but i4 should not. - i3.watchedTime = datetime.now() - timedelta(days=12) - i4.watchedTime = datetime.now() - timedelta(days=3) + i3.watched_time = datetime.now() - timedelta(days=12) + i4.watched_time = datetime.now() - timedelta(days=3) for obj in (f2, i3, i4): obj.signal_change() @@ -131,8 +135,8 @@ feed_id=f3.id) f3.set_expiration(u'feed', 24) - i5.watchedTime = datetime.now() - timedelta(days=3) - i6.watchedTime = datetime.now() - timedelta(hours=12) + i5.watched_time = datetime.now() - timedelta(days=3) + i6.watched_time = datetime.now() - timedelta(hours=12) for obj in (f3, i5, i6): obj.signal_change() @@ -162,14 +166,23 @@ item.skip_count = 0 self.assertEquals(item.get_auto_rating(), 5) + def test_set_rating(self): + feed = Feed(u'http://example.com/1') + item = Item(fp_values_for_url(u'http://example.com/1/item1'), + feed_id=feed.id) + item.set_rating(5) + self.assertEquals(item.rating, 5) + item.set_rating(3) + self.assertEquals(item.rating, 3) + class ItemRemoveTest(MiroTestCase): def test_watched_time_reset(self): feed = Feed(u'http://example.com/1') item = Item(fp_values_for_url(u'http://example.com/1/item1'), feed_id=feed.id) - item.watchedTime = datetime.now() + item.watched_time = datetime.now() item.expire() - self.assertEquals(item.watchedTime, None) + self.assertEquals(item.watched_time, None) def test_remove_before_downloader_referenced(self): # when items are restored from the DB, the downloader @@ -210,20 +223,20 @@ # subtitle encoding. 
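The 'never'/'system'/'feed' expiration cases exercised above all reduce to comparing watched_time against a cutoff; a simplified sketch of that comparison, with expire_hours standing in for whichever policy applies::

    from datetime import datetime, timedelta

    def should_expire(item, expire_hours):
        if item.watched_time is None:
            return False  # never-watched items don't expire on this path
        age = datetime.now() - item.watched_time
        return age > timedelta(hours=expire_hours)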
self.item1.set_subtitle_encoding('latin-9') self.assertEquals(self.item2.subtitle_encoding, None) - self.item2.mark_item_seen() + self.item2.mark_watched() self.assertEquals(self.item2.subtitle_encoding, 'latin-9') # Test the value isn't re-set the next time it's marked watched self.item1.set_subtitle_encoding('latin-5') - self.item2.mark_item_seen() + self.item2.mark_watched() self.assertEquals(self.item2.subtitle_encoding, 'latin-9') def test_set_none(self): - # Test an item is marked seen when the subtitle encoding is None) - self.item1.mark_item_seen() + # Test an item is marked watched when the subtitle encoding is None) + self.item1.mark_watched() self.assertEquals(self.item2.subtitle_encoding, None) self.item2.set_subtitle_encoding('latin-7') - self.item2.mark_item_seen() - self.item1.mark_item_seen() + self.item2.mark_watched() + self.item1.mark_watched() self.assertEquals(self.item1.subtitle_encoding, None) class ItemSearchTest(MiroTestCase): @@ -236,7 +249,8 @@ feed_id=self.feed.id) def test_matches_search(self): - self.item1.set_title(u"miro is cool") + self.item1.title = u"miro is cool" + self.item1.signal_change() self.assertEquals(self.item1.matches_search('miro'), True) self.assertEquals(self.item1.matches_search('iro'), True) self.assertEquals(self.item1.matches_search('c'), True) @@ -263,4 +277,273 @@ # cause a crash. A soft failure is okay though. app.controller.failed_soft_okay = True Item._allow_nonexistent_paths = False - FileItem("/non/existent/path/", feed.id) + with self.allow_warnings(): + FileItem("/non/existent/path/", feed.id) + +class HaveItemForPathTest(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.feed = Feed(u'http://example.com/1') + self.added_items = {} + self.deleted_paths = [] + + def add_item(self, filename): + path = os.path.join(self.tempdir, unicode_to_filename(filename)) + # create a bogus file so we don't get a warning when we create a + # filename. + open(path, 'wb').write("data") + self.added_items[path] = FileItem(path, self.feed.id) + + def remove_item(self, filename): + path = os.path.join(self.tempdir, unicode_to_filename(filename)) + self.added_items[path].remove() + del self.added_items[path] + self.deleted_paths.append(path) + + def check_have_item_for_path(self): + for path, item in self.added_items.items(): + self.assertEquals(Item.have_item_for_path(path), True) + # case differences shouldn't matter + self.assertEquals(Item.have_item_for_path(path.lower()), True) + self.assertEquals(Item.have_item_for_path(path.upper()), True) + for path in self.deleted_paths: + self.assertEquals(Item.have_item_for_path(path), False) + self.assertEquals(Item.have_item_for_path(path.upper()), False) + self.assertEquals(Item.have_item_for_path(path.lower()), False) + + def test_have_item_for_path(self): + # add some items before the first items_for_path() call + self.add_item(u'video-1') + self.add_item(u'vIdEO-2') + self.check_have_item_for_path() + # Add more items and test again + self.add_item(u'VIDEO\xe4-3') + self.add_item(u'vIdEO-four') + self.check_have_item_for_path() + # Delete some items and test one more time + self.remove_item(u'vIdEO-2') + self.remove_item(u'VIDEO\xe4-3') + self.check_have_item_for_path() + + def test_19929(self): + # Test have_item_for_path when sqlite and python have different values + # for LOWER() + self.add_item(u'\xd0') + self.check_have_item_for_path() + +class ItemMetadataTest(MiroTestCase): + # Test integration between the item and metadata modules. 
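On test_19929 above: SQLite's built-in LOWER() only case-folds ASCII, while Python's unicode.lower() folds the full Unicode range, so the two can disagree about a path such as u'\xd0' (LATIN CAPITAL LETTER ETH)::

    >>> u'\xd0'.lower()
    u'\xf0'

whereas SELECT LOWER on that character in SQLite (without the ICU extension) returns it unchanged, which is exactly the mismatch the test guards against.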
+ def setUp(self): + MiroTestCase.setUp(self) + self.manual_feed = Feed(u'dtv:manualFeed') + self.regular_feed = Feed(u'http://example.com/1') + self.path, fp = self.make_temp_path_fileobj(".avi") + fp.write("fake data") + fp.close() + + def make_new_file_item(self): + path, fp = self.make_temp_path_fileobj(".avi") + fp.write("fake data") + fp.close() + return FileItem(path, self.manual_feed.id) + + def check_path_in_metadata_manager(self): + if not app.local_metadata_manager.path_in_system(self.path): + raise AssertionError("path not in the metadata manager") + + def check_path_not_in_metadata_manager(self): + if app.local_metadata_manager.path_in_system(self.path): + raise AssertionError("path still in the metadata manager") + + def test_make_deleted(self): + # Test calling make_deleted on a FileItem + file_item = testobjects.make_file_item(self.manual_feed, path=self.path) + self.check_path_in_metadata_manager() + + file_item.make_deleted() + self.check_path_not_in_metadata_manager() + + file_item.make_undeleted() + self.check_path_in_metadata_manager() + + def test_remove(self): + # Test calling remove() a file item + file_item = testobjects.make_file_item(self.manual_feed, path=self.path) + self.check_path_in_metadata_manager() + + file_item.remove() + self.check_path_not_in_metadata_manager() + + def make_regular_item(self, feed): + url = u'http://example.com/1/item1' + item = Item(fp_values_for_url(url), feed_id=feed.id) + item.set_downloader(RemoteDownloader(url, item)) + return item + + def test_expire(self): + # Test calling expire() on a item downloaded from a feed + item = self.make_regular_item(self.regular_feed) + item.downloader.filename = self.path + item.downloader.state = u'finished' + item.on_download_finished() + self.check_path_in_metadata_manager() + + downloader = item.downloader + item.expire() + self.check_path_not_in_metadata_manager() + + def test_expire_external_item(self): + # Test calling expire() on a item downloaded by itself + item = self.make_regular_item(self.manual_feed) + item.downloader.filename = self.path + item.downloader.state = u'finished' + item.on_download_finished() + self.check_path_in_metadata_manager() + + item.expire() + + def test_on_new_metadata(self): + # make a bunch of file items + items = [self.make_new_file_item() for i in xrange(10)] + # create new metadata for some those items + new_metadata = {} + for i in [0, 1, 4, 5, 8, 9]: + item = items[i] + new_metadata[item.filename] = { + u'album': u'Album-%s' % i, + u'title': u'Title-%s' % i, + u'duration': 100, + } + on_new_metadata(mock.Mock(), new_metadata) + for item in items: + if item.filename in new_metadata: + md = new_metadata[item.filename] + self.assertEquals(item.album, md['album']) + self.assertEquals(item.metadata_title, md['title']) + self.assertEquals(item.title, md['title']) + self.assertEquals(item.duration, md['duration']) + else: + self.assertEquals(item.album, None) + self.assertEquals(item.metadata_title, None) + self.assertEquals(item.duration, None) + +class ItemSizeTest(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.feed = testobjects.make_feed() + + def update_status(self, download_progress, elapsed_time): + # define some arbitrary constants + total_size = 100000 + start_time = 1000 + # calculate values based on download_progress/elapsed_time + current_size = int(total_size * download_progress) + rate = current_size / elapsed_time + eta = int((total_size - current_size) / rate) + if download_progress < 1.0: + state = u'downloading' + end_time 
= None + filename = self.downloading_path + else: + end_time = start_time + elapsed_time + state = u'finished' + filename = self.final_path + + downloader.RemoteDownloader.update_status({ + 'dlid': self.dlid, + 'url': self.url, + 'state': state, + 'total_size': total_size, + 'current_size': current_size, + 'eta': eta, + 'rate': rate, + 'upload_size': 0, + 'filename': filename, + 'start_time': start_time, + 'end_time': end_time, + 'short_filename': 'download.mp4', + 'reason_failed': None, + 'short_reason_failed': None, + 'type': None, + 'retry_time': None, + 'retry_count': None, + }, cmd_done=True) + + def make_file(self, size): + path, fp = self.make_temp_path_fileobj(".avi") + fp.write(" " * size) + fp.close() + return path + + def check_size(self, item, size): + self.assertEquals(item.get_size(), size) + self.assertEquals(item.size, size) + + def update_downloader_status(self, item, path, current_size, total_size): + status = { + 'dlid': item.downloader.dlid, + 'url': item.url, + 'current_size': current_size, + 'total_size': total_size, + 'upload_size': 0, + 'start_time': 1000, + 'short_filename': os.path.basename(path), + 'reason_failed': None, + 'short_reason_failed': None, + 'type': None, + 'retry_time': None, + 'retry_count': None, + } + if current_size < total_size: + status.update({ + 'state': u'downloading', + 'end_time': None, + 'eta': 10, + 'rate': 1, + 'filename': os.path.join(self.tempdir, + 'Incomplete downloads', + os.path.basename(path)), + }) + else: + status.update({ + 'state': u'finished', + 'end_time': status['start_time'] + 50, + 'eta': None, + 'rate': None, + 'filename': path, + }) + RemoteDownloader.update_status(status, cmd_done=True) + + def test_download(self): + item = testobjects.make_item(self.feed, u'my item') + item.download() + path = self.make_file(size=1000) + # while downloading, size should be the total size of the download + self.update_downloader_status(item, path, 500, 1000) + self.assertEquals(item.get_state(), 'downloading') + self.check_size(item, 1000) + # after downloading, size should be the same + self.update_downloader_status(item, path, 1000, 1000) + self.assertEquals(item.get_state(), 'newly-downloaded') + self.check_size(item, 1000) + + def test_rss_entry(self): + # initially we should use the RSS enclosure for size + item = testobjects.make_item(self.feed, u'my item', + enclosure_size=2000) + self.check_size(item, 2000) + + def test_file_item(self): + # file items should have size = their file size + item = testobjects.make_file_item(self.feed, u'my item', + path=self.make_file(3000)) + self.check_size(item, 3000) + + def test_file_removed(self): + # test what happens if set_filename() is called with a non-existant + # file + item = testobjects.make_item(self.feed, u'my item', + enclosure_size=2000) + with self.allow_warnings(): + item.set_filename('non-existant-path') + self.check_size(item, None) diff -Nru miro-4.0.4/lib/test/itemtracktest.py miro-6.0/lib/test/itemtracktest.py --- miro-4.0.4/lib/test/itemtracktest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/itemtracktest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,1052 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. 
+# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""itemtracktest -- Test the miro.data.itemtrack module. """ + +import datetime +import itertools + +from miro import app +from miro import downloader +from miro import eventloop +from miro import messages +from miro import models +from miro import sharing +from miro.data import item +from miro.data import itemtrack +from miro.test import mock +from miro.data import connectionpool +from miro.test.framework import MiroTestCase, MatchAny +from miro.test import testobjects + +class ItemTrackTestCase(MiroTestCase): + """Base class for all ItemTracker tests. + + This class doesn't define any tests; it simply provides setUp/tearDown + methods and some helper functions. + """ + + def setUp(self): + MiroTestCase.setUp(self) + self.idle_scheduler = mock.Mock() + self.init_data_package() + self.setup_items() + self.setup_connection_pool() + self.force_wal_mode() + self.setup_mock_message_handler() + self.setup_tracker() + # make the change tracker start fresh for the unittests. Since we + # don't know which change tracker our item type will use, we go for + # the sledge hammer approach here and reset them all. + models.Item.change_tracker.reset() + models.DeviceItem.change_tracker.reset() + models.SharingItem.change_tracker.reset() + + def tearDown(self): + self.tracker.destroy() + MiroTestCase.tearDown(self) + + def force_wal_mode(self): + """Force WAL mode to be a certain value. + + By default we set wal_mode to be True. The NonWalMode versions of the + test case override this method and set wal_mode to False. + """ + self.connection_pool.wal_mode = True + + def setup_mock_message_handler(self): + """Install a mock object to handle frontend messages. + + We use this to intercept the ItemChanges message. + """ + self.mock_message_handler = mock.Mock() + messages.FrontendMessage.install_handler(self.mock_message_handler) + # move past the ItemChanges messages for our initial items. + eventloop._eventloop.emit('event-finished', True) + self.mock_message_handler.reset_mock() + + def process_items_changed_messages(self): + """Simulate the eventloop finishing and sending the ItemChanges + message to the frontend. Also, intercept that message and pass it to + our item tracker. + """ + eventloop._eventloop.emit('event-finished', True) + mock_handle = self.mock_message_handler.handle + # filter through the frontend messages to find + # ItemChanges messages.
+ for args, kwargs in mock_handle.call_args_list: + msg = args[0] + if type(msg) in (messages.ItemChanges, + messages.DeviceItemChanges, + messages.SharingItemChanges): + self.tracker.on_item_changes(msg) + mock_handle.reset_mock() + + def get_items_changed_message(self): + """Simulate the eventloop finishing and return the ItemChanges that + would be sent to the frontend. + """ + eventloop._eventloop.emit('event-finished', True) + mock_handle = self.mock_message_handler.handle + # filter through the frontend messages to find + # ItemChanges messages. + items_changed_messages = [] + for args, kwargs in mock_handle.call_args_list: + msg = args[0] + if type(msg) in (messages.ItemChanges, + messages.DeviceItemChanges, + messages.SharingItemChanges): + items_changed_messages.append(msg) + mock_handle.reset_mock() + if len(items_changed_messages) == 0: + return None + elif len(items_changed_messages) > 1: + raise AssertionError("Multiple messages: %s" % + items_changed_messages) + else: + return items_changed_messages[0] + + def run_tracker_idle(self): + self.assertEqual(self.idle_scheduler.call_count, 1) + args, kwargs = self.idle_scheduler.call_args + self.idle_scheduler.reset_mock() + callback = args[0] + callback() + + def run_all_tracker_idles(self): + loop_check = itertools.count() + while self.idle_scheduler.call_count > 0: + if loop_check.next() > 1000: + raise AssertionError("idle callbacks never stopped") + self.run_tracker_idle() + + # These next methods need to be implemented by subclasses + def setup_items(self): + """Setup the initial database items to track. + """ + raise NotImplementedError() + + def setup_connection_pool(self): + """Setup a connection pool to use with our tracker. + """ + raise NotImplementedError() + + def setup_tracker(self): + """Setup an item tracker to use.""" + raise NotImplementedError() + +class ItemTrackTestWALMode(ItemTrackTestCase): + def setUp(self): + ItemTrackTestCase.setUp(self) + # setup mock objects to track when the items-changed and list-changed + # signals get emitted + self.signal_handlers = {} + for signal in ("items-changed", "list-changed"): + self.signal_handlers[signal] = mock.Mock() + self.tracker.connect(signal, self.signal_handlers[signal]) + + def setup_items(self): + self.tracked_feed, self.tracked_items = \ + testobjects.make_feed_with_items(10) + self.other_feed1, self.other_items1 = \ + testobjects.make_feed_with_items(12) + self.other_feed2, self.other_items2 = \ + testobjects.make_feed_with_items(8) + app.db.finish_transaction() + + def setup_connection_pool(self): + self.connection_pool = app.connection_pools.get_main_pool() + + def setup_tracker(self): + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_order_by(['release_date']) + self.tracker = itemtrack.ItemTracker(self.idle_scheduler, query, + item.ItemSource()) + + def check_no_signals(self): + """Check that our ItemTracker hasn't emitted any signals.""" + for handler in self.signal_handlers.values(): + self.assertEquals(handler.call_count, 0) + + def check_one_signal(self, should_have_fired): + """Check that our ItemTracker has emitted a specific signal and no + others. + + Reset the Mock object that handled the signal.
+ + :returns: arguments passed to that signal + """ + if should_have_fired not in self.signal_handlers.keys(): + raise ValueError("Unknown signal: %s" % should_have_fired) + for signal, handler in self.signal_handlers.items(): + if signal == should_have_fired: + self.assertEquals(handler.call_count, 1) + args = handler.call_args[0] + handler.reset_mock() + else: + self.assertEquals(handler.call_count, 0) + + # do some sanity checks on the arguments passed + # first argument should always be our tracker + self.assertEquals(args[0], self.tracker) + if should_have_fired in ('initial-list', 'items-changed'): + # should be passed a list of ids + self.assertEquals(len(args), 2) + else: + # shouldn't be passed anything + self.assertEquals(len(args), 1) + return args + + def check_tracker_items(self, correct_items=None, sort_items=True): + """Calculate which items should be in our ItemTracker and check that + its data agrees with this. + + :param correct_items: items that should be in our ItemTracker. If + None, we will use calc_items_in_tracker() to calculate this. + """ + if correct_items is not None: + item_list = correct_items + else: + item_list = self.calc_items_in_tracker() + if sort_items and self.tracker.query.order_by: + self.sort_item_list(item_list) + self.assertEquals(len(item_list), len(self.tracker)) + # test the get_items() method + tracker_items = self.tracker.get_items() + self.assertEquals(len(tracker_items), len(item_list)) + for i, ti in zip(item_list, tracker_items): + self.assertEquals(i.id, ti.id) + # test the get_row() and get_item() methods + for i, item in enumerate(item_list): + self.assertEquals(self.tracker.get_row(i).id, item.id) + self.assertEquals(self.tracker.get_item(item.id).id, item.id) + + def calc_items_in_tracker(self): + item_list = [] + for i in models.Item.make_view(): + meets_conditions = True + for condition in self.tracker.query.conditions: + if len(condition.columns) > 1: + raise AssertionError("Don't know how to get value for %s" + % condition.columns) + table, column = condition.columns[0] + if table == 'item': + item_value = getattr(i, column) + elif table == 'remote_downloader': + dler = i.downloader + if dler is None: + item_value = None + else: + item_value = getattr(dler, column) + elif table == 'feed': + item_value = getattr(i.get_feed(), column) + else: + raise AssertionError("Don't know how to get value for %s" + % condition.columns) + full_column = "%s.%s" % (table, column) + if condition.sql == '%s = ?' % full_column: + if item_value != condition.values[0]: + meets_conditions = False + break + elif condition.sql == '%s < ?' % full_column: + if item_value >= condition.values[0]: + meets_conditions = False + break + elif condition.sql == '<': + if item_value <= condition.values[0]: + meets_conditions = False + break + elif condition.sql == '%s LIKE ?'
% full_column: + value = condition.values[0] + if (value[0] != '%' or + value[-1] != '%'): + raise ValueError("Can't handle like without % on " + "both ends") + inner_part = value[1:-1] + meets_conditions = inner_part in item_value + if not meets_conditions: + break + else: + raise ValueError("Can't handle condition operator: %s" % + condition.operator) + if meets_conditions: + item_list.append(i) + return item_list + + def sort_item_list(self, item_list): + def cmp_func(item1, item2): + for table, column in self.tracker.query.order_by.columns: + value1 = getattr(item1, column) + value2 = getattr(item2, column) + cmp_val = cmp(value1, value2) + desc_expr = '%s.%s DESC' % (table, column) + if desc_expr in self.tracker.query.order_by.sql: + cmp_val *= -1 + if cmp_val != 0: + return cmp_val + return 0 + item_list.sort(cmp=cmp_func) + + def test_initial_list(self): + self.check_tracker_items() + + def test_background_fetch(self): + # test that ItemTracker fetches its rows in the backend using + # idle callbacks + + # initially we should just store None for our data as a placeholder + # until we actually do the fetch. + self.assertEquals(self.tracker.row_data, {}) + # we should have an idle callback to schedule fetching the row data. + self.assertEqual(self.idle_scheduler.call_count, 1) + self.run_all_tracker_idles() + for row in self.tracker.row_data.values(): + self.assertNotEquals(row, None) + self.check_tracker_items() + + def check_items_changed_after_message(self, changed_items): + self.process_items_changed_messages() + signal_args = self.check_one_signal('items-changed') + self.assertSameSet([i.id for i in changed_items], + signal_args[1]) + + def check_list_change_after_message(self): + self.process_items_changed_messages() + self.check_one_signal('list-changed') + + def test_item_changes(self): + # test that simple changes result in an items-changed signal + item1 = self.tracked_items[0] + item2 = self.tracked_items[1] + item1.title = u'new title' + item1.signal_change() + item2.title = u'new title2' + item2.signal_change() + self.check_items_changed_after_message([item1, item2]) + self.check_tracker_items() + # test that changes to order by fields result in a list-changed + item1.release_date += datetime.timedelta(days=400) + item1.signal_change() + item2.release_date += datetime.timedelta(days=400) + item2.signal_change() + self.check_list_change_after_message() + self.check_tracker_items() + # test that changes to conditions result in a list-changed + item1.feed_id = self.other_feed2.id + item1.signal_change() + item2.feed_id = self.tracked_feed.id + item2.signal_change() + self.check_list_change_after_message() + self.check_tracker_items() + + def test_item_changes_after_finished(self): + # test item changes after we've finished fetching all rows + while not self.tracker.idle_work_scheduled: + self.tracker.do_idle_work() + item1 = self.tracked_items[0] + item2 = self.tracked_items[1] + item1.title = u'new title' + item1.signal_change() + item2.title = u'new title2' + item2.signal_change() + self.check_items_changed_after_message([item1, item2]) + self.check_tracker_items() + + def test_add_remove(self): + # adding items to our tracked feed should result in the list-changed + # signal + new_item = testobjects.make_item(self.tracked_feed, u'new-item') + self.check_list_change_after_message() + self.check_tracker_items() + # removing items from our tracked feed should result in the list-changed + # signal + to_remove = self.tracked_items.pop(0) + to_remove.remove() + self.check_list_change_after_message()
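
The sort_item_list() helper above mirrors a SQL ORDER BY clause with a Python 2 comparison function: compare one column at a time, invert the result for columns marked DESC, and fall through to the next column on ties. A standalone sketch of the same technique, with an invented Row class and column list standing in for ItemInfo objects and the tracker's order_by columns (not Miro API):

# Emulating "ORDER BY title ASC, release_date DESC" with a cmp function.
class Row(object):
    def __init__(self, title, release_date):
        self.title = title
        self.release_date = release_date

def make_cmp(columns):
    # columns: list of (attribute_name, descending) pairs
    def cmp_func(row1, row2):
        for attr, descending in columns:
            cmp_val = cmp(getattr(row1, attr), getattr(row2, attr))
            if descending:
                cmp_val *= -1
            if cmp_val != 0:
                return cmp_val
        return 0  # every column tied
    return cmp_func

rows = [Row(u'b', 1), Row(u'a', 2), Row(u'a', 3)]
rows.sort(cmp=make_cmp([('title', False), ('release_date', True)]))
# titles ascending, ties broken by release_date descending:
# (a, 3), (a, 2), (b, 1)

The fall-through on ties is what makes multi-column orderings like set_order_by(['title', '-release_date']) behave the same way in the tests as in SQL.
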
+ self.check_tracker_items() + # adding/removing items from other feeds shouldn't result in any signals + self.other_items1[0].remove() + testobjects.make_item(self.other_feed2, u'new-item2') + self.check_no_signals() + self.check_tracker_items() + + def test_extra_conditions(self): + # test adding more conditions + titles = [i.title for i in self.tracked_items] + titles.sort() + middle_title = titles[len(titles) // 2] + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.add_condition('title', '<', middle_title) + query.set_order_by(['release_date']) + self.tracker.change_query(query) + # changing the query should emit list-changed + self.check_one_signal('list-changed') + self.check_tracker_items() + + def test_complex_conditions(self): + # test adding a complex SQL condition + query = itemtrack.ItemTrackerQuery() + + sql = "feed_id IN (SELECT id FROM feed WHERE id in (?, ?))" + values = (self.tracked_feed.id, self.other_feed1.id) + query.add_complex_condition(["feed_id"], sql, values) + query.set_order_by(['release_date']) + self.tracker.change_query(query) + # changing the query should emit list-changed + self.check_one_signal('list-changed') + self.check_tracker_items(self.tracked_items + self.other_items1) + + def test_like(self): + # test a LIKE condition + query = itemtrack.ItemTrackerQuery() + query.add_condition('title', 'LIKE', '%feed1%') + self.tracker.change_query(query) + # changing the query should emit list-changed + self.check_one_signal('list-changed') + self.check_tracker_items() + + def test_search(self): + # test full-text search + + # manually set some titles so that we can test searching those + item1, item2, item3 = self.tracked_items[:3] + item1.title = u'foo bar' + item1.signal_change() + item2.title = u'bar baz' + item2.signal_change() + item3.title = u'foo bar baz' + item3.signal_change() + app.db.finish_transaction() + self.check_items_changed_after_message([item1, item2, item3]) + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_search('foo') + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items([item1, item3]) + # test two terms + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_search('foo baz') + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items([item3]) + # test that we do a prefix search for the last term + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_search('fo') + query.set_order_by(['release_date']) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items([item1, item3]) + # But we shouldn't do a prefix search for terms other than the last + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_search('fo bar') + query.set_order_by(['release_date']) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items([]) + + def test_search_for_torrent(self): + # test searching for the string "torrent". In this case, we should + # match items that are torrents.
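
The searches in test_search() above rely on only the final search term being treated as a prefix. The same behavior can be seen with SQLite full-text search directly; a rough standalone illustration, assuming an FTS4-enabled sqlite3 build (Miro's real query construction lives in miro.data.itemtrack and may differ in detail):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute("CREATE VIRTUAL TABLE item_fts USING fts4(title)")
for title in (u'foo bar', u'bar baz', u'foo bar baz'):
    conn.execute("INSERT INTO item_fts (title) VALUES (?)", (title,))
# a trailing '*' makes a term a prefix match, so a search for 'fo'
# becomes the FTS query 'fo*' and matches both titles starting with foo
rows = conn.execute("SELECT title FROM item_fts "
                    "WHERE title MATCH 'fo*'").fetchall()
print rows  # [(u'foo bar',), (u'foo bar baz',)]
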
+ + item1 = self.tracked_items[0] + # item1 will be a torrent download + item1.download() + downloader.RemoteDownloader.update_status({ + 'current_size': 0, + 'total_size': None, + 'state': u'downloading', + 'rate': 0, + 'eta': None, + 'type': 'BitTorrent', + 'dlid': item1.downloader.dlid, + }) + app.db.finish_transaction() + self.check_items_changed_after_message([item1]) + # a search for torrent should match both of them + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_search('torrent') + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items([item1]) + + def test_feed_conditions(self): + # change the query to something that involves downloader columns + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed.orig_url', '=', self.tracked_feed.orig_url) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items() + + def test_downloader_conditions(self): + # change the query to something that involves downloader columns + query = itemtrack.ItemTrackerQuery() + query.add_condition('remote_downloader.state', '=', 'downloading') + self.tracker.change_query(query) + self.check_one_signal('list-changed') + # start downloading some items + downloads = self.tracked_items[:4] + for i in downloads: + i.download() + self.check_list_change_after_message() + self.check_tracker_items() + for i in downloads[2:]: + i.expire() + self.check_list_change_after_message() + self.check_tracker_items() + + def test_playlist_conditions(self): + # change the query to something that involves playlist columns + playlist = models.SavedPlaylist(u'My playlist') + for item in self.tracked_items: + playlist.add_item(item) + app.db.finish_transaction() + query = itemtrack.ItemTrackerQuery() + query.add_condition('playlist_item_map.playlist_id', '=', + playlist.id) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items(self.tracked_items) + # add items to the playlist + new_items = self.other_items1[:4] + for item in new_items: + playlist.add_item(item) + self.check_list_change_after_message() + self.check_tracker_items(self.tracked_items + new_items) + # remove items from the playlist + removed_items = self.tracked_items[:4] + for item in removed_items: + playlist.remove_item(item) + self.check_list_change_after_message() + self.check_tracker_items(self.tracked_items[4:] + new_items) + + def test_order(self): + # test order by a different column + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_order_by(['title']) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items() + # test reverse ordering + query.set_order_by(['-title']) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items() + # test order by multiple column + query.set_order_by(['title', '-release_date']) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + self.check_tracker_items() + # test order by something more complex. 
Move item0 to the bottom of + # the list, then sort alphabetically + sql = ("CASE " + "WHEN title LIKE '%item0' THEN 'zzzzzz' " + "ELSE title " + "END") + query.set_complex_order_by(['title'], sql) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + correct_order = self.tracked_items[1:] + self.tracked_items[:1] + self.check_tracker_items(correct_order, sort_items=False) + + def test_limit(self): + # test order by a different column + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.tracked_feed.id) + query.set_order_by(['title']) + query.set_limit(3) + self.tracker.change_query(query) + self.check_one_signal('list-changed') + sorted_items = sorted(self.tracked_items, + key=lambda item: item.title) + self.check_tracker_items(sorted_items[:3]) + # test changes + last_item = sorted_items[-1] + last_item.title = u'aaaaaa' + last_item.signal_change() + self.check_list_change_after_message() + self.check_tracker_items([last_item] + sorted_items[:2]) + + def test_downloader_order(self): + downloads = self.tracked_items[:4] + for i, item_ in enumerate(downloads): + # simulate a the download being in progress + item_.download() + # ensure that the downloads goes from slowest to fastest + rate = i * 1024 + fake_status = { + 'current_size': 0, + 'total_size': None, + 'state': u'downloading', + 'rate': rate, + 'eta': None, + 'type': 'HTTP', + 'dlid': item_.downloader.dlid, + } + downloader.RemoteDownloader.update_status(fake_status) + + app.db.finish_transaction() + self.check_items_changed_after_message(downloads) + query = itemtrack.ItemTrackerQuery() + query.add_condition('remote_downloader.state', '=', 'downloading') + query.set_order_by(['remote_downloader.rate']) + self.tracker.change_query(query) + # Need to manually fetch the items to compare to + with self.connection_pool.context() as connection: + id_list = [i.id for i in downloads] + correct_items = item.fetch_item_infos(connection, id_list) + self.check_tracker_items(correct_items) + + def test_change_while_loading_data(self): + # test the backend writing to the DB before all data is loaded. + item1 = self.tracked_items[0] + item2 = self.tracked_items[1] + old_title = item1.title + item1.title = u'new title' + item1.signal_change() + # since these changes happened after our ItemTracker fetched its IDs, + # when we load data from our ItemTracker it should have the old data + self.assertEquals(self.tracker.get_item(item1.id).title, + old_title) + item2.feed_id = self.other_feed1.id + item2.signal_change() + # For the same reason as above, item2 should still be in the tracker + # and the next line should not throw an exception + self.tracker.get_item(item2.id) + # After ItemTracker gets the ItemChanges message, it should load the + # new data + self.process_items_changed_messages() + self.assertEquals(self.tracker.get_item(item1.id).title, + u'new title') + self.assertRaises(KeyError, self.tracker.get_item, item2.id) + + def test_19823(self): + # Test the tricky case from bz19823. + item = self.tracked_items[0] + item2 = self.tracked_items[1] + # make a change where the ItemTracker just needs to refresh the data + item.title = u'new title' + item.signal_change() + msg1 = self.get_items_changed_message() + # make another change that removes an item before the first one is + # processed. This provokes the race condition in 19823. 
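
The race being provoked below comes down to SQLite transaction visibility: both backend changes are committed together, so once the tracker's read connection observes the commit it sees the two changes at once, even though only the first ItemChanges message has been handled. A tiny standalone illustration of that visibility rule with plain sqlite3 (not Miro code):

import os
import sqlite3
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'test.db')
writer = sqlite3.connect(path)
reader = sqlite3.connect(path)
writer.execute("CREATE TABLE item (id INTEGER PRIMARY KEY)")
writer.commit()
writer.execute("INSERT INTO item VALUES (1)")  # change #1
writer.execute("INSERT INTO item VALUES (2)")  # change #2
# before the commit the second connection sees neither change
print reader.execute("SELECT COUNT(*) FROM item").fetchone()[0]  # 0
writer.commit()
# after the commit it sees both changes at once, which is the situation
# the test provokes while only the first message has been processed
print reader.execute("SELECT COUNT(*) FROM item").fetchone()[0]  # 2
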
+ item2.remove() + msg2 = self.get_items_changed_message() + # process the first message. The issue in 19823 was that this caused us + # to commit the transaction, which makes sqlite see both changes. The + # ItemTracker still has item in its list, but when it tries to read + # it from its database connection, it's not there + self.tracker.on_item_changes(msg1) + # check that get_items() doesn't raise an exception + self.tracker.get_items() + # process the second change for good measure + self.tracker.on_item_changes(msg2) + self.tracker.get_items() + + def test_19823_case2(self): + # Test a variation on the 19823 issue. + item = self.tracked_items[0] + item2 = self.tracked_items[1] + # make a change where the ItemTracker just needs to refresh the data + item.title = u'new title' + item.signal_change() + msg = self.get_items_changed_message() + # make it so that we remove an item right after refresh_items() is + # called + old_refresh_items = self.tracker.item_fetcher.refresh_items + def refresh_items_intercept(changed_ids): + # call the original version + rv = old_refresh_items(changed_ids) + item2.remove() + app.db.finish_transaction() + return rv + self.tracker.item_fetcher.refresh_items = refresh_items_intercept + # make another change that removes an item before the first one is + # processed. This provokes the race condition in 19823. + # process the first message. The issue in 19823 was that this caused us + # to commit the transaction, which makes sqlite see both changes. The + # ItemTracker still has item in its list, but when it tries to read + # it from its database connection, it's not there + self.tracker.on_item_changes(msg) + # check that get_items() doesn't raise an exception + self.tracker.get_items() + # process the second change for good measure + + def test_19866(self): + # variation on 19823 where the crash happens while handling the + # will-change signal. + item = self.tracked_items[0] + item2 = self.tracked_items[1] + # make a change where the ItemTracker just needs to refresh the data + item.title = u'new title' + item.signal_change() + msg1 = self.get_items_changed_message() + # make another change that removes an item before the first one is + # processed. This provokes the race condition in 19823. + item2.remove() + msg2 = self.get_items_changed_message() + # process the first message and check that get_items() doesn't crash + # when it's called inside signal handlers + self.tracker.connect('will-change', + lambda tracker: tracker.get_items()) + self.tracker.connect('list-changed', + lambda tracker: tracker.get_items()) + self.tracker.on_item_changes(msg1) + # process the second message + self.tracker.on_item_changes(msg2) + + def test_19913(self): + # Yet another tricky case + + # call get_items() to ensure all items are loaded + self.tracker.get_items() + self.tracker.do_idle_work() + # change, then remove an item from the list + first_item = self.tracked_items[0] + first_item.title = u'new-title' + first_item.signal_change() + first_item.remove() + # during the will-change signal, call get_items() again. + # The bug in #19913 is that since the item is on the changed list, we + # will remove the cached data for it and close our transaction. This + # leaves the item out of the cache and unable to be loaded again.
+ # Check that get_items() still returns the original + # ten items at this point + def on_will_change(tracker): + self.assertEquals(len(tracker.get_items()), 10) + self.tracker.connect('will-change', on_will_change) + self.tracker.on_item_changes(self.get_items_changed_message()) + +class ItemTrackTestNonWALMode(ItemTrackTestWALMode): + def force_wal_mode(self): + self.connection_pool.wal_mode = False + +class DeviceItemTrackTestWALMode(ItemTrackTestCase): + def setup_items(self): + self.device = testobjects.make_mock_device() + device_items = testobjects.make_device_items(self.device, 'audio1.mp3', + 'audio2.mp3', 'video1.avi') + self.audio1, self.audio2, self.video1 = device_items + self.device.db_info.db.finish_transaction() + + def setup_connection_pool(self): + # simulate the device tab being sent to the frontend so that + # app.connection_pools has a ConnectionPool for the device + msg = messages.TabsChanged('connect', [self.device], [], []) + app.connection_pools.on_tabs_changed(msg) + self.connection_pool = app.connection_pools.get_device_pool( + self.device.id) + + def setup_tracker(self): + query = itemtrack.DeviceItemTrackerQuery() + query.add_condition('file_type', '=', u'audio') + query.set_order_by(['filename']) + item_source = item.DeviceItemSource(self.device) + self.tracker = itemtrack.ItemTracker(self.idle_scheduler, query, + item_source) + + def check_list(self, *correct_items): + tracker_items = self.tracker.get_items() + self.assertEquals([i.id for i in tracker_items], + [i.id for i in correct_items]) + + def test_list(self): + self.check_list(self.audio1, self.audio2) + + def test_changes(self): + self.audio2.update_from_metadata({u'file_type': u'video'}) + self.audio2.signal_change() + self.video1.update_from_metadata({u'file_type': u'audio'}) + self.video1.signal_change() + self.process_items_changed_messages() + self.check_list(self.audio1, self.video1) + +class DeviceItemTrackTestNoWALMode(DeviceItemTrackTestWALMode): + def force_wal_mode(self): + self.connection_pool.wal_mode = False + +class SharingItemTrackTestWalMode(ItemTrackTestCase): + def setup_items(self): + self.setup_client() + self.setup_share() + + def setup_share(self): + # make a share that uses our mock client + self.patch_function('miro.libdaap.make_daap_client', + lambda *args, **kwargs: self.client) + # Make sure the SharingItemTrackerImpl doesn't actually create a + # thread. We want to manually call its methods and have them run in + # the main thread.
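
patch_function() above and patch_for_test() below are test-framework helpers that install a replacement object and automatically restore the original during teardown. Outside this framework, the mock library that these tests already import gives the same effect; a minimal self-contained sketch (fetch_page and the canned return value are invented for illustration, not Miro code):

import mock
import urllib

def fetch_page(url):
    # stands in for production code that would hit the network
    return urllib.urlopen(url).read()

with mock.patch('urllib.urlopen') as fake_urlopen:
    fake_urlopen.return_value.read.return_value = 'fake page'
    assert fetch_page('http://example.com/') == 'fake page'
# the patch is reverted as soon as the with-block exits
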
+ self.patch_for_test('miro.sharing.SharingItemTrackerImpl.start_thread') + self.share = testobjects.make_share() + self.share_info = messages.SharingInfo(self.share) + self.share.set_info(self.share_info) + self.share.start_tracking() + self.run_client_connect() + + def setup_connection_pool(self): + msg = messages.TabsChanged('connect', [self.share_info], [], []) + app.connection_pools.on_tabs_changed(msg) + self.connection_pool = app.connection_pools.get_sharing_pool( + self.share.info.id) + self.setup_tracker() + + def setup_client(self): + self.client = testobjects.MockDAAPClient() + self.video1 = testobjects.make_mock_daap_item(1001, 'video-item-1', + u'video') + self.video2 = testobjects.make_mock_daap_item(1002, 'video-item-2', + u'video') + self.audio1 = testobjects.make_mock_daap_item(2001, 'audio-item-1', + u'audio') + self.audio2 = testobjects.make_mock_daap_item(2002, 'audio-item-2', + u'audio') + self.client.set_items([self.video1, self.video2, + self.audio1, self.audio2]) + + def setup_tracker(self): + # Set up our item tracker + query = itemtrack.SharingItemTrackerQuery() + query.add_condition('file_type', '=', u'audio') + query.set_order_by(['title']) + item_source = item.SharingItemSource(self.share.info) + self.tracker = itemtrack.ItemTracker(self.idle_scheduler, query, + item_source) + + def setup_mock_message_handler(self): + """Install a mock object to handle frontend messages. + + We use this to intercept the SharingItemChanges message + """ + self.mock_message_handler = mock.Mock() + messages.FrontendMessage.install_handler(self.mock_message_handler) + # move past the the SharingItemChanges method for our initial items. + eventloop._eventloop.emit('event-finished', True) + self.mock_message_handler.reset_mock() + + def run_client_connect(self): + result = self.share.tracker.client_connect() + self.share.tracker.client_connect_callback(result) + self.share.db_info.db.finish_transaction() + + def run_client_update(self): + result = self.share.tracker.client_update() + self.share.tracker.client_update_callback(result) + self.share.db_info.db.finish_transaction() + + def check_list(self, *correct_items): + tracker_items = self.tracker.get_items() + correct_ids = [i['dmap.itemid'] for i in correct_items] + self.assertEquals([i.daap_id for i in tracker_items], correct_ids) + + def test_list(self): + self.check_list(self.audio1, self.audio2) + + def test_changes(self): + new_video1 = self.video1.copy() + new_video1 = testobjects.make_mock_daap_item(1001, 'video-item-1', + u'audio') + audio3 = testobjects.make_mock_daap_item(2003, 'audio-item-3', + u'audio') + self.client.set_items([new_video1, self.video2, + self.audio1, self.audio2, audio3]) + self.run_client_update() + self.process_items_changed_messages() + self.check_list(self.audio1, self.audio2, audio3, new_video1) + + def test_playlist_filter(self): + self.client.add_playlist( + testobjects.make_mock_daap_playlist(3001, 'playlist') + ) + self.client.set_playlist_items(3001, [1001, 1002]) + self.run_client_update() + query = itemtrack.SharingItemTrackerQuery() + query.add_condition('sharing_item_playlist_map.playlist_id', '=', + 3001) + query.set_order_by(['title']) + self.tracker.change_query(query) + self.check_list(self.video1, self.video2) + # test changes + self.client.set_playlist_items(3001, [1001, 1002, 2001]) + self.run_client_update() + self.process_items_changed_messages() + self.check_list(self.audio1, self.video1, self.video2) + +class SharingItemTrackTestNOWalMode(SharingItemTrackTestWalMode): + def 
force_wal_mode(self): + self.connection_pool.wal_mode = False + +class ItemInfoAttributeTest(MiroTestCase): + # Test that DeviceItemInfo and SharingItemInfo to make sure that they + # define the same attributes that ItemInfo does + + def test_device_item_info(self): + self._check_class_against_item_info(item.DeviceItemInfo) + + def test_sharing_item_info(self): + self._check_class_against_item_info(item.SharingItemInfo) + + def _check_class_against_item_info(self, klass): + required_attrs = self._calc_required_attrs() + # make sure the other class either has a SelectColumn or a class + # property for each of the required SelectColumns + klass_attrs = self._select_column_attrs(klass) + klass_attrs.update(self._class_properties(klass)) + # special case, if ItemInfo only uses filename_unicode, to implement + # the filename_property(). So if the class defines filename(), then + # it doesn't need to define filename_unicode + if 'filename' in klass_attrs: + required_attrs.remove('filename_unicode') + + if not required_attrs.issubset(klass_attrs): + msg = ("%s does not define required attributes: (%s)" % + (klass, required_attrs.difference(klass_attrs))) + raise AssertionError(msg) + + def _select_column_attrs(self, klass): + return set([col.attr_name for col in klass.select_info.select_columns]) + + def _class_properties(self, klass): + return set(name for name, obj in klass.__dict__.items() + if isinstance(obj, property)) + + def _calc_required_attrs(self): + item_attrs = self._select_column_attrs(item.ItemInfo) + # remove default values defined in ItemInfoBase + required_attrs = item_attrs.difference( + item.ItemInfoBase.__dict__.keys()) + return required_attrs + + def test_db_error_item_attributes(self): + # test that DBErrorItemInfo defines + required_attrs = self._calc_required_attrs() + missing_attributes = set() + db_error_item_info = item.DBErrorItemInfo(0) + for attr_name in required_attrs: + if not hasattr(db_error_item_info, attr_name): + missing_attributes.add(attr_name) + if missing_attributes: + msg = ("DBErrorItemInfo does not define required " + "attributes: (%s)" % missing_attributes) + raise AssertionError(msg) + +class BackendItemTrackerTest(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.setup_data() + self.setup_tracker() + + def tearDown(self): + self.item_tracker.destroy() + MiroTestCase.tearDown(self) + + def setup_data(self): + self.feed, self.items = \ + testobjects.make_feed_with_items(10, file_items=True) + self.other_feed, self.other_items = \ + testobjects.make_feed_with_items(10, file_items=True) + self.process_item_changes() + + def setup_tracker(self): + query = itemtrack.ItemTrackerQuery() + query.add_condition('feed_id', '=', self.feed.id) + self.item_tracker = itemtrack.BackendItemTracker(query) + self.items_changed_callback = mock.Mock() + self.item_tracker.connect('items-changed', self.items_changed_callback) + + def fetch_item_infos(self, item_objects): + if len(item_objects) == 0: + return [] + return item.fetch_item_infos(app.db.connection, + [i.id for i in item_objects]) + + def test_initial_list(self): + self.assertSameSet(self.item_tracker.get_items(), + self.fetch_item_infos(self.items)) + + def check_callback(self, added, changed, removed): + self.assertEquals(self.items_changed_callback.call_count, 1) + call_args, call_kwargs = self.items_changed_callback.call_args + self.assertEquals(call_args[0], self.item_tracker) + self.assertSameSet(call_args[1], self.fetch_item_infos(added)) + self.assertSameSet(call_args[2], 
self.fetch_item_infos(changed)) + self.assertSameSet(call_args[3], [item.id for item in removed]) + self.assertEquals(call_kwargs, {}) + self.items_changed_callback.reset_mock() + + def process_item_changes(self): + app.db.finish_transaction() + models.Item.change_tracker.send_changes() + + def test_changes(self): + self.assertEquals(self.items_changed_callback.call_count, 0) + # make changes that don't add/remove items from the list + self.items[0].set_user_metadata({'title': u'new title'}) + self.items[0].signal_change() + self.items[1].set_user_metadata({'title': u'new title'}) + self.items[1].signal_change() + self.process_item_changes() + self.check_callback(added=[], changed=self.items[:2], removed=[]) + # make changes that add/remove items from the list. + self.items[0].remove() + new_items = testobjects.add_items_to_feed(self.feed, 5, + file_items=True) + self.items[1].set_user_metadata({'title': u'newer title'}) + self.items[1].signal_change() + self.other_items[0].set_feed(self.feed.id) + self.process_item_changes() + self.check_callback(added=new_items + [self.other_items[0]], + changed=[self.items[1]], + removed=[self.items[0]]) + + def test_change_query(self): + new_query = itemtrack.ItemTrackerQuery() + new_query.add_condition('feed_id', '=', self.other_feed.id) + self.item_tracker.change_query(new_query) + self.assertSameSet(self.item_tracker.get_items(), + self.fetch_item_infos(self.other_items)) + # check that changing the query resulted in the items-changed signal + self.process_item_changes() + self.check_callback(added=self.other_items, + changed=[], + removed=self.items) + + def test_destroy(self): + # test that after destroy() is called, we no longer track changes + self.item_tracker.destroy() + self.items[0].set_user_metadata({'title': u'new title'}) + self.items[0].signal_change() + self.process_item_changes() + self.assertEquals(self.items_changed_callback.call_count, 0) diff -Nru miro-4.0.4/lib/test/messagetest.py miro-6.0/lib/test/messagetest.py --- miro-4.0.4/lib/test/messagetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/messagetest.py 2013-04-05 16:02:42.000000000 +0000 @@ -15,9 +15,8 @@ from miro import itemsource from miro import messages from miro import messagehandler -from miro import metadataprogress -from miro.test import mock +from miro.test import mock, testobjects from miro.test.framework import MiroTestCase, EventLoopTest, uses_httpclient class MessageOne(messages.BackendMessage): @@ -62,6 +61,10 @@ self.messages = [] def handle(self, message): + if isinstance(message, messages.ItemChanges): + # ItemChanges didn't exist when the unittests were written and + # it's easier to ignore them then to fix the unittests + return self.messages.append(message) class TrackerTest(EventLoopTest): @@ -404,403 +407,3 @@ Feed(u'http://example.com/3') self.feed2.remove() self.check_message_count(1) - - -class FeedItemTrackTest(TrackerTest): - def setUp(self): - TrackerTest.setUp(self) - self.items = [] - self.feed = Feed(u'dtv:manualFeed') - self.make_item(u'http://example.com/', u'my first item') - self.make_item(u'http://example.com/2', u'my second item') - self.runUrgentCalls() - messages.TrackItems('feed', self.feed.id).send_to_backend() - self.runUrgentCalls() - - def make_item(self, url, title=u'default item title'): - additional = {'title': title} - entry = _build_entry(url, 'video/x-unknown', additional) - item_ = Item(FeedParserValues(entry), feed_id=self.feed.id) - self.items.append(item_) - - def checkDownloadInfo(self, info, item): - 
downloader = item.downloader - self.assertEquals(info.current_size, downloader.get_current_size()) - self.assertEquals(info.rate, downloader.get_rate()) - self.assertEquals(info.state, downloader.get_state()) - - def check_info(self, itemInfo, item): - self.assertEquals(itemInfo.name, item.get_title()) - self.assertEquals(itemInfo.description, item.get_description()) - self.assertEquals(itemInfo.release_date, item.get_release_date()) - self.assertEquals(itemInfo.size, item.get_size()) - self.assertEquals(itemInfo.permalink, item.get_link()) - self.assertEquals(itemInfo.id, item.id) - self.assertEquals(itemInfo.expiration_date, item.get_expiration_time()) - self.assertEquals(itemInfo.thumbnail, item.get_thumbnail()) - if item.downloader: - self.checkDownloadInfo(itemInfo.download_info) - else: - self.assertEquals(itemInfo.download_info, None) - - def check_changed_message_type(self, message): - self.assertEquals(type(message), messages.ItemsChanged) - self.assertEquals(message.type, 'feed') - - def test_initial_list(self): - self.assertEquals(len(self.test_handler.messages), 1) - message = self.test_handler.messages[0] - self.assert_(isinstance(message, messages.ItemList)) - self.assertEquals(message.type, 'feed') - self.assertEquals(message.id, self.feed.id) - - self.assertEquals(len(message.items), len(self.items)) - message.items.sort(key=lambda i: i.id) - self.check_info_list(message.items, self.items) - - def test_update(self): - self.items[0].entry_title = u'new name' - self.items[0].signal_change() - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, changed=[self.items[0]]) - - def test_multiple_updates(self): - # see #15782 - self.items[0].set_title(u'new name') - self.items[0].set_title(u'new name2') - self.items[0].set_title(u'new name3') - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, changed=[self.items[0]]) - - def test_add(self): - self.make_item(u'http://example.com/3') - self.make_item(u'http://example.com/4') - self.make_item(u'http://example.com/5') - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, added=self.items[2:]) - - def test_remove(self): - self.items[1].remove() - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, removed=[self.items[1]]) - - def test_stop(self): - messages.StopTrackingItems('feed', self.feed.id).send_to_backend() - self.runUrgentCalls() - self.items[0].entry_title = u'new name' - self.items[0].signal_change() - self.items[1].remove() - self.make_item(u'http://example.com/4') - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 1) - -class PlaylistItemTrackTest(TrackerTest): - def setUp(self): - TrackerTest.setUp(self) - self.items = [] - self.feed = Feed(u'dtv:manualFeed') - self.playlist = SavedPlaylist(u'test playlist') - self.make_item(u'http://example.com/') - self.make_item(u'http://example.com/2') - self.runUrgentCalls() - messages.TrackItems('playlist', self.playlist.id).send_to_backend() - self.runUrgentCalls() - - def make_item(self, url): - entry = _build_entry(url, 'video/x-unknown') - item_ = Item(FeedParserValues(entry), feed_id=self.feed.id) - self.items.append(item_) - self.playlist.add_item(item_) - - def checkDownloadInfo(self, info, item): - downloader = item.downloader - self.assertEquals(info.current_size, downloader.get_current_size()) - 
self.assertEquals(info.rate, downloader.get_rate()) - self.assertEquals(info.state, downloader.get_state()) - - def check_info(self, itemInfo, item): - self.assertEquals(itemInfo.name, item.get_title()) - self.assertEquals(itemInfo.description, item.get_description()) - self.assertEquals(itemInfo.release_date, item.get_release_date()) - self.assertEquals(itemInfo.size, item.get_size()) - self.assertEquals(itemInfo.permalink, item.get_link()) - self.assertEquals(itemInfo.id, item.id) - self.assertEquals(itemInfo.expiration_date, item.get_expiration_time()) - self.assertEquals(itemInfo.thumbnail, item.get_thumbnail()) - if item.downloader: - self.checkDownloadInfo(itemInfo.download_info) - else: - self.assertEquals(itemInfo.download_info, None) - - def check_changed_message_type(self, message): - self.assertEquals(type(message), messages.ItemsChanged) - self.assertEquals(message.type, 'playlist') - - def test_initial_list(self): - self.assertEquals(len(self.test_handler.messages), 1) - message = self.test_handler.messages[0] - self.assert_(isinstance(message, messages.ItemList)) - self.assertEquals(message.type, 'playlist') - self.assertEquals(message.id, self.playlist.id) - - self.assertEquals(len(message.items), len(self.items)) - message.items.sort(key=lambda i: i.id) - self.check_info_list(message.items, self.items) - - def test_update(self): - self.items[0].entry_title = u'new name' - self.items[0].signal_change() - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, changed=[self.items[0]]) - - def test_add(self): - self.make_item(u'http://example.com/3') - self.make_item(u'http://example.com/4') - self.make_item(u'http://example.com/5') - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, added=self.items[2:]) - - def test_remove(self): - self.items[1].remove() - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 2) - self.check_changed_message(1, removed=[self.items[1]]) - - def test_stop(self): - messages.StopTrackingItems( - 'playlist', self.playlist.id).send_to_backend() - self.runUrgentCalls() - self.items[0].entry_title = u'new name' - self.items[0].signal_change() - self.items[1].remove() - self.make_item(u'http://example.com/4') - self.runUrgentCalls() - self.assertEquals(len(self.test_handler.messages), 1) - -class ItemInfoCacheTest(FeedItemTrackTest): - # this class runs the exact same tests as FeedItemTrackTest, but using - # values read from the item_info_cache file. Also, we check to make sure - # that item_info_cache.save() after the test doesn't raise an exception. 
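
The __init__() below, from the removed ItemInfoCacheTest, uses a trick worth spelling out: it rebinds the chosen test method to a wrapper so that an extra check runs after every test body, without overriding each test individually. A self-contained sketch of the same pattern (SaveAfterEachTest and extra_check are invented names; the real class calls app.db.finish_transaction() and app.item_info_cache.save() instead):

import functools
import unittest

class SaveAfterEachTest(unittest.TestCase):
    def __init__(self, testMethodName='runTest'):
        unittest.TestCase.__init__(self, testMethodName)
        org_test_method = getattr(self, self._testMethodName)
        def wrapper():
            org_test_method()
            self.extra_check()  # runs after the real test body
        # keep the original name/docstring for unittest's reporting
        wrapped = functools.update_wrapper(wrapper, org_test_method)
        setattr(self, self._testMethodName, wrapped)

    def extra_check(self):
        pass  # stand-in for the save-and-check step

    def test_something(self):
        self.assertEqual(1 + 1, 2)

if __name__ == '__main__':
    unittest.main()
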
- def __init__(self, testMethodName='runTest'): - # little hack to call app.item_info_cache.save() at the end of our test - # method - FeedItemTrackTest.__init__(self, testMethodName) - org_test_method = getattr(self, self._testMethodName) - def wrapper(): - org_test_method() - app.db.finish_transaction() - app.item_info_cache.save() - test_with_save_at_end = functools.update_wrapper(wrapper, - org_test_method) - setattr(self, self._testMethodName, test_with_save_at_end) - - def setUp(self): - FeedItemTrackTest.setUp(self) - app.db.finish_transaction() - app.item_info_cache.save() - self.setup_new_item_info_cache() - -class ItemInfoCacheErrorTest(MiroTestCase): - # Test errors when loading the Item info cache - def setUp(self): - MiroTestCase.setUp(self) - self.items = [] - self.feed = Feed(u'dtv:manualFeed') - self.make_item(u'http://example.com/') - self.make_item(u'http://example.com/2') - - def make_item(self, url): - entry = _build_entry(url, 'video/x-unknown') - item_ = Item(FeedParserValues(entry), feed_id=self.feed.id) - self.items.append(item_) - - def test_failsafe_load(self): - # Make sure current data is saved - app.db.finish_transaction() - app.item_info_cache.save() - # insert bogus values into the db - app.db.cursor.execute("UPDATE item_info_cache SET pickle='BOGUS'") - # this should fallback to the failsafe values - self.setup_new_item_info_cache() - for item in self.items: - cache_info = app.item_info_cache.id_to_info[item.id] - real_info = itemsource.DatabaseItemSource._item_info_for(item) - self.assertEquals(cache_info.__dict__, real_info.__dict__) - # it should also delete all data from the item cache table - app.db.cursor.execute("SELECT COUNT(*) FROM item_info_cache") - self.assertEquals(app.db.cursor.fetchone()[0], 0) - # Next call to save() should fix the data - app.db.finish_transaction() - app.item_info_cache.save() - app.db.cursor.execute("SELECT COUNT(*) FROM item_info_cache") - self.assertEquals(app.db.cursor.fetchone()[0], len(self.items)) - for item in self.items: - app.db.cursor.execute("SELECT pickle FROM item_info_cache " - "WHERE id=%s" % item.id) - db_info = cPickle.loads(str(app.db.cursor.fetchone()[0])) - real_info = itemsource.DatabaseItemSource._item_info_for(item) - self.assertEquals(db_info.__dict__, real_info.__dict__) - - def test_failsafe_load_item_change(self): - # Test Items calling signal_change() when we do a failsafe load - - # setup some stuff so that we will do a failsafe load - app.db.finish_transaction() - app.item_info_cache.save() - self.clear_ddb_object_cache() - # insert bogus values into the db - app.db.cursor.execute("UPDATE item_info_cache SET pickle='BOGUS'") - app.item_info_cache = None - - # ensure that Item calls signal_change in setup_restored - old_setup_restored = Item.setup_restored - def new_setup_restored(self): - self.title = u'new title2' - self.signal_change() - old_setup_restored(self) - Item.setup_restored = new_setup_restored - try: - # load up item_info_cache - self.setup_new_item_info_cache() - finally: - Item.setup_restored = old_setup_restored - cached_info = self.get_info_from_item_info_cache(self.items[0].id) - self.assertEquals(cached_info.name, 'new title2') - - def test_change_in_setup_restored(self): - # Test Items changing themselve is setup_restored after we've loaded - # the item info cache. 
- - self.clear_ddb_object_cache() - # ensure that Item calls signal_change in setup_restored - old_setup_restored = Item.setup_restored - def new_setup_restored(self): - old_setup_restored(self) - self.title = u'new title2' - self.signal_change() - Item.setup_restored = new_setup_restored - try: - # Causes the items to be loaded from the db - list(Item.feed_view(self.feed.id)) - finally: - Item.setup_restored = old_setup_restored - cached_info = self.get_info_from_item_info_cache(self.items[0].id) - self.assertEquals(cached_info.name, 'new title2') - - def get_info_from_item_info_cache(self, id): - return app.item_info_cache.id_to_info[id] - - def test_item_info_version(self): - app.db.finish_transaction() - app.item_info_cache.save() - itemsource.DatabaseItemSource.VERSION += 1 - # We should delete the old cache data because ItemInfoCache.VERSION - # has changed - self.setup_new_item_info_cache() - app.db.cursor.execute("SELECT COUNT(*) FROM item_info_cache") - self.assertEquals(app.db.cursor.fetchone()[0], 0) - -class MetadataProgressUpdaterTest(EventLoopTest): - def setUp(self): - EventLoopTest.setUp(self) - self.updater = metadataprogress.MetadataProgressUpdater() - # make messages send immediately to speed up test execution - self.updater.message_interval = 0.0 - # setup a fake device to use - self.device = mock.Mock() - self.device.id = 123 - # make our handler - self.test_handler = TestFrontendMessageHandler() - messages.FrontendMessage.install_handler(self.test_handler) - # the warnings from MetadataProgressUpdater should be errors in the - # test case - self.log_filter.set_exception_level(logging.WARNING) - - def _check_message(self, index, target, remaining, total): - message = self.test_handler.messages[index] - self.assertEquals(message.target, target) - self.assertEquals(message.remaining, remaining) - self.assertEquals(message.total, total) - - def run_timeouts_and_idles(self): - """Run the timeouts/idles necessary to make our updater calls go - thorugh. - """ - # make will_process_path()/path_processed() run - self.runPendingIdles() - # make _send_updates() run - self.run_pending_timeouts() - - def test_simple(self): - p1 = 'foo.mp3' - p2 = 'foo2.mp3' - self.updater.will_process_path(p1, None) - self.updater.will_process_path(p2, None) - self.run_timeouts_and_idles() - self.assertEquals(len(self.test_handler.messages), 1) - self._check_message(0, ('library', 'audio'), 2, 2) - # finish one video - self.updater.path_processed(p2) - self.run_timeouts_and_idles() - self.assertEquals(len(self.test_handler.messages), 2) - self._check_message(1, ('library', 'audio'), 1, 2) - # finish last video, the total should go to 0 since we are done with - # our work - self.updater.path_processed(p1) - self.run_timeouts_and_idles() - self.assertEquals(len(self.test_handler.messages), 3) - self._check_message(2, ('library', 'audio'), 0, 0) - - def test_total_resets(self): - p1 = 'foo.mp3' - p2 = 'foo2.mp3' - # add p1, then finish it, and don't worry about the messages - self.updater.will_process_path(p1, None) - self.run_timeouts_and_idles() - self.updater.path_processed(p1) - self.run_timeouts_and_idles() - self.test_handler.messages = [] - # add p2 - self.updater.will_process_path(p2, None) - self.run_timeouts_and_idles() - # The total should have been reset to 0, when we finished with p1. 
- # Check that it now only includes p2 - self._check_message(0, ('library', 'audio'), 1, 1) - - def test_device(self): - p1 = 'foo.mp3' - p2 = 'foo2.mp3' - self.updater.will_process_path(p1, self.device) - self.updater.will_process_path(p2, self.device) - self.run_timeouts_and_idles() - self._check_message(0, ('device', '123-audio'), 2, 2) - - def test_duplicate_will_add(self): - # test that calling will_add for a path twice results in a warning - p1 = 'foo.mp3' - self.updater.will_process_path(p1, None) - self.run_timeouts_and_idles() - self.log_filter.set_exception_level(logging.CRITICAL) - self.updater.will_process_path(p1, None) - self.run_timeouts_and_idles() - self.log_filter.check_record_count(1) - self.log_filter.check_record_level(logging.WARNING) - # test that finishing the path, then adding again doesn't result in a - # warning - self.log_filter.set_exception_level(logging.WARNING) - self.updater.path_processed(p1) - self.updater.will_process_path(p1, None) - self.run_timeouts_and_idles() - - def test_unexpected_path_added(self): - # test that calling path_processed() for a path that wasn't set - # with will_process_path() is a noop - p1 = 'foo.mp3' - self.updater.path_processed(p1) - self.run_timeouts_and_idles() diff -Nru miro-4.0.4/lib/test/metadatatest.py miro-6.0/lib/test/metadatatest.py --- miro-4.0.4/lib/test/metadatatest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/metadatatest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,38 +1,2286 @@ -from miro.test.framework import MiroTestCase +import collections +import itertools +import logging +import os +import urllib +import urlparse +import random +import shutil +import string +import sqlite3 +import time +import json + +from miro.test import mock +from miro.test.framework import MiroTestCase, EventLoopTest, MatchAny +from miro import app +from miro import database +from miro import devices +from miro import echonest +from miro import item +from miro import httpclient +from miro import messages +from miro import models +from miro import prefs +from miro import schema +from miro import filetypes from miro import metadata +from miro import workerprocess +from miro.plat import resources +from miro.plat.utils import (PlatformFilenameType, + get_enmfp_executable_info, + utf8_to_filename, unicode_to_filename) +from miro.test import testobjects + +class MockMetadataProcessor(object): + """Replaces the mutagen and movie data code with test values.""" + def __init__(self): + self.reset() + + def reset(self): + # each dict in task_data maps source paths to callback/errback/task + # data the call to that path. For example, for each MutagenTask that + # we intercept, we store that task, the callback, and the errback. 
+ self.task_data = { + 'mutagen': {}, + 'movie-data': {}, + 'echonest-codegen': {}, + 'echonest': {}, + } + self.canceled_files = set() + # store the codes we see in query_echonest calls + self.query_echonest_codes = {} + self.query_echonest_metadata = {} + + def mutagen_paths(self): + """Get the paths for mutagen calls currently in the system.""" + return self.task_data['mutagen'].keys() + + def movie_data_paths(self): + """Get the paths for movie data calls currently in the system.""" + return self.task_data['movie-data'].keys() + + def echonest_codegen_paths(self): + """Get the paths for ecohnest codegen calls currently in the system.""" + return self.task_data['echonest-codegen'].keys() + + def echonest_paths(self): + """Get the paths for ecohnest codegen calls currently in the system.""" + return self.task_data['echonest'].keys() + + def add_task_data(self, source_path, name, data): + task_data_dict = self.task_data[name] + if source_path in task_data_dict: + raise ValueError("Already processing %s (path: %s)" % + (name, source_path)) + task_data_dict[source_path] = data + + def pop_task_data(self, source_path, name): + task_data_dict = self.task_data[name] + try: + return task_data_dict.pop(source_path) + except KeyError: + raise ValueError("No %s run scheduled for %s" % + (name, source_path)) + + def send(self, task, callback, errback): + task_data = (task, callback, errback) + + if isinstance(task, workerprocess.MutagenTask): + self.add_task_data(task.source_path, 'mutagen', task_data) + elif isinstance(task, workerprocess.MovieDataProgramTask): + self.add_task_data(task.source_path, 'movie-data', task_data) + elif isinstance(task, workerprocess.CancelFileOperations): + self.canceled_files.update(task.paths) + else: + raise TypeError(task) + + def exec_codegen(self, codegen_info, path, callback, errback): + task_data = (callback, errback) + self.add_task_data(path, 'echonest-codegen', task_data) + + def query_echonest(self, path, album_art_dir, code, version, metadata, + callback, errback): + if path in self.query_echonest_codes: + raise ValueError("query_echonest already called for %s" % path) + self.query_echonest_codes[path] = code + self.query_echonest_metadata[path] = metadata + self.add_task_data(path, 'echonest', (callback, errback)) + + def run_mutagen_callback(self, source_path, metadata): + task, callback, errback = self.pop_task_data(source_path, 'mutagen') + callback_data = {'source_path': source_path} + callback_data.update(metadata) + callback(task, callback_data) -class TestSource(metadata.Source): - pass + def run_mutagen_errback(self, source_path, error): + task, callback, errback = self.pop_task_data(source_path, 'mutagen') + errback(task, error) -class TestStore(metadata.Store): - def confirm_db_thread(self): pass - def signal_change(self): pass - # doesn't need a get_filename() because no coverart file will be written + def run_movie_data_callback(self, source_path, metadata): + task, callback, errback = self.pop_task_data(source_path, 'movie-data') + callback_data = {'source_path': source_path} + callback_data.update(metadata) + callback(task, callback_data) -class Metadata(MiroTestCase): + def run_movie_data_errback(self, source_path, error): + task, callback, errback = self.pop_task_data(source_path, 'movie-data') + errback(task, error) + + def run_echonest_codegen_callback(self, source_path, code): + callback, errback = self.pop_task_data(source_path, + 'echonest-codegen') + callback(source_path, code) + + def run_echonest_codegen_errback(self, source_path, 
+
+class MetadataManagerTest(MiroTestCase):
+    # Test the MetadataManager class
     def setUp(self):
         MiroTestCase.setUp(self)
+        self.mutagen_data = collections.defaultdict(dict)
+        self.movieprogram_data = collections.defaultdict(dict)
+        self.echonest_data = collections.defaultdict(dict)
+        self.echonest_ids = {}
+        self.user_info_data = collections.defaultdict(dict)
+        # maps paths -> should we do an internet lookup
+        self.net_lookup_enabled = {}
+        self.processor = MockMetadataProcessor()
+        self.patch_function('miro.workerprocess.send', self.processor.send)
+        self.patch_function('miro.echonest.exec_codegen',
+                            self.processor.exec_codegen)
+        self.patch_function('miro.echonest.query_echonest',
+                            self.processor.query_echonest)
+        self.metadata_manager = metadata.LibraryMetadataManager(self.tempdir,
+                                                                self.tempdir)
+        # For these examples we want to run echonest by default
+        app.config.set(prefs.NET_LOOKUP_BY_DEFAULT, True)
+        # Don't wait in-between runs of the echonest codegen
+        metadata._EchonestProcessor.CODEGEN_COOLDOWN_TIME = 0.0
+
+    def tearDown(self):
+        metadata._EchonestProcessor.CODEGEN_COOLDOWN_TIME = 5.0
+        MiroTestCase.tearDown(self)
+
+    def _calc_correct_metadata(self, path):
+        """Calculate what the metadata should be for a path."""
+        metadata = {
+            'file_type': filetypes.item_file_type_for_filename(path),
+        }
+        metadata.update(self.mutagen_data[path])
+        metadata.update(self.movieprogram_data[path])
+        if self.net_lookup_enabled[path]:
+            metadata.update(self.echonest_data[path])
+            metadata['net_lookup_enabled'] = True
+        else:
+            metadata['net_lookup_enabled'] = False
+        metadata.update(self.user_info_data[path])
+        if 'album' in metadata:
+            cover_art = self.cover_art_for_album(metadata['album'])
+            if cover_art:
+                metadata['cover_art'] = cover_art
+        # created_cover_art is used by MetadataManager, but it's not saved to
+        # the metadata table
+        if 'created_cover_art' in metadata:
+            del metadata['created_cover_art']
+        return metadata
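+
+    # Note the ordering in _calc_correct_metadata() above: later update()
+    # calls win, so the effective precedence (lowest to highest) is the
+    # filename-based guess, mutagen, movie data, echonest (only when net
+    # lookup is enabled for the path), and finally user-entered info.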
+
+    def check_set_net_lookup_enabled(self, filename, enabled):
+        path = self.make_path(filename)
+        self.net_lookup_enabled[path] = enabled
+        self.metadata_manager.set_net_lookup_enabled([path], enabled)
+        self.check_metadata(path)
+
+    def cover_art_for_album(self, album_name):
+        mutagen_cover_art = None
+        echonest_cover_art = None
+        for metadata in self.mutagen_data.values():
+            if ('album' in metadata and 'cover_art' in metadata and
+                metadata['album'] == album_name):
+                if (mutagen_cover_art is not None and
+                    metadata['cover_art'] != mutagen_cover_art):
+                    raise AssertionError("Different mutagen cover_art "
+                                         "for " + album_name)
+                mutagen_cover_art = metadata['cover_art']
+        for metadata in self.echonest_data.values():
+            if ('album' in metadata and 'cover_art' in metadata and
+                metadata['album'] == album_name):
+                if (echonest_cover_art is not None and
+                    metadata['cover_art'] != echonest_cover_art):
+                    raise AssertionError("Different echonest cover_art "
+                                         "for " + album_name)
+                echonest_cover_art = metadata['cover_art']
+        if echonest_cover_art:
+            return echonest_cover_art
+        else:
+            return mutagen_cover_art
+
+    def check_metadata(self, filename):
+        path = self.make_path(filename)
+        correct_metadata = self._calc_correct_metadata(path)
+        self.metadata_manager._process_metadata_finished()
+        with self.allow_warnings():
+            self.metadata_manager._process_metadata_errors()
+        metadata_for_path = self.metadata_manager.get_metadata(path)
+        # don't check has_drm, we have a special test for that
+        for dct in (metadata_for_path, correct_metadata):
+            for key in ('has_drm', 'drm'):
+                if key in dct:
+                    del dct[key]
+        self.assertDictEquals(metadata_for_path, correct_metadata)
+        # check echonest ids
+        status = metadata.MetadataStatus.get_by_path(path)
+        self.assertEquals(status.echonest_id,
+                          self.echonest_ids.get(path))
+
+    def make_path(self, filename):
+        """Create a pathname for that file in the "/videos" directory
+        """
+        if not filename.startswith('/'):
+            return '/videos/' + filename
+        else:
+            # filename is already absolute
+            return filename
+
+    def check_add_file(self, filename):
+        path = self.make_path(filename)
+        pref_value = app.config.get(prefs.NET_LOOKUP_BY_DEFAULT)
+        self.net_lookup_enabled[path] = pref_value
+        # before we add the path, get_metadata() should raise a KeyError
+        self.assertRaises(KeyError, self.metadata_manager.get_metadata, path)
+        # after we add the path, we should only have metadata that we can
+        # guess from the file
+        self.metadata_manager.add_file(path)
+        self.check_metadata(path)
+        # after we add the path, calling add_file() again should raise a
+        # ValueError
+        with self.allow_warnings():
+            self.assertRaises(ValueError, self.metadata_manager.add_file, path)
+
+    def cover_art(self, album_name, echonest=False):
+        path_parts = [self.tempdir]
+        if echonest:
+            path_parts.append('echonest')
+        path_parts.append(urllib.quote(album_name, safe=" ,"))
+        return os.path.join(*path_parts)
+
+    def check_run_mutagen(self, filename, file_type, duration, title,
+                          album=None, drm=False, cover_art=True):
+        # NOTE: real mutagen calls send more metadata, but this is enough to
+        # test
+        path = self.make_path(filename)
+        mutagen_data = {}
+        if file_type is not None:
+            mutagen_data['file_type'] = unicode(file_type)
+        if duration is not None:
+            mutagen_data['duration'] = duration
+        if title is not None:
+            mutagen_data['title'] = unicode(title)
+        if album is not None:
+            mutagen_data['album'] = unicode(album)
+        mutagen_data['drm'] = drm
+        if cover_art and album is not None:
+            cover_art = self.cover_art(album)
+            mutagen_data['cover_art'] = cover_art
+            if not os.path.exists(cover_art):
+                # simulate read_metadata() writing the cover art file
+                open(cover_art, 'wb').write("FAKE FILE")
+                mutagen_data['created_cover_art'] = True
+        self.mutagen_data[path] = mutagen_data
+        self.processor.run_mutagen_callback(path, mutagen_data)
+        self.check_metadata(path)
+
+    def check_queued_mutagen_calls(self, filenames):
+        correct_paths = ['/videos/' + f for f in filenames]
+        self.assertSameSet(correct_paths, self.processor.mutagen_paths())
+
+    def check_queued_moviedata_calls(self, filenames):
+        correct_paths = ['/videos/' + f for f in filenames]
+        self.assertSameSet(correct_paths, self.processor.movie_data_paths())
+
+    def check_queued_echonest_codegen_calls(self, filenames):
+        correct_paths = ['/videos/' + f for f in filenames]
+        self.assertSameSet(correct_paths,
+                           self.processor.echonest_codegen_paths())
+
+    def check_queued_echonest_calls(self, filenames):
+        correct_paths = ['/videos/' + f for f in filenames]
+        self.assertSameSet(correct_paths,
+                           self.processor.echonest_paths())
+
+    def get_metadata(self, filename):
+        path = self.make_path(filename)
+        return self.metadata_manager.get_metadata(path)
+
+    def check_mutagen_error(self, filename):
+        path = self.make_path(filename)
+        with self.allow_warnings():
+            self.processor.run_mutagen_errback(path, ValueError())
+        # mutagen failing shouldn't change the metadata
+        self.check_metadata(path)
+
+    def check_movie_data_not_scheduled(self, filename):
+        if self.make_path(filename) in self.processor.movie_data_paths():
+            raise AssertionError("movie data scheduled for %s" % filename)
+
+    def get_screenshot_path(self, filename):
+        return '/tmp/' + filename + '.png'
+ """ + random.seed(path) + length = random.randint(3000, 4000) + return ''.join(random.choice(string.ascii_letters) + for i in xrange(length)) + + def check_run_echonest_codegen(self, filename): + path = self.make_path(filename) + code = self.calc_fake_echonest_code(path) + self.processor.run_echonest_codegen_callback(path, code) + self.check_metadata(path) + # check that the data sent to echonest is correct + metadata = self._calc_correct_metadata(path) + echonest_metadata = {} + for key in ('title', 'artist', 'duration'): + if key in metadata: + echonest_metadata[key] = metadata[key] + if 'album' in metadata: + echonest_metadata['release'] = metadata['album'] + self.assertEquals(self.processor.query_echonest_codes[path], code) + self.assertDictEquals(self.processor.query_echonest_metadata[path], + echonest_metadata) + + def allow_additional_echonest_query(self, filename): + path = self.make_path(filename) + del self.processor.query_echonest_codes[path] + + def check_echonest_codegen_error(self, filename): + path = self.make_path(filename) + error = IOError() + with self.allow_warnings(): + self.processor.run_echonest_codegen_errback(path, error) + self.check_metadata(path) + + def check_run_echonest(self, filename, title, artist=None, album=None): + path = self.make_path(filename) + echonest_data = {} + echonest_data['title'] = unicode('title') + if artist is not None: + echonest_data['artist'] = unicode(artist) + if album is not None: + echonest_data['album'] = unicode(album) + cover_art = self.cover_art(album, True) + echonest_data['cover_art'] = cover_art + # simulate grab_url() writing the mutagen_data file + if not os.path.exists(cover_art): + open(cover_art, 'wb').write("FAKE FILE") + echonest_data['created_cover_art'] = True + # if any data is present, generate a fake echonest id + result_data = echonest_data.copy() + if (title, artist, album) != (None, None, None): + if self.echonest_ids.get(path) is None: + echonest_id = u''.join(random.choice(string.ascii_letters) + for i in xrange(10)) + self.echonest_ids[path] = echonest_id + else: + echonest_id = self.echonest_ids[path] + result_data['echonest_id'] = echonest_id + self.echonest_data[path] = echonest_data + self.processor.run_echonest_callback(path, result_data) + self.check_metadata(path) + + def check_echonest_error(self, filename, http_error=False): + path = self.make_path(filename) + if http_error: + error = httpclient.UnknownHostError('fake.echonest.host') + else: + error = IOError() + with self.allow_warnings(): + self.processor.run_echonest_errback(path, error) + self.check_metadata(path) + + def check_set_user_info(self, filename, **info): + path = self.make_path(filename) + self.user_info_data[path].update(info) + self.metadata_manager.set_user_data(path, info) + # force the entry for the user data to be reloaded. 
+
+    def check_run_echonest_codegen(self, filename):
+        path = self.make_path(filename)
+        code = self.calc_fake_echonest_code(path)
+        self.processor.run_echonest_codegen_callback(path, code)
+        self.check_metadata(path)
+        # check that the data sent to echonest is correct
+        metadata = self._calc_correct_metadata(path)
+        echonest_metadata = {}
+        for key in ('title', 'artist', 'duration'):
+            if key in metadata:
+                echonest_metadata[key] = metadata[key]
+        if 'album' in metadata:
+            echonest_metadata['release'] = metadata['album']
+        self.assertEquals(self.processor.query_echonest_codes[path], code)
+        self.assertDictEquals(self.processor.query_echonest_metadata[path],
+                              echonest_metadata)
+
+    def allow_additional_echonest_query(self, filename):
+        path = self.make_path(filename)
+        del self.processor.query_echonest_codes[path]
+
+    def check_echonest_codegen_error(self, filename):
+        path = self.make_path(filename)
+        error = IOError()
+        with self.allow_warnings():
+            self.processor.run_echonest_codegen_errback(path, error)
+        self.check_metadata(path)
+
+    def check_run_echonest(self, filename, title, artist=None, album=None):
+        path = self.make_path(filename)
+        echonest_data = {}
+        if title is not None:
+            echonest_data['title'] = unicode(title)
+        if artist is not None:
+            echonest_data['artist'] = unicode(artist)
+        if album is not None:
+            echonest_data['album'] = unicode(album)
+            cover_art = self.cover_art(album, True)
+            echonest_data['cover_art'] = cover_art
+            # simulate grab_url() writing the cover art file
+            if not os.path.exists(cover_art):
+                open(cover_art, 'wb').write("FAKE FILE")
+                echonest_data['created_cover_art'] = True
+        # if any data is present, generate a fake echonest id
+        result_data = echonest_data.copy()
+        if (title, artist, album) != (None, None, None):
+            if self.echonest_ids.get(path) is None:
+                echonest_id = u''.join(random.choice(string.ascii_letters)
+                                       for i in xrange(10))
+                self.echonest_ids[path] = echonest_id
+            else:
+                echonest_id = self.echonest_ids[path]
+            result_data['echonest_id'] = echonest_id
+        self.echonest_data[path] = echonest_data
+        self.processor.run_echonest_callback(path, result_data)
+        self.check_metadata(path)
+
+    def check_echonest_error(self, filename, http_error=False):
+        path = self.make_path(filename)
+        if http_error:
+            error = httpclient.UnknownHostError('fake.echonest.host')
+        else:
+            error = IOError()
+        with self.allow_warnings():
+            self.processor.run_echonest_errback(path, error)
+        self.check_metadata(path)
+
+    def check_set_user_info(self, filename, **info):
+        path = self.make_path(filename)
+        self.user_info_data[path].update(info)
+        self.metadata_manager.set_user_data(path, info)
+        # force the entry for the user data to be reloaded.  This ensures that
+        # the changes are actually reflected in the database
+        status = metadata.MetadataStatus.get_by_path(path)
+        entry = metadata.MetadataEntry.get_entry(u'user-data', status)
+        self.reload_object(entry)
+        self.check_metadata(path)
+
+    def test_video(self):
+        # Test video files with no issues
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 101, 'Foo', 'Fight Vids')
+        self.check_run_movie_data('foo.avi', 'video', 100, True)
+        self.check_echonest_not_scheduled('foo.avi')
+
+    def test_video_no_screenshot(self):
+        # Test video files where the movie data program fails to take a
+        # screenshot
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_run_movie_data('foo.avi', 'video', 100, False)
+        self.check_echonest_not_scheduled('foo.avi')
+
+    def test_audio(self):
+        # Test audio files with no issues
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+
+    def test_audio_without_tags(self):
+        # Test audio files without any metadata for echonest
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, None, None)
+        self.check_movie_data_not_scheduled('foo.mp3')
+        # Since there wasn't any metadata to send to echonest, we should have
+        # scheduled running the codegen
+        self.check_run_echonest_codegen('foo.mp3')
+        # After we run the codegen, we should run an echonest_query
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+
+    def test_echonest_codegen_error(self):
+        # Test audio files that echonest_codegen bails on
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, None)
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_echonest_codegen_error('foo.mp3')
+
+    def test_internet_lookup_pref(self):
+        # Test that the NET_LOOKUP_BY_DEFAULT pref works
+        app.config.set(prefs.NET_LOOKUP_BY_DEFAULT, False)
+        self.check_add_file('foo.mp3')
+        metadata = self.get_metadata('foo.mp3')
+        self.assertEquals(metadata['net_lookup_enabled'], False)
+        app.config.set(prefs.NET_LOOKUP_BY_DEFAULT, True)
+        self.check_add_file('bar.mp3')
+        metadata = self.get_metadata('bar.mp3')
+        self.assertEquals(metadata['net_lookup_enabled'], True)
+
+    def test_set_net_lookup_enabled(self):
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+        self.check_set_net_lookup_enabled('foo.mp3', False)
+        self.check_set_net_lookup_enabled('foo.mp3', True)
+
+    def test_set_net_lookup_for_all(self):
+        for x in xrange(10):
+            self.check_add_file('foo-%s.mp3' % x)
+
+        self.metadata_manager.set_net_lookup_enabled_for_all(False)
+        for x in xrange(10):
+            path = self.make_path('foo-%s.mp3' % x)
+            metadata = self.metadata_manager.get_metadata(path)
+            self.assertEquals(metadata['net_lookup_enabled'], False)
+
+        self.metadata_manager.set_net_lookup_enabled_for_all(True)
+        for x in xrange(10):
+            path = self.make_path('foo-%s.mp3' % x)
+            metadata = self.metadata_manager.get_metadata(path)
+            self.assertEquals(metadata['net_lookup_enabled'], True)
+
+    def test_net_lookup_enabled_stops_processor(self):
+        # test that we don't run the echonest processor if it's not enabled
+        app.config.set(prefs.NET_LOOKUP_BY_DEFAULT, False)
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_echonest_not_running('foo.mp3')
+        self.check_echonest_not_scheduled('foo.mp3')
+        # test that it starts running if we set the value to true
+        self.check_set_net_lookup_enabled('foo.mp3', True)
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights2')
+
+    def test_net_lookup_enabled_signals(self):
+        # test signal handling when toggling the net_lookup_enabled flag
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights2')
+        # test that we get the new-metadata signal when we set/unset the
+        # net_lookup_enabled flag
+        foo_path = self.make_path('foo.mp3')
+        signal_handler = mock.Mock()
+        self.metadata_manager.connect("new-metadata", signal_handler)
+        def check_callback_data():
+            args, kwargs = signal_handler.call_args
+            self.assertEquals(kwargs, {})
+            self.assertEquals(args[0], self.metadata_manager)
+            # _calc_correct_metadata doesn't calculate has_drm.  Just ignore
+            # it for this test
+            del args[1][foo_path]['has_drm']
+            self.assertEquals(args[1].keys(), [foo_path])
+            correct_metadata = self._calc_correct_metadata(foo_path)
+            # we include None values for this signal because we may be erasing
+            # metadata
+            for name in metadata.attribute_names:
+                if name not in correct_metadata and name != 'has_drm':
+                    correct_metadata[name] = None
+            self.assertDictEquals(args[1][foo_path], correct_metadata)
+
+        self.check_set_net_lookup_enabled('foo.mp3', False)
+        self.assertEquals(signal_handler.call_count, 1)
+        check_callback_data()
+
+        self.check_set_net_lookup_enabled('foo.mp3', True)
+        self.assertEquals(signal_handler.call_count, 2)
+        check_callback_data()
+
+    def test_echonest_error(self):
+        # Test echonest failing
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_echonest_error('foo.mp3')
+
+    def test_echonest_http_error(self):
+        # Test echonest failing with an HTTP error
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        # If echonest sees an HTTP error, it should log it as a temporary
+        # failure
+        mock_retry_temporary_failure_caller = mock.Mock()
+        patcher = mock.patch.object(self.metadata_manager,
+                                    '_retry_temporary_failure_caller',
+                                    mock_retry_temporary_failure_caller)
+        with patcher:
+            self.check_echonest_error('foo.mp3', http_error=True)
+        path = self.make_path('foo.mp3')
+        status = metadata.MetadataStatus.get_by_path(path)
+        self.assertEquals(status.echonest_status,
+                          status.STATUS_TEMPORARY_FAILURE)
+        # check that we scheduled an attempt to retry the request
+        mock_call = mock_retry_temporary_failure_caller.call_after_timeout
+        mock_call.assert_called_once_with(3600)
+        # check that the query succeeds after retrying
+        self.metadata_manager.retry_temporary_failures()
+        status = metadata.MetadataStatus.get_by_path(path)
+        self.assertEquals(status.echonest_status,
+                          status.STATUS_NOT_RUN)
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+
+    def test_audio_shares_cover_art(self):
+        # Test that if one audio file in an album has cover art, they all
+        # will
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_add_file('foo2.mp3')
+        self.check_run_mutagen('foo2.mp3', 'audio', 300, 'Foo', 'Fights',
+                               cover_art=False)
+        self.check_add_file('foo3.mp3')
+        self.check_run_mutagen('foo3.mp3', 'audio', 400, 'Baz', 'Fights',
+                               cover_art=False)
+
+    def test_audio_no_duration(self):
+        # Test audio files where mutagen can't get the duration
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', None, 'Bar', 'Fights')
+        # Because mutagen failed to get the duration, we should have a movie
+        # data call scheduled
+        self.check_run_movie_data('foo.mp3', 'audio', 100, False)
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+
+    def test_audio_no_duration2(self):
+        # same as test_audio_no_duration, but have movie data return that the
+        # file is actually a video file.  In this case, we shouldn't run
+        # echonest_codegen
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', None, 'Bar', 'Fights')
+        # Because mutagen failed to get the duration, we should have a movie
+        # data call scheduled
+        self.check_run_movie_data('foo.mp3', 'video', 100, False)
+        # since movie data returned video, we shouldn't run echonest_codegen
+        self.check_echonest_not_scheduled('foo.mp3')
+
+    def test_ogg(self):
+        # Test ogg files
+        self.check_add_file('foo.ogg')
+        self.check_run_mutagen('foo.ogg', 'audio', 100, 'Bar', 'Fights')
+        # Even though mutagen thinks this file is audio, we should still run
+        # movie data because it might be a mis-identified ogv file
+        self.check_run_movie_data('foo.ogg', 'video', 100, True)
+        self.check_echonest_not_scheduled('foo.ogg')
+
+    def test_other(self):
+        # Test non media files
+        self.check_add_file('foo.pdf')
+        self.check_run_mutagen('foo.pdf', 'other', None, None, None)
+        # Since mutagen couldn't determine the file type, we should run movie
+        # data
+        self.check_run_movie_data('foo.pdf', 'other', None, False)
+        # since neither could determine the file type, we shouldn't run
+        # echonest_codegen
+        self.check_echonest_not_scheduled('foo.pdf')
+
+    def test_mutagen_failure(self):
+        # Test mutagen failing
+        self.check_add_file('foo.avi')
+        self.check_mutagen_error('foo.avi')
+        # We should run movie data since mutagen failed
+        self.check_run_movie_data('foo.avi', 'other', 100, True)
+        self.check_echonest_not_scheduled('foo.avi')
+
+    def test_movie_data_failure(self):
+        # Test video files where movie data fails
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_movie_data_error('foo.avi')
+        self.check_echonest_not_scheduled('foo.avi')
+
+    def test_movie_data_skips_other(self):
+        # Check that we don't run movie data if mutagen can't read the file
+        # and the extension indicates it's not a media file (#18840)
+        self.check_add_file('foo.pdf')
+        self.check_run_mutagen('foo.pdf', None, None, None)
+        self.check_movie_data_not_scheduled('foo.pdf')
+
+    def test_has_drm(self):
+        # check the has_drm flag
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'audio', 100, 'Foo', 'Fighters',
+                               drm=True)
+        # if mutagen thinks a file has drm, we still need to check with movie
+        # data to make sure
+        self.assertEquals(self.get_metadata('foo.avi')['has_drm'], False)
+        # if we get a movie data error, then we know there's DRM
+        self.check_movie_data_error('foo.avi')
+        self.assertEquals(self.get_metadata('foo.avi')['has_drm'], True)
+
+        # let's try that whole process again, but make movie data succeed.
+        # In that case has_drm should be false
+        self.check_add_file('foo2.avi')
+        self.check_run_mutagen('foo2.avi', 'audio', 100, 'Foo', 'Fighters',
+                               drm=True)
+        self.assertEquals(self.get_metadata('foo2.avi')['has_drm'], False)
+        self.check_run_movie_data('foo2.avi', 'audio', 100, True)
+        self.assertEquals(self.get_metadata('foo2.avi')['has_drm'], False)
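+
+    # Note on test_has_drm above: mutagen's drm flag by itself never sets
+    # has_drm; the flag only becomes True once movie data also fails on the
+    # file.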
+
+    def test_cover_art_and_new_metadata(self):
+        # Test that when we get cover art for one item, we update it for all
+        # items in that album
+        self.check_add_file('foo.mp3')
+        self.check_add_file('foo-2.mp3')
+        self.check_add_file('foo-3.mp3')
+        self.check_add_file('foo-4.mp3')
+        self.check_add_file('foo-5.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'AlbumOne',
+                               cover_art=False)
+        self.check_run_mutagen('foo-2.mp3', 'audio', 200, 'Bar', 'AlbumOne',
+                               cover_art=False)
+        self.check_run_mutagen('foo-3.mp3', 'audio', 200, 'Bar',
+                               'DifferentAlbum', cover_art=False)
+        # send new-metadata for all of the current changes
+        self.metadata_manager.run_updates()
+        # set up a signal handler to handle the next new-metadata signal
+        signal_handler = mock.Mock()
+        self.metadata_manager.connect("new-metadata", signal_handler)
+        # Simulate mutagen getting cover art.  We should send new-metadata
+        # for all items in the album
+        self.check_run_mutagen('foo-4.mp3', 'audio', 200, 'Bar', 'AlbumOne',
+                               cover_art=True)
+
+        # make our metadata manager send the new-metadata signal and check
+        # the result
+        def check_new_metadata_for_album(album_name, *correct_paths):
+            """Check that the new-metadata signal emits for all items in an
+            album.
+
+            :param album_name: name of the album
+            :param correct_paths: paths for all items in the album
+            """
+
+            signal_handler.reset_mock()
+            self.metadata_manager.run_updates()
+            self.assertEquals(signal_handler.call_count, 1)
+            args = signal_handler.call_args[0]
+            self.assertEquals(args[0], self.metadata_manager)
+            new_metadata = args[1]
+            self.assertSameSet(new_metadata.keys(), correct_paths)
+            correct_cover_art = self.cover_art_for_album(album_name)
+            for key, value in new_metadata.items():
+                self.assertEquals(value['cover_art'], correct_cover_art)
+        check_new_metadata_for_album('AlbumOne',
+                                     self.make_path('foo.mp3'),
+                                     self.make_path('foo-2.mp3'),
+                                     self.make_path('foo-4.mp3'),
+                                    )
+        # test that if we get more cover art for the same file, we don't
+        # re-update the other items
+        signal_handler.reset_mock()
+        self.check_run_mutagen('foo-5.mp3', 'audio', 200, 'Bar', 'AlbumOne',
+                               cover_art=True)
+        self.metadata_manager.run_updates()
+        self.assertEquals(signal_handler.call_count, 1)
+        args = signal_handler.call_args[0]
+        self.assertEquals(args[0], self.metadata_manager)
+        new_metadata = args[1]
+        self.assertSameSet(new_metadata.keys(), [
+            self.make_path('foo-5.mp3'),
+        ])
+
+        # Test cover art from echonest
+        signal_handler.reset_mock()
+        self.check_run_echonest('foo.mp3', 'NewTitle', 'NewArtist',
+                                'AlbumOne')
+        check_new_metadata_for_album('AlbumOne',
+                                     self.make_path('foo.mp3'),
+                                     self.make_path('foo-2.mp3'),
+                                     self.make_path('foo-4.mp3'),
+                                     self.make_path('foo-5.mp3'),
+                                    )
+
+    def test_restart_incomplete(self):
+        # test restarting incomplete metadata processing
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_add_file('bar.avi')
+        self.check_add_file('baz.mp3')
+        self.check_run_mutagen('baz.mp3', 'audio', 100, None)
+        self.check_add_file('qux.avi')
+        self.check_run_mutagen('qux.avi', 'video', 100, 'Foo')
+        self.check_run_movie_data('qux.avi', 'video', 100, True)
+        # At this point, foo is waiting for moviedata, bar is waiting for
+        # mutagen and baz is waiting for echonest_codegen.
+        self.check_queued_moviedata_calls(['foo.avi'])
+        self.check_queued_mutagen_calls(['bar.avi'])
+        self.check_queued_echonest_codegen_calls(['baz.mp3'])
+        # Check that if we call restart_incomplete now, we don't queue
+        # mutagen or movie data twice.
+        self.processor.reset()
+        self.metadata_manager.restart_incomplete()
+        self.check_queued_moviedata_calls([])
+        self.check_queued_mutagen_calls([])
+        self.check_queued_echonest_codegen_calls([])
+        # Create a new MetadataManager and call restart_incomplete on that.
+        # That should invoke mutagen and movie data
+        self.metadata_manager = metadata.LibraryMetadataManager(self.tempdir,
+                                                                self.tempdir)
+        self.metadata_manager.restart_incomplete()
+        self.check_queued_moviedata_calls(['foo.avi'])
+        self.check_queued_mutagen_calls(['bar.avi'])
+        self.check_queued_echonest_codegen_calls(['baz.mp3'])
+        # Check that when things finish, we get other incomplete metadata
+        self.check_run_mutagen('bar.avi', 'audio', None, None)
+        # None for both duration and title will cause bar.avi to go through
+        # both movie data and the echonest codegen
+        self.check_queued_moviedata_calls(['foo.avi', 'bar.avi'])
+        self.check_run_movie_data('bar.avi', 'audio', 100, None)
+        self.check_run_echonest_codegen('baz.mp3')
+        self.check_run_echonest('baz.mp3', 'Foo')
+        self.check_queued_echonest_codegen_calls(['bar.avi'])
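+
+    # As exercised above, restart_incomplete() only reschedules work that
+    # the database records as incomplete; a manager that already queued a
+    # task for a path will not queue it a second time.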
+
+    def test_restart_incomplete_restarts_http_errors(self):
+        # test that restart_incomplete retries echonest HTTP errors
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 100, 'Foo')
+        self.check_echonest_error('foo.mp3', http_error=True)
+        self.processor.reset()
+        self.metadata_manager = metadata.LibraryMetadataManager(self.tempdir,
+                                                                self.tempdir)
+        self.metadata_manager.restart_incomplete()
+        self.check_queued_echonest_calls(['foo.mp3'])
+
+    @mock.patch('time.time')
+    @mock.patch('miro.eventloop.add_timeout')
+    def test_schedule_retry_net_lookup(self, mock_add_timeout, mock_time):
+        # make time stand still for this test to make checking add_timeout
+        # simpler
+        mock_time.return_value = 1000.0
+
+        caller = self.metadata_manager._retry_net_lookup_caller
+
+        # the first time the user starts up miro, the check should be
+        # scheduled in 10 minutes.
+        self.metadata_manager.schedule_retry_net_lookup()
+        mock_add_timeout.assert_called_once_with(
+            600, caller.call_now, MatchAny(),
+            args=(), kwargs={})
+        # test calling it once the timeout fires.  The next time it should
+        # be scheduled for 1 week later
+        self.metadata_manager.retry_net_lookup()
+        mock_add_timeout.reset_mock()
+        self.metadata_manager.schedule_retry_net_lookup()
+        mock_add_timeout.assert_called_once_with(
+            60 * 60 * 24 * 7, caller.call_now, MatchAny(),
+            args=(), kwargs={})
+
+    def test_retry_net_lookup(self):
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'title', 'album')
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', None)
+        self.allow_additional_echonest_query('foo.mp3')
+        self.metadata_manager.retry_net_lookup()
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', 'Album')
+        path = self.make_path('foo.mp3')
+        query_metadata = self.processor.query_echonest_metadata[path]
+        self.assertEquals(query_metadata['echonest_id'],
+                          self.echonest_ids[path])
+
+    def test_retry_net_lookup_no_echonest_id(self):
+        # test calling retry_net_lookup for items without an echonest id
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'title', 'album')
+        # this simulates echonest not finding the album, so echonest_id
+        # will not be set
+        self.check_run_echonest('foo.mp3', None, None, None)
+        self.allow_additional_echonest_query('foo.mp3')
+        self.metadata_manager.retry_net_lookup()
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', 'Album')
+        path = self.make_path('foo.mp3')
+        query_metadata = self.processor.query_echonest_metadata[path]
+        self.assert_('echonest_id' not in query_metadata)
+        # now that the second run worked, we should have an echonest id
+        status = metadata.MetadataStatus.get_by_path(path)
+        self.assertNotEquals(status.echonest_id, None)
+
+    def test_retry_net_lookup_errors(self):
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'title', 'album')
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', None)
+        self.allow_additional_echonest_query('foo.mp3')
+        self.metadata_manager.retry_net_lookup()
+        self.check_echonest_error('foo.mp3')
+        path = self.make_path('foo.mp3')
+        query_metadata = self.processor.query_echonest_metadata[path]
+        self.assertEquals(query_metadata['echonest_id'],
+                          self.echonest_ids[path])
+        # test that the progress counter is updated
+        count_tracker = self.metadata_manager.count_tracker
+        self.assertEquals(count_tracker.get_count_info(u'audio'),
+                          (0, 0, 0))
+
+    def test_retry_net_lookup_called_twice(self):
+        # test if retry_net_lookup() is called a second time before the
+        # first pass finishes.  This seems pretty unlikely to happen in real
+        # life, but it's a good check for robustness anyway.
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'title', 'album')
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', None)
+        self.allow_additional_echonest_query('foo.mp3')
+        self.metadata_manager.retry_net_lookup()
+        self.metadata_manager.retry_net_lookup()
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', 'Album')
+        path = self.make_path('foo.mp3')
+        query_metadata = self.processor.query_echonest_metadata[path]
+        self.assertEquals(query_metadata['echonest_id'],
+                          self.echonest_ids[path])
+
+    def test_retry_net_lookup_checks_net_lookup_enabled(self):
+        # test that retry_net_lookup() honors net_lookup_enabled being False
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'title', 'album')
+        self.check_run_echonest('foo.mp3', 'title', 'Artist', None)
+        self.check_set_net_lookup_enabled('foo.mp3', False)
+        self.allow_additional_echonest_query('foo.mp3')
+        self.metadata_manager.retry_net_lookup()
+        self.check_echonest_not_running('foo.mp3')
+        # test that the progress counter is not updated
+        count_tracker = self.metadata_manager.count_tracker
+        self.assertEquals(count_tracker.get_count_info(u'audio'),
+                          (0, 0, 0))
+
+    def test_retry_net_lookup_with_many_items(self):
+        should_retry = []
+        shouldnt_retry = []
+        # make items missing album data
+        for i in range(10):
+            name = 'noalbum-song-%d.mp3' % i
+            self.check_add_file(name)
+            self.check_run_mutagen(name,
+                                   'audio', 200, 'title', 'album')
+            self.check_run_echonest(name,
+                                    'title', 'better-artist-name', None)
+            should_retry.append(name)
+        # make items with album data
+        for i in range(10):
+            name = 'withalbum-song-%d.mp3' % i
+            self.check_add_file(name)
+            self.check_run_mutagen(name,
+                                   'audio', 200, 'title', 'album')
+            self.check_run_echonest(name,
+                                    'title', 'better-artist-name',
+                                    'better-album-title')
+            shouldnt_retry.append(name)
+
+        # check what happens when retry_net_lookup is called
+        for name in should_retry:
+            self.allow_additional_echonest_query(name)
+        self.metadata_manager.retry_net_lookup()
+
+        for name in shouldnt_retry:
+            self.check_echonest_not_running(name)
+        # make some of the retries succeed
+        for i in range(5):
+            name = should_retry.pop(0)
+            self.check_run_echonest(name, 'title', 'better-artist-name',
+                                    'better-album-title')
+            shouldnt_retry.append(name)
+        # make some of them fail
+        for name in should_retry:
+            self.check_run_echonest(name, 'title', 'better-artist-name', None)
+
+        # check what happens when retry_net_lookup is called again
+        for name in should_retry:
+            self.allow_additional_echonest_query(name)
+        self.metadata_manager.retry_net_lookup()
+        for name in shouldnt_retry:
+            self.check_echonest_not_running(name)
+        # this time let all the albums succeed
+        for name in should_retry:
+            self.check_run_echonest(name, 'title', 'better-artist-name',
+                                    'better-album-title')
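+
+    # Summary of the retry behavior above: retry_net_lookup() re-queries
+    # only items whose earlier echonest lookup came back without an album,
+    # and it skips items whose net_lookup_enabled flag is False.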
+
+    def check_path_in_system(self, filename, correct_value):
+        path = self.make_path(filename)
+        self.assertEquals(self.metadata_manager.path_in_system(path),
+                          correct_value)
+
+    def test_path_in_system(self):
+        # Test the path_in_system() call
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_add_file('bar.avi')
+        self.check_add_file('baz.mp3')
+        self.check_run_mutagen('baz.mp3', 'audio', 100, 'Foo', 'Fighters')
+        self.check_add_file('qux.avi')
+        self.check_run_mutagen('qux.avi', 'video', 100, 'Foo')
+        self.check_run_movie_data('qux.avi', 'video', 100, True)
+        self.check_path_in_system('foo.avi', True)
+        self.check_path_in_system('bar.avi', True)
+        self.check_path_in_system('baz.mp3', True)
+        self.check_path_in_system('qux.avi', True)
+        self.check_path_in_system('other-file.avi', False)
+        # Test path_in_system() for objects in the DB, but not in cache
+        self.clear_ddb_object_cache()
+        self.metadata_manager = metadata.LibraryMetadataManager(self.tempdir,
+                                                                self.tempdir)
+        self.check_path_in_system('foo.avi', True)
+        self.check_path_in_system('bar.avi', True)
+        self.check_path_in_system('baz.mp3', True)
+        self.check_path_in_system('qux.avi', True)
+        self.check_path_in_system('other-file.avi', False)
+
+    def test_path_in_system_failed_insert(self):
+        # check that if the DB insert fails for some reason, then
+        # path_in_system() returns False (#19508)
+        path = self.make_path('foo.avi')
+        mock_insert = mock.Mock()
+        mock_insert.side_effect = sqlite3.OperationalError
+        patcher = mock.patch('miro.storedatabase.LiveStorage.insert_obj',
+                             new=mock_insert)
+        with patcher:
+            try:
+                self.metadata_manager.add_file(path)
+            except sqlite3.OperationalError:
+                pass
+        self.check_path_in_system('foo.avi', False)
+
+    def test_delete(self):
+        # add many files at different points in the metadata process
+        files_in_mutagen = []
+        for x in range(10):
+            filename = 'in-mutagen-%i.mp3' % x
+            files_in_mutagen.append(self.make_path(filename))
+            self.check_add_file(filename)
+
+        files_in_moviedata = []
+        for x in range(10):
+            filename = 'in-movie-data-%i.avi' % x
+            files_in_moviedata.append(self.make_path(filename))
+            self.check_add_file(filename)
+            self.check_run_mutagen(filename, 'video', 100, 'Foo')
+
+        files_in_echonest = []
+        for x in range(10):
+            filename = 'in-echonest-%i.mp3' % x
+            files_in_echonest.append(self.make_path(filename))
+            self.check_add_file(filename)
+            # Use None for title to force the files to go through the codegen
+            self.check_run_mutagen(filename, 'audio', 100, None)
+
+        files_finished = []
+        screenshot_paths = []
+        for x in range(10):
+            filename = 'finished-%i.avi' % x
+            files_finished.append(self.make_path(filename))
+            self.check_add_file(filename)
+            self.check_run_mutagen(filename, 'video', 100, 'Foo')
+            self.check_run_movie_data(filename, 'video', 100, True)
+            screenshot_paths.append(self.get_screenshot_path(filename))
+
+        # remove the files using both API calls
+        all_paths = (files_in_mutagen + files_in_moviedata +
+                     files_in_echonest + files_finished)
+
+        self.metadata_manager.remove_file(all_paths[0])
+        mock_delete = self.patch_for_test('miro.fileutil.delete')
+        self.metadata_manager.remove_files(all_paths[1:])
+        # check that the metadata manager sent a CancelFileOperations message
+        self.assertEquals(self.processor.canceled_files, set(all_paths))
+        # check that echonest calls were canceled
+        echonest_processor = self.metadata_manager.echonest_processor
+        self.assertEquals(len(echonest_processor._codegen_queue), 0)
+        self.assertEquals(len(echonest_processor._echonest_queue), 0)
+        # check that none of the videos are in the metadata manager
+        for path in all_paths:
+            self.assertRaises(KeyError, self.metadata_manager.get_metadata,
+                              path)
+        # check that the screenshots were deleted
+        deleted_files = [args[0] for args, kwargs in
+                         mock_delete.call_args_list]
+        self.assertSameSet(deleted_files, screenshot_paths)
+        # check that callbacks/errbacks for those files don't result in
+        # errors.  The metadata system may have already been processing the
+        # file when it got the CancelFileOperations message.
+        metadata = {
+            'file_type': u'video',
+            'duration': 100
+        }
+        self.processor.run_movie_data_callback(files_in_moviedata[0],
+                                               metadata)
+        with self.allow_warnings():
+            self.processor.run_mutagen_errback(
+                files_in_mutagen[0], ValueError())
+
+    def test_restore(self):
+        db_path = os.path.join(self.tempdir, 'testdb')
+        self.reload_database(db_path)
+        self.metadata_manager.db_info = app.db_info
+        self.check_add_file('foo.mp3')
+        self.check_run_mutagen('foo.mp3', 'audio', 200, 'Bar', 'Fights')
+        self.check_movie_data_not_scheduled('foo.mp3')
+        self.check_run_echonest('foo.mp3', 'Bar', 'Artist', 'Fights')
+        # reload our database to force restoring metadata items
+        self.reload_database(db_path)
+        self.metadata_manager.db_info = app.db_info
+        self.check_metadata('foo.mp3')
+
+    def test_set_user_info(self):
+        self.check_add_file('foo.avi')
+        self.check_set_user_info('foo.avi', title=u'New Foo',
+                                 album=u'First Name')
+        self.check_set_user_info('foo.avi', title=u'New Foo',
+                                 album=u'Second Name')
+
+    def test_user_and_torrent_data(self):
+        self.check_add_file('foo.avi')
+        self.check_set_user_info('foo.avi', title=u'New Foo',
+                                 album=u'The best')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_set_user_info('foo.avi', title=u'Newer Foo')
+        self.check_run_movie_data('foo.avi', 'video', 100, True)
+        self.check_set_user_info('foo.avi', album=u'The bestest')
+        # check the final metadata one last time
+        metadata = self.get_metadata('foo.avi')
+        self.assertEquals(metadata['title'], 'Newer Foo')
+        self.assertEquals(metadata['album'], 'The bestest')
+
+    def test_queueing(self):
+        # test that we don't send too many requests to the worker process
+        paths = ['/videos/video-%d.avi' % i for i in xrange(200)]
+
+        def run_mutagen(start, stop):
+            for p in paths[start:stop]:
+                # this ensures that both moviedata and echonest will be run
+                # for this file
+                metadata = {
+                    'file_type': u'audio',
+                    # Don't send title to force items to go through the
+                    # echonest codegen
+                    'album': u'Album',
+                    'drm': False,
+                }
+                self.processor.run_mutagen_callback(p, metadata)
+
+        def run_movie_data(start, stop):
+            for p in paths[start:stop]:
+                metadata = {
+                    'file_type': u'audio',
+                    'duration': 100,
+                }
+                self.processor.run_movie_data_callback(p, metadata)
+
+        def run_echonest_codegen(start, stop):
+            for p in paths[start:stop]:
+                code = self.calc_fake_echonest_code(p)
+                self.processor.run_echonest_codegen_callback(p, code)
+
+        def run_echonest(start, stop):
+            for p in paths[start:stop]:
+                metadata = {
+                    'title': u'Title',
+                    'album': u'Album',
+                }
+                self.processor.run_echonest_callback(p, metadata)
+
+        def check_counts(mutagen_calls, movie_data_calls,
+                         echonest_codegen_calls, echonest_calls):
+            self.metadata_manager._process_metadata_finished()
+            self.metadata_manager._process_metadata_errors()
+            self.assertEquals(len(self.processor.mutagen_paths()),
+                              mutagen_calls)
+            self.assertEquals(len(self.processor.movie_data_paths()),
+                              movie_data_calls)
+            self.assertEquals(len(self.processor.echonest_codegen_paths()),
+                              echonest_codegen_calls)
+            self.assertEquals(len(self.processor.echonest_paths()),
+                              echonest_calls)
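+
+        # Queue limits exercised below, as the count assertions imply: each
+        # worker-process queue (mutagen, movie data) holds at most 100
+        # outstanding tasks, only one echonest codegen runs at a time, and
+        # codegen stops once 5 codes are queued up for echonest queries.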
+
+        # Add all 200 paths to the metadata manager.  Only 100 should be
+        # queued up to mutagen
+        for p in paths:
+            self.metadata_manager.add_file(p)
+        check_counts(100, 0, 0, 0)
+
+        # let 50 mutagen tasks complete, we should queue up 50 more
+        run_mutagen(0, 50)
+        check_counts(100, 50, 0, 0)
+        # let 75 more complete, we should be hitting our movie data max now
+        run_mutagen(50, 125)
+        check_counts(75, 100, 0, 0)
+        # run a bunch of movie data calls.  This will let us test the
+        # echonest queueing
+        run_movie_data(0, 100)
+        # we should only have 1 echonest codegen program running at once
+        check_counts(75, 25, 1, 0)
+        # when that gets done, we should only have 1 echonest query running
+        # at once
+        run_echonest_codegen(0, 2)
+        check_counts(75, 25, 1, 1)
+        # we should stop running echonest codegen once we have 5 codes
+        # queued up
+        run_echonest_codegen(2, 6)
+        check_counts(75, 25, 0, 1)
+        # looks good, just double check that we finish our queues okay
+        run_mutagen(125, 200)
+        check_counts(0, 100, 0, 1)
+        run_movie_data(100, 200)
+        check_counts(0, 0, 0, 1)
+        for i in xrange(195):
+            run_echonest(i, i+1)
+            run_echonest_codegen(i+6, i+7)
+        run_echonest(195, 200)
+
+    def test_move(self):
+        # add a couple files at different points in the metadata process
+        self.check_add_file('foo.avi')
+        self.check_run_mutagen('foo.avi', 'video', 100, 'Foo')
+        self.check_add_file('bar.mp3')
+        self.check_add_file('baz.avi')
+        self.check_run_mutagen('baz.avi', 'video', 100, 'Foo')
+        self.check_run_movie_data('baz.avi', 'video', 100, True)
+        self.check_queued_moviedata_calls(['foo.avi'])
+        self.check_queued_mutagen_calls(['bar.mp3'])
+        # Move some of the files to new names
+        def new_path_name(old_path):
+            return '/videos2/' + os.path.basename(old_path)
+        to_move = ['/videos/foo.avi', '/videos/bar.mp3', '/videos/baz.avi']
+        old_metadata = dict((p, self.metadata_manager.get_metadata(p))
+                            for p in to_move)
+        self.metadata_manager.will_move_files(to_move)
+        # check that the metadata manager sent a CancelFileOperations message
+        self.assertEquals(self.processor.canceled_files, set(to_move))
+        # tell metadata manager that the move is done
+        for path in to_move:
+            new_path = new_path_name(path)
+            self.metadata_manager.file_moved(path, new_path)
+            self.net_lookup_enabled[new_path] = \
+                    self.net_lookup_enabled.pop(path)
+        # check that the metadata is stored with the new path and not the
+        # old one
+        for path in to_move:
+            new_path = new_path_name(path)
+            for dct in (self.mutagen_data, self.movieprogram_data,
+                        self.user_info_data):
+                dct[new_path] = dct.pop(path)
+            self.assertEquals(old_metadata[path],
+                              self.metadata_manager.get_metadata(new_path))
+            self.assertRaises(KeyError, self.metadata_manager.get_metadata,
+                              path)
+        # check that callbacks/errbacks for the old paths don't result in
+        # errors.  The metadata system may have already been processing the
+        # file when it got the CancelFileOperations message.
+        metadata = {
+            'file_type': u'video',
+            'duration': 100,
+        }
+        self.processor.run_movie_data_callback('/videos/foo.avi', metadata)
+        with self.allow_warnings():
+            self.processor.run_mutagen_errback('/videos/bar.mp3', ValueError())
+        # check that callbacks work for new paths
+        self.check_run_movie_data('/videos2/foo.avi', 'video', 100, True)
+        self.check_run_mutagen('/videos2/bar.mp3', 'audio', 120, 'Bar',
+                               'Fights')
+
+    def test_queueing_with_delete(self):
+        # test that we remove files that are queued as well
+        paths = ['/videos/video-%d.avi' % i for i in xrange(200)]
+        for p in paths:
+            self.metadata_manager.add_file(p)
+        # we now have 200 mutagen calls so 100 of them should be pending
+
+        # if some files get removed, then we should start new ones
+        self.metadata_manager.remove_files(paths[:25])
+        self.assertEquals(len(self.processor.mutagen_paths()), 125)
+
+        # If pending files get removed, we should remove them from the
+        # pending queues
+        self.metadata_manager.remove_files(paths[25:])
+        mm = self.metadata_manager
+        self.assertEquals(len(mm.mutagen_processor._pending_tasks), 0)
+        self.assertEquals(len(mm.moviedata_processor._pending_tasks), 0)
+
+    def test_queueing_with_move(self):
+        # test moving queued files
+        paths = ['/videos/video-%d.avi' % i for i in xrange(200)]
+        for p in paths:
+            self.metadata_manager.add_file(p)
+        # we now have 200 mutagen calls so 100 of them should be pending
+
+        # if pending files get moved, the paths should be updated
+        moved = paths[150:]
+        new_paths = ['/new' + p for p in moved]
+        self.metadata_manager.will_move_files(moved)
+        for old_path, new_path in zip(moved, new_paths):
+            self.metadata_manager.file_moved(old_path, new_path)
+        # send mutagen callbacks so the pending calls start
+        for p in paths[:100]:
+            metadata = {
+                'file_type': u'video',
+                'duration': 100,
+                'title': u'Title',
+                'album': u'Album',
+                'drm': False,
+            }
+            self.processor.run_mutagen_callback(p, metadata)
+        correct_paths = paths[100:150] + new_paths
+        self.assertSameSet(self.processor.mutagen_paths(), correct_paths)
+
+class EchonestNetErrorTest(EventLoopTest):
+    # Test our pause/retry logic when we get HTTP errors from echonest
+
+    def setUp(self):
+        EventLoopTest.setUp(self)
+        self.processor = MockMetadataProcessor()
+        self.patch_function('miro.echonest.query_echonest',
+                            self.processor.query_echonest)
+
+    @mock.patch('miro.eventloop.add_timeout')
+    def test_pause_on_http_errors(self, mock_add_timeout):
+        _echonest_processor = metadata._EchonestProcessor(1, self.tempdir)
+        paths = [PlatformFilenameType('/videos/item-%s.mp3' % i)
+                 for i in xrange(100)]
+        error_count = _echonest_processor.PAUSE_AFTER_HTTP_ERROR_COUNT
+        timeout = _echonest_processor.PAUSE_AFTER_HTTP_ERROR_TIMEOUT
+        for i, path in enumerate(paths):
+            # give enough initial metadata so that we skip the codegen step
+            # (bind i as a default argument so each fetcher keeps its own
+            # value)
+            fetcher = lambda i=i: {u'title': "Song-%i" % i}
+            _echonest_processor.add_path(path, fetcher)
+        path_iter = iter(paths)
+
+        for i in xrange(error_count):
+            http_error = httpclient.UnknownHostError('fake.echonest.host')
+            with self.allow_warnings():
+                _echonest_processor._echonest_errback(path_iter.next(),
+                                                      http_error)
+        # after we get enough errors, we should stop querying echonest
+        self.assertEquals(_echonest_processor._querying_echonest, False)
+        # we should also set a timeout to re-run the queue once enough time
+        # has passed
+        mock_add_timeout.assert_called_once_with(
+            timeout, _echonest_processor._restart_after_http_errors,
+            MatchAny())
+        # simulate time passing then run _restart_after_http_errors().  We
+        # should schedule a new echonest call
+        for i in xrange(len(_echonest_processor._http_error_times)):
+            _echonest_processor._http_error_times[i] -= timeout
+        mock_add_timeout.reset_mock()
+        _echonest_processor._restart_after_http_errors()
+        self.assertEquals(_echonest_processor._querying_echonest, True)
+        # test that if this call is successful, we keep going
+        _echonest_processor._echonest_callback(path_iter.next(),
+                                               {'album': u'Album'})
+        self.assertEquals(_echonest_processor._querying_echonest, True)
+        # test that if we get enough errors, we halt again
+        for i in xrange(error_count):
+            http_error = httpclient.UnknownHostError('fake.echonest.host')
+            with self.allow_warnings():
+                _echonest_processor._echonest_errback(path_iter.next(),
+                                                      http_error)
+        self.assertEquals(_echonest_processor._querying_echonest, False)
+        mock_add_timeout.assert_called_once_with(
+            timeout, _echonest_processor._restart_after_http_errors,
+            MatchAny())
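+
+# Note on the pause logic above: _EchonestProcessor tracks the times of
+# recent HTTP errors; once PAUSE_AFTER_HTTP_ERROR_COUNT of them accumulate
+# it stops querying and schedules _restart_after_http_errors() to run after
+# PAUSE_AFTER_HTTP_ERROR_TIMEOUT seconds.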
+
+class DeviceMetadataTest(EventLoopTest):
+    def setUp(self):
+        EventLoopTest.setUp(self)
+        messages.FrontendMessage.handler = mock.Mock()
+        # setup a device database
+        device_db = devices.DeviceDatabase()
+        device_db[u'audio'] = {}
+        device_db[u'video'] = {}
+        device_db[u'other'] = {}
+        # setup a device object
+        device_info = mock.Mock()
+        device_info.name = 'DeviceName'
+        mount = self.tempdir + "/"
+        device_id = 123
+        os.makedirs(os.path.join(mount, '.miro'))
+        self.cover_art_dir = os.path.join(self.tempdir, 'cover-art')
+        os.makedirs(self.cover_art_dir)
+        sqlite_db = devices.load_sqlite_database(':memory:', 1024)
+        db_info = database.DeviceDBInfo(sqlite_db, device_id)
+        metadata_manager = devices.make_metadata_manager(
+            self.tempdir, db_info, device_id)
+        self.device = messages.DeviceInfo(device_id, device_info, mount,
+                                          devices.sqlite_database_path(mount),
+                                          device_info, db_info,
+                                          metadata_manager, 1000, 0, False)
+        # copy a file to our device
+        src = resources.path('testdata/Wikipedia_Song_by_teddy.ogg')
+        dest = os.path.join(self.tempdir, 'test-song.ogg')
+        shutil.copyfile(src, dest)
+        self.mutagen_metadata = {
+            'file_type': u'audio',
+            'title': u'Title',
+            'album': u'Album',
+            'artist': u'Artist',
+        }
+        self.moviedata_metadata = { 'duration': 100 }
+        # make a device manager
+        app.device_manager = mock.Mock()
+        app.device_manager.connected = {self.device.id: self.device}
+        app.device_manager._is_hidden.return_value = False
+        # Set NET_LOOKUP_BY_DEFAULT to True.  We should ignore it for device
+        # items and always set net_lookup_enabled to False.
+        app.config.set(prefs.NET_LOOKUP_BY_DEFAULT, True)
+
+    def tearDown(self):
+        app.device_manager = None
+        EventLoopTest.tearDown(self)
+
+    def make_device_item(self):
+        return testobjects.make_device_item(self.device,
+                                            'test-song.ogg')
+
+    def run_processors(self):
+        self._run_processors(self.device.metadata_manager,
+                             os.path.join(self.tempdir, 'test-song.ogg'))
+
+    def run_processors_for_file_item(self, item):
+        self._run_processors(app.local_metadata_manager,
+                             item.get_filename())
+
+    def _run_processors(self, metadata_manager, path):
+        metadata_manager.mutagen_processor.emit("task-complete", path,
+                                                self.mutagen_metadata)
+        metadata_manager.run_updates()
+        metadata_manager.moviedata_processor.emit("task-complete", path,
+                                                  self.moviedata_metadata)
+        metadata_manager.run_updates()
+
+    def get_metadata_for_item(self):
+        return self.device.metadata_manager.get_metadata('test-song.ogg')
+
+    def test_new_item(self):
+        # Test that we create metadata entries for new DeviceItems.
+        device_item = testobjects.make_device_item(self.device,
+                                                   'test-song.ogg')
+        self.assertDictEquals(self.get_metadata_for_item(), {
+            u'file_type': u'audio',
+            u'net_lookup_enabled': False,
+            u'has_drm': False,
+        })
+        self.assertEquals(device_item.file_type, u'audio')
+        self.assertEquals(device_item.net_lookup_enabled, False)
+
+    def test_update(self):
+        # Test that we update DeviceItems as we get metadata
+        self.make_device_item()
+        self.run_processors()
+        # check data in MetadataManager
+        self.assertDictEquals(self.get_metadata_for_item(), {
+            'file_type': u'audio',
+            'title': u'Title',
+            'album': u'Album',
+            'duration': 100,
+            'artist': 'Artist',
+            'has_drm': False,
+            'net_lookup_enabled': False,
+        })
+        # check data in the DeviceItem
+        device_item = item.DeviceItem.get_by_path(
+            unicode_to_filename(u'test-song.ogg'), self.device.db_info)
+        self.assertEquals(device_item.title, u'Title')
+        self.assertEquals(device_item.artist, u'Artist')
+        self.assertEquals(device_item.album, u'Album')
+
+    def test_image_paths(self):
+        # Test that screenshot and cover_art are relative to the
+        # device
+        screenshot = os.path.join(self.device.mount, '.miro',
+                                  'icon-cache', 'extracted',
+                                  'screenshot.png')
+        cover_art = os.path.join(self.device.mount, '.miro', 'cover-art',
+                                 unicode_to_filename(u'Album'))
+        self.moviedata_metadata['screenshot'] = screenshot
+        for path in (screenshot, cover_art):
+            if not os.path.exists(os.path.dirname(path)):
+                os.makedirs(os.path.dirname(path))
+            open(path, 'w').write("FAKE DATA")
+
+        self.make_device_item()
+        self.run_processors()
+        item_metadata = self.get_metadata_for_item()
+        self.assertEquals(item_metadata['screenshot'],
+                          os.path.relpath(screenshot, self.device.mount))
+        self.assertEquals(item_metadata['cover_art'],
+                          os.path.relpath(cover_art, self.device.mount))
+
+    def test_remove(self):
+        # Test that we remove metadata entries for removed DeviceItems.
+        device_item = self.make_device_item()
+        self.run_processors()
+        device_item.remove(self.device)
+        self.assertRaises(KeyError, self.get_metadata_for_item)
+
+    def test_restart_incomplete(self):
+        # test that when we create our metadata manager it restarts the
+        # pending metadata
+        self.make_device_item()
+        mock_send = mock.Mock()
+        patcher = mock.patch('miro.workerprocess.send', mock_send)
+        with patcher:
+            self.device.metadata_manager = devices.make_metadata_manager(
+                self.tempdir, self.device.db_info, self.device.id)
+        self.assertEquals(mock_send.call_count, 1)
+        task = mock_send.call_args[0][0]
+        self.assertEquals(task.source_path,
+                          os.path.join(self.tempdir, 'test-song.ogg'))
+
+    @mock.patch('miro.fileutil.migrate_file')
+    def test_copy(self, mock_migrate_file):
+        # Test copying/converting existing files into the device
+        source_path = resources.path('testdata/Wikipedia_Song_by_teddy.ogg')
+        feed = models.Feed(u'dtv:manualFeed')
+        item = models.FileItem(source_path, feed.id)
+        self.run_processors_for_file_item(item)
+        item_metadata = app.local_metadata_manager.get_metadata(
+            item.get_filename())
+        source_info = testobjects.make_item_info(item)
+        copy_path = os.path.join(self.tempdir, 'copied-file-dest.ogg')
+
+        shutil.copyfile(source_path, copy_path)
+        dsm = devices.DeviceSyncManager(self.device)
+        dsm._add_item(copy_path, source_info)
+        self.runPendingIdles()
+        self.assertEquals(mock_migrate_file.call_count, 1)
+        current_path, final_path, callback = mock_migrate_file.call_args[0]
+        shutil.copyfile(current_path, final_path)
+        copied_item_path = os.path.relpath(final_path, self.device.mount)
+        callback()
+        # we shouldn't have any metadata processing scheduled for the device
+        device_db_info = self.device.metadata_manager.db_info
+        status = metadata.MetadataStatus.get_by_path(copied_item_path,
+                                                     device_db_info)
+        self.assertEquals(status.current_processor, None)
+        self.assertEquals(status.mutagen_status, status.STATUS_COMPLETE)
+        self.assertEquals(status.moviedata_status, status.STATUS_COMPLETE)
+        self.assertEquals(status.echonest_status, status.STATUS_SKIP)
+        # test that we made MetadataEntry rows
+        for source in (u'mutagen', u'movie-data'):
+            # this will raise an exception if the entry is not there
+            metadata.MetadataEntry.get_entry(source, status,
+                                             device_db_info)
+        # the device item should have the original item's metadata, except
+        # we shouldn't copy net_lookup_enabled
+        device_item_metadata = self.device.metadata_manager.get_metadata(
+            copied_item_path)
+        self.assertEquals(item_metadata['net_lookup_enabled'], True)
+        self.assertEquals(device_item_metadata['net_lookup_enabled'], False)
+        del item_metadata['net_lookup_enabled']
+        del device_item_metadata['net_lookup_enabled']
+        self.assertDictEquals(device_item_metadata, item_metadata)
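+
+# Device items above always get net_lookup_enabled=False, regardless of the
+# NET_LOOKUP_BY_DEFAULT pref; both setUp() and test_copy() rely on that.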
('eJwdkIkRBCEIBFPiEzEcRDb_EG64LavV5pFaov8nAejA5moFrD' + 'n6YE8gBkeAnFM58Cb5JdBwLHCsg6liH7cbOOjHiTyexlwI84eA' + 'TDuZ18R9phicJn7r1afGwXvtrfSZ03qLUvVB0mWJ-gwjS1mqyK' + 'KGVDlxTAOVlS4LXR9tOdT3nGvMzprtrl4rrC_nfReS8nOs0q1y' + 'X17Z8aryw34aEnmnceG3PXuHRuyFPIRaIEkF8-IPmVFd5Mdhhi' + 'S9LmYmndQvMEfdDL3aiECqoAryB-OLX8E=') + self.assertEquals(self.callback_data, (song_path, correct_code)) + + def test_codegen_error(self): + song_path = resources.path('/file/not/found') + self.run_codegen(song_path) + self.assertEquals(self.callback_data, None) + self.assertEquals(self.errback_data[0], song_path) + self.assert_(isinstance(self.errback_data[1], Exception)) + + +mock_grab_url = mock.Mock() +@mock.patch('miro.httpclient.grab_url', new=mock_grab_url) +class TestEchonestQueries(MiroTestCase): + """Test our echonest handling code""" + + def setUp(self): + MiroTestCase.setUp(self) + mock_grab_url.reset_mock() + self.callback_data = self.errback_data = None + self.path = "/videos/FakeSong.mp3" + self.album_art_dir = os.path.join(self.tempdir, 'echonest-album-art') + os.makedirs(self.album_art_dir) + # data to send to echonest. Note that the metadata we pass to + # query_echonest() doesn't necessarily relate to the fake reply we + # send back. + self.echonest_code = "FaKe=EChoNEST+COdE" + self.setup_query_metadata_for_rock_music() + self.echonest_id = "fake-id-echonest" + self.seven_digital_id = "fake-id-7digital" + self.album_art_url = None + self.album_art_data = "fake-album-art-data" + # 7digital release ids + self.bossanova_release_id = 189844 + self.release_ids_for_billie_jean = [ + 518377, 280410, 307167, 289401, 282494, 282073, 624250, 312343, + 391641, 341656, 284075, 280538, 283379, 312343, 669160, 391639, + ] + self.thriller_release_id = 282494 + echonest._EchonestQuery.seven_digital_cache = {} + + def callback(self, *args): + self.callback_data = args + + def errback(self, *args): + self.errback_data = args + + def setup_query_metadata_for_billie_jean(self): + self.query_metadata = { + "artist": "Michael jackson", + "album": "Thriller", + "title": "Billie jean", + "duration": 168400, + } + + def setup_query_metadata_for_rock_music(self): + self.query_metadata = { + "artist": "Pixies", + "album": "Bossanova", + "title": "Rock Music", + "duration": 168400, + } + + def setup_query_metadata_for_break_like_the_wind(self): + self.query_metadata = { + "artist": u'Sp\u0131n\u0308al Tap', + "album": "Break Like The Wind", + "title": "Break Like The Wind", + "duration": 168400, + } + + def start_query_with_tags(self): + """Send ID3 tags to echonest.query_echonest().""" + # This tracks the metadata we expect to see back from query_echonest() + self.reply_metadata = {} + echonest.query_echonest(self.path, self.album_art_dir, + None, 3.15, self.query_metadata, + self.callback, self.errback) + + def start_query_with_echonest_id(self): + """Send an echonest id to echonest.query_echonest().""" + # This tracks the metadata we expect to see back from query_echonest() + self.query_metadata['echonest_id'] = 'abcdef' + self.reply_metadata = {} + echonest.query_echonest(self.path, self.album_art_dir, + None, 3.15, self.query_metadata, + self.callback, self.errback) + + def start_query_with_code(self): + """Send a generated code to echonest.query_echonest().""" + # This tracks the metadata we expect to see back from query_echonest() + self.reply_metadata = {} + # we only call echonest codegen if we don't have a lot of metadata + del self.query_metadata['title'] + del self.query_metadata['album'] + 
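# with the tags stripped, the query should go through the code-based + # identify request (checked by check_echonest_grab_url_call_with_code) + 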
echonest.query_echonest(self.path, self.album_art_dir, + self.echonest_code, 3.15, self.query_metadata, + self.callback, self.errback) + + def check_grab_url(self, url, query_dict=None, post_vars=None, + write_file=None): + """Check that grab_url was called with a given URL. + """ + self.assertEquals(mock_grab_url.call_count, 1) + args, kwargs = mock_grab_url.call_args + if post_vars is not None: + grab_url_post_vars = kwargs.pop('post_vars') + # handle query specially, since it's a json encoded dict so it can + # be formatted in different ways + if 'query' in post_vars: + self.assertDictEquals( + json.loads(grab_url_post_vars.pop('query')), + post_vars.pop('query')) + self.assertDictEquals(grab_url_post_vars, post_vars) + if write_file is None: + self.assertDictEquals(kwargs, {}) + else: + self.assertDictEquals(kwargs, {'write_file': write_file}) + self.assertEquals(len(args), 3) + grabbed_url = urlparse.urlparse(args[0]) + parsed_url = urlparse.urlparse(url) + self.assertEquals(grabbed_url.scheme, parsed_url.scheme) + self.assertEquals(grabbed_url.netloc, parsed_url.netloc) + self.assertEquals(grabbed_url.path, parsed_url.path) + self.assertEquals(grabbed_url.fragment, parsed_url.fragment) + if query_dict: + self.assertDictEquals(urlparse.parse_qs(grabbed_url.query), + query_dict) + else: + self.assertEquals(grabbed_url.query, '') + + def check_grab_url_multiple(self, calls_to_check): + """Check that grab_url was called with multiple urls + + :param calls_to_check: list of (url, query) tuples to check. The order + doesn't matter. + """ + + def query_to_set(query_dict): + """Make a frozenset that represents a query dict. + + This allows us to have something hashable, which is needed for + assertSameSet. + """ + return frozenset((key, tuple(values)) + for key, values in query_dict.iteritems()) + + grabbed_urls_parsed = [] + for args, kwargs in mock_grab_url.call_args_list: + grabbed_url = urlparse.urlparse(args[0]) + query = urlparse.parse_qs(grabbed_url.query) + grabbed_urls_parsed.append((grabbed_url.scheme, + grabbed_url.netloc, + grabbed_url.path, + grabbed_url.fragment, + query_to_set(query))) + calls_to_check_parsed = [] + for url, query in calls_to_check: + parsed_url = urlparse.urlparse(url) + calls_to_check_parsed.append((parsed_url.scheme, + parsed_url.netloc, + parsed_url.path, + parsed_url.fragment, + query_to_set(query))) + self.assertSameSet(grabbed_urls_parsed, calls_to_check_parsed) + + def check_echonest_grab_url_call(self): + search_url = 'http://echonest.pculture.org/api/v4/song/search' + query_dict = { + 'api_key': [echonest.ECHO_NEST_API_KEY], + # NOTE: either order of the bucket params is okay + 'bucket': ['tracks', 'id:7digital'], + 'results': ['1'], + 'sort': ['song_hotttnesss-desc'], + 'artist': [self.query_metadata['artist'].encode('utf-8')], + 'title': [self.query_metadata['title'].encode('utf-8')], + } + self.check_grab_url(search_url, query_dict) + + def check_echonest_grab_url_call_with_code(self): + """Check the url sent to grab_url to perform our echonest query.""" + identify_url = 'http://echonest.pculture.org/api/v4/song/identify' + post_vars = { + 'api_key': echonest.ECHO_NEST_API_KEY, + # NOTE: either order of the bucket params is okay + 'bucket': ['tracks', 'id:7digital'], + 'query': { + 'code': self.echonest_code, + 'metadata': { + 'version': 3.15, + 'artist': self.query_metadata['artist'].encode('utf-8'), + 'duration': self.query_metadata['duration'] // 1000, + }, + }, + } + self.check_grab_url(identify_url, post_vars=post_vars) + + def 
check_echonest_grab_url_call_with_echonest_id(self): + """Check the url sent to grab_url to perform our echonest query.""" + + profile_url = 'http://echonest.pculture.org/api/v4/song/profile' + query_dict = { + 'api_key': [echonest.ECHO_NEST_API_KEY], + # NOTE: either order of the bucket params is okay + 'bucket': ['tracks', 'id:7digital'], + 'id': [self.query_metadata['echonest_id']], + } + self.check_grab_url(profile_url, query_dict) + + def send_echonest_reply(self, response_file): + """Send a reply back from echonest. + + As a side-effect we reset mock_grab_url before sending the reply to + get ready for the 7digital grab_url call. + + :param response_file: which file to use for response data + """ + response_path = resources.path('testdata/echonest-replies/' + + response_file) + response_data = open(response_path).read() + callback = mock_grab_url.call_args[0][1] + mock_grab_url.reset_mock() + callback({'body': response_data}) + if response_file in ('rock-music', 'no-releases'): + self.reply_metadata['artist'] = 'Pixies' + self.reply_metadata['title'] = 'Rock Music' + self.reply_metadata['echonest_id'] = 'SOGQSXU12AF72A2615' + elif response_file == 'billie-jean': + self.reply_metadata['artist'] = 'Michael Jackson' + self.reply_metadata['title'] = 'Billie Jean' + self.reply_metadata['echonest_id'] = 'SOJIZLV12A58A78309' + elif response_file == 'break-like-the-wind': + self.reply_metadata['artist'] = u'Sp\u0131n\u0308al Tap' + self.reply_metadata['title'] = 'Break Like The Wind' + self.reply_metadata['echonest_id'] = 'SOBKZUR12B0B80C2C1' + + def check_7digital_grab_url_calls(self, release_ids): + """Check the url sent to grab_url to perform our 7digital query.""" + calls_to_check = [] + seven_digital_url = 'http://7digital.pculture.org/1.2/release/details' + for releaseid in release_ids: + calls_to_check.append((seven_digital_url, { + 'oauth_consumer_key': [echonest.SEVEN_DIGITAL_API_KEY], + 'imageSize': ['350'], + 'releaseid': [str(releaseid)], + })) + self.check_grab_url_multiple(calls_to_check) + + def send_7digital_reply(self, response_file, call_index=0, + reset_mock=True, best_reply=True): + """Send a reply back from 7digital. + + :param response_file: which file to use for response data + :param call_index: if we called grab_url multiple times, use this to + pick which callback to send + :param reset_mock: should we reset mock_grab_url? + :param best_reply: is this the reply that we should choose? + """ + response_path = resources.path('testdata/7digital-replies/%s' % + response_file) + response_data = open(response_path).read() + callback = mock_grab_url.call_args_list[call_index][0][1] + if reset_mock: + mock_grab_url.reset_mock() + callback({'body': response_data}) + if not best_reply: + return + if response_file == self.bossanova_release_id: + self.reply_metadata['album'] = 'Bossanova' + self.reply_metadata['cover_art'] = os.path.join( + self.album_art_dir, 'Bossanova') + self.reply_metadata['created_cover_art'] = True + self.reply_metadata['album_artist'] = 'Pixies' + self.album_art_url = ( + 'http://cdn.7static.com/static/img/sleeveart/' + '00/001/898/0000189844_350.jpg') + elif response_file == self.thriller_release_id: + # NOTE: there are multiple thriller releases. 
We just pick one + # arbitrarily + self.reply_metadata['album'] = 'Thriller' + self.reply_metadata['album_artist'] = 'Michael Jackson' + self.reply_metadata['cover_art'] = os.path.join( + self.album_art_dir, 'Thriller') + self.reply_metadata['created_cover_art'] = True + self.album_art_url = ( + 'http://cdn.7static.com/static/img/sleeveart/' + '00/002/840/0000284075_350.jpg') + + def check_album_art_grab_url_call(self): + if self.album_art_url is None: + raise ValueError("album_art_url not set") + album_art_path = os.path.join(self.album_art_dir, + self.reply_metadata['album']) + self.check_grab_url(self.album_art_url, write_file=album_art_path) + + def send_album_art_reply(self): + """Send a reply back from the album art webserver + + As a side-effect we reset mock_grab_url. + """ + callback = mock_grab_url.call_args[0][1] + cover_art_file = mock_grab_url.call_args[1]['write_file'] + open(cover_art_file, 'w').write("fake data") + mock_grab_url.reset_mock() + # don't send the body since we write a file instead + callback({}) + + def check_callback(self): + """Check that echonest.query_echonest() sent the right data to our + callback. + """ + self.assertNotEquals(self.callback_data, None) + self.assertEquals(self.errback_data, None) + self.assertEquals(self.callback_data[0], self.path) + self.assertDictEquals(self.callback_data[1], self.reply_metadata) + for key, value in self.callback_data[1].items(): + if (key in ('title', 'artist', 'album') and + not isinstance(value, unicode)): + raise AssertionError("value for %s not unicode" % key) + + def check_errback(self): + """Check that echonest.query_echonest() called our errback instead of + our callback. + """ + self.assertEquals(self.callback_data, None) + self.assertNotEquals(self.errback_data, None) + + def check_grab_url_not_called(self): + self.assertEquals(mock_grab_url.call_count, 0) + + def send_http_error(self, call_index=0, reset_mock=False): + errback = mock_grab_url.call_args_list[call_index][0][2] + if reset_mock: + mock_grab_url.reset_mock() + error = httpclient.UnexpectedStatusCode(404) + with self.allow_warnings(): + errback(error) + + def test_query_with_tags(self): + # test normal operations + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + self.check_album_art_grab_url_call() + self.send_album_art_reply() + self.check_callback() + + def test_query_with_code(self): + # test normal operations + self.start_query_with_code() + self.check_echonest_grab_url_call_with_code() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + self.check_album_art_grab_url_call() + self.send_album_art_reply() + self.check_callback() + + def test_query_with_echonest_id(self): + # test queries where we already have an echonest id + self.start_query_with_echonest_id() + self.check_echonest_grab_url_call_with_echonest_id() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + self.check_album_art_grab_url_call() + self.send_album_art_reply() + self.check_callback() + + def test_album_art_error(self): + # test normal operations + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + 
self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + self.check_album_art_grab_url_call() + self.send_http_error() + # we shouldn't have cover_art in the reply, since the request + # failed + del self.reply_metadata['cover_art'] + del self.reply_metadata['created_cover_art'] + self.check_callback() + + def test_not_found(self): + # test echonest not finding our song + self.start_query_with_tags() + self.check_echonest_grab_url_call() + with self.allow_warnings(): + self.send_echonest_reply('no-match') + self.check_callback() + + def test_echonest_http_error(self): + # test http errors with echonest + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_http_error() + mock_grab_url.reset_mock() + self.check_grab_url_not_called() + self.check_errback() + + def test_no_releases(self): + # test no releases for a song + self.start_query_with_tags() + self.check_echonest_grab_url_call() + with self.allow_warnings(): + self.send_echonest_reply('no-releases') + self.check_grab_url_not_called() + self.check_callback() + + def test_7digital_http_error(self): + # test http errors with 7digital + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_http_error() + self.check_callback() + + def test_7digital_no_match(self): + # test 7digital not matching our release id + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + with self.allow_warnings(): + self.send_7digital_reply('no-matches') + self.check_callback() + + def test_7digital_invalid_xml(self): + # test 7digital sending back invalid XML + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + with self.allow_warnings(): + self.send_7digital_reply('invalid-xml') + self.check_callback() + + def test_multiple_releases(self): + # test multiple 7digital releases + self.setup_query_metadata_for_billie_jean() + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('billie-jean') + release_ids = [ 518377, 280410, 307167, 289401, 282494, 282073, + 624250, 312343, 391641, 341656, 284075, 280538, 283379, + 312343, 669160, 391639, + ] + replys_with_errors = set([283379, 307167, 312343, 391641, 518377, ]) + + self.check_7digital_grab_url_calls(release_ids) + + # send replies + for i, release_id in enumerate(release_ids): + # For the last reply, send an HTTP error. We should just skip + # over this and use the rest of the replies. 
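+ # Note that an HTTP error for one release shouldn't abort the whole + # query; that release is simply dropped + # (test_multiple_releases_error covers the all-errors case).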
+ # Also, reset our mock_grab_url call to get ready for the album + # art grab_url calls + if i == len(release_ids) - 1: + self.send_http_error(i, reset_mock=True) + continue + if release_id == self.thriller_release_id: + best_reply = True + else: + best_reply = False + if release_id in replys_with_errors: + self.log_filter.set_exception_level(logging.CRITICAL) + self.send_7digital_reply(release_id, i, reset_mock=False, + best_reply=best_reply) + # reset log filter + self.log_filter.set_exception_level(logging.WARNING) + + self.check_album_art_grab_url_call() + self.send_album_art_reply() + self.check_callback() + + def test_multiple_releases_error(self): + # test multiple 7digital releases and all of them resulting in an HTTP + # error + self.setup_query_metadata_for_billie_jean() + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('billie-jean') + release_ids = [ 518377, 280410, 307167, 289401, 282494, 282073, + 624250, 312343, 391641, 341656, 284075, 280538, 283379, + 312343, 669160, 391639, + ] + # send HTTP errors for all results + for i in xrange(len(release_ids) - 1): + self.send_http_error(i, reset_mock=False) + self.send_http_error(len(release_ids)-1, reset_mock=True) + # we should still get our callback with echonest data + self.check_callback() + + def test_multiple_releases_no_album_name(self): + # test multiple 7digital releases and when we don't have an album name + self.setup_query_metadata_for_billie_jean() + del self.query_metadata['album'] + self.start_query_with_tags() + self.check_echonest_grab_url_call() + with self.allow_warnings(): + self.send_echonest_reply('billie-jean') + # since there's no good way to pick from multiple releases, we should + # skip the 7digital step + self.check_callback() + + def test_7digital_caching(self): + # test that we cache 7digital results + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + self.check_album_art_grab_url_call() + self.send_album_art_reply() + self.check_callback() + old_metadata = self.reply_metadata + # start a new query that results in the same release id. 
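+ # (7digital results are cached on _EchonestQuery.seven_digital_cache, + # which setUp() resets, so the second query should skip both the + # 7digital and the album art requests)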
+ self.echonest_code = 'fake-code-2' + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + # we shouldn't call grab_url + self.check_grab_url_not_called() + self.reply_metadata = old_metadata + del self.reply_metadata['created_cover_art'] + self.check_callback() + + def test_avoid_redownloading_album_art(self): + # test that we don't download album art that we already have + album_art_path = os.path.join(self.album_art_dir, 'Bossanova') + open(album_art_path, 'w').write('FAKE DATA') + self.start_query_with_tags() + self.check_echonest_grab_url_call() + self.send_echonest_reply('rock-music') + self.check_7digital_grab_url_calls([self.bossanova_release_id]) + self.send_7digital_reply(self.bossanova_release_id) + # we shouldn't try to download the album art, since that file is + # already there + self.check_grab_url_not_called() + del self.reply_metadata['created_cover_art'] + self.check_callback() + + def test_query_encoding(self): + # test that we send parameters as unicode to echonest/7digital + self.query_metadata['artist'] = u'Pixi\u00e9s jackson' + self.query_metadata['album'] = u'Bossan\u00f6va' + self.query_metadata['title'] = u"Rock Mus\u0129c" + self.test_query_with_tags() + + def test_reply_encoding(self): + # test that we handle extended unicode chars from echonest + self.setup_query_metadata_for_break_like_the_wind() + self.start_query_with_tags() + self.check_echonest_grab_url_call() + with self.allow_warnings(): + self.send_echonest_reply('break-like-the-wind') + # break like the wind contains no tracks, so we don't need to deal + # with the 7digital stuff. Just check that we properly parsed the + # echonest query + self.check_callback() + +class ProgressUpdateTest(MiroTestCase): + # Test the objects used to send the MetadataProgressUpdate messages + def test_count_tracker(self): + # test the ProgressCountTracker + counter = metadata.ProgressCountTracker() + # test as the total goes up + files = ["foo.avi", "bar.avi", "baz.avi"] + files = [PlatformFilenameType(f) for f in files] + for i, f in enumerate(files): + counter.file_started(f, {}) + self.assertEquals(counter.get_count_info(), (i+1, 0, 0)) + # test as files finish moviedata/mutagen and move to echonest + for i, f in enumerate(files): + counter.file_finished_local_processing(f) + self.assertEquals(counter.get_count_info(), (3, i+1, 0)) + # test as files finish echonest + for i, f in enumerate(files): + counter.file_finished(f) + if i < 2: + self.assertEquals(counter.get_count_info(), (3, 3, i+1)) + else: + # all files completely done. We should reset the counts + self.assertEquals(counter.get_count_info(), (0, 0, 0)) + + def test_count_tracker_no_net_lookup(self): + # test the ProgressCountTracker when files skip the net lookup stage + + counter = metadata.ProgressCountTracker() + # test as the total goes up + files = ["foo.avi", "bar.avi", "baz.avi"] + files = [PlatformFilenameType(f) for f in files] + for i, f in enumerate(files): + counter.file_started(f, {}) + self.assertEquals(counter.get_count_info(), (i+1, 0, 0)) + # test as files finish processing + for i, f in enumerate(files): + counter.file_finished(f) + if i < 2: + self.assertEquals(counter.get_count_info(), (3, i+1, i+1)) + else: + # all files completely done. 
We should reset the counts + self.assertEquals(counter.get_count_info(), (0, 0, 0)) + + def test_count_tracker_file_moved(self): + # test the ProgressCountTracker after a file move + + counter = metadata.ProgressCountTracker() + # add some files + for i in xrange(10): + f = PlatformFilenameType("file-%s.avi" % i) + counter.file_started(f, {}) + self.assertEquals(counter.get_count_info(), (i+1, 0, 0)) + # check calling file_updated. It should be a no-op + for i in xrange(10): + f = PlatformFilenameType("file-%s.avi" % i) + counter.file_updated(f, { + 'duration': 10, + 'file_type': u'audio', + 'title': u'Title', + }) + + # move some of those files + for i in xrange(5, 10): + old = PlatformFilenameType("file-%s.avi" % i) + new = PlatformFilenameType("new-file-%s.avi" % i) + counter.file_moved(old, new) + # check as the files finished + for i in xrange(0, 10): + if i < 5: + f = PlatformFilenameType("file-%s.avi" % i) + else: + f = PlatformFilenameType("new-file-%s.avi" % i) + counter.file_finished(f) + if i < 9: + self.assertEquals(counter.get_count_info(), (10, i+1, i+1)) + else: + self.assertEquals(counter.get_count_info(), (0, 0, 0)) + + def test_library_count_tracker(self): + # test the LibraryProgressCountTracker + counter = metadata.LibraryProgressCountTracker() + # add a couple files that we will never finish. These will keep it so + # the counts don't get reset + counter.file_started(PlatformFilenameType("sentinal.avi"), + {'file_type': u"video"}) + counter.file_started(PlatformFilenameType("sentinal.mp3"), + {'file_type': u"audio"}) + # add a file whose filetype changes as it runs through the processors + self.assertEquals(counter.get_count_info('video'), (1, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (1, 0, 0)) + + foo = PlatformFilenameType("foo.mp3") + + counter.file_started(foo, {'file_type': u"audio"}) + self.assertEquals(counter.get_count_info('video'), (1, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (2, 0, 0)) + + counter.file_updated(foo, {'file_type': u"video"}) + self.assertEquals(counter.get_count_info('video'), (2, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (1, 0, 0)) + + # change the name, this shouldn't affect the counts at all + bar = PlatformFilenameType("bar.mp3") + counter.file_moved(foo, bar) + self.assertEquals(counter.get_count_info('video'), (2, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (1, 0, 0)) + + # check calling file_updated after file_moved + counter.file_updated(bar, {'file_type': u"audio"}) + self.assertEquals(counter.get_count_info('video'), (1, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (2, 0, 0)) + + # check finishing the file after all of the changes + counter.file_finished(bar) + self.assertEquals(counter.get_count_info('video'), (1, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (2, 1, 1)) - def test_iteminfo_round_trip(self): - """Test that properties changed by ItemInfo name affect the right - attributes. Test will also fail with errors if setup_new doesn't - initialize all the properties that are used by ItemInfo. 
- """ - source = TestSource() - source.setup_new() - info = source.get_iteminfo_metadata() - - store = TestStore() - store.setup_new() - store.set_metadata_from_iteminfo(info) - - original_dict = info - after_round_trip = store.get_iteminfo_metadata() - - if hasattr(self, 'assertDictEqual'): - # python2.7 includes helpful details - self.assertDictEqual(original_dict, after_round_trip) - else: - original_items = sorted(original_dict.items()) - round_trip_items = sorted(after_round_trip.items()) - self.assertEqual(original_items, round_trip_items) + # check file_updated for the last item + counter.file_updated(PlatformFilenameType("sentinal.avi"), + {'file_type': u"audio"}) + self.assertEquals(counter.get_count_info('video'), (0, 0, 0)) + self.assertEquals(counter.get_count_info('audio'), (3, 1, 1)) diff -Nru miro-4.0.4/lib/test/mock.py miro-6.0/lib/test/mock.py --- miro-4.0.4/lib/test/mock.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/mock.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,9 +1,9 @@ # mock.py # Test tools for mocking and patching. -# Copyright (C) 2007-2010 Michael Foord & the mock team +# Copyright (C) 2007-2012 Michael Foord & the mock team # E-mail: fuzzyman AT voidspace DOT org DOT uk -# mock 0.7.0 +# mock 1.0 # http://www.voidspace.org.uk/python/mock/ # Released subject to the BSD License @@ -16,20 +16,25 @@ __all__ = ( 'Mock', 'MagicMock', - 'mocksignature', 'patch', - 'patch_object', 'sentinel', - 'DEFAULT' + 'DEFAULT', + 'ANY', + 'call', + 'create_autospec', + 'FILTER_DIR', + 'NonCallableMock', + 'NonCallableMagicMock', + 'mock_open', + 'PropertyMock', ) -__version__ = '0.7.0b4' -__unittest = True +__version__ = '1.0.1' +import pprint import sys -import warnings try: import inspect @@ -39,20 +44,27 @@ inspect = None try: - BaseException -except NameError: - # Python 2.4 compatibility - BaseException = Exception - -try: - from functools import wraps + from functools import wraps as original_wraps except ImportError: # Python 2.4 compatibility def wraps(original): def inner(f): f.__name__ = original.__name__ + f.__doc__ = original.__doc__ + f.__module__ = original.__module__ + f.__wrapped__ = original return f return inner +else: + if sys.version_info[:2] >= (3, 3): + wraps = original_wraps + else: + def wraps(func): + def inner(f): + f = original_wraps(func)(f) + f.__wrapped__ = func + return f + return inner try: unicode @@ -66,106 +78,275 @@ # Python 3 long = int +try: + BaseException +except NameError: + # Python 2.4 compatibility + BaseException = Exception + +try: + next +except NameError: + def next(obj): + return obj.next() + + +BaseExceptions = (BaseException,) +if 'java' in sys.platform: + # jython + import java + BaseExceptions = (BaseException, java.lang.Throwable) + +try: + _isidentifier = str.isidentifier +except AttributeError: + # Python 2.X + import keyword + import re + regex = re.compile(r'^[a-z_][a-z0-9_]*$', re.I) + def _isidentifier(string): + if string in keyword.kwlist: + return False + return regex.match(string) + + inPy3k = sys.version_info[0] == 3 +# Needed to work around Python 3 bug where use of "super" interferes with +# defining __class__ as a descriptor +_super = super + +self = 'im_self' +builtin = '__builtin__' if inPy3k: self = '__self__' -else: - self = 'im_self' + builtin = 'builtins' + +FILTER_DIR = True + + +def _is_instance_mock(obj): + # can't use isinstance on Mock objects because they override __class__ + # The base class for all mocks is NonCallableMock + return issubclass(type(obj), NonCallableMock) + + +def 
_is_exception(obj): + return ( + isinstance(obj, BaseExceptions) or + isinstance(obj, ClassTypes) and issubclass(obj, BaseExceptions) + ) -# getsignature and mocksignature heavily "inspired" by -# the decorator module: http://pypi.python.org/pypi/decorator/ -# by Michele Simionato +class _slotted(object): + __slots__ = ['a'] -def _getsignature(func, skipfirst): + +DescriptorTypes = ( + type(_slotted.a), + property, +) + + +def _getsignature(func, skipfirst, instance=False): if inspect is None: raise ImportError('inspect module not available') - if inspect.isclass(func): - func = func.__init__ - # will have a self arg + if isinstance(func, ClassTypes) and not instance: + try: + func = func.__init__ + except AttributeError: + return skipfirst = True - elif not (inspect.ismethod(func) or inspect.isfunction(func)): - func = func.__call__ + elif not isinstance(func, FunctionTypes): + # for classes where instance is True we end up here too + try: + func = func.__call__ + except AttributeError: + return - regargs, varargs, varkwargs, defaults = inspect.getargspec(func) + if inPy3k: + try: + argspec = inspect.getfullargspec(func) + except TypeError: + # C function / method, possibly inherited object().__init__ + return + regargs, varargs, varkw, defaults, kwonly, kwonlydef, ann = argspec + else: + try: + regargs, varargs, varkwargs, defaults = inspect.getargspec(func) + except TypeError: + # C function / method, possibly inherited object().__init__ + return - # instance methods need to lose the self argument + # instance methods and classmethods need to lose the self argument if getattr(func, self, None) is not None: regargs = regargs[1:] - - _msg = "_mock_ is a reserved argument name, can't mock signatures using _mock_" - assert '_mock_' not in regargs, _msg - if varargs is not None: - assert '_mock_' not in varargs, _msg - if varkwargs is not None: - assert '_mock_' not in varkwargs, _msg if skipfirst: + # this condition and the above one are never both True - why? regargs = regargs[1:] - signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults, - formatvalue=lambda value: "") + + if inPy3k: + signature = inspect.formatargspec( + regargs, varargs, varkw, defaults, + kwonly, kwonlydef, ann, formatvalue=lambda value: "") + else: + signature = inspect.formatargspec( + regargs, varargs, varkwargs, defaults, + formatvalue=lambda value: "") return signature[1:-1], func +def _check_signature(func, mock, skipfirst, instance=False): + if not _callable(func): + return + + result = _getsignature(func, skipfirst, instance) + if result is None: + return + signature, func = result + + # can't use self because "self" is common as an argument name + # unfortunately even not in the first place + src = "lambda _mock_self, %s: None" % signature + checksig = eval(src, {}) + _copy_func_details(func, checksig) + type(mock)._mock_check_sig = checksig + + def _copy_func_details(func, funcopy): funcopy.__name__ = func.__name__ funcopy.__doc__ = func.__doc__ - funcopy.__dict__.update(func.__dict__) + #funcopy.__dict__.update(func.__dict__) funcopy.__module__ = func.__module__ if not inPy3k: funcopy.func_defaults = func.func_defaults - else: - funcopy.__defaults__ = func.__defaults__ - funcopy.__kwdefaults__ = func.__kwdefaults__ - - -def mocksignature(func, mock=None, skipfirst=False): - """ - mocksignature(func, mock=None, skipfirst=False) - - Create a new function with the same signature as `func` that delegates - to `mock`. 
If `skipfirst` is True the first argument is skipped, useful - for methods where `self` needs to be omitted from the new function. + return + funcopy.__defaults__ = func.__defaults__ + funcopy.__kwdefaults__ = func.__kwdefaults__ + + +def _callable(obj): + if isinstance(obj, ClassTypes): + return True + if getattr(obj, '__call__', None) is not None: + return True + return False + + +def _is_list(obj): + # checks for list or tuples + # XXXX badly named! + return type(obj) in (list, tuple) + + +def _instance_callable(obj): + """Given an object, return True if the object is callable. + For classes, return True if instances would be callable.""" + if not isinstance(obj, ClassTypes): + # already an instance + return getattr(obj, '__call__', None) is not None + + klass = obj + # uses __bases__ instead of __mro__ so that we work with old style classes + if klass.__dict__.get('__call__') is not None: + return True + + for base in klass.__bases__: + if _instance_callable(base): + return True + return False + + +def _set_signature(mock, original, instance=False): + # creates a function with signature (*args, **kwargs) that delegates to a + # mock. It still does signature checking by calling a lambda with the same + # signature as the original. + if not _callable(original): + return + + skipfirst = isinstance(original, ClassTypes) + result = _getsignature(original, skipfirst, instance) + if result is None: + # was a C function (e.g. object().__init__ ) that can't be mocked + return + + signature, func = result + + src = "lambda %s: None" % signature + checksig = eval(src, {}) + _copy_func_details(func, checksig) + + name = original.__name__ + if not _isidentifier(name): + name = 'funcopy' + context = {'_checksig_': checksig, 'mock': mock} + src = """def %s(*args, **kwargs): + _checksig_(*args, **kwargs) + return mock(*args, **kwargs)""" % name + exec (src, context) + funcopy = context[name] + _setup_func(funcopy, mock) + return funcopy - If you don't pass in a `mock` then one will be created for you. - The mock is set as the `mock` attribute of the returned function for easy - access. +def _setup_func(funcopy, mock): + funcopy.mock = mock - `mocksignature` can also be used with classes. It copies the signature of - the `__init__` method. 
+ # can't use isinstance with mocks + if not _is_instance_mock(mock): + return + + def assert_called_with(*args, **kwargs): + return mock.assert_called_with(*args, **kwargs) + def assert_called_once_with(*args, **kwargs): + return mock.assert_called_once_with(*args, **kwargs) + def assert_has_calls(*args, **kwargs): + return mock.assert_has_calls(*args, **kwargs) + def assert_any_call(*args, **kwargs): + return mock.assert_any_call(*args, **kwargs) + def reset_mock(): + funcopy.method_calls = _CallList() + funcopy.mock_calls = _CallList() + mock.reset_mock() + ret = funcopy.return_value + if _is_instance_mock(ret) and not ret is mock: + ret.reset_mock() + + funcopy.called = False + funcopy.call_count = 0 + funcopy.call_args = None + funcopy.call_args_list = _CallList() + funcopy.method_calls = _CallList() + funcopy.mock_calls = _CallList() + + funcopy.return_value = mock.return_value + funcopy.side_effect = mock.side_effect + funcopy._mock_children = mock._mock_children + + funcopy.assert_called_with = assert_called_with + funcopy.assert_called_once_with = assert_called_once_with + funcopy.assert_has_calls = assert_has_calls + funcopy.assert_any_call = assert_any_call + funcopy.reset_mock = reset_mock - When used with callable objects (instances) it copies the signature of the - `__call__` method. - """ - if mock is None: - mock = Mock() - signature, func = _getsignature(func, skipfirst) - src = "lambda %(signature)s: _mock_(%(signature)s)" % { - 'signature': signature - } - - funcopy = eval(src, dict(_mock_=mock)) - _copy_func_details(func, funcopy) - funcopy.mock = mock - return funcopy + mock._mock_delegate = funcopy def _is_magic(name): return '__%s__' % name[2:-2] == name -class SentinelObject(object): +class _SentinelObject(object): "A unique, named, sentinel object." def __init__(self, name): self.name = name def __repr__(self): - return '<SentinelObject "%s">' % self.name + return 'sentinel.%s' % self.name -class Sentinel(object): +class _Sentinel(object): """Access attributes to return a named object, usable as a sentinel.""" def __init__(self): self._sentinels = {} @@ -174,12 +355,14 @@ if name == '__bases__': # Without this help(mock) raises an exception raise AttributeError - return self._sentinels.setdefault(name, SentinelObject(name)) + return self._sentinels.setdefault(name, _SentinelObject(name)) -sentinel = Sentinel() +sentinel = _Sentinel() DEFAULT = sentinel.DEFAULT +_missing = sentinel.MISSING +_deleted = sentinel.DELETED class OldStyleClass: @@ -193,255 +376,469 @@ return value -if inPy3k: - class_types = type -else: - class_types = (type, ClassType) +ClassTypes = (type,) +if not inPy3k: + ClassTypes = (type, ClassType) + +_allowed_names = set( + [ + 'return_value', '_mock_return_value', 'side_effect', + '_mock_side_effect', '_mock_parent', '_mock_new_parent', + '_mock_name', '_mock_new_name' + ] +) -class Mock(object): - """ - Create a new ``Mock`` object. 
``Mock`` takes several optional arguments - that specify the behaviour of the Mock object: +def _delegating_property(name): + _allowed_names.add(name) + _the_name = '_mock_' + name + def _get(self, name=name, _the_name=_the_name): + sig = self._mock_delegate + if sig is None: + return getattr(self, _the_name) + return getattr(sig, name) + def _set(self, value, name=name, _the_name=_the_name): + sig = self._mock_delegate + if sig is None: + self.__dict__[_the_name] = value + else: + setattr(sig, name, value) - * ``spec``: This can be either a list of strings or an existing object (a - class or instance) that acts as the specification for the mock object. If - you pass in an object then a list of strings is formed by calling dir on - the object (excluding unsupported magic attributes and methods). Accessing - any attribute not in this list will raise an ``AttributeError``. + return property(_get, _set) - If ``spec`` is an object (rather than a list of strings) then - `mock.__class__` returns the class of the spec object. This allows mocks - to pass `isinstance` tests. - * ``spec_set``: A stricter variant of ``spec``. If used attempting to *set* - or get an attribute on the mock that isn't on the object passed as - ``spec_set`` will raise an ``AttributeError``. - * ``side_effect``: A function to be called whenever the Mock is called. See - the :attr:`Mock.side_effect` attribute. Useful for raising exceptions or - dynamically changing return values. The function is called with the same - arguments as the mock, and unless it returns :data:`DEFAULT`, the return - value of this function is used as the return value. +class _CallList(list): - Alternatively ``side_effect`` can be an exception class or instance. In - this case the exception will be raised when the mock is called. + def __contains__(self, value): + if not isinstance(value, list): + return list.__contains__(self, value) + len_value = len(value) + len_self = len(self) + if len_value > len_self: + return False - * ``return_value``: The value returned when the mock is called. By default - this is a new Mock (created on first access). See the - :attr:`Mock.return_value` attribute. + for i in range(0, len_self - len_value + 1): + sub_list = self[i:i+len_value] + if sub_list == value: + return True + return False - * ``wraps``: Item for the mock object to wrap. If ``wraps`` is not None - then calling the Mock will pass the call through to the wrapped object - (returning the real result and ignoring ``return_value``). Attribute - access on the mock will return a Mock object that wraps the corresponding - attribute of the wrapped object (so attempting to access an attribute that - doesn't exist will raise an ``AttributeError``). + def __repr__(self): + return pprint.pformat(list(self)) - If the mock has an explicit ``return_value`` set then calls are not passed - to the wrapped object and the ``return_value`` is returned instead. - * ``name``: If the mock has a name then it will be used in the repr of the - mock. This can be useful for debugging. 
- """ +def _check_and_set_parent(parent, value, name, new_name): + if not _is_instance_mock(value): + return False + if ((value._mock_name or value._mock_new_name) or + (value._mock_parent is not None) or + (value._mock_new_parent is not None)): + return False + + _parent = parent + while _parent is not None: + # setting a mock (value) as a child or return value of itself + # should not modify the mock + if _parent is value: + return False + _parent = _parent._mock_new_parent + + if new_name: + value._mock_new_parent = parent + value._mock_new_name = new_name + if name: + value._mock_parent = parent + value._mock_name = name + return True + + + +class Base(object): + _mock_return_value = DEFAULT + _mock_side_effect = None + def __init__(self, *args, **kwargs): + pass + + + +class NonCallableMock(Base): + """A non-callable version of `Mock`""" + def __new__(cls, *args, **kw): # every instance has its own class # so we can create magic methods on the # class without stomping on other mocks new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__}) - return object.__new__(new) + instance = object.__new__(new) + return instance - def __init__(self, spec=None, side_effect=None, return_value=DEFAULT, - wraps=None, name=None, spec_set=None, parent=None): - self._parent = parent - self._name = name - _spec_class = None + def __init__( + self, spec=None, wraps=None, name=None, spec_set=None, + parent=None, _spec_state=None, _new_name='', _new_parent=None, + **kwargs + ): + if _new_parent is None: + _new_parent = parent + + __dict__ = self.__dict__ + __dict__['_mock_parent'] = parent + __dict__['_mock_name'] = name + __dict__['_mock_new_name'] = _new_name + __dict__['_mock_new_parent'] = _new_parent + if spec_set is not None: spec = spec_set spec_set = True - if spec is not None and not isinstance(spec, list): - if isinstance(spec, class_types): - _spec_class = spec - else: - _spec_class = spec.__class__ - spec = dir(spec) + self._mock_add_spec(spec, spec_set) - self._spec_class = _spec_class - self._spec_set = spec_set - self._methods = spec - self._children = {} - self._return_value = return_value - self.side_effect = side_effect - self._wraps = wraps + __dict__['_mock_children'] = {} + __dict__['_mock_wraps'] = wraps + __dict__['_mock_delegate'] = None - self.reset_mock() + __dict__['_mock_called'] = False + __dict__['_mock_call_args'] = None + __dict__['_mock_call_count'] = 0 + __dict__['_mock_call_args_list'] = _CallList() + __dict__['_mock_mock_calls'] = _CallList() + __dict__['method_calls'] = _CallList() - @property - def __class__(self): - if self._spec_class is None: - return type(self) - return self._spec_class + if kwargs: + self.configure_mock(**kwargs) + _super(NonCallableMock, self).__init__( + spec, wraps, name, spec_set, parent, + _spec_state + ) - def reset_mock(self): - "Restore the mock object to its initial state." - self.called = False - self.call_args = None - self.call_count = 0 - self.call_args_list = [] - self.method_calls = [] - for child in self._children.values(): - child.reset_mock() - if isinstance(self._return_value, Mock): - self._return_value.reset_mock() + + def attach_mock(self, mock, attribute): + """ + Attach a mock as an attribute of this one, replacing its name and + parent. 
Calls to the attached mock will be recorded in the + `method_calls` and `mock_calls` attributes of this one.""" + mock._mock_parent = None + mock._mock_new_parent = None + mock._mock_name = '' + mock._mock_new_name = None + + setattr(self, attribute, mock) + + + def mock_add_spec(self, spec, spec_set=False): + """Add a spec to a mock. `spec` can either be an object or a + list of strings. Only attributes on the `spec` can be fetched as + attributes from the mock. + + If `spec_set` is True then only attributes on the spec can be set.""" + self._mock_add_spec(spec, spec_set) + + + def _mock_add_spec(self, spec, spec_set): + _spec_class = None + + if spec is not None and not _is_list(spec): + if isinstance(spec, ClassTypes): + _spec_class = spec + else: + _spec_class = _get_class(spec) + + spec = dir(spec) + + __dict__ = self.__dict__ + __dict__['_spec_class'] = _spec_class + __dict__['_spec_set'] = spec_set + __dict__['_mock_methods'] = spec def __get_return_value(self): - if self._return_value is DEFAULT: - self._return_value = self._get_child_mock() - return self._return_value + ret = self._mock_return_value + if self._mock_delegate is not None: + ret = self._mock_delegate.return_value + + if ret is DEFAULT: + ret = self._get_child_mock( + _new_parent=self, _new_name='()' + ) + self.return_value = ret + return ret + def __set_return_value(self, value): - self._return_value = value + if self._mock_delegate is not None: + self._mock_delegate.return_value = value + else: + self._mock_return_value = value + _check_and_set_parent(self, value, None, '()') __return_value_doc = "The value to be returned when the mock is called." return_value = property(__get_return_value, __set_return_value, __return_value_doc) - def __call__(self, *args, **kwargs): - self.called = True - self.call_count += 1 - self.call_args = callargs((args, kwargs)) - self.call_args_list.append(callargs((args, kwargs))) + @property + def __class__(self): + if self._spec_class is None: + return type(self) + return self._spec_class - parent = self._parent - name = self._name - while parent is not None: - parent.method_calls.append(callargs((name, args, kwargs))) - if parent._parent is None: - break - name = parent._name + '.' + name - parent = parent._parent + called = _delegating_property('called') + call_count = _delegating_property('call_count') + call_args = _delegating_property('call_args') + call_args_list = _delegating_property('call_args_list') + mock_calls = _delegating_property('mock_calls') + + + def __get_side_effect(self): + sig = self._mock_delegate + if sig is None: + return self._mock_side_effect + return sig.side_effect + + def __set_side_effect(self, value): + value = _try_iter(value) + sig = self._mock_delegate + if sig is None: + self._mock_side_effect = value + else: + sig.side_effect = value - ret_val = DEFAULT - if self.side_effect is not None: - if (isinstance(self.side_effect, BaseException) or - isinstance(self.side_effect, class_types) and - issubclass(self.side_effect, BaseException)): - raise self.side_effect + side_effect = property(__get_side_effect, __set_side_effect) - ret_val = self.side_effect(*args, **kwargs) - if ret_val is DEFAULT: - ret_val = self.return_value - if self._wraps is not None and self._return_value is DEFAULT: - return self._wraps(*args, **kwargs) - if ret_val is DEFAULT: - ret_val = self.return_value - return ret_val + def reset_mock(self): + "Restore the mock object to its initial state." 
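+ # note: only the recorded calls are cleared here; a configured + # return_value or side_effect stays in place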
+ self.called = False + self.call_args = None + self.call_count = 0 + self.mock_calls = _CallList() + self.call_args_list = _CallList() + self.method_calls = _CallList() + + for child in self._mock_children.values(): + if isinstance(child, _SpecState): + continue + child.reset_mock() + + ret = self._mock_return_value + if _is_instance_mock(ret) and ret is not self: + ret.reset_mock() + + + def configure_mock(self, **kwargs): + """Set attributes on the mock through keyword arguments. + + Attributes plus return values and side effects can be set on child + mocks using standard dot notation and unpacking a dictionary in the + method call: + + >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError} + >>> mock.configure_mock(**attrs)""" + for arg, val in sorted(kwargs.items(), + # we sort on the number of dots so that + # attributes are set before we set attributes on + # attributes + key=lambda entry: entry[0].count('.')): + args = arg.split('.') + final = args.pop() + obj = self + for entry in args: + obj = getattr(obj, entry) + setattr(obj, final, val) def __getattr__(self, name): - if name == '_methods': + if name == '_mock_methods': raise AttributeError(name) - elif self._methods is not None: - if name not in self._methods or name in _all_magics: - raise AttributeError("Mock object has no attribute '%s'" % name) + elif self._mock_methods is not None: + if name not in self._mock_methods or name in _all_magics: + raise AttributeError("Mock object has no attribute %r" % name) elif _is_magic(name): raise AttributeError(name) - if name not in self._children: + result = self._mock_children.get(name) + if result is _deleted: + raise AttributeError(name) + elif result is None: wraps = None - if self._wraps is not None: - wraps = getattr(self._wraps, name) - self._children[name] = self._get_child_mock(parent=self, name=name, wraps=wraps) + if self._mock_wraps is not None: + # XXXX should we get the attribute without triggering code + # execution? + wraps = getattr(self._mock_wraps, name) + + result = self._get_child_mock( + parent=self, name=name, wraps=wraps, _new_name=name, + _new_parent=self + ) + self._mock_children[name] = result - return self._children[name] + elif isinstance(result, _SpecState): + result = create_autospec( + result.spec, result.spec_set, result.instance, + result.parent, result.name + ) + self._mock_children[name] = result + + return result def __repr__(self): - if self._name is None and self._spec_class is None: - return object.__repr__(self) + _name_list = [self._mock_new_name] + _parent = self._mock_new_parent + last = self + + dot = '.' + if _name_list == ['()']: + dot = '' + seen = set() + while _parent is not None: + last = _parent + + _name_list.append(_parent._mock_new_name + dot) + dot = '.' + if _parent._mock_new_name == '()': + dot = '' + + _parent = _parent._mock_new_parent + + # use ids here so as not to call __hash__ on the mocks + if id(_parent) in seen: + break + seen.add(id(_parent)) + + _name_list = list(reversed(_name_list)) + _first = last._mock_name or 'mock' + if len(_name_list) > 1: + if _name_list[1] not in ('()', '().'): + _first += '.' + _name_list[0] = _first + name = ''.join(_name_list) name_string = '' - spec_string = '' - if self._name is not None: - def get_name(name): - if name is None: - return 'mock' - return name - parent = self._parent - name = self._name - while parent is not None: - name = get_name(parent._name) + '.' 
+ name - parent = parent._parent + if name not in ('mock', 'mock.'): name_string = ' name=%r' % name + + spec_string = '' if self._spec_class is not None: spec_string = ' spec=%r' if self._spec_set: spec_string = ' spec_set=%r' spec_string = spec_string % self._spec_class.__name__ - return "<%s%s%s id='%s'>" % (type(self).__name__, - name_string, - spec_string, - id(self)) + return "<%s%s%s id='%s'>" % ( + type(self).__name__, + name_string, + spec_string, + id(self) + ) + + + def __dir__(self): + """Filter the output of `dir(mock)` to only useful members. + XXXX + """ + extras = self._mock_methods or [] + from_type = dir(type(self)) + from_dict = list(self.__dict__) + + if FILTER_DIR: + from_type = [e for e in from_type if not e.startswith('_')] + from_dict = [e for e in from_dict if not e.startswith('_') or + _is_magic(e)] + return sorted(set(extras + from_type + from_dict + + list(self._mock_children))) def __setattr__(self, name, value): - if not 'method_calls' in self.__dict__: - # allow all attribute setting until initialisation is complete + if name in _allowed_names: + # property setters go through here return object.__setattr__(self, name, value) - if (self._spec_set and self._methods is not None and name not in - self._methods and name not in self.__dict__ and - name != 'return_value'): + elif (self._spec_set and self._mock_methods is not None and + name not in self._mock_methods and + name not in self.__dict__): raise AttributeError("Mock object has no attribute '%s'" % name) - if name in _unsupported_magics: + elif name in _unsupported_magics: msg = 'Attempting to set unsupported magic method %r.' % name raise AttributeError(msg) elif name in _all_magics: - if self._methods is not None and name not in self._methods: + if self._mock_methods is not None and name not in self._mock_methods: raise AttributeError("Mock object has no attribute '%s'" % name) - if not isinstance(value, Mock): - setattr(type(self), name, get_method(name, value)) + if not _is_instance_mock(value): + setattr(type(self), name, _get_method(name, value)) original = value - real = lambda *args, **kw: original(self, *args, **kw) - value = mocksignature(value, real, skipfirst=True) + value = lambda *args, **kw: original(self, *args, **kw) else: + # only set _new_name and not name so that mock_calls is tracked + # but not method calls + _check_and_set_parent(self, value, None, name) setattr(type(self), name, value) + self._mock_children[name] = value + elif name == '__class__': + self._spec_class = value + return + else: + if _check_and_set_parent(self, value, name, name): + self._mock_children[name] = value return object.__setattr__(self, name, value) def __delattr__(self, name): if name in _all_magics and name in type(self).__dict__: delattr(type(self), name) - return object.__delattr__(self, name) + if name not in self.__dict__: + # for magic methods that are still MagicProxy objects and + # not set on the instance itself + return + + if name in self.__dict__: + object.__delattr__(self, name) + + obj = self._mock_children.get(name, _missing) + if obj is _deleted: + raise AttributeError(name) + if obj is not _missing: + del self._mock_children[name] + self._mock_children[name] = _deleted - def assert_called_with(self, *args, **kwargs): - """ - assert that the mock was called with the specified arguments. 
+ + def _format_mock_call_signature(self, args, kwargs): + name = self._mock_name or 'mock' + return _format_call_signature(name, args, kwargs) + + + def _format_mock_failure_message(self, args, kwargs): + message = 'Expected call: %s\nActual call: %s' + expected_string = self._format_mock_call_signature(args, kwargs) + call_args = self.call_args + if len(call_args) == 3: + call_args = call_args[1:] + actual_string = self._format_mock_call_signature(*call_args) + return message % (expected_string, actual_string) + + + def assert_called_with(_mock_self, *args, **kwargs): + """assert that the mock was called with the specified arguments. Raises an AssertionError if the args and keyword args passed in are - different to the last call to the mock. - """ + different to the last call to the mock.""" + self = _mock_self if self.call_args is None: - raise AssertionError('Expected: %s\nNot called' % ((args, kwargs),)) - if not self.call_args == (args, kwargs): - raise AssertionError( - 'Expected: %s\nCalled with: %s' % ((args, kwargs), self.call_args) - ) + expected = self._format_mock_call_signature(args, kwargs) + raise AssertionError('Expected call: %s\nNot called' % (expected,)) + if self.call_args != (args, kwargs): + msg = self._format_mock_failure_message(args, kwargs) + raise AssertionError(msg) - def assert_called_once_with(self, *args, **kwargs): - """ - assert that the mock was called exactly once and with the specified - arguments. - """ + + def assert_called_once_with(_mock_self, *args, **kwargs): + """assert that the mock was called exactly once and with the specified + arguments.""" + self = _mock_self if not self.call_count == 1: msg = ("Expected to be called once. Called %s times." % self.call_count) @@ -449,47 +846,242 @@ return self.assert_called_with(*args, **kwargs) + def assert_has_calls(self, calls, any_order=False): + """assert the mock has been called with the specified calls. + The `mock_calls` list is checked for the calls. + + If `any_order` is False (the default) then the calls must be + sequential. There can be extra calls before or after the + specified calls. + + If `any_order` is True then the calls can be in any order, but + they must all appear in `mock_calls`.""" + if not any_order: + if calls not in self.mock_calls: + raise AssertionError( + 'Calls not found.\nExpected: %r\n' + 'Actual: %r' % (calls, self.mock_calls) + ) + return + + all_calls = list(self.mock_calls) + + not_found = [] + for kall in calls: + try: + all_calls.remove(kall) + except ValueError: + not_found.append(kall) + if not_found: + raise AssertionError( + '%r not all found in call list' % (tuple(not_found),) + ) + + + def assert_any_call(self, *args, **kwargs): + """assert the mock has been called with the specified arguments. + + The assert passes if the mock has *ever* been called, unlike + `assert_called_with` and `assert_called_once_with` that only pass if + the call is the most recent one.""" + kall = call(*args, **kwargs) + if kall not in self.call_args_list: + expected_string = self._format_mock_call_signature(args, kwargs) + raise AssertionError( + '%s call not found' % expected_string + ) + + def _get_child_mock(self, **kw): - klass = type(self).__mro__[1] + """Create the child mocks for attributes and return value. + By default child mocks will be the same type as the parent. + Subclasses of Mock may want to override this to customize the way + child mocks are made. 
+ + For non-callable mocks the callable variant will be used (rather than + any custom subclass).""" + _type = type(self) + if not issubclass(_type, CallableMixin): + if issubclass(_type, NonCallableMagicMock): + klass = MagicMock + elif issubclass(_type, NonCallableMock) : + klass = Mock + else: + klass = _type.__mro__[1] return klass(**kw) -class callargs(tuple): - """ - A tuple for holding the results of a call to a mock, either in the form - `(args, kwargs)` or `(name, args, kwargs)`. +def _try_iter(obj): + if obj is None: + return obj + if _is_exception(obj): + return obj + if _callable(obj): + return obj + try: + return iter(obj) + except TypeError: + # XXXX backwards compatibility + # but this will blow up on first call - so maybe we should fail early? + return obj - If args or kwargs are empty then a callargs tuple will compare equal to - a tuple without those values. This makes comparisons less verbose:: - callargs('name', (), {}) == ('name',) - callargs('name', (1,), {}) == ('name', (1,)) - callargs((), {'a': 'b'}) == ({'a': 'b'},) + +class CallableMixin(Base): + + def __init__(self, spec=None, side_effect=None, return_value=DEFAULT, + wraps=None, name=None, spec_set=None, parent=None, + _spec_state=None, _new_name='', _new_parent=None, **kwargs): + self.__dict__['_mock_return_value'] = return_value + + _super(CallableMixin, self).__init__( + spec, wraps, name, spec_set, parent, + _spec_state, _new_name, _new_parent, **kwargs + ) + + self.side_effect = side_effect + + + def _mock_check_sig(self, *args, **kwargs): + # stub method that can be replaced with one with a specific signature + pass + + + def __call__(_mock_self, *args, **kwargs): + # can't use self in-case a function / method we are mocking uses self + # in the signature + _mock_self._mock_check_sig(*args, **kwargs) + return _mock_self._mock_call(*args, **kwargs) + + + def _mock_call(_mock_self, *args, **kwargs): + self = _mock_self + self.called = True + self.call_count += 1 + self.call_args = _Call((args, kwargs), two=True) + self.call_args_list.append(_Call((args, kwargs), two=True)) + + _new_name = self._mock_new_name + _new_parent = self._mock_new_parent + self.mock_calls.append(_Call(('', args, kwargs))) + + seen = set() + skip_next_dot = _new_name == '()' + do_method_calls = self._mock_parent is not None + name = self._mock_name + while _new_parent is not None: + this_mock_call = _Call((_new_name, args, kwargs)) + if _new_parent._mock_new_name: + dot = '.' + if skip_next_dot: + dot = '' + + skip_next_dot = False + if _new_parent._mock_new_name == '()': + skip_next_dot = True + + _new_name = _new_parent._mock_new_name + dot + _new_name + + if do_method_calls: + if _new_name == name: + this_method_call = this_mock_call + else: + this_method_call = _Call((name, args, kwargs)) + _new_parent.method_calls.append(this_method_call) + + do_method_calls = _new_parent._mock_parent is not None + if do_method_calls: + name = _new_parent._mock_name + '.' 
+ name + + _new_parent.mock_calls.append(this_mock_call) + _new_parent = _new_parent._mock_new_parent + + # use ids here so as not to call __hash__ on the mocks + _new_parent_id = id(_new_parent) + if _new_parent_id in seen: + break + seen.add(_new_parent_id) + + ret_val = DEFAULT + effect = self.side_effect + if effect is not None: + if _is_exception(effect): + raise effect + + if not _callable(effect): + result = next(effect) + if _is_exception(result): + raise result + return result + + ret_val = effect(*args, **kwargs) + if ret_val is DEFAULT: + ret_val = self.return_value + + if (self._mock_wraps is not None and + self._mock_return_value is DEFAULT): + return self._mock_wraps(*args, **kwargs) + if ret_val is DEFAULT: + ret_val = self.return_value + return ret_val + + + +class Mock(CallableMixin, NonCallableMock): """ - def __eq__(self, other): - if len(self) == 3: - if other[0] != self[0]: - return False - args_kwargs = self[1:] - other_args_kwargs = other[1:] - else: - args_kwargs = tuple(self) - other_args_kwargs = other + Create a new `Mock` object. `Mock` takes several optional arguments + that specify the behaviour of the Mock object: - if len(other_args_kwargs) == 0: - other_args, other_kwargs = (), {} - elif len(other_args_kwargs) == 1: - if isinstance(other_args_kwargs[0], tuple): - other_args = other_args_kwargs[0] - other_kwargs = {} - else: - other_args = () - other_kwargs = other_args_kwargs[0] - else: - other_args, other_kwargs = other_args_kwargs + * `spec`: This can be either a list of strings or an existing object (a + class or instance) that acts as the specification for the mock object. If + you pass in an object then a list of strings is formed by calling dir on + the object (excluding unsupported magic attributes and methods). Accessing + any attribute not in this list will raise an `AttributeError`. + + If `spec` is an object (rather than a list of strings) then + `mock.__class__` returns the class of the spec object. This allows mocks + to pass `isinstance` tests. + + * `spec_set`: A stricter variant of `spec`. If used, attempting to *set* + or get an attribute on the mock that isn't on the object passed as + `spec_set` will raise an `AttributeError`. + + * `side_effect`: A function to be called whenever the Mock is called. See + the `side_effect` attribute. Useful for raising exceptions or + dynamically changing return values. The function is called with the same + arguments as the mock, and unless it returns `DEFAULT`, the return + value of this function is used as the return value. + + Alternatively `side_effect` can be an exception class or instance. In + this case the exception will be raised when the mock is called. + + If `side_effect` is an iterable then each call to the mock will return + the next value from the iterable. If any of the members of the iterable + are exceptions they will be raised instead of returned. + + * `return_value`: The value returned when the mock is called. By default + this is a new Mock (created on first access). See the + `return_value` attribute. + + * `wraps`: Item for the mock object to wrap. If `wraps` is not None then + calling the Mock will pass the call through to the wrapped object + (returning the real result). Attribute access on the mock will return a + Mock object that wraps the corresponding attribute of the wrapped object + (so attempting to access an attribute that doesn't exist will raise an + `AttributeError`). 
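The `side_effect` handling in `_mock_call` above supports the three forms the docstring describes (sketch)::

    m = mock.Mock(side_effect=ValueError('boom'))
    # m() now raises ValueError('boom')

    m = mock.Mock(side_effect=[1, 2])
    assert m() == 1 and m() == 2    # iterable: one result per call

    m = mock.Mock(side_effect=lambda x: x + 1)
    assert m(1) == 2    # a callable's result is used unless it returns DEFAULT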
+ + If the mock has an explicit `return_value` set then calls are not passed + to the wrapped object and the `return_value` is returned instead. + + * `name`: If the mock has a name then it will be used in the repr of the + mock. This can be useful for debugging. The name is propagated to child + mocks. + + Mocks can also be called with arbitrary keyword arguments. These will be + used to set attributes on the mock after it is created. + """ - return tuple(args_kwargs) == (other_args, other_kwargs) def _dot_lookup(thing, comp, import_path): @@ -511,36 +1103,73 @@ return thing +def _is_started(patcher): + # XXXX horrible + return hasattr(patcher, 'is_local') + + class _patch(object): - def __init__(self, target, attribute, new, spec, create, - mocksignature, spec_set): - self.target = target + + attribute_name = None + _active_patches = set() + + def __init__( + self, getter, attribute, new, spec, create, + spec_set, autospec, new_callable, kwargs + ): + if new_callable is not None: + if new is not DEFAULT: + raise ValueError( + "Cannot use 'new' and 'new_callable' together" + ) + if autospec is not None: + raise ValueError( + "Cannot use 'autospec' and 'new_callable' together" + ) + + self.getter = getter self.attribute = attribute self.new = new + self.new_callable = new_callable self.spec = spec self.create = create self.has_local = False - self.mocksignature = mocksignature self.spec_set = spec_set + self.autospec = autospec + self.kwargs = kwargs + self.additional_patchers = [] def copy(self): - return _patch(self.target, self.attribute, self.new, self.spec, - self.create, self.mocksignature, self.spec_set) + patcher = _patch( + self.getter, self.attribute, self.new, self.spec, + self.create, self.spec_set, + self.autospec, self.new_callable, self.kwargs + ) + patcher.attribute_name = self.attribute_name + patcher.additional_patchers = [ + p.copy() for p in self.additional_patchers + ] + return patcher def __call__(self, func): - if isinstance(func, class_types): + if isinstance(func, ClassTypes): return self.decorate_class(func) - else: - return self.decorate_callable(func) + return self.decorate_callable(func) def decorate_class(self, klass): for attr in dir(klass): + if not attr.startswith(patch.TEST_PREFIX): + continue + attr_value = getattr(klass, attr) - if attr.startswith("test") and hasattr(attr_value, "__call__"): - setattr(klass, attr, self.copy()(attr_value)) + if not hasattr(attr_value, "__call__"): + continue + + patcher = self.copy() + setattr(klass, attr, patcher(attr_value)) return klass @@ -551,171 +1180,398 @@ @wraps(func) def patched(*args, **keywargs): - # don't use a with here (backwards compatability with 2.5) + # don't use a with here (backwards compatability with Python 2.4) extra_args = [] - for patching in patched.patchings: - arg = patching.__enter__() - if patching.new is DEFAULT: - extra_args.append(arg) - args += tuple(extra_args) + entered_patchers = [] + + # can't use try...except...finally because of Python 2.4 + # compatibility + exc_info = tuple() try: - return func(*args, **keywargs) + try: + for patching in patched.patchings: + arg = patching.__enter__() + entered_patchers.append(patching) + if patching.attribute_name is not None: + keywargs.update(arg) + elif patching.new is DEFAULT: + extra_args.append(arg) + + args += tuple(extra_args) + return func(*args, **keywargs) + except: + if (patching not in entered_patchers and + _is_started(patching)): + # the patcher may have been started, but an exception + # raised whilst entering one of its 
additional_patchers + entered_patchers.append(patching) + # Pass the exception to __exit__ + exc_info = sys.exc_info() + # re-raise the exception + raise finally: - for patching in reversed(getattr(patched, 'patchings', [])): - patching.__exit__() + for patching in reversed(entered_patchers): + patching.__exit__(*exc_info) patched.patchings = [self] if hasattr(func, 'func_code'): # not in Python 3 - patched.compat_co_firstlineno = getattr(func, "compat_co_firstlineno", - func.func_code.co_firstlineno) + patched.compat_co_firstlineno = getattr( + func, "compat_co_firstlineno", + func.func_code.co_firstlineno + ) return patched def get_original(self): - target = self.target + target = self.getter() name = self.attribute - create = self.create original = DEFAULT - if _has_local_attr(target, name): - try: - original = target.__dict__[name] - except AttributeError: - # for instances of classes with slots, they have no __dict__ - original = getattr(target, name) - elif not create and not hasattr(target, name): - raise AttributeError("%s does not have the attribute %r" % (target, name)) - return original + local = False + + try: + original = target.__dict__[name] + except (AttributeError, KeyError): + original = getattr(target, name, DEFAULT) + else: + local = True + + if not self.create and original is DEFAULT: + raise AttributeError( + "%s does not have the attribute %r" % (target, name) + ) + return original, local def __enter__(self): + """Perform the patch.""" new, spec, spec_set = self.new, self.spec, self.spec_set - original = self.get_original() - if new is DEFAULT: - # XXXX what if original is DEFAULT - shouldn't use it as a spec + autospec, kwargs = self.autospec, self.kwargs + new_callable = self.new_callable + self.target = self.getter() + + # normalise False to None + if spec is False: + spec = None + if spec_set is False: + spec_set = None + if autospec is False: + autospec = None + + if spec is not None and autospec is not None: + raise TypeError("Can't specify spec and autospec") + if ((spec is not None or autospec is not None) and + spec_set not in (True, None)): + raise TypeError("Can't provide explicit spec_set *and* spec or autospec") + + original, local = self.get_original() + + if new is DEFAULT and autospec is None: inherit = False - if spec_set == True: - spec_set = original - if isinstance(spec_set, class_types): - inherit = True - elif spec == True: + if spec is True: # set spec to the object we are replacing spec = original - if isinstance(spec, class_types): + if spec_set is True: + spec_set = original + spec = None + elif spec is not None: + if spec_set is True: + spec_set = spec + spec = None + elif spec_set is True: + spec_set = original + + if spec is not None or spec_set is not None: + if original is DEFAULT: + raise TypeError("Can't use 'spec' with create=True") + if isinstance(original, ClassTypes): + # If we're patching out a class and there is a spec inherit = True - new = Mock(spec=spec, spec_set=spec_set) - if inherit: - new.return_value = Mock(spec=spec, spec_set=spec_set) + + Klass = MagicMock + _kwargs = {} + if new_callable is not None: + Klass = new_callable + elif spec is not None or spec_set is not None: + this_spec = spec + if spec_set is not None: + this_spec = spec_set + if _is_list(this_spec): + not_callable = '__call__' not in this_spec + else: + not_callable = not _callable(this_spec) + if not_callable: + Klass = NonCallableMagicMock + + if spec is not None: + _kwargs['spec'] = spec + if spec_set is not None: + _kwargs['spec_set'] = spec_set + + 
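The spec normalisation above means that patching a class with `spec=True` produces a callable class mock whose return value is specced like an instance, while a non-callable spec yields a `NonCallableMagicMock` (a sketch; `mymodule.SomeClass` is a placeholder target)::

    with mock.patch('mymodule.SomeClass', spec=True) as MockClass:
        instance = MockClass()       # class mocks stay callable
        instance.existing_method()   # fine if SomeClass defines it
        # instance.no_such_attribute would raise AttributeError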
# add a name to mocks + if (isinstance(Klass, type) and + issubclass(Klass, NonCallableMock) and self.attribute): + _kwargs['name'] = self.attribute + + _kwargs.update(kwargs) + new = Klass(**_kwargs) + + if inherit and _is_instance_mock(new): + # we can only tell if the instance should be callable if the + # spec is not a list + this_spec = spec + if spec_set is not None: + this_spec = spec_set + if (not _is_list(this_spec) and not + _instance_callable(this_spec)): + Klass = NonCallableMagicMock + + _kwargs.pop('name') + new.return_value = Klass(_new_parent=new, _new_name='()', + **_kwargs) + elif autospec is not None: + # spec is ignored, new *must* be default, spec_set is treated + # as a boolean. Should we check spec is not None and that spec_set + # is a bool? + if new is not DEFAULT: + raise TypeError( + "autospec creates the mock for you. Can't specify " + "autospec and new." + ) + if original is DEFAULT: + raise TypeError("Can't use 'autospec' with create=True") + spec_set = bool(spec_set) + if autospec is True: + autospec = original + + new = create_autospec(autospec, spec_set=spec_set, + _name=self.attribute, **kwargs) + elif kwargs: + # can't set keyword args when we aren't creating the mock + # XXXX If new is a Mock we could call new.configure_mock(**kwargs) + raise TypeError("Can't pass kwargs to a mock we aren't creating") + new_attr = new - if self.mocksignature: - original_for_sig = original - if original is DEFAULT and not self.create: - # for mocking signature on methods with - # patch.object(...) - original_for_sig = getattr(self.target, self.attribute) - new_attr = mocksignature(original_for_sig, new) self.temp_original = original + self.is_local = local setattr(self.target, self.attribute, new_attr) + if self.attribute_name is not None: + extra_args = {} + if self.new is DEFAULT: + extra_args[self.attribute_name] = new + for patching in self.additional_patchers: + arg = patching.__enter__() + if patching.new is DEFAULT: + extra_args.update(arg) + return extra_args + return new - def __exit__(self, *_): - if self.temp_original is not DEFAULT: + def __exit__(self, *exc_info): + """Undo the patch.""" + if not _is_started(self): + raise RuntimeError('stop called on unstarted patcher') + + if self.is_local and self.temp_original is not DEFAULT: setattr(self.target, self.attribute, self.temp_original) else: delattr(self.target, self.attribute) + if not self.create and not hasattr(self.target, self.attribute): + # needed for proxy objects like django settings + setattr(self.target, self.attribute, self.temp_original) + del self.temp_original + del self.is_local + del self.target + for patcher in reversed(self.additional_patchers): + if _is_started(patcher): + patcher.__exit__(*exc_info) + + + def start(self): + """Activate a patch, returning any created mock.""" + result = self.__enter__() + self._active_patches.add(self) + return result + + + def stop(self): + """Stop an active patch.""" + self._active_patches.discard(self) + return self.__exit__() - start = __enter__ - stop = __exit__ -def _patch_object(target, attribute, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None): +def _get_target(target): + try: + target, attribute = target.rsplit('.', 1) + except (TypeError, ValueError): + raise TypeError("Need a valid target to patch. 
You supplied: %r" % + (target,)) + getter = lambda: _importer(target) + return getter, attribute + + +def _patch_object( + target, attribute, new=DEFAULT, spec=None, + create=False, spec_set=None, autospec=None, + new_callable=None, **kwargs + ): """ patch.object(target, attribute, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None) + spec_set=None, autospec=None, new_callable=None, **kwargs) patch the named member (`attribute`) on an object (`target`) with a mock object. - Arguments new, spec, create, mocksignature and spec_set have the same - meaning as for patch. - """ - return _patch(target, attribute, new, spec, create, mocksignature, - spec_set) + `patch.object` can be used as a decorator, class decorator or a context + manager. Arguments `new`, `spec`, `create`, `spec_set`, + `autospec` and `new_callable` have the same meaning as for `patch`. Like + `patch`, `patch.object` takes arbitrary keyword arguments for configuring + the mock object it creates. - -def patch_object(*args, **kwargs): - "A deprecated form of patch.object(...)" - warnings.warn(('Please use patch.object instead.'), DeprecationWarning, 2) - return _patch_object(*args, **kwargs) - - -def patch(target, new=DEFAULT, spec=None, create=False, - mocksignature=False, spec_set=None): + When used as a class decorator `patch.object` honours `patch.TEST_PREFIX` + for choosing which methods to wrap. """ - ``patch`` acts as a function decorator, class decorator or a context - manager. Inside the body of the function or with statement, the ``target`` - (specified in the form `'PackageName.ModuleName.ClassName'`) is patched - with a ``new`` object. When the function/with statement exits the patch is - undone. + getter = lambda: target + return _patch( + getter, attribute, new, spec, create, + spec_set, autospec, new_callable, kwargs + ) + + +def _patch_multiple(target, spec=None, create=False, spec_set=None, + autospec=None, new_callable=None, **kwargs): + """Perform multiple patches in a single call. It takes the object to be + patched (either as an object or a string to fetch the object by importing) + and keyword arguments for the patches:: + + with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'): + ... + + Use `DEFAULT` as the value if you want `patch.multiple` to create + mocks for you. In this case the created mocks are passed into a decorated + function by keyword, and a dictionary is returned when `patch.multiple` is + used as a context manager. + + `patch.multiple` can be used as a decorator, class decorator or a context + manager. The arguments `spec`, `spec_set`, `create`, + `autospec` and `new_callable` have the same meaning as for `patch`. These + arguments will be applied to *all* patches done by `patch.multiple`. - The target is imported and the specified attribute patched with the new - object, so it must be importable from the environment you are calling the - decorator from. + When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX` + for choosing which methods to wrap. + """ + if type(target) in (unicode, str): + getter = lambda: _importer(target) + else: + getter = lambda: target - If ``new`` is omitted, then a new ``Mock`` is created and passed in as an - extra argument to the decorated function. 
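A sketch of the styles this machinery supports (`SomeClass`, `mymodule`, `thing` and `other` are placeholder names)::

    # patch.object as a context manager
    with mock.patch.object(SomeClass, 'attribute', new=3):
        assert SomeClass.attribute == 3

    # manual start/stop, e.g. from setUp/tearDown
    patcher = mock.patch.object(SomeClass, 'attribute')
    mocked = patcher.start()
    try:
        mocked.return_value = 42
    finally:
        patcher.stop()    # restores the original attribute

    # patch.multiple with DEFAULT: the created mocks arrive by keyword
    @mock.patch.multiple('mymodule', thing=mock.DEFAULT, other=mock.DEFAULT)
    def test_stuff(thing, other):
        assert isinstance(thing, mock.MagicMock)
        assert isinstance(other, mock.MagicMock)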
+ if not kwargs: + raise ValueError( + 'Must supply at least one keyword argument with patch.multiple' + ) + # need to wrap in a list for python 3, where items is a view + items = list(kwargs.items()) + attribute, new = items[0] + patcher = _patch( + getter, attribute, new, spec, create, spec_set, + autospec, new_callable, {} + ) + patcher.attribute_name = attribute + for attribute, new in items[1:]: + this_patcher = _patch( + getter, attribute, new, spec, create, spec_set, + autospec, new_callable, {} + ) + this_patcher.attribute_name = attribute + patcher.additional_patchers.append(this_patcher) + return patcher + + +def patch( + target, new=DEFAULT, spec=None, create=False, + spec_set=None, autospec=None, new_callable=None, **kwargs + ): + """ + `patch` acts as a function decorator, class decorator or a context + manager. Inside the body of the function or with statement, the `target` + is patched with a `new` object. When the function/with statement exits + the patch is undone. + + If `new` is omitted, then the target is replaced with a + `MagicMock`. If `patch` is used as a decorator and `new` is + omitted, the created mock is passed in as an extra argument to the + decorated function. If `patch` is used as a context manager the created + mock is returned by the context manager. + + `target` should be a string in the form `'package.module.ClassName'`. The + `target` is imported and the specified object replaced with the `new` + object, so the `target` must be importable from the environment you are + calling `patch` from. The target is imported when the decorated function + is executed, not at decoration time. - The ``spec`` and ``spec_set`` keyword arguments are passed to the ``Mock`` + The `spec` and `spec_set` keyword arguments are passed to the `MagicMock` if patch is creating one for you. - In addition you can pass ``spec=True`` or ``spec_set=True``, which causes + In addition you can pass `spec=True` or `spec_set=True`, which causes patch to pass in the object being mocked as the spec/spec_set object. - If ``mocksignature`` is True then the patch will be done with a function - created by mocking the one being replaced. If the object being replaced is - a class then the signature of `__init__` will be copied. If the object - being replaced is a callable object then the signature of `__call__` will - be copied. + `new_callable` allows you to specify a different class, or callable object, + that will be called to create the `new` object. By default `MagicMock` is + used. + + A more powerful form of `spec` is `autospec`. If you set `autospec=True` + then the mock will be created with a spec from the object being replaced. + All attributes of the mock will also have the spec of the corresponding + attribute of the object being replaced. Methods and functions being + mocked will have their arguments checked and will raise a `TypeError` if + they are called with the wrong signature. For mocks replacing a class, + their return value (the 'instance') will have the same spec as the class. + + Instead of `autospec=True` you can pass `autospec=some_object` to use an + arbitrary object as the spec instead of the one being replaced. - By default ``patch`` will fail to replace attributes that don't exist. If - you pass in 'create=True' and the attribute doesn't exist, patch will + By default `patch` will fail to replace attributes that don't exist.
If + you pass in `create=True`, and the attribute doesn't exist, patch will + create the attribute for you when the patched function is called, and delete it again afterwards. This is useful for writing tests against attributes that your production code creates at runtime. It is off by default because it can be dangerous. With it switched on you can write passing tests against APIs that don't actually exist! - Patch can be used as a TestCase class decorator. It works by + Patch can be used as a `TestCase` class decorator. It works by decorating each test method in the class. This reduces the boilerplate - code when your test methods share a common patchings set. + code when your test methods share a common set of patches. `patch` finds + tests by looking for method names that start with `patch.TEST_PREFIX`. + By default this is `test`, which matches the way `unittest` finds tests. + You can specify an alternative prefix by setting `patch.TEST_PREFIX`. + + Patch can be used as a context manager, with the with statement. Here the + patching applies to the indented block after the with statement. If you + use "as" then the patched object will be bound to the name after the + "as"; very useful if `patch` is creating a mock object for you. - Patch can be used with the with statement, if this is available in your - version of Python. Here the patching applies to the indented block after - the with statement. If you use "as" then the patched object will be bound - to the name after the "as"; very useful if `patch` is creating a mock - object for you. + `patch` takes arbitrary keyword arguments. These will be passed to + the `Mock` (or `new_callable`) on construction. - `patch.dict(...)` and `patch.object(...)` are available for alternate - use-cases. + `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` are + available for alternate use-cases. """ - try: - target, attribute = target.rsplit('.', 1) - except (TypeError, ValueError): - raise TypeError("Need a valid target to patch. You supplied: %r" % - (target,)) - target = _importer(target) - return _patch(target, attribute, new, spec, create, mocksignature, spec_set) + getter, attribute = _get_target(target) + return _patch( + getter, attribute, new, spec, create, + spec_set, autospec, new_callable, kwargs + ) class _patch_dict(object): """ - Patch a dictionary and restore the dictionary to its original state after - the test. + Patch a dictionary, or dictionary like object, and restore the dictionary + to its original state after the test. `in_dict` can be a dictionary or a mapping like container. If it is a mapping then it must at least support getting, setting and deleting items @@ -725,24 +1581,35 @@ will then be fetched by importing it. `values` can be a dictionary of values to set in the dictionary. `values` - can also be an iterable of ``(key, value)`` pairs. + can also be an iterable of `(key, value)` pairs. If `clear` is True then the dictionary will be cleared before the new values are set. + + `patch.dict` can also be called with arbitrary keyword arguments to set + values in the dictionary:: + + with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()): + ... + + `patch.dict` can be used as a context manager, decorator or class + decorator. When used as a class decorator `patch.dict` honours + `patch.TEST_PREFIX` for choosing which methods to wrap.
""" - def __init__(self, in_dict, values=(), clear=False): + def __init__(self, in_dict, values=(), clear=False, **kwargs): if isinstance(in_dict, basestring): in_dict = _importer(in_dict) self.in_dict = in_dict # support any argument supported by dict(...) constructor self.values = dict(values) + self.values.update(kwargs) self.clear = clear self._original = None def __call__(self, f): - if isinstance(f, class_types): + if isinstance(f, ClassTypes): return self.decorate_class(f) @wraps(f) def _inner(*args, **kw): @@ -758,7 +1625,8 @@ def decorate_class(self, klass): for attr in dir(klass): attr_value = getattr(klass, attr) - if attr.startswith("test") and hasattr(attr_value, "__call__"): + if (attr.startswith(patch.TEST_PREFIX) and + hasattr(attr_value, "__call__")): decorator = _patch_dict(self.in_dict, self.values, self.clear) decorated = decorator(attr_value) setattr(klass, attr, decorated) @@ -766,6 +1634,7 @@ def __enter__(self): + """Patch the dict.""" self._patch_dict() @@ -809,6 +1678,7 @@ def __exit__(self, *args): + """Unpatch the dict.""" self._unpatch_dict() return False @@ -825,18 +1695,17 @@ del in_dict[key] -patch.object = _patch_object -patch.dict = _patch_dict +def _patch_stopall(): + """Stop all active patches.""" + for patch in list(_patch._active_patches): + patch.stop() -def _has_local_attr(obj, name): - try: - return name in vars(obj) - except TypeError: - # objects without a __dict__ - # XXX could check in the class __dict__ - return hasattr(obj, name) - +patch.object = _patch_object +patch.dict = _patch_dict +patch.multiple = _patch_multiple +patch.stopall = _patch_stopall +patch.TEST_PREFIX = 'test' magic_methods = ( "lt le gt ge eq ne " @@ -849,15 +1718,14 @@ "trunc floor ceil " ) -numerics = "add sub mul div truediv floordiv mod lshift rshift and xor or pow " +numerics = "add sub mul div floordiv mod lshift rshift and xor or pow " inplace = ' '.join('i%s' % n for n in numerics.split()) right = ' '.join('r%s' % n for n in numerics.split()) extra = '' if inPy3k: extra = 'bool next ' else: - extra = 'unicode long nonzero oct hex ' -# __truediv__ and __rtruediv__ not available in Python 3 either + extra = 'unicode long nonzero oct hex truediv rtruediv ' # not including __prepare__, __instancecheck__, __subclasscheck__ # (as they are metaclass methods) @@ -865,21 +1733,25 @@ _non_defaults = set('__%s__' % method for method in [ 'cmp', 'getslice', 'setslice', 'coerce', 'subclasses', - 'dir', 'format', 'get', 'set', 'delete', 'reversed', + 'format', 'get', 'set', 'delete', 'reversed', 'missing', 'reduce', 'reduce_ex', 'getinitargs', 'getnewargs', 'getstate', 'setstate', 'getformat', - 'setformat', 'repr' + 'setformat', 'repr', 'dir' ]) -def get_method(name, func): +def _get_method(name, func): + "Turns a callable object (like a mock) into a real function" def method(self, *args, **kw): return func(self, *args, **kw) method.__name__ = name return method -_magics = set('__%s__' % method for method in ' '.join([magic_methods, numerics, inplace, right, extra]).split()) +_magics = set( + '__%s__' % method for method in + ' '.join([magic_methods, numerics, inplace, right, extra]).split() +) _all_magics = _magics | _non_defaults @@ -898,10 +1770,13 @@ } _return_values = { + '__lt__': NotImplemented, + '__gt__': NotImplemented, + '__le__': NotImplemented, + '__ge__': NotImplemented, '__int__': 1, '__contains__': False, '__len__': 0, - '__iter__': iter([]), '__exit__': False, '__complex__': 1j, '__float__': 1.0, @@ -914,39 +1789,579 @@ } +def _get_eq(self): + def __eq__(other): 
+ ret_val = self.__eq__._mock_return_value + if ret_val is not DEFAULT: + return ret_val + return self is other + return __eq__ + +def _get_ne(self): + def __ne__(other): + if self.__ne__._mock_return_value is not DEFAULT: + return DEFAULT + return self is not other + return __ne__ + +def _get_iter(self): + def __iter__(): + ret_val = self.__iter__._mock_return_value + if ret_val is DEFAULT: + return iter([]) + # if ret_val was already an iterator, then calling iter on it should + # return the iterator unchanged + return iter(ret_val) + return __iter__ + +_side_effect_methods = { + '__eq__': _get_eq, + '__ne__': _get_ne, + '__iter__': _get_iter, +} + + + def _set_return_value(mock, method, name): - return_value = DEFAULT - if name in _return_values: - return_value = _return_values[name] - elif name in _calculate_return_value: + fixed = _return_values.get(name, DEFAULT) + if fixed is not DEFAULT: + method.return_value = fixed + return + + return_calculator = _calculate_return_value.get(name) + if return_calculator is not None: try: - return_value = _calculate_return_value[name](mock) + return_value = return_calculator(mock) except AttributeError: + # XXXX why do we return AttributeError here? + # set it as a side_effect instead? return_value = AttributeError(name) - if return_value is not DEFAULT: method.return_value = return_value + return + + side_effector = _side_effect_methods.get(name) + if side_effector is not None: + method.side_effect = side_effector(mock) + + + +class MagicMixin(object): + def __init__(self, *args, **kw): + _super(MagicMixin, self).__init__(*args, **kw) + self._mock_set_magics() + + def _mock_set_magics(self): + these_magics = _magics + + if self._mock_methods is not None: + these_magics = _magics.intersection(self._mock_methods) + + remove_magics = set() + remove_magics = _magics - these_magics + + for entry in remove_magics: + if entry in type(self).__dict__: + # remove unneeded magic methods + delattr(self, entry) + + # don't overwrite existing attributes if called a second time + these_magics = these_magics - set(type(self).__dict__) + + _type = type(self) + for entry in these_magics: + setattr(_type, entry, MagicProxy(entry, self)) -class MagicMock(Mock): + + +class NonCallableMagicMock(MagicMixin, NonCallableMock): + """A version of `MagicMock` that isn't callable.""" + def mock_add_spec(self, spec, spec_set=False): + """Add a spec to a mock. `spec` can either be an object or a + list of strings. Only attributes on the `spec` can be fetched as + attributes from the mock. + + If `spec_set` is True then only attributes on the spec can be set.""" + self._mock_add_spec(spec, spec_set) + self._mock_set_magics() + + + +class MagicMock(MagicMixin, Mock): """ - MagicMock is a subclass of :Mock with default implementations + MagicMock is a subclass of Mock with default implementations of most of the magic methods. You can use MagicMock without having to configure the magic methods yourself. - If you use the ``spec`` or ``spec_set`` arguments then *only* magic + If you use the `spec` or `spec_set` arguments then *only* magic methods that exist in the spec will be created. Attributes and the return value of a `MagicMock` will also be `MagicMocks`. """ - def __init__(self, *args, **kw): - Mock.__init__(self, *args, **kw) + def mock_add_spec(self, spec, spec_set=False): + """Add a spec to a mock. `spec` can either be an object or a + list of strings. Only attributes on the `spec` can be fetched as + attributes from the mock.
- these_magics = _magics - if self._methods is not None: - these_magics = _magics.intersection(self._methods) + If `spec_set` is True then only attributes on the spec can be set.""" + self._mock_add_spec(spec, spec_set) + self._mock_set_magics() - for entry in these_magics: - # could specify parent? - m = Mock() - setattr(self, entry, m) - _set_return_value(self, m, entry) + + +class MagicProxy(object): + def __init__(self, name, parent): + self.name = name + self.parent = parent + + def __call__(self, *args, **kwargs): + m = self.create_mock() + return m(*args, **kwargs) + + def create_mock(self): + entry = self.name + parent = self.parent + m = parent._get_child_mock(name=entry, _new_name=entry, + _new_parent=parent) + setattr(parent, entry, m) + _set_return_value(parent, m, entry) + return m + + def __get__(self, obj, _type=None): + return self.create_mock() + + + +class _ANY(object): + "A helper object that compares equal to everything." + + def __eq__(self, other): + return True + + def __ne__(self, other): + return False + + def __repr__(self): + return '<ANY>' + +ANY = _ANY() + + + +def _format_call_signature(name, args, kwargs): + message = '%s(%%s)' % name + formatted_args = '' + args_string = ', '.join([repr(arg) for arg in args]) + kwargs_string = ', '.join([ + '%s=%r' % (key, value) for key, value in kwargs.items() + ]) + if args_string: + formatted_args = args_string + if kwargs_string: + if formatted_args: + formatted_args += ', ' + formatted_args += kwargs_string + + return message % formatted_args + + + +class _Call(tuple): + """ + A tuple for holding the results of a call to a mock, either in the form + `(args, kwargs)` or `(name, args, kwargs)`. + + If args or kwargs are empty then a call tuple will compare equal to + a tuple without those values. This makes comparisons less verbose:: + + _Call(('name', (), {})) == ('name',) + _Call(('name', (1,), {})) == ('name', (1,)) + _Call(((), {'a': 'b'})) == ({'a': 'b'},) + + The `_Call` object provides a useful shortcut for comparing with call:: + + _Call(((1, 2), {'a': 3})) == call(1, 2, a=3) + _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3) + + If the _Call has no name then it will match any name.
+ """ + def __new__(cls, value=(), name=None, parent=None, two=False, + from_kall=True): + name = '' + args = () + kwargs = {} + _len = len(value) + if _len == 3: + name, args, kwargs = value + elif _len == 2: + first, second = value + if isinstance(first, basestring): + name = first + if isinstance(second, tuple): + args = second + else: + kwargs = second + else: + args, kwargs = first, second + elif _len == 1: + value, = value + if isinstance(value, basestring): + name = value + elif isinstance(value, tuple): + args = value + else: + kwargs = value + + if two: + return tuple.__new__(cls, (args, kwargs)) + + return tuple.__new__(cls, (name, args, kwargs)) + + + def __init__(self, value=(), name=None, parent=None, two=False, + from_kall=True): + self.name = name + self.parent = parent + self.from_kall = from_kall + + + def __eq__(self, other): + if other is ANY: + return True + try: + len_other = len(other) + except TypeError: + return False + + self_name = '' + if len(self) == 2: + self_args, self_kwargs = self + else: + self_name, self_args, self_kwargs = self + + other_name = '' + if len_other == 0: + other_args, other_kwargs = (), {} + elif len_other == 3: + other_name, other_args, other_kwargs = other + elif len_other == 1: + value, = other + if isinstance(value, tuple): + other_args = value + other_kwargs = {} + elif isinstance(value, basestring): + other_name = value + other_args, other_kwargs = (), {} + else: + other_args = () + other_kwargs = value + else: + # len 2 + # could be (name, args) or (name, kwargs) or (args, kwargs) + first, second = other + if isinstance(first, basestring): + other_name = first + if isinstance(second, tuple): + other_args, other_kwargs = second, {} + else: + other_args, other_kwargs = (), second + else: + other_args, other_kwargs = first, second + + if self_name and other_name != self_name: + return False + + # this order is important for ANY to work! + return (other_args, other_kwargs) == (self_args, self_kwargs) + + + def __ne__(self, other): + return not self.__eq__(other) + + + def __call__(self, *args, **kwargs): + if self.name is None: + return _Call(('', args, kwargs), name='()') + + name = self.name + '()' + return _Call((self.name, args, kwargs), name=name, parent=self) + + + def __getattr__(self, attr): + if self.name is None: + return _Call(name=attr, from_kall=False) + name = '%s.%s' % (self.name, attr) + return _Call(name=name, parent=self, from_kall=False) + + + def __repr__(self): + if not self.from_kall: + name = self.name or 'call' + if name.startswith('()'): + name = 'call%s' % name + return name + + if len(self) == 2: + name = 'call' + args, kwargs = self + else: + name, args, kwargs = self + if not name: + name = 'call' + elif not name.startswith('()'): + name = 'call.%s' % name + else: + name = 'call%s' % name + return _format_call_signature(name, args, kwargs) + + + def call_list(self): + """For a call object that represents multiple calls, `call_list` + returns a list of all the intermediate calls as well as the + final call.""" + vals = [] + thing = self + while thing is not None: + if thing.from_kall: + vals.append(thing) + thing = thing.parent + return _CallList(reversed(vals)) + + +call = _Call(from_kall=False) + + + +def create_autospec(spec, spec_set=False, instance=False, _parent=None, + _name=None, **kwargs): + """Create a mock object using another object as a spec. Attributes on the + mock will use the corresponding attribute on the `spec` object as their + spec. 
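`call` (the module-level `_Call` instance defined above) and `ANY` make recorded-call comparisons readable (sketch)::

    m = mock.MagicMock()
    m.method(1)
    m.method(2, x=3)
    m.assert_has_calls([mock.call.method(1), mock.call.method(2, x=3)])
    m.method.assert_called_with(2, x=mock.ANY)   # ANY compares equal to anything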
+ + Functions or methods being mocked will have their arguments checked + to check that they are called with the correct signature. + + If `spec_set` is True then attempting to set attributes that don't exist + on the spec object will raise an `AttributeError`. + + If a class is used as a spec then the return value of the mock (the + instance of the class) will have the same spec. You can use a class as the + spec for an instance object by passing `instance=True`. The returned mock + will only be callable if instances of the mock are callable. + + `create_autospec` also takes arbitrary keyword arguments that are passed to + the constructor of the created mock.""" + if _is_list(spec): + # can't pass a list instance to the mock constructor as it will be + # interpreted as a list of strings + spec = type(spec) + + is_type = isinstance(spec, ClassTypes) + + _kwargs = {'spec': spec} + if spec_set: + _kwargs = {'spec_set': spec} + elif spec is None: + # None we mock with a normal mock without a spec + _kwargs = {} + + _kwargs.update(kwargs) + + Klass = MagicMock + if type(spec) in DescriptorTypes: + # descriptors don't have a spec + # because we don't know what type they return + _kwargs = {} + elif not _callable(spec): + Klass = NonCallableMagicMock + elif is_type and instance and not _instance_callable(spec): + Klass = NonCallableMagicMock + + _new_name = _name + if _parent is None: + # for a top level object no _new_name should be set + _new_name = '' + + mock = Klass(parent=_parent, _new_parent=_parent, _new_name=_new_name, + name=_name, **_kwargs) + + if isinstance(spec, FunctionTypes): + # should only happen at the top level because we don't + # recurse for functions + mock = _set_signature(mock, spec) + else: + _check_signature(spec, mock, is_type, instance) + + if _parent is not None and not instance: + _parent._mock_children[_name] = mock + + if is_type and not instance and 'return_value' not in kwargs: + mock.return_value = create_autospec(spec, spec_set, instance=True, + _name='()', _parent=mock) + + for entry in dir(spec): + if _is_magic(entry): + # MagicMock already does the useful magic methods for us + continue + + if isinstance(spec, FunctionTypes) and entry in FunctionAttributes: + # allow a mock to actually be a function + continue + + # XXXX do we need a better way of getting attributes without + # triggering code execution (?) Probably not - we need the actual + # object to mock it so we would rather trigger a property than mock + # the property descriptor. Likewise we want to mock out dynamically + # provided attributes. + # XXXX what about attributes that raise exceptions other than + # AttributeError on being fetched? 
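In use, `create_autospec` gives signature-checked mocks (sketch)::

    def compare(a, b):
        return a == b

    mock_compare = mock.create_autospec(compare, return_value=True)
    mock_compare(1, 2)                     # matches the real signature
    mock_compare.assert_called_with(1, 2)
    # mock_compare(1) would raise TypeError: the signature is enforced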
+ # we could be resilient against it, or catch and propagate the + # exception when the attribute is fetched from the mock + try: + original = getattr(spec, entry) + except AttributeError: + continue + + kwargs = {'spec': original} + if spec_set: + kwargs = {'spec_set': original} + + if not isinstance(original, FunctionTypes): + new = _SpecState(original, spec_set, mock, entry, instance) + mock._mock_children[entry] = new + else: + parent = mock + if isinstance(spec, FunctionTypes): + parent = mock.mock + + new = MagicMock(parent=parent, name=entry, _new_name=entry, + _new_parent=parent, **kwargs) + mock._mock_children[entry] = new + skipfirst = _must_skip(spec, entry, is_type) + _check_signature(original, new, skipfirst=skipfirst) + + # so functions created with _set_signature become instance attributes, + # *plus* their underlying mock exists in _mock_children of the parent + # mock. Adding to _mock_children may be unnecessary where we are also + # setting as an instance attribute? + if isinstance(new, FunctionTypes): + setattr(mock, entry, new) + + return mock + + +def _must_skip(spec, entry, is_type): + if not isinstance(spec, ClassTypes): + if entry in getattr(spec, '__dict__', {}): + # instance attribute - shouldn't skip + return False + spec = spec.__class__ + if not hasattr(spec, '__mro__'): + # old style class: can't have descriptors anyway + return is_type + + for klass in spec.__mro__: + result = klass.__dict__.get(entry, DEFAULT) + if result is DEFAULT: + continue + if isinstance(result, (staticmethod, classmethod)): + return False + return is_type + + # shouldn't get here unless function is a dynamically provided attribute + # XXXX untested behaviour + return is_type + + +def _get_class(obj): + try: + return obj.__class__ + except AttributeError: + # in Python 2, _sre.SRE_Pattern objects have no __class__ + return type(obj) + + +class _SpecState(object): + + def __init__(self, spec, spec_set=False, parent=None, + name=None, ids=None, instance=False): + self.spec = spec + self.ids = ids + self.spec_set = spec_set + self.parent = parent + self.instance = instance + self.name = name + + +FunctionTypes = ( + # python function + type(create_autospec), + # instance method + type(ANY.__eq__), + # unbound method + type(_ANY.__eq__), +) + +FunctionAttributes = set([ + 'func_closure', + 'func_code', + 'func_defaults', + 'func_dict', + 'func_doc', + 'func_globals', + 'func_name', +]) + + +file_spec = None + + +def mock_open(mock=None, read_data=''): + """ + A helper function to create a mock to replace the use of `open`. It works + for `open` called directly or used as a context manager. + + The `mock` argument is the mock object to configure. If `None` (the + default) then a `MagicMock` will be created for you, with the API limited + to methods or attributes available on standard file handles. + + `read_data` is a string for the `read` method of the file handle to return. + This is an empty string by default. + """ + global file_spec + if file_spec is None: + # set on first use + if inPy3k: + import _io + file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO)))) + else: + file_spec = file + + if mock is None: + mock = MagicMock(name='open', spec=open) + + handle = MagicMock(spec=file_spec) + handle.write.return_value = None + handle.__enter__.return_value = handle + handle.read.return_value = read_data + + mock.return_value = handle + return mock + + +class PropertyMock(Mock): + """ + A mock intended to be used as a property, or other descriptor, on a class. 
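`mock_open` is normally combined with `patch` on a module's `open` name, following the upstream idiom (`create=True` because `open` is a builtin that is not usually present in the module's namespace)::

    m = mock.mock_open(read_data='bibble')
    with mock.patch('__main__.open', m, create=True):
        with open('foo') as handle:
            assert handle.read() == 'bibble'
    m.assert_called_once_with('foo')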
+ `PropertyMock` provides `__get__` and `__set__` methods so you can specify + a return value when it is fetched. + + Fetching a `PropertyMock` instance from an object calls the mock, with + no args. Setting it calls the mock with the value being set. + """ + def _get_child_mock(self, **kwargs): + return MagicMock(**kwargs) + + def __get__(self, obj, obj_type): + return self() + def __set__(self, obj, val): + self(val) diff -Nru miro-4.0.4/lib/test/moviedatatest.py miro-6.0/lib/test/moviedatatest.py --- miro-4.0.4/lib/test/moviedatatest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/moviedatatest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,198 +0,0 @@ -"""This module tests miro.moviedata for use of the data provided by -miro.filetags and the moviedataprogram. -""" - -from miro.test.framework import EventLoopTest, MiroTestCase - -import json -from os import path - -from miro import moviedata -from miro import filetags -from miro import metadata -from miro import app -from miro import models -from miro import filetypes -from miro.feed import Feed -from miro.plat import resources -from miro.plat import renderers -from miro.fileobject import FilenameType - -import time - -moviedata.MOVIE_DATA_UTIL_TIMEOUT = 10 # shouldn't break any other tests -renderers.init_renderer() # won't break other tests since nothing else touches - # plat.renderers - -class Namespace(dict): - __getattr__ = dict.__getitem__ - def __setattr__(self, a, v): - if a in self.__dict__: - dict.__setattr__(self, a, v) - else: - dict.__setitem__(self, a, v) - __delattr__ = dict.__delitem__ - __hasattr__ = dict.__contains__ - -class FakeItem(Namespace, metadata.Source): - """Acts like an item, but uses the Namespace class to become a dict of any - properties set on it. - """ - def __init__(self, filename): - Namespace.__init__(self) - metadata.Source.__init__(self) - filename = resources.path(path.join('testdata', 'metadata', filename)) - self.__dict__['_filename'] = filename - self.__dict__['id'] = 9999 - self.mdp_state = None - self.file_type = None - - def get_filename(self): return self._filename - def id_exists(self): return True - def signal_change(self): pass - -class MovieDataTest(EventLoopTest): - def setUp(self): - app.testing_mdp = True # hack to override moviedata's in_unit_tests hack - EventLoopTest.setUp(self) - self.mdu = moviedata.MovieDataUpdater() - - def tearDown(self): - del app.testing_mdp - EventLoopTest.tearDown(self) - - def check_media_file(self, item): - # this is much like Item.check_media_file, be we don't want to catch any - # exceptions here - item.file_type = filetypes.item_file_type_for_filename(item._filename) - item.read_metadata() - item.signal_change() - - self.assertTrue(self.mdu.queue.empty()) - self.mdu.request_update(item) - if not self.mdu.queue.empty(): - self.mdu.process_item() - self.process_idles() - self.assertTrue(self.mdu.queue.empty()) - if item.file_type is None: - item.file_type = u'other' - item.signal_change() - - def process_file(self, test): - item = FakeItem(test) - self.check_media_file(item) - return item - - # def test_media_with_mdp(self): - # results_path = resources.path(path.join('testdata', 'moviedata.json')) - # expected_results = json.load(open(results_path)) - # for filename, expected in expected_results.iteritems(): - # actual = self.process_file(FilenameType(filename)) - # self.assertNotEqual(actual.mdp_state, None, filename) - # del actual['mdp_state'] - # expected['metadata_version'] = filetags.METADATA_VERSION - # expected['test'], actual.test 
= filename, filename - # if hasattr(actual, 'cover_art'): - # actual.cover_art = bool(actual.cover_art) - # if hasattr(actual, 'screenshot'): - # actual.screenshot = bool(actual.screenshot) - # expected = dict((str(k), v) for k, v in expected.iteritems()) - # actual = dict(actual) - # self.assertEqual(actual, expected) - # assert actual == expected, ("metadata wrong for %s " - # "actual: %r expected: %r" % (filename, actual, expected)) - -class MovieDataRequestTest(MiroTestCase): - """Test when we choose to invoke our moviedata programs.""" - def setUp(self): - app.testing_mdp = True # hack to override moviedata's in_unit_tests hack - MiroTestCase.setUp(self) - self.feed = models.Feed(u'dtv:manualFeed') - mp3_path = resources.path("testdata/metadata/mp3-0.mp3") - webm_path = resources.path("testdata/metadata/webm-0.webm") - jpg_path = resources.path("testdata/dean.jpg") - - self.audio_item = models.FileItem(mp3_path, self.feed.id) - self.video_item = models.FileItem(webm_path, self.feed.id) - self.other_item = models.FileItem(jpg_path, self.feed.id) - - def tearDown(self): - del app.testing_mdp - MiroTestCase.tearDown(self) - - def rerun_request_update(self): - """Rerun the request_update code(). - - Use this method if you want to change attributes on the items and see - the effects on the movie data request code. - """ - for item in (self.audio_item, self.video_item, self.other_item): - # reset mdp_state to SKIPPED to simulate this being the first time - # we called request_update() - item.mdp_state = None - item.signal_change() - moviedata.movie_data_updater.request_update(item) - - def check_will_run_moviedata(self, item, should_run): - # check MovieDataUpdater._should_process_item() - mdu = moviedata.movie_data_updater - self.assertEquals(mdu._should_process_item(item), should_run) - # check incomplete_mdp_view - incomplete_view = set(models.Item.incomplete_mdp_view()) - self.assertEquals(item in incomplete_view, should_run) - - def check_path_processed(self, item, should_run): - # Check if path_processed was called. Note: this can only test based - # on the initial state of the item. If we fiddle with it's - # attributes, then we shouldn't call this. - - paths_processed = self.metadata_progress_updater.paths_processed - if should_run: - # If we will call movie data, then path_processed shouldn't be - # called until that happens, which is never in the unit tests. 
- if item.filename in paths_processed: - raise AssertionError("path_processed() called for %s when " - "it shouldn't have been (path_processed: %s)" % - (item.filename, paths_processed)) - else: - if item.filename not in paths_processed: - raise AssertionError("path_processed() not called for %s when " - "it shouldn't have been (paths_processed: %s)" % - (item.filename, paths_processed)) - - def test_initial_mutagan_worked_audio(self): - # shouldn't run moviedata for audio that mutagen can process - self.check_will_run_moviedata(self.audio_item, False) - self.check_path_processed(self.audio_item, False) - - def test_initial_mutagan_worked_video(self): - # should run moviedata for video that mutagen can process - self.check_will_run_moviedata(self.video_item, True) - self.check_path_processed(self.video_item, True) - - def test_initial_mutagan_failed_other(self): - # shouldn't run moviedata for other filenames - self.check_will_run_moviedata(self.other_item, False) - self.check_path_processed(self.other_item, False) - - def test_run_moviedata_no_duration(self): - # we should always run moviedata if mutagen can't determine the - # duration - self.audio_item.duration = self.video_item.duration = None - self.rerun_request_update() - self.check_will_run_moviedata(self.video_item, True) - self.check_will_run_moviedata(self.audio_item, True) - - def test_run_moviedata_no_screenshot(self): - # we should run moviedata if it's a video item and we haven't captured - # a screenshot - self.audio_item.screenshot = self.video_item.screenshot = None - self.rerun_request_update() - self.check_will_run_moviedata(self.video_item, True) - self.check_will_run_moviedata(self.audio_item, False) - -# FIXME -# theora_with_ogg_extension test case expected to have a screenshot") -# mp4-0 test case expected to have a screenshot") -# drm.m4v test case expected to have a screenshot") -# webm-0.assertEqual(item.duration, *something*) diff -Nru miro-4.0.4/lib/test/performancetest.py miro-6.0/lib/test/performancetest.py --- miro-4.0.4/lib/test/performancetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/performancetest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,68 +0,0 @@ -import shutil -import os -import pstats -import cProfile - -from miro import app -from miro import messagehandler -from miro import messages -from miro import models -from miro.fileobject import FilenameType -from miro.test.framework import EventLoopTest -from miro.test import messagetest - -class PerformanceTest(EventLoopTest): - def setUp(self): - print 'setting up' - EventLoopTest.setUp(self) - # We want to store our database in a file, so that we can test - # performance on a freshly opened db. 
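(The measurement helper of this removed test, `_run_test` below, is plain standard-library profiling; roughly::

    import cProfile
    import pstats

    cProfile.runctx("self.track_items()", globals(), locals(), stats_path)
    stats = pstats.Stats(stats_path)
    stats.strip_dirs().sort_stats("cumulative").print_stats(0.2)  # top 20% of entries

where `stats_path` is the temporary .prof file created in setUp.)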
- save_path = FilenameType(self.make_temp_path(extension=".db")) - if os.path.exists(save_path): - os.unlink(save_path) - self.reload_database(save_path) - models.Feed(u'dtv:search') - self.test_handler = messagetest.TestFrontendMessageHandler() - messages.FrontendMessage.install_handler(self.test_handler) - self.backend_message_handler = messagehandler.BackendMessageHandler(None) - messages.BackendMessage.install_handler(self.backend_message_handler) - self.feed = models.Feed(u'dtv:manualFeed') - self.items = [] - template_file = self.make_temp_path(".avi") - self.stats_path = self.make_temp_path(".prof") - open(template_file, 'w').write(' ') - app.bulk_sql_manager.start() - for x in xrange(5000): - path = self.make_temp_path(".avi") - shutil.copyfile(template_file, path) - models.FileItem(path, self.feed.id) - app.bulk_sql_manager.finish() - self.reload_database(save_path) - - def _run_test(self, code): - print 'testing performance for %s' % code - cProfile.runctx(code, globals(), locals(), - self.stats_path) - stats = pstats.Stats(self.stats_path) - print '*' * 20 + "first run" + "*" * 20 - stats.strip_dirs().sort_stats("cumulative").print_stats(0.2) - - cProfile.runctx(code, globals(), locals(), - self.stats_path) - stats = pstats.Stats(self.stats_path) - print '*' * 20 + "second run" + "*" * 20 - stats.strip_dirs().sort_stats("cumulative").print_stats(0.2) - - def test_track_items(self): - self._run_test("self.track_items()") - - def test_track_item_count(self): - self._run_test("self.track_item_count()") - - def track_items(self): - messages.TrackItems('feed', self.feed.id).send_to_backend() - self.runUrgentCalls() - - def track_item_count(self): - messages.TrackNewVideoCount().send_to_backend() - self.runUrgentCalls() diff -Nru miro-4.0.4/lib/test/playbacktest.py miro-6.0/lib/test/playbacktest.py --- miro-4.0.4/lib/test/playbacktest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/playbacktest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,205 @@ +from miro.frontends.widgets import playback +from miro import signals +from miro.test import mock +from miro.test.framework import MiroTestCase + +class MockItemList(signals.SignalEmitter): + def __init__(self, items): + signals.SignalEmitter.__init__(self, 'items-changed', 'list-changed') + self.items = items + + def _get_child_mock(self, **kwargs): + return mock.Mock(**kwargs) + + def __len__(self): + return len(self.items) + + def get_first_item(self): + return self.items[0] + + def get_item(self, id): + return self.items[self.get_index(id)] + + def item_in_list(self, id): + for item in self.items: + if item.id == id: + return True + return False + + def get_index(self, id): + for i, item in enumerate(self.items): + if item.id == id: + return i + raise KeyError(id) + + def get_row(self, index): + return self.items[index] + + def get_playable_ids(self): + return [item.id for item in self.items if item.is_playable] + +class PlaybackPlaylistTest(MiroTestCase): + def setUp(self): + MiroTestCase.setUp(self) + self.mock_item_list_pool = self.patch_for_test( + 'miro.app.item_list_pool') + self.items = [ + mock.Mock(id=0, title='one', is_playable=True), + mock.Mock(id=1, title='two', is_playable=True), + mock.Mock(id=2, title='three', is_playable=False), + mock.Mock(id=3, title='four', is_playable=True), + ] + self.item_list = MockItemList(self.items) + + def check_currently_playing(self, playlist, correct_index): + if playlist.currently_playing != self.items[correct_index]: + raise AssertionError("Item %s != Item %s" % + 
(playlist.currently_playing.title, + self.items[correct_index].title)) + + def test_normal(self): + playlist = playback.PlaybackPlaylist(self.item_list, 1, False, False) + self.check_currently_playing(playlist, 1) + playlist.select_next_item() + self.check_currently_playing(playlist, 3) + playlist.select_next_item() + self.assertEquals(playlist.currently_playing, None) + + def test_repeat(self): + playlist = playback.PlaybackPlaylist(self.item_list, 1, False, True) + self.check_currently_playing(playlist, 1) + playlist.select_next_item() + self.check_currently_playing(playlist, 3) + playlist.select_next_item() + # after the end of the list, we should wrap around and continue + # playing + self.check_currently_playing(playlist, 0) + playlist.select_next_item() + self.check_currently_playing(playlist, 1) + + def test_shuffle(self): + mock_choice = self.patch_for_test('random.choice') + playlist = playback.PlaybackPlaylist(self.item_list, 1, True, False) + # playback starts on the first item + self.check_currently_playing(playlist, 1) + # the next item should use random.choice. We should pick between the + # 2 remaining playable items + mock_choice.return_value = 3 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0, 3]) + self.check_currently_playing(playlist, 3) + # the next item should use random.choice() with the last playable item + mock_choice.return_value = 0 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0]) + self.check_currently_playing(playlist, 0) + # the next time, we should stop playback without calling choice() + mock_choice.reset_mock() + playlist.select_next_item() + self.assertEquals(playlist.currently_playing, None) + self.assertEquals(mock_choice.call_count, 0) + + def test_shuffle_repeat(self): + mock_choice = self.patch_for_test('random.choice') + playlist = playback.PlaybackPlaylist(self.item_list, 1, True, True) + # playback starts on the first item + self.check_currently_playing(playlist, 1) + # the next item should use random.choice. We should pick between the + # 3 playable items in the list + mock_choice.return_value = 3 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0, 1, 3]) + self.check_currently_playing(playlist, 3) + # the next item should do the same + mock_choice.return_value = 0 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0, 1, 3]) + self.check_currently_playing(playlist, 0) + + def test_forward_back(self): + playlist = playback.PlaybackPlaylist(self.item_list, 0, False, False) + self.check_currently_playing(playlist, 0) + playlist.select_next_item() + self.check_currently_playing(playlist, 1) + playlist.select_previous_item() + self.check_currently_playing(playlist, 0) + playlist.select_next_item() + self.check_currently_playing(playlist, 1) + + def test_shuffle_forward_back(self): + mock_choice = self.patch_for_test('random.choice') + playlist = playback.PlaybackPlaylist(self.item_list, 1, True, True) + # playback starts on the first item + self.check_currently_playing(playlist, 1) + # the next item should use random.choice. 
We should pick between the + # 2 remaining playable items + mock_choice.return_value = 3 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0, 1, 3]) + self.check_currently_playing(playlist, 3) + # going back should return us to item #1, without calling choice() + mock_choice.reset_mock() + playlist.select_previous_item() + self.assertEquals(mock_choice.call_count, 0) + self.check_currently_playing(playlist, 1) + # going forward should return us to item #3, again without calling + # choice() + playlist.select_next_item() + self.assertEquals(mock_choice.call_count, 0) + self.check_currently_playing(playlist, 3) + # going forward again should return to the normal shuffle logic + mock_choice.return_value = 0 + playlist.select_next_item() + self.assertEquals(mock_choice.call_args[0][0], [0, 1, 3]) + self.check_currently_playing(playlist, 0) + + def test_item_change(self): + playlist = playback.PlaybackPlaylist(self.item_list, 0, False, False) + mock_handler = mock.Mock() + playlist.connect("playing-info-changed", mock_handler) + self.check_currently_playing(playlist, 0) + # simulate an item changing titles + new_item = mock.Mock(id=0, title='New item one', is_playable=True) + self.item_list.items[0] = new_item + self.item_list.emit('items-changed', set([new_item.id])) + self.assertEquals(mock_handler.call_count, 1) + self.assertEquals(playlist.currently_playing, new_item) + # simulate a change to an item that isn't playing + mock_handler.reset_mock() + new_item2 = mock.Mock(id=2, title='New item three', is_playable=True) + self.item_list.items[2] = new_item2 + self.item_list.emit('items-changed', set([new_item2.id])) + self.assertEquals(mock_handler.call_count, 0) + # simulate an item changing to not playable + new_item3 = mock.Mock(id=0, title='New item one', is_playable=False) + self.item_list.items[0] = new_item3 + self.item_list.emit('items-changed', set([new_item3.id])) + self.assertEquals(mock_handler.call_count, 1) + self.assertEquals(playlist.currently_playing, None) + + def test_remove_from_list(self): + playlist = playback.PlaybackPlaylist(self.item_list, 0, False, False) + mock_handler = mock.Mock() + playlist.connect("playing-info-changed", mock_handler) + self.check_currently_playing(playlist, 0) + # simulate an item getting removed from the list; we should keep + # playing the removed item + removed = self.item_list.items.pop(0) + self.item_list.emit('list-changed') + self.assertEquals(mock_handler.call_count, 0) + self.assertEquals(playlist.currently_playing, removed) + + def test_empty_item_list(self): + empty_list = MockItemList([]) + playlist = playback.PlaybackPlaylist(empty_list, 0, False, False) + self.assertEquals(playlist.currently_playing, None) + + def test_add_ref_and_release(self): + self.assertEquals(self.mock_item_list_pool.add_ref.call_count, 0) + self.assertEquals(self.mock_item_list_pool.release.call_count, 0) + playlist = playback.PlaybackPlaylist(self.item_list, 0, False, False) + self.assertEquals(self.mock_item_list_pool.add_ref.call_count, 1) + self.assertEquals(self.mock_item_list_pool.release.call_count, 0) + playlist.finished() + self.assertEquals(self.mock_item_list_pool.add_ref.call_count, 1) + self.assertEquals(self.mock_item_list_pool.release.call_count, 1) diff -Nru miro-4.0.4/lib/test/playlisttest.py miro-6.0/lib/test/playlisttest.py --- miro-4.0.4/lib/test/playlisttest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/playlisttest.py 2013-04-05 16:02:42.000000000 +0000 @@ -2,7 +2,7 @@ import os from miro.feed
import Feed -from miro.item import Item, FeedParserValues +from miro.item import Item, FeedParserValues, setup_metadata_manager from miro.playlist import SavedPlaylist, PlaylistItemMap from miro.folder import PlaylistFolder, PlaylistFolderItemMap from miro import app @@ -179,6 +179,8 @@ shutil.copyfile(old_db_path, self.tmp_path) self.reload_database(self.tmp_path) app.db.upgrade_database() + # reload the metadata manager since we now have a new DB + setup_metadata_manager(self.tempdir) # figure out which maps were created folder_maps = set() playlist_maps = set() diff -Nru miro-4.0.4/lib/test/schedulertest.py miro-6.0/lib/test/schedulertest.py --- miro-4.0.4/lib/test/schedulertest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/schedulertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -53,7 +53,7 @@ for i in range(threadCount): t = threading.Thread(target=thread) t.start() - eventloop.add_timeout(1, self.callback, "foo", kwargs={'stop':1}) + eventloop.add_timeout(1.2, self.callback, "foo", kwargs={'stop':1}) self.runEventLoop() totalCalls = len(timeouts) * threadCount + 1 self.assertEquals(len(self.got_args), totalCalls) diff -Nru miro-4.0.4/lib/test/searchtest.py miro-6.0/lib/test/searchtest.py --- miro-4.0.4/lib/test/searchtest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/searchtest.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,238 +0,0 @@ -import gc - -from miro import messages -from miro import models -from miro import search -from miro import ngrams -from miro import itemsource -from miro.item import FeedParserValues -from miro.singleclick import _build_entry -from miro.test.framework import MiroTestCase -from miro.frontends.widgets.itemtrack import SearchFilter - -class NGramTest(MiroTestCase): - def test_simple(self): - results = ngrams.breakup_word('foobar', 2, 3) - self.assertSameSet(results, [ - 'fo', 'oo', 'ob', 'ba', 'ar', - 'foo', 'oob', 'oba', 'bar']) - - def test_list(self): - word_list = ['foo', 'bar', 'bazbaz'] - results = ngrams.breakup_list(word_list, 2, 3) - self.assertSameSet(results, [ - 'fo', 'oo', 'foo', - 'ba', 'ar', 'bar', - 'az', 'zb', 'baz', 'azb', 'zba']) - -class SearchTest(MiroTestCase): - def setUp(self): - MiroTestCase.setUp(self) - self.feed = models.Feed(u'http://example.com/') - self.item1 = self.make_item(u'http://example.com/', u'my first item') - self.item2 = self.make_item(u'http://example.com/', u'my second item') - - def make_item(self, url, title=u'default item title'): - additional = {'title': title} - entry = _build_entry(url, 'video/x-unknown', additional) - item = models.Item(FeedParserValues(entry), feed_id=self.feed.id) - return itemsource.DatabaseItemSource._item_info_for(item) - - def assertMatches(self, query, item_info): - self.assertTrue(search.item_matches(item_info, query)) - - def assertNotMatches(self, query, item_info): - self.assertFalse(search.item_matches(item_info, query)) - - def test_item_matches(self): - self.assertMatches('first', self.item1) - self.assertNotMatches('first', self.item2) - self.assertMatches('second', self.item2) - self.assertNotMatches('second', self.item1) - self.assertMatches('my', self.item1) - self.assertMatches('my', self.item2) - self.assertNotMatches('foo', self.item1) - self.assertNotMatches('foo', self.item2) - - def test_item_matches_substring(self): - self.assertMatches('eco', self.item2) - self.assertNotMatches('eco', self.item1) - self.assertMatches('irst', self.item1) - self.assertNotMatches('irst', self.item2) - - def test_item_matches_short(self): - # try a 
3-character search. This is the shortest search that we have - # n-grams for. - self.assertMatches('ond', self.item2) - self.assertNotMatches('ond', self.item1) - # all searches less than 3 characters should match everything - self.assertMatches('', self.item1) - self.assertMatches('', self.item2) - self.assertMatches('d', self.item1) - self.assertMatches('d', self.item2) - self.assertMatches('st', self.item1) - self.assertMatches('st', self.item2) - - def test_item_matches_case_insensitive(self): - self.assertMatches('FiRsT', self.item1) - self.assertNotMatches('FiRsT', self.item2) - self.assertMatches('sEcOnD', self.item2) - self.assertNotMatches('sEcOnD', self.item1) - - def test_list_matches(self): - items = [self.item1, self.item2] - self.assertEquals(list(search.list_matches(items, 'first')), - [self.item1]) - self.assertEquals(list(search.list_matches(items, 'second')), - [self.item2]) - self.assertEquals(list(search.list_matches(items, 'my')), - [self.item1, self.item2]) - self.assertEquals(list(search.list_matches(items, 'foo')), - []) - - def test_ngrams_for_term(self): - self.assertEquals(search._ngrams_for_term('abc'), - ['abc']) - self.assertEquals(search._ngrams_for_term('five'), - ['five']) - self.assertEquals(search._ngrams_for_term('verybig'), - ['veryb', 'erybi', 'rybig']) - -class ItemSearcherTest(MiroTestCase): - def setUp(self): - MiroTestCase.setUp(self) - self.searcher = search.ItemSearcher() - self.feed = models.Feed(u'http://example.com/') - self.item1 = self.make_item(u'http://example.com/', u'my first item') - self.item2 = self.make_item(u'http://example.com/', u'my second item') - - def make_item(self, url, title=u'default item title'): - additional = {'title': title} - entry = _build_entry(url, 'video/x-unknown', additional) - item = models.Item(FeedParserValues(entry), feed_id=self.feed.id) - self.searcher.add_item(self.make_info(item)) - return item - - def make_info(self, item): - return itemsource.DatabaseItemSource._item_info_for(item) - - def check_search_results(self, search_text, *correct_items): - correct_ids = [i.id for i in correct_items] - self.assertSameSet(self.searcher.search(search_text), correct_ids) - - def check_empty_result(self, search_text): - self.assertSameSet(self.searcher.search(search_text), []) - - def test_match(self): - self.check_search_results('my', self.item1, self.item2) - self.check_search_results('first', self.item1) - self.check_empty_result('miro') - - def test_update(self): - self.item1.set_title(u'my new title') - self.searcher.update_item(self.make_info(self.item1)) - self.check_search_results('my', self.item1, self.item2) - self.check_search_results('item', self.item2) - self.check_search_results('title', self.item1) - self.check_empty_result('first') - - def test_remove(self): - self.searcher.remove_item(self.make_info(self.item2).id) - self.check_search_results('my', self.item1) - self.check_empty_result('second') - -class SearchFilterTest(MiroTestCase): - def setUp(self): - MiroTestCase.setUp(self) - self.feed = models.Feed(u'http://example.com/') - self.initial_list = [] - self.added_objects = [] - self.changed_objects = [] - self.removed_objects = [] - self.filterer = SearchFilter() - self.info1 = self.make_info(u'info one') - self.info2 = self.make_info(u'info two') - self.info3 = self.make_info(u'info three') - self.info4 = self.make_info(u'info four') - - def make_info(self, title): - additional = {'title': title} - url = u'http://example.com/' - entry = _build_entry(url, 'video/x-unknown', additional) - item = 
models.Item(FeedParserValues(entry), feed_id=self.feed.id) - return itemsource.DatabaseItemSource._item_info_for(item) - - def check_initial_list_filter(self, initial_list, filtered_list): - self.assertEquals(self.filterer.filter_initial_list(initial_list), - filtered_list) - - def check_changed_filter(self, added, changed, removed, - filtered_added, filtered_changed, filtered_removed): - removed_ids = [i.id for i in removed] - filtered_removed_ids = [i.id for i in filtered_removed] - results = self.filterer.filter_changes(added, changed, removed_ids) - self.assertSameSet(results[0], filtered_added) - self.assertSameSet(results[1], filtered_changed) - self.assertSameSet(results[2], filtered_removed_ids) - - def check_search_change(self, query, filtered_added, filtered_removed): - filtered_removed_ids = [i.id for i in filtered_removed] - results = self.filterer.set_search(query) - self.assertSameSet(results[0], filtered_added) - self.assertSameSet(results[1], filtered_removed_ids) - - def send_items_changed_message(self, added, changed, removed): - removed = [i.id for i in removed] - message = messages.ItemsChanged('mytpe', 123, added, changed, removed) - self.filterer.handle_items_changed(message) - - def update_info(self, info, name): - info.name = name - info.search_terms = search.calc_search_terms(info) - - def test_initial_list(self): - # try with no search just to see - self.check_initial_list_filter([self.info1, self.info2], - [self.info1, self.info2]) - # try again with a search set - self.filterer = SearchFilter() - self.filterer.set_search("two") - self.check_initial_list_filter([self.info1, self.info2], [self.info2]) - - def test_change_search(self): - self.filterer.filter_initial_list([self.info1, self.info2]) - # info1 doesn't matches the search, it should be removed - self.check_search_change("two", [], [self.info1]) - # info1 matches now, item2 doesn't - self.check_search_change("one", [self.info1], [self.info2]) - - def test_add(self): - # setup initial state - self.filterer.filter_initial_list([self.info1, self.info2]) - self.filterer.set_search("three") - # only info3 matches the search, it should be the only one added - self.check_changed_filter([self.info3, self.info4], [], [], - [self.info3], [], []) - - def test_update(self): - # setup initial state - infos = [self.info1, self.info2, self.info3] - self.filterer.filter_initial_list(infos) - self.filterer.set_search("three") - # info1 now matches the search, it should be added - # info3 matched the search before and now, so it should be changed - self.update_info(self.info1, u'three') - self.check_changed_filter([], infos, [], - [self.info1], [self.info3], []) - # info1 no longer matches the search, it should be removed - # info3 matched the search before and now, so it should be changed - self.update_info(self.info1, u'one') - self.check_changed_filter([], infos, [], - [], [self.info3], [self.info1]) - - def test_remove(self): - self.filterer.filter_initial_list([self.info1, self.info2]) - self.filterer.set_search("two") - # only info2 matches the search, so removed should only include it - self.check_changed_filter([], [], [self.info1, self.info2], - [], [], [self.info2]) diff -Nru miro-4.0.4/lib/test/sharingtest.py miro-6.0/lib/test/sharingtest.py --- miro-4.0.4/lib/test/sharingtest.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/sharingtest.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,623 @@ +# Miro - an RSS based video player application +# Copyright (C) 2010, 2011 +# Participatory Culture 
Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +from miro import sharing +import os + +import sqlite3 + +from miro import app +from miro import messages +from miro import messagehandler +from miro import models +from miro import prefs +from miro import startup +from miro.data import mappings +from miro.test import mock +from miro.test import testobjects +from miro.test.framework import MiroTestCase, EventLoopTest + +class ShareTest(MiroTestCase): + # Test the backend Share object + def setUp(self): + MiroTestCase.setUp(self) + # No need to have Share objects create real SharingItemTrackerImpl + # instances + self.MockSharingItemTrackerIpml = self.patch_for_test( + 'miro.sharing.SharingItemTrackerImpl') + + def test_database_paths(self): + share1 = testobjects.make_share('TestShare') + self.assertEquals(share1.db_path, + os.path.join(self.sandbox_support_directory, + 'sharing-db-0')) + share2 = testobjects.make_share('TestShare2') + self.assertEquals(share2.db_path, + os.path.join(self.sandbox_support_directory, + 'sharing-db-1')) + + def test_database_create_deletes_old_files(self): + # test that if there's a file leftover from a previous miro run, we + # delete it and then re-use it + old_path = os.path.join(self.sandbox_support_directory, + 'sharing-db-0') + with open(old_path, 'wb') as f: + f.write("old data") + share = testobjects.make_share() + self.assertEquals(share.db_path, old_path) + # check that we opened a sqlite database on that path and overwrote + # the old data + if open(old_path).read() == 'old data': + raise AssertionError("Didn't overwrite old path") + + def test_create_and_destroy(self): + share = testobjects.make_share() + self.assertNotEquals(share.db_path, None) + self.assertNotEquals(share.db_info, None) + old_path = share.db_path + share.destroy() + self.assertEquals(share.db_path, None) + self.assertEquals(share.db_info, None) + if os.path.exists(old_path): + raise AssertionError("Calling Share.destroy() " + "didn't delete database") + + def test_start_tracking(self): + share = testobjects.make_share() + self.assertEquals(share.tracker, None) + # Calling start_tracking should create a SharingItemTrackerImpl + share.start_tracking() + self.assertNotEquals(share.tracker, None) + tracker = share.tracker 
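For context, the guarded lifecycle that test_start_tracking and the two tests below pin down amounts to roughly the following sketch (an assumed shape, not the actual miro.sharing.Share implementation):

class ShareLifecycleSketch(object):
    def __init__(self, tracker_factory):
        self.tracker = None
        self._tracker_factory = tracker_factory

    def start_tracking(self):
        # idempotent: a second call keeps the existing tracker
        if self.tracker is None:
            self.tracker = self._tracker_factory()

    def stop_tracking(self):
        # safe to call twice; client_disconnect() runs exactly once
        if self.tracker is not None:
            self.tracker.client_disconnect()
            self.tracker = None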
+ share.stop_tracking() + self.assertEquals(share.tracker, None) + # check that we call client_disconnect() + self.assertEquals(tracker.client_disconnect.call_count, 1) + # Calling start_tracking() again should re-create the tracker + share.start_tracking() + self.assertNotEquals(share.tracker, None) + + def test_start_tracking_twice(self): + # Check that calling start_tracking() twice in a row is safe + share = testobjects.make_share() + share.start_tracking() + first_tracker = share.tracker + share.start_tracking() + # second call should keep the same tracker + self.assertEquals(share.tracker, first_tracker) + + def test_stop_tracking_twice(self): + # Check that calling stop_tracking() twice in a row is safe + share = testobjects.make_share() + share.start_tracking() + share.stop_tracking() + self.assertEquals(share.tracker, None) + # second call shouldn't cause an error + share.stop_tracking() + +class SharingTest(EventLoopTest): + def setUp(self): + EventLoopTest.setUp(self) + self.share = testobjects.make_share() + self.playlist_item_map = mappings.SharingItemPlaylistMap( + self.share.db_info.db.connection) + # Replace threading.Thread() with a mock object so that + # SharingItemTrackerImpl objects don't create real threads. + self.patch_for_test('threading.Thread') + # Replace update_started() and update_finished() with mock objects. + # We want to ignore the TabsChanged messages that they send out. + self.patch_for_test('miro.sharing.Share.update_started') + self.patch_for_test('miro.sharing.Share.update_finished') + # also use a Mock object for the daap client + self.client = testobjects.MockDAAPClient() + self.patch_function('miro.libdaap.make_daap_client', + lambda *args, **kwargs: self.client) + self.MockTabsChanged = self.patch_for_test( + 'miro.messages.TabsChanged') + + def check_sharing_item_tracker_db(self, db_path): + if not os.path.exists(db_path): + raise AssertionError("SharingItemTrackerImpl didn't create " + "its database") + # do a quick check on the DB schema + conn = sqlite3.connect(db_path) + cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'") + table_names = [r[0] for r in cursor] + conn.close() + self.assertSameSet(table_names, + ['sharing_item', 'dtv_variables']) + + def check_tracker_items(self, correct_items): + # check the SharingItems in the database. correct_items should be a + # dictionary that maps item ids to item titles. + item_view = models.SharingItem.make_view(db_info=self.share.db_info) + data_from_db = dict((i.id, i.title) for i in item_view) + # check that the IDs are correct + self.assertSameSet(data_from_db.keys(), correct_items.keys()) + # check that the titles are correct + self.assertEquals(data_from_db, correct_items) + + def check_playlist_items_map(self, correct_playlist_items): + """Check the data in playlist_item_map + + :param correct_playlist_items: dict mapping playlist ids to the items + that should be in them. + """ + self.assertEquals(self.playlist_item_map.get_map(), + correct_playlist_items) + + def make_daap_items(self, items_dict): + """Given a dict mapping item ids to item titles, create a list of + DAAP items with those ids and titles.
+ """ + return [testobjects.make_mock_daap_item(item_id, title) + for (item_id, title) in items_dict.items()] + + def check_client_connect(self): + """Check the initial pass that creates items for + SharingItemTrackerImpl """ + # simulate the inital pass + client_connect_result = self.share.tracker.client_connect() + # we shouldn't touch the DB in setup_items() + self.check_tracker_items({}) + self.check_playlist_items_map({}) + # check the results of the initial pass + self.share.tracker.client_connect_callback(client_connect_result) + correct_items = dict((key, item['dmap.itemname']) + for key, item in + self.client.current_items().items()) + self.check_tracker_items(correct_items) + correct_playlist_items = dict( + (playlist_id, set(item_ids)) + for (playlist_id, item_ids) in + self.client.current_playlist_item_map().items()) + self.check_playlist_items_map(correct_playlist_items) + + def check_client_update(self): + """Check the an update run for SharingItemTrackerImpl """ + # calculate the what items are in the DB before the update + item_view = models.SharingItem.make_view(db_info=self.share.db_info) + items_before_update = dict((i.id, i.title) for i in item_view) + playlist_items_before_update = self.playlist_item_map.get_map() + # run the update + client_update_result = self.share.tracker.client_update() + # we shouldn't touch the DB in setup_items() + self.check_tracker_items(items_before_update) + self.check_playlist_items_map(playlist_items_before_update) + # check the results of the update + self.share.tracker.client_update_callback(client_update_result) + correct_items = dict((key, item['dmap.itemname']) + for key, item in + self.client.current_items().items()) + self.check_tracker_items(correct_items) + correct_playlist_items = dict( + (playlist_id, set(item_ids)) + for (playlist_id, item_ids) in + self.client.current_playlist_item_map().items()) + self.check_playlist_items_map(correct_playlist_items) + + def daap_playlist_tab_id(self, daap_id): + return 'sharing-%s-%s' % (self.share.id, daap_id) + + def check_tabs_changed(self, correct_added_ids, correct_changed_ids, + correct_removed_ids): + self.assertEquals(self.MockTabsChanged.call_count, 1) + type_, added, changed, removed = self.MockTabsChanged.call_args[0] + self.assertEquals(type_, 'connect') + current_playlists = self.client.current_playlists() + # check ids for added/removed/changed playlists + self.assertEquals([info.id for info in added], + [self.daap_playlist_tab_id(daap_id) + for daap_id in correct_added_ids]) + self.assertEquals([info.id for info in changed], + [self.daap_playlist_tab_id(daap_id) + for daap_id in correct_changed_ids]) + self.assertSameSet(removed, correct_removed_ids) + # check that the info for added/changed playlists is correct + for info in added + changed: + playlist_data = current_playlists[info.playlist_id] + self.assertEquals(info.name, playlist_data['dmap.itemname']) + podcast_key = 'com.apple.itunes.is-podcast-playlist' + self.assertEquals(info.podcast, + playlist_data.get(podcast_key, False)) + + self.MockTabsChanged.reset_mock() + + + def test_sharing_items(self): + # test sharing items created/update/delete + + # test initial item creation + self.share.start_tracking() + self.client.set_items(self.make_daap_items( + {1: 'title-1', 2: 'title-2'})) + self.check_client_connect() + # test item update. 
+ # item 1 is updated, item 2 is deleted, and item 3 is added + self.client.set_items(self.make_daap_items( + {1: 'new-title-1', 3: 'new-title-3'})) + self.check_client_update() + + def test_playlists(self): + # test sending TabInfo updates for playlists + + # test initial item creation + # only playlists with items should be created + self.share.start_tracking() + self.MockTabsChanged.reset_mock() + self.client.set_items(self.make_daap_items( + {1: 'title-1', 2: 'title-2'})) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(101, 'playlist-1') + ) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(102, 'playlist-2') + ) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(103, 'playlist-3') + ) + self.client.set_playlist_items(101, [1]) + self.client.set_playlist_items(102, [1, 2]) + self.check_client_connect() + self.check_tabs_changed([101, 102], [], []) + + # check updating playlists + self.client.add_playlist( + testobjects.make_mock_daap_playlist(104, 'playlist-4') + ) + self.client.set_playlist_items(104, [1, 2]) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(102, 'new-playlist-2', + is_podcast=True) + ) + self.check_client_update() + self.check_tabs_changed([104], [102], []) + + # check playlist deletion + self.client.remove_playlist(101) + # 102 gets removed because it no longer has any items + self.client.set_playlist_items(102, []) + # 103 isn't in the removed list because it never contained items, + # so it was never added in the first place + self.client.remove_playlist(103) + self.check_client_update() + self.check_tabs_changed([], [], [101, 102]) + + # check that adding items to an empty playlist results in it being + # added + self.client.set_playlist_items(102, [1]) + self.check_client_update() + self.check_tabs_changed([102], [], []) + + def test_disconnect_removes_playlists(self): + # test that playlist tabs get removed after the client disconnects + self.share.start_tracking() + self.MockTabsChanged.reset_mock() + self.client.set_items(self.make_daap_items( + {1: 'title-1', 2: 'title-2'})) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(101, 'playlist-1') + ) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(102, 'playlist-2') + ) + self.client.set_playlist_items(101, [1]) + self.check_client_connect() + self.check_tabs_changed([101], [], []) + # run the code that happens after the tracker disconnects + # playlist 1 should be removed. playlist 2 was never added, so it + # shouldn't be in the message.
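The cleanup call below plausibly works by remembering which playlist tabs were announced and emitting removals for exactly that set; a hedged sketch, with send_tabs_changed standing in for the real message-sending code:

def disconnect_cleanup(announced_playlist_ids, send_tabs_changed):
    # only tabs that were actually announced need a removal entry;
    # playlists that never held items were never announced, so they
    # are skipped automatically
    send_tabs_changed(added=[], changed=[],
                      removed=sorted(announced_playlist_ids))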
+ self.share.tracker.client_disconnect_callback_common() + self.check_tabs_changed([], [], [101]) + + def test_nul_in_playlist_data(self): + # test that we remove NUL chars from playlist data (#17537) + self.share.start_tracking() + self.MockTabsChanged.reset_mock() + self.client.set_items(self.make_daap_items({1: 'title-1'})) + self.client.add_playlist( + testobjects.make_mock_daap_playlist(101, 'playlist-\0\0one\0') + ) + self.client.set_playlist_items(101, [1]) + self.share.tracker.client_connect_callback( + self.share.tracker.client_connect()) + type_, added, changed, removed = self.MockTabsChanged.call_args[0] + self.assertEquals(added[0].name, "playlist-one") + + def test_nul_in_item_data(self): + # test that we remove NUL chars from item data (#17537) + self.share.start_tracking() + self.client.set_items(self.make_daap_items( + {1: 'title-\0\0one\0'})) + self.share.tracker.client_connect_callback( + self.share.tracker.client_connect()) + db_item = models.SharingItem.get_by_daap_id( + 1, db_info=self.share.db_info) + self.assertEquals(db_item.title, "title-one") + +class SharingServerTest(EventLoopTest): + """Test the sharing server.""" + def setUp(self): + EventLoopTest.setUp(self) + # need to call setup_tabs for PlaylistTracker() and + # ChannelTracker() + startup.setup_tabs() + self.setup_config() + self.setup_data() + + def setup_config(self): + app.config.set(prefs.SHARE_AUDIO, True) + app.config.set(prefs.SHARE_VIDEO, False) + app.config.set(prefs.SHARE_FEED, True) + + def setup_data(self): + self.manual_feed = testobjects.make_manual_feed() + self.feed_with_downloads = testobjects.make_feed() + self.feed_without_downloads = testobjects.make_feed() + self.audio_items = [] + self.video_items = [] + self.undownloaded_items = [] + for i in xrange(10): + self.audio_items.append(testobjects.make_file_item( + self.feed_with_downloads, u'audio-item-%s' % i, ext='.mp3')) + self.video_items.append(testobjects.make_file_item( + self.manual_feed, u'video-item-%s' % i, ext='.avi')) + self.undownloaded_items.append(testobjects.make_item( + self.feed_without_downloads, u'feed-item-%s' % i)) + + self.audio_playlist = models.SavedPlaylist(u'My music', + [i.id for i in self.audio_items]) + # put some videos in the videos playlist. 
These will be sent back by + # the server, even if SHARE_VIDEO is not set + self.video_playlist_items = self.video_items[:5] + self.video_playlist = models.SavedPlaylist(u'My best videos', + [i.id for i in self.video_playlist_items]) + app.db.finish_transaction() + models.Item.change_tracker.reset() + + def setup_sharing_manager_backend(self): + self.backend = sharing.SharingManagerBackend() + self.backend.start_tracking() + + def check_daap_list(self, daap_list, ddb_objects): + """Check data in our SharingManagerBackend + + :param daap_list: The return value of get_items() or get_playlists() + :param ddb_objects: list of Item or Playlist objects to check against + """ + daap_ids_and_names = [] + for daap_id, daap_data in daap_list.items(): + if daap_data['valid']: + # do a quick check that the key for the dictionary is the same + # as dmap.itemid + self.assertEquals(daap_id, daap_data['dmap.itemid']) + daap_ids_and_names.append( + (daap_id, daap_data['dmap.itemname'])) + correct_ids_and_names = [(obj.id, obj.get_title()) + for obj in ddb_objects] + self.assertSameSet(daap_ids_and_names, correct_ids_and_names) + # check that the podcast key gets set for podcasts + for obj in ddb_objects: + if isinstance(obj, models.Feed): + daap_data = daap_list[obj.id] + if not daap_data.get(sharing.DAAP_PODCAST_KEY): + msg = ("DAAP item %s doesn't have %s set" % + (daap_data['dmap.itemname'], + sharing.DAAP_PODCAST_KEY)) + raise AssertionError(msg) + + def check_daap_item_deleted(self, item_list, ddb_object): + self.assertEquals(item_list[ddb_object.id]['valid'], False) + + def check_get_revision_will_block(self, old_revision): + self.assertEquals(self.backend.data_set.revision, old_revision) + + def send_changes_from_trackers(self): + app.db.finish_transaction() + models.Item.change_tracker.send_changes() + self.backend.data_set.after_event_finished(mock.Mock(), True) + + def test_initial_list(self): + self.setup_sharing_manager_backend() + # test getting all items + self.check_daap_list(self.backend.get_items(), + self.audio_items + self.video_playlist_items) + # test getting playlists + self.check_daap_list(self.backend.get_playlists(), + [self.audio_playlist, self.video_playlist, + self.feed_with_downloads, self.feed_without_downloads]) + # test getting items for a playlist + self.check_daap_list(self.backend.get_items(self.audio_playlist.id), + self.audio_items) + self.check_daap_list( + self.backend.get_items(self.video_playlist.id), + self.video_playlist_items) + self.check_daap_list( + self.backend.get_items(self.feed_with_downloads.id), + self.audio_items) + self.check_daap_list( + self.backend.get_items(self.feed_without_downloads.id), []) + + def test_initial_list_no_feeds(self): + app.config.set(prefs.SHARE_FEED, False) + self.setup_sharing_manager_backend() + self.check_daap_list(self.backend.get_playlists(), + [self.audio_playlist, self.video_playlist]) + + def test_item_changes(self): + self.setup_sharing_manager_backend() + initial_revision = self.backend.data_set.revision + added = self.video_items[0] + added.set_user_metadata({'file_type': u'audio'}) + added.signal_change() + changed = self.audio_items[0] + changed.set_user_metadata({'title': u'New title'}) + changed.signal_change() + removed = self.audio_items[-1] + removed.remove() + self.check_get_revision_will_block(initial_revision) + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, initial_revision) + # get_items() should reflect the changes + new_item_list = ([added] + self.audio_items[:-1]
+ + self.video_playlist_items) + self.check_daap_list(self.backend.get_items(), new_item_list) + self.check_daap_item_deleted(self.backend.get_items(), removed) + + def test_feed_changes(self): + self.setup_sharing_manager_backend() + initial_revision = self.backend.data_set.revision + new_feed = testobjects.make_feed() + self.feed_with_downloads.set_title(u'New Title') + self.feed_without_downloads.remove() + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, initial_revision) + self.check_daap_list(self.backend.get_playlists(), + [new_feed, self.feed_with_downloads, self.audio_playlist, + self.video_playlist]) + self.check_daap_item_deleted(self.backend.get_playlists(), + self.feed_without_downloads) + # test adding items + second_revision = self.backend.data_set.revision + for item in self.video_items: + item.set_feed(new_feed.id) + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, second_revision) + self.check_daap_list(self.backend.get_items(new_feed.id), + self.video_items) + # test removing items + third_revision = self.backend.data_set.revision + for item in self.video_items[4:]: + item.remove() + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, third_revision) + self.check_daap_list(self.backend.get_items(new_feed.id), + self.video_items[:4]) + + def test_playlist_changes(self): + self.setup_sharing_manager_backend() + initial_revision = self.backend.data_set.revision + new_playlist = models.SavedPlaylist(u'My Playlist') + self.audio_playlist.set_title(u'My Audio Files') + self.video_playlist.remove() + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, initial_revision) + self.check_daap_list(self.backend.get_playlists(), + [self.feed_with_downloads, self.feed_without_downloads, + self.audio_playlist, new_playlist]) + self.check_daap_item_deleted(self.backend.get_playlists(), + self.video_playlist) + # test adding items + second_revision = self.backend.data_set.revision + for item in self.video_items: + new_playlist.add_item(item) + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, second_revision) + self.check_daap_list(self.backend.get_items(new_playlist.id), + self.video_items) + # test removing items + third_revision = self.backend.data_set.revision + for item in self.video_items[4:]: + new_playlist.remove_item(item) + self.send_changes_from_trackers() + self.assertNotEquals(self.backend.data_set.revision, third_revision) + self.check_daap_list(self.backend.get_items(new_playlist.id), + self.video_items[:4]) + + def test_change_share_feed(self): + self.setup_sharing_manager_backend() + initial_revision = self.backend.data_set.revision + app.config.set(prefs.SHARE_FEED, False) + self.assertNotEquals(self.backend.data_set.revision, initial_revision) + self.check_daap_list(self.backend.get_items(), + self.audio_items + self.video_playlist_items) + self.check_daap_list(self.backend.get_playlists(), + [self.audio_playlist, self.video_playlist]) + self.check_daap_item_deleted(self.backend.get_playlists(), + self.feed_with_downloads) + self.check_daap_item_deleted(self.backend.get_playlists(), + self.feed_without_downloads) + + def test_change_share_video(self): + self.setup_sharing_manager_backend() + initial_revision = self.backend.data_set.revision + app.config.set(prefs.SHARE_VIDEO, True) + self.assertNotEquals(self.backend.data_set.revision, initial_revision) + + 
self.check_daap_list(self.backend.get_items(), + self.audio_items + self.video_items) + app.config.set(prefs.SHARE_VIDEO, False) + self.check_daap_list(self.backend.get_items(), + self.audio_items + self.video_playlist_items) + for item in self.video_items: + if item not in self.video_playlist_items: + self.check_daap_item_deleted(self.backend.get_items(), item) + + def test_client_disconnects_in_get_revision(self): + # get_revision() blocks waiting for changes, but it should return if + # the client disconnects. Test that this happens. + self.setup_sharing_manager_backend() + mock_socket = mock.Mock() + # We use a threading.Condition object to wait for changes. + self.wait_count = 0 + def mock_wait(timeout=None): + # we must use a timeout since we want to poll the socket + self.assertNotEquals(timeout, None) + if self.wait_count > 2: + raise AssertionError("wait called too many times") + self.wait_count += 1 + self.backend.data_set.condition.wait = mock_wait + + # We use select() to check if the socket is closed. + self.select_count = 0 + def mock_select(rlist, wlist, xlist, timeout=None): + self.assertEquals(timeout, 0) + self.assertEquals(rlist, [mock_socket]) + if self.select_count == 0: + # first time around, return nothing + rv = [], [], [] + elif self.select_count == 1: + # second time around, return the socket as available for + # reading. This happens when the socket gets closed + rv = [mock_socket], [], [] + else: + raise AssertionError("select called too many times") + self.select_count += 1 + return rv + self.patch_function('select.select', mock_select) + # calling get_revision() should set all the wheels in motion + initial_revision = self.backend.data_set.revision + new_revision = self.backend.get_revision(mock.Mock(), + initial_revision, + mock_socket) + # get_revision() should have returned before any changes happened.
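Putting the two mocks together: get_revision() evidently loops on a threading.Condition with a timeout, polling select() with timeout 0 so it can bail out once the client socket closes. A hedged sketch of that loop, with assumed names rather than the real miro.sharing signatures:

import select

def get_revision_sketch(data_set, client_revision, sock, poll_interval=1.0):
    with data_set.condition:
        while data_set.revision == client_revision:
            # a readable socket here means the client hung up, so stop
            # waiting for changes and return whatever revision we have
            readable, _, _ = select.select([sock], [], [], 0)
            if readable:
                break
            # wait with a timeout so the socket keeps getting polled
            data_set.condition.wait(poll_interval)
    return data_set.revision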
+ self.assertEquals(initial_revision, new_revision) + + # FIXME: implement this + # def test_get_file(self): + # pass diff -Nru miro-4.0.4/lib/test/signalstest.py miro-6.0/lib/test/signalstest.py --- miro-4.0.4/lib/test/signalstest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/signalstest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,3 +1,6 @@ +import itertools +import random + from miro import signals from miro.test.framework import MiroTestCase @@ -23,7 +26,7 @@ self.callbacks = [] self.signaller = TestSignaller() MiroTestCase.setUp(self) - + def callback(self, *args): self.callbacks.append(args) @@ -52,6 +55,48 @@ self.assertRaises(ValueError, self.signaller.connect_weak, 'signal1', callback_obj.callback) + def test_connect_before_after(self): + # test handlers connected using connect_after() and connect_before() + self.callback_before_called = False + self.callback_normal_called = False + self.callback_after_called = False + counts_for_callback = [] + counts_for_callback_after = [] + counts_for_callback_before = [] + # make a bunch of callbacks and connect them with connect(), + # connect_before(), or connect_after() + for x in range(100): + def callback_before(obj): + self.callback_before_called = True + if self.callback_after_called: + raise AssertionError( + "callback connected with connect_after() called " + "before callback connected with connect_before()") + if self.callback_normal_called: + raise AssertionError( + "callback connected with connect() called before " + "callback connected with connect_before()") + def callback_normal(obj): + self.callback_normal_called = True + if self.callback_after_called: + raise AssertionError( + "callback connected with connect_after() called " + "before callback connected with connect()") + def callback_after(obj): + self.callback_after_called = True + choice = random.choice([callback_before, + callback_normal, + callback_after]) + if choice == callback_before: + self.signaller.connect_before('signal1', callback_before) + elif choice == callback_normal: + self.signaller.connect('signal1', callback_normal) + else: + self.signaller.connect_after('signal1', callback_after) + # emit our signal and the signal handlers will ensure that the + # callbacks execute in the right order + self.signaller.emit('signal1') + def test_nested_call(self): def callback(obj): # emitting signal1 while handling signal1 should raise an error diff -Nru miro-4.0.4/lib/test/storedatabasetest.py miro-6.0/lib/test/storedatabasetest.py --- miro-4.0.4/lib/test/storedatabasetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/storedatabasetest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,11 +1,16 @@ from datetime import datetime import os import unittest +import string +import random import time +import sqlite3 + from miro import app from miro import database from miro import databaseupgrade +from miro import devices from miro import dialogs from miro import downloader from miro import item @@ -23,11 +28,13 @@ from miro.plat import resources from miro.plat.utils import PlatformFilenameType -from miro.test.framework import EventLoopTest, skip_for_platforms +from miro.test import mock +from miro.test.framework import (MiroTestCase, EventLoopTest, + skip_for_platforms, MatchAny) from miro.schema import (SchemaString, SchemaInt, SchemaFloat, SchemaReprContainer, SchemaList, SchemaDict, SchemaBool, SchemaFilename, - SchemaBinary) + SchemaBinary, SchemaStringSet) # create a dummy object schema class Human(database.DDBObject): @@ -44,6 +51,7 @@ self.high_scores =
high_scores self.stuff = stuff self.id_code = None + self.favorite_colors = set([u'red', u'blue']) def add_friend(self, friend): self.friends.append(friend) @@ -85,6 +93,7 @@ ('high_scores', SchemaDict(SchemaString(), SchemaInt())), ('stuff', SchemaReprContainer(noneOk=True)), ('id_code', SchemaBinary(noneOk=True)), + ('favorite_colors', SchemaStringSet(delimiter='@')), ] @staticmethod @@ -145,7 +154,6 @@ def tearDown(self): # need to close the db before removing it from disk - from miro import app app.db.close() self.remove_database() corrupt_path = os.path.join(os.path.dirname(self.save_path), @@ -158,7 +166,7 @@ class EmptyDBTest(StoreDatabaseTest): def test_open_empty_db(self): self.reload_test_database() - app.db.cursor.execute("SELECT name FROM sqlite_master " + app.db.cursor.execute("SELECT name FROM main.sqlite_master " "WHERE type='table'") for row in app.db.cursor.fetchall(): table = row[0] @@ -181,33 +189,52 @@ pass StoreDatabaseTest.tearDown(self) - @skip_for_platforms('win32') - def test_indexes_same(self): - # this fails on windows because it's using a non-Windows - # database + def load_fresh_database(self): self.remove_database() self.reload_database() - app.db.cursor.execute("SELECT name FROM sqlite_master " - "WHERE type='index'") - blank_db_indexes = set(app.db.cursor) + self.db = app.db + + def load_upgraded_database(self): shutil.copy(resources.path("testdata/olddatabase.v79"), self.save_path2) self.reload_database(self.save_path2) - app.db.cursor.execute("SELECT name FROM sqlite_master " - "WHERE type='index'") - upgraded_db_indexes = set(app.db.cursor) + self.db = app.db + + @skip_for_platforms('win32') + def test_indexes_same(self): + # this fails on windows because it's using a non-Windows + # database + self.load_fresh_database() + self.db.cursor.execute("SELECT name FROM main.sqlite_master " + "WHERE type='index'") + blank_db_indexes = set(self.db.cursor) + self.load_upgraded_database() + self.db.cursor.execute("SELECT name FROM main.sqlite_master " + "WHERE type='index'") + upgraded_db_indexes = set(self.db.cursor) + self.assertEquals(upgraded_db_indexes, blank_db_indexes) + + @skip_for_platforms('win32') + def test_triggers_same(self): + # this fails on windows because it's using a non-Windows + # database + self.load_fresh_database() + self.db.cursor.execute("SELECT name, sql FROM main.sqlite_master " + "WHERE type='trigger'") + blank_db_indexes = set(self.db.cursor) + self.load_upgraded_database() + self.db.cursor.execute("SELECT name, sql FROM main.sqlite_master " + "WHERE type='trigger'") + upgraded_db_indexes = set(self.db.cursor) self.assertEquals(upgraded_db_indexes, blank_db_indexes) @skip_for_platforms('win32') def test_schema_same(self): # this fails on windows because it's using a non-Windows # database - self.remove_database() - self.reload_database() + self.load_fresh_database() blank_column_types = self._get_column_types() - shutil.copy(resources.path("testdata/olddatabase.v79"), - self.save_path2) - self.reload_database(self.save_path2) + self.load_upgraded_database() upgraded_column_types = self._get_column_types() self.assertEquals(set(blank_column_types.keys()), set(upgraded_column_types.keys())) @@ -219,14 +246,38 @@ (table_name, diff)) def _get_column_types(self): - app.db.cursor.execute("SELECT name FROM sqlite_master " + self.db.cursor.execute("SELECT name FROM main.sqlite_master " "WHERE type='table'") rv = {} - for table_name in app.db.cursor.fetchall(): - app.db.cursor.execute('pragma table_info(%s)' % table_name) - rv[table_name] = 
set((r[1], r[2].lower()) for r in app.db.cursor) + for table_name in [r[0] for r in self.db.cursor.fetchall()]: + self.db.cursor.execute('pragma table_info(%s)' % table_name) + rv[table_name] = set((r[1], r[2].lower()) for r in self.db.cursor) return rv +class DeviceDBUpgradeTest(DBUpgradeTest): + def setUp(self): + StoreDatabaseTest.setUp(self) + self.save_path2 = self.make_temp_path() + + def tearDown(self): + try: + os.unlink(self.save_path2) + except OSError: + pass + StoreDatabaseTest.tearDown(self) + + def load_fresh_database(self): + device_mount = self.make_temp_dir_path() + os.makedirs(os.path.join(device_mount, '.miro')) + self.db = devices.load_sqlite_database(device_mount, 1024) + + def load_upgraded_database(self): + device_mount = self.make_temp_dir_path() + os.makedirs(os.path.join(device_mount, '.miro')) + shutil.copyfile(resources.path('testdata/5.x-device-database.sqlite'), + os.path.join(device_mount, '.miro', 'sqlite')) + self.db = devices.load_sqlite_database(device_mount, 1024) + class FakeSchemaTest(StoreDatabaseTest): OBJECT_SCHEMAS = test_object_schemas @@ -243,6 +294,13 @@ databaseupgrade._upgrade_overide[2] = upgrade2 class DiskTest(FakeSchemaTest): + def setUp(self): + FakeSchemaTest.setUp(self) + # should we handle upgrade error dialogs by clicking "start fresh" + self.handle_upgrade_error_dialogs = False + # should we handle database corrupt message boxes by clicking "OK?" + self.handle_corruption_dialogs = False + def check_database(self): obj_map = {} for klass in (PCFProgramer, RestorableHuman, Human): @@ -323,6 +381,7 @@ self.reload_test_database() self.check_database() # check deleting the different class + im_special = self.reload_object(im_special) im_special.remove() self.db.pop() self.reload_test_database() @@ -336,55 +395,74 @@ self.setup_new_database(self.save_path, schema_version=0, object_schemas=self.OBJECT_SCHEMAS) app.db.upgrade_database() + database.initialize() self.check_database() def test_upgrade(self): self.reload_test_database(version=1) new_lee = Human.get_by_id(self.lee.id) self.assertEquals(new_lee.name, 'new name') + # check that we created a backup file + backup_path = os.path.join(os.path.dirname(self.save_path), + 'dbbackups', 'sqlitedb_backup_0') + if not os.path.exists(backup_path): + raise AssertionError("database backup doesn't exist") + # check that the backup has the old data + backup_conn = sqlite3.connect(backup_path) + cursor = backup_conn.execute("SELECT name FROM human WHERE id=?", + (self.lee.id,)) + backup_lee_name = cursor.fetchone()[0] + self.assertEquals(backup_lee_name, 'lee') def test_restore_with_newer_version(self): self.reload_test_database(version=1) self.assertRaises(databaseupgrade.DatabaseTooNewError, self.reload_test_database, version=0) - def test_last_id(self): - correct_last_id = database.DDBObject.lastID - database.DDBObject.lastID = 0 + def test_make_new_id(self): + # Check that when we reload the database, the id counter stays the + # same + org_id = app.db_info.make_new_id() + # reload the database self.reload_test_database() - self.assert_(database.DDBObject.lastID > 0) - self.assertEquals(database.DDBObject.lastID, correct_last_id) + self.assertEquals(app.db_info.make_new_id(), org_id) def check_reload_error(self, **reload_args): corrupt_path = os.path.join(os.path.dirname(self.save_path), 'corrupt_database') self.assert_(not os.path.exists(corrupt_path)) self.allow_db_load_errors(True) - self.reload_test_database(**reload_args) + with self.allow_warnings(): + 
self.reload_test_database(**reload_args) self.allow_db_load_errors(False) self.assert_(os.path.exists(corrupt_path)) - def handle_corrupt_db_dialogs(self, upgrade, corruption): + def handle_dialogs(self, upgrade, corruption): """Handle the dialogs that we pop up when we notice database errors. :param upgrade: handle upgrade dialogs by clicking "start fresh" :param corruption: handle database corrupt message boxes """ - def dialog_handler(obj, dialog): - if upgrade and (dialogs.BUTTON_START_FRESH in dialog.buttons): - # handle database upgrade dialog - dialog.run_callback(dialogs.BUTTON_START_FRESH) - elif corruption and isinstance(dialog, dialogs.MessageBoxDialog): - # handle the load error dialog - dialog.run_callback(dialogs.BUTTON_OK) - else: - raise AssertionError("Don't know how to handle dialog: %s", - dialog) - signals.system.connect('new-dialog', dialog_handler) + self.allow_db_upgrade_error_dialog = True + self.handle_upgrade_error_dialogs = upgrade + self.handle_corruption_dialogs = corruption + + def handle_new_dialog(self, obj, dialog): + if (self.handle_upgrade_error_dialogs and + (dialogs.BUTTON_START_FRESH in dialog.buttons)): + # handle database upgrade dialog + dialog.run_callback(dialogs.BUTTON_START_FRESH) + elif (self.handle_corruption_dialogs and + isinstance(dialog, dialogs.MessageBoxDialog)): + # handle the load error dialog + dialog.run_callback(dialogs.BUTTON_OK) + else: + return FakeSchemaTest.handle_new_dialog(self, obj, dialog) def test_upgrade_error(self): - self.handle_corrupt_db_dialogs(upgrade=True, corruption=False) - self.check_reload_error(version=2) + self.handle_dialogs(upgrade=True, corruption=False) + with self.allow_warnings(): + self.check_reload_error(version=2) def test_corrupt_database(self): app.db.close() @@ -392,12 +470,12 @@ # depending on the SQLite version, we will notice the error when we # issue the PRAGMA journal_mode command, or when we do the upgrades. # Handle the dialogs for both.
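For reference, the recovery that check_reload_error() asserts on could be implemented roughly as follows: the unreadable database is moved aside to 'corrupt_database' and a fresh one is opened at the original path (a hedged sketch, not the actual miro.storedatabase code):

import os

def move_aside_corrupt_db(save_path):
    # keep the broken file for post-mortem debugging instead of
    # deleting it, then let the caller create a fresh database
    corrupt_path = os.path.join(os.path.dirname(save_path),
                                'corrupt_database')
    os.rename(save_path, corrupt_path)
    return corrupt_path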
- self.handle_corrupt_db_dialogs(upgrade=True, corruption=True) + self.handle_dialogs(upgrade=True, corruption=True) self.check_reload_error() def test_database_data_error(self): app.db.cursor.execute("DROP TABLE human") - self.handle_corrupt_db_dialogs(upgrade=False, corruption=True) + self.handle_dialogs(upgrade=False, corruption=True) self.check_reload_error() def test_bulk_insert(self): @@ -491,7 +569,8 @@ obj.signal_change() def assert_object_invalid(self, obj): - self.assertRaises(schema.ValidationError, obj.signal_change) + with self.allow_warnings(): + self.assertRaises(schema.ValidationError, obj.signal_change) def test_none_values(self): self.lee.age = None @@ -539,7 +618,8 @@ def test_repr_failure(self): app.db.cursor.execute("UPDATE human SET stuff='{baddata' " "WHERE name='lee'") - restored_lee = self.reload_object(self.lee) + with self.allow_warnings(): + restored_lee = self.reload_object(self.lee) self.assertEqual(restored_lee.stuff, 'testing123') app.db.cursor.execute("SELECT stuff from human WHERE name='lee'") row = app.db.cursor.fetchone() @@ -548,21 +628,25 @@ def test_repr_failure_no_handler(self): app.db.cursor.execute("UPDATE pcf_programmer SET stuff='{baddata' " "WHERE name='ben'") - self.assertRaises(SyntaxError, self.reload_object, self.ben) + with self.allow_warnings(): + self.assertRaises(SyntaxError, self.reload_object, self.ben) class ConverterTest(StoreDatabaseTest): def test_convert_repr(self): converter = storedatabase.SQLiteConverter() + # _repr_from_sql ignores the schema_item parameter, so we can just pass + # in None + schema_item = None test1 = """{'updated_parsed': (2009, 6, 5, 1, 30, 0, 4, 156, 0)}""" - val = converter._convert_repr(test1) + val = converter._repr_from_sql(test1, schema_item) self.assertEquals(val, {"updated_parsed": (2009, 6, 5, 1, 30, 0, 4, 156, 0)}) test2 = """{'updated_parsed': time.struct_time(tm_year=2009, \ tm_mon=6, tm_mday=5, tm_hour=1, tm_min=30, tm_sec=0, tm_wday=4, tm_yday=156, \ tm_isdst=0)}""" - val = converter._convert_repr(test2) + val = converter._repr_from_sql(test2, schema_item) self.assertEquals(val, {"updated_parsed": (2009, 6, 5, 1, 30, 0, 4, 156, 0)}) @@ -579,10 +663,11 @@ self.tab_order = tabs.TabOrder(u'channel') self.guide = guide.ChannelGuide(u'http://example.com/') self.theme_hist = theme.ThemeHistory() - self.display_state = widgetstate.DisplayState((u'testtype', u'testid')) + self.view_state = widgetstate.ViewState((u'testtype', u'testid', 0)) def check_fixed_value(self, obj, column_name, value, disk_value=None): - obj = self.reload_object(obj) + with self.allow_warnings(): + obj = self.reload_object(obj) self.assertEquals(getattr(obj, column_name), value) # make sure the values stored on disk are correct as well if disk_value is None: @@ -598,14 +683,6 @@ schema_item, disk_value) self.assertEqual(row[0], sql_value) - def test_corrupt_status(self): - app.db.cursor.execute("UPDATE remote_downloader " - "SET status='{baddata' WHERE id=?", (self.downloader.id,)) - # setup_restored sets some values for status, so we will have - # more than an empty dict - self.check_fixed_value(self.downloader, 'status', - {'rate': 0, 'upRate': 0, 'eta': 0}, disk_value={}) - def test_corrupt_etag(self): app.db.cursor.execute("UPDATE saved_search_feed_impl " "SET etag='{baddata' WHERE ufeed_id=?", (self.feed.id,)) @@ -619,7 +696,8 @@ def test_corrupt_tab_ids(self): app.db.cursor.execute("UPDATE taborder_order " "SET tab_ids='[1, 2; 3 ]' WHERE id=?", (self.tab_order.id,)) - reloaded = self.reload_object(self.tab_order) + with
self.allow_warnings(): + reloaded = self.reload_object(self.tab_order) self.check_fixed_value(reloaded, 'tab_ids', []) # check that restore_tab_list() re-adds the tab ids reloaded.restore_tab_list() @@ -636,17 +714,12 @@ (self.theme_hist.id,)) self.check_fixed_value(self.theme_hist, 'pastThemes', []) - def test_corrupt_display_state(self): - app.db.cursor.execute("UPDATE display_state SET " - "active_filters=?, selected_view=?, " - "list_view_columns=?, list_view_widths=? WHERE id=?", - (5, 1, '{baddata', '{baddata', self.display_state.id)) - self.check_fixed_value(self.display_state, 'list_view_columns', None) - self.check_fixed_value(self.display_state, 'list_view_widths', None) - # check that fields with valid values were salvaged - reloaded = self.reload_object(self.display_state) - self.assertEquals(reloaded.active_filters, 5) - self.assertEquals(reloaded.selected_view, 1) + def test_corrupt_view_state(self): + app.db.cursor.execute("UPDATE view_state SET " + "columns_enabled=?, column_widths=? WHERE id=?", + ('{baddata', '{baddata', self.view_state.id)) + self.check_fixed_value(self.view_state, 'columns_enabled', None) + self.check_fixed_value(self.view_state, 'column_widths', None) def test_corrupt_link_history(self): # TODO: should test ScraperFeedIpml.linkHistory, but it's not so easy @@ -682,7 +755,8 @@ def test_extra_tab_ids(self): self.screw_with_tab_order(self.f1.id, self.f2.id, self.folder.id, 123) - self.check_order(self.f1.id, self.f2.id, self.folder.id) + with self.allow_warnings(): + self.check_order(self.f1.id, self.f2.id, self.folder.id) def test_order_wrong(self): self.f1.set_folder(self.folder) @@ -692,5 +766,138 @@ self.screw_with_tab_order(self.f1.id, self.f2.id, self.folder.id) self.check_order(self.f2.id, self.folder.id, self.f1.id) +class PreallocateTest(MiroTestCase): + def check_preallocate_size(self, path, preallocate_size): + disk_size = os.stat(path).st_size + # allow some variance for the disk size, we just need to be in the + # ballpark + self.assertClose(disk_size, preallocate_size) + + def test_preallocate(self): + # test preallocating space + preallocate = 512 * 1024 # 512KB + path = os.path.join(self.tempdir, 'testdb') + storage = storedatabase.LiveStorage(path, preallocate=preallocate) + # check while open + self.check_preallocate_size(path, preallocate) + # check that it remains that big after close + storage.close() + self.check_preallocate_size(path, preallocate) + +class TemporaryModeTest(MiroTestCase): + # test getting an error when opening a new database and using an + # in-memory database to work around it + + def setUp(self): + MiroTestCase.setUp(self) + self.save_path = os.path.join(self.tempdir, 'test-db') + # set up an error handler that tells LiveStorage to use temporary + # storage if it fails to open a new database + use_temp = storedatabase.LiveStorageErrorHandler.ACTION_USE_TEMPORARY + self.error_handler = mock.Mock() + self.error_handler.handle_open_error.return_value = use_temp + + self.row_data = [] + + self.mock_add_timeout = mock.Mock() + self.patch_function('miro.eventloop.add_timeout', + self.mock_add_timeout) + + self.real_sqlite3_connect = sqlite3.connect + self.patch_function('sqlite3.connect', self.mock_sqlite3_connect) + + def force_temporary_database(self): + """Open a new database and force it to fail. + + After this we should be using an in-memory database and trying to save + it to disk every so often. 
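The workaround TemporaryModeTest drives looks roughly like this: open ':memory:' when the real file cannot be opened, then keep rescheduling a save attempt. A simplified sketch, not LiveStorage's actual implementation; only the 300-second delay and the method name _try_save_temp_to_disk come from the test above.

import sqlite3

RETRY_DELAY = 300  # seconds; matches the delay the test asserts on

class TempModeStorage(object):
    def __init__(self, path, add_timeout):
        self.path = path
        self.add_timeout = add_timeout  # scheduler, e.g. eventloop.add_timeout
        try:
            self.connection = sqlite3.connect(path)
            self.temporary = False
        except sqlite3.OperationalError:
            # fall back to an in-memory database and retry periodically
            self.connection = sqlite3.connect(':memory:')
            self.temporary = True
            self.add_timeout(RETRY_DELAY, self._try_save_temp_to_disk)

    def _try_save_temp_to_disk(self):
        try:
            disk_connection = sqlite3.connect(self.path)
        except sqlite3.OperationalError:
            # still failing: schedule another attempt, keep the data in RAM
            self.add_timeout(RETRY_DELAY, self._try_save_temp_to_disk)
            return
        # (copy the in-memory contents across, then switch connections)
        self.connection = disk_connection
        self.temporary = False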
+ """ + + self.force_next_connect_to_fail = True + with self.allow_warnings(): + self.reload_database(self.save_path, + object_schemas=test_object_schemas, + error_handler=self.error_handler) + + def test_error_handler_called(self): + # Test that our error handler was called when storedatabase could'nt + # open the database + handle_open_error = self.error_handler.handle_open_error + self.force_temporary_database() + handle_open_error.assert_called_once_with() + + def test_use_memory(self): + # Test that we use an in-memory database after failing to open a real + # one + self.force_temporary_database() + self.assertEquals(self.last_connect_path, ':memory:') + self.assert_(not os.path.exists(self.save_path)) + + def test_try_save_scheduling(self): + # test that we call add_timeout to schedule trying to save to the + # database + self.force_temporary_database() + delay = 300 + self.mock_add_timeout.assert_called_once_with( + delay, app.db._try_save_temp_to_disk, MatchAny()) + # Make the timeout run and fail. Check that we schedule another try + self.mock_add_timeout.reset_mock() + self.force_next_connect_to_fail = True + with self.allow_warnings(): + app.db._try_save_temp_to_disk() + self.mock_add_timeout.assert_called_once_with( + delay, app.db._try_save_temp_to_disk, MatchAny()) + # make the timeout succeed. Check that we don't schedule anymore + self.mock_add_timeout.reset_mock() + app.db._try_save_temp_to_disk() + self.assertEquals(self.mock_add_timeout.called, False) + + def add_data(self, row_count): + for i in range(row_count): + age = 40 + meters_tall = 2.5 + name = u"Name-%s" % i + password = u"x" * i + Human(name, age, meters_tall, [], password=password) + self.row_data.append((name, age, meters_tall, password)) + + def check_data(self): + view = Human.make_view(order_by="name") + self.row_data.sort() # name is the 1st column, so this sorts by name + self.assertEquals(view.count(), len(self.row_data)) + for i, h in enumerate(view): + self.assertEquals(h.name, self.row_data[i][0]) + self.assertEquals(h.age, self.row_data[i][1]) + self.assertEquals(h.meters_tall, self.row_data[i][2]) + self.assertEquals(h.stuff, {'password': self.row_data[i][3]}) + + def test_data(self): + # test storing data in the temp database + self.force_temporary_database() + # add a bunch of fake data to the database + self.add_data(100) + # make the database save to its real path + with self.allow_warnings(): + app.db._try_save_temp_to_disk() + # check that the data got saved to disk + self.assertEquals(self.last_connect_path, self.save_path) + self.assert_(os.path.exists(self.save_path)) + # test that the data is correct immediately after change_path + self.check_data() + # add some more data + self.add_data(100) + # re-open the database and check that everything is correct + self.reload_database(self.save_path, object_schemas=test_object_schemas) + self.check_data() + + def mock_sqlite3_connect(self, path, *args, **kwargs): + """Force the next call to sqlite3.connect to raise an exception. 
""" + if self.force_next_connect_to_fail: + self.force_next_connect_to_fail = False + raise sqlite3.OperationalError() + else: + self.last_connect_path = path + return self.real_sqlite3_connect(path, *args, **kwargs) + if __name__ == '__main__': unittest.main() diff -Nru miro-4.0.4/lib/test/subprocesstest.py miro-6.0/lib/test/subprocesstest.py --- miro-4.0.4/lib/test/subprocesstest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/subprocesstest.py 2013-04-05 16:02:42.000000000 +0000 @@ -3,10 +3,12 @@ import Queue from miro import app +from miro import moviedata from miro import subprocessmanager from miro import workerprocess from miro.plat import resources -from miro.test.framework import EventLoopTest +from miro.test import mock +from miro.test.framework import EventLoopTest, only_on_platforms # setup some test messages/handlers class TestSubprocessHandler(subprocessmanager.SubprocessHandler): @@ -72,6 +74,12 @@ def __init__(self, event): self.event = event +class SlowRunningTask(workerprocess.TaskMessage): + """Task sent to the worker process that should do nothing except take a + bunch of time. + """ + priority = -10 + # Actual tests go below here class SubprocessManagerTest(EventLoopTest): @@ -83,7 +91,7 @@ self.responder = TestSubprocessResponder() self.subprocess = subprocessmanager.SubprocessManager(TestMessage, - self.responder, TestSubprocessHandler) + self.responder, TestSubprocessHandler, restart_delay=0) self.subprocess.start() self._wait_for_subprocess_ready() @@ -91,7 +99,7 @@ self.subprocess.shutdown() EventLoopTest.tearDown(self) - def _wait_for_subprocess_ready(self, timeout=4.0): + def _wait_for_subprocess_ready(self, timeout=6.0): """Wait for the subprocess to startup.""" start = time.time() @@ -101,9 +109,10 @@ if self.responder.subprocess_ready: return if time.time() - start > timeout: - self.subprocess.process.terminate() - raise AssertionError("subprocess didn't startup in %s secs", - timeout) + if self.subprocess.process is not None: + self.subprocess.process.terminate() + raise AssertionError("subprocess didn't startup in %s secs" % + timeout) def test_startup(self): # test that we startup the process @@ -147,7 +156,8 @@ self.responder.subprocess_events_saw = [] self.subprocess.process.terminate() self.responder.subprocess_ready = False - self._wait_for_subprocess_ready() + with self.allow_warnings(): + self._wait_for_subprocess_ready() # the subprocess should see a startup self.assertEqual(self.responder.subprocess_events_saw, ['startup']) # the main process should see a startup and a restart @@ -162,13 +172,35 @@ self.assertEqual(self.responder.events_saw, ['shutdown']) def test_restart(self): - # test that we restart process when the quit unexpectedly + # test that we restart subprocesses when they quit unexpectedly old_pid = self.subprocess.process.pid old_thread = self.subprocess.thread self.subprocess.process.terminate() # wait a bit for the subprocess to quit then restart self.responder.subprocess_ready = False - self._wait_for_subprocess_ready() + with self.allow_warnings(): + self._wait_for_subprocess_ready() + # test that process #1 has been restarted + self.assert_(self.subprocess.is_running) + self.assert_(self.subprocess.process.poll() is None) + self.assert_(self.subprocess.thread.is_alive()) + self.assertNotEqual(old_pid, self.subprocess.process.pid) + # test that the original thread is gone + self.assert_(not old_thread.is_alive()) + + def test_restart2(self): + # test that we restart subprocesses if the quit normally, but we + # 
haven't sent the quit message to them + old_pid = self.subprocess.process.pid + old_thread = self.subprocess.thread + # Send None to the subprocess to make it quit, but without going + # through our SubprocessManager. SubprocessManager should restart the + # child process in this case + subprocessmanager._dump_obj(None, self.subprocess.process.stdin) + # wait a bit for the subprocess to quit then restart + self.responder.subprocess_ready = False + with self.allow_warnings(): + self._wait_for_subprocess_ready() # test that process #1 has been restarted self.assert_(self.subprocess.is_running) self.assert_(self.subprocess.process.poll() is None) @@ -202,6 +234,10 @@ return workerprocess.WorkerProcessHandler.handle_feedparser_task( self, msg) + def handle_slow_running_task(self, msg): + time.sleep(0.5) + return None + class WorkerProcessTest(EventLoopTest): """Test our worker process.""" def setUp(self): @@ -209,26 +245,37 @@ # override the normal handler class with our own workerprocess._subprocess_manager.handler_class = ( UnittestWorkerProcessHandler) + workerprocess._subprocess_manager.restart_delay = 0 + self.reset_results() + + def tearDown(self): + EventLoopTest.tearDown(self) + workerprocess.shutdown() + + def reset_results(self): self.result = self.error = None - def callback(self, result): + def callback(self, msg, result): self.result = result self.stopEventLoop(abnormal=False) - def errback(self, error): + def errback(self, msg, error): self.error = error self.stopEventLoop(abnormal=False) +class FeedParserTest(WorkerProcessTest): def send_feedparser_task(self): # send feedparser successfully parsing a feed path = os.path.join(resources.path("testdata/feedparsertests/feeds"), "http___feeds_miroguide_com_miroguide_featured.xml") html = open(path).read() - workerprocess.run_feedparser(html, self.callback, self.errback) + msg = workerprocess.FeedparserTask(html) + workerprocess.send(msg, self.callback, self.errback) def check_successful_result(self): + if self.error is not None: + raise self.error self.assertNotEquals(self.result, None) - self.assertEquals(self.error, None) # just do some very basic test to see if the result is correct if self.result['bozo']: raise AssertionError("Feedparser parse error: %s", @@ -244,8 +291,8 @@ def test_feedparser_error(self): # test feedparser failing to parse a feed workerprocess.startup() - workerprocess.run_feedparser('FORCE EXCEPTION', self.callback, - self.errback) + msg = workerprocess.FeedparserTask('FORCE EXCEPTION') + workerprocess.send(msg, self.callback, self.errback) self.runEventLoop(4.0) self.assertEquals(self.result, None) self.assert_(isinstance(self.error, ValueError)) @@ -256,7 +303,8 @@ original_pid = workerprocess._subprocess_manager.process.pid self.send_feedparser_task() workerprocess._subprocess_manager.process.terminate() - self.runEventLoop(4.0) + with self.allow_warnings(): + self.runEventLoop(4.0) # check that we really restarted the subprocess self.assertNotEqual(original_pid, workerprocess._subprocess_manager.process.pid) @@ -271,3 +319,110 @@ workerprocess.startup() self.runEventLoop(4.0) self.check_successful_result() + +class MovieDataTest(WorkerProcessTest): + + def setUp(self): + WorkerProcessTest.setUp(self) + workerprocess.startup() + + def check_successful_result(self): + # just do some very basic test to see if the result is correct + if self.error is not None: + raise self.error + if not isinstance(self.result, dict): + raise TypeError(self.result) + + def get_from_result(self, key): + try: + return 
self.result[key] + except KeyError: + raise AssertionError("result missing key %s: %s" % (key, self.result)) + + def check_movie_data_call(self, filename, file_type, duration, + has_screenshot): + source_path = resources.path("testdata/metadata/" + filename) + msg = workerprocess.MovieDataProgramTask(source_path, self.tempdir) + workerprocess.send(msg, self.callback, self.errback) + self.runEventLoop(30.0) + self.check_successful_result() + self.assertEquals(self.get_from_result('source_path'), source_path) + if file_type is not None: + self.assertEquals(self.get_from_result('file_type'), file_type) + else: + self.assert_('file_type' not in self.result) + if duration is not None: + self.assertClose(self.get_from_result('duration'), duration) + else: + self.assert_('duration' not in self.result) + if has_screenshot: + screenshot_name = os.path.basename(source_path) + '.png' + self.assertEquals(self.get_from_result('screenshot'), + os.path.join(self.tempdir, screenshot_name)) + else: + self.assert_('screenshot' not in self.result) + self.reset_results() + + def test_movie_data_worker_process_audio(self): + self.check_movie_data_call('mp3-0.mp3', 'audio', 1044, False) + self.check_movie_data_call('mp3-1.mp3', 'audio', 1044, False) + self.check_movie_data_call('mp3-2.mp3', 'audio', 1044, False) + + @only_on_platforms('linux', 'win32') + def test_movie_data_worker_process_video(self): + self.check_movie_data_call('theora_with_ogg_extension.ogg', 'video', + 1044, True) + + @only_on_platforms('linux', 'win32') + def test_moviedata_drm_gtk(self): + self.check_movie_data_call('drm.m4v', None, None, False) + + # FIXME: re-implement this one + #@only_on_platforms('osx') + #def test_movie_data_webm(self): + #self.check_movie_data_call('webm-0.webm', 'video', 1044, True) + + @only_on_platforms('osx') + def test_movie_data_drm_osx(self): + self.check_movie_data_call('drm.m4v', 'video', 2668832, False) + + +class MutagenTest(WorkerProcessTest): + def check_successful_result(self): + # just do some very basic test to see if the result is correct + if self.error is not None: + raise self.error + if not isinstance(self.result, dict): + raise TypeError(self.result) + + def check_mutagen_call(self, filename, file_type, duration, title, + has_cover_art): + source_path = resources.path("testdata/metadata/" + filename) + msg = workerprocess.MutagenTask(source_path, self.tempdir) + workerprocess.send(msg, self.callback, self.errback) + self.runEventLoop(4.0) + self.check_successful_result() + self.assertEquals(self.result['file_type'], file_type) + self.assertClose(self.result['duration'], duration) + self.assertEquals(self.result['title'], title) + if has_cover_art: + self.assertNotEquals(self.result['cover_art'], None) + else: + self.assert_('cover_art' not in self.result) + self.reset_results() + + def test_mutagen_worker_process(self): + workerprocess.startup() + self.check_mutagen_call('mp3-0.mp3', 'audio', 1055, + 'Invisible Walls', False) + self.check_mutagen_call('mp3-1.mp3', 'audio', 1055, 'Race Lieu', + False) + self.check_mutagen_call('mp3-2.mp3', 'audio', 1066, + '#426: Tough Room 2011', False) + self.check_mutagen_call('drm.m4v', 'video', 2668832, 'Thinkers', + True) + + +# TODO: +# Test task priority system in worker process +# Test that the CancelFileOperations message is handled properly diff -Nru miro-4.0.4/lib/test/subscriptiontest.py miro-6.0/lib/test/subscriptiontest.py --- miro-4.0.4/lib/test/subscriptiontest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/subscriptiontest.py
2013-04-05 16:02:42.000000000 +0000 @@ -198,7 +198,8 @@ class Testfind_subscribe_links(MiroTestCase): def test_garbage(self): url = 5 - self.assertEquals(subscription.find_subscribe_links(url), []) + with self.allow_warnings(): + self.assertEquals(subscription.find_subscribe_links(url), []) def test_different_host(self): url = 'http://youtoob.com' diff -Nru miro-4.0.4/lib/test/testhttpserver.py miro-6.0/lib/test/testhttpserver.py --- miro-4.0.4/lib/test/testhttpserver.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/testhttpserver.py 2013-04-05 16:02:42.000000000 +0000 @@ -141,6 +141,12 @@ code = 302 location_header = self.build_url("linux-screen.jpg") path = self.translate_path('redirect.html') + elif self.path == '/custom-redirect': + if self.server.custom_redirect_url is None: + raise AssertionError("custom_redirect_url not set") + code = 302 + location_header = self.server.custom_redirect_url + path = self.translate_path('redirect.html') elif (self.path.startswith("/protected/") or self.path.startswith("/protected2/") or self.path.startswith("/protected3/")): @@ -314,6 +320,7 @@ self.httpserver.close_connection = False self.httpserver.allow_resume = True self.httpserver.pause_after = -1 + self.httpserver.custom_redirect_url = None self.event.set() try: self.httpserver.serve_forever() @@ -365,3 +372,6 @@ def pause_after(self, bytes): self.httpserver.pause_after = bytes + + def custom_redirect_url(self, url): + self.httpserver.custom_redirect_url = url diff -Nru miro-4.0.4/lib/test/testobjects.py miro-6.0/lib/test/testobjects.py --- miro-4.0.4/lib/test/testobjects.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/test/testobjects.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,400 @@ +"""miro.test.testobjects -- Create test objects. + +This module is a collection of functions to make objects to use for testing.
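The factories in this new module lean on a module-level counter so every feed gets a unique URL, with test_started() resetting state between tests. The core pattern, reduced to a standalone sketch (make_feed_url and reset_counters are hypothetical names; the real module builds models.Feed objects):

import itertools

feed_counter = itertools.count()

def make_feed_url():
    # each call yields a fresh, deterministic URL
    return u'http://feed%d.com/feed.rss' % feed_counter.next()

def reset_counters():
    # called from test setup so numbering is stable per-test
    global feed_counter
    feed_counter = itertools.count()

assert make_feed_url() == u'http://feed0.com/feed.rss'
assert make_feed_url() == u'http://feed1.com/feed.rss'
reset_counters()
assert make_feed_url() == u'http://feed0.com/feed.rss'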
+""" + +import collections +import datetime +import itertools +import random +import os + +from miro import app +from miro import database +from miro import devices +from miro import item +from miro import libdaap +from miro import models +from miro import messages +from miro import sharing +from miro import util +from miro.data.item import fetch_item_infos +from miro.plat.utils import filename_to_unicode, unicode_to_filename +from miro.test import mock + +feed_counter = itertools.count() +shares_created = [] +device_databases_created = [] + +def test_started(current_test_case): + """Reset the test object data""" + global feed_counter, current_test + feed_counter = itertools.count() + current_test = current_test_case + +def test_stopped(current_test_case): + for share in shares_created: + share.destroy() + for device_db in device_databases_created: + device_db.close() + shares_created[:] = [] + device_databases_created[:] = [] + +def make_item_info(itemobj): + return fetch_item_infos(app.db.connection, [itemobj.id])[0] + +def make_feed(): + url = u'http://feed%d.com/feed.rss' % feed_counter.next() + return models.Feed(url, initiallyAutoDownloadable=False) + +def make_manual_feed(): + return models.Feed(u'dtv:manualFeed', initiallyAutoDownloadable=False) + +def make_feed_for_file(path): + """Create a feed with a file:// URL that points to a file """ + url = u'file://%s' % path + return models.Feed(url) + +def make_item(feed, title, **kwargs): + """Make a new item.""" + fp_values = item.FeedParserValues({}) + fp_values.data['entry_title'] = title + fp_values.data['url'] = u'http://example.com/%s.mkv' % title + fp_values.data.update(kwargs) + # pick a random recent date for the release date + seconds_ago = random.randint(0, 60 * 60 * 24 * 7) + release_date = (datetime.datetime.now() - + datetime.timedelta(seconds=seconds_ago)) + fp_values.data['release_date'] = release_date + return models.Item(fp_values, feed_id=feed.id) + +def make_file_item(feed, title=None, path=None, ext='.avi'): + if path is None: + path = current_test.make_temp_path(ext) + ensure_file_exists(path) + return models.FileItem(path, feed.id, + fp_values=item.fp_values_for_file(path, title)) + +def make_feed_with_items(item_count, file_items=False, prefix=None): + feed = make_feed() + items = add_items_to_feed(feed, item_count, file_items, prefix) + return feed, items + +def add_items_to_feed(feed, item_count, file_items=False, prefix=None): + items = [] + for i in xrange(item_count): + if prefix is None: + name = u"%s-item%d" % (feed.get_title(), i) + else: + name = u"%s-item%d" % (prefix, i) + if file_items: + items.append(make_file_item(feed, name)) + else: + items.append(make_item(feed, name)) + return items + +def ensure_file_exists(path): + if not os.path.exists(path): + with open(path, 'w') as f: + f.write("test-data") + f.close() + +def make_devices_device_info(): + return devices.DeviceInfo( + u'Test Device', + video_conversion='copy', + audio_conversion='copy', + video_path=u'Video', + audio_path=u'Music', + vendor_id=123, + product_id=123, + container_types='mp3 wav asf isom ogg mpeg avi'.split(), + audio_types='mp* wmav* aac pcm* vorbis'.split(), + video_types='theora h264 mpeg* wmv*'.split(), + mount_instructions='') + +def make_mock_device(no_database=False): + mount = current_test.make_temp_dir_path() + os.makedirs(os.path.join(mount, '.miro')) + os.makedirs(os.path.join(mount, 'cover-art')) + device = messages.DeviceInfo(123, make_devices_device_info(), mount, + devices.sqlite_database_path(mount), + 
devices.DeviceDatabase(), None, None, + 1024000, 512000, False) + if not no_database: + setup_mock_device_database(device) + return device + +def setup_mock_device_database(device): + device.database = devices.DeviceDatabase() + device.database[u'settings'] = { + u'audio_path': os.path.join(device.mount, 'audio_path'), + u'video_path': os.path.join(device.mount, 'video_path'), + } + sqlite_db = devices.load_sqlite_database(device.mount, + device.size) + db_info = database.DeviceDBInfo(sqlite_db, device.id) + metadata_manager = devices.make_metadata_manager(device.mount, + db_info, + device.id) + device.db_info = db_info + device.metadata_manager = metadata_manager + device_databases_created.append(sqlite_db) + return device + +def make_device_items(device, *filenames): + return [make_device_item(device, filename) for filename in filenames] + +def make_device_item(device, filename): + # ensure that filename is the correct type for our platform + filename = unicode_to_filename(unicode(filename)) + ensure_file_exists(os.path.join(device.mount, filename)) + return item.DeviceItem(device, filename) + +class MockDAAPClientLibrary(object): + """Tracks items in the library for MockDAAPClient + """ + + def __init__(self): + self.base_playlist_id = 123 + # maps item ids to item data for all items + self.all_items = {} + # maps playlist id to playlist data + self.playlists = { + self.base_playlist_id: { 'daap.baseplaylist': 1, }, + } + # maps playlist id to dict mapping item ids to item data for that + # playlist + self.playlist_items = { + self.base_playlist_id: {} + } + + def set_items(self, new_items): + self.all_items = dict((i['dmap.itemid'], i) for i in new_items) + self.set_playlist_items(self.base_playlist_id, self.all_items.keys()) + + def add_playlist(self, new_playlist): + daap_id = new_playlist['dmap.itemid'] + if daap_id not in self.playlists: + self.playlist_items[daap_id] = {} + self.playlists[daap_id] = new_playlist.copy() + + def remove_playlist(self, daap_id): + del self.playlists[daap_id] + del self.playlist_items[daap_id] + + def set_playlist_items(self, playlist_id, item_ids): + self.playlist_items[playlist_id] = dict( + (daap_id, self.all_items[daap_id]) + for daap_id in item_ids) + + def copy(self): + """Get a copy of this library.""" + rv = MockDAAPClientLibrary() + rv.set_items(self.all_items.values()) + for k, playlist in self.playlists.items(): + if k != self.base_playlist_id: + rv.add_playlist(playlist) + rv.set_playlist_items(k, self.playlist_items[k]) + return rv + +class MockDAAPClient(mock.Mock): + """Mock up a DAAP client. + + Call set_items(), add_playlist(), and set_playlist_items() to change the + data the client returns. MockDAAPClient is smart enough to understand the + update=True flag and only return items that have changed since the last + call. + + + """ + def __init__(self, *args, **kwargs): + mock.Mock.__init__(self) + self.host = '127.0.0.1' + self.port = 8000 + self.conn.sock.getpeername.return_value = ('127.0.0.1', 8000) + self.library = MockDAAPClientLibrary() + # maps playlist ids to the last library we used to send items for that + # playlist. We use this to calculate which items we need to send when + # update=True + self.last_sent_library = {} + # last sent library used for the playlists() method + self.last_sent_library_for_playlists = None + + def set_items(self, new_items): + """Change the current set of items.
+ + :param new_items: list of item data dicts + """ + self.library.set_items(new_items) + + def add_playlist(self, new_playlist): + """Add a new playlist to the client.""" + self.library.add_playlist(new_playlist) + + def remove_playlist(self, playlist_id): + """Remove a playlist from the client.""" + self.library.remove_playlist(playlist_id) + + def set_playlist_items(self, playlist_id, new_playlist_items): + """Change the current set of items in a playlist. + + :param playlist_id: DAAP id of the playlist + :param new_playlist_items: list of DAAP ids for the items in that + playlist + """ + self.library.set_playlist_items(playlist_id, new_playlist_items) + + def dict_diff(self, new_items, old_items): + """Calculate the difference of 2 dicts. + + This method is used in items() and playlists() when the update=True + flag is used. + + :returns: (changed_items, deleted_ids) tuple. changed_items is a dict + mapping daap_ids to item data for new or updated items. deleted_ids + is a list of ids for deleted items. + """ + items = {} + deleted_items = [] + for k, item_data in new_items.items(): + if k not in old_items or old_items[k] != item_data: + items[k] = item_data + for k in old_items: + if k not in new_items: + deleted_items.append(k) + return items, deleted_items + + def current_items(self, playlist_id=None): + """Get current set of items.""" + if playlist_id is None: + return self.library.all_items.copy() + else: + return self.library.playlist_items[playlist_id].copy() + + def current_playlists(self): + """Get current set of playlists.""" + return self.library.playlists.copy() + + def current_playlist_item_map(self): + """Get the current playlist item map + + :returns: dict mapping playlist id to item id lists. This will only + contain entries for playlists that actually have items in them.
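The dict_diff() contract is easiest to see with a tiny worked example; this standalone function mirrors the method above:

def dict_diff(new_items, old_items):
    # returns (changed, deleted): new or updated entries, plus the keys
    # that disappeared
    changed = {}
    for k, v in new_items.items():
        if k not in old_items or old_items[k] != v:
            changed[k] = v
    deleted = [k for k in old_items if k not in new_items]
    return changed, deleted

old = {1: 'song-a', 2: 'song-b'}
new = {1: 'song-a (remastered)', 3: 'song-c'}
assert dict_diff(new, old) == ({1: 'song-a (remastered)', 3: 'song-c'}, [2])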
+ """ + rv = {} + playlist_items = set() + podcast_items = set() + for playlist_id, items in self.library.playlist_items.items(): + playlist_data = self.library.playlists[playlist_id] + if playlist_id != self.library.base_playlist_id and items: + rv[playlist_id] = set(items.keys()) + if playlist_data.get('com.apple.itunes.is-podcast-playlist'): + podcast_items.update(items.keys()) + else: + playlist_items.update(items.keys()) + if playlist_items: + rv[u'playlist'] = playlist_items + if podcast_items: + rv[u'podcast'] = podcast_items + + return rv + + def items(self, playlist_id=None, meta=None, update=False): + def get_items_from_library(library): + if playlist_id is not None: + return library.playlist_items[playlist_id] + else: + return library.all_items + last_library = self.last_sent_library.get(playlist_id) + if not update or last_library is None: + items = get_items_from_library(self.library).copy() + deleted_items = [] + else: + items, deleted_items = self.dict_diff( + get_items_from_library(self.library), + get_items_from_library(last_library)) + self.last_sent_library[playlist_id] = self.library.copy() + return items, deleted_items + + def playlists(self, meta=None, update=False): + if not update or self.last_sent_library_for_playlists is None: + playlists = self.library.playlists.copy() + deleted_playlists = [] + else: + playlists, deleted_playlists = self.dict_diff( + self.library.playlists, + self.last_sent_library_for_playlists.playlists) + # add playlists that have had their items changed + for playlist_id, item_set in self.library.playlist_items.items(): + try: + last_library = self.last_sent_library[playlist_id] + except KeyError: + continue + last_item_set = last_library.playlist_items[playlist_id] + if last_item_set != item_set: + playlist_data = self.library.playlists[playlist_id] + playlists[playlist_id] = playlist_data + + self.last_sent_library_for_playlists = self.library.copy() + return playlists, deleted_playlists + + def databases(self, update): + return True + + def daap_get_file_request(self, daap_id, file_format): + return u'/item-%s' % daap_id + + def _get_child_mock(self, **kwargs): + return mock.Mock(**kwargs) + + def returnself(self, *args): + """Return a references to ourselves. 
+ + This method can be used to patch the miro.libdaap.make_daap_client() + """ + return self + +def make_mock_daap_item(item_id, title, file_type='audio'): + if file_type == 'audio': + daap_file_type = libdaap.DAAP_MEDIAKIND_AUDIO + elif file_type == 'video': + daap_file_type = libdaap.DAAP_MEDIAKIND_VIDEO + else: + raise ValueError("Unknown file type %s" % file_type) + return { + 'com.apple.itunes.mediakind': daap_file_type, + 'daap.songformat': 'mpeg', + 'dmap.itemid': item_id, + 'dmap.itemname': title, + 'daap.songtime': 123, + } + +def make_mock_daap_playlist(playlist_id, title, is_podcast=False): + playlist_data = { + 'dmap.itemid': playlist_id, + 'dmap.itemname': title, + } + if is_podcast: + playlist_data['com.apple.itunes.is-podcast-playlist'] = True + return playlist_data + +def make_share(name='TestShare'): + rv = sharing.Share('testshareid', name, u'127.0.0.1', 1234) + shares_created.append(rv) + return rv + +def make_sharing_items(share, *titles): + return [make_sharing_item(share, i, u"/item-%s" % i, title) + for i, title in enumerate(titles)] + +def make_sharing_item(share, daap_id, path, title, file_type=u'video'): + kwargs = { + 'video_path': path, + 'host': share.host, + 'port': share.port, + 'title': title, + 'file_type': file_type, + 'db_info': share.db_info, + } + return item.SharingItem(daap_id, **kwargs) diff -Nru miro-4.0.4/lib/test/unicodetest.py miro-6.0/lib/test/unicodetest.py --- miro-4.0.4/lib/test/unicodetest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/unicodetest.py 2013-04-05 16:02:42.000000000 +0000 @@ -12,11 +12,10 @@ class UnicodeFeedTestCase(framework.EventLoopTest): def setUp(self): super(UnicodeFeedTestCase, self).setUp() - signals.system.connect('new-dialog', self.onNewDialog) self.choice = None self.num_dialogs = 0 - def onNewDialog(self, obj, dialog): + def handle_new_dialog(self, obj, dialog): self.assertNotEqual(self.choice, None) self.num_dialogs += 1 # print "rundialog called from %s" % dialog.title @@ -76,7 +75,7 @@ http://participatoryculture.org/boguslink \u25cb\u4e00\u4e8c\u4e09\u56db\u4e94\u516d\u4e03\u516b\ \u4e5d - + Fri, 25 Aug 2006 17:39:21 GMT @@ -126,7 +125,7 @@ H\xe4ppy Birthday http://participatoryculture.org/boguslink H\xe4ppy Birthday - + Fri, 25 Aug 2006 17:39:21 GMT diff -Nru miro-4.0.4/lib/test/utiltest.py miro-6.0/lib/test/utiltest.py --- miro-4.0.4/lib/test/utiltest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/utiltest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,16 +1,22 @@ # coding=latin-1 # The above comment is required, because it includes non-latin characters # as an inline string in the source, we need to have this here as per PEP 263. +import itertools import os import tempfile +import time +import signal import shutil import unittest import sys +import zipfile from miro.test.framework import skip_for_platforms, MiroTestCase from miro import download_utils from miro import util +from miro import buildutils from miro.fileobject import FilenameType +from miro.plat.utils import unicode_to_filename # We're going to override this so we can guarantee that if the order # changes later that it doesn't really affect us. @@ -33,6 +39,19 @@ def flush(self): pass +class MockCache(util.Cache): + """ + MockCache is used to test the Cache object. The new values are a tuple of + the value passed in and a counter value, incremented each time a new value + is made. 
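MockCache's counter trick makes cache behavior observable: a repeated counter value means a hit, a new value means the entry was rebuilt. A self-contained LRU sketch with the same instrumentation (util.Cache's real interface may differ):

import collections
import itertools

class CountingLRU(object):
    def __init__(self, size):
        self.size = size
        self.data = collections.OrderedDict()
        self.counter = itertools.count()

    def get(self, key):
        if key in self.data:
            self.data[key] = self.data.pop(key)  # mark most recently used
            return self.data[key]
        value = (key, next(self.counter))  # stands in for create_new_value()
        self.data[key] = value
        if len(self.data) > self.size:
            self.data.popitem(last=False)  # evict least recently used
        return value

cache = CountingLRU(2)
assert cache.get('a') == ('a', 0)
assert cache.get('a') == ('a', 0)   # hit: counter unchanged
cache.get('b'); cache.get('c')      # 'a' falls out of the LRU
assert cache.get('a') == ('a', 3)   # miss: rebuilt with a new counter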
+ """ + def __init__(self, size): + util.Cache.__init__(self, size) + self.value_counter = itertools.count() + + def create_new_value(self, val, invalidator=None): + return (val, self.value_counter.next()) + class AutoFlushingStreamTest(unittest.TestCase): def setUp(self): unittest.TestCase.setUp(self) @@ -174,6 +193,16 @@ # input, handleerror, expected output if handlerror is None, # then it isn't passed in as an argument + class GoodStringObject(object): + """Object whose __str__ method returns an ASCII string.""" + def __str__(self): + return "abc" + + class BadStringObject(object): + """Object whose __str__ method returns a non-ASCII string.""" + def __str__(self): + return "abc\xe4" + for i, h, o in [ ( "", None, ""), ( "abc", None, "abc"), @@ -181,7 +210,13 @@ ( 5.5, None, "5.5"), ( u"abc", None, "abc"), ( u"abc\xe4", None, "abcä"), - ( u"abc\xe4", "replace", "abc?") + ( u"abc\xe4", "replace", "abc?"), + # test that bytestrings are converted to plain ASCII + ( "abc", None, "abc"), + ( "abc\xe4", None, "abc?"), + # test that objects are converted to plain ASCII + ( GoodStringObject(), None, "abc"), + ( BadStringObject(), None, "abc?"), ]: if h == None: @@ -458,7 +493,7 @@ def test_next_free_filename_generators(self): # try path without extension path = "/foo/.bar/test" - generator = download_utils.next_free_filename_candidates(path) + generator = util.next_free_filename_candidates(path) # first candidate should just be the file itself self.assertEquals(generator.next(), "/foo/.bar/test") # next candidate should just be the file with .X added to it @@ -467,7 +502,7 @@ # try path with extension path = "/foo/.bar/test.jpg" - generator = download_utils.next_free_filename_candidates(path) + generator = util.next_free_filename_candidates(path) # first candidate should just be the file itself self.assertEquals(generator.next(), "/foo/.bar/test.jpg") # next candidate should just be the file with .X added before the @@ -476,7 +511,7 @@ self.assertEquals(generator.next(), "/foo/.bar/test.2.jpg") # test that if we call it too many times, we get an exception - generator = download_utils.next_free_filename_candidates(path) + generator = util.next_free_filename_candidates(path) for x in xrange(100000): try: generator.next() @@ -492,7 +527,7 @@ def test_next_free_directory_generators(self): path = "/foo/.bar/test" - generator = download_utils.next_free_directory_candidates(path) + generator = util.next_free_directory_candidates(path) # first candidate should just be the file itself self.assertEquals(generator.next(), "/foo/.bar/test") # next candidate should just be the file with .X added to it @@ -500,7 +535,7 @@ self.assertEquals(generator.next(), "/foo/.bar/test.2") # test that if we call it too many times, we get an exception - generator = download_utils.next_free_directory_candidates(path) + generator = util.next_free_directory_candidates(path) for x in xrange(100000): try: generator.next() @@ -523,14 +558,14 @@ path1 = os.path.join(self.tempdir, 'foo') # test we find the a nonexistent file - returned_path, fp = download_utils.next_free_filename(path1) + returned_path, fp = util.next_free_filename(path1) self.assertEquals(returned_path, os.path.join(self.tempdir, 'foo.3')) # test that we create the file self.assert_(os.path.exists(returned_path)) # try with an extension path2 = os.path.join(self.tempdir, 'bar.jpg') - returned_path, fp = download_utils.next_free_filename(path2) + returned_path, fp = util.next_free_filename(path2) self.assertEquals(returned_path, os.path.join(self.tempdir, 
'bar.2.jpg')) self.assert_(os.path.exists(returned_path)) @@ -543,7 +578,7 @@ path = os.path.join(self.tempdir, 'foo') # test that we find a nonexistent file - returned_path = download_utils.next_free_directory(path) + returned_path = util.next_free_directory(path) self.assertEquals(returned_path, os.path.join(self.tempdir, 'foo.3')) # test that we don't create the directory self.assert_(not os.path.exists(returned_path)) @@ -571,7 +606,7 @@ """.strip().replace("S", " ")) f.close() - cfg = util.read_simple_config_file(fn) + cfg = buildutils.read_simple_config_file(fn) self.assertEquals(cfg["a"], "b") self.assertEquals(cfg["c"], "d ") self.assertEquals(cfg["E"], "F") @@ -583,9 +618,9 @@ cfg = {"a": "b", "c": "d", "E": "F "} - util.write_simple_config_file(fn, cfg) + buildutils.write_simple_config_file(fn, cfg) - cfg2 = util.read_simple_config_file(fn) + cfg2 = buildutils.read_simple_config_file(fn) self.assertEquals(cfg2["a"], cfg["a"]) self.assertEquals(cfg2["c"], cfg["c"]) self.assertEquals(cfg2["E"], cfg["E"]) @@ -796,15 +831,25 @@ class TestNameSortKey(unittest.TestCase): def test_simple(self): for testcase in ((None, 'ZZZZZZZZZZZZZ'), - (u'', [u'']), - (u'a', [u'a']), - (u'a1a', [u'a', 1.0, u'a']), - (u'Episode_100', [u'episode_', 100.0, u'']), - (u'episode_1', [u'episode_', 1.0, u'']) + (u'', (u'',)), + (u'a', (u'a',)), + (u'a1a', (u'a', 1.0, u'a')), + (u'Episode_100', (u'episode_', 100.0, u'')), + (u'episode_1', (u'episode_', 1.0, u'')) ): self.assertEquals(util.name_sort_key(testcase[0]), testcase[1]) + def test_hashable(self): + for testcase in (None, + u'', + u'a', + u'a1a', + u'Episode_100', + u'episode_1', + ): + hash(util.name_sort_key(testcase)) + def test_sorting(self): for inlist, outlist in ( ([], []), @@ -881,3 +926,198 @@ self.verify_results() self.add_file('test.ogv', True) self.verify_results() + +class TestBackupSupportDir(MiroTestCase): + # Test backing up the support directory + def setUp(self): + MiroTestCase.setUp(self) + self.support_dir = self.make_temp_dir_path() + self.correct_files = [] + self.skip_dirs = [] + self.setup_support_dir() + + def setup_support_dir(self): + """Add objects to our fake support directory that we want around for + every test.
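The SupportDirBackup tests below pin down two behaviors: skip-listed directories stay out of the archive, and the archive stops growing near max_size. A rough standalone sketch of that walk-and-zip logic (backup_support_dir is a hypothetical helper, not the real miro.util.SupportDirBackup):

import os
import zipfile

def backup_support_dir(support_dir, skip_dirs, zip_path, max_size):
    archive = zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED)
    try:
        for dirpath, dirnames, filenames in os.walk(support_dir):
            # prune skip-listed subtrees in place so os.walk never enters them
            dirnames[:] = [d for d in dirnames
                           if os.path.join(dirpath, d) not in skip_dirs]
            for name in filenames:
                full_path = os.path.join(dirpath, name)
                archive.write(full_path,
                              os.path.relpath(full_path, support_dir))
                if os.path.getsize(zip_path) >= max_size:
                    # close enough to the cap; stop adding files
                    return zip_path
    finally:
        archive.close()
    return zip_path

Checking the on-disk size mid-write is only approximate, which is why the size-limit test tolerates a result slightly above max_size.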
+ """ + + # add log files + self.add_file_to_support_dir('miro.log') + self.add_file_to_support_dir('miro-downloader.log') + for i in range(1, 5): + self.add_file_to_support_dir('miro.log.%s' % i) + self.add_file_to_support_dir('miro-downloader.log.%s' % i) + # add database files + self.add_file_to_support_dir('sqlitedb') + self.add_file_to_support_dir('sqlitedb-journal') + self.add_file_to_support_dir('dbbackups/sqlitedb_backup_165') + self.add_file_to_support_dir('dbbackups/sqlitedb_backup_170') + self.add_file_to_support_dir('dbbackups/sqlitedb_backup_183') + # add other files + self.add_skip_dir('icon-cache') + self.add_skip_dir('cover-art') + self.add_file_to_support_dir('httpauth', should_skip=True) + self.add_file_to_support_dir('preferences.bin', should_skip=True) + for i in range(5): + self.add_file_to_support_dir('cover-art/Album-%s' % i, + should_skip=True) + self.add_file_to_support_dir('icon-cache/icon-%s' % i, + should_skip=True) + self.add_file_to_support_dir('crashes/crash-report-%i' % i) + + def add_skip_dir(self, skip_dir): + self.skip_dirs.append(os.path.join(self.support_dir, skip_dir)) + + def add_file_to_support_dir(self, path, archive_name=None, + should_skip=False, contents='FAKE DATA'): + if archive_name is None: + archive_name = path + full_path = os.path.join(self.support_dir, path) + directory = os.path.dirname(full_path) + if not os.path.exists(directory): + os.makedirs(directory) + open(full_path, "wt").write(contents) + if not should_skip: + self.correct_files.append(archive_name) + + def check_backup(self): + backup = util.SupportDirBackup(self.support_dir, self.skip_dirs, + max_size=1000000) + archive = zipfile.ZipFile(backup.fileobj(), 'r') + errors = archive.testzip() + self.assertTrue(errors is None, "Errors in the zip file: %s" % errors) + self.assertSameSet(archive.namelist(), self.correct_files) + + def test_backup(self): + self.check_backup() + + def test_extendend_chars(self): + filename = unicode_to_filename(u'\ufffdxtended Chars') + self.add_file_to_support_dir(filename, 'xtended Chars') + self.check_backup() + + def test_size_limit(self): + # create 200 kb worth of data + large_data = " " * 200000 + # add a bunch of those files + for i in xrange(10): + self.add_file_to_support_dir('big-file-%s' % i, + contents=large_data) + # check that we don't max an archive file too much bigger than our max + # size + max_size = 1000000 # 1MB + with self.allow_warnings(): + backup = util.SupportDirBackup(self.support_dir, self.skip_dirs, + max_size=max_size) + filesize = os.stat(backup.backupfile).st_size + self.assertTrue(filesize <= 1100000, + "Backup file too big. 
filesize: %s max_size: %s" % + (filesize, max_size)) + + +class MtimeInvalidatorTestCase(MiroTestCase): + + def test_valid(self): + filename = os.path.join(self.tempdir, 'mtime_test') + file(filename, 'w').write('foo') + invalidator = util.mtime_invalidator(filename) + self.assertFalse(invalidator(None)) + + def test_invalid(self): + filename = os.path.join(self.tempdir, 'mtime_test_future') + file(filename, 'w').write('foo') + invalidator = util.mtime_invalidator(filename) + mtime = os.stat(filename).st_mtime + # pretend the file was modified in the future + os.utime(filename, (mtime + 10, mtime + 10)) + self.assertTrue(invalidator(None)) + + def test_doesnotexist(self): + filename = os.path.join(self.tempdir, + 'mtime_test_doesnotexist') + invalidator = util.mtime_invalidator(filename) + self.assertTrue(invalidator(None)) + + def test_disappears(self): + filename = os.path.join(self.tempdir, + 'mtime_test_disappears') + file(filename, 'w').write('foo') + invalidator = util.mtime_invalidator(filename) + self.assertFalse(invalidator(None)) + os.unlink(filename) + self.assertTrue(invalidator(None)) + +class CacheTestCase(MiroTestCase): + + def setUp(self): + MiroTestCase.setUp(self) + self.cache = MockCache(2) + + def test_set_get(self): + self.cache.set(1, 1) + self.assertEquals(self.cache.get(1), 1) + + def test_create_new_value_get(self): + self.assertEquals(self.cache.get(1), (1, 0)) + self.assertEquals(self.cache.get(3), (3, 1)) + + def test_remove(self): + self.cache.set(1, 1) + self.cache.remove(1) + self.assertFalse(1 in self.cache.keys()) + + def test_lru(self): + self.cache.get(1) + self.cache.get(2) + self.cache.get(3) + # 1 has expired out + self.assertEquals(set(self.cache.keys()), set((2, 3))) + + def test_invalidator_set(self): + def invalidator(key): + return True + self.cache.set(1, 1, invalidator=invalidator) + # previous value is now invalid, get a new one + self.assertEquals(self.cache.get(1), (1, 0)) + + def test_invalidator_get(self): + def invalidator(key): + return True + self.assertEquals(self.cache.get(1, invalidator=invalidator), + (1, 0)) + # previous value was invalid, get a new one + self.assertEquals(self.cache.get(1, invalidator=invalidator), + (1, 1)) + + +class AlarmTestCase(MiroTestCase): + @staticmethod + def _long_function(): + time.sleep(1.5) + return True + + def _wrapped_function(self, set_signal=True): + with util.alarm(1, set_signal=set_signal): + return self._long_function() + + if hasattr(signal, 'SIGALRM'): + def test_alarm_works(self): + self.assertRaises(IOError, self._wrapped_function) + + def test_context_manager__True(self): + with util.alarm(1) as result: + self.assertTrue(result) + + def test_alarm_noop(self): + self.assertTrue(self._wrapped_function(set_signal=False)) + + def test_context_manager__False(self): + with util.alarm(0, set_signal=False) as result: + self.assertFalse(result) + +class NamedTupleTest(MiroTestCase): + def test_namedtuple(self): + MyClass = util.namedtuple("MyClass", "a b c", "My Docstring") + m = MyClass(1,2,3) + self.assertEquals(m.__doc__, "My Docstring") + self.assertEquals((m.a, m.b, m.c), (1,2,3)) diff -Nru miro-4.0.4/lib/test/watchedfoldertest.py miro-6.0/lib/test/watchedfoldertest.py --- miro-4.0.4/lib/test/watchedfoldertest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/watchedfoldertest.py 2013-04-05 16:02:42.000000000 +0000 @@ -84,11 +84,13 @@ self.copy_new_file('c.mp3') self.send_watcher_signal("added", "c.mp3") self.run_pending_timeouts() + self.runPendingIdles() self.check_items('a.mp3', 
'b.mp3', 'c.mp3') # if we already know about the file, nothing should be added self.send_watcher_signal("added", "a.mp3") self.send_watcher_signal("added", "c.mp3") self.run_pending_timeouts() + self.runPendingIdles() self.check_items('a.mp3', 'b.mp3', 'c.mp3') def test_watcher_deleted(self): @@ -100,11 +102,13 @@ self.remove_file('a.mp3') self.send_watcher_signal("deleted", "a.mp3") self.run_pending_timeouts() + self.runPendingIdles() self.check_items('b.mp3') # a deleted signal for a file not contained in our feed shouldn't crash self.send_watcher_signal("deleted", "a.mp3") self.send_watcher_signal("deleted", "never-there.mp3") self.run_pending_timeouts() + self.runPendingIdles() self.check_items('b.mp3') def test_double_update(self): diff -Nru miro-4.0.4/lib/test/widgetstateconstantstest.py miro-6.0/lib/test/widgetstateconstantstest.py --- miro-4.0.4/lib/test/widgetstateconstantstest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/widgetstateconstantstest.py 2013-04-05 16:02:42.000000000 +0000 @@ -1,26 +1,44 @@ from miro.test.framework import MiroTestCase from miro.frontends.widgets.widgetstatestore import WidgetStateStore -from miro.frontends.widgets.itemlist import SORT_KEY_MAP +from miro.frontends.widgets.itemsort import SORT_KEY_MAP class WidgetStateConstants(MiroTestCase): def setUp(self): MiroTestCase.setUp(self) self.display_types = set(WidgetStateStore.get_display_types()) - self.columns = set() - for display_type in self.display_types: - self.columns.update(WidgetStateStore.get_columns_available(display_type)) def test_view_types(self): - self.assertNotEqual(WidgetStateStore.get_list_view_type(), - WidgetStateStore.get_standard_view_type()) + # test that all view types are different + view_types = (WidgetStateStore.get_list_view_type(), + WidgetStateStore.get_standard_view_type(), + WidgetStateStore.get_album_view_type()) + + for i in range(len(view_types)): + for j in range(i + 1, len(view_types)): + self.assertNotEqual(view_types[i], view_types[j]) def test_default_view_types(self): display_types = set(WidgetStateStore.DEFAULT_VIEW_TYPE) self.assertEqual(self.display_types, display_types) def test_default_column_widths(self): - columns = set(WidgetStateStore.DEFAULT_COLUMN_WIDTHS) - self.assertEqual(self.columns, columns) + # test that all available columns have widths set for them + + # calculate all columns that are available for some display/view + # combination + available_columns = set() + display_id = None # this isn't used yet, just set it to a dummy value + for display_type in self.display_types: + for view_type in (WidgetStateStore.get_list_view_type(), + WidgetStateStore.get_standard_view_type(), + WidgetStateStore.get_album_view_type()): + available_columns.update( + WidgetStateStore.get_columns_available( + display_type, display_id, view_type)) + + # make sure that we have widths for those columns + self.assertEqual(available_columns, + set(WidgetStateStore.DEFAULT_COLUMN_WIDTHS.keys())) def test_default_sort_column(self): display_types = set(WidgetStateStore.DEFAULT_SORT_COLUMN) diff -Nru miro-4.0.4/lib/test/xhtmltest.py miro-6.0/lib/test/xhtmltest.py --- miro-4.0.4/lib/test/xhtmltest.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/test/xhtmltest.py 2013-04-05 16:02:42.000000000 +0000 @@ -89,7 +89,8 @@ # test non string items--these log a warning, but otherwise # produce nothing - self.assertEquals(urlencodedict({"a": 1}), "") + with self.allow_warnings(): + self.assertEquals(urlencodedict({"a": 1}), "") # test weird stuff
self.assertEquals(urlencodedict({"a": "&blah;\'\""}), diff -Nru miro-4.0.4/lib/theme.py miro-6.0/lib/theme.py --- miro-4.0.4/lib/theme.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/theme.py 2013-04-05 16:02:42.000000000 +0000 @@ -164,7 +164,6 @@ (u"http://revision3.com/lifehacker/feed/MP4-hd30", False), (u"http://feeds.thisamericanlife.org/talpodcast", False), (u"http://feeds.themoth.org/themothpodcast", False), - (u"http://feeds.feedburner.com/VodoPromotedWorks", False), ] for default in default_feeds: @@ -182,10 +181,11 @@ default_guides = [ (u"http://www.youtube.com", u"YouTube", False), + (u"http://www.archive.org", u"Internet Archive", False), (u"http://www.hulu.com/", u"Hulu", False), (u"http://video.pbs.org/", u"PBS", False), - (u"http://www.youtorrent.com/", u"YouTorrent", False), (u"http://www.clearbits.net/", u"ClearBits", False), + (u"http://www.amara.org/", u"Amara", False), (u'http://www.amazon.com/b?_encoding=UTF8&site-redirect=&' 'node=163856011&tag=pcultureorg-20&linkCode=ur2&camp=1789&' 'creative=9325', u"Amazon MP3 Store", True), @@ -194,6 +194,7 @@ u"%26node%3D2350149011&tag=pcultureorg-20&linkCode=ur2&camp=" u"1789&creative=9325", u"Amazon Android Store", True), (u"http://market.android.com/", u"Google Android Store", True), + (u"http://www.kqzyfj.com/click-5294129-10364534", u"eMusic", True) ] if app.debugmode: diff -Nru miro-4.0.4/lib/threadcheck.py miro-6.0/lib/threadcheck.py --- miro-4.0.4/lib/threadcheck.py 1970-01-01 00:00:00.000000000 +0000 +++ miro-6.0/lib/threadcheck.py 2013-04-05 16:02:42.000000000 +0000 @@ -0,0 +1,71 @@ +# Miro - an RSS based video player application +# Copyright (C) 2012 +# Participatory Culture Foundation +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA +# +# In addition, as a special exception, the copyright holders give +# permission to link the code of portions of this program with the OpenSSL +# library. +# +# You must obey the GNU General Public License in all respects for all of +# the code used other than OpenSSL. If you modify file(s) with this +# exception, you may extend this exception to your version of the file(s), +# but you are not obligated to do so. If you do not wish to do so, delete +# this exception statement from your version. If you delete this exception +# statement from all source files in the program, then also delete it here. + +"""threadcheck -- check that we are running in the right thread. +""" + +import threading +import traceback + +eventloop_thread = None +ui_thread = None + +class ThreadError(StandardError): + """Raised when we are running code in the wrong thread. + """ + pass + +def set_eventloop_thread(thread): + global eventloop_thread + eventloop_thread = thread + +def set_ui_thread(thread): + global ui_thread + ui_thread = thread + +def confirm_eventloop_thread(): + """Confirm that we are running in the eventloop thread. 
+ + If we aren't then a ThreadError will be raised + """ + _confirm_thread(eventloop_thread, 'Eventloop thread') + +def confirm_ui_thread(): + """Confirm that we are running in the UI thread. + + If we aren't then a ThreadError will be raised + """ + _confirm_thread(ui_thread, 'UI thread') + +def _confirm_thread(correct_thread, thread_name): + if correct_thread is None: + raise ThreadError("%s not set" % thread_name) + if correct_thread != threading.currentThread(): + raise ThreadError("Code running in %s instead of the %s" % + (threading.currentThread(), thread_name)) diff -Nru miro-4.0.4/lib/transcode.py miro-6.0/lib/transcode.py --- miro-4.0.4/lib/transcode.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/transcode.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,6 +35,7 @@ import os import select import socket +import subprocess import sys import SocketServer import threading @@ -44,6 +45,7 @@ get_segmenter_executable_path, thread_body, get_transcode_video_options, get_transcode_audio_options) +from miro.plat.popen import Popen # Transcoding # @@ -165,11 +167,9 @@ kwargs = {"stdout": subprocess.PIPE, "stderr": subprocess.PIPE, "stdin": subprocess.PIPE, - "startupinfo": util.no_console_startupinfo()} - if os.name != "nt": - kwargs["close_fds"] = True + "close_fds": True} args = [ffmpeg_exe, "-i", media_file] - handle = subprocess.Popen(args, **kwargs) + handle = Popen(args, **kwargs) # XXX unbounded read here but should be okay, ffmpeg output is finite. # note that we need to read from stderr, since that's what ffmpeg spits # out. @@ -397,9 +397,7 @@ kwargs = {"stdin": open(os.devnull, 'rb'), "stdout": subprocess.PIPE, "stderr": open(os.devnull, 'wb'), - "startupinfo": util.no_console_startupinfo()} - if os.name != "nt": - kwargs["close_fds"] = True + "close_fds": True} args = [ffmpeg_exe, "-i", self.media_file] if self.time_offset: logging.debug('transcode: start job @ %d' % self.time_offset) @@ -427,22 +425,20 @@ args += TranscodeObject.output_args logging.debug('Running command %s' % ' '.join(args)) - self.ffmpeg_handle = subprocess.Popen(args, **kwargs) + self.ffmpeg_handle = Popen(args, **kwargs) segmenter_exe = get_segmenter_executable_path() args = [segmenter_exe] address, port = self.sink.server_address args += TranscodeObject.segmenter_args + [str(port)] + # Can't use close_fds here because we need to pass the fds to + # the child kwargs = {"stdout": open(os.devnull, 'rb'), "stdin": self.ffmpeg_handle.stdout, - "stderr": open(os.devnull, 'wb'), - "startupinfo": util.no_console_startupinfo()} - # XXX Can't use this - need to pass on the child fds - #if os.name != "nt": - # kwargs["close_fds"] = True + "stderr": open(os.devnull, 'wb')} logging.debug('Running command %s' % ' '.join(args)) - self.segmenter_handle = subprocess.Popen(args, **kwargs) + self.segmenter_handle = Popen(args, **kwargs) self.sink_thread = threading.Thread(target=thread_body, args=[self.segmenter_consumer], diff -Nru miro-4.0.4/lib/util.py miro-6.0/lib/util.py --- miro-4.0.4/lib/util.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/util.py 2013-04-05 16:02:42.000000000 +0000 @@ -33,26 +33,42 @@ any other Miro modules. 
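The new threadcheck module is a cheap assertion layer: one thread registers itself at startup, and code that must stay on that thread calls a confirm function. A usage sketch (start_eventloop and touch_database are hypothetical wrappers; the threadcheck calls are the module's real API as added above):

import threading

from miro import threadcheck

def start_eventloop():
    # the eventloop thread registers itself exactly once, at startup
    threadcheck.set_eventloop_thread(threading.currentThread())

def touch_database():
    # eventloop-only code guards itself; threadcheck.ThreadError is
    # raised when called from any other thread (or before registration)
    threadcheck.confirm_eventloop_thread()

start_eventloop()
touch_database()  # fine: we are on the thread that registered itself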
""" -import sys +from hashlib import sha1 as sha +from StringIO import StringIO +import cgi +import collections +import contextlib import itertools +import logging import os import random import re import shutil -import unicodedata -try: - from hashlib import sha1 as sha -except ImportError: - from sha import sha +import socket import string +import signal +import struct +import subprocess +import sys +import tempfile +import traceback +import unicodedata import urllib -import socket -import logging +import zipfile + +from miro.clock import clock from miro import filetypes -import traceback -import subprocess -from StringIO import StringIO -from clock import clock +from miro.plat.popen import Popen + +# Do NOT import libtorrent up here. libtorrent.so is compiled with +# @executable_path-relative path dependency for the Python library +# within the Python framework. This makes Miro.app invoke properly, +# but you cannot invoke the python command line interpreter (like +# we do with the metadata extractor) because the python executable +# has a different path to the Miro binary in Miro.app, and so when +# libtorrent tries to load Python shared library it fails. +# +# See bz:18370. # Should we print out warning messages. Turn off in the unit tests. chatter = True @@ -69,6 +85,15 @@ MAX_TORRENT_SIZE = 500 * (2**10) # 500k +def bitness(): + return struct.calcsize('L') * 8 + +def bits_32(): + return bitness() == 32 + +def bits_64(): + return bitness() == 64 + def get_nice_stack(): """Get a stack trace that's a easier to read that the full one.""" stack = traceback.extract_stack() @@ -104,84 +129,6 @@ stack = [i for i in stack if 'trap_call' not in i] return stack - -CONFIG_LINE_RE = re.compile(r"^([^ ]+) *= *([^\r\n]*)[\r\n]*$") - -def read_simple_config_file(path): - """Parse a configuration file in a very simple format and return contents - as a dict. - - Each line is either whitespace or "Key = Value". Whitespace is ignored - at the beginning of Value, but the remainder of the line is taken - literally, including any whitespace. - - Note: There is no way to put a newline in a value. - """ - ret = {} - - filep = open(path, "rt") - for line in filep.readlines(): - # Skip blank lines - if not line.strip(): - continue - - # Otherwise it'd better be a configuration setting - match = CONFIG_LINE_RE.match(line) - if not match: - print ("WARNING: %s: ignored bad configuration directive '%s'" % - (path, line)) - continue - - key = match.group(1) - value = match.group(2) - if key in ret: - print "WARNING: %s: ignored duplicate directive '%s'" % (path, - line) - continue - - ret[key] = value - - return ret - -def write_simple_config_file(path, data): - """Given a dict, write a configuration file in the format that - read_simple_config_file reads. - """ - filep = open(path, "wt") - - for k, v in data.iteritems(): - filep.write("%s = %s\n" % (k, v)) - - filep.close() - -def query_revision(): - """Called at build-time to ask git for the revision of this - checkout. - - Returns the (url, revision) on success and None on failure. 
- """ - url = "unknown" - revision = "unknown" - try: - proc = subprocess.Popen(["git", "config", "--list"], - stdout=subprocess.PIPE) - info = proc.stdout.read().splitlines() - proc.stdout.close() - origline = "remote.origin.url" - info = [m for m in info if m.startswith(origline)] - if info: - url = info[0][len(origline)+1:].strip() - - proc = subprocess.Popen(["git", "rev-parse", "HEAD"], - stdout=subprocess.PIPE) - info = proc.stdout.read() - proc.stdout.close() - revision = info[0:8] - return (url, revision) - except StandardError, exc: - print "Exception thrown when querying revision: %s" % exc - return (url, revision) - class AutoFlushingStream: """Converts a stream to an auto-flushing one. It behaves in exactly the same way, except all write() calls are automatically @@ -295,23 +242,43 @@ # file is too large, bailout. (see #12301) raise ValueError("%s is not a valid torrent" % path) - import libtorrent as lt f = open(path, 'rb') try: + import libtorrent data = f.read(MAX_TORRENT_SIZE) if not data or data[0] != 'd': # File doesn't start with 'd', bailout (see #12301) raise ValueError("%s is not a valid torrent" % path) - metainfo = lt.bdecode(data) + metainfo = libtorrent.bdecode(data) try: infohash = metainfo['info'] except StandardError: raise ValueError("%s is not a valid torrent" % path) - infohash = sha(lt.bencode(infohash)).digest() + infohash = sha(libtorrent.bencode(infohash)).digest() return infohash finally: f.close() +def get_name_from_torrent_metadata(metadata): + """Get the name of a torrent + + metadata must be the contents of a torrent file. + + :returns: torrent name unicode string + :raises ValueError: metadata was not formatted properly + """ + import libtorrent + metadata_dict = libtorrent.bdecode(metadata) + if metadata_dict is None: + raise ValueError("metadata is not bencoded") + try: + return metadata_dict['info']['name'].decode('utf-8') + except KeyError, e: + raise ValueError("key missing when reading metadata: %s (%s)", e, + metadata_dict) + except UnicodeError: + raise ValueError("torrent name is not valid utf-8") + def gather_media_files(path): """Gather media files on the disk in a directory tree. This is used by the first time startup dialog. @@ -422,9 +389,14 @@ sub_basename = video_basename_root + sub_ext dest_path = os.path.join(os.path.dirname(video_path), sub_basename) if sub_path != dest_path: - if os.path.exists(dest_path): - os.remove(dest_path) - shutil.copyfile(sub_path, dest_path) + try: + if os.path.exists(dest_path): + os.remove(dest_path) + shutil.copyfile(sub_path, dest_path) + except EnvironmentError: + logging.exception('unable to remove existing subtitle file ' + 'or copy subtitle file') + dest_path = '' return dest_path def format_size_for_user(nbytes, zero_string="", with_decimals=True, @@ -517,9 +489,12 @@ """Adds TIMING and JSALERT logging levels. 
""" logging.addLevelName(15, "STACK TRACE") - logging.stacktrace = lambda msg, *args, **kargs: logging.log( - 15, "%s\n%s" % ("".join(traceback.format_stack()), msg), - *args, **kargs) + def stacktrace(msg, *args, **kwargs): + msg = "%s\n" + msg + stack = "".join(traceback.format_stack()) + args = (stack,) + args + logging.log( 15, msg, *args, **kwargs) + logging.stacktrace = stacktrace logging.addLevelName(25, "TIMING") logging.timing = lambda msg, *args, **kargs: logging.log(25, msg, @@ -588,8 +563,8 @@ return a filename, file object """ def check_func(*args, **kwargs): + result = func(*args, **kwargs) try: - result = func(*args, **kwargs) filename, fileobj = result if result is not None and type(fileobj) != file: raise ValueError('returns_file: not a valid file object') @@ -623,7 +598,7 @@ data = data.decode('ascii', 'replace') return data -def stringify(unicode_str, handleerror="xmlcharrefreplace"): +def stringify(stringobj, handleerror="xmlcharrefreplace"): """Takes a possibly unicode string and converts it to a string string. This is required for some logging especially where the things being logged are filenames which can be Unicode in the @@ -640,11 +615,15 @@ This is not the inverse of unicodify! """ - if isinstance(unicode_str, unicode): - return unicode_str.encode("ascii", handleerror) - if not isinstance(unicode_str, str): - return str(unicode_str) - return unicode_str + if isinstance(stringobj, unicode): + return stringobj.encode("ascii", handleerror) + if isinstance(stringobj, str): + # make sure bytestrings are ASCII + return stringobj.decode('ascii', 'replace').encode('ascii', + 'replace') + else: + # convert objects to strings, then ensure they are ASCII + return stringify(str(stringobj)) def quote_unicode_url(url): """Quote international characters contained in a URL according to @@ -659,23 +638,6 @@ quoted_chars.append(c) return u''.join(quoted_chars) -def no_console_startupinfo(): - """Returns the startupinfo argument for subprocess.Popen so that - we don't open a console window. On platforms other than windows, - this is just None. On windows, it's some win32 silliness. - """ - if subprocess.mswindows: - startupinfo = subprocess.STARTUPINFO() - # XXX temporary: STARTF_USESHOWWINDOW is in a different location - # as of Python 2.6.6 - try: - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - except AttributeError: - startupinfo.dwFlags |= subprocess._subprocess.STARTF_USESHOWWINDOW - return startupinfo - else: - return None - def call_command(*args, **kwargs): """Call an external command. If the command doesn't exit with status 0, or if it outputs to stderr, an exception will be raised. @@ -688,16 +650,18 @@ then this returns (retcode, stdout, stderr). This implies ignore_stderr is True, so you don't need to explicitly state that, too. + :param env: dict. Environment to pass to subprocess.Popen """ ignore_stderr = kwargs.pop('ignore_stderr', False) return_everything = kwargs.pop('return_everything', False) + env = kwargs.pop('env', None) if kwargs: raise TypeError('extra keyword arguments: %s' % kwargs) - pipe = subprocess.Popen(args, stdout=subprocess.PIPE, - stdin=subprocess.PIPE, stderr=subprocess.PIPE, - startupinfo=no_console_startupinfo()) + pipe = Popen(args, stdout=subprocess.PIPE, + stdin=subprocess.PIPE, stderr=subprocess.PIPE, + env=env) stdout, stderr = pipe.communicate() if return_everything: return (pipe.returncode, stdout, stderr) @@ -1062,7 +1026,7 @@ """ Returns true if this is a magnet link which can be handled by Miro. 
""" - return MAGNET_MATCH_RE.match(uri) and info_hash_from_magnet(uri) + return bool(MAGNET_MATCH_RE.match(uri) and info_hash_from_magnet(uri)) MAGNET_INFO_HASH_MATCH = re.compile(r'(?<=btih:)[a-zA-Z0-9]+') @@ -1075,6 +1039,19 @@ else: return None +def title_from_magnet(uri): + """Get the title from a magnet URI, or None if there is not one + """ + try: + query = uri[uri.find('?')+1:] + query_parsed = cgi.parse_qs(query) + if 'dn' in query_parsed: + return query_parsed['dn'][0] + else: + return None + except StandardError: + logging.warn("Error parsing title from magnet URI", exc_info=True) + def _strip_accents(text): nfkd_form = unicodedata.normalize('NFKD', unicode(text)) return u"".join([c for c in nfkd_form if not unicodedata.combining(c)]) @@ -1104,10 +1081,10 @@ return "ZZZZZZZZZZZZZ" text = text.lower() if text.startswith("a "): - text = text[2:] + text = text[2:] + ', a' elif text.startswith("the "): - text = text[4:] - return [_trynum(c) for c in NUM_RE.split(text)] + text = text[4:] + ', the' + return tuple(_trynum(c) for c in NUM_RE.split(text)) LOWER_TRANSLATE = string.maketrans(string.ascii_uppercase, string.ascii_lowercase) @@ -1135,27 +1112,65 @@ def log_total_time(self): logging.timing("total time: %0.3f", clock() - self.start_time) +def mtime_invalidator(path): + """ + Returns a function which returns True if the mtime of path is greater than + it was when this function was initially called. Useful as an invalidator + for Cache. + """ + path = os.path.abspath(path) + try: + mtime = os.stat(path).st_mtime + except EnvironmentError: + # if the file doesn't exist or has a problem when we start, the cache + # will always be invalid + return lambda x: True + + def invalidator(key): + try: + return os.stat(path).st_mtime > mtime + except EnvironmentError: + # if the file disappears, the cache is also invalid + return True + + return invalidator + class Cache(object): def __init__(self, size): self.size = size self.dict = {} self.counter = itertools.count() self.access_times = {} + self.invalidators = {} - def get(self, key): + def get(self, key, invalidator=None): if key in self.dict: - self.access_times[key] = self.counter.next() - return self.dict[key] - else: - value = self.create_new_value(key) - self.set(key, value) - return value + existing_invalidator = self.invalidators[key] + if (existing_invalidator is None or + not existing_invalidator(key)): + self.access_times[key] = self.counter.next() + return self.dict[key] + + value = self.create_new_value(key, invalidator=invalidator) + self.set(key, value, invalidator=invalidator) + return value - def set(self, key, value): + def set(self, key, value, invalidator=None): if len(self.dict) == self.size: self.shrink_size() self.access_times[key] = self.counter.next() self.dict[key] = value + self.invalidators[key] = invalidator + + def remove(self, key): + if key in self.dict: + del self.dict[key] + del self.access_times[key] + if key in self.invalidators: + del self.invalidators[key] + + def keys(self): + return self.dict.iterkeys() def shrink_size(self): # shrink by LRU @@ -1163,12 +1178,232 @@ to_sort.sort(key=lambda m: m[1]) new_dict = {} new_access_times = {} + new_invalidators = {} latest_times = to_sort[len(self.dict) // 2:] for (key, time) in latest_times: new_dict[key] = self.dict[key] + new_invalidators[key] = self.invalidators[key] new_access_times[key] = time self.dict = new_dict self.access_times = new_access_times - def create_new_value(self, val): + def create_new_value(self, val, invalidator=None): raise 
+
+def all_subclasses(cls):
+    """Find all subclasses of a given new-style class.
+
+    This method also returns sub-subclasses, etc.
+    """
+    for subclass in cls.__subclasses__():
+        yield subclass
+        for sub_subclass in all_subclasses(subclass):
+            yield sub_subclass
+
+def import_module(module_name):
+    """Import a module and return it.
+
+    This function works like __import__, except it returns the last
+    module named, rather than the first.  If you import 'foo.bar',
+    __import__ will return foo, but import_module will return bar.
+    """
+    mod = __import__(module_name)
+    parts = module_name.split('.')
+    for part in parts[1:]:
+        mod = getattr(mod, part)
+    return mod
+
+def make_file_url(path):
+    """Get a file:// URL for a file path."""
+    if isinstance(path, unicode):
+        path = path.encode('utf-8')
+    path_part = urllib.pathname2url(os.path.abspath(path))
+    # On windows pathname2url adds a leading "///" to absolute paths.
+    # This is pretty weird and annoying, but easy to fix
+    path_part = re.sub(r'^/+', '', path_part)
+    # Always return str.  pathname2url() returns a str, and from that
+    # point on there is no unicode to trigger a unicode type upgrade.
+    return 'file:///' + path_part
+
+def split_values_for_sqlite(value_list):
+    """Split a list of values into chunks that SQL can handle.
+
+    The cursor.execute() method can only handle 999 values at once;
+    this method splits long lists into chunks where each chunk is safe
+    to feed to sqlite.
+    """
+    CHUNK_SIZE = 990 # use 990 just to be on the safe side.
+    for start in xrange(0, len(value_list), CHUNK_SIZE):
+        yield value_list[start:start+CHUNK_SIZE]
+
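For example, deleting a large batch of rows without tripping SQLite's bound-variable limit (the cursor and table here are hypothetical):

    for chunk in split_values_for_sqlite(range(5000)):
        placeholders = ', '.join('?' for _ in chunk)
        cursor.execute("DELETE FROM item WHERE id IN (%s)" % placeholders,
                       chunk)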
" + "Quitting after %s bytes", total_size) + break + archive.close() + logging.info("Support directory backed up to %s (%d bytes)", + self.backupfile, os.path.getsize(self.backupfile)) + + def filter_directories(self, root, directories): + """Remove directories from the list that os.walk() passes us.""" + filtered = [d for d in directories + if not self.should_skip_directory(os.path.join(root, d))] + # os.walk() wants us to change directories in-place + directories[:] = filtered + + def should_skip_directory(self, directory): + for skip_dir in self.skip_dirs: + if directory.startswith(skip_dir): + return True + return False + + def should_skip_file(self, directory, filename): + if os.path.islink(os.path.join(directory, filename)): + return True + if filename == 'httpauth': + # don't send http passwords over the internet + return True + if filename == 'preferences.bin': + # On windows, don't send the config file. Other + # platforms don't handle config the same way, so we + # don't need to worry about them + return True + return False + + def ensure_ascii_filename(self, relpath): + """Ensure that a path we are about to archive is ASCII.""" + + # NOTE: zipfiles in general, and especially the python zipfile module + # don't seem to support them well. The only filenames we should be + # sending are ASCII anyways, so let's just use a hack here to force + # things. See the "zipfile and unicode filenames" thread here: + # http://mail.python.org/pipermail/python-dev/2007-June/thread.html + if isinstance(relpath, unicode): + return relpath.encode('ascii', 'ignore') + else: + return relpath.decode('ascii', 'ignore').encode('ascii', 'ignore') + + def fileobj(self): + """Get a file object for the archive file.""" + return open(self.backupfile, "rb") + +def next_free_filename_candidates(path): + """Generates candidate names for next_free_filename.""" + + # try unmodified path first + yield path + # add stuff to the filename to try to make it unique + + dirname, filename = os.path.split(path) + if not filename: + raise ValueError("%s is a directory name" % path) + basename, ext = os.path.splitext(filename) + count = 1 + while True: + filename = "%s.%s%s" % (basename, count, ext) + yield os.path.join(dirname, filename) + count += 1 + if count > 1000: + raise ValueError("Can't find available filename for %s" % path) + +@returns_file +def next_free_filename(name): + """Finds a filename that's unused and similar the the file we want + to download and returns an open file handle to it. + """ + check_f(name) + mask = os.O_CREAT | os.O_EXCL | os.O_RDWR + # On Windows we need to pass in O_BINARY, fdopen() even with 'b' + # specified is not sufficient. + if sys.platform == 'win32': + mask |= os.O_BINARY + + candidates = next_free_filename_candidates(name) + while True: + # Try with the name supplied. + newname = candidates.next() + try: + fd = os.open(newname, mask) + fp = os.fdopen(fd, 'wb') + return newname, fp + except OSError: + continue + return (newname, fp) + +def next_free_directory_candidates(name): + """Generates candidate names for next_free_directory.""" + yield name + count = 1 + while True: + yield "%s.%s" % (name, count) + count += 1 + if count > 1000: + raise ValueError("Can't find available directory for %s" % name) + +@returns_filename +def next_free_directory(name): + """Finds a unused directory name using name as a base. + + This method doesn't create the directory, it just finds an an-used one. 
+ """ + candidates = next_free_directory_candidates(name) + while True: + candidate = candidates.next() + if not os.path.exists(candidate): + return candidate + +@contextlib.contextmanager +def alarm(timeout, set_signal=True): + def alarm_handler(signum, frame): + raise IOError('timeout after %i seconds' % timeout) + if set_signal: + set_signal = hasattr(signal, 'SIGALRM') + if set_signal: + signal.signal(signal.SIGALRM, alarm_handler) + signal.alarm(timeout) + yield set_signal + if set_signal: + signal.alarm(0) + +def supports_alarm(): + return hasattr(signal, 'SIGALRM') + +def namedtuple(class_name, fields, docstring=None): + """Version of collections.namedtuple that adds docstring support.""" + # make the base class using the standard namedtuple + nt = collections.namedtuple(class_name + "Tuple", fields) + # make a subclass that adds the docstring and doesn't add a per-instance + # dict. + dct = { '__slots__': () } + if docstring: + dct['__doc__'] = docstring + return type(class_name, (nt,), dct) diff -Nru miro-4.0.4/lib/widgetstate.py miro-6.0/lib/widgetstate.py --- miro-4.0.4/lib/widgetstate.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/widgetstate.py 2013-04-05 16:02:42.000000000 +0000 @@ -35,6 +35,8 @@ STANDARD_VIEW = 0 LIST_VIEW = 1 +# skip over 2 since that's used in the frontend for CUSTOM_VIEW +ALBUM_VIEW = 3 class DisplayState(DDBObject): """Properties that are shared across all TableViews for a Display, or only @@ -47,13 +49,10 @@ self.shuffle = None self.repeat = None self.selected_view = None - self.active_filters = None - # ListView properties - self.list_view_columns = None - self.list_view_widths = None self.selection = None - self.sort_state = None self.last_played_item_id = None + self.active_filters = None + self.sort_state = None class ViewState(DDBObject): """Properties that need to be stored for each TableView @@ -63,6 +62,8 @@ self.display_id = key[1] self.view_type = key[2] self.scroll_position = None + self.columns_enabled = None + self.column_widths = None class GlobalState(DDBObject): """Properties that apply globally""" @@ -76,6 +77,7 @@ def setup_new(self): self.item_details_expanded = { + ALBUM_VIEW: False, LIST_VIEW: True, STANDARD_VIEW: False, } diff -Nru miro-4.0.4/lib/workerprocess.py miro-6.0/lib/workerprocess.py --- miro-4.0.4/lib/workerprocess.py 2011-12-22 14:45:47.000000000 +0000 +++ miro-6.0/lib/workerprocess.py 2013-04-05 16:02:42.000000000 +0000 @@ -34,58 +34,395 @@ includes feedparser, but we could pretty easily extend this to other tasks. 
""" +from collections import deque, namedtuple import itertools +import logging +import threading +from miro import clock +from miro import eventloop from miro import feedparserutil +from miro import filetags +from miro import messagetools +from miro import moviedata from miro import subprocessmanager from miro import util +from miro.plat import utils + +class SubprocessTimeoutError(StandardError): + """A task failed because the subprocess didn't respond in enough time.""" + # define messages/handlers -class TaskMessage(subprocessmanager.SubprocessMessage): +class WorkerMessage(subprocessmanager.SubprocessMessage): + pass + +class WorkerStartupInfo(WorkerMessage): + def __init__(self, thread_count): + self.thread_count = thread_count + +class TaskMessage(WorkerMessage): _id_counter = itertools.count() + priority = 0 def __init__(self): subprocessmanager.SubprocessMessage.__init__(self) self.task_id = TaskMessage._id_counter.next() class FeedparserTask(TaskMessage): + priority = 20 def __init__(self, html): TaskMessage.__init__(self) self.html = html +class MovieDataProgramTask(TaskMessage): + priority = 10 + def __init__(self, source_path, screenshot_directory): + TaskMessage.__init__(self) + self.source_path = source_path + self.screenshot_directory = screenshot_directory + + def __str__(self): + return 'MovieDataProgramTask (path: %s)' % self.source_path + +class MutagenTask(TaskMessage): + priority = 10 + def __init__(self, source_path, cover_art_directory): + TaskMessage.__init__(self) + self.source_path = source_path + self.cover_art_directory = cover_art_directory + + def __str__(self): + return 'MutagenTask (path: %s)' % self.source_path + +class CancelFileOperations(TaskMessage): + """Cancel mutagen/movie data tasks for a set of path.""" + priority = 0 + def __init__(self, paths): + TaskMessage.__init__(self) + self.paths = paths + +class WorkerProcessReady(subprocessmanager.SubprocessResponse): + pass + class TaskResult(subprocessmanager.SubprocessResponse): def __init__(self, task_id, result): self.task_id = task_id self.result = result +class MovieDataTaskStatus(subprocessmanager.SubprocessResponse): + """Report when we are handling movie data tasks. + + This is sent to the main process before and after we handle a movie data + task. The movie data code has some change of just hanging, and we use + this message in the main process to catch that. + """ + + def __init__(self, task_id): + self.task_id = task_id + class WorkerProcessHandler(subprocessmanager.SubprocessHandler): + def __init__(self): + subprocessmanager.SubprocessHandler.__init__(self) + self.threads = [] + self.task_queue = WorkerTaskQueue() + self.main_thread_tasks = deque() + self.supports_alarm = util.supports_alarm() + def call_handler(self, method, msg): try: - # normally we send the result of our handler method back - rv = method(msg) - except StandardError, e: - # if something breaks, we send the Exception back - rv = e - TaskResult(msg.task_id, rv).send_to_main_process() + if isinstance(msg, CancelFileOperations): + # handle this message as soon as we can. + handle_task(method, msg) + elif isinstance(msg, MovieDataProgramTask): + # we have to handle this message on this thread, since + # QtKit will break if we use it on any thread except the main + # one. 
Put it in main_thread_tasks and handle once + # there's no more tasks waiting in to be processed + self.main_thread_tasks.append((method, msg)) + elif isinstance(msg, MutagenTask): + # If we're using the alarm, then MutagenTasks need to run in + # the main thread as well. Signals aren't support outside of + # the main thread. + if self.supports_alarm: + self.main_thread_tasks.append((method, msg)) + else: + self.task_queue.add_task(method, msg) + elif isinstance(msg, TaskMessage): + self.task_queue.add_task(method, msg) + else: + method(msg) + except StandardError: + subprocessmanager.send_subprocess_error_for_exception() + + def get_task_from_queue(self, queue): + # handle movie data tasks if no more tasks are coming in right now + while queue.empty() and self.main_thread_tasks: + method, msg = self.main_thread_tasks.popleft() + if isinstance(msg, MutagenTask): + # if we're here, it means we want to use the signals + handle_task(self.handle_mutagen_task_with_alarm, msg) + continue + handle_task(method, msg) + + # block waiting for the next message. We know that one of the + # following is True + # - The queue has a message in it and therefore + # get_task_from_queue() will be called again soon + # - main_thread_tasks is empty + # + # So we don't have to worry about the blocking call preventing the + # MovieDataProgramTasks from running + return queue.get() + + def on_shutdown(self): + self.task_queue.shutdown() + + def handle_worker_startup_info(self, msg): + for i in xrange(msg.thread_count): + t = threading.Thread(target=worker_thread, args=(self.task_queue,)) + t.daemon = True + t.start() + self.threads.append(t) + WorkerProcessReady().send_to_main_process() + + def handle_cancel_file_operations(self, msg): + path_set = set(msg.paths) + self.task_queue.cancel_file_operations(path_set) + # we need to handle main_thread_tasks, since those skip the task + # queue + filtered_tasks = deque(t for t in self.main_thread_tasks + if t.source_path not in path_set) + self.main_thread_tasks = filtered_tasks + return None + + # handle_movie_data_program_task gets called in the main thread, unlike + # all other task handler methods + + def handle_movie_data_program_task(self, msg): + return moviedata.process_file(msg.source_path, + msg.screenshot_directory) + + + # NOTE: all of the handle_*_task() methods below get called in one of our + # worker threads, so they should only call thread-safe functions def handle_feedparser_task(self, msg): - parsed_feed = feedparserutil.parse(msg.html) + parsed_feed = feedparserutil.parse(msg.html) # bozo_exception is sometimes C object that is not picklable. We # don't use it anyways, so just unset the value parsed_feed['bozo_exception'] = None return parsed_feed + def handle_mutagen_task(self, msg): + return filetags.process_file(msg.source_path, msg.cover_art_directory) + + def handle_mutagen_task_with_alarm(self, msg): + with util.alarm(2): + return self.handle_mutagen_task(msg) + +class _SinglePriorityQueue(object): + """Manages tasks at a single priority for WorkerTaskQueue + + For any given priority we want to do the following: + - If there is more than one TaskMessage class with that priority, we + want to alternate handling tasks between them. + - For a given TaskMessage class, we want to handle tasks FIFO. 
+ """ + def __init__(self, priority): + self.priority = priority + # map message classes to FIFO deques for that class + self.fifo_map = {} + # set up our structure for each task with our priority + for cls in util.all_subclasses(TaskMessage): + if cls.priority == priority: + self.fifo_map[cls] = deque() + # fifo_cycler is used to cycle through each fifo + self.fifo_cycler = itertools.cycle(self.fifo_map.values()) + self.fifo_count = len(self.fifo_map) + + def add_task(self, handler_method, msg): + self.fifo_map[msg.__class__].append((handler_method, msg)) + + def get_next_task(self): + for i, fifo in enumerate(self.fifo_cycler): + if i >= self.fifo_count: + # no tasks in any of our fifos + return None + if fifo: + return fifo.popleft() + + def filter_messages(self, filterfunc, message_class): + """Remove messages from the queue + + :param filterfunc: function to determine if messages should stay + :param message_class: type of messages to filter + """ + fifo = self.fifo_map[message_class] + new_items = tuple((method, msg) for (method, msg) in fifo + if filterfunc(msg)) + fifo.clear() + fifo.extend(new_items) + +class WorkerTaskQueue(object): + """Store the pending tasks for the worker process. + + WorkerTaskQueue is responsible for storing task info for each pending + task, and getting the next one in order of priority. + + It's shared between the main subprocess thread, and all worker threads, so + all methods need to be thread-safe. + """ + def __init__(self): + self.should_quit = False + self.condition = threading.Condition() + # queues_by_priority contains a _SinglePriorityQueue for each priority + # level, ordered from highest to lowest priority + self.queues_by_priority = [] + # queue_map maps priority levels to queues + self.queue_map = {} + self._init_queues() + + def _init_queues(self): + all_prorities = set(cls.priority for + cls in util.all_subclasses(TaskMessage)) + for priority in sorted(all_prorities, reverse=True): + queue = _SinglePriorityQueue(priority) + self.queues_by_priority.append(queue) + self.queue_map[queue.priority] = queue + + def add_task(self, handler_method, msg): + """Add a new task to the queue. """ + with self.condition: + self.queue_map[msg.priority].add_task(handler_method, msg) + self.condition.notify() + + def get_next_task(self): + """Get the next task to be processed from the queue. + + This method will block if there are no tasks ready in the queue. + + It will return the tuple (handler_method, message) once there is + something ready. The worker thread should call + handler_method(message) to run the task, and send back the result to + the main process. + + get_next_task() returns None if the worker thread should quit. + """ + with self.condition: + if self.should_quit: + return None + next_task_info = self._get_next_task() + if next_task_info is not None: + return next_task_info + # no tasks yet, need to wait for more + self.condition.wait() + if self.should_quit: + return None + return self._get_next_task() + + def _get_next_task(self): + for queue in self.queues_by_priority: + next_for_queue = queue.get_next_task() + if next_for_queue is not None: + return next_for_queue + # no tasks in any of our queues + return None + + def cancel_file_operations(self, path_set): + """Cancels all mutagen/movie data tasks for a list of paths.""" + # Acquire our lock as soon as possible. We want to prevent other + # tasks from getting tasks, since they may be about to deleted. 
+    def cancel_file_operations(self, path_set):
+        """Cancels all mutagen/movie data tasks for a list of paths."""
+        # Acquire our lock as soon as possible.  We want to prevent
+        # other threads from getting tasks, since they may be about to
+        # be deleted.
+        with self.condition:
+            def filter_func(msg):
+                return msg.source_path not in path_set
+            for cls in (MutagenTask, MovieDataProgramTask):
+                queue = self.queue_map[cls.priority]
+                queue.filter_messages(filter_func, cls)
+
+    def shutdown(self):
+        # should be safe to set this without the lock, since it's a
+        # boolean, but we need the lock for notify_all() anyway
+        with self.condition:
+            self.should_quit = True
+            self.condition.notify_all()
+
+def handle_task(handler_method, msg):
+    """Process a TaskMessage."""
+    # If we are running movie data, send the MovieDataTaskStatus
+    # message.  This starts a timer on the frontend to kill this process
+    # if movie data hangs
+    if isinstance(msg, MovieDataProgramTask):
+        MovieDataTaskStatus(msg.task_id).send_to_main_process()
+    try:
+        # normally we send the result of our handler method back
+        logging.info("starting task: %s", msg)
+        rv = handler_method(msg)
+    except StandardError, e:
+        # if something breaks, we send the Exception back
+        rv = e
+        logging.info("task error: %s (%s)", msg, e)
+    else:
+        logging.info("task finished: %s", msg)
+    # Send the MovieDataTaskStatus before the task result to avoid a
+    # race where the main thread gets a result, but then the timeout for
+    # movie data expires
+    if isinstance(msg, MovieDataProgramTask):
+        MovieDataTaskStatus(None).send_to_main_process()
+
+    TaskResult(msg.task_id, rv).send_to_main_process()
+
+def worker_thread(task_queue):
+    """Thread loop in the worker process."""
+    while True:
+        next_task = task_queue.get_next_task()
+        if next_task is None:
+            break
+        handle_task(*next_task)
+
+MovieDataTaskStatusInfo = namedtuple('MovieDataTaskStatusInfo',
+                                     'task_id start_time')
+
 class WorkerProcessResponder(subprocessmanager.SubprocessResponder):
+    def __init__(self):
+        subprocessmanager.SubprocessResponder.__init__(self)
+        self.worker_ready = False
+        self.startup_message = None
+        self.movie_data_task_status = None
+
     def on_startup(self):
-        _task_queue.run_pending_tasks()
+        self.startup_message.send_to_process()
+        _miro_task_queue.run_pending_tasks()
+
+    def on_shutdown(self):
+        # do the tasks that we've already gotten
+        self.process_handler_queue()
+        self.worker_ready = False
+
+    def on_restart(self):
+        self.worker_ready = False
 
     def handle_task_result(self, msg):
-        _task_queue.process_result(msg)
+        _miro_task_queue.process_result(msg)
+
+    def handle_worker_process_ready(self, msg):
+        self.worker_ready = True
 
-# Manage task queue
+    def handle_movie_data_task_status(self, msg):
+        if msg.task_id is not None:
+            self.movie_data_task_status = MovieDataTaskStatusInfo(
+                msg.task_id, clock.clock())
+        else:
+            self.movie_data_task_status = None
 
-class TaskQueue(object):
+class MiroTaskQueue(object):
+    """Store the pending tasks for the main process.
+
+    Responsible for:
+    - Storing callbacks/errbacks for each pending task
+    - Calling the callback/errback for a finished task
+    """
     def __init__(self):
         # maps task_ids to (msg, callback, errback) tuples
         self.tasks_in_progress = {}
@@ -103,23 +440,66 @@
         """Process a TaskResult from our subprocess."""
         msg, callback, errback = self.tasks_in_progress.pop(reply.task_id)
         if isinstance(reply.result, Exception):
-            errback(reply.result)
+            errback(msg, reply.result)
         else:
-            callback(reply.result)
+            callback(msg, reply.result)
 
     def run_pending_tasks(self):
         """Rerun all tasks in the queue."""
         for msg, callback, errback in self.tasks_in_progress.values():
             msg.send_to_process()
 
-_task_queue = TaskQueue()
+_miro_task_queue = MiroTaskQueue()
 
 # Manage subprocess
 
-_subprocess_manager = subprocessmanager.SubprocessManager(TaskMessage,
-        WorkerProcessResponder(), WorkerProcessHandler)
+class WorkerSubprocessManager(subprocessmanager.SubprocessManager):
+    def __init__(self):
+        subprocessmanager.SubprocessManager.__init__(self, WorkerMessage,
+                WorkerProcessResponder(), WorkerProcessHandler)
+        self.check_hung_timeout = None
+
+    def _start(self):
+        subprocessmanager.SubprocessManager._start(self)
+        self.schedule_check_subprocess_hung()
+
+    def shutdown(self):
+        self.cancel_check_subprocess_hung()
+        subprocessmanager.SubprocessManager.shutdown(self)
+
+    def restart(self, clean=False):
+        self.cancel_check_subprocess_hung()
+        self.responder.movie_data_task_status = None
+        subprocessmanager.SubprocessManager.restart(self, clean)
+
+    def schedule_check_subprocess_hung(self):
+        self.check_hung_timeout = eventloop.add_timeout(90,
+                self.check_subprocess_hung, 'check workerprocess hung')
+
+    def cancel_check_subprocess_hung(self):
+        if self.check_hung_timeout is not None:
+            self.check_hung_timeout.cancel()
+            self.check_hung_timeout = None
+
+    def check_subprocess_hung(self):
+        task_status = self.responder.movie_data_task_status
+
+        if (task_status is not None and
+            clock.clock() - task_status.start_time > 90):
+            logging.warn("Worker process is hanging on a movie data task.")
+            error_result = TaskResult(task_status.task_id,
+                                      SubprocessTimeoutError())
+            self.responder.handle_task_result(error_result)
+            self.restart()
+        else:
+            self.schedule_check_subprocess_hung()
+
+_subprocess_manager = WorkerSubprocessManager()
 
-def startup():
+def startup(thread_count=3):
     """Startup the worker process."""
+    startup_msg = WorkerStartupInfo(thread_count)
+    _subprocess_manager.responder.startup_message = startup_msg
     _subprocess_manager.start()
 
 def shutdown():
@@ -127,7 +507,20 @@
     _subprocess_manager.shutdown()
 
 # API for sending tasks
-def run_feedparser(html, callback, errback):
-    """Run feedparser on a chunk of html."""
-    msg = FeedparserTask(html)
-    _task_queue.add_task(msg, callback, errback)
+def send(msg, callback, errback):
+    """Send a message to the worker process.
+
+    :param msg: Message to send
+    :param callback: function to call on success
+    :param errback: function to call on error
+    """
+    _miro_task_queue.add_task(msg, callback, errback)
+
+def cancel_tasks_for_files(paths):
+    """Cancel mutagen and movie data tasks for a list of paths."""
+    msg = CancelFileOperations(paths)
+    # we don't care about the return value, but we still want to use the
+    # task queue to queue up this message.
+    def null_callback(msg, result):
+        pass
+    send(msg, null_callback, null_callback)
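From the main process, everything now goes through send(); note that callbacks and errbacks receive the original message as their first argument. A hypothetical caller:

    def feed_parsed(msg, result):
        print 'parsed %d entries' % len(result['entries'])

    def feed_failed(msg, error):
        logging.warn("feedparser task failed in worker: %s", error)

    send(FeedparserTask(html), feed_parsed, feed_failed)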
diff -Nru miro-4.0.4/lib/xhtmltools.py miro-6.0/lib/xhtmltools.py
--- miro-4.0.4/lib/xhtmltools.py	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/lib/xhtmltools.py	2013-04-05 16:02:42.000000000 +0000
@@ -37,6 +37,7 @@
 from HTMLParser import HTMLParser, HTMLParseError
 import random
 import logging
+import collections
 
 class XHTMLifier(HTMLParser):
     """Very simple parser to convert HTML to XHTML
@@ -52,7 +53,7 @@
             self.output = u''
         else:
             self.output = ''
-        self.stack = []
+        self.stack = collections.deque()
         self.filter_font_tags = filter_font_tags
         self.feed(data)
         self.close()
@@ -174,8 +175,8 @@
         return '%s<?xml version="1.0" encoding="%s"?>%s' % (xml_decl,
                                                             charset,
                                                             the_rest)
 
-HTML_HEADER_RE = re.compile(
-    u"^(.*)\<\s*head\s*(.*?)\s*\>(.*?)\</\s*head\s*\>(.*)", re.I | re.S)
+HTML_HEADER_OPEN_RE = re.compile(r'<\s*head\s*(.*?)\s*>')
+HTML_HEADER_CLOSE_RE = re.compile(r'</\s*head\s*>')
 
 def fix_html_header(data, charset):
-    """Adds a <meta http-equiv="Content-Type"> tag to the <head>
-    section of an HTML document.
-    """
-    return HTML_HEADER_RE.sub(
-        (r'\1<head\2><meta http-equiv="Content-Type" '
-         r'content="text/html; charset=%s">\3</head>\4' % charset), data)
+    """Adds a <meta http-equiv="Content-Type"> tag to the <head>
+    section of an HTML document.
+    """
+    head_open_match = HTML_HEADER_OPEN_RE.search(data)
+    head_close_match = HTML_HEADER_CLOSE_RE.search(data)
+    if head_open_match is None or head_close_match is None:
+        return data
+    before_head = data[:head_open_match.start()]
+    head_open_tag = head_open_match.group(0)
+    head_contents = data[head_open_match.end():head_close_match.start()]
+    head_close_tag = head_close_match.group(0)
+    after_head = data[head_close_match.end():]
+    content_type_meta = ('<meta http-equiv="Content-Type" '
+                         'content="text/html; charset=%s">\n' % charset)
+    return ''.join((before_head,
+                    head_open_tag,
+                    content_type_meta,
+                    head_contents,
+                    head_close_tag,
+                    after_head))
 
 def url_encode_dict(orig):
     """Converts a Python dictionary to data suitable for a POST or GET
diff -Nru miro-4.0.4/linux/clean.sh miro-6.0/linux/clean.sh
--- miro-4.0.4/linux/clean.sh	2011-12-22 14:45:47.000000000 +0000
+++ miro-6.0/linux/clean.sh	2013-04-05 16:02:42.000000000 +0000
@@ -2,6 +2,15 @@
 
 rm -rf build dist
 rm -rf miro.1.gz miro.real.1.gz
-rm plat/xlibhelper.c
-rm ../lib/frontends/widgets/gtk/pygtkhacks.c
+rm -f plat/xlibhelper.c
+rm -f ../lib/frontends/widgets/gtk/pygtkhacks.c
+rm -f ../lib/frontends/widgets/gtk/webkitgtkhacks.c
+rm -f ../lib/frontends/widgets/infolist/infolist.c
 rm -rf tmp
+rm -f plat/frontends/widgets/windowcreator.cpp
+rm -f plat/frontends/widgets/pluginsdir.cpp
+rm -f plat/frontends/widgets/mozprompt.c
+rm -f plat/frontends/widgets/mozprompt.h
+rm -f plat/frontends/widgets/httpobserver.c
+rm -f contrib/echoprint-codegen/src/libcodegen.so.4.1.1
+rm -f contrib/echoprint-codegen/src/*.o
diff -Nru miro-4.0.4/linux/contrib/echoprint-codegen/AUTHORS miro-6.0/linux/contrib/echoprint-codegen/AUTHORS
--- miro-4.0.4/linux/contrib/echoprint-codegen/AUTHORS	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/linux/contrib/echoprint-codegen/AUTHORS	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,26 @@
+See the LICENSE file for important license information.
+
+Whitening, SubbandAnalysis, Fingerprint
+Dan Ellis
+Brian Whitman
+
+AudioBufferInput, AudioStreamInput, Codegen, Common, File, MatrixUtility, Metadata
+Tristan Jehan
+Paul Lamere
+Jason Sundram
+Brian Whitman
+
+Murmurhash2
+Austin Appleby
+
+Base64
+Rene Nyffenegger
+
+Contributors
+Alastair Porter
+efsavage
+alsuren
+artgillespie
+yhorng
+divan
+
diff -Nru miro-4.0.4/linux/contrib/echoprint-codegen/examples/lookup.py miro-6.0/linux/contrib/echoprint-codegen/examples/lookup.py
--- miro-4.0.4/linux/contrib/echoprint-codegen/examples/lookup.py	1970-01-01 00:00:00.000000000 +0000
+++ miro-6.0/linux/contrib/echoprint-codegen/examples/lookup.py	2013-04-05 16:02:42.000000000 +0000
@@ -0,0 +1,39 @@
+#!/usr/bin/python
+
+# This script takes an audio file and performs an echoprint lookup on it.
+# Requirements: pyechonest >= 4.2.15  http://code.google.com/p/pyechonest/
+#               The echoprint-codegen binary (run make from ../src)
+#               an Echo Nest API key
+
+import sys
+import os
+
+import pyechonest.config as config
+import pyechonest.song as song
+
+config.CODEGEN_BINARY_OVERRIDE = os.path.abspath("../echoprint-codegen")
+
+# Put your API key in a shell variable ECHO_NEST_API_KEY, or put it here
+# config.ECHO_NEST_API_KEY='KEY HERE'
+
+def lookup(file):
+    # Note that song.identify reads just the first 30 seconds of the file
+    fp = song.util.codegen(file)
+    if len(fp) and "code" in fp[0]:
+        # The version parameter to song/identify indicates the use of
+        # echoprint
+        result = song.identify(query_obj=fp, version="4.11")
+        print "Got result:", result
+        if len(result):
+            print "Artist: %s (%s)" % (result[0].artist_name,
+                                       result[0].artist_id)
+            print "Song: %s (%s)" % (result[0].title, result[0].id)
+        else:
+            print "No match.  This track may not be in the database yet."
+    else:
+        print "Couldn't decode", file
+
+
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print >>sys.stderr, "Usage: %s