changeset 4:df9d4e704257

Made the architecture a little cleaner, but it's still pretty messy; I really need to turn some OO-like hacks into actual OO constructions. At least the Config.py structure is simpler. A sketch of the new source-module layout follows the file list below.
author Atul Varma <varmaa@toolness.com>
date Sat, 16 Feb 2008 21:56:15 -0600
parents 919b98450387
children 56bd30b89166
files Config.py.sample FeedSources.py GenerateHtml.py ImapFeed.py MakeEverything.py RssAtomFeed.py Serializer.py UpdateFeeds.py
diffstat 8 files changed, 82 insertions(+), 52 deletions(-)
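
The shape of the refactoring: each feed-source module now exposes getFeedInfo(), which yields plain feed-config dicts, and update( feed ), which writes the feed to feed["filename"]; FeedSources.py collects every source into a single FEED_INFO registry keyed by feed name. A minimal sketch of a source module that would fit this interface (ExampleFeed is purely illustrative and not part of this changeset):

    # ExampleFeed.py -- hypothetical source module, for illustration only.
    _FEEDS = [ dict( name = "Example Feed",
                     tags = ["example"],
                     url = "http://example.com/feed.atom" ) ]

    def getFeedInfo():
        # Yield one config dict per feed; FeedSources.py adds the
        # "filename" and "source" keys before registering the feed.
        for feed in _FEEDS:
            yield dict( feed )

    def update( feed ):
        # Fetch or generate the feed and write it to feed["filename"].
        print "Updating %s" % feed["name"]
        open( feed["filename"], "w" ).write( "<feed/>" )  # placeholder content

To plug such a module in, it would only need to be added to FEED_SOURCES in FeedSources.py.
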
--- a/Config.py.sample	Sat Feb 16 20:22:21 2008 -0600
+++ b/Config.py.sample	Sat Feb 16 21:56:15 2008 -0600
@@ -1,8 +1,3 @@
-from os.path import abspath
-from urllib import pathname2url
-
-FILENAME = "feeds.dat"
-
 # Importance is from 1 to 10; 1 is really important, 10 is not very
 # important.
 DEFAULT_IMPORTANCE = 10
@@ -13,13 +8,14 @@
     dict( name = "Play",
           tags = ["friends", "entertainment", "news"] ),
     dict( name = "Work",
-          tags = ["work", "mozilla"] )
+          tags = ["work", "mozilla"] ),
+    dict( name = "Chores",
+          tags = ["chores"] ),
 ]
 
 IMAP_FEEDS = [
     dict( name = "Mozilla Email",
           tags = ["work"],
-          filename = abspath("MozillaEmail.rss"),
           server = "mail.mozilla.com",
           port = 993,
           mailbox = "INBOX",
@@ -27,17 +23,23 @@
           url = "https://mail.mozilla.com" ),
     dict( name = "Humanized Email",
           tags = ["work"],
-          filename = abspath("HumanizedEmail.rss"),
           server = "imap.gmail.com",
           port = 993,
           # In Gmail's IMAP interface, labels are treated as
           # IMAP mailboxes.
           mailbox = "humanized",
           isSsl = True,
+          url = "https://www.gmail.com" ),
+    dict( name = "Personal TODO Email",
+          tags = ["chores"],
+          server = "imap.gmail.com",
+          port = 993,
+          mailbox = "personal-todo",
+          isSsl = True,
           url = "https://www.gmail.com" )
 ]
 
-FEEDS = [
+RSS_ATOM_FEEDS = [
     dict( name = "Mozilla Intranet Forum",
           tags = ["mozilla"],
           url = "https://intranet.mozilla.org/forum/?Feed=Atom" ),
@@ -79,11 +81,3 @@
           tags = ["work"],
           url = "http://feeds.feedburner.com/ProductiveFirefox" ),
 ]
-
-for feed in IMAP_FEEDS:
-    info = dict( name = feed["name"],
-                 tags = feed["tags"],
-                 url = "file://" + pathname2url( feed["filename"] ),
-                 # Email feeds are really important!
-                 importance = 1 )
-    FEEDS.insert( 0, info )
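
With the filename bookkeeping and the IMAP-into-FEEDS merge loop gone, Config.py.sample only describes the feeds themselves; filenames are assigned centrally by FeedSources.py and the importance default for email comes from ImapFeed.getFeedInfo(). A sketch of what a minimal entry in each list now looks like (names and hosts are placeholders):

    RSS_ATOM_FEEDS = [
        dict( name = "Some Blog",
              tags = ["news"],
              url = "http://example.com/feed.atom" ),
    ]

    IMAP_FEEDS = [
        dict( name = "Some Mailbox",
              tags = ["work"],
              server = "imap.example.com",
              port = 993,
              mailbox = "INBOX",
              isSsl = True,
              url = "https://mail.example.com" ),
    ]
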
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/FeedSources.py	Sat Feb 16 21:56:15 2008 -0600
@@ -0,0 +1,22 @@
+import RssAtomFeed
+import ImapFeed
+
+FEED_SOURCES = [RssAtomFeed, ImapFeed]
+
+FEED_INFO = {}
+
+def _filenameForFeed( name ):
+    # TODO: This should be more robust and deal with weird characters
+    # better.  It should also be a one-to-one mapping from feed names
+    # to filenames.
+    return "feed-%s.rss" % name.replace( " ", "-" )
+
+for feedSource in FEED_SOURCES:
+    for feed in feedSource.getFeedInfo():
+        key = feed["name"]
+        feed["filename"] = _filenameForFeed( key )
+        feed["source"] = feedSource
+        assert key not in FEED_INFO
+        FEED_INFO[key] = feed
+del feed
+del feedSource
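
FEED_INFO is an ordinary dict keyed by feed name, so callers can either look a feed up directly or walk all of them, which is exactly what GenerateHtml.py and UpdateFeeds.py do below. A small usage sketch, assuming the sample config above is in place:

    import FeedSources

    # Direct lookup by name (this is all GenerateHtml.getFeedConfig does now).
    feed = FeedSources.FEED_INFO["Mozilla Intranet Forum"]
    print feed["filename"]   # "feed-Mozilla-Intranet-Forum.rss"
    print feed["source"]     # the RssAtomFeed module

    # Walk every registered feed (this is how UpdateFeeds.main drives updates).
    for feed in FeedSources.FEED_INFO.values():
        feed["source"].update( feed )
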
--- a/GenerateHtml.py	Sat Feb 16 20:22:21 2008 -0600
+++ b/GenerateHtml.py	Sat Feb 16 21:56:15 2008 -0600
@@ -3,6 +3,7 @@
 
 import Serializer
 import Config
+import FeedSources
 
 class Detagger( HTMLParser.HTMLParser ):
     """
@@ -92,8 +93,7 @@
     outFile.write( "</body>" )
 
 def getFeedConfig( name ):
-    return [ feed for feed in Config.FEEDS
-             if feed["name"] == name ][0]
+    return FeedSources.FEED_INFO[name]
 
 def cmpRowUrgencyThenTimestamp( a, b ):
     """
--- a/ImapFeed.py	Sat Feb 16 20:22:21 2008 -0600
+++ b/ImapFeed.py	Sat Feb 16 21:56:15 2008 -0600
@@ -7,6 +7,12 @@
 from LocalAuth import IMAP_AUTH
 from Config import IMAP_FEEDS
 
+def getFeedInfo():
+    for feed in IMAP_FEEDS:
+        info = dict( importance = 1 )
+        info.update( feed )
+        yield info
+
 def getImapUnreadMailInfo( server, port, username, password,
                            mailbox = "INBOX", isSsl = True ):
     if isSsl:
@@ -70,11 +76,6 @@
                               feed["url"] )
         )
 
-def main():
-    for feed in IMAP_FEEDS:
-        print "Generating feed for %s (%s)" % ( feed["name"],
-                                                feed["filename"] )
-        generateFeed( feed, IMAP_AUTH )
-
-if __name__ == "__main__":
-    main()
+def update( feed ):
+    print "Generating feed for %s" % feed["name"]
+    generateFeed( feed, IMAP_AUTH )
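
Because getFeedInfo() seeds each entry with importance = 1 (email feeds are really important) and then overlays the config dict, an explicit importance in Config.py still wins. For example:

    info = dict( importance = 1 )
    info.update( dict( name = "Mozilla Email", importance = 3 ) )
    # info["importance"] is now 3; without an explicit value it stays 1.
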
--- a/MakeEverything.py	Sat Feb 16 20:22:21 2008 -0600
+++ b/MakeEverything.py	Sat Feb 16 21:56:15 2008 -0600
@@ -1,8 +1,6 @@
-import ImapFeed
 import UpdateFeeds
 import GenerateHtml
 
 if __name__ == "__main__":
-    ImapFeed.main()
     UpdateFeeds.main()
     GenerateHtml.main()
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/RssAtomFeed.py	Sat Feb 16 21:56:15 2008 -0600
@@ -0,0 +1,26 @@
+import Config
+import urllib2
+
+def getFeedInfo():
+    for feed in Config.RSS_ATOM_FEEDS:
+        yield dict( feed )
+
+def _getHandlers():
+    try:
+        import LocalAuth
+        handlers = LocalAuth.getAuthHandlers()
+    except ImportError:
+        print "No LocalAuth.py found, assuming no auth handlers."
+        handlers = []
+    return handlers
+
+def update( feed ):
+    # Yes, we need to rebuild the opener and the handlers on every
+    # call, or else things will fail on multiple Livejournal
+    # requests.
+    opener = urllib2.build_opener( *_getHandlers() )
+    print "Fetching feed for %s..." % feed["name"]
+    data = opener.open( feed["url"] )
+    fileObj = open( feed["filename"], "w" )
+    fileObj.write( data.read() )
+    fileObj.close()
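
RssAtomFeed.update() builds a fresh urllib2 opener on each call, fetches feed["url"], and writes the raw response to feed["filename"]; parsing still happens later in UpdateFeeds.py. A usage sketch with a placeholder feed dict shaped like the ones FeedSources.py builds:

    import RssAtomFeed

    feed = dict( name = "Example",
                 url = "http://example.com/feed.atom",
                 filename = "feed-Example.rss" )
    RssAtomFeed.update( feed )   # writes the raw feed XML to feed-Example.rss
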
--- a/Serializer.py	Sat Feb 16 20:22:21 2008 -0600
+++ b/Serializer.py	Sat Feb 16 21:56:15 2008 -0600
@@ -1,9 +1,10 @@
 import cPickle as pickle
-import Config
+
+FILENAME = "feeds.dat"
 
 class Serializer( object ):
     def __init__( self ):
-        self._fileObj = open( Config.FILENAME, "wb" )
+        self._fileObj = open( FILENAME, "wb" )
 
     def store( self, feedInfo ):
         pickle.dump( feedInfo,
@@ -14,7 +15,7 @@
         self._fileObj.close()
 
 def iterFeeds():
-    fileObj = open( Config.FILENAME, "rb" )
+    fileObj = open( FILENAME, "rb" )
     while 1:
         try:
             feedInfo = pickle.load( fileObj )
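
Serializer.py now owns its data file, so Config.py no longer needs FILENAME at all. A quick round-trip sketch, assuming iterFeeds() yields each pickled entry until the file is exhausted (note that creating a Serializer truncates feeds.dat):

    import Serializer

    s = Serializer.Serializer()           # opens feeds.dat for writing
    s.store( dict( title = "example" ) )
    s.finalize()

    for feedInfo in Serializer.iterFeeds():
        print feedInfo["title"]
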
--- a/UpdateFeeds.py	Sat Feb 16 20:22:21 2008 -0600
+++ b/UpdateFeeds.py	Sat Feb 16 21:56:15 2008 -0600
@@ -1,29 +1,17 @@
 import feedparser
-import urllib2
+import FeedSources
 import Serializer
-import Config
-
-def getHandlers():
-    try:
-        import LocalAuth
-        handlers = LocalAuth.getAuthHandlers()
-    except ImportError:
-        print "No LocalAuth.py found, assuming no auth handlers."
-        handlers = []
-    return handlers
 
 def main():
+    for feed in FeedSources.FEED_INFO.values():
+        source = feed["source"]
+        source.update( feed )
+
     serializer = Serializer.Serializer()
 
-    for feed in Config.FEEDS:
-        # Yes, we need to rebuild the opener and the handlers every
-        # time through this loop, or else things will fail on multiple
-        # Livejournal requests.
-        opener = urllib2.build_opener( *getHandlers() )
-
-        print "Fetching feed for %s..." % feed["name"]
-        data = opener.open( feed["url"] )
-        feedInfo = feedparser.parse( data )
+    for feed in FeedSources.FEED_INFO.values():
+        fileObj = open( feed["filename"], "r" )
+        feedInfo = feedparser.parse( fileObj )
         feedInfo["config_name"] = feed["name"]
         serializer.store( feedInfo )
     serializer.finalize()