Hey zapt0, maybe it would be better to add an archive parameter to the class header, use my parse_index method, and modify my cleanup:
Code:
import urllib

from calibre.web.feeds.news import BasicNewsRecipe


class AdvancedUserRecipe1299694372(BasicNewsRecipe):
    title = u'Instapaper'
    archive = False
    ...

    def parse_index(self):
        totalfeeds = []
        lfeeds = self.get_feeds()
        for feedobj in lfeeds:
            feedtitle, feedurl = feedobj
            self.report_progress(0, _('Fetching feed') + ' %s...' % (feedtitle if feedtitle else feedurl))
            articles = []
            soup = self.index_to_soup(feedurl)
            # remember the form_key so cleanup() can submit the bulk-archive form later
            self.myFormKey = soup.find('input', attrs={'name': 'form_key'})['value']
            for item in soup.findAll('div', attrs={'class': 'cornerControls'}):
                description = self.tag_to_string(item.div)
                atag = item.a
                if atag and atag.has_key('href'):
                    url = atag['href']
                    articles.append({
                        'url': url
                    })
            totalfeeds.append((feedtitle, articles))
        return totalfeeds

    def cleanup(self):
        # called once all articles have been downloaded
        if self.archive:
            params = urllib.urlencode(dict(form_key=self.myFormKey, submit="Archive All"))
            self.browser.open("http://www.instapaper.com/bulk-archive", params)
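cleanup() is called by calibre after all the articles have been downloaded, so with the flag set the recipe just POSTs the form_key it picked up in parse_index to the bulk-archive page and everything it fetched gets archived in one request.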
Users who want auto-archiving then only have to change a single line in the class header:
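Code:
    archive = True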