#!/usr/bin/env python3
# Watch an RSS/Atom feed and print new entries as one-line notifications.
# Usage: feedwatch.py URL [backfill]
# Any second argument prints all existing entries instead of only those
# published after the script was started.
import calendar
import re
import sys
import time

import feedparser

url = sys.argv[1]
last = time.time()            # only report entries published after startup...
if len(sys.argv) > 2:
    last = 0                  # ...unless a second argument asks for a backfill

while True:
    feed = feedparser.parse(url)
    if len(feed['entries']) == 0:
        print('error: no entries from', url, flush=True, file=sys.stderr)
        time.sleep(60)        # fetch failed or feed is empty: retry sooner
        continue

    # Walk entries oldest-first so output stays in chronological order.
    for i in reversed(range(len(feed['entries']))):
        entry = feed['entries'][i]
        # published_parsed is a UTC struct_time, so convert with timegm.
        published = calendar.timegm(entry['published_parsed'])
        if published > last:
            # Strip HTML tags, flatten newlines, undo &quot; escaping, then
            # keep only the first sentence of the first 250 characters.
            summary = re.sub('<[^<]+?>', '', entry['summary'])
            summary = summary.replace('\n', ' ; ').replace('&quot;', '"')
            summary = summary[:250].split('.', 1)[0] + '...'
            print(
                entry['title'], '--',
                entry['author'] + ': ' + summary,
                entry['link'],
                flush=True,
            )

    # Remember the feed's last-updated time and poll again in 15 minutes.
    last = calendar.timegm(feed['feed']['updated_parsed'])
    time.sleep(900)