@@ -10,6 +10,7 @@ import re
 
 import lxml.etree
 import lxml.html
+from bs4 import BeautifulSoup
 
 from . import feeds
 from . import feedify
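
The new BeautifulSoup import backs the cgi_page handler added further down, which runs fetched pages through BeautifulSoup before re-parsing them with lxml.
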
@@ -18,19 +19,20 @@ from . import readabilite
 
 import wsgiref.simple_server
 import wsgiref.handlers
+import cgitb
 
 try:
     # python 2
     from Queue import Queue
     from httplib import HTTPException
     from urllib import quote_plus
     from urllib import unquote
     from urlparse import urlparse, urljoin, parse_qs
 
 except ImportError:
     # python 3
     from queue import Queue
     from http.client import HTTPException
     from urllib.parse import quote_plus
     from urllib.parse import unquote
     from urllib.parse import urlparse, urljoin, parse_qs
 
 LIM_ITEM = 100 # deletes what's beyond
@@ -44,7 +46,7 @@ THREADS = 10 # number of threads (1 for single-threaded)
 
 DEBUG = False
 PORT = 8080
 
-PROTOCOL = ['http', 'https', 'ftp']
+PROTOCOL = ['http', 'https']
 
 
 def filterOptions(options):
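
Note that 'ftp' is gone from the accepted schemes: since UrlFix (below) checks against PROTOCOL, an ftp:// URL now gets 'http://' prepended like any other unrecognized scheme instead of passing through.
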
@@ -66,6 +68,7 @@ def log(txt, force=False):
     if DEBUG or force:
         if 'REQUEST_URI' in os.environ:
             open('morss.log', 'a').write("%s\n" % repr(txt))
 
         else:
             print(repr(txt))
@@ -73,6 +76,7 @@ def log(txt, force=False):
 
 def len_html(txt):
     if len(txt):
         return len(lxml.html.fromstring(txt).text_content())
 
     else:
         return 0
@@ -80,6 +84,7 @@ def len_html(txt):
 
 def count_words(txt):
     if len(txt):
         return len(lxml.html.fromstring(txt).text_content().split())
 
     return 0
@@ -88,12 +93,14 @@ class Options:
         if len(args):
             self.options = args
             self.options.update(options or {})
 
         else:
             self.options = options or {}
 
     def __getattr__(self, key):
         if key in self.options:
             return self.options[key]
 
         else:
             return False
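
Since __getattr__ is only consulted for attributes not found through normal lookup, any option that was never set reads as False instead of raising AttributeError. A quick sketch of the resulting behavior, assuming the class as shown:

    opts = Options({'json': True})
    opts.json    # True
    opts.indent  # False -- unknown keys default to False
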
@@ -107,17 +114,23 @@ class Options:
 
 def parseOptions(options):
     """ Turns ['md=True'] into {'md':True} """
     out = {}
 
     for option in options:
         split = option.split('=', 1)
 
         if len(split) > 1:
             if split[1].lower() == 'true':
                 out[split[0]] = True
 
             elif split[1].lower() == 'false':
                 out[split[0]] = False
 
             else:
                 out[split[0]] = split[1]
 
         else:
             out[split[0]] = True
 
     return out
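
For reference, the coercion applies to the value side of each 'key=value' pair, matching the docstring:

    parseOptions(['json', 'indent=true', 'search=foo'])
    # -> {'json': True, 'indent': True, 'search': 'foo'}
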
@@ -158,6 +171,11 @@ def ItemFix(item, feedurl='/'):
         item.link = parse_qs(urlparse(item.link).query)['url'][0]
         log(item.link)
 
+    # pocket
+    if fnmatch(item.link, 'https://getpocket.com/redirect?url=*'):
+        item.link = parse_qs(urlparse(item.link).query)['url'][0]
+        log(item.link)
+
     # facebook
     if fnmatch(item.link, 'https://www.facebook.com/l.php?u=*'):
         item.link = parse_qs(urlparse(item.link).query)['u'][0]
@@ -208,6 +226,7 @@ def ItemFill(item, options, feedurl='/', fast=False):
     if len(match):
         link = match[0]
         log(link)
 
     else:
         link = None
@@ -217,6 +236,7 @@ def ItemFill(item, options, feedurl='/', fast=False):
     if len(match) and urlparse(match[0]).netloc != 'www.facebook.com':
         link = match[0]
         log(link)
 
     else:
         link = None
@@ -284,24 +304,27 @@ def ItemAfter(item, options):
     return item
 
 
-def FeedFetch(url, options):
-    # basic url clean-up
+def UrlFix(url):
     if url is None:
         raise MorssException('No url provided')
 
+    if isinstance(url, bytes):
+        url = url.decode()
+
     if urlparse(url).scheme not in PROTOCOL:
         url = 'http://' + url
         log(url)
 
     url = url.replace(' ', '%20')
 
-    if isinstance(url, bytes):
-        url = url.decode()
+    return url
+
+
+def FeedFetch(url, options):
     # allow for code execution for feedify
     pre = feedify.pre_worker(url)
     if pre:
-        url = pre
+        url = UrlFix(pre)
         log('url redirect')
         log(url)
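
UrlFix now decodes bytes input before anything else, so urlparse and the PROTOCOL check always operate on str. A usage sketch, assuming the function as shown:

    UrlFix(b'www.example.com/feed')    # -> 'http://www.example.com/feed'
    UrlFix('https://example.com/rss')  # unchanged (scheme already allowed)
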
@@ -324,8 +347,7 @@ def FeedFetch(url, options):
 
     if options.items:
         # using custom rules
-        rss = feeds.FeedHTML(xml, url, contenttype)
-        feed.rule
+        rss = feeds.FeedHTML(xml)
 
         rss.rules['items'] = options.items
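
The dropped feed.rule line was a bare attribute lookup whose result went nowhere, and feed does not appear to be defined in this scope, so the old custom-rules branch would presumably have raised NameError if exercised. FeedHTML is now fed the markup alone.
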
@@ -338,6 +360,8 @@ def FeedFetch(url, options):
         if options.item_time:
             rss.rules['item_time'] = options.item_time
 
+        rss = rss.convert(feeds.FeedXML)
+
     else:
         try:
             rss = feeds.parse(xml, url, contenttype)
@@ -375,6 +399,7 @@ def FeedGather(rss, url, options):
             value = queue.get()
 
             try:
                 worker(*value)
 
             except Exception as e:
                 log('Thread Error: %s' % e.message)
 
             queue.task_done()
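
One caveat left untouched here: e.message only exists on Python 2 exceptions, so under Python 3 this log line itself raises AttributeError inside the worker thread. The reworked error handler further down switches to repr(e) for exactly that reason.
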
@@ -414,6 +439,7 @@ def FeedGather(rss, url, options):
     for i, item in enumerate(list(rss.items)):
         if threads == 1:
             worker(*[i, item])
 
         else:
             queue.put([i, item])
@@ -433,37 +459,38 @@ def FeedGather(rss, url, options):
     return rss
 
 
-def FeedFormat(rss, options):
+def FeedFormat(rss, options, encoding='utf-8'):
     if options.callback:
         if re.match(r'^[a-zA-Z0-9\.]+$', options.callback) is not None:
-            return '%s(%s)' % (options.callback, rss.tojson())
+            out = '%s(%s)' % (options.callback, rss.tojson(encoding='unicode'))
+            return out if encoding == 'unicode' else out.encode(encoding)
 
         else:
             raise MorssException('Invalid callback var name')
 
     elif options.json:
         if options.indent:
-            return rss.tojson(encoding='UTF-8', indent=4)
+            return rss.tojson(encoding=encoding, indent=4)
 
         else:
-            return rss.tojson(encoding='UTF-8')
+            return rss.tojson(encoding=encoding)
 
     elif options.csv:
-        return rss.tocsv(encoding='UTF-8')
+        return rss.tocsv(encoding=encoding)
 
     elif options.reader:
         if options.indent:
-            return rss.tohtml(encoding='UTF-8', pretty_print=True)
+            return rss.tohtml(encoding=encoding, pretty_print=True)
 
         else:
-            return rss.tohtml(encoding='UTF-8')
+            return rss.tohtml(encoding=encoding)
 
     else:
         if options.indent:
-            return rss.torss(xml_declaration=True, encoding='UTF-8', pretty_print=True)
+            return rss.torss(xml_declaration=True, encoding=encoding, pretty_print=True)
 
         else:
-            return rss.torss(xml_declaration=True, encoding='UTF-8')
+            return rss.torss(xml_declaration=True, encoding=encoding)
 
 
 def process(url, cache=None, options=None):
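
FeedFormat now threads an encoding argument through every serializer. Following the lxml convention that the callback branch makes explicit, encoding='unicode' should yield str and anything else bytes. A sketch of the intended contract, assuming default options:

    FeedFormat(rss, Options(), encoding='unicode')  # -> str
    FeedFormat(rss, Options())                      # -> utf-8 bytes
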
@@ -475,14 +502,16 @@ def process(url, cache=None, options=None):
     if cache:
         crawler.default_cache = crawler.SQLiteCache(cache)
 
+    url = UrlFix(url)
     rss = FeedFetch(url, options)
     rss = FeedGather(rss, url, options)
 
     return FeedFormat(rss, options)
 
 
-def cgi_app(environ, start_response):
+def cgi_parse_environ(environ):
     # get options
 
     if 'REQUEST_URI' in environ:
         url = environ['REQUEST_URI'][1:]
 
     else:
@@ -496,7 +525,7 @@ def cgi_app(environ, start_response):
     if url.startswith(':'):
         split = url.split('/', 1)
 
-        options = split[0].replace('|', '/').replace('\\\'', '\'').split(':')[1:]
+        raw_options = unquote(split[0]).replace('|', '/').replace('\\\'', '\'').split(':')[1:]
 
         if len(split) > 1:
             url = split[1]
@@ -504,15 +533,22 @@ def cgi_app(environ, start_response):
             url = ''
 
     else:
-        options = []
+        raw_options = []
 
     # init
-    options = Options(filterOptions(parseOptions(options)))
-    headers = {}
+    options = Options(filterOptions(parseOptions(raw_options)))
 
     global DEBUG
     DEBUG = options.debug
 
+    return (url, options)
+
+
+def cgi_app(environ, start_response):
+    url, options = cgi_parse_environ(environ)
+
+    headers = {}
+
     # headers
     headers['status'] = '200 OK'
     headers['cache-control'] = 'max-age=%s' % DELAY
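
Splitting cgi_parse_environ out of cgi_app decouples URL/option parsing from feed processing; cgi_page and cgi_dispatcher below call it on the same environ to make their own routing decisions.
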
@@ -537,6 +573,7 @@ def cgi_app(environ, start_response):
         crawler.default_cache = crawler.SQLiteCache(os.path.join(os.getcwd(), 'morss-cache.db'))
 
     # get the work done
+    url = UrlFix(url)
     rss = FeedFetch(url, options)
 
     if headers['content-type'] == 'text/xml':
@@ -547,18 +584,42 @@ def cgi_app(environ, start_response):
     rss = FeedGather(rss, url, options)
     out = FeedFormat(rss, options)
 
-    if not options.silent:
-        return out
+    if options.silent:
+        return ['']
 
+    else:
+        return [out]
 
-def cgi_wrapper(environ, start_response):
-    # simple http server for html and css
+
+def middleware(func):
+    " Decorator to turn a function into a wsgi middleware "
+    # This is called when parsing the code
+
+    def app_builder(app):
+        # This is called when doing app = cgi_wrapper(app)
+
+        def app_wrap(environ, start_response):
+            # This is called when a http request is being processed
+
+            return func(environ, start_response, app)
+
+        return app_wrap
+
+    return app_builder
+
+
+@middleware
+def cgi_file_handler(environ, start_response, app):
+    " Simple HTTP server to serve static files (.html, .css, etc.) "
+
     files = {
         '': 'text/html',
-        'index.html': 'text/html'}
+        'index.html': 'text/html',
+        'sheet.xsl': 'text/xsl'}
 
     if 'REQUEST_URI' in environ:
         url = environ['REQUEST_URI'][1:]
 
     else:
         url = environ['PATH_INFO'][1:]
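
middleware is a two-stage closure: decorating binds func once at import time, calling the decorated name wraps an existing WSGI app, and the innermost function runs per request. A toy sketch of the composition (names hypothetical):

    @middleware
    def noop(environ, start_response, app):
        # per-request hook; here it just delegates to the wrapped app
        return app(environ, start_response)

    wrapped = noop(cgi_app)  # i.e. app_builder(cgi_app), returning app_wrap
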
@@ -587,16 +648,80 @@ def cgi_wrapper(environ, start_response):
             start_response(headers['status'], list(headers.items()))
             return ['Error %s' % headers['status']]
 
-    # actual morss use
+    else:
+        return app(environ, start_response)
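
With the else branch, cgi_file_handler now falls through to whatever app it wraps instead of embedding the feed logic, which is what lets it sit in front of cgi_app in the stacks built in main().
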
+
+
+def cgi_page(environ, start_response):
+    url, options = cgi_parse_environ(environ)
+
+    # get page
+    PROTOCOL = ['http', 'https']
+
+    if urlparse(url).scheme not in ['http', 'https']:
+        url = 'http://' + url
+
+    con = crawler.custom_handler().open(url)
+    data = con.read()
+
+    contenttype = con.info().get('Content-Type', '').split(';')[0]
+
+    if contenttype in ['text/html', 'application/xhtml+xml', 'application/xml']:
+        html = lxml.html.fromstring(BeautifulSoup(data, 'lxml').prettify())
+        html.make_links_absolute(url)
+
+        kill_tags = ['script', 'iframe', 'noscript']
+
+        for tag in kill_tags:
+            for elem in html.xpath('//'+tag):
+                elem.getparent().remove(elem)
+
+        output = lxml.etree.tostring(html.getroottree(), encoding='utf-8')
+
+    else:
+        output = None
+
+    # return html page
+    headers = {'status': '200 OK', 'content-type': 'text/html'}
+    start_response(headers['status'], list(headers.items()))
+    return [output]
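
Two details worth flagging in cgi_page: the local PROTOCOL assignment is never used (the check compares against a literal list), and when the content type is not HTML/XML the handler returns [None], which the encode middleware below cannot handle; an empty bytestring was presumably intended.
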
+
+
+dispatch_table = {
+    'getpage': cgi_page
+    }
+
+
+@middleware
+def cgi_dispatcher(environ, start_response, app):
+    url, options = cgi_parse_environ(environ)
+
+    for key in dispatch_table.keys():
+        if key in options:
+            return dispatch_table[key](environ, start_response)
+
+    return app(environ, start_response)
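
The key in options test implies Options supports membership; the in operator does not fall back to __getattr__, so this presumably relies on a __contains__ (or iteration support) on Options that is not part of this diff.
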
+
+
+@middleware
+def cgi_error_handler(environ, start_response, app):
     try:
-        return [cgi_app(environ, start_response) or '(empty)']
+        return app(environ, start_response)
 
     except (KeyboardInterrupt, SystemExit):
         raise
 
     except Exception as e:
-        headers = {'status': '500 Oops', 'content-type': 'text/plain'}
+        headers = {'status': '500 Oops', 'content-type': 'text/html'}
         start_response(headers['status'], list(headers.items()), sys.exc_info())
-        log('ERROR <%s>: %s' % (url, e.message), force=True)
-        return ['An error happened:\n%s' % e.message]
+        log('ERROR: %s' % repr(e), force=True)
+        return [cgitb.html(sys.exc_info())]
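
Returning cgitb.html(sys.exc_info()) replaces the hand-rolled plain-text error with a full HTML traceback page, which is why the content type flips to text/html and why cgitb is imported at the top.
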
+
+
+@middleware
+def cgi_encode(environ, start_response, app):
+    out = app(environ, start_response)
+    return [x if isinstance(x, bytes) else x.encode('utf-8') for x in out]
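
cgi_encode normalizes the response body for WSGI, which on Python 3 must contain bytes; str chunks (such as the '' from the silent path) are utf-8 encoded, bytes pass through untouched. A toy illustration, assuming the decorator as shown:

    app = cgi_encode(lambda environ, start_response: ['a', b'b'])
    app({}, None)  # -> [b'a', b'b']
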
 
 
 def cli_app():
@@ -608,6 +733,7 @@ def cli_app():
     crawler.default_cache = crawler.SQLiteCache(os.path.expanduser('~/.cache/morss-cache.db'))
 
+    url = UrlFix(url)
     rss = FeedFetch(url, options)
     rss = FeedGather(rss, url, options)
     out = FeedFormat(rss, options)
@@ -622,6 +748,7 @@ def isInt(string):
     try:
         int(string)
         return True
 
     except ValueError:
         return False
@@ -629,7 +756,13 @@ def isInt(string):
 def main():
     if 'REQUEST_URI' in os.environ:
         # mod_cgi
-        wsgiref.handlers.CGIHandler().run(cgi_wrapper)
+
+        app = cgi_app
+        app = cgi_dispatcher(app)
+        app = cgi_error_handler(app)
+        app = cgi_encode(app)
+
+        wsgiref.handlers.CGIHandler().run(app)
 
     elif len(sys.argv) <= 1 or isInt(sys.argv[1]) or '--root' in sys.argv[1:]:
         # start internal (basic) http server
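
Under mod_cgi the stack deliberately skips cgi_file_handler, since a front web server normally serves the static files itself; the standalone server below adds it.
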
@@ -638,22 +771,31 @@ def main():
             argPort = int(sys.argv[1])
             if argPort > 0:
                 port = argPort
 
             else:
                 raise MorssException('Port must be positive integer')
 
         else:
             port = PORT
 
-        print('Serving http://localhost:%s/'%port)
-        httpd = wsgiref.simple_server.make_server('', port, cgi_wrapper)
+        app = cgi_app
+        app = cgi_file_handler(app)
+        app = cgi_dispatcher(app)
+        app = cgi_error_handler(app)
+        app = cgi_encode(app)
+
+        print('Serving http://localhost:%s/' % port)
+        httpd = wsgiref.simple_server.make_server('', port, app)
         httpd.serve_forever()
 
     else:
         # as a CLI app
         try:
             cli_app()
 
         except (KeyboardInterrupt, SystemExit):
             raise
 
         except Exception as e:
             print('ERROR: %s' % e.message)
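
Order matters when stacking: each assignment wraps the previous value, so cgi_encode ends up outermost and byte-encodes whatever any inner layer returns. The chain above is equivalent to:

    app = cgi_encode(cgi_error_handler(cgi_dispatcher(cgi_file_handler(cgi_app))))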