diff --git a/README.md b/README.md
index 0363884..52cb755 100644
--- a/README.md
+++ b/README.md
@@ -263,11 +263,11 @@ arguments to morss is explained in Run above.
 The list of arguments can be obtained by running `morss --help`
 
 ```
-usage: morss [-h] [--format {rss,json,html,csv}] [--search STRING] [--clip]
-             [--indent] [--cache] [--force] [--proxy] [--newest] [--firstlink]
-             [--resolve] [--items XPATH] [--item_link XPATH]
-             [--item_title XPATH] [--item_content XPATH] [--item_time XPATH]
-             [--nolink] [--noref] [--silent]
+usage: morss [-h] [--post STRING] [--format {rss,json,html,csv}]
+             [--search STRING] [--clip] [--indent] [--cache] [--force]
+             [--proxy] [--newest] [--firstlink] [--resolve] [--items XPATH]
+             [--item_link XPATH] [--item_title XPATH] [--item_content XPATH]
+             [--item_time XPATH] [--nolink] [--noref] [--silent]
              url
 
 Get full-text RSS feeds
@@ -277,6 +277,7 @@ positional arguments:
 
 optional arguments:
   -h, --help            show this help message and exit
+  --post STRING         POST request
 
 output:
   --format {rss,json,html,csv}
diff --git a/morss/cli.py b/morss/cli.py
index 56a7092..a9de7b4 100644
--- a/morss/cli.py
+++ b/morss/cli.py
@@ -32,6 +32,8 @@ def cli_app():
 
     parser.add_argument('url', help='feed url')
 
+    parser.add_argument('--post', action='store', type=str, metavar='STRING', help='POST request')
+
     group = parser.add_argument_group('output')
     group.add_argument('--format', default='rss', choices=('rss', 'json', 'html', 'csv'), help='output format')
     group.add_argument('--search', action='store', type=str, metavar='STRING', help='does a basic case-sensitive search in the feed')
diff --git a/morss/crawler.py b/morss/crawler.py
index 672ecb0..41fa0d6 100644
--- a/morss/crawler.py
+++ b/morss/crawler.py
@@ -81,14 +81,17 @@ def get(*args, **kwargs):
     return adv_get(*args, **kwargs)['data']
 
 
-def adv_get(url, timeout=None, *args, **kwargs):
+def adv_get(url, post=None, timeout=None, *args, **kwargs):
     url = sanitize_url(url)
 
+    if post is not None:
+        post = post.encode('utf-8')
+
     if timeout is None:
-        con = custom_opener(*args, **kwargs).open(url)
+        con = custom_opener(*args, **kwargs).open(url, data=post)
 
     else:
-        con = custom_opener(*args, **kwargs).open(url, timeout=timeout)
+        con = custom_opener(*args, **kwargs).open(url, data=post, timeout=timeout)
 
     data = con.read()
diff --git a/morss/morss.py b/morss/morss.py
index 6e430ba..198b0a5 100644
--- a/morss/morss.py
+++ b/morss/morss.py
@@ -276,7 +276,7 @@ def FeedFetch(url, options):
         delay = 0
 
     try:
-        req = crawler.adv_get(url=url, follow=('rss' if not options.items else None), delay=delay, timeout=TIMEOUT * 2)
+        req = crawler.adv_get(url=url, post=options.post, follow=('rss' if not options.items else None), delay=delay, timeout=TIMEOUT * 2)
 
     except (IOError, HTTPException):
         raise MorssException('Error downloading feed')
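
For context, a minimal sketch (independent of this patch; the URL and form body below are placeholders, not from morss) of the standard-library behaviour the change relies on: a urllib opener sends a GET when `data` is None and switches to a POST when `data` is a bytes payload, which is what `adv_get()` now forwards after encoding the `--post` string as UTF-8. On the command line this would surface as something like `morss --post 'key=value' --format json https://example.com/feed`.

```python
# Sketch only -- shows the urllib behaviour that adv_get(post=...) builds on.
# The URL and payload here are placeholders, not taken from the patch.
import urllib.request

opener = urllib.request.build_opener()

# Same encoding step the patch applies to the --post string.
payload = 'key=value'.encode('utf-8')

# With data=None urllib would issue a GET; a bytes payload makes it a POST.
con = opener.open('https://httpbin.org/post', data=payload, timeout=10)
print(con.getcode(), con.read()[:120])
```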