Allow POST requests

2021-09-08 20:43:21 +02:00
parent 71d9c7a027
commit 06e0ada95b
4 changed files with 15 additions and 9 deletions


@@ -32,6 +32,8 @@ def cli_app():
     parser.add_argument('url', help='feed url')
 
+    parser.add_argument('--post', action='store', type=str, metavar='STRING', help='POST request')
+
     group = parser.add_argument_group('output')
     group.add_argument('--format', default='rss', choices=('rss', 'json', 'html', 'csv'), help='output format')
     group.add_argument('--search', action='store', type=str, metavar='STRING', help='does a basic case-sensitive search in the feed')
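The new flag is an ordinary argparse string option on the existing cli_app() parser, so the whole POST body arrives as a single string on options.post (it stays None when the flag is absent). A minimal standalone sketch of that behaviour, with a placeholder body and feed URL:

import argparse

# Reduced copy of the relevant parser setup; only these two arguments are shown.
parser = argparse.ArgumentParser()
parser.add_argument('url', help='feed url')
parser.add_argument('--post', action='store', type=str, metavar='STRING', help='POST request')

# Placeholder invocation: the body string and feed URL are illustrative only.
options = parser.parse_args(['--post', 'token=abc&page=2', 'http://example.com/feed'])
print(options.post)  # 'token=abc&page=2' -- later forwarded as the request body
print(options.url)   # 'http://example.com/feed'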


@@ -81,14 +81,17 @@ def get(*args, **kwargs):
     return adv_get(*args, **kwargs)['data']
 
 
-def adv_get(url, timeout=None, *args, **kwargs):
+def adv_get(url, post=None, timeout=None, *args, **kwargs):
     url = sanitize_url(url)
 
+    if post is not None:
+        post = post.encode('utf-8')
+
     if timeout is None:
-        con = custom_opener(*args, **kwargs).open(url)
+        con = custom_opener(*args, **kwargs).open(url, data=post)
 
     else:
-        con = custom_opener(*args, **kwargs).open(url, timeout=timeout)
+        con = custom_opener(*args, **kwargs).open(url, data=post, timeout=timeout)
 
     data = con.read()
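The encode('utf-8') step matters because urllib openers expect the request body as bytes, and passing a non-None data argument is what turns the request into a POST; with data=None the call behaves exactly as before. A small sketch of that standard-library behaviour using a plain opener (custom_opener in the project presumably builds a similar opener with extra handlers; the URL and body below are placeholders):

import urllib.request

opener = urllib.request.build_opener()

post = 'token=abc&page=2'      # str value as it arrives from the CLI
body = post.encode('utf-8')    # urllib requires bytes, hence the encode step above

# data=None would issue a GET; a bytes body switches the request to POST.
con = opener.open('http://example.com/submit', data=body, timeout=10)
print(con.read()[:200])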


@@ -276,7 +276,7 @@ def FeedFetch(url, options):
     delay = 0
 
     try:
-        req = crawler.adv_get(url=url, follow=('rss' if not options.items else None), delay=delay, timeout=TIMEOUT * 2)
+        req = crawler.adv_get(url=url, post=options.post, follow=('rss' if not options.items else None), delay=delay, timeout=TIMEOUT * 2)
 
     except (IOError, HTTPException):
         raise MorssException('Error downloading feed')
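Taken together, the option flows straight from the CLI through FeedFetch into the crawler, so callers of adv_get can also pass a body directly as a keyword argument. A hypothetical call (the import path and URL are assumptions; only the adv_get signature and its dict return value are taken from the diffs above):

from morss import crawler  # assumed import path for the crawler module

req = crawler.adv_get(url='http://example.com/feed', post='token=abc', timeout=10)
print(req['data'][:200])  # adv_get returns a dict; 'data' holds the response body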