diff --git a/morss/__main__.py b/morss/__main__.py
index d074337..6803aa0 100644
--- a/morss/__main__.py
+++ b/morss/__main__.py
@@ -20,9 +20,7 @@
import os
import sys
-from . import wsgi
-from . import cli
-
+from . import cli, wsgi
from .morss import MorssException
diff --git a/morss/crawler.py b/morss/crawler.py
index 41fa0d6..f882ac3 100644
--- a/morss/crawler.py
+++ b/morss/crawler.py
@@ -16,30 +16,33 @@
# with this program. If not, see <https://www.gnu.org/licenses/>.
import os
-import sys
-
-import zlib
-from io import BytesIO, StringIO
-import re
-import chardet
-from cgi import parse_header
-import time
-import threading
import random
+import re
+import sys
+import threading
+import time
+import zlib
+from cgi import parse_header
from collections import OrderedDict
+from io import BytesIO, StringIO
+
+import chardet
try:
# python 2
- from urllib2 import BaseHandler, HTTPCookieProcessor, Request, addinfourl, parse_keqv_list, parse_http_list, build_opener
from urllib import quote
- from urlparse import urlparse, urlunparse
+
import mimetools
+ from urllib2 import (BaseHandler, HTTPCookieProcessor, Request, addinfourl,
+ build_opener, parse_http_list, parse_keqv_list)
+ from urlparse import urlparse, urlunparse
except ImportError:
# python 3
- from urllib.request import BaseHandler, HTTPCookieProcessor, Request, addinfourl, parse_keqv_list, parse_http_list, build_opener
- from urllib.parse import quote
- from urllib.parse import urlparse, urlunparse
import email
+ from urllib.parse import quote, urlparse, urlunparse
+ from urllib.request import (BaseHandler, HTTPCookieProcessor, Request,
+ addinfourl, build_opener, parse_http_list,
+ parse_keqv_list)
try:
# python 2
@@ -620,7 +623,7 @@ class BaseCache:
return True
-import sqlite3
+import sqlite3 # isort:skip
class SQLiteCache(BaseCache):
@@ -657,7 +660,7 @@ class SQLiteCache(BaseCache):
self.con.execute('INSERT INTO data VALUES (?,?,?,?,?,?) ON CONFLICT(url) DO UPDATE SET code=?, msg=?, headers=?, data=?, timestamp=?', (url,) + value + value)
-import pymysql.cursors
+import pymysql.cursors # isort:skip
class MySQLCacheHandler(BaseCache):
diff --git a/morss/feeds.py b/morss/feeds.py
index f738ebe..5c0736a 100644
--- a/morss/feeds.py
+++ b/morss/feeds.py
@@ -15,35 +15,33 @@
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
-import sys
import os.path
+import sys
-from datetime import datetime
-
-import re
-import json
import csv
-
+import json
+import re
+from copy import deepcopy
+from datetime import datetime
from fnmatch import fnmatch
-from lxml import etree
-from dateutil import tz
import dateutil.parser
-from copy import deepcopy
-
import lxml.html
+from dateutil import tz
+from lxml import etree
+
from .readabilite import parse as html_parse
json.encoder.c_make_encoder = None
try:
# python 2
- from StringIO import StringIO
from ConfigParser import RawConfigParser
+ from StringIO import StringIO
except ImportError:
# python 3
- from io import StringIO
from configparser import RawConfigParser
+ from io import StringIO
try:
# python 2
diff --git a/morss/morss.py b/morss/morss.py
index 198b0a5..4423d38 100644
--- a/morss/morss.py
+++ b/morss/morss.py
@@ -16,30 +16,25 @@
# with this program. If not, see <https://www.gnu.org/licenses/>.
import os
-
+import re
import time
from datetime import datetime
-from dateutil import tz
-
from fnmatch import fnmatch
-import re
import lxml.etree
import lxml.html
+from dateutil import tz
-from . import feeds
-from . import crawler
-from . import readabilite
-
+from . import crawler, feeds, readabilite
try:
# python 2
from httplib import HTTPException
- from urlparse import urlparse, urljoin, parse_qs
+ from urlparse import parse_qs, urljoin, urlparse
except ImportError:
# python 3
from http.client import HTTPException
- from urllib.parse import urlparse, urljoin, parse_qs
+ from urllib.parse import parse_qs, urljoin, urlparse
MAX_ITEM = int(os.getenv('MAX_ITEM', 5)) # cache-only beyond
diff --git a/morss/wsgi.py b/morss/wsgi.py
index 32a3cde..76fd895 100644
--- a/morss/wsgi.py
+++ b/morss/wsgi.py
@@ -15,16 +15,16 @@
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <https://www.gnu.org/licenses/>.
-import sys
+import cgitb
+import mimetypes
import os.path
import re
-import lxml.etree
-
-import cgitb
-import wsgiref.util
-import wsgiref.simple_server
+import sys
import wsgiref.handlers
-import mimetypes
+import wsgiref.simple_server
+import wsgiref.util
+
+import lxml.etree
try:
# python 2
@@ -33,11 +33,9 @@ except ImportError:
# python 3
from urllib.parse import unquote
-from . import crawler
-from . import readabilite
-from .morss import FeedFetch, FeedGather, FeedFormat
-from .morss import Options, log, TIMEOUT, DELAY, MorssException
-
+from . import crawler, readabilite
+from .morss import (DELAY, TIMEOUT, FeedFetch, FeedFormat, FeedGather,
+ MorssException, Options, log)
PORT = int(os.getenv('PORT', 8080))