diff --git a/module/Api.py b/module/Api.py
index f0bf5e2648..223ead8faa 100644
--- a/module/Api.py
+++ b/module/Api.py
@@ -96,10 +96,9 @@ def __init__(self, core):
         self.core = core
 
     def _convertPyFile(self, p):
-        f = FileData(p["id"], p["url"], p["name"], p["plugin"], p["size"],
+        return FileData(p["id"], p["url"], p["name"], p["plugin"], p["size"],
                      p["format_size"], p["status"], p["statusmsg"],
                      p["package"], p["error"], p["order"])
-        return f
 
     def _convertConfigFormat(self, c):
         sections = {}
@@ -259,9 +258,8 @@ def getLog(self, offset=0):
         """
         filename = join(self.core.config['log']['log_folder'], 'log.txt')
         try:
-            fh = open(filename, "r")
-            lines = fh.readlines()
-            fh.close()
+            with open(filename, "r") as fh:
+                lines = fh.readlines()
             if offset >= len(lines):
                 return []
             return lines[offset:]
@@ -316,11 +314,7 @@ def addPackage(self, name, links, dest=Destination.Queue):
         :param dest: `Destination`
         :return: package id of the new package
         """
-        if self.core.config['general']['folder_per_package']:
-            folder = name
-        else:
-            folder = ""
-
+        folder = name if self.core.config['general']['folder_per_package'] else ""
         folder = folder.replace("http://", "").replace(":", "").replace("/", "_").replace("\\", "_")
 
         pid = self.core.files.addPackage(name, folder, dest)
@@ -402,10 +396,8 @@ def checkOnlineStatusContainer(self, urls, container, data):
         :param data: file content
         :return: online check
         """
-        th = open(join(self.core.config["general"]["download_folder"], "tmp_" + container), "wb")
-        th.write(str(data))
-        th.close()
-
+        with open(join(self.core.config["general"]["download_folder"], "tmp_" + container), "wb") as th:
+            th.write(str(data))
         return self.checkOnlineStatus(urls + [th.name])
 
     @permission(PERMS.ADD)
@@ -417,12 +409,12 @@ def pollResults(self, rid):
         """
         result = self.core.threadManager.getInfoResult(rid)
 
-        if "ALL_INFO_FETCHED" in result:
-            del result["ALL_INFO_FETCHED"]
-            return OnlineCheck(-1, result)
-        else:
+        if "ALL_INFO_FETCHED" not in result:
             return OnlineCheck(rid, result)
 
+        del result["ALL_INFO_FETCHED"]
+        return OnlineCheck(-1, result)
+
     @permission(PERMS.ADD)
     def generatePackages(self, links):
@@ -431,8 +423,7 @@ def generatePackages(self, links):
         :param links: list of urls
         :return: package names mapped to urls
         """
-        result = parseNames((x, x) for x in links)
-        return result
+        return parseNames((x, x) for x in links)
 
     @permission(PERMS.ADD)
     def generateAndAddPackages(self, links, dest=Destination.Queue):
@@ -470,11 +461,16 @@ def getPackageData(self, pid):
         if not data:
             raise PackageDoesNotExists(pid)
 
-        pdata = PackageData(data["id"], data["name"], data["folder"], data["site"], data["password"],
-                            data["queue"], data["order"],
-                            links=[self._convertPyFile(x) for x in data["links"].itervalues()])
-
-        return pdata
+        return PackageData(
+            data["id"],
+            data["name"],
+            data["folder"],
+            data["site"],
+            data["password"],
+            data["queue"],
+            data["order"],
+            links=[self._convertPyFile(x) for x in data["links"].itervalues()],
+        )
 
     @permission(PERMS.LIST)
     def getPackageInfo(self, pid):
@@ -484,15 +480,20 @@ def getPackageInfo(self, pid):
         :return: `PackageData` with .fid attribute
         """
         data = self.core.files.getPackageData(int(pid))
-
+
         if not data:
             raise PackageDoesNotExists(pid)
 
-        pdata = PackageData(data["id"], data["name"], data["folder"], data["site"], data["password"],
-                            data["queue"], data["order"],
-                            fids=[int(x) for x in data["links"]])
-
-        return pdata
+        return PackageData(
+            data["id"],
+            data["name"],
+            data["folder"],
+            data["site"],
+            data["password"],
+            data["queue"],
+
data["order"], + fids=[int(x) for x in data["links"]], + ) @permission(PERMS.LIST) def getFileData(self, fid): @@ -505,8 +506,7 @@ def getFileData(self, fid): if not info: raise FileDoesNotExists(fid) - fdata = self._convertPyFile(info.values()[0]) - return fdata + return self._convertPyFile(info.values()[0]) @permission(PERMS.DELETE) def deleteFiles(self, fids): @@ -695,10 +695,8 @@ def uploadContainer(self, filename, data): :param filename: filename, extension is important so it can correctly decrypted :param data: file content """ - th = open(join(self.core.config["general"]["download_folder"], "tmp_" + filename), "wb") - th.write(str(data)) - th.close() - + with open(join(self.core.config["general"]["download_folder"], "tmp_" + filename), "wb") as th: + th.write(str(data)) self.addPackage(th.name, [th.name], Destination.Queue) @permission(PERMS.MODIFY) @@ -762,7 +760,7 @@ def getPackageOrder(self, destination): for pid in packs: pack = self.core.files.getPackageData(int(pid)) - while pack["order"] in order.keys(): #just in case + while pack["order"] in order: #just in case pack["order"] += 1 order[pack["order"]] = pack["id"] return order @@ -777,7 +775,7 @@ def getFileOrder(self, pid): rawData = self.core.files.getPackageData(int(pid)) order = {} for id, pyfile in rawData["links"].iteritems(): - while pyfile["order"] in order.keys(): #just in case + while pyfile["order"] in order: #just in case pyfile["order"] += 1 order[pyfile["order"]] = pyfile["id"] return order @@ -791,7 +789,7 @@ def isCaptchaWaiting(self): """ self.core.lastClientConnected = time() task = self.core.captchaManager.getTask() - return not task is None + return task is not None @permission(PERMS.STATUS) def getCaptchaTask(self, exclusive=False): @@ -805,8 +803,7 @@ def getCaptchaTask(self, exclusive=False): if task: task.setWatingForUser(exclusive=exclusive) data, type, result = task.getCaptcha() - t = CaptchaTask(int(task.id), standard_b64encode(data), type, result) - return t + return CaptchaTask(int(task.id), standard_b64encode(data), type, result) else: return CaptchaTask(-1) @@ -911,7 +908,7 @@ def login(self, username, password, remoteip=None): :param remoteip: Omit this argument, its only used internal :return: bool indicating login was successful """ - return True if self.checkAuth(username, password, remoteip) else False + return bool(self.checkAuth(username, password, remoteip)) def checkAuth(self, username, password, remoteip=None): """Check authentication and returns details @@ -955,11 +952,16 @@ def getUserData(self, username, password): def getAllUserData(self): """returns all known user and info""" - res = {} - for user, data in self.core.db.getAllUserData().iteritems(): - res[user] = UserData(user, data["email"], data["role"], data["permission"], data["template"]) - - return res + return { + user: UserData( + user, + data["email"], + data["role"], + data["permission"], + data["template"], + ) + for user, data in self.core.db.getAllUserData().iteritems() + } @permission(PERMS.STATUS) def getServices(self): @@ -967,11 +969,10 @@ def getServices(self): :return: dict with this style: {"plugin": {"method": "description"}} """ - data = {} - for plugin, funcs in self.core.hookManager.methods.iteritems(): - data[plugin] = funcs - - return data + return { + plugin: funcs + for plugin, funcs in self.core.hookManager.methods.iteritems() + } @permission(PERMS.STATUS) def hasService(self, plugin, func): diff --git a/module/CaptchaManager.py b/module/CaptchaManager.py index 02cd10a118..eecf7d8eba 100644 --- 
a/module/CaptchaManager.py +++ b/module/CaptchaManager.py @@ -125,10 +125,7 @@ def setWaiting(self, sec): self.status = "waiting" def isWaiting(self): - if self.result or self.error or time() > self.waitUntil: - return False - - return True + return not self.result and not self.error and time() <= self.waitUntil def isTextual(self): """ returns if text is written on the captcha """ @@ -139,10 +136,7 @@ def isPositional(self): return self.captchaResultType == 'positional' def setWatingForUser(self, exclusive): - if exclusive: - self.status = "user" - else: - self.status = "shared-user" + self.status = "user" if exclusive else "shared-user" def timedOut(self): return time() > self.waitUntil diff --git a/module/ConfigParser.py b/module/ConfigParser.py index 78b612f139..f8d8a8f6c5 100644 --- a/module/ConfigParser.py +++ b/module/ConfigParser.py @@ -246,10 +246,10 @@ def cast(self, typ, value): elif typ == "int": return int(value) elif typ == "bool": - return True if value.lower() in ("1", "true", "on", "an", "yes") else False + return value.lower() in ("1", "true", "on", "an", "yes") elif typ == "time": if not value: value = "0:00" - if not ":" in value: value += ":00" + if ":" not in value: value += ":00" return value elif typ in ("str", "file", "folder"): try: diff --git a/module/HookManager.py b/module/HookManager.py index 16f692d76f..9ff4f19531 100644 --- a/module/HookManager.py +++ b/module/HookManager.py @@ -106,7 +106,7 @@ def addRPC(self, plugin, func, doc): def callRPC(self, plugin, func, args, parse): if not args: args = tuple() if parse: - args = tuple([literal_eval(x) for x in args]) + args = tuple(literal_eval(x) for x in args) plugin = self.pluginMap[plugin] f = getattr(plugin, func) @@ -147,10 +147,11 @@ def createIndex(self): self.plugins = plugins def manageHooks(self, plugin, name, value): - if name == "activated" and value: - self.activateHook(plugin) - elif name == "activated" and not value: - self.deactivateHook(plugin) + if name == "activated": + if value: + self.activateHook(plugin) + else: + self.deactivateHook(plugin) def activateHook(self, plugin): diff --git a/module/PullEvents.py b/module/PullEvents.py index 5ec76765e5..fcb91ad27c 100644 --- a/module/PullEvents.py +++ b/module/PullEvents.py @@ -71,8 +71,8 @@ def addEvent(self, event): class UpdateEvent(): def __init__(self, itype, iid, destination): - assert itype == "pack" or itype == "file" - assert destination == "queue" or destination == "collector" + assert itype in ["pack", "file"] + assert destination in ["queue", "collector"] self.type = itype self.id = iid self.destination = destination @@ -82,8 +82,8 @@ def toList(self): class RemoveEvent(): def __init__(self, itype, iid, destination): - assert itype == "pack" or itype == "file" - assert destination == "queue" or destination == "collector" + assert itype in ["pack", "file"] + assert destination in ["queue", "collector"] self.type = itype self.id = iid self.destination = destination @@ -93,8 +93,8 @@ def toList(self): class InsertEvent(): def __init__(self, itype, iid, after, destination): - assert itype == "pack" or itype == "file" - assert destination == "queue" or destination == "collector" + assert itype in ["pack", "file"] + assert destination in ["queue", "collector"] self.type = itype self.id = iid self.after = after @@ -105,7 +105,7 @@ def toList(self): class ReloadAllEvent(): def __init__(self, destination): - assert destination == "queue" or destination == "collector" + assert destination in ["queue", "collector"] self.destination = destination 
def toList(self): diff --git a/module/PyFile.py b/module/PyFile.py index 3dede93600..8ebbaef5db 100644 --- a/module/PyFile.py +++ b/module/PyFile.py @@ -280,6 +280,6 @@ def notifyChange(self): self.m.core.pullManager.addEvent(e) def setProgress(self, value): - if not value == self.progress: + if value != self.progress: self.progress = value self.notifyChange() diff --git a/module/Scheduler.py b/module/Scheduler.py index 0bc396b691..0c9ecc0b28 100644 --- a/module/Scheduler.py +++ b/module/Scheduler.py @@ -133,8 +133,7 @@ def get(self): """ return element or None """ self.lock.acquire() try: - el = heappop(self.queue) - return el + return heappop(self.queue) except IndexError: return None, None finally: diff --git a/module/ThreadManager.py b/module/ThreadManager.py index 8937f4a293..d160315cfc 100644 --- a/module/ThreadManager.py +++ b/module/ThreadManager.py @@ -68,7 +68,7 @@ def __init__(self, core): pycurl.global_init(pycurl.GLOBAL_DEFAULT) - for i in range(0, self.core.config.get("download", "max_downloads")): + for _ in range(self.core.config.get("download", "max_downloads")): self.createThread() @@ -215,7 +215,7 @@ def getIP(self): ("http://checkip.dyndns.org/",".*Current IP Address: (\S+).*")] ip = "" - for i in range(10): + for _ in range(10): try: sv = choice(services) ip = getURL(sv[0]) diff --git a/module/cli/ManageFiles.py b/module/cli/ManageFiles.py index 4d0377d9d6..cde60b8f16 100644 --- a/module/cli/ManageFiles.py +++ b/module/cli/ManageFiles.py @@ -182,23 +182,22 @@ def getLinks(self): def parseInput(self, inp, package=True): inp = inp.strip() - if "-" in inp: - l, n, h = inp.partition("-") - l = int(l) - h = int(h) - r = range(l, h + 1) - - ret = [] - if package: - for p in self.cache: - if p.pid in r: - ret.append(p.pid) - else: - for l in self.links.links: - if l.lid in r: - ret.append(l.lid) - - return ret + if "-" not in inp: + return [int(x) for x in inp.split(",")] + + l, n, h = inp.partition("-") + l = int(l) + h = int(h) + r = range(l, h + 1) + ret = [] + if package: + for p in self.cache: + if p.pid in r: + ret.append(p.pid) else: - return [int(x) for x in inp.split(",")] + for l in self.links.links: + if l.lid in r: + ret.append(l.lid) + + return ret diff --git a/module/common/APIExerciser.py b/module/common/APIExerciser.py index 96f5ce9cf9..3dcd96f195 100644 --- a/module/common/APIExerciser.py +++ b/module/common/APIExerciser.py @@ -14,7 +14,7 @@ def createURLs(): """ create some urls, some may fail """ urls = [] - for x in range(0, randint(20, 100)): + for _ in range(randint(20, 100)): name = "DEBUG_API" if randint(0, 5) == 5: name = "" #this link will fail @@ -30,7 +30,7 @@ def createURLs(): def startApiExerciser(core, n): - for i in range(n): + for _ in range(n): APIExerciser(core).start() class APIExerciser(Thread): @@ -45,12 +45,7 @@ def __init__(self, core, thrift=False, user=None, pw=None): self.count = 0 #number of methods self.time = time() - if thrift: - self.api = ThriftClient(user=user, password=pw) - else: - self.api = core.api - - + self.api = ThriftClient(user=user, password=pw) if thrift else core.api self.id = idPool idPool += 1 diff --git a/module/common/JsEngine.py b/module/common/JsEngine.py index 576be2a1bf..336cda1a7e 100644 --- a/module/common/JsEngine.py +++ b/module/common/JsEngine.py @@ -84,7 +84,7 @@ def __init__(self): self.init = False def __nonzero__(self): - return False if not ENGINE else True + return bool(ENGINE) def eval(self, script): if not self.init: @@ -141,8 +141,7 @@ def eval_js(self, script): script = 
"print(eval(unescape('%s')))" % quote(script) p = subprocess.Popen(["js", "-e", script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=-1) out, err = p.communicate() - res = out.strip() - return res + return out.strip() def eval_rhino(self, script): script = "print(eval(unescape('%s')))" % quote(script) diff --git a/module/common/packagetools.py b/module/common/packagetools.py index 5bfbcba95e..9e793040be 100644 --- a/module/common/packagetools.py +++ b/module/common/packagetools.py @@ -11,8 +11,7 @@ def matchFirst(string, *args): for pattern in patternlist: r = pattern.search(string) if r is not None: - name = r.group(1) - return name + return r.group(1) return string diff --git a/module/common/pylgettext.py b/module/common/pylgettext.py index fb36feceee..e1bff30ff3 100644 --- a/module/common/pylgettext.py +++ b/module/common/pylgettext.py @@ -9,10 +9,7 @@ def setpaths(pathlist): global _searchdirs - if isinstance(pathlist, list): - _searchdirs = pathlist - else: - _searchdirs = list(pathlist) + _searchdirs = pathlist if isinstance(pathlist, list) else list(pathlist) def addpath(path): @@ -26,9 +23,8 @@ def addpath(path): def delpath(path): global _searchdirs - if _searchdirs is not None: - if path in _searchdirs: - _searchdirs.remove(path) + if _searchdirs is not None and path in _searchdirs: + _searchdirs.remove(path) def clearpath(): @@ -41,21 +37,21 @@ def find(domain, localedir=None, languages=None, all=False): if _searchdirs is None: return origfind(domain, localedir, languages, all) searches = [localedir] + _searchdirs - results = list() + results = [] for dir in searches: res = origfind(domain, dir, languages, all) if all is False: results.append(res) else: results.extend(res) - if all is False: - results = filter(lambda x: x is not None, results) - if len(results) == 0: - return None - else: - return results[0] - else: + if all is not False: return results + results = filter(lambda x: x is not None, results) + if len(results) == 0: + return None + else: + return results[0] + #Is there a smarter/cleaner pythonic way for this? translation.func_globals['find'] = find diff --git a/module/database/DatabaseBackend.py b/module/database/DatabaseBackend.py index 9530390c30..d32678cf20 100644 --- a/module/database/DatabaseBackend.py +++ b/module/database/DatabaseBackend.py @@ -83,7 +83,7 @@ def __repr__(self): from os.path import basename frame = self.frame.f_back output = "" - for i in range(5): + for _ in range(5): output += "\t%s:%s, %s\n" % (basename(frame.f_code.co_filename), frame.f_lineno, frame.f_code.co_name) frame = frame.f_back del frame @@ -228,19 +228,13 @@ def _createTables(self): #try to lower ids self.c.execute('SELECT max(id) FROM LINKS') fid = self.c.fetchone()[0] - if fid: - fid = int(fid) - else: - fid = 0 + fid = int(fid) if fid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? WHERE name=?', (fid, "links")) self.c.execute('SELECT max(id) FROM packages') pid = self.c.fetchone()[0] - if pid: - pid = int(pid) - else: - pid = 0 + pid = int(pid) if pid else 0 self.c.execute('UPDATE SQLITE_SEQUENCE SET seq=? 
WHERE name=?', (pid, "packages")) self.c.execute('VACUUM') diff --git a/module/database/FileDatabase.py b/module/database/FileDatabase.py index 7e7efb028f..900b9ce93a 100644 --- a/module/database/FileDatabase.py +++ b/module/database/FileDatabase.py @@ -262,12 +262,14 @@ def getPackageData(self, id): data = self.db.getPackageData(id) - tmplist = [] - cache = self.cache.values() - for x in cache: - if int(x.toDbDict()[x.id]["package"]) == int(id): - tmplist.append((x.id, x.toDbDict()[x.id])) + tmplist = [ + (x.id, x.toDbDict()[x.id]) + for x in cache + if int(x.toDbDict()[x.id]["package"]) == int(id) + ] + + data.update(tmplist) pack["links"] = data @@ -346,9 +348,8 @@ def getDecryptJob(self): jobs = self.db.getPluginJob(plugins) if jobs: return self.getFile(jobs[0]) - else: - self.jobCache["decrypt"] = "empty" - return None + self.jobCache["decrypt"] = "empty" + return None def getFileCount(self): """returns number of files""" @@ -526,22 +527,24 @@ def checkPackageFinished(self, pyfile): """ checks if package is finished and calls hookmanager """ ids = self.db.getUnfinished(pyfile.packageid) - if not ids or (pyfile.id in ids and len(ids) == 1): - if not pyfile.package().setFinished: - self.core.log.info(_("Package finished: %s") % pyfile.package().name) - self.core.hookManager.packageFinished(pyfile.package()) - pyfile.package().setFinished = True + if ( + not ids or (pyfile.id in ids and len(ids) == 1) + ) and not pyfile.package().setFinished: + self.core.log.info(_("Package finished: %s") % pyfile.package().name) + self.core.hookManager.packageFinished(pyfile.package()) + pyfile.package().setFinished = True def reCheckPackage(self, pid): """ recheck links in package """ data = self.db.getPackageData(pid) - urls = [] + urls = [ + (pyfile["url"], pyfile["plugin"]) + for pyfile in data.itervalues() + if pyfile["status"] not in (0, 12, 13) + ] - for pyfile in data.itervalues(): - if pyfile["status"] not in (0, 12, 13): - urls.append((pyfile["url"], pyfile["plugin"])) self.core.threadManager.createInfoThread(urls, pid) @@ -659,9 +662,7 @@ def getAllLinks(self, q): """ self.c.execute('SELECT l.id,l.url,l.name,l.size,l.status,l.error,l.plugin,l.package,l.linkorder FROM links as l INNER JOIN packages as p ON l.package=p.id WHERE p.queue=? ORDER BY l.linkorder', (q,)) - data = {} - for r in self.c: - data[r[0]] = { + return {r[0]: { 'id': r[0], 'url': r[1], 'name': r[2], @@ -673,9 +674,7 @@ def getAllLinks(self, q): 'plugin': r[6], 'package': r[7], 'order': r[8], - } - - return data + } for r in self.c} @style.queue def getAllPackages(self, q): @@ -743,9 +742,7 @@ def getPackageData(self, id): """get data about links for a package""" self.c.execute('SELECT id,url,name,size,status,error,plugin,package,linkorder FROM links WHERE package=? ORDER BY linkorder', (str(id), )) - data = {} - for r in self.c: - data[r[0]] = { + return {r[0]: { 'id': r[0], 'url': r[1], 'name': r[2], @@ -757,9 +754,7 @@ def getPackageData(self, id): 'plugin': r[6], 'package': r[7], 'order': r[8], - } - - return data + } for r in self.c} @style.async @@ -770,15 +765,12 @@ def updateLink(self, f): def updatePackage(self, p): self.c.execute('UPDATE packages SET name=?,folder=?,site=?,password=?,queue=? WHERE id=?', (p.name, p.folder, p.site, p.password, p.queue, str(p.id))) - @style.queue + @style.queue def updateLinkInfo(self, data): """ data is list of tupels (name, size, status, url) """ self.c.executemany('UPDATE links SET name=?, size=?, status=? WHERE url=? 
AND status IN (1,2,3,14)', data) - ids = [] self.c.execute('SELECT id FROM links WHERE url IN (\'%s\')' % "','".join([x[3] for x in data])) - for r in self.c: - ids.append(int(r[0])) - return ids + return [int(r[0]) for r in self.c] @style.queue def reorderPackage(self, p, position, noMove=False): diff --git a/module/database/StorageDatabase.py b/module/database/StorageDatabase.py index 3ed29625f9..d2b7fbe097 100644 --- a/module/database/StorageDatabase.py +++ b/module/database/StorageDatabase.py @@ -37,10 +37,7 @@ def getStorage(db, identifier, key=None): return row[0] else: db.c.execute("SELECT key, value FROM storage WHERE identifier=?", (identifier, )) - d = {} - for row in db.c: - d[row[0]] = row[1] - return d + return {row[0]: row[1] for row in db.c} @style.queue def delStorage(db, identifier, key): diff --git a/module/database/UserDatabase.py b/module/database/UserDatabase.py index 0c781057dd..abd11985a6 100644 --- a/module/database/UserDatabase.py +++ b/module/database/UserDatabase.py @@ -42,7 +42,10 @@ def checkAuth(db, user, password): @style.queue def addUser(db, user, password): - salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(0, 5)]) + salt = reduce( + lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(5)] + ) + h = sha1(salt + password) password = salt + h.hexdigest() @@ -65,7 +68,10 @@ def changePassword(db, user, oldpw, newpw): pw = r[2][5:] h = sha1(salt + oldpw) if h.hexdigest() == pw: - salt = reduce(lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(0, 5)]) + salt = reduce( + lambda x, y: x + y, [str(random.randint(0, 9)) for i in range(5)] + ) + h = sha1(salt + newpw) password = salt + h.hexdigest() @@ -87,19 +93,20 @@ def setRole(db, user, role): @style.queue def listUsers(db): db.c.execute('SELECT name FROM users') - users = [] - for row in db.c: - users.append(row[0]) - return users + return [row[0] for row in db.c] @style.queue def getAllUserData(db): db.c.execute("SELECT name, permission, role, template, email FROM users") - user = {} - for r in db.c: - user[r[0]] = {"permission": r[1], "role": r[2], "template": r[3], "email": r[4]} - - return user + return { + r[0]: { + "permission": r[1], + "role": r[2], + "template": r[3], + "email": r[4], + } + for r in db.c + } @style.queue def removeUser(db, user): diff --git a/module/gui/Accounts.py b/module/gui/Accounts.py index 8db04dfa9a..1b098d58ea 100644 --- a/module/gui/Accounts.py +++ b/module/gui/Accounts.py @@ -76,11 +76,10 @@ def data(self, index, role=Qt.DisplayRole): if not self.toData(index).validuntil: return QVariant(_("n/a")) until = int(self.toData(index).validuntil) - if until > 0: - fmtime = strftime(_("%a, %d %b %Y %H:%M"), gmtime(until)) - return QVariant(fmtime) - else: + if until <= 0: return QVariant(_("unlimited")) + fmtime = strftime(_("%a, %d %b %Y %H:%M"), gmtime(until)) + return QVariant(fmtime) #elif role == Qt.EditRole: # if index.column() == 0: # return QVariant(index.internalPointer().data["name"]) @@ -92,13 +91,12 @@ def index(self, row, column, parent=QModelIndex()): """ if parent == QModelIndex() and len(self._data) > row: pointer = self._data[row] - index = self.createIndex(row, column, pointer) + return self.createIndex(row, column, pointer) elif parent.isValid(): pointer = parent.internalPointer().children[row] - index = self.createIndex(row, column, pointer) + return self.createIndex(row, column, pointer) else: - index = QModelIndex() - return index + return QModelIndex() def parent(self, index): """ @@ -121,10 +119,7 @@ def 
hasChildren(self, parent=QModelIndex()): """ everything on top level """ - if parent == QModelIndex(): - return True - else: - return False + return parent == QModelIndex() def canFetchMore(self, parent): return False diff --git a/module/gui/Collector.py b/module/gui/Collector.py index 3ec4262f14..a665287995 100644 --- a/module/gui/Collector.py +++ b/module/gui/Collector.py @@ -16,6 +16,7 @@ @author: mkaay """ + from PyQt4.QtCore import * from PyQt4.QtGui import * @@ -24,7 +25,7 @@ from module.remote.thriftbackend.ThriftClient import Destination, FileDoesNotExists, ElementType -statusMapReverse = dict((v,k) for k, v in statusMap.iteritems()) +statusMapReverse = {v: k for k, v in statusMap.iteritems()} translatedStatusMap = {} # -> CollectorModel.__init__ @@ -154,7 +155,7 @@ def updateEvent(self, event): for k, child in enumerate(package.children): if child.id == event.id: child.update(info) - if not info.status == 12: + if info.status != 12: child.data["downloading"] = None self.emit(SIGNAL("dataChanged(const QModelIndex &, const QModelIndex &)"), self.index(k, 0, self.index(p, 0)), self.index(k, self.cols, self.index(p, self.cols))) break @@ -182,7 +183,7 @@ def data(self, index, role=Qt.DisplayRole): plugins = [] if isinstance(item, Package): for child in item.children: - if not child.data["plugin"] in plugins: + if child.data["plugin"] not in plugins: plugins.append(child.data["plugin"]) else: plugins.append(item.data["plugin"]) @@ -201,11 +202,8 @@ def data(self, index, role=Qt.DisplayRole): item = index.internalPointer() if isinstance(item, Link): return QVariant(formatSize(item.data["size"])) - else: - ms = 0 - for c in item.children: - ms += c.data["size"] - return QVariant(formatSize(ms)) + ms = sum(c.data["size"] for c in item.children) + return QVariant(formatSize(ms)) elif role == Qt.EditRole: if index.column() == 0: return QVariant(index.internalPointer().data["name"]) @@ -217,16 +215,15 @@ def index(self, row, column, parent=QModelIndex()): """ if parent == QModelIndex() and len(self._data) > row: pointer = self._data[row] - index = self.createIndex(row, column, pointer) + return self.createIndex(row, column, pointer) elif parent.isValid(): try: pointer = parent.internalPointer().children[row] except: return QModelIndex() - index = self.createIndex(row, column, pointer) + return self.createIndex(row, column, pointer) else: - index = QModelIndex() - return index + return QModelIndex() def parent(self, index): """ @@ -250,17 +247,15 @@ def rowCount(self, parent=QModelIndex()): if parent == QModelIndex(): #return package count return len(self._data) - else: - if parent.isValid(): - #index is valid - pack = parent.internalPointer() - if isinstance(pack, Package): - #index points to a package - #return len of children - return len(pack.children) - else: - #index is invalid - return False + if not parent.isValid(): + #index is invalid + return False + #index is valid + pack = parent.internalPointer() + if isinstance(pack, Package): + #index points to a package + #return len of children + return len(pack.children) #files have no children return 0 diff --git a/module/gui/ConnectionManager.py b/module/gui/ConnectionManager.py index def575abc1..07ac3b04e0 100644 --- a/module/gui/ConnectionManager.py +++ b/module/gui/ConnectionManager.py @@ -138,18 +138,15 @@ def slotRemove(self): def slotConnect(self): if self.internal.checkState() == 2: data = {"type": "internal"} - self.emit(SIGNAL("connect"), data) else: item = self.connList.currentItem() data = item.data(Qt.UserRole).toPyObject() 
data = self.cleanDict(data) - self.emit(SIGNAL("connect"), data) + + self.emit(SIGNAL("connect"), data) def cleanDict(self, data): - tmp = {} - for k, d in data.items(): - tmp[str(k)] = d - return tmp + return {str(k): d for k, d in data.items()} def slotSave(self, data): self.emit(SIGNAL("saveConnection"), data) @@ -240,14 +237,11 @@ def __init__(self): def setData(self, data): if not data: return - + self.id = data["id"] self.default = data["default"] self.controls["name"].setText(data["name"]) - if data["type"] == "local": - data["local"] = True - else: - data["local"] = False + data["local"] = data["type"] == "local" self.controls["local"].setChecked(data["local"]) if not data["local"]: self.controls["user"].setText(data["user"]) @@ -280,19 +274,18 @@ def slotLocalChanged(self, val): self.controls["host"].setDisabled(False) def getData(self): - d = {} - d["id"] = self.id - d["default"] = self.default - d["name"] = self.controls["name"].text() + d = { + "id": self.id, + "default": self.default, + "name": self.controls["name"].text(), + } + d["local"] = self.controls["local"].isChecked() d["user"] = self.controls["user"].text() d["password"] = self.controls["password"].text() d["host"] = self.controls["host"].text() d["port"] = self.controls["port"].value() - if d["local"]: - d["type"] = "local" - else: - d["type"] = "remote" + d["type"] = "local" if d["local"] else "remote" return d def slotDone(self): diff --git a/module/gui/CoreConfigParser.py b/module/gui/CoreConfigParser.py index 0d1d298c6f..5a478eb435 100644 --- a/module/gui/CoreConfigParser.py +++ b/module/gui/CoreConfigParser.py @@ -24,15 +24,11 @@ def checkVersion(self): if not exists(join(self.configdir, "pyload.conf")): return False - f = open(join(self.configdir, "pyload.conf"), "rb") - v = f.readline() - f.close() + with open(join(self.configdir, "pyload.conf"), "rb") as f: + v = f.readline() v = v[v.find(":")+1:].strip() - - if int(v) < CONF_VERSION: - return False - - return True + + return int(v) >= CONF_VERSION #---------------------------------------------------------------------- def readConfig(self): @@ -45,85 +41,74 @@ def readConfig(self): def parseConfig(self, config): """parses a given configfile""" - f = open(config) - - config = f.read() + with open(config) as f: + config = f.read() - config = config.split("\n")[1:] - - conf = {} - - section, option, value, typ, desc = "","","","","" - - listmode = False - - for line in config: - - line = line.rpartition("#") # removes comments - - if line[1]: - line = line[0] - else: - line = line[2] - - line = line.strip() - - try: - - if line == "": - continue - elif line.endswith(":"): - section, none, desc = line[:-1].partition('-') - section = section.strip() - desc = desc.replace('"', "").strip() - conf[section] = { "desc" : desc } - else: - if listmode: - - if line.endswith("]"): - listmode = False - line = line.replace("]","") - - value += [self.cast(typ, x.strip()) for x in line.split(",") if x] - - if not listmode: - conf[section][option] = { "desc" : desc, - "type" : typ, - "value" : value} - - - else: - content, none, value = line.partition("=") - - content, none, desc = content.partition(":") + config = config.split("\n")[1:] + + conf = {} + + section, option, value, typ, desc = "","","","","" + + listmode = False + + for line in config: + + line = line.rpartition("#") # removes comments + + line = line[0] if line[1] else line[2] + line = line.strip() + + try: + if line == "": + continue + elif line.endswith(":"): + section, none, desc = line[:-1].partition('-') + 
section = section.strip() desc = desc.replace('"', "").strip() - - typ, option = content.split() - - value = value.strip() - - if value.startswith("["): - if value.endswith("]"): + conf[section] = { "desc" : desc } + else: + if listmode: + + if line.endswith("]"): listmode = False - value = value[:-1] - else: - listmode = True - - value = [self.cast(typ, x.strip()) for x in value[1:].split(",") if x] + line = line.replace("]","") + + value += [self.cast(typ, x.strip()) for x in line.split(",") if x] + else: - value = self.cast(typ, value) - + content, none, value = line.partition("=") + + content, none, desc = content.partition(":") + + desc = desc.replace('"', "").strip() + + typ, option = content.split() + + value = value.strip() + + if value.startswith("["): + if value.endswith("]"): + listmode = False + value = value[:-1] + else: + listmode = True + + value = [self.cast(typ, x.strip()) for x in value[1:].split(",") if x] + else: + value = self.cast(typ, value) + if not listmode: conf[section][option] = { "desc" : desc, "type" : typ, - "value" : value} - - except: - pass - - - f.close() + "value" : value} + + + except: + pass + + return conf #---------------------------------------------------------------------- @@ -131,11 +116,11 @@ def cast(self, typ, value): """cast value to given format""" if type(value) not in (str, unicode): return value - - if typ == "int": + + if typ == "bool": + return value.lower() in ("1","true", "on", "an","yes") + elif typ == "int": return int(value) - elif typ == "bool": - return True if value.lower() in ("1","true", "on", "an","yes") else False else: return value diff --git a/module/gui/Overview.py b/module/gui/Overview.py index 183383b5eb..9853c83e60 100644 --- a/module/gui/Overview.py +++ b/module/gui/Overview.py @@ -43,11 +43,7 @@ def queueChanged(self): #dirty.. 
self.packages = [] def partsFinished(p): - f = 0 - for c in p.children: - if c.data["status"] == 0: - f += 1 - return f + return sum(1 for c in p.children if c.data["status"] == 0) def maxSize(p): ms = 0 diff --git a/module/gui/PackageDock.py b/module/gui/PackageDock.py index 73db8f177f..a787353664 100644 --- a/module/gui/PackageDock.py +++ b/module/gui/PackageDock.py @@ -52,8 +52,7 @@ def parseUri(self): self.widget.box.setText("") result = re.findall(r"(?:ht|f)tps?:\/\/[a-zA-Z0-9\-\.\/\?=_&%#]+[<| |\"|\'|\r|\n|\t]{1}", text) for url in result: - if "\n" or "\t" or "\r" or "\"" or "<" or "'" in url: - url = url[:-1] + url = url[:-1] self.widget.box.append("%s " % url) class NewPackageWindow(QWidget): diff --git a/module/gui/Queue.py b/module/gui/Queue.py index 0a0cbb8106..8a6d3530a1 100644 --- a/module/gui/Queue.py +++ b/module/gui/Queue.py @@ -105,9 +105,7 @@ def updateCount(self): ugly?: Overview connects to this signal for updating """ packageCount = len(self._data) - fileCount = 0 - for p in self._data: - fileCount += len(p.children) + fileCount = sum(len(p.children) for p in self._data) self.mutex.unlock() self.emit(SIGNAL("updateCount"), packageCount, fileCount) self.mutex.lock() @@ -167,24 +165,27 @@ def getWaitingProgress(self, item): returns time to wait, caches startingtime to provide progress """ locker = QMutexLocker(self.mutex) - if isinstance(item, Link): - if item.data["status"] == 5 and item.data["downloading"]: - until = float(item.data["downloading"]["wait_until"]) - try: - since, until_old = self.wait_dict[item.id] - if not until == until_old: - raise Exception - except: - since = time() - self.wait_dict[item.id] = since, until - since = float(since) - max_wait = float(until-since) - rest = int(until-time()) - if rest < 0: - return 0, None - res = 100/max_wait - perc = rest*res - return perc, rest + if ( + isinstance(item, Link) + and item.data["status"] == 5 + and item.data["downloading"] + ): + until = float(item.data["downloading"]["wait_until"]) + try: + since, until_old = self.wait_dict[item.id] + if until != until_old: + raise Exception + except: + since = time() + self.wait_dict[item.id] = since, until + since = float(since) + max_wait = float(until-since) + rest = int(until-time()) + if rest < 0: + return 0, None + res = 100/max_wait + perc = rest*res + return perc, rest return None def getProgress(self, item, locked=True): @@ -232,7 +233,7 @@ def getSpeed(self, item): for child in item.children: val = 0 if child.data["downloading"]: - if not child.data["statusmsg"] == "waiting": + if child.data["statusmsg"] != "waiting": all_waiting = False val = int(child.data["downloading"]["speed"]) running = True @@ -256,7 +257,7 @@ def data(self, index, role=Qt.DisplayRole): plugins = [] if isinstance(item, Package): for child in item.children: - if not child.data["plugin"] in plugins: + if child.data["plugin"] not in plugins: plugins.append(child.data["plugin"]) else: plugins.append(item.data["plugin"]) @@ -271,7 +272,7 @@ def data(self, index, role=Qt.DisplayRole): status = child.data["status"] else: status = item.data["status"] - + if speed is None or status == 7 or status == 10 or status == 5: return QVariant(self.translateStatus(statusMapReverse[status])) else: @@ -305,15 +306,14 @@ def data(self, index, role=Qt.DisplayRole): elif self.getProgress(c, False) == 100: cs += s ms += s - if cs == 0 or cs == ms: + if cs in [0, ms]: return QVariant(formatSize(ms)) else: return QVariant("%s / %s" % (formatSize(cs), formatSize(ms))) elif index.column() == 4: item = 
index.internalPointer() - if isinstance(item, Link): - if item.data["downloading"]: - return QVariant(item.data["downloading"]["format_eta"]) + if isinstance(item, Link) and item.data["downloading"]: + return QVariant(item.data["downloading"]["format_eta"]) elif role == Qt.EditRole: if index.column() == 0: return QVariant(index.internalPointer().data["name"]) @@ -380,7 +380,7 @@ def paint(self, painter, option, index): opts.rect.setHeight(option.rect.height()-1) opts.textVisible = True opts.textAlignment = Qt.AlignCenter - if not wait is None: + if wait is not None: opts.text = QString(_("waiting %d seconds") % (wait,)) else: opts.text = QString.number(opts.progress) + "%" diff --git a/module/gui/SettingsWidget.py b/module/gui/SettingsWidget.py index cd22a7b9eb..3d96eef73d 100644 --- a/module/gui/SettingsWidget.py +++ b/module/gui/SettingsWidget.py @@ -114,10 +114,10 @@ def reloadSection(self, sections, pdata): if item.type == "int": i.setValue(int(item.value)) - elif not item.type.find(";") == -1: + elif item.type.find(";") != -1: i.setCurrentIndex(i.findText(item.value)) elif item.type == "bool": - if True if item.value.lower() in ("1","true", "on", "an","yes") else False: + if item.value.lower() in ("1","true", "on", "an","yes"): i.setCurrentIndex(0) else: i.setCurrentIndex(1) @@ -146,11 +146,14 @@ def saveSection(self, sections, pdata, sec="core"): if item.type == "int": if i.value() != int(item.value): self.connector.setConfigValue(k, option, i.value(), sec) - elif not item.type.find(";") == -1: + elif item.type.find(";") != -1: if i.currentText() != item.value: self.connector.setConfigValue(k, option, i.currentText(), sec) elif item.type == "bool": - if (True if item.value.lower() in ("1","true", "on", "an","yes") else False) ^ (not i.currentIndex()): + if ( + item.value.lower() + in ("1", "true", "on", "an", "yes") + ) ^ (not i.currentIndex()): self.connector.setConfigValue(k, option, not i.currentIndex(), sec) else: if i.text() != item.value: @@ -165,24 +168,24 @@ def __init__(self, data, parent, ctype="core"): self.ctype = ctype layout = QFormLayout(self) self.setLayout(layout) - + sw = QWidget() sw.setLayout(QVBoxLayout()) sw.layout().addWidget(self) - + sa = QScrollArea() sa.setWidgetResizable(True) sa.setWidget(sw) sa.setFrameShape(sa.NoFrame) - + parent.addTab(sa, data.description) - + for option in self.data.items: if option.type == "int": i = QSpinBox(self) i.setMaximum(999999) i.setValue(int(option.value)) - elif not option.type.find(";") == -1: + elif option.type.find(";") != -1: choices = option.type.split(";") i = QComboBox(self) i.addItems(choices) @@ -191,7 +194,7 @@ def __init__(self, data, parent, ctype="core"): i = QComboBox(self) i.addItem(_("Yes"), QVariant(True)) i.addItem(_("No"), QVariant(False)) - if True if option.value.lower() in ("1","true", "on", "an","yes") else False: + if option.value.lower() in ("1","true", "on", "an","yes"): i.setCurrentIndex(0) else: i.setCurrentIndex(1) diff --git a/module/gui/XMLParser.py b/module/gui/XMLParser.py index 5e3b7bf65f..3c191745df 100644 --- a/module/gui/XMLParser.py +++ b/module/gui/XMLParser.py @@ -53,10 +53,7 @@ def saveData(self): return content def parseNode(self, node, ret_type="list"): - if ret_type == "dict": - childNodes = {} - else: - childNodes = [] + childNodes = {} if ret_type == "dict" else [] child = node.firstChild() while True: n = child.toElement() diff --git a/module/gui/connector.py b/module/gui/connector.py index c16ccd08e9..41460d3bf6 100644 --- a/module/gui/connector.py +++ 
b/module/gui/connector.py @@ -83,17 +83,17 @@ def connectProxy(self): self.emit(SIGNAL("errorBox"), err) Connector.firstAttempt = False return False - + self.proxy = DispatchRPC(self.mutex, client) self.connect(self.proxy, SIGNAL("connectionLost"), self, SIGNAL("connectionLost")) - + server_version = self.proxy.getServerVersion() self.connectionID = uuid().hex - - if not server_version == SERVER_VERSION: + + if server_version != SERVER_VERSION: self.emit(SIGNAL("errorBox"), _("server is version %(new)s client accepts version %(current)s") % { "new": server_version, "current": SERVER_VERSION}) return False - + return True def __getattr__(self, attr): @@ -131,8 +131,7 @@ def __getattr__(self, attr): """ self.mutex.lock() self.fname = attr - f = self.Wrapper(getattr(self.server, attr), self.mutex, self) - return f + return self.Wrapper(getattr(self.server, attr), self.mutex, self) class Wrapper(object): """ diff --git a/module/lib/BeautifulSoup.py b/module/lib/BeautifulSoup.py index 55567f588b..15fdf0232d 100644 --- a/module/lib/BeautifulSoup.py +++ b/module/lib/BeautifulSoup.py @@ -187,63 +187,60 @@ def _lastRecursiveChild(self): return lastChild def insert(self, position, newChild): - if isinstance(newChild, basestring) \ - and not isinstance(newChild, NavigableString): - newChild = NavigableString(newChild) - - position = min(position, len(self.contents)) - if hasattr(newChild, 'parent') and newChild.parent is not None: - # We're 'inserting' an element that's already one - # of this object's children. - if newChild.parent is self: - index = self.index(newChild) - if index > position: - # Furthermore we're moving it further down the - # list of this object's children. That means that - # when we extract this element, our target index - # will jump down one. - position = position - 1 - newChild.extract() - - newChild.parent = self - previousChild = None - if position == 0: - newChild.previousSibling = None - newChild.previous = self - else: - previousChild = self.contents[position-1] - newChild.previousSibling = previousChild - newChild.previousSibling.nextSibling = newChild - newChild.previous = previousChild._lastRecursiveChild() - if newChild.previous: - newChild.previous.next = newChild - - newChildsLastElement = newChild._lastRecursiveChild() - - if position >= len(self.contents): - newChild.nextSibling = None - - parent = self - parentsNextSibling = None - while not parentsNextSibling: - parentsNextSibling = parent.nextSibling - parent = parent.parent - if not parent: # This is the last element in the document. - break - if parentsNextSibling: - newChildsLastElement.next = parentsNextSibling - else: - newChildsLastElement.next = None - else: - nextChild = self.contents[position] - newChild.nextSibling = nextChild - if newChild.nextSibling: - newChild.nextSibling.previousSibling = newChild - newChildsLastElement.next = nextChild - - if newChildsLastElement.next: - newChildsLastElement.next.previous = newChildsLastElement - self.contents.insert(position, newChild) + if isinstance(newChild, basestring) \ + and not isinstance(newChild, NavigableString): + newChild = NavigableString(newChild) + + position = min(position, len(self.contents)) + if hasattr(newChild, 'parent') and newChild.parent is not None: + # We're 'inserting' an element that's already one + # of this object's children. + if newChild.parent is self: + index = self.index(newChild) + if index > position: + # Furthermore we're moving it further down the + # list of this object's children. 
That means that + # when we extract this element, our target index + # will jump down one. + position = position - 1 + newChild.extract() + + newChild.parent = self + previousChild = None + if position == 0: + newChild.previousSibling = None + newChild.previous = self + else: + previousChild = self.contents[position-1] + newChild.previousSibling = previousChild + newChild.previousSibling.nextSibling = newChild + newChild.previous = previousChild._lastRecursiveChild() + if newChild.previous: + newChild.previous.next = newChild + + newChildsLastElement = newChild._lastRecursiveChild() + + if position >= len(self.contents): + newChild.nextSibling = None + + parent = self + parentsNextSibling = None + while not parentsNextSibling: + parentsNextSibling = parent.nextSibling + parent = parent.parent + if not parent: # This is the last element in the document. + break + newChildsLastElement.next = parentsNextSibling or None + else: + nextChild = self.contents[position] + newChild.nextSibling = nextChild + if newChild.nextSibling: + newChild.nextSibling.previousSibling = newChild + newChildsLastElement.next = nextChild + + if newChildsLastElement.next: + newChildsLastElement.next.previous = newChildsLastElement + self.contents.insert(position, newChild) def append(self, tag): """Appends the given tag to the contents of this tag.""" @@ -404,22 +401,17 @@ def substituteEncoding(self, str, encoding=None): return str.replace("%SOUP-ENCODING%", encoding) def toEncoding(self, s, encoding=None): - """Encodes an object to a string in some encoding, or to Unicode. + """Encodes an object to a string in some encoding, or to Unicode. .""" - if isinstance(s, unicode): - if encoding: - s = s.encode(encoding) - elif isinstance(s, str): - if encoding: - s = s.encode(encoding) - else: - s = unicode(s) - else: - if encoding: - s = self.toEncoding(str(s), encoding) - else: - s = unicode(s) - return s + if isinstance(s, unicode) and encoding or isinstance(s, str) and encoding: + s = s.encode(encoding) + elif isinstance(s, unicode): + pass + elif not encoding: + s = unicode(s) + else: + s = self.toEncoding(str(s), encoding) + return s class NavigableString(unicode, PageElement): @@ -481,11 +473,8 @@ class Tag(PageElement): """Represents a found HTML tag with its attributes and contents.""" def _invert(h): - "Cheap function to invert a hash." - i = {} - for k,v in h.items(): - i[v] = k - return i + "Cheap function to invert a hash." + return {v: k for k,v in h.items()} XML_ENTITIES_TO_SPECIAL_CHARS = { "apos" : "'", "quot" : '"', @@ -614,18 +603,18 @@ def __nonzero__(self): return True def __setitem__(self, key, value): - """Setting tag[key] sets the value of the 'key' attribute for the + """Setting tag[key] sets the value of the 'key' attribute for the tag.""" - self._getAttrMap() - self.attrMap[key] = value - found = False - for i in range(0, len(self.attrs)): - if self.attrs[i][0] == key: - self.attrs[i] = (key, value) - found = True - if not found: - self.attrs.append((key, value)) - self._getAttrMap()[key] = value + self._getAttrMap() + self.attrMap[key] = value + found = False + for i in range(len(self.attrs)): + if self.attrs[i][0] == key: + self.attrs[i] = (key, value) + found = True + if not found: + self.attrs.append((key, value)) + self._getAttrMap()[key] = value def __delitem__(self, key): "Deleting tag[key] deletes all 'key' attributes for the tag." 
@@ -653,19 +642,17 @@ def __getattr__(self, tag): raise AttributeError, "'%s' object has no attribute '%s'" % (self.__class__, tag) def __eq__(self, other): - """Returns true iff this tag has the same name, the same attributes, + """Returns true iff this tag has the same name, the same attributes, and the same contents (recursively) as the given tag. NOTE: right now this will return false if two tags have the same attributes in a different order. Should this be fixed?""" - if other is self: - return True - if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other): - return False - for i in range(0, len(self.contents)): - if self.contents[i] != other.contents[i]: - return False - return True + if other is self: + return True + if not hasattr(other, 'name') or not hasattr(other, 'attrs') or not hasattr(other, 'contents') or self.name != other.name or self.attrs != other.attrs or len(self) != len(other): + return False + return all( + self.contents[i] == other.contents[i] for i in range(len(self.contents))) def __ne__(self, other): """Returns true iff this tag is not identical to the other tag, @@ -690,88 +677,86 @@ def _sub_entity(self, x): def __str__(self, encoding=DEFAULT_OUTPUT_ENCODING, prettyPrint=False, indentLevel=0): - """Returns a string or Unicode representation of this tag and + """Returns a string or Unicode representation of this tag and its contents. To get Unicode, pass None for encoding. NOTE: since Python's HTML parser consumes whitespace, this method is not certain to reproduce the whitespace present in the original string.""" - encodedName = self.toEncoding(self.name, encoding) - - attrs = [] - if self.attrs: - for key, val in self.attrs: - fmt = '%s="%s"' - if isinstance(val, basestring): - if self.containsSubstitutions and '%SOUP-ENCODING%' in val: - val = self.substituteEncoding(val, encoding) - - # The attribute value either: - # - # * Contains no embedded double quotes or single quotes. - # No problem: we enclose it in double quotes. - # * Contains embedded single quotes. No problem: - # double quotes work here too. - # * Contains embedded double quotes. No problem: - # we enclose it in single quotes. - # * Embeds both single _and_ double quotes. This - # can't happen naturally, but it can happen if - # you modify an attribute value after parsing - # the document. Now we have a bit of a - # problem. We solve it by enclosing the - # attribute in single quotes, and escaping any - # embedded single quotes to XML entities. - if '"' in val: - fmt = "%s='%s'" - if "'" in val: - # TODO: replace with apos when - # appropriate. - val = val.replace("'", "&squot;") - - # Now we're okay w/r/t quotes. But the attribute - # value might also contain angle brackets, or - # ampersands that aren't part of entities. We need - # to escape those to XML entities too. 
- val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val) - - attrs.append(fmt % (self.toEncoding(key, encoding), - self.toEncoding(val, encoding))) - close = '' - closeTag = '' - if self.isSelfClosing: - close = ' /' - else: - closeTag = '' % encodedName - - indentTag, indentContents = 0, 0 - if prettyPrint: - indentTag = indentLevel - space = (' ' * (indentTag-1)) - indentContents = indentTag + 1 - contents = self.renderContents(encoding, prettyPrint, indentContents) - if self.hidden: - s = contents - else: - s = [] - attributeString = '' - if attrs: - attributeString = ' ' + ' '.join(attrs) - if prettyPrint: - s.append(space) - s.append('<%s%s%s>' % (encodedName, attributeString, close)) - if prettyPrint: - s.append("\n") - s.append(contents) - if prettyPrint and contents and contents[-1] != "\n": - s.append("\n") - if prettyPrint and closeTag: - s.append(space) - s.append(closeTag) - if prettyPrint and closeTag and self.nextSibling: - s.append("\n") - s = ''.join(s) - return s + encodedName = self.toEncoding(self.name, encoding) + + attrs = [] + if self.attrs: + for key, val in self.attrs: + fmt = '%s="%s"' + if isinstance(val, basestring): + if self.containsSubstitutions and '%SOUP-ENCODING%' in val: + val = self.substituteEncoding(val, encoding) + + # The attribute value either: + # + # * Contains no embedded double quotes or single quotes. + # No problem: we enclose it in double quotes. + # * Contains embedded single quotes. No problem: + # double quotes work here too. + # * Contains embedded double quotes. No problem: + # we enclose it in single quotes. + # * Embeds both single _and_ double quotes. This + # can't happen naturally, but it can happen if + # you modify an attribute value after parsing + # the document. Now we have a bit of a + # problem. We solve it by enclosing the + # attribute in single quotes, and escaping any + # embedded single quotes to XML entities. + if '"' in val: + fmt = "%s='%s'" + if "'" in val: + # TODO: replace with apos when + # appropriate. + val = val.replace("'", "&squot;") + + # Now we're okay w/r/t quotes. But the attribute + # value might also contain angle brackets, or + # ampersands that aren't part of entities. We need + # to escape those to XML entities too. 
+ val = self.BARE_AMPERSAND_OR_BRACKET.sub(self._sub_entity, val) + + attrs.append(fmt % (self.toEncoding(key, encoding), + self.toEncoding(val, encoding))) + close = '' + closeTag = '' + if self.isSelfClosing: + close = ' /' + else: + closeTag = '' % encodedName + + indentTag, indentContents = 0, 0 + if prettyPrint: + indentTag = indentLevel + space = (' ' * (indentTag-1)) + indentContents = indentTag + 1 + contents = self.renderContents(encoding, prettyPrint, indentContents) + if self.hidden: + return contents + s = [] + attributeString = '' + if attrs: + attributeString = ' ' + ' '.join(attrs) + if prettyPrint: + s.append(space) + s.append('<%s%s%s>' % (encodedName, attributeString, close)) + if prettyPrint: + s.append("\n") + s.append(contents) + if prettyPrint and contents and contents[-1] != "\n": + s.append("\n") + if prettyPrint and closeTag: + s.append(space) + s.append(closeTag) + if prettyPrint and closeTag and self.nextSibling: + s.append("\n") + return ''.join(s) def decompose(self): """Recursively destroys the contents of this tree.""" @@ -906,41 +891,36 @@ def __str__(self): return "%s|%s" % (self.name, self.attrs) def searchTag(self, markupName=None, markupAttrs={}): - found = None - markup = None - if isinstance(markupName, Tag): - markup = markupName - markupAttrs = markup - callFunctionWithTagData = callable(self.name) \ - and not isinstance(markupName, Tag) - - if (not self.name) \ + found = None + markup = None + if isinstance(markupName, Tag): + markup = markupName + markupAttrs = markup + callFunctionWithTagData = callable(self.name) \ + and not isinstance(markupName, Tag) + + if (not self.name) \ or callFunctionWithTagData \ or (markup and self._matches(markup, self.name)) \ or (not markup and self._matches(markupName, self.name)): - if callFunctionWithTagData: - match = self.name(markupName, markupAttrs) - else: - match = True - markupAttrMap = None - for attr, matchAgainst in self.attrs.items(): - if not markupAttrMap: - if hasattr(markupAttrs, 'get'): - markupAttrMap = markupAttrs - else: - markupAttrMap = {} - for k,v in markupAttrs: - markupAttrMap[k] = v - attrValue = markupAttrMap.get(attr) - if not self._matches(attrValue, matchAgainst): - match = False - break - if match: - if markup: - found = markup - else: - found = markupName - return found + if callFunctionWithTagData: + match = self.name(markupName, markupAttrs) + else: + match = True + markupAttrMap = None + for attr, matchAgainst in self.attrs.items(): + if not markupAttrMap: + if hasattr(markupAttrs, 'get'): + markupAttrMap = markupAttrs + else: + markupAttrMap = {k: v for k,v in markupAttrs} + attrValue = markupAttrMap.get(attr) + if not self._matches(attrValue, matchAgainst): + match = False + break + if match: + found = markup or markupName + return found def search(self, markup): #print 'looking for %s in %s' % (self, markup) @@ -1155,37 +1135,36 @@ def convert_charref(self, name): return self.convert_codepoint(n) def _feed(self, inDocumentEncoding=None, isHTML=False): - # Convert the document to Unicode. 
- markup = self.markup - if isinstance(markup, unicode): - if not hasattr(self, 'originalEncoding'): - self.originalEncoding = None - else: - dammit = UnicodeDammit\ - (markup, [self.fromEncoding, inDocumentEncoding], - smartQuotesTo=self.smartQuotesTo, isHTML=isHTML) - markup = dammit.unicode - self.originalEncoding = dammit.originalEncoding - self.declaredHTMLEncoding = dammit.declaredHTMLEncoding - if markup: - if self.markupMassage: - if not hasattr(self.markupMassage, "__iter__"): - self.markupMassage = self.MARKUP_MASSAGE - for fix, m in self.markupMassage: - markup = fix.sub(m, markup) - # TODO: We get rid of markupMassage so that the - # soup object can be deepcopied later on. Some - # Python installations can't copy regexes. If anyone - # was relying on the existence of markupMassage, this - # might cause problems. - del(self.markupMassage) - self.reset() - - SGMLParser.feed(self, markup) - # Close out any unfinished strings and close all the open tags. - self.endData() - while self.currentTag.name != self.ROOT_TAG_NAME: - self.popTag() + # Convert the document to Unicode. + markup = self.markup + if isinstance(markup, unicode): + if not hasattr(self, 'originalEncoding'): + self.originalEncoding = None + else: + dammit = UnicodeDammit\ + (markup, [self.fromEncoding, inDocumentEncoding], + smartQuotesTo=self.smartQuotesTo, isHTML=isHTML) + markup = dammit.unicode + self.originalEncoding = dammit.originalEncoding + self.declaredHTMLEncoding = dammit.declaredHTMLEncoding + if markup and self.markupMassage: + if not hasattr(self.markupMassage, "__iter__"): + self.markupMassage = self.MARKUP_MASSAGE + for fix, m in self.markupMassage: + markup = fix.sub(m, markup) + # TODO: We get rid of markupMassage so that the + # soup object can be deepcopied later on. Some + # Python installations can't copy regexes. If anyone + # was relying on the existence of markupMassage, this + # might cause problems. + del(self.markupMassage) + self.reset() + + SGMLParser.feed(self, markup) + # Close out any unfinished strings and close all the open tags. 
+ self.endData() + while self.currentTag.name != self.ROOT_TAG_NAME: + self.popTag() def __getattr__(self, methodName): """This method routes method call requests to either the SGMLParser @@ -1232,49 +1211,49 @@ def pushTag(self, tag): self.currentTag = self.tagStack[-1] def endData(self, containerClass=NavigableString): - if self.currentData: - currentData = u''.join(self.currentData) - if (currentData.translate(self.STRIP_ASCII_SPACES) == '' and - not set([tag.name for tag in self.tagStack]).intersection( - self.PRESERVE_WHITESPACE_TAGS)): - if '\n' in currentData: - currentData = '\n' - else: - currentData = ' ' - self.currentData = [] - if self.parseOnlyThese and len(self.tagStack) <= 1 and \ - (not self.parseOnlyThese.text or \ - not self.parseOnlyThese.search(currentData)): - return - o = containerClass(currentData) - o.setup(self.currentTag, self.previous) - if self.previous: - self.previous.next = o - self.previous = o - self.currentTag.contents.append(o) + if not self.currentData: + return + + currentData = u''.join(self.currentData) + if currentData.translate(self.STRIP_ASCII_SPACES) == '' and not { + tag.name + for tag in self.tagStack + }.intersection(self.PRESERVE_WHITESPACE_TAGS): + currentData = '\n' if '\n' in currentData else ' ' + self.currentData = [] + if self.parseOnlyThese and len(self.tagStack) <= 1 and \ + (not self.parseOnlyThese.text or \ + not self.parseOnlyThese.search(currentData)): + return + o = containerClass(currentData) + o.setup(self.currentTag, self.previous) + if self.previous: + self.previous.next = o + self.previous = o + self.currentTag.contents.append(o) def _popToTag(self, name, inclusivePop=True): - """Pops the tag stack up to and including the most recent + """Pops the tag stack up to and including the most recent instance of the given tag. If inclusivePop is false, pops the tag stack up to but *not* including the most recent instqance of the given tag.""" - #print "Popping to %s" % name - if name == self.ROOT_TAG_NAME: - return - - numPops = 0 - mostRecentTag = None - for i in range(len(self.tagStack)-1, 0, -1): - if name == self.tagStack[i].name: - numPops = len(self.tagStack)-i - break - if not inclusivePop: - numPops = numPops - 1 - - for i in range(0, numPops): - mostRecentTag = self.popTag() - return mostRecentTag + #print "Popping to %s" % name + if name == self.ROOT_TAG_NAME: + return + + numPops = 0 + mostRecentTag = None + for i in range(len(self.tagStack)-1, 0, -1): + if name == self.tagStack[i].name: + numPops = len(self.tagStack)-i + break + if not inclusivePop: + numPops -= 1 + + for _ in range(numPops): + mostRecentTag = self.popTag() + return mostRecentTag def _smartPop(self, name): @@ -1388,12 +1367,9 @@ def handle_comment(self, text): self._toStringSubclass(text, Comment) def handle_charref(self, ref): - "Handle character references as data." - if self.convertEntities: - data = unichr(int(ref)) - else: - data = '&#%s;' % ref - self.handle_data(data) + "Handle character references as data." 
+ data = unichr(int(ref)) if self.convertEntities else '&#%s;' % ref + self.handle_data(data) def handle_entityref(self, ref): """Handle entity references as data, possibly converting known @@ -1796,15 +1772,12 @@ def __init__(self, markup, overrideEncodings=[], if not u: self.originalEncoding = None def _subMSChar(self, orig): - """Changes a MS smart quote character to an XML or HTML + """Changes a MS smart quote character to an XML or HTML entity.""" - sub = self.MS_CHARS.get(orig) - if isinstance(sub, tuple): - if self.smartQuotesTo == 'xml': - sub = '&#x%s;' % sub[1] - else: - sub = '&%s;' % sub[0] - return sub + sub = self.MS_CHARS.get(orig) + if isinstance(sub, tuple): + sub = '&#x%s;' % sub[1] if self.smartQuotesTo == 'xml' else '&%s;' % sub[0] + return sub def _convertFrom(self, proposed): proposed = self.find_codec(proposed) @@ -1835,96 +1808,94 @@ def _convertFrom(self, proposed): return self.markup def _toUnicode(self, data, encoding): - '''Given a string and its encoding, decodes the string into Unicode. + '''Given a string and its encoding, decodes the string into Unicode. %encoding is a string recognized by encodings.aliases''' - # strip Byte Order Mark (if present) - if (len(data) >= 4) and (data[:2] == '\xfe\xff') \ + # strip Byte Order Mark (if present) + if (len(data) >= 4) and (data[:2] == '\xfe\xff') \ + and (data[2:4] != '\x00\x00'): + encoding = 'utf-16be' + data = data[2:] + elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \ and (data[2:4] != '\x00\x00'): - encoding = 'utf-16be' - data = data[2:] - elif (len(data) >= 4) and (data[:2] == '\xff\xfe') \ - and (data[2:4] != '\x00\x00'): - encoding = 'utf-16le' - data = data[2:] - elif data[:3] == '\xef\xbb\xbf': - encoding = 'utf-8' - data = data[3:] - elif data[:4] == '\x00\x00\xfe\xff': - encoding = 'utf-32be' - data = data[4:] - elif data[:4] == '\xff\xfe\x00\x00': - encoding = 'utf-32le' - data = data[4:] - newdata = unicode(data, encoding) - return newdata + encoding = 'utf-16le' + data = data[2:] + elif data[:3] == '\xef\xbb\xbf': + encoding = 'utf-8' + data = data[3:] + elif data[:4] == '\x00\x00\xfe\xff': + encoding = 'utf-32be' + data = data[4:] + elif data[:4] == '\xff\xfe\x00\x00': + encoding = 'utf-32le' + data = data[4:] + return unicode(data, encoding) def _detectEncoding(self, xml_data, isHTML=False): - """Given a document, tries to detect its XML encoding.""" - xml_encoding = sniffed_xml_encoding = None - try: - if xml_data[:4] == '\x4c\x6f\xa7\x94': - # EBCDIC - xml_data = self._ebcdic_to_ascii(xml_data) - elif xml_data[:4] == '\x00\x3c\x00\x3f': - # UTF-16BE - sniffed_xml_encoding = 'utf-16be' - xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') - elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \ - and (xml_data[2:4] != '\x00\x00'): - # UTF-16BE with BOM - sniffed_xml_encoding = 'utf-16be' - xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') - elif xml_data[:4] == '\x3c\x00\x3f\x00': - # UTF-16LE - sniffed_xml_encoding = 'utf-16le' - xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') - elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \ - (xml_data[2:4] != '\x00\x00'): - # UTF-16LE with BOM - sniffed_xml_encoding = 'utf-16le' - xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') - elif xml_data[:4] == '\x00\x00\x00\x3c': - # UTF-32BE - sniffed_xml_encoding = 'utf-32be' - xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') - elif xml_data[:4] == '\x3c\x00\x00\x00': - # UTF-32LE - sniffed_xml_encoding = 'utf-32le' - xml_data = unicode(xml_data, 
'utf-32le').encode('utf-8') - elif xml_data[:4] == '\x00\x00\xfe\xff': - # UTF-32BE with BOM - sniffed_xml_encoding = 'utf-32be' - xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') - elif xml_data[:4] == '\xff\xfe\x00\x00': - # UTF-32LE with BOM - sniffed_xml_encoding = 'utf-32le' - xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') - elif xml_data[:3] == '\xef\xbb\xbf': - # UTF-8 with BOM - sniffed_xml_encoding = 'utf-8' - xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') - else: - sniffed_xml_encoding = 'ascii' - pass - except: - xml_encoding_match = None - xml_encoding_match = re.compile( - '^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data) - if not xml_encoding_match and isHTML: - regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I) - xml_encoding_match = regexp.search(xml_data) - if xml_encoding_match is not None: - xml_encoding = xml_encoding_match.groups()[0].lower() - if isHTML: - self.declaredHTMLEncoding = xml_encoding - if sniffed_xml_encoding and \ - (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode', - 'iso-10646-ucs-4', 'ucs-4', 'csucs4', - 'utf-16', 'utf-32', 'utf_16', 'utf_32', - 'utf16', 'u16')): - xml_encoding = sniffed_xml_encoding - return xml_data, xml_encoding, sniffed_xml_encoding + """Given a document, tries to detect its XML encoding.""" + xml_encoding = sniffed_xml_encoding = None + try: + if xml_data[:4] == '\x4c\x6f\xa7\x94': + # EBCDIC + xml_data = self._ebcdic_to_ascii(xml_data) + elif xml_data[:4] == '\x00\x3c\x00\x3f': + # UTF-16BE + sniffed_xml_encoding = 'utf-16be' + xml_data = unicode(xml_data, 'utf-16be').encode('utf-8') + elif (len(xml_data) >= 4) and (xml_data[:2] == '\xfe\xff') \ + and (xml_data[2:4] != '\x00\x00'): + # UTF-16BE with BOM + sniffed_xml_encoding = 'utf-16be' + xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8') + elif xml_data[:4] == '\x3c\x00\x3f\x00': + # UTF-16LE + sniffed_xml_encoding = 'utf-16le' + xml_data = unicode(xml_data, 'utf-16le').encode('utf-8') + elif (len(xml_data) >= 4) and (xml_data[:2] == '\xff\xfe') and \ + (xml_data[2:4] != '\x00\x00'): + # UTF-16LE with BOM + sniffed_xml_encoding = 'utf-16le' + xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8') + elif xml_data[:4] == '\x00\x00\x00\x3c': + # UTF-32BE + sniffed_xml_encoding = 'utf-32be' + xml_data = unicode(xml_data, 'utf-32be').encode('utf-8') + elif xml_data[:4] == '\x3c\x00\x00\x00': + # UTF-32LE + sniffed_xml_encoding = 'utf-32le' + xml_data = unicode(xml_data, 'utf-32le').encode('utf-8') + elif xml_data[:4] == '\x00\x00\xfe\xff': + # UTF-32BE with BOM + sniffed_xml_encoding = 'utf-32be' + xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8') + elif xml_data[:4] == '\xff\xfe\x00\x00': + # UTF-32LE with BOM + sniffed_xml_encoding = 'utf-32le' + xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8') + elif xml_data[:3] == '\xef\xbb\xbf': + # UTF-8 with BOM + sniffed_xml_encoding = 'utf-8' + xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8') + else: + sniffed_xml_encoding = 'ascii' + except: + xml_encoding_match = None + xml_encoding_match = re.compile( + '^<\?.*encoding=[\'"](.*?)[\'"].*\?>').match(xml_data) + if not xml_encoding_match and isHTML: + regexp = re.compile('<\s*meta[^>]+charset=([^>]*?)[;\'">]', re.I) + xml_encoding_match = regexp.search(xml_data) + if xml_encoding_match is not None: + xml_encoding = xml_encoding_match.groups()[0].lower() + if isHTML: + self.declaredHTMLEncoding = xml_encoding + if sniffed_xml_encoding and \ + (xml_encoding in 
('iso-10646-ucs-2', 'ucs-2', 'csunicode', + 'iso-10646-ucs-4', 'ucs-4', 'csucs4', + 'utf-16', 'utf-32', 'utf_16', 'utf_32', + 'utf16', 'u16')): + xml_encoding = sniffed_xml_encoding + return xml_data, xml_encoding, sniffed_xml_encoding def find_codec(self, charset): diff --git a/module/lib/Getch.py b/module/lib/Getch.py index 22b7ea7f89..e9883217dc 100644 --- a/module/lib/Getch.py +++ b/module/lib/Getch.py @@ -62,15 +62,14 @@ def __call__(self): if Carbon.Evt.EventAvail(0x0008)[0] == 0: # 0x0008 is the keyDownMask return '' - else: - # - # The event contains the following info: - # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] - # - # The message (msg) contains the ASCII char which is - # extracted with the 0x000000FF charCodeMask; this - # number is converted to an ASCII character with chr() and - # returned - # - (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1] - return chr(msg) \ No newline at end of file + # + # The event contains the following info: + # (what,msg,when,where,mod)=Carbon.Evt.GetNextEvent(0x0008)[1] + # + # The message (msg) contains the ASCII char which is + # extracted with the 0x000000FF charCodeMask; this + # number is converted to an ASCII character with chr() and + # returned + # + (what, msg, when, where, mod) = Carbon.Evt.GetNextEvent(0x0008)[1] + return chr(msg) \ No newline at end of file diff --git a/module/lib/SafeEval.py b/module/lib/SafeEval.py index 8fc57f261f..9250d4388d 100644 --- a/module/lib/SafeEval.py +++ b/module/lib/SafeEval.py @@ -20,10 +20,7 @@ def _get_opcodes(codeobj): while i < len(s): code = ord(s[i]) opcodes.append(code) - if code >= dis.HAVE_ARGUMENT: - i += 3 - else: - i += 1 + i += 3 if code >= dis.HAVE_ARGUMENT else 1 return opcodes, names def test_expr(expr, allowed_codes): diff --git a/module/lib/Unzip.py b/module/lib/Unzip.py index f56fbe751a..6e306eb3e5 100644 --- a/module/lib/Unzip.py +++ b/module/lib/Unzip.py @@ -40,11 +40,12 @@ def _listdirs(self, file): to extract the file to it. """ zf = zipfile.ZipFile(file) - dirs = [] + dirs = [ + name.replace("pyload/", "") + for name in zf.namelist() + if name.endswith('/') + ] - for name in zf.namelist(): - if name.endswith('/'): - dirs.append(name.replace("pyload/","")) dirs.sort() return dirs diff --git a/module/lib/beaker/cache.py b/module/lib/beaker/cache.py index 4a96537ff2..24f33d80c6 100644 --- a/module/lib/beaker/cache.py +++ b/module/lib/beaker/cache.py @@ -360,20 +360,6 @@ def load(search_term, limit, offset): """ return region_invalidate(namespace, region, *args) - if callable(namespace): - if not region: - region = namespace._arg_region - namespace = namespace._arg_namespace - - if not region: - raise BeakerException("Region or callable function " - "namespace is required") - else: - region = self.regions[region] - - cache = self.get_cache(namespace, **region) - cache_key = " ".join(str(x) for x in args) - cache.remove_value(cache_key) def cache(self, *args, **kwargs): """Decorate a function to cache itself with supplied parameters diff --git a/module/lib/beaker/container.py b/module/lib/beaker/container.py index 515e97af68..1d85558701 100644 --- a/module/lib/beaker/container.py +++ b/module/lib/beaker/container.py @@ -307,7 +307,7 @@ def _get_value(self): try: stored, expired, value = value except ValueError: - if not len(value) == 2: + if len(value) != 2: raise # Old format: upgrade stored, value = value @@ -320,7 +320,7 @@ def _get_value(self): # occurs when the value is None. 
memcached # may yank the rug from under us in which case # that's the result - raise KeyError(self.key) + raise KeyError(self.key) return stored, expired, value def set_value(self, value, storedtime=None): @@ -554,21 +554,17 @@ def file_exists(self, file): def do_open(self, flags): if self.file_exists(self.file): - fh = open(self.file, 'rb') - try: - self.hash = cPickle.load(fh) - except (IOError, OSError, EOFError, cPickle.PickleError, ValueError): - pass - fh.close() - + with open(self.file, 'rb') as fh: + try: + self.hash = cPickle.load(fh) + except (IOError, OSError, EOFError, cPickle.PickleError, ValueError): + pass self.flags = flags def do_close(self): - if self.flags == 'c' or self.flags == 'w': - fh = open(self.file, 'wb') - cPickle.dump(self.hash, fh) - fh.close() - + if self.flags in ['c', 'w']: + with open(self.file, 'wb') as fh: + cPickle.dump(self.hash, fh) self.hash = {} self.flags = None diff --git a/module/lib/beaker/crypto/__init__.py b/module/lib/beaker/crypto/__init__.py index 3e26b0c134..e3389d125a 100644 --- a/module/lib/beaker/crypto/__init__.py +++ b/module/lib/beaker/crypto/__init__.py @@ -6,24 +6,15 @@ keyLength = None -if util.jython: - try: +try: + if util.jython: from beaker.crypto.jcecrypto import getKeyLength, aesEncrypt - keyLength = getKeyLength() - except ImportError: - pass -else: - try: + else: from beaker.crypto.pycrypto import getKeyLength, aesEncrypt, aesDecrypt - keyLength = getKeyLength() - except ImportError: - pass - -if not keyLength: - has_aes = False -else: - has_aes = True - + keyLength = getKeyLength() +except ImportError: + pass +has_aes = bool(keyLength) if has_aes and keyLength < 32: warn('Crypto implementation only supports key lengths up to %d bits. ' 'Generated session cookies may be incompatible with other ' @@ -36,5 +27,4 @@ def generateCryptoKeys(master_key, salt, iterations): # os.urandom() returns truly random data, this will have no effect on the # overall security. 
keystream = PBKDF2(master_key, salt, iterations=iterations) - cipher_key = keystream.read(keyLength) - return cipher_key + return keystream.read(keyLength) diff --git a/module/lib/beaker/crypto/pbkdf2.py b/module/lib/beaker/crypto/pbkdf2.py index 96dc5fbb2c..ea725decf6 100644 --- a/module/lib/beaker/crypto/pbkdf2.py +++ b/module/lib/beaker/crypto/pbkdf2.py @@ -137,7 +137,7 @@ def __f(self, i): assert (1 <= i <= 0xffffffff) U = self.__prf(self.__passphrase, self.__salt + pack("!L", i)) result = U - for j in xrange(2, 1+self.__iterations): + for _ in xrange(2, 1+self.__iterations): U = self.__prf(self.__passphrase, U) result = strxor(result, U) return result diff --git a/module/lib/beaker/ext/database.py b/module/lib/beaker/ext/database.py index 701e6f7d21..ed23318bb2 100644 --- a/module/lib/beaker/ext/database.py +++ b/module/lib/beaker/ext/database.py @@ -125,7 +125,7 @@ def do_open(self, flags): self.loaded = True def do_close(self): - if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + if self.flags is not None and self.flags in ['c', 'w']: cache = self.cache if self._is_new: cache.insert().execute(namespace=self.namespace, data=self.hash, diff --git a/module/lib/beaker/ext/google.py b/module/lib/beaker/ext/google.py index dd8380d7fc..09f5bb75cd 100644 --- a/module/lib/beaker/ext/google.py +++ b/module/lib/beaker/ext/google.py @@ -32,8 +32,7 @@ def make_cache(): table_dict = dict(created=db.DateTimeProperty(), accessed=db.DateTimeProperty(), data=db.BlobProperty()) - table = type(table_name, (db.Model,), table_dict) - return table + return type(table_name, (db.Model,), table_dict) self.table_name = table_name self.cache = GoogleNamespaceManager.tables.setdefault(table_name, make_cache()) self.hash = {} @@ -76,7 +75,7 @@ def do_open(self, flags): self.loaded = True def do_close(self): - if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + if self.flags is not None and self.flags in ['c', 'w']: if self._is_new: item = self.cache(key_name=self.namespace) item.data = cPickle.dumps(self.hash) diff --git a/module/lib/beaker/ext/sqla.py b/module/lib/beaker/ext/sqla.py index 8c79633c15..a21ecb8882 100644 --- a/module/lib/beaker/ext/sqla.py +++ b/module/lib/beaker/ext/sqla.py @@ -87,7 +87,7 @@ def do_open(self, flags): self.loaded = True def do_close(self): - if self.flags is not None and (self.flags == 'c' or self.flags == 'w'): + if self.flags is not None and self.flags in ['c', 'w']: if self._is_new: insert = self.table.insert() self.bind.execute(insert, namespace=self.namespace, data=self.hash, diff --git a/module/lib/beaker/middleware.py b/module/lib/beaker/middleware.py index 7ba88b37d5..0781b940ed 100644 --- a/module/lib/beaker/middleware.py +++ b/module/lib/beaker/middleware.py @@ -65,10 +65,9 @@ def __init__(self, app, config=None, environ_key='beaker.cache', **kwargs): self.environ_key = environ_key def __call__(self, environ, start_response): - if environ.get('paste.registry'): - if environ['paste.registry'].reglist: - environ['paste.registry'].register(self.cache, - self.cache_manager) + if environ.get('paste.registry') and environ['paste.registry'].reglist: + environ['paste.registry'].register(self.cache, + self.cache_manager) environ[self.environ_key] = self.cache_manager return self.app(environ, start_response) diff --git a/module/lib/beaker/session.py b/module/lib/beaker/session.py index 7d465530bd..c0be3b6e99 100644 --- a/module/lib/beaker/session.py +++ b/module/lib/beaker/session.py @@ -26,16 +26,13 @@ def __init__(self, secret, 
input=None): def value_decode(self, val): val = val.strip('"') sig = HMAC.new(self.secret, val[40:], SHA1).hexdigest() - - # Avoid timing attacks - invalid_bits = 0 + input_sig = val[:40] if len(sig) != len(input_sig): return None, val - - for a, b in zip(sig, input_sig): - invalid_bits += a != b - + + # Avoid timing attacks + invalid_bits = sum(a != b for a, b in zip(sig, input_sig)) if invalid_bits: return None, val else: @@ -66,25 +63,22 @@ def __init__(self, request, id=None, invalidate_corrupt=False, cookie_domain=None, secret=None, secure=False, namespace_class=None, **namespace_args): if not type: - if data_dir: - self.type = 'file' - else: - self.type = 'memory' + self.type = 'file' if data_dir else 'memory' else: self.type = type self.namespace_class = namespace_class or clsmap[self.type] self.namespace_args = namespace_args - + self.request = request self.data_dir = data_dir self.key = key - + self.timeout = timeout self.use_cookies = use_cookies self.cookie_expires = cookie_expires - + # Default cookie domain/path self._domain = cookie_domain self._path = '/' @@ -93,7 +87,7 @@ def __init__(self, request, id=None, invalidate_corrupt=False, self.secure = secure self.id = id self.accessed_dict = {} - + if self.use_cookies: cookieheader = request.get('cookie', '') if secret: @@ -103,10 +97,10 @@ def __init__(self, request, id=None, invalidate_corrupt=False, self.cookie = SignedCookie(secret, input=None) else: self.cookie = Cookie.SimpleCookie(input=cookieheader) - + if not self.id and self.key in self.cookie: self.id = self.cookie[self.key].value - + self.is_new = self.id is None if self.is_new: self._create_id() @@ -441,10 +435,10 @@ def _decrypt_data(self): self.validate_key + nonce, 1) payload = b64decode(self.cookie[self.key].value[8:]) data = crypto.aesDecrypt(payload, encrypt_key) - return pickle.loads(data) else: data = b64decode(self.cookie[self.key].value) - return pickle.loads(data) + + return pickle.loads(data) def _make_id(self): return md5(md5( diff --git a/module/lib/beaker/synchronization.py b/module/lib/beaker/synchronization.py index 761303707b..628b3a3458 100644 --- a/module/lib/beaker/synchronization.py +++ b/module/lib/beaker/synchronization.py @@ -45,10 +45,7 @@ class NameLock(object): class NLContainer(object): def __init__(self, reentrant): - if reentrant: - self.lock = _threading.RLock() - else: - self.lock = _threading.Lock() + self.lock = _threading.RLock() if reentrant else _threading.Lock() def __call__(self): return self.lock @@ -108,12 +105,12 @@ def __init__(self): self.reading = False def state(self): - if not self._state.has(): - state = SynchronizerImpl.SyncState() - self._state.put(state) - return state - else: + if self._state.has(): return self._state.get() + + state = SynchronizerImpl.SyncState() + self._state.put(state) + return state state = property(state) def release_read_lock(self): @@ -209,12 +206,8 @@ class FileSynchronizer(SynchronizerImpl): def __init__(self, identifier, lock_dir): super(FileSynchronizer, self).__init__() self._filedescriptor = util.ThreadLocal() - - if lock_dir is None: - lock_dir = tempfile.gettempdir() - else: - lock_dir = lock_dir + lock_dir = tempfile.gettempdir() if lock_dir is None else lock_dir self.filename = util.encoded_path( lock_dir, [identifier], diff --git a/module/lib/beaker/util.py b/module/lib/beaker/util.py index 04c9617c5f..7af6ef5a70 100644 --- a/module/lib/beaker/util.py +++ b/module/lib/beaker/util.py @@ -146,30 +146,27 @@ def __init__(self): sha1 = None def encoded_path(root, identifiers, 
extension = ".enc", depth = 3, digest_filenames=True): - + """Generate a unique file-accessible path from the given list of identifiers starting at the given root directory.""" ident = "_".join(identifiers) - + global sha1 if sha1 is None: from beaker.crypto import sha1 - + if digest_filenames: if py3k: ident = sha1(ident.encode('utf-8')).hexdigest() else: ident = sha1(ident).hexdigest() - + ident = os.path.basename(ident) - tokens = [] - for d in range(1, depth): - tokens.append(ident[0:d]) - + tokens = [ident[0:d] for d in range(1, depth)] dir = os.path.join(root, *tokens) verify_directory(dir) - + return os.path.join(dir, ident + extension) diff --git a/module/lib/bottle.py b/module/lib/bottle.py index 2c243278e2..4edc138341 100644 --- a/module/lib/bottle.py +++ b/module/lib/bottle.py @@ -317,7 +317,7 @@ def parse_rule(self, rule): offset = match.end() continue if prefix: yield prefix, None, None - name, filtr, conf = g[1:4] if not g[2] is None else g[4:7] + name, filtr, conf = g[1:4] if g[2] is not None else g[4:7] if not filtr: filtr = self.default_filter yield name, filtr, conf or None offset, prefix = match.end(), '' @@ -508,7 +508,7 @@ def _make_callback(self): callback = plugin(callback) except RouteReset: # Try again with changed configuration. return self._make_callback() - if not callback is self.callback: + if callback is not self.callback: try_update_wrapper(callback, self.callback) return callback @@ -1223,7 +1223,7 @@ def copy(self): ''' Returns a copy of self. ''' copy = Response() copy.status = self.status - copy._headers = dict((k, v[:]) for (k, v) in self._headers.items()) + copy._headers = {k: v[:] for (k, v) in self._headers.items()} return copy def __iter__(self): @@ -1385,15 +1385,15 @@ def set_cookie(self, name, value, secret=None, **options): self._cookies[name] = value for key, value in options.iteritems(): - if key == 'max_age': - if isinstance(value, timedelta): - value = value.seconds + value.days * 24 * 3600 if key == 'expires': if isinstance(value, (datedate, datetime)): value = value.timetuple() elif isinstance(value, (int, float)): value = time.gmtime(value) value = time.strftime("%a, %d %b %Y %H:%M:%S GMT", value) + elif key == 'max_age': + if isinstance(value, timedelta): + value = value.seconds + value.days * 24 * 3600 self._cookies[name][key.replace('_', '-')] = value def delete_cookie(self, key, **kwargs): @@ -1463,7 +1463,7 @@ class HooksPlugin(object): _names = 'before_request', 'after_request', 'app_reset' def __init__(self): - self.hooks = dict((name, []) for name in self._names) + self.hooks = {name: [] for name in self._names} self.app = None def _empty(self): @@ -1566,7 +1566,7 @@ class MultiDict(DictMixin): """ def __init__(self, *a, **k): - self.dict = dict((k, [v]) for k, v in dict(*a, **k).iteritems()) + self.dict = {k: [v] for k, v in dict(*a, **k).iteritems()} def __len__(self): return len(self.dict) def __iter__(self): return iter(self.dict) def __contains__(self, key): return key in self.dict @@ -1814,7 +1814,7 @@ def static_file(filename, root, mimetype='auto', download=False): """ root = os.path.abspath(root) + os.sep filename = os.path.abspath(os.path.join(root, filename.strip('/\\'))) - header = dict() + header = {} if not filename.startswith(root): return HTTPError(403, "Access denied.") @@ -2184,8 +2184,7 @@ class GeventServer(ServerAdapter): """ def run(self, handler): from gevent import wsgi as wsgi_fast, pywsgi, monkey, local - if self.options.get('monkey', True): - if not threading.local is local.local: monkey.patch_all() + if 
self.options.get('monkey', True) and threading.local is not local.local: monkey.patch_all()
wsgi = wsgi_fast if self.options.get('fast') else pywsgi
wsgi.WSGIServer((self.host, self.port), handler).serve_forever()
@@ -2404,7 +2403,7 @@ def __init__(self, lockfile, interval):
def run(self):
exists = os.path.exists
mtime = lambda path: os.stat(path).st_mtime
- files = dict()
+ files = {}
for module in sys.modules.values():
path = getattr(module, '__file__', '')
@@ -2493,12 +2492,12 @@ def search(cls, name, lookup=[]):
@classmethod
def global_config(cls, key, *args):
''' This reads or sets the global settings stored in class.settings. '''
- if args:
- cls.settings = cls.settings.copy() # Make settings local to class
- cls.settings[key] = args[0]
- else:
+ if not args:
return cls.settings[key]
+ cls.settings = cls.settings.copy() # Make settings local to class
+ cls.settings[key] = args[0]
+
def prepare(self, **options):
""" Run preparations (parsing, caching, ...).
It should be possible to call this again to refresh a template or to
@@ -2550,9 +2549,9 @@ def render(self, *args, **kwargs):
for dictarg in args: kwargs.update(dictarg)
self.context.vars.update(self.defaults)
self.context.vars.update(kwargs)
out = str(self.tpl)
self.context.vars.clear()
return out
class Jinja2Template(BaseTemplate):
diff --git a/module/lib/feedparser.py b/module/lib/feedparser.py
index a746ed8f52..3cece37dcd 100644
--- a/module/lib/feedparser.py
+++ b/module/lib/feedparser.py
@@ -274,10 +274,7 @@ def start(self, n):
# Python 2.1 does not have dict
from UserDict import UserDict
def dict(aList):
- rc = {}
- for k, v in aList:
- rc[k] = v
- return rc
+ return {k: v for k, v in aList}
class FeedParserDict(UserDict):
keymap = {'channel': 'feed',
@@ -684,10 +681,7 @@ def handle_charref(self, ref):
if ref in ('34', '38', '39', '60', '62', 'x22', 'x26', 'x27', 'x3c', 'x3e'):
text = '&#%s;' % ref
else:
- if ref[0] == 'x':
- c = int(ref[1:], 16)
- else:
- c = int(ref)
+ c = int(ref[1:], 16) if ref[0] == 'x' else int(ref)
text = unichr(c).encode('utf-8')
self.elementstack[-1][2].append(text)
@@ -788,7 +782,7 @@ def push(self, element, expectingText):
def pop(self, element, stripWhitespace=1):
if not self.elementstack: return
if self.elementstack[-1][0] != element: return
-
+
element, expectingText, pieces = self.elementstack.pop()
if self.version == 'atom10' and self.contentparams.get('type','text') == 'application/xhtml+xml':
@@ -833,11 +827,11 @@ def pop(self, element, stripWhitespace=1):
# In Python 3, base64 takes and outputs bytes, not str
# This may not be the most correct way to accomplish this
output = _base64decode(output.encode('utf-8')).decode('utf-8')
-
+
# resolve relative URIs
if (element in self.can_be_relative_uri) and output:
output = self.resolveURI(output)
-
+
# decode entities within embedded markup
if not self.contentparams.get('base64', 0):
output = self.decodeEntities(element, output)
@@ -857,10 +851,13 @@ def pop(self, element, stripWhitespace=1):
is_htmlish = self.mapContentType(self.contentparams.get('type', 'text/html')) in self.html_types
# resolve relative URIs within embedded markup
- if is_htmlish and RESOLVE_RELATIVE_URIS:
- if element in self.can_contain_relative_uris:
- output = _resolveRelativeURIs(output, self.baseuri, self.encoding, self.contentparams.get('type', 'text/html'))
-
+ if (
+ is_htmlish
+ and RESOLVE_RELATIVE_URIS
+ and element in self.can_contain_relative_uris
+ ):
+ output = _resolveRelativeURIs(output, self.baseuri, self.encoding, self.contentparams.get('type', 'text/html'))
+
# parse microformats
# (must do this before sanitizing because some microformats
# rely on elements that we sanitize)
@@ -876,11 +873,14 @@ def pop(self, element, stripWhitespace=1):
vcard = mfresults.get('vcard')
if vcard:
self._getContext()['vcard'] = vcard
-
+
# sanitize embedded markup
- if is_htmlish and SANITIZE_HTML:
- if element in self.can_contain_dangerous_markup:
- output = _sanitizeHTML(output, self.encoding, self.contentparams.get('type', 'text/html'))
+ if (
+ is_htmlish
+ and SANITIZE_HTML
+ and element in self.can_contain_dangerous_markup
+ ):
+ output = _sanitizeHTML(output, self.encoding, self.contentparams.get('type', 'text/html'))
if self.encoding and type(output) != type(u''):
try:
@@ -906,7 +906,7 @@ def pop(self, element, stripWhitespace=1):
if element == 'title' and self.hasTitle:
return output
-
+
# store output in appropriate place(s)
if self.inentry and not self.insource:
if element == 'content':
@@ -1030,15 +1030,15 @@ def _save(self, key, value, overwrite=False):
context.setdefault(key, value)
def _start_rss(self, attrsD):
- versionmap = {'0.91': 'rss091u',
- '0.92': 'rss092',
- '0.93': 'rss093',
- '0.94': 'rss094'}
#If we're here then this is an RSS feed.
#If we don't have a version or have a version that starts with something
#other than RSS then there's been a mistake. Correct it.
if not self.version or not self.version.startswith('rss'):
attr_version = attrsD.get('version', '')
+ versionmap = {'0.91': 'rss091u',
+ '0.92': 'rss092',
+ '0.93': 'rss093',
+ '0.94': 'rss094'}
version = versionmap.get(attr_version)
if version:
self.version = version
@@ -1067,16 +1067,13 @@ def _cdf_common(self, attrsD):
def _start_feed(self, attrsD):
self.infeed = 1
- versionmap = {'0.1': 'atom01',
- '0.2': 'atom02',
- '0.3': 'atom03'}
if not self.version:
attr_version = attrsD.get('version')
+ versionmap = {'0.1': 'atom01',
+ '0.2': 'atom02',
+ '0.3': 'atom03'}
version = versionmap.get(attr_version)
- if version:
- self.version = version
- else:
- self.version = 'atom'
+ self.version = version or 'atom'
def _end_channel(self):
self.infeed = 0
@@ -1232,16 +1229,15 @@ def _end_email(self):
def _getContext(self):
if self.insource:
- context = self.sourcedata
+ return self.sourcedata
elif self.inimage and self.feeddata.has_key('image'):
- context = self.feeddata['image']
+ return self.feeddata['image']
elif self.intextinput:
- context = self.feeddata['textinput']
+ return self.feeddata['textinput']
elif self.inentry:
- context = self.entries[-1]
+ return self.entries[-1]
else:
- context = self.feeddata
- return context
+ return self.feeddata
def _save_author(self, key, value, prefix='author'):
context = self._getContext()
@@ -1644,8 +1640,8 @@ def _start_content_encoded(self, attrsD):
def _end_content(self):
copyToSummary = self.mapContentType(self.contentparams.get('type')) in (['text/plain'] + self.html_types)
value = self.popContent('content')
if copyToSummary:
self._save('summary', value)
_end_body = _end_content
@@ -1685,9 +1681,12 @@ def _start_media_thumbnail(self, attrsD):
def _end_media_thumbnail(self):
url = self.pop('url')
context = self._getContext()
- if url is not None and len(url.strip()) != 0:
- if not context['media_thumbnail'][-1].has_key('url'):
- context['media_thumbnail'][-1]['url'] = url
+ if (
+ url is not None
+ and len(url.strip()) != 0
+ and not context['media_thumbnail'][-1].has_key('url')
+ ):
+ context['media_thumbnail'][-1]['url'] = url
def _start_media_player(self,
attrsD): self.push('media_player', 0) @@ -1829,9 +1828,12 @@ def _shorttag_replace(self, match): def parse_starttag(self,i): j=sgmllib.SGMLParser.parse_starttag(self, i) - if self._type == 'application/xhtml+xml': - if j>2 and self.rawdata[j-2:j]=='/>': - self.unknown_endtag(self.lasttag) + if ( + self._type == 'application/xhtml+xml' + and j > 2 + and self.rawdata[j - 2 : j] == '/>' + ): + self.unknown_endtag(self.lasttag) return j def feed(self, data): @@ -1901,11 +1903,7 @@ def unknown_endtag(self, tag): def handle_charref(self, ref): # called for each character reference, e.g. for ' ', ref will be '160' # Reconstruct the original character reference. - if ref.startswith('x'): - value = unichr(int(ref[1:],16)) - else: - value = unichr(int(ref)) - + value = unichr(int(ref[1:],16)) if ref.startswith('x') else unichr(int(ref)) if value in _cp1252.keys(): self.pieces.append('&#%s;' % hex(ord(_cp1252[value]))[1:]) else: @@ -2082,10 +2080,7 @@ def getPropertyValue(self, elmRoot, sProperty, iPropertyType=4, bAllowMultiple=0 for node in snapFilter: if node.findParent(all, propertyMatch): arFilter.append(node) - arResults = [] - for node in snapResults: - if node not in arFilter: - arResults.append(node) + arResults = [node for node in snapResults if node not in arFilter] bFound = (len(arResults) != 0) if not bFound: if bAllowMultiple: return [] @@ -2098,11 +2093,10 @@ def getPropertyValue(self, elmRoot, sProperty, iPropertyType=4, bAllowMultiple=0 for elmResult in arResults: sValue = None if iPropertyType == self.NODE: - if bAllowMultiple: - arValues.append(elmResult) - continue - else: + if not bAllowMultiple: return elmResult + arValues.append(elmResult) + continue sNodeName = elmResult.name.lower() if (iPropertyType == self.EMAIL) and (sNodeName == 'a'): sValue = (elmResult.get('href') or '').split('mailto:').pop().split('?')[0] @@ -2398,10 +2392,7 @@ def findXFN(self): all = lambda x: 1 for elm in self.document(all, {'rel': re.compile('.+'), 'href': re.compile('.+')}): rels = elm.get('rel', '').split() - xfn_rels = [] - for rel in rels: - if rel in self.known_xfn_relationships: - xfn_rels.append(rel) + xfn_rels = [rel for rel in rels if rel in self.known_xfn_relationships] if xfn_rels: self.xfn.append({"relationships": xfn_rels, "href": elm.get('href', ''), "name": elm.string}) @@ -2673,11 +2664,11 @@ def unknown_starttag(self, tag, attrs): _BaseHTMLProcessor.unknown_starttag(self, tag, clean_attrs) def unknown_endtag(self, tag): - if not tag in self.acceptable_elements: + if tag not in self.acceptable_elements: if tag in self.unacceptable_elements_with_end_tag: self.unacceptablestack -= 1 if self.mathmlOK and tag in self.mathml_elements: - if tag == 'math' and self.mathmlOK: self.mathmlOK -= 1 + if tag == 'math': self.mathmlOK -= 1 elif self.svgOK and tag in self.svg_elements: tag = self.svg_elem_map.get(tag,tag) if tag == 'svg' and self.svgOK: self.svgOK -= 1 @@ -2706,18 +2697,20 @@ def sanitize_style(self, style): clean = [] for prop,value in re.findall("([-\w]+)\s*:\s*([^:;]*)",style): - if not value: continue - if prop.lower() in self.acceptable_css_properties: - clean.append(prop + ': ' + value + ';') - elif prop.split('-')[0].lower() in ['background','border','margin','padding']: - for keyword in value.split(): - if not keyword in self.acceptable_css_keywords and \ - not self.valid_css_values.match(keyword): - break - else: - clean.append(prop + ': ' + value + ';') - elif self.svgOK and prop.lower() in self.acceptable_svg_properties: - clean.append(prop + ': ' + value + ';') + if not 
value: continue
+ if prop.lower() in self.acceptable_css_properties:
+ clean.append(prop + ': ' + value + ';')
+ elif prop.split('-')[0].lower() in ['background','border','margin','padding']:
+ for keyword in value.split():
+ if (
+ keyword not in self.acceptable_css_keywords
+ and not self.valid_css_values.match(keyword)
+ ):
+ break
+ else:
+ clean.append(prop + ': ' + value + ';')
+ elif self.svgOK and prop.lower() in self.acceptable_svg_properties:
+ clean.append(prop + ': ' + value + ';')
return ' '.join(clean)
@@ -2764,7 +2757,7 @@ def _tidy(data, **kwargs):
class _FeedURLHandler(urllib2.HTTPDigestAuthHandler, urllib2.HTTPRedirectHandler, urllib2.HTTPDefaultErrorHandler):
def http_error_default(self, req, fp, code, msg, headers):
- if ((code / 100) == 3) and (code != 304):
+ if code // 100 == 3 and code != 304:
return self.http_error_302(req, fp, code, msg, headers)
infourl = urllib.addinfourl(fp, headers, req.get_full_url())
infourl.status = code
@@ -3251,10 +3244,7 @@ def __extract_date(m):
else:
month = int(month)
day = m.group('day')
- if day:
- day = int(day)
- else:
- day = 1
+ day = int(day) if day else 1
return year, month, day
def __extract_time(m):
@@ -3266,10 +3256,7 @@ def __extract_time(m):
hours = int(hours)
minutes = int(m.group('minutes'))
seconds = m.group('seconds')
- if seconds:
- seconds = int(seconds)
- else:
- seconds = 0
+ seconds = int(seconds) if seconds else 0
return hours, minutes, seconds
def __extract_tzd(m):
@@ -3283,10 +3270,7 @@ def __extract_tzd(m):
return 0
hours = int(m.group('tzdhours'))
minutes = m.group('tzdminutes')
- if minutes:
- minutes = int(minutes)
- else:
- minutes = 0
+ minutes = int(minutes) if minutes else 0
offset = (hours*60 + minutes) * 60
if tzd[0] == '+':
return -offset
@@ -3571,18 +3555,14 @@ def _stripDoctype(data):
start = re.search(_s2bytes('<\w'), data)
start = start and start.start() or -1
head,data = data[:start+1], data[start+1:]
-
+
entity_pattern = re.compile(_s2bytes(r'^\s*<!ENTITY([^>]*?)>'), re.MULTILINE)
entity_results=entity_pattern.findall(head)
head = entity_pattern.sub(_s2bytes(''), head)
doctype_pattern = re.compile(_s2bytes(r'^\s*<!DOCTYPE([^>]*?)>'), re.MULTILINE)
doctype_results = doctype_pattern.findall(head)
doctype = doctype_results and doctype_results[0] or _s2bytes('')
- if doctype.lower().count(_s2bytes('netscape')):
- version = 'rss091n'
- else:
- version = None
-
+ version = 'rss091n' if doctype.lower().count(_s2bytes('netscape')) else None
# only allow in 'safe' inline entity definitions
replacement=_s2bytes('')
if len(doctype_results)==1 and entity_results:
@@ -3821,10 +3801,8 @@ def _writer(self, stream, node, prefix):
if node.has_key(k + '_parsed'): continue
self._writer(stream, node[k], prefix + k + '.')
elif type(node) == types.ListType:
- index = 0
- for n in node:
+ for index, n in enumerate(node):
self._writer(stream, n, prefix[:-1] + '[' + str(index) + '].')
- index += 1
else:
try:
s = str(node).encode('utf-8')
diff --git a/module/lib/jinja2/_markupsafe/tests.py b/module/lib/jinja2/_markupsafe/tests.py
index c1ce3943a6..076b66d629 100644
--- a/module/lib/jinja2/_markupsafe/tests.py
+++ b/module/lib/jinja2/_markupsafe/tests.py
@@ -55,8 +55,8 @@ class MarkupLeakTestCase(unittest.TestCase):
def test_markup_leaks(self):
counts = set()
- for count in xrange(20):
- for item in xrange(1000):
+ for _ in xrange(20):
+ for _ in xrange(1000):
escape("foo")
escape("<foo>")
escape(u"foo")
diff --git a/module/lib/jinja2/_stringdefs.py b/module/lib/jinja2/_stringdefs.py
index 1161b7f4a4..a1be4b09ed 100644
--- a/module/lib/jinja2/_stringdefs.py
+++ b/module/lib/jinja2/_stringdefs.py
@@ -110,21 +110,20 @@ def allexcept(*args):
cat = unicodedata.category(c)
categories.setdefault(cat, []).append(c)
- f = open(__file__, 'w')
- f.write(header)
-
- for cat in sorted(categories):
- val = u''.join(categories[cat])
- if cat == 'Cs':
- # Jython can't handle isolated surrogates
- f.write("""\
+ with open(__file__, 'w') as f:
+ f.write(header)
+
+ for cat in sorted(categories):
+ val = u''.join(categories[cat])
+ if cat == 'Cs':
+ # Jython can't handle isolated surrogates
+ f.write("""\
try:
Cs = eval(r"%r")
except UnicodeDecodeError:
Cs = '' # Jython can't handle isolated surrogates\n\n""" % val)
- else:
- f.write('%s = %r\n\n' % (cat, val))
- f.write('cats = %r\n\n' % sorted(categories.keys()))
+ else:
+ f.write('%s = %r\n\n' % (cat, val))
+ f.write('cats = %r\n\n' % sorted(categories.keys()))
- f.write(footer)
- f.close()
+ f.write(footer)
diff --git a/module/lib/jinja2/bccache.py b/module/lib/jinja2/bccache.py
index 1e2236c3a5..8d8ffd3ff3 100644
--- a/module/lib/jinja2/bccache.py
+++ b/module/lib/jinja2/bccache.py
@@ -64,10 +64,7 @@ def load_bytecode(self, f):
return
# now load the code. Because marshal is not able to load
# from arbitrary streams we have to work around that
- if isinstance(f, file):
- self.code = marshal.load(f)
- else:
- self.code = marshal.loads(f.read())
+ self.code = marshal.load(f) if isinstance(f, file) else marshal.loads(f.read())
def write_bytecode(self, f):
"""Dump the bytecode into the file or file like object passed."""
diff --git a/module/lib/jinja2/compiler.py b/module/lib/jinja2/compiler.py
index 57641596a3..a6e2d2d43c 100644
--- a/module/lib/jinja2/compiler.py
+++ b/module/lib/jinja2/compiler.py
@@ -49,7 +49,7 @@ def unoptimize_before_dead_code():
x = 42
def f():
if 0: dummy(x)
return f
unoptimize_before_dead_code = bool(unoptimize_before_dead_code().func_closure)
@@ -73,10 +73,7 @@ def has_safe_repr(value):
xrange, Markup)):
return True
if isinstance(value, (tuple, list, set, frozenset)):
- for item in value:
- if not has_safe_repr(item):
- return False
- return True
+ return all(has_safe_repr(item) for item in value)
elif isinstance(value, dict):
for key, value in value.iteritems():
if not has_safe_repr(key):
@@ -207,9 +204,9 @@ def find_shadowed(self, extra=()):
that may be defined with `add_special` which may occour scoped.
""" i = self.identifiers - return (i.declared | i.outer_undeclared) & \ - (i.declared_locally | i.declared_parameter) | \ - set(x for x in extra if i.is_declared(x)) + return (i.declared | i.outer_undeclared) & ( + i.declared_locally | i.declared_parameter + ) | {x for x in extra if i.is_declared(x)} def inner(self): """Return an inner frame.""" @@ -618,10 +615,12 @@ def push_scope(self, frame, extra_vars=()): for name in frame.find_shadowed(extra_vars): aliases[name] = ident = self.temporary_identifier() self.writeline('%s = l_%s' % (ident, name)) - to_declare = set() - for name in frame.identifiers.declared_locally: - if name not in aliases: - to_declare.add('l_' + name) + to_declare = { + 'l_' + name + for name in frame.identifiers.declared_locally + if name not in aliases + } + if to_declare: self.writeline(' = '.join(to_declare) + ' = missing') return aliases @@ -630,10 +629,12 @@ def pop_scope(self, aliases, frame): """Restore all aliases and delete unused variables.""" for name, alias in aliases.iteritems(): self.writeline('l_%s = %s' % (name, alias)) - to_delete = set() - for name in frame.identifiers.declared_locally: - if name not in aliases: - to_delete.add('l_' + name) + to_delete = { + 'l_' + name + for name in frame.identifiers.declared_locally + if name not in aliases + } + if to_delete: # we cannot use the del statement here because enclosed # scopes can trigger a SyntaxError: @@ -1244,10 +1245,7 @@ def visit_Output(self, node, frame): # at that point. try: if frame.eval_ctx.autoescape: - if hasattr(const, '__html__'): - const = const.__html__() - else: - const = escape(const) + const = const.__html__() if hasattr(const, '__html__') else escape(const) const = finalize(const) except: # if something goes wrong here we evaluate the node diff --git a/module/lib/jinja2/debug.py b/module/lib/jinja2/debug.py index eb15456d13..bb4273b0fb 100644 --- a/module/lib/jinja2/debug.py +++ b/module/lib/jinja2/debug.py @@ -280,8 +280,11 @@ class _Traceback(_PyObject): def tb_set_next(tb, next): """Set the tb_next attribute of a traceback object.""" - if not (isinstance(tb, TracebackType) and - (next is None or isinstance(next, TracebackType))): + if ( + not isinstance(tb, TracebackType) + or next is not None + and not isinstance(next, TracebackType) + ): raise TypeError('tb_set_next arguments must be traceback objects') obj = _Traceback.from_address(id(tb)) if tb.tb_next is not None: diff --git a/module/lib/jinja2/environment.py b/module/lib/jinja2/environment.py index ac74a5c68f..e3b3645e32 100644 --- a/module/lib/jinja2/environment.py +++ b/module/lib/jinja2/environment.py @@ -482,10 +482,7 @@ def compile(self, source, name=None, filename=None, raw=False, defer_init=defer_init) if raw: return source - if filename is None: - filename = '