bitbake: lib: Clean up various file access syntax

Python 3 is stricter about how files are accessed. Specifically:

 * Use open(), not file()
 * Use binary mode for binary files (when checksumming)
 * Use with statements to ensure files get closed
 * Add missing file close statements

(Bitbake rev: 9f08b901375ba640f47596f1bcf43f98a931550f)

Signed-off-by: Richard Purdie <richard.purdie@linuxfoundation.org>
commit 4a081b5a52 (parent 7f2bf08280)
Richard Purdie, 2013-05-09 21:06:45 +00:00
6 changed files with 39 additions and 22 deletions
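
The bullet points above, as a minimal standalone sketch. This is not taken from the patch; the helper names append_line() and sha256_of() and the file name example.log are purely illustrative.

    import hashlib

    def append_line(path, line):
        # Python 3 has no file() builtin: use open(), and let the
        # with-statement close the file even if the write raises.
        with open(path, "a") as f:
            f.write(line + "\n")

    def sha256_of(path):
        # Checksumming needs bytes, so the file is opened in binary mode.
        s = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                s.update(chunk)
        return s.hexdigest()

    if __name__ == "__main__":
        append_line("example.log", "task finished")
        print(sha256_of("example.log"))

The md5_file() and sha256_file() changes in the diff below apply the same binary-mode rule, but iterate the file line by line rather than in fixed-size chunks.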

View File

@@ -275,7 +275,8 @@ def exec_func_shell(func, d, runfile, cwd=None):
     bb.debug(2, "Executing shell function %s" % func)
 
     try:
-        bb.process.run(cmd, shell=False, stdin=NULL, log=logfile)
+        with open(os.devnull, 'r+') as stdin:
+            bb.process.run(cmd, shell=False, stdin=stdin, log=logfile)
     except bb.process.CmdError:
         logfn = d.getVar('BB_LOGFILE', True)
         raise FuncFailed(func, logfn)
@@ -319,12 +320,11 @@ def _exec_task(fn, task, d, quieterr):
     # Document the order of the tasks...
     logorder = os.path.join(tempdir, 'log.task_order')
     try:
-        logorderfile = file(logorder, 'a')
+        with open(logorder, 'a') as logorderfile:
+            logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
     except OSError:
         logger.exception("Opening log file '%s'", logorder)
         pass
-    logorderfile.write('{0} ({1}): {2}\n'.format(task, os.getpid(), logbase))
-    logorderfile.close()
 
     # Setup the courtesy link to the logfn
     loglink = os.path.join(tempdir, 'log.{0}'.format(task))
@@ -348,10 +348,10 @@ def _exec_task(fn, task, d, quieterr):
             self.triggered = True
 
     # Handle logfiles
-    si = file('/dev/null', 'r')
+    si = open('/dev/null', 'r')
     try:
         bb.utils.mkdirhier(os.path.dirname(logfn))
-        logfile = file(logfn, 'w')
+        logfile = open(logfn, 'w')
     except OSError:
         logger.exception("Opening log file '%s'", logfn)
         pass
@@ -533,8 +533,7 @@ def make_stamp(task, d, file_name = None):
     # change on broken NFS filesystems
     if stamp:
         bb.utils.remove(stamp)
-        f = open(stamp, "w")
-        f.close()
+        open(stamp, "w").close()
 
     # If we're in task context, write out a signature file for each task
     # as it completes

View File

@@ -738,8 +738,9 @@ class MultiProcessCache(object):
         logger.debug(1, "Using cache in '%s'", self.cachefile)
 
         try:
-            p = pickle.Unpickler(file(self.cachefile, "rb"))
-            data, version = p.load()
+            with open(self.cachefile, "rb") as f:
+                p = pickle.Unpickler(f)
+                data, version = p.load()
         except:
             return
 
@@ -779,8 +780,9 @@ class MultiProcessCache(object):
                 i = i + 1
                 continue
 
-        p = pickle.Pickler(file(self.cachefile + "-" + str(i), "wb"), -1)
-        p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
+        with open(self.cachefile + "-" + str(i), "wb") as f:
+            p = pickle.Pickler(f, -1)
+            p.dump([self.cachedata_extras, self.__class__.CACHE_VERSION])
 
         bb.utils.unlockfile(lf)
         bb.utils.unlockfile(glf)
@@ -798,8 +800,9 @@ class MultiProcessCache(object):
         glf = bb.utils.lockfile(self.cachefile + ".lock")
 
         try:
-            p = pickle.Unpickler(file(self.cachefile, "rb"))
-            data, version = p.load()
+            with open(self.cachefile, "rb") as f:
+                p = pickle.Unpickler(f)
+                data, version = p.load()
         except (IOError, EOFError):
             data, version = None, None
 
@@ -809,8 +812,9 @@ class MultiProcessCache(object):
         for f in [y for y in os.listdir(os.path.dirname(self.cachefile)) if y.startswith(os.path.basename(self.cachefile) + '-')]:
             f = os.path.join(os.path.dirname(self.cachefile), f)
             try:
-                p = pickle.Unpickler(file(f, "rb"))
-                extradata, version = p.load()
+                with open(f, "rb") as fd:
+                    p = pickle.Unpickler(fd)
+                    extradata, version = p.load()
             except (IOError, EOFError):
                 extradata, version = self.create_cachedata(), None
 
@@ -822,8 +826,9 @@ class MultiProcessCache(object):
 
         self.compress_keys(data)
 
-        p = pickle.Pickler(file(self.cachefile, "wb"), -1)
-        p.dump([data, self.__class__.CACHE_VERSION])
+        with open(self.cachefile, "wb") as f:
+            p = pickle.Pickler(f, -1)
+            p.dump([data, self.__class__.CACHE_VERSION])
 
         bb.utils.unlockfile(glf)
 

View File

@@ -104,6 +104,7 @@ def get_statements(filename, absolute_filename, base_name):
             if not s: break
             s = s.rstrip()
             feeder(lineno, s, filename, base_name, statements)
+        file.close()
         if __inpython__:
             # add a blank line to close out any python definition
             feeder(IN_PYTHON_EOF, "", filename, base_name, statements)

View File

@@ -145,6 +145,8 @@ def handle(fn, data, include):
     if oldfile:
         data.setVar('FILE', oldfile)
 
+    f.close()
+
     for f in confFilters:
         f(fn, data)
 

View File

@@ -102,6 +102,10 @@ def _logged_communicate(pipe, log, input):
                     log.write(data)
     finally:
         log.flush()
+    if pipe.stdout is not None:
+        pipe.stdout.close()
+    if pipe.stderr is not None:
+        pipe.stderr.close()
     return ''.join(outdata), ''.join(errdata)
 
 def run(cmd, input=None, log=None, **options):

View File

@@ -414,6 +414,10 @@ def lockfile(name, shared=False, retry=True):
                     return lf
             lf.close()
         except Exception:
+            try:
+                lf.close()
+            except Exception:
+                pass
             pass
         if not retry:
             return None
@@ -443,8 +447,9 @@ def md5_file(filename):
         import md5
         m = md5.new()
 
-    for line in open(filename):
-        m.update(line)
+    with open(filename, "rb") as f:
+        for line in f:
+            m.update(line)
     return m.hexdigest()
 
 def sha256_file(filename):
@@ -460,8 +465,9 @@ def sha256_file(filename):
         return None
 
     s = hashlib.sha256()
-    for line in open(filename):
-        s.update(line)
+    with open(filename, "rb") as f:
+        for line in f:
+            s.update(line)
     return s.hexdigest()
 
 def preserved_envvars_exported():