[Buildbot-commits] buildbot/buildbot/status builder.py,1.52,1.53
Brian Warner
warner at users.sourceforge.net
Fri May 6 06:40:08 UTC 2005
Update of /cvsroot/buildbot/buildbot/buildbot/status
In directory sc8-pr-cvs1.sourceforge.net:/tmp/cvs-serv11379/buildbot/status
Modified Files:
builder.py
Log Message:
Revision: arch at buildbot.sf.net--2004/buildbot--dev--0--patch-143
Creator: Brian Warner <warner at monolith.lothar.com>
Merged from warner at monolith.lothar.com--2005 (patch 7-8)
Patches applied:
* warner at monolith.lothar.com--2005/buildbot--dev--0--patch-7
  Merged from arch at buildbot.sf.net--2004 (patch 137-142)
* warner at monolith.lothar.com--2005/buildbot--dev--0--patch-8
  create OfflineLogFile to handle very large logs without lots of RAM
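The patch stores each merged run of log output as a netstring whose payload is a
single channel digit followed by the text (see OfflineLogFile.merge() and
OfflineLogFileScanner in the diff below). A rough standalone sketch of that
framing; write_chunk/read_chunks are invented for illustration and are not part
of the patch:

    # illustrative sketch only -- mirrors the on-disk format written by
    # OfflineLogFile.merge(); these helpers are not real buildbot code
    def write_chunk(f, channel, text):
        payload = "%d%s" % (channel, text)           # channel digit, then the text
        f.write("%d:%s," % (len(payload), payload))  # netstring: <len>:<payload>,

    def read_chunks(f):
        # parse the whole file back into (channel, text) pairs
        data = f.read()
        chunks = []
        while data:
            length, rest = data.split(":", 1)
            n = int(length)
            payload, data = rest[:n], rest[n + 1:]   # skip the trailing ","
            chunks.append((int(payload[0]), payload[1:]))
        return chunks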
Index: builder.py
===================================================================
RCS file: /cvsroot/buildbot/buildbot/buildbot/status/builder.py,v
retrieving revision 1.52
retrieving revision 1.53
diff -u -d -r1.52 -r1.53
--- builder.py 26 Apr 2005 07:35:07 -0000 1.52
+++ builder.py 6 May 2005 06:40:05 -0000 1.53
@@ -4,8 +4,9 @@
 from twisted.python import log, components
 from twisted.internet import reactor, defer
+from twisted.protocols import basic
-import time, os.path, shutil, sys
+import time, os.path, shutil, sys, re
 try:
     import cPickle as pickle
 except ImportError:
@@ -200,6 +201,111 @@
         # self.step must be filled in by our parent
         self.finished = True
+class OfflineLogFileScanner(basic.NetstringReceiver):
+    def __init__(self, line_cb, channels=[]):
+        self.line_cb = line_cb
+        self.channels = channels
+    def stringReceived(self, line):
+        channel = int(line[0])
+        if not self.channels or (channel in self.channels):
+            self.line_cb(channel, line[1:])
+
+class OfflineLogFile(LogFile):
+    """An OfflineLogFile keeps all of its contents on disk, in a non-pickle
+    format that can be appended easily.
+
+    This behaves like LogFile for each individual entry. The difference is
+    that, when a run of entries are merged together, the merged chunk is
+    written out to disk instead of being stored in self.entries .
+
+    """
+    openfile = None
+
+    def __init__(self, parent, logfilename):
+        self.step = parent
+        self.watchers = []
+        self.finishedWatchers = []
+        self.runEntries = []
+        self.filename = logfilename
+
+    def openForWriting(self):
+        assert not self.finished, "you can't add to a finished log"
+        if not self.openfile:
+            f = open(self.step.build.getLogfileNamed(self.filename), "a")
+            self.openfile = f
+        return self.openfile
+
+    def scanEntries(self, cb, channels=[]):
+        if self.openfile:
+            # we must close it so we can read it properly. It will be opened
+            # again the next time we try to write to it.
+            self.openfile.close()
+            self.openfile = None
+        f = open(self.step.build.getLogfileNamed(self.filename), "r")
+        p = OfflineLogFileScanner(cb, channels)
+        data = f.read(2000)
+        while data:
+            p.dataReceived(data)
+            data = f.read(2000)
+        f.close()
+        for channel, text in self.runEntries:
+            if not channels or (channel in channels):
+                cb(channel, text)
+
+    def getText(self):
+        # this produces one ginormous string
+        chunks = []
+        self.scanEntries(lambda channel,chunk: chunks.append(chunk),
+                         (STDOUT, STDERR))
+        return "".join(chunks)
+
+    def getTextWithHeaders(self):
+        chunks = []
+        self.scanEntries(lambda channel,chunk: chunks.append(chunk))
+        return "".join(chunks)
+
+    def subscribe(self, receiver, catchup):
+        if self.finished:
+            return
+        self.watchers.append(receiver)
+        if catchup:
+            self.scanEntries(lambda channel,chunk:
+                             receiver.logChunk(self.step.build,
+                                               self.step, self,
+                                               channel, chunk))
+
+    def merge(self):
+        # merge all .runEntries (which are all of the same type) into a
+        # single chunk for .entries
+        if not self.runEntries:
+            return
+        channel = self.runEntries[0][0]
+        text = "".join([c[1] for c in self.runEntries])
+        assert channel < 10
+        f = self.openForWriting()
+        f.write("%d:%d" % (1 + len(text), channel))
+        f.write(text)
+        f.write(",")
+        self.runEntries = []
+        self.runLength = 0
+
+    def finish(self):
+        self.merge()
+        if self.openfile:
+            self.openfile.close()
+            self.openfile = None
+        LogFile.finish(self)
+
+    def __getstate__(self):
+        d = LogFile.__getstate__(self)
+        if d.has_key('openfile'):
+            del d['openfile']
+        return d
+
+    def stubify(self):
+        return self
+
+
 class HTMLLogFile:
     __implements__ = interfaces.IStatusLog,
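Because an OfflineLogFile keeps its contents on disk, a consumer that does not
need the whole text at once can walk the file with scanEntries() and a callback
instead of calling getText(). A hypothetical usage sketch (step_status is
assumed to be a BuildStepStatus whose log was created by a step's addLog();
STDOUT/STDERR are the channel constants already used in builder.py):

    from buildbot.status.builder import STDOUT, STDERR

    sizes = {}
    def tally(channel, text):
        # called once per merged chunk as it is read back from disk
        sizes[channel] = sizes.get(channel, 0) + len(text)

    offline_log = step_status.getLogs()[0]        # an OfflineLogFile
    offline_log.scanEntries(tally, channels=(STDOUT, STDERR))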
@@ -302,7 +408,7 @@
     @cvar text: list of short texts that describe the command and its status
     @type text2: list of strings
     @cvar text2: list of short texts added to the overall build description
-    @type logs: dict of string -> L{buildbot.status.builder.LogFile}
+    @type logs: dict of string -> L{buildbot.status.builder.OfflineLogFile}
     @ivar logs: logs of steps
     """
     # note that these are created when the Build is set up, before each
@@ -321,6 +427,7 @@
     finishedWatchers = []
     def __init__(self, parent):
+        assert interfaces.IBuildStatus(parent)
         self.build = parent
         self.logs = []
         self.watchers = []
@@ -449,7 +556,8 @@
     def addLog(self, name):
         assert self.started # addLog before stepStarted won't notify watchers
-        log = LogFile(self)
+        logfilename = self.build.getLogfileName(self.name, name)
+        log = OfflineLogFile(self, logfilename)
         log.name = name
         self.logs.append(log)
         for w in self.watchers:
@@ -558,6 +666,7 @@
         @type parent: L{BuilderStatus}
         @type number: int
         """
+        assert interfaces.IBuilderStatus(parent)
         self.builder = parent
         self.number = number
         self.watchers = []
@@ -783,6 +892,28 @@
     # persistence stuff
+    def getLogfileName(self, stepname, logname):
+        """Return a relative path where this logfile's contents can be
+        stored."""
+        starting_filename = stepname + logname
+        starting_filename = re.sub(r'[^\w\.]', '_', starting_filename)
+        # now make it unique
+        unique_counter = 0
+        filename = starting_filename
+        while filename in [l.filename
+                           for step in self.steps
+                           for l in step.getLogs()
+                           if l.filename]:
+            filename = "%s_%d" % (starting_filename, unique_counter)
+            unique_counter += 1
+        return filename
+
+    def getLogfileNamed(self, logname):
+        """Return the absolute path of the logfile with the given name"""
+        filename = os.path.join(self.builder.basedir,
+                                "%d-%s" % (self.number, logname))
+        return filename
+
     def stubify(self):
         self.steps = [step.stubify() for step in self.steps]
         return self
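For reference, getLogfileName() above builds the on-disk name from the step
name plus the log name, replaces anything other than word characters and dots
with '_', and then appends _0, _1, ... until the name no longer collides with
another log's filename. A toy standalone rendering of that behaviour
(pick_logfile_name is invented for this sketch; the real method consults
self.steps):

    import re

    def pick_logfile_name(stepname, logname, existing_names):
        # sanitize, then uniquify with a numeric suffix on collision
        base = re.sub(r'[^\w\.]', '_', stepname + logname)
        candidate, counter = base, 0
        while candidate in existing_names:
            candidate = "%s_%d" % (base, counter)
            counter += 1
        return candidate

    # pick_logfile_name("compile", "stdio", []) -> "compilestdio"
    # pick_logfile_name("compile", "stdio", ["compilestdio"]) -> "compilestdio_0"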