Merge lp:~stefanor/ibid/memory-logging into lp:~ibid-core/ibid/old-trunk-pack-0.92
- memory-logging
- Merge into old-trunk-pack-0.92
Proposed by
Stefano Rivera
Status: | Merged |
---|---|
Approved by: | Stefano Rivera |
Approved revision: | 681 |
Merged at revision: | 678 |
Proposed branch: | lp:~stefanor/ibid/memory-logging |
Merge into: | lp:~ibid-core/ibid/old-trunk-pack-0.92 |
Diff against target: | None |
To merge this branch: | bzr merge lp:~stefanor/ibid/memory-logging |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jonathan Hitchcock | Approve | ||
Michael Gorven | Approve | ||
Review via email: mp+7974@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Stefano Rivera (stefanor) wrote : | # |
lp:~stefanor/ibid/memory-logging
updated
- 680. By Stefano Rivera
-
Don't overlap dates in memgraph
- 681. By Stefano Rivera
-
Disable logging by default
Revision history for this message
Michael Gorven (mgorven) wrote : | # |
I would have tried to use Python logging's rotating logfiles instead of doing
it manually, but since you've already implemented this it's fine. I think
that the logging processor should have autoload=False though, so that it has
to be explicitly enabled.
review approve
review:
Approve
Revision history for this message
Jonathan Hitchcock (vhata) wrote : | # |
Not sure if we do want it in the main line of code. Let's put it in now, and factor it out into devtools-plugins when we finally get round to refactoring.
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'ibid/plugins/memory.py' |
2 | --- ibid/plugins/memory.py 1970-01-01 00:00:00 +0000 |
3 | +++ ibid/plugins/memory.py 2009-06-27 22:50:48 +0000 |
4 | @@ -0,0 +1,104 @@ |
5 | +import csv |
6 | +from datetime import datetime, timedelta |
7 | +import gc |
8 | +import gzip |
9 | +import os |
10 | +import os.path |
11 | + |
12 | +import simplejson |
13 | +import objgraph |
14 | + |
15 | +import ibid |
16 | +from ibid.plugins import Processor, match |
17 | +from ibid.config import Option, IntOption |
18 | + |
19 | +help = {} |
20 | + |
21 | +help['memory'] = u'Debugging module that keeps track of memory usage' |
22 | + |
23 | +def get_memusage(): |
24 | + status = file('/proc/%i/status' % os.getpid(), 'r').readlines() |
25 | + status = [x.strip().split(':', 1) for x in status if x.startswith('Vm')] |
26 | + return dict((x, int(y.split()[0])) for (x, y) in status) |
27 | + |
28 | +class MemoryLog(Processor): |
29 | + |
30 | + feature = 'memory' |
31 | + |
32 | + mem_filename = Option('mem_filename', 'Memory log filename', 'logs/memory.log') |
33 | + mem_interval = IntOption('mem_interval', 'Interval between memory stat logging', 1) |
34 | + obj_filename = Option('obj_filename', 'Object Statistics log filename', 'logs/objstats.log') |
35 | + obj_interval = IntOption('obj_interval', 'Interval between logging object statistics', 1) |
36 | + |
37 | + def setup(self): |
38 | + fns = [] |
39 | + if self.mem_interval: |
40 | + fns.append(self.mem_filename) |
41 | + if self.obj_interval: |
42 | + fns.append(self.obj_filename) |
43 | + for filename in fns: |
44 | + if os.path.isfile(filename + '.10.gz'): |
45 | + os.remove(filename + '.10.gz') |
46 | + for i in range(9, 0, -1): |
47 | + if os.path.isfile('%s.%i.gz' % (filename, i)): |
48 | + os.rename('%s.%i.gz' % (filename, i), |
49 | + '%s.%i.gz' % (filename, i+1)) |
50 | + if os.path.isfile(filename): |
51 | + o = gzip.open(filename + '.1.gz', 'wb') |
52 | + i = open(filename, 'rb') |
53 | + o.write(i.read()) |
54 | + o.close() |
55 | + i.close() |
56 | + stat = os.stat(filename) |
57 | + os.utime(filename + '.1.gz', (stat.st_atime, stat.st_mtime)) |
58 | + |
59 | + if self.mem_interval: |
60 | + self.mem_file = file(self.mem_filename, 'w+') |
61 | + self.mem_file.write('Ibid Memory Log v2: %s\n' % ibid.config['botname']) |
62 | + self.mem_csv = csv.writer(self.mem_file) |
63 | + self.mem_last = datetime.utcnow() |
64 | + |
65 | + if self.obj_interval: |
66 | + self.obj_file = file(self.obj_filename, 'w+') |
67 | + self.obj_file.write('Ibid Object Log v1: %s\n' % ibid.config['botname']) |
68 | + self.obj_last = datetime.utcnow() |
69 | + |
70 | + def process(self, event): |
71 | + now = datetime.utcnow() |
72 | + if self.mem_interval and now - self.mem_last >= timedelta(seconds=self.mem_interval): |
73 | + self.mem_log() |
74 | + self.mem_last = now |
75 | + if self.obj_interval and now - self.obj_last >= timedelta(seconds=self.obj_interval): |
76 | + self.obj_log() |
77 | + self.obj_last = now |
78 | + |
79 | + def mem_log(self): |
80 | + status = get_memusage() |
81 | + gc.collect() |
82 | + |
83 | + self.mem_csv.writerow(( |
84 | + datetime.utcnow().isoformat(), |
85 | + len(gc.get_objects()), |
86 | + status['VmSize'], |
87 | + status['VmRSS'], |
88 | + )) |
89 | + self.mem_file.flush() |
90 | + |
91 | + def obj_log(self): |
92 | + stats = objgraph.typestats() |
93 | + self.obj_file.write('%s %s\n' % ( |
94 | + datetime.utcnow().isoformat(), |
95 | + simplejson.dumps(objgraph.typestats()) |
96 | + )) |
97 | + self.obj_file.flush() |
98 | + |
99 | +class MemoryInfo(Processor): |
100 | + u"memory usage" |
101 | + |
102 | + feature = 'memory' |
103 | + |
104 | + @match('^memory\s+usage$') |
105 | + def memory_usage(self, event): |
106 | + event.addresponse(u"Today, I weigh in at %(VmSize)i kiB Virtual, %(VmRSS)s kiB RSS", get_memusage()) |
107 | + |
108 | +# vi: set et sta sw=4 ts=4: |
109 | |
110 | === added file 'lib/objgraph.py' |
111 | --- lib/objgraph.py 1970-01-01 00:00:00 +0000 |
112 | +++ lib/objgraph.py 2009-06-27 22:50:48 +0000 |
113 | @@ -0,0 +1,416 @@ |
114 | +""" |
115 | +Ad-hoc tools for drawing Python object reference graphs with graphviz. |
116 | + |
117 | +This module is more useful as a repository of sample code and ideas, than |
118 | +as a finished product. For documentation and background, read |
119 | + |
120 | + http://mg.pov.lt/blog/hunting-python-memleaks.html |
121 | + http://mg.pov.lt/blog/python-object-graphs.html |
122 | + http://mg.pov.lt/blog/object-graphs-with-graphviz.html |
123 | + |
124 | +in that order. Then use pydoc to read the docstrings, as there were |
125 | +improvements made since those blog posts. |
126 | + |
127 | +Copyright (c) 2008 Marius Gedminas <marius@pov.lt> |
128 | + |
129 | +Released under the MIT licence. |
130 | + |
131 | + |
132 | +Changes |
133 | +======= |
134 | + |
135 | +(unreleased) |
136 | +------------ |
137 | + |
138 | +Highlight objects with a __del__ method. |
139 | + |
140 | + |
141 | +1.2 (2009-03-25) |
142 | +---------------- |
143 | + |
144 | +Project website, public source repository, uploaded to PyPI. |
145 | + |
146 | +No code changes. |
147 | + |
148 | + |
149 | +1.1dev (2008-09-05) |
150 | +------------------- |
151 | + |
152 | +New function: show_refs() for showing forward references. |
153 | + |
154 | +New functions: typestats() and show_most_common_types(). |
155 | + |
156 | +Object boxes are less crammed with useless information (such as IDs). |
157 | + |
158 | +Spawns xdot if it is available. |
159 | +""" |
160 | +# Permission is hereby granted, free of charge, to any person obtaining a |
161 | +# copy of this software and associated documentation files (the "Software"), |
162 | +# to deal in the Software without restriction, including without limitation |
163 | +# the rights to use, copy, modify, merge, publish, distribute, sublicense, |
164 | +# and/or sell copies of the Software, and to permit persons to whom the |
165 | +# Software is furnished to do so, subject to the following conditions: |
166 | +# |
167 | +# The above copyright notice and this permission notice shall be included in |
168 | +# all copies or substantial portions of the Software. |
169 | +# |
170 | +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
171 | +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
172 | +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
173 | +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
174 | +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
175 | +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER |
176 | +# DEALINGS IN THE SOFTWARE. |
177 | + |
178 | +__author__ = "Marius Gedminas (marius@gedmin.as)" |
179 | +__copyright__ = "Copyright (c) 2008, 2009 Marius Gedminas" |
180 | +__license__ = "MIT" |
181 | +__version__ = "1.2+bzr" |
182 | +__date__ = "2009-03-25" |
183 | + |
184 | + |
185 | +import gc |
186 | +import inspect |
187 | +import types |
188 | +import weakref |
189 | +import operator |
190 | +import os |
191 | + |
192 | + |
193 | +def count(typename): |
194 | + """Count objects tracked by the garbage collector with a given class name. |
195 | + |
196 | + Example: |
197 | + |
198 | + >>> count('dict') |
199 | + 42 |
200 | + >>> count('MyClass') |
201 | + 3 |
202 | + |
203 | + Note that the GC does not track simple objects like int or str. |
204 | + """ |
205 | + return sum(1 for o in gc.get_objects() if type(o).__name__ == typename) |
206 | + |
207 | + |
208 | +def typestats(): |
209 | + """Count the number of instances for each type tracked by the GC. |
210 | + |
211 | + Note that the GC does not track simple objects like int or str. |
212 | + |
213 | + Note that classes with the same name but defined in different modules |
214 | + will be lumped together. |
215 | + """ |
216 | + stats = {} |
217 | + for o in gc.get_objects(): |
218 | + stats.setdefault(type(o).__name__, 0) |
219 | + stats[type(o).__name__] += 1 |
220 | + return stats |
221 | + |
222 | + |
223 | +def show_most_common_types(limit=10): |
224 | + """Count the names of types with the most instances. |
225 | + |
226 | + Note that the GC does not track simple objects like int or str. |
227 | + |
228 | + Note that classes with the same name but defined in different modules |
229 | + will be lumped together. |
230 | + """ |
231 | + stats = sorted(typestats().items(), key=operator.itemgetter(1), |
232 | + reverse=True) |
233 | + if limit: |
234 | + stats = stats[:limit] |
235 | + width = max(len(name) for name, count in stats) |
236 | + for name, count in stats[:limit]: |
237 | + print name.ljust(width), count |
238 | + |
239 | + |
240 | +def by_type(typename): |
241 | + """Return objects tracked by the garbage collector with a given class name. |
242 | + |
243 | + Example: |
244 | + |
245 | + >>> by_type('MyClass') |
246 | + [<mymodule.MyClass object at 0x...>] |
247 | + |
248 | + Note that the GC does not track simple objects like int or str. |
249 | + """ |
250 | + return [o for o in gc.get_objects() if type(o).__name__ == typename] |
251 | + |
252 | + |
253 | +def at(addr): |
254 | + """Return an object at a given memory address. |
255 | + |
256 | + The reverse of id(obj): |
257 | + |
258 | + >>> at(id(obj)) is obj |
259 | + True |
260 | + |
261 | + Note that this function does not work on objects that are not tracked by |
262 | + the GC (e.g. ints or strings). |
263 | + """ |
264 | + for o in gc.get_objects(): |
265 | + if id(o) == addr: |
266 | + return o |
267 | + return None |
268 | + |
269 | + |
270 | +def find_backref_chain(obj, predicate, max_depth=20, extra_ignore=()): |
271 | + """Find a shortest chain of references leading to obj. |
272 | + |
273 | + The start of the chain will be some object that matches your predicate. |
274 | + |
275 | + ``max_depth`` limits the search depth. |
276 | + |
277 | + ``extra_ignore`` can be a list of object IDs to exclude those objects from |
278 | + your search. |
279 | + |
280 | + Example: |
281 | + |
282 | + >>> find_backref_chain(obj, inspect.ismodule) |
283 | + [<module ...>, ..., obj] |
284 | + |
285 | + Returns None if such a chain could not be found. |
286 | + """ |
287 | + queue = [obj] |
288 | + depth = {id(obj): 0} |
289 | + parent = {id(obj): None} |
290 | + ignore = set(extra_ignore) |
291 | + ignore.add(id(extra_ignore)) |
292 | + ignore.add(id(queue)) |
293 | + ignore.add(id(depth)) |
294 | + ignore.add(id(parent)) |
295 | + ignore.add(id(ignore)) |
296 | + gc.collect() |
297 | + while queue: |
298 | + target = queue.pop(0) |
299 | + if predicate(target): |
300 | + chain = [target] |
301 | + while parent[id(target)] is not None: |
302 | + target = parent[id(target)] |
303 | + chain.append(target) |
304 | + return chain |
305 | + tdepth = depth[id(target)] |
306 | + if tdepth < max_depth: |
307 | + referrers = gc.get_referrers(target) |
308 | + ignore.add(id(referrers)) |
309 | + for source in referrers: |
310 | + if inspect.isframe(source) or id(source) in ignore: |
311 | + continue |
312 | + if id(source) not in depth: |
313 | + depth[id(source)] = tdepth + 1 |
314 | + parent[id(source)] = target |
315 | + queue.append(source) |
316 | + return None # not found |
317 | + |
318 | + |
319 | +def show_backrefs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, |
320 | + highlight=None): |
321 | + """Generate an object reference graph ending at ``objs`` |
322 | + |
323 | + The graph will show you what objects refer to ``objs``, directly and |
324 | + indirectly. |
325 | + |
326 | + ``objs`` can be a single object, or it can be a list of objects. |
327 | + |
328 | + Produces a Graphviz .dot file and spawns a viewer (xdot) if one is |
329 | + installed, otherwise converts the graph to a .png image. |
330 | + |
331 | + Use ``max_depth`` and ``too_many`` to limit the depth and breadth of the |
332 | + graph. |
333 | + |
334 | + Use ``filter`` (a predicate) and ``extra_ignore`` (a list of object IDs) to |
335 | + remove undesired objects from the graph. |
336 | + |
337 | + Use ``highlight`` (a predicate) to highlight certain graph nodes in blue. |
338 | + |
339 | + Examples: |
340 | + |
341 | + >>> show_backrefs(obj) |
342 | + >>> show_backrefs([obj1, obj2]) |
343 | + >>> show_backrefs(obj, max_depth=5) |
344 | + >>> show_backrefs(obj, filter=lambda x: not inspect.isclass(x)) |
345 | + >>> show_backrefs(obj, highlight=inspect.isclass) |
346 | + >>> show_backrefs(obj, extra_ignore=[id(locals())]) |
347 | + |
348 | + """ |
349 | + show_graph(objs, max_depth=max_depth, extra_ignore=extra_ignore, |
350 | + filter=filter, too_many=too_many, highlight=highlight, |
351 | + edge_func=gc.get_referrers, swap_source_target=False) |
352 | + |
353 | + |
354 | +def show_refs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, |
355 | + highlight=None): |
356 | + """Generate an object reference graph starting at ``objs`` |
357 | + |
358 | + The graph will show you what objects are reachable from ``objs``, directly |
359 | + and indirectly. |
360 | + |
361 | + ``objs`` can be a single object, or it can be a list of objects. |
362 | + |
363 | + Produces a Graphviz .dot file and spawns a viewer (xdot) if one is |
364 | + installed, otherwise converts the graph to a .png image. |
365 | + |
366 | + Use ``max_depth`` and ``too_many`` to limit the depth and breadth of the |
367 | + graph. |
368 | + |
369 | + Use ``filter`` (a predicate) and ``extra_ignore`` (a list of object IDs) to |
370 | + remove undesired objects from the graph. |
371 | + |
372 | + Use ``highlight`` (a predicate) to highlight certain graph nodes in blue. |
373 | + |
374 | + Examples: |
375 | + |
376 | + >>> show_refs(obj) |
377 | + >>> show_refs([obj1, obj2]) |
378 | + >>> show_refs(obj, max_depth=5) |
379 | + >>> show_refs(obj, filter=lambda x: not inspect.isclass(x)) |
380 | + >>> show_refs(obj, highlight=inspect.isclass) |
381 | + >>> show_refs(obj, extra_ignore=[id(locals())]) |
382 | + |
383 | + """ |
384 | + show_graph(objs, max_depth=max_depth, extra_ignore=extra_ignore, |
385 | + filter=filter, too_many=too_many, highlight=highlight, |
386 | + edge_func=gc.get_referents, swap_source_target=True) |
387 | + |
388 | +# |
389 | +# Internal helpers |
390 | +# |
391 | + |
392 | +def show_graph(objs, edge_func, swap_source_target, |
393 | + max_depth=3, extra_ignore=(), filter=None, too_many=10, |
394 | + highlight=None): |
395 | + if not isinstance(objs, (list, tuple)): |
396 | + objs = [objs] |
397 | + f = file('objects.dot', 'w') |
398 | + print >> f, 'digraph ObjectGraph {' |
399 | + print >> f, ' node[shape=box, style=filled, fillcolor=white];' |
400 | + queue = [] |
401 | + depth = {} |
402 | + ignore = set(extra_ignore) |
403 | + ignore.add(id(objs)) |
404 | + ignore.add(id(extra_ignore)) |
405 | + ignore.add(id(queue)) |
406 | + ignore.add(id(depth)) |
407 | + ignore.add(id(ignore)) |
408 | + for obj in objs: |
409 | + print >> f, ' %s[fontcolor=red];' % (obj_node_id(obj)) |
410 | + depth[id(obj)] = 0 |
411 | + queue.append(obj) |
412 | + gc.collect() |
413 | + nodes = 0 |
414 | + while queue: |
415 | + nodes += 1 |
416 | + target = queue.pop(0) |
417 | + tdepth = depth[id(target)] |
418 | + print >> f, ' %s[label="%s"];' % (obj_node_id(target), obj_label(target, tdepth)) |
419 | + h, s, v = gradient((0, 0, 1), (0, 0, .3), tdepth, max_depth) |
420 | + if inspect.ismodule(target): |
421 | + h = .3 |
422 | + s = 1 |
423 | + if highlight and highlight(target): |
424 | + h = .6 |
425 | + s = .6 |
426 | + v = 0.5 + v * 0.5 |
427 | + print >> f, ' %s[fillcolor="%g,%g,%g"];' % (obj_node_id(target), h, s, v) |
428 | + if v < 0.5: |
429 | + print >> f, ' %s[fontcolor=white];' % (obj_node_id(target)) |
430 | + if hasattr(target, '__del__'): |
431 | + print >> f, " %s->%s_has_a_del[color=red,style=dotted,len=0.25,weight=10];" % (obj_node_id(target), obj_node_id(target)) |
432 | + print >> f, ' %s_has_a_del[label="__del__",shape=doublecircle,height=0.25,color=red,fillcolor="0,.5,1",fontsize=6];' % (obj_node_id(target)) |
433 | + if inspect.ismodule(target) or tdepth >= max_depth: |
434 | + continue |
435 | + neighbours = edge_func(target) |
436 | + ignore.add(id(neighbours)) |
437 | + n = 0 |
438 | + for source in neighbours: |
439 | + if inspect.isframe(source) or id(source) in ignore: |
440 | + continue |
441 | + if filter and not filter(source): |
442 | + continue |
443 | + if swap_source_target: |
444 | + srcnode, tgtnode = target, source |
445 | + else: |
446 | + srcnode, tgtnode = source, target |
447 | + elabel = edge_label(srcnode, tgtnode) |
448 | + print >> f, ' %s -> %s%s;' % (obj_node_id(srcnode), obj_node_id(tgtnode), elabel) |
449 | + if id(source) not in depth: |
450 | + depth[id(source)] = tdepth + 1 |
451 | + queue.append(source) |
452 | + n += 1 |
453 | + if n >= too_many: |
454 | + print >> f, ' %s[color=red];' % obj_node_id(target) |
455 | + break |
456 | + print >> f, "}" |
457 | + f.close() |
458 | + print "Graph written to objects.dot (%d nodes)" % nodes |
459 | + if os.system('which xdot >/dev/null') == 0: |
460 | + print "Spawning graph viewer (xdot)" |
461 | + os.system("xdot objects.dot &") |
462 | + else: |
463 | + os.system("dot -Tpng objects.dot > objects.png") |
464 | + print "Image generated as objects.png" |
465 | + |
466 | + |
467 | +def obj_node_id(obj): |
468 | + if isinstance(obj, weakref.ref): |
469 | + return 'all_weakrefs_are_one' |
470 | + return ('o%d' % id(obj)).replace('-', '_') |
471 | + |
472 | + |
473 | +def obj_label(obj, depth): |
474 | + return quote(type(obj).__name__ + ':\n' + |
475 | + safe_repr(obj)) |
476 | + |
477 | + |
478 | +def quote(s): |
479 | + return s.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n") |
480 | + |
481 | + |
482 | +def safe_repr(obj): |
483 | + try: |
484 | + return short_repr(obj) |
485 | + except: |
486 | + return '(unrepresentable)' |
487 | + |
488 | + |
489 | +def short_repr(obj): |
490 | + if isinstance(obj, (type, types.ModuleType, types.BuiltinMethodType, |
491 | + types.BuiltinFunctionType)): |
492 | + return obj.__name__ |
493 | + if isinstance(obj, types.MethodType): |
494 | + if obj.im_self is not None: |
495 | + return obj.im_func.__name__ + ' (bound)' |
496 | + else: |
497 | + return obj.im_func.__name__ |
498 | + if isinstance(obj, (tuple, list, dict, set)): |
499 | + return '%d items' % len(obj) |
500 | + if isinstance(obj, weakref.ref): |
501 | + return 'all_weakrefs_are_one' |
502 | + return repr(obj)[:40] |
503 | + |
504 | + |
505 | +def gradient(start_color, end_color, depth, max_depth): |
506 | + if max_depth == 0: |
507 | + # avoid division by zero |
508 | + return start_color |
509 | + h1, s1, v1 = start_color |
510 | + h2, s2, v2 = end_color |
511 | + f = float(depth) / max_depth |
512 | + h = h1 * (1-f) + h2 * f |
513 | + s = s1 * (1-f) + s2 * f |
514 | + v = v1 * (1-f) + v2 * f |
515 | + return h, s, v |
516 | + |
517 | + |
518 | +def edge_label(source, target): |
519 | + if isinstance(target, dict) and target is getattr(source, '__dict__', None): |
520 | + return ' [label="__dict__",weight=10]' |
521 | + elif isinstance(source, dict): |
522 | + for k, v in source.iteritems(): |
523 | + if v is target: |
524 | + if isinstance(k, basestring) and k: |
525 | + return ' [label="%s",weight=2]' % quote(k) |
526 | + else: |
527 | + return ' [label="%s"]' % quote(safe_repr(k)) |
528 | + return '' |
529 | + |
530 | |
531 | === added file 'scripts/ibid-memgraph' |
532 | --- scripts/ibid-memgraph 1970-01-01 00:00:00 +0000 |
533 | +++ scripts/ibid-memgraph 2009-06-27 22:50:48 +0000 |
534 | @@ -0,0 +1,68 @@ |
535 | +#!/usr/bin/env python |
536 | + |
537 | +import gzip |
538 | +import optparse |
539 | +import sys |
540 | + |
541 | +import dateutil |
542 | +import matplotlib.pyplot as pyplot |
543 | +from matplotlib.dates import date2num |
544 | +import numpy |
545 | + |
546 | +parser = optparse.OptionParser(usage="""%prog logfile |
547 | +logfile is a memory log file (possibly gzipped)""") |
548 | +parser.add_option('-o', '--output', dest='output', metavar='FILE', |
549 | + help='Output to filename rather than interactive') |
550 | +parser.add_option('-d', '--dpi', dest='dpi', |
551 | + help='Output DPI') |
552 | + |
553 | +(options, args) = parser.parse_args() |
554 | + |
555 | +if len(args) != 1: |
556 | + sys.stderr.write("Log file required\n") |
557 | + sys.exit(2) |
558 | + |
559 | +f = args[0].endswith('.gz') and gzip.GzipFile(args[0], 'r') or file(args[0], 'r') |
560 | +header = f.readline().strip() |
561 | +f.close() |
562 | +if not header.startswith('Ibid Memory Log v2: '): |
563 | + sys.stderr.write("Incorrect file format\n") |
564 | + sys.exit(1) |
565 | + |
566 | +botname = header.split(':', 1)[1].strip() |
567 | + |
568 | +data = numpy.loadtxt(args[0], |
569 | + dtype=float, |
570 | + delimiter=',', |
571 | + skiprows=1, |
572 | + converters={0: lambda x: date2num(dateutil.parser.parse(x))}, |
573 | +) |
574 | + |
575 | +fig = pyplot.figure() |
576 | +ax_obj = fig.add_subplot(111) |
577 | +ax_obj.set_xlabel('time (s)') |
578 | +ax_mem = ax_obj.twinx() |
579 | +ax_mem.grid(True) |
580 | + |
581 | +ax_obj.plot_date(data[:,0], data[:,1]/1000, 'b-', label='Objects (k)') |
582 | +ax_obj.set_ylabel('Objects (k)', color='b') |
583 | + |
584 | +for tl in ax_obj.get_yticklabels(): |
585 | + tl.set_color('b') |
586 | + |
587 | +ax_mem.plot_date(data[:,0], data[:,2]/1024, 'r-', label='VM Size') |
588 | +ax_mem.plot_date(data[:,0], data[:,3]/1024, 'g-', label='VM RSS') |
589 | + |
590 | +ax_mem.set_ylabel('Memory (MiB)') |
591 | + |
592 | +pyplot.legend(loc='best') |
593 | +pyplot.title(botname + ' Memory Usage') |
594 | + |
595 | +fig.autofmt_xdate() |
596 | + |
597 | +if options.output: |
598 | + pyplot.savefig(options.output, dpi=options.dpi) |
599 | +else: |
600 | + pyplot.show() |
601 | + |
602 | +# vi: set et sta sw=4 ts=4: |
603 | |
604 | === added file 'scripts/ibid-objgraph' |
605 | --- scripts/ibid-objgraph 1970-01-01 00:00:00 +0000 |
606 | +++ scripts/ibid-objgraph 2009-06-27 22:50:48 +0000 |
607 | @@ -0,0 +1,77 @@ |
608 | +#!/usr/bin/env python |
609 | + |
610 | +import gzip |
611 | +import optparse |
612 | +import sys |
613 | + |
614 | +import dateutil |
615 | +import simplejson |
616 | + |
617 | +import matplotlib.pyplot as pyplot |
618 | +from matplotlib.dates import date2num |
619 | +import numpy |
620 | + |
621 | +parser = optparse.OptionParser(usage="""%prog [arguments] logfile types... |
622 | +logfile is an object log file (possibly gzipped) |
623 | +types are a list of object types to graph""") |
624 | +parser.add_option('-o', '--output', dest='output', metavar='FILE', |
625 | + help='Output to filename rather than interactive') |
626 | +parser.add_option('-d', '--dpi', dest='dpi', |
627 | + help='Output DPI') |
628 | + |
629 | +(options, args) = parser.parse_args() |
630 | + |
631 | +if len(args) < 1: |
632 | + sys.stderr.write("Log file required\n") |
633 | + sys.exit(2) |
634 | +if len(args) < 2: |
635 | + sys.stderr.write("At least one type required\n") |
636 | + sys.exit(2) |
637 | + |
638 | +f = args[0].endswith('.gz') and gzip.GzipFile(args[0], 'r') or file(args[0], 'r') |
639 | +header = f.readline().strip() |
640 | + |
641 | +if not header.startswith('Ibid Object Log v1: '): |
642 | + sys.stderr.write("Incorrect file format\n") |
643 | + sys.exit(1) |
644 | + |
645 | +botname = header.split(':', 1)[1].strip() |
646 | + |
647 | +types = args[1:] |
648 | + |
649 | +times = [] |
650 | +data = [] |
651 | + |
652 | +for line in f: |
653 | + timestamp, json = line.split(' ', 1) |
654 | + |
655 | + times.append(date2num(dateutil.parser.parse(timestamp))) |
656 | + |
657 | + json = simplejson.loads(json) |
658 | + data.append([json.get(type, 0) for type in types]) |
659 | + |
660 | +times = numpy.array(times, dtype=float) |
661 | +data = numpy.array(data, dtype=int) |
662 | + |
663 | +fig = pyplot.figure() |
664 | +ax = fig.add_subplot(111) |
665 | +ax.set_xlabel('time (s)') |
666 | +ax.set_ylabel('Objects (k)', color='b') |
667 | +ax.grid(True) |
668 | + |
669 | +ax.set_color_cycle(list('brgycmk')) |
670 | + |
671 | +for i, type in enumerate(types): |
672 | + ax.plot_date(times, data[:,i], '-', label=type) |
673 | + |
674 | +pyplot.legend(loc='best') |
675 | +pyplot.title(botname + ' Object Stats') |
676 | + |
677 | +fig.autofmt_xdate() |
678 | + |
679 | +if options.output: |
680 | + pyplot.savefig(options.output, dpi=options.dpi) |
681 | +else: |
682 | + pyplot.show() |
683 | + |
684 | +# vi: set et sta sw=4 ts=4: |
Some of this may not be useful. Do we want this kind of thing in mainline?