Merge lp:~stefanor/ibid/memory-logging into lp:~ibid-core/ibid/old-trunk-pack-0.92
- memory-logging
- Merge into old-trunk-pack-0.92
Proposed by
Stefano Rivera
Status: | Merged |
---|---|
Approved by: | Stefano Rivera |
Approved revision: | 681 |
Merged at revision: | 678 |
Proposed branch: | lp:~stefanor/ibid/memory-logging |
Merge into: | lp:~ibid-core/ibid/old-trunk-pack-0.92 |
Diff against target: | None |
To merge this branch: | bzr merge lp:~stefanor/ibid/memory-logging |
Related bugs: |
Reviewer | Review Type | Date Requested | Status |
---|---|---|---|
Jonathan Hitchcock | Approve | ||
Michael Gorven | Approve | ||
Review via email: mp+7974@code.launchpad.net |
Commit message
Description of the change
To post a comment you must log in.
Revision history for this message
Stefano Rivera (stefanor) wrote : | # |
lp:~stefanor/ibid/memory-logging
updated
- 680. By Stefano Rivera
-
Don't overlap dates in memgraph
- 681. By Stefano Rivera
-
Disable logging by default
Revision history for this message
Michael Gorven (mgorven) wrote : | # |
I would have tried to use Python logging's rotating logfiles instead of doing
it manually, but since you've already implemented this it's fine. I think
that the logging processor should have autoload=False though, so that it has
to be explicitly enabled.
review approve
review:
Approve
Revision history for this message
Jonathan Hitchcock (vhata) wrote : | # |
Not sure if we do want it in the main line of code. Let's put it in now, and factor it out into devtools-plugins when we finally get round to refactoring.
review:
Approve
Preview Diff
[H/L] Next/Prev Comment, [J/K] Next/Prev File, [N/P] Next/Prev Hunk
1 | === added file 'ibid/plugins/memory.py' | |||
2 | --- ibid/plugins/memory.py 1970-01-01 00:00:00 +0000 | |||
3 | +++ ibid/plugins/memory.py 2009-06-27 22:50:48 +0000 | |||
4 | @@ -0,0 +1,104 @@ | |||
5 | 1 | import csv | ||
6 | 2 | from datetime import datetime, timedelta | ||
7 | 3 | import gc | ||
8 | 4 | import gzip | ||
9 | 5 | import os | ||
10 | 6 | import os.path | ||
11 | 7 | |||
12 | 8 | import simplejson | ||
13 | 9 | import objgraph | ||
14 | 10 | |||
15 | 11 | import ibid | ||
16 | 12 | from ibid.plugins import Processor, match | ||
17 | 13 | from ibid.config import Option, IntOption | ||
18 | 14 | |||
19 | 15 | help = {} | ||
20 | 16 | |||
21 | 17 | help['memory'] = u'Debugging module that keeps track of memory usage' | ||
22 | 18 | |||
23 | 19 | def get_memusage(): | ||
24 | 20 | status = file('/proc/%i/status' % os.getpid(), 'r').readlines() | ||
25 | 21 | status = [x.strip().split(':', 1) for x in status if x.startswith('Vm')] | ||
26 | 22 | return dict((x, int(y.split()[0])) for (x, y) in status) | ||
27 | 23 | |||
28 | 24 | class MemoryLog(Processor): | ||
29 | 25 | |||
30 | 26 | feature = 'memory' | ||
31 | 27 | |||
32 | 28 | mem_filename = Option('mem_filename', 'Memory log filename', 'logs/memory.log') | ||
33 | 29 | mem_interval = IntOption('mem_interval', 'Interval between memory stat logging', 1) | ||
34 | 30 | obj_filename = Option('obj_filename', 'Object Statistics log filename', 'logs/objstats.log') | ||
35 | 31 | obj_interval = IntOption('obj_interval', 'Interval between logging object statistics', 1) | ||
36 | 32 | |||
37 | 33 | def setup(self): | ||
38 | 34 | fns = [] | ||
39 | 35 | if self.mem_interval: | ||
40 | 36 | fns.append(self.mem_filename) | ||
41 | 37 | if self.obj_interval: | ||
42 | 38 | fns.append(self.obj_filename) | ||
43 | 39 | for filename in fns: | ||
44 | 40 | if os.path.isfile(filename + '.10.gz'): | ||
45 | 41 | os.remove(filename + '.10.gz') | ||
46 | 42 | for i in range(9, 0, -1): | ||
47 | 43 | if os.path.isfile('%s.%i.gz' % (filename, i)): | ||
48 | 44 | os.rename('%s.%i.gz' % (filename, i), | ||
49 | 45 | '%s.%i.gz' % (filename, i+1)) | ||
50 | 46 | if os.path.isfile(filename): | ||
51 | 47 | o = gzip.open(filename + '.1.gz', 'wb') | ||
52 | 48 | i = open(filename, 'rb') | ||
53 | 49 | o.write(i.read()) | ||
54 | 50 | o.close() | ||
55 | 51 | i.close() | ||
56 | 52 | stat = os.stat(filename) | ||
57 | 53 | os.utime(filename + '.1.gz', (stat.st_atime, stat.st_mtime)) | ||
58 | 54 | |||
59 | 55 | if self.mem_interval: | ||
60 | 56 | self.mem_file = file(self.mem_filename, 'w+') | ||
61 | 57 | self.mem_file.write('Ibid Memory Log v2: %s\n' % ibid.config['botname']) | ||
62 | 58 | self.mem_csv = csv.writer(self.mem_file) | ||
63 | 59 | self.mem_last = datetime.utcnow() | ||
64 | 60 | |||
65 | 61 | if self.obj_interval: | ||
66 | 62 | self.obj_file = file(self.obj_filename, 'w+') | ||
67 | 63 | self.obj_file.write('Ibid Object Log v1: %s\n' % ibid.config['botname']) | ||
68 | 64 | self.obj_last = datetime.utcnow() | ||
69 | 65 | |||
70 | 66 | def process(self, event): | ||
71 | 67 | now = datetime.utcnow() | ||
72 | 68 | if self.mem_interval and now - self.mem_last >= timedelta(seconds=self.mem_interval): | ||
73 | 69 | self.mem_log() | ||
74 | 70 | self.mem_last = now | ||
75 | 71 | if self.obj_interval and now - self.obj_last >= timedelta(seconds=self.obj_interval): | ||
76 | 72 | self.obj_log() | ||
77 | 73 | self.obj_last = now | ||
78 | 74 | |||
79 | 75 | def mem_log(self): | ||
80 | 76 | status = get_memusage() | ||
81 | 77 | gc.collect() | ||
82 | 78 | |||
83 | 79 | self.mem_csv.writerow(( | ||
84 | 80 | datetime.utcnow().isoformat(), | ||
85 | 81 | len(gc.get_objects()), | ||
86 | 82 | status['VmSize'], | ||
87 | 83 | status['VmRSS'], | ||
88 | 84 | )) | ||
89 | 85 | self.mem_file.flush() | ||
90 | 86 | |||
91 | 87 | def obj_log(self): | ||
92 | 88 | stats = objgraph.typestats() | ||
93 | 89 | self.obj_file.write('%s %s\n' % ( | ||
94 | 90 | datetime.utcnow().isoformat(), | ||
95 | 91 | simplejson.dumps(objgraph.typestats()) | ||
96 | 92 | )) | ||
97 | 93 | self.obj_file.flush() | ||
98 | 94 | |||
99 | 95 | class MemoryInfo(Processor): | ||
100 | 96 | u"memory usage" | ||
101 | 97 | |||
102 | 98 | feature = 'memory' | ||
103 | 99 | |||
104 | 100 | @match('^memory\s+usage$') | ||
105 | 101 | def memory_usage(self, event): | ||
106 | 102 | event.addresponse(u"Today, I weigh in at %(VmSize)i kiB Virtual, %(VmRSS)s kiB RSS", get_memusage()) | ||
107 | 103 | |||
108 | 104 | # vi: set et sta sw=4 ts=4: | ||
109 | 0 | 105 | ||
110 | === added file 'lib/objgraph.py' | |||
111 | --- lib/objgraph.py 1970-01-01 00:00:00 +0000 | |||
112 | +++ lib/objgraph.py 2009-06-27 22:50:48 +0000 | |||
113 | @@ -0,0 +1,416 @@ | |||
114 | 1 | """ | ||
115 | 2 | Ad-hoc tools for drawing Python object reference graphs with graphviz. | ||
116 | 3 | |||
117 | 4 | This module is more useful as a repository of sample code and ideas, than | ||
118 | 5 | as a finished product. For documentation and background, read | ||
119 | 6 | |||
120 | 7 | http://mg.pov.lt/blog/hunting-python-memleaks.html | ||
121 | 8 | http://mg.pov.lt/blog/python-object-graphs.html | ||
122 | 9 | http://mg.pov.lt/blog/object-graphs-with-graphviz.html | ||
123 | 10 | |||
124 | 11 | in that order. Then use pydoc to read the docstrings, as there were | ||
125 | 12 | improvements made since those blog posts. | ||
126 | 13 | |||
127 | 14 | Copyright (c) 2008 Marius Gedminas <marius@pov.lt> | ||
128 | 15 | |||
129 | 16 | Released under the MIT licence. | ||
130 | 17 | |||
131 | 18 | |||
132 | 19 | Changes | ||
133 | 20 | ======= | ||
134 | 21 | |||
135 | 22 | (unreleased) | ||
136 | 23 | ------------ | ||
137 | 24 | |||
138 | 25 | Highlight objects with a __del__ method. | ||
139 | 26 | |||
140 | 27 | |||
141 | 28 | 1.2 (2009-03-25) | ||
142 | 29 | ---------------- | ||
143 | 30 | |||
144 | 31 | Project website, public source repository, uploaded to PyPI. | ||
145 | 32 | |||
146 | 33 | No code changes. | ||
147 | 34 | |||
148 | 35 | |||
149 | 36 | 1.1dev (2008-09-05) | ||
150 | 37 | ------------------- | ||
151 | 38 | |||
152 | 39 | New function: show_refs() for showing forward references. | ||
153 | 40 | |||
154 | 41 | New functions: typestats() and show_most_common_types(). | ||
155 | 42 | |||
156 | 43 | Object boxes are less crammed with useless information (such as IDs). | ||
157 | 44 | |||
158 | 45 | Spawns xdot if it is available. | ||
159 | 46 | """ | ||
160 | 47 | # Permission is hereby granted, free of charge, to any person obtaining a | ||
161 | 48 | # copy of this software and associated documentation files (the "Software"), | ||
162 | 49 | # to deal in the Software without restriction, including without limitation | ||
163 | 50 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, | ||
164 | 51 | # and/or sell copies of the Software, and to permit persons to whom the | ||
165 | 52 | # Software is furnished to do so, subject to the following conditions: | ||
166 | 53 | # | ||
167 | 54 | # The above copyright notice and this permission notice shall be included in | ||
168 | 55 | # all copies or substantial portions of the Software. | ||
169 | 56 | # | ||
170 | 57 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
171 | 58 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
172 | 59 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | ||
173 | 60 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | ||
174 | 61 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | ||
175 | 62 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER | ||
176 | 63 | # DEALINGS IN THE SOFTWARE. | ||
177 | 64 | |||
178 | 65 | __author__ = "Marius Gedminas (marius@gedmin.as)" | ||
179 | 66 | __copyright__ = "Copyright (c) 2008, 2009 Marius Gedminas" | ||
180 | 67 | __license__ = "MIT" | ||
181 | 68 | __version__ = "1.2+bzr" | ||
182 | 69 | __date__ = "2009-03-25" | ||
183 | 70 | |||
184 | 71 | |||
185 | 72 | import gc | ||
186 | 73 | import inspect | ||
187 | 74 | import types | ||
188 | 75 | import weakref | ||
189 | 76 | import operator | ||
190 | 77 | import os | ||
191 | 78 | |||
192 | 79 | |||
193 | 80 | def count(typename): | ||
194 | 81 | """Count objects tracked by the garbage collector with a given class name. | ||
195 | 82 | |||
196 | 83 | Example: | ||
197 | 84 | |||
198 | 85 | >>> count('dict') | ||
199 | 86 | 42 | ||
200 | 87 | >>> count('MyClass') | ||
201 | 88 | 3 | ||
202 | 89 | |||
203 | 90 | Note that the GC does not track simple objects like int or str. | ||
204 | 91 | """ | ||
205 | 92 | return sum(1 for o in gc.get_objects() if type(o).__name__ == typename) | ||
206 | 93 | |||
207 | 94 | |||
208 | 95 | def typestats(): | ||
209 | 96 | """Count the number of instances for each type tracked by the GC. | ||
210 | 97 | |||
211 | 98 | Note that the GC does not track simple objects like int or str. | ||
212 | 99 | |||
213 | 100 | Note that classes with the same name but defined in different modules | ||
214 | 101 | will be lumped together. | ||
215 | 102 | """ | ||
216 | 103 | stats = {} | ||
217 | 104 | for o in gc.get_objects(): | ||
218 | 105 | stats.setdefault(type(o).__name__, 0) | ||
219 | 106 | stats[type(o).__name__] += 1 | ||
220 | 107 | return stats | ||
221 | 108 | |||
222 | 109 | |||
223 | 110 | def show_most_common_types(limit=10): | ||
224 | 111 | """Count the names of types with the most instances. | ||
225 | 112 | |||
226 | 113 | Note that the GC does not track simple objects like int or str. | ||
227 | 114 | |||
228 | 115 | Note that classes with the same name but defined in different modules | ||
229 | 116 | will be lumped together. | ||
230 | 117 | """ | ||
231 | 118 | stats = sorted(typestats().items(), key=operator.itemgetter(1), | ||
232 | 119 | reverse=True) | ||
233 | 120 | if limit: | ||
234 | 121 | stats = stats[:limit] | ||
235 | 122 | width = max(len(name) for name, count in stats) | ||
236 | 123 | for name, count in stats[:limit]: | ||
237 | 124 | print name.ljust(width), count | ||
238 | 125 | |||
239 | 126 | |||
240 | 127 | def by_type(typename): | ||
241 | 128 | """Return objects tracked by the garbage collector with a given class name. | ||
242 | 129 | |||
243 | 130 | Example: | ||
244 | 131 | |||
245 | 132 | >>> by_type('MyClass') | ||
246 | 133 | [<mymodule.MyClass object at 0x...>] | ||
247 | 134 | |||
248 | 135 | Note that the GC does not track simple objects like int or str. | ||
249 | 136 | """ | ||
250 | 137 | return [o for o in gc.get_objects() if type(o).__name__ == typename] | ||
251 | 138 | |||
252 | 139 | |||
253 | 140 | def at(addr): | ||
254 | 141 | """Return an object at a given memory address. | ||
255 | 142 | |||
256 | 143 | The reverse of id(obj): | ||
257 | 144 | |||
258 | 145 | >>> at(id(obj)) is obj | ||
259 | 146 | True | ||
260 | 147 | |||
261 | 148 | Note that this function does not work on objects that are not tracked by | ||
262 | 149 | the GC (e.g. ints or strings). | ||
263 | 150 | """ | ||
264 | 151 | for o in gc.get_objects(): | ||
265 | 152 | if id(o) == addr: | ||
266 | 153 | return o | ||
267 | 154 | return None | ||
268 | 155 | |||
269 | 156 | |||
270 | 157 | def find_backref_chain(obj, predicate, max_depth=20, extra_ignore=()): | ||
271 | 158 | """Find a shortest chain of references leading to obj. | ||
272 | 159 | |||
273 | 160 | The start of the chain will be some object that matches your predicate. | ||
274 | 161 | |||
275 | 162 | ``max_depth`` limits the search depth. | ||
276 | 163 | |||
277 | 164 | ``extra_ignore`` can be a list of object IDs to exclude those objects from | ||
278 | 165 | your search. | ||
279 | 166 | |||
280 | 167 | Example: | ||
281 | 168 | |||
282 | 169 | >>> find_backref_chain(obj, inspect.ismodule) | ||
283 | 170 | [<module ...>, ..., obj] | ||
284 | 171 | |||
285 | 172 | Returns None if such a chain could not be found. | ||
286 | 173 | """ | ||
287 | 174 | queue = [obj] | ||
288 | 175 | depth = {id(obj): 0} | ||
289 | 176 | parent = {id(obj): None} | ||
290 | 177 | ignore = set(extra_ignore) | ||
291 | 178 | ignore.add(id(extra_ignore)) | ||
292 | 179 | ignore.add(id(queue)) | ||
293 | 180 | ignore.add(id(depth)) | ||
294 | 181 | ignore.add(id(parent)) | ||
295 | 182 | ignore.add(id(ignore)) | ||
296 | 183 | gc.collect() | ||
297 | 184 | while queue: | ||
298 | 185 | target = queue.pop(0) | ||
299 | 186 | if predicate(target): | ||
300 | 187 | chain = [target] | ||
301 | 188 | while parent[id(target)] is not None: | ||
302 | 189 | target = parent[id(target)] | ||
303 | 190 | chain.append(target) | ||
304 | 191 | return chain | ||
305 | 192 | tdepth = depth[id(target)] | ||
306 | 193 | if tdepth < max_depth: | ||
307 | 194 | referrers = gc.get_referrers(target) | ||
308 | 195 | ignore.add(id(referrers)) | ||
309 | 196 | for source in referrers: | ||
310 | 197 | if inspect.isframe(source) or id(source) in ignore: | ||
311 | 198 | continue | ||
312 | 199 | if id(source) not in depth: | ||
313 | 200 | depth[id(source)] = tdepth + 1 | ||
314 | 201 | parent[id(source)] = target | ||
315 | 202 | queue.append(source) | ||
316 | 203 | return None # not found | ||
317 | 204 | |||
318 | 205 | |||
319 | 206 | def show_backrefs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, | ||
320 | 207 | highlight=None): | ||
321 | 208 | """Generate an object reference graph ending at ``objs`` | ||
322 | 209 | |||
323 | 210 | The graph will show you what objects refer to ``objs``, directly and | ||
324 | 211 | indirectly. | ||
325 | 212 | |||
326 | 213 | ``objs`` can be a single object, or it can be a list of objects. | ||
327 | 214 | |||
328 | 215 | Produces a Graphviz .dot file and spawns a viewer (xdot) if one is | ||
329 | 216 | installed, otherwise converts the graph to a .png image. | ||
330 | 217 | |||
331 | 218 | Use ``max_depth`` and ``too_many`` to limit the depth and breadth of the | ||
332 | 219 | graph. | ||
333 | 220 | |||
334 | 221 | Use ``filter`` (a predicate) and ``extra_ignore`` (a list of object IDs) to | ||
335 | 222 | remove undesired objects from the graph. | ||
336 | 223 | |||
337 | 224 | Use ``highlight`` (a predicate) to highlight certain graph nodes in blue. | ||
338 | 225 | |||
339 | 226 | Examples: | ||
340 | 227 | |||
341 | 228 | >>> show_backrefs(obj) | ||
342 | 229 | >>> show_backrefs([obj1, obj2]) | ||
343 | 230 | >>> show_backrefs(obj, max_depth=5) | ||
344 | 231 | >>> show_backrefs(obj, filter=lambda x: not inspect.isclass(x)) | ||
345 | 232 | >>> show_backrefs(obj, highlight=inspect.isclass) | ||
346 | 233 | >>> show_backrefs(obj, extra_ignore=[id(locals())]) | ||
347 | 234 | |||
348 | 235 | """ | ||
349 | 236 | show_graph(objs, max_depth=max_depth, extra_ignore=extra_ignore, | ||
350 | 237 | filter=filter, too_many=too_many, highlight=highlight, | ||
351 | 238 | edge_func=gc.get_referrers, swap_source_target=False) | ||
352 | 239 | |||
353 | 240 | |||
354 | 241 | def show_refs(objs, max_depth=3, extra_ignore=(), filter=None, too_many=10, | ||
355 | 242 | highlight=None): | ||
356 | 243 | """Generate an object reference graph starting at ``objs`` | ||
357 | 244 | |||
358 | 245 | The graph will show you what objects are reachable from ``objs``, directly | ||
359 | 246 | and indirectly. | ||
360 | 247 | |||
361 | 248 | ``objs`` can be a single object, or it can be a list of objects. | ||
362 | 249 | |||
363 | 250 | Produces a Graphviz .dot file and spawns a viewer (xdot) if one is | ||
364 | 251 | installed, otherwise converts the graph to a .png image. | ||
365 | 252 | |||
366 | 253 | Use ``max_depth`` and ``too_many`` to limit the depth and breadth of the | ||
367 | 254 | graph. | ||
368 | 255 | |||
369 | 256 | Use ``filter`` (a predicate) and ``extra_ignore`` (a list of object IDs) to | ||
370 | 257 | remove undesired objects from the graph. | ||
371 | 258 | |||
372 | 259 | Use ``highlight`` (a predicate) to highlight certain graph nodes in blue. | ||
373 | 260 | |||
374 | 261 | Examples: | ||
375 | 262 | |||
376 | 263 | >>> show_refs(obj) | ||
377 | 264 | >>> show_refs([obj1, obj2]) | ||
378 | 265 | >>> show_refs(obj, max_depth=5) | ||
379 | 266 | >>> show_refs(obj, filter=lambda x: not inspect.isclass(x)) | ||
380 | 267 | >>> show_refs(obj, highlight=inspect.isclass) | ||
381 | 268 | >>> show_refs(obj, extra_ignore=[id(locals())]) | ||
382 | 269 | |||
383 | 270 | """ | ||
384 | 271 | show_graph(objs, max_depth=max_depth, extra_ignore=extra_ignore, | ||
385 | 272 | filter=filter, too_many=too_many, highlight=highlight, | ||
386 | 273 | edge_func=gc.get_referents, swap_source_target=True) | ||
387 | 274 | |||
388 | 275 | # | ||
389 | 276 | # Internal helpers | ||
390 | 277 | # | ||
391 | 278 | |||
392 | 279 | def show_graph(objs, edge_func, swap_source_target, | ||
393 | 280 | max_depth=3, extra_ignore=(), filter=None, too_many=10, | ||
394 | 281 | highlight=None): | ||
395 | 282 | if not isinstance(objs, (list, tuple)): | ||
396 | 283 | objs = [objs] | ||
397 | 284 | f = file('objects.dot', 'w') | ||
398 | 285 | print >> f, 'digraph ObjectGraph {' | ||
399 | 286 | print >> f, ' node[shape=box, style=filled, fillcolor=white];' | ||
400 | 287 | queue = [] | ||
401 | 288 | depth = {} | ||
402 | 289 | ignore = set(extra_ignore) | ||
403 | 290 | ignore.add(id(objs)) | ||
404 | 291 | ignore.add(id(extra_ignore)) | ||
405 | 292 | ignore.add(id(queue)) | ||
406 | 293 | ignore.add(id(depth)) | ||
407 | 294 | ignore.add(id(ignore)) | ||
408 | 295 | for obj in objs: | ||
409 | 296 | print >> f, ' %s[fontcolor=red];' % (obj_node_id(obj)) | ||
410 | 297 | depth[id(obj)] = 0 | ||
411 | 298 | queue.append(obj) | ||
412 | 299 | gc.collect() | ||
413 | 300 | nodes = 0 | ||
414 | 301 | while queue: | ||
415 | 302 | nodes += 1 | ||
416 | 303 | target = queue.pop(0) | ||
417 | 304 | tdepth = depth[id(target)] | ||
418 | 305 | print >> f, ' %s[label="%s"];' % (obj_node_id(target), obj_label(target, tdepth)) | ||
419 | 306 | h, s, v = gradient((0, 0, 1), (0, 0, .3), tdepth, max_depth) | ||
420 | 307 | if inspect.ismodule(target): | ||
421 | 308 | h = .3 | ||
422 | 309 | s = 1 | ||
423 | 310 | if highlight and highlight(target): | ||
424 | 311 | h = .6 | ||
425 | 312 | s = .6 | ||
426 | 313 | v = 0.5 + v * 0.5 | ||
427 | 314 | print >> f, ' %s[fillcolor="%g,%g,%g"];' % (obj_node_id(target), h, s, v) | ||
428 | 315 | if v < 0.5: | ||
429 | 316 | print >> f, ' %s[fontcolor=white];' % (obj_node_id(target)) | ||
430 | 317 | if hasattr(target, '__del__'): | ||
431 | 318 | print >> f, " %s->%s_has_a_del[color=red,style=dotted,len=0.25,weight=10];" % (obj_node_id(target), obj_node_id(target)) | ||
432 | 319 | print >> f, ' %s_has_a_del[label="__del__",shape=doublecircle,height=0.25,color=red,fillcolor="0,.5,1",fontsize=6];' % (obj_node_id(target)) | ||
433 | 320 | if inspect.ismodule(target) or tdepth >= max_depth: | ||
434 | 321 | continue | ||
435 | 322 | neighbours = edge_func(target) | ||
436 | 323 | ignore.add(id(neighbours)) | ||
437 | 324 | n = 0 | ||
438 | 325 | for source in neighbours: | ||
439 | 326 | if inspect.isframe(source) or id(source) in ignore: | ||
440 | 327 | continue | ||
441 | 328 | if filter and not filter(source): | ||
442 | 329 | continue | ||
443 | 330 | if swap_source_target: | ||
444 | 331 | srcnode, tgtnode = target, source | ||
445 | 332 | else: | ||
446 | 333 | srcnode, tgtnode = source, target | ||
447 | 334 | elabel = edge_label(srcnode, tgtnode) | ||
448 | 335 | print >> f, ' %s -> %s%s;' % (obj_node_id(srcnode), obj_node_id(tgtnode), elabel) | ||
449 | 336 | if id(source) not in depth: | ||
450 | 337 | depth[id(source)] = tdepth + 1 | ||
451 | 338 | queue.append(source) | ||
452 | 339 | n += 1 | ||
453 | 340 | if n >= too_many: | ||
454 | 341 | print >> f, ' %s[color=red];' % obj_node_id(target) | ||
455 | 342 | break | ||
456 | 343 | print >> f, "}" | ||
457 | 344 | f.close() | ||
458 | 345 | print "Graph written to objects.dot (%d nodes)" % nodes | ||
459 | 346 | if os.system('which xdot >/dev/null') == 0: | ||
460 | 347 | print "Spawning graph viewer (xdot)" | ||
461 | 348 | os.system("xdot objects.dot &") | ||
462 | 349 | else: | ||
463 | 350 | os.system("dot -Tpng objects.dot > objects.png") | ||
464 | 351 | print "Image generated as objects.png" | ||
465 | 352 | |||
466 | 353 | |||
467 | 354 | def obj_node_id(obj): | ||
468 | 355 | if isinstance(obj, weakref.ref): | ||
469 | 356 | return 'all_weakrefs_are_one' | ||
470 | 357 | return ('o%d' % id(obj)).replace('-', '_') | ||
471 | 358 | |||
472 | 359 | |||
473 | 360 | def obj_label(obj, depth): | ||
474 | 361 | return quote(type(obj).__name__ + ':\n' + | ||
475 | 362 | safe_repr(obj)) | ||
476 | 363 | |||
477 | 364 | |||
478 | 365 | def quote(s): | ||
479 | 366 | return s.replace("\\", "\\\\").replace("\"", "\\\"").replace("\n", "\\n") | ||
480 | 367 | |||
481 | 368 | |||
482 | 369 | def safe_repr(obj): | ||
483 | 370 | try: | ||
484 | 371 | return short_repr(obj) | ||
485 | 372 | except: | ||
486 | 373 | return '(unrepresentable)' | ||
487 | 374 | |||
488 | 375 | |||
489 | 376 | def short_repr(obj): | ||
490 | 377 | if isinstance(obj, (type, types.ModuleType, types.BuiltinMethodType, | ||
491 | 378 | types.BuiltinFunctionType)): | ||
492 | 379 | return obj.__name__ | ||
493 | 380 | if isinstance(obj, types.MethodType): | ||
494 | 381 | if obj.im_self is not None: | ||
495 | 382 | return obj.im_func.__name__ + ' (bound)' | ||
496 | 383 | else: | ||
497 | 384 | return obj.im_func.__name__ | ||
498 | 385 | if isinstance(obj, (tuple, list, dict, set)): | ||
499 | 386 | return '%d items' % len(obj) | ||
500 | 387 | if isinstance(obj, weakref.ref): | ||
501 | 388 | return 'all_weakrefs_are_one' | ||
502 | 389 | return repr(obj)[:40] | ||
503 | 390 | |||
504 | 391 | |||
505 | 392 | def gradient(start_color, end_color, depth, max_depth): | ||
506 | 393 | if max_depth == 0: | ||
507 | 394 | # avoid division by zero | ||
508 | 395 | return start_color | ||
509 | 396 | h1, s1, v1 = start_color | ||
510 | 397 | h2, s2, v2 = end_color | ||
511 | 398 | f = float(depth) / max_depth | ||
512 | 399 | h = h1 * (1-f) + h2 * f | ||
513 | 400 | s = s1 * (1-f) + s2 * f | ||
514 | 401 | v = v1 * (1-f) + v2 * f | ||
515 | 402 | return h, s, v | ||
516 | 403 | |||
517 | 404 | |||
518 | 405 | def edge_label(source, target): | ||
519 | 406 | if isinstance(target, dict) and target is getattr(source, '__dict__', None): | ||
520 | 407 | return ' [label="__dict__",weight=10]' | ||
521 | 408 | elif isinstance(source, dict): | ||
522 | 409 | for k, v in source.iteritems(): | ||
523 | 410 | if v is target: | ||
524 | 411 | if isinstance(k, basestring) and k: | ||
525 | 412 | return ' [label="%s",weight=2]' % quote(k) | ||
526 | 413 | else: | ||
527 | 414 | return ' [label="%s"]' % quote(safe_repr(k)) | ||
528 | 415 | return '' | ||
529 | 416 | |||
530 | 0 | 417 | ||
531 | === added file 'scripts/ibid-memgraph' | |||
532 | --- scripts/ibid-memgraph 1970-01-01 00:00:00 +0000 | |||
533 | +++ scripts/ibid-memgraph 2009-06-27 22:50:48 +0000 | |||
534 | @@ -0,0 +1,68 @@ | |||
535 | 1 | #!/usr/bin/env python | ||
536 | 2 | |||
537 | 3 | import gzip | ||
538 | 4 | import optparse | ||
539 | 5 | import sys | ||
540 | 6 | |||
541 | 7 | import dateutil | ||
542 | 8 | import matplotlib.pyplot as pyplot | ||
543 | 9 | from matplotlib.dates import date2num | ||
544 | 10 | import numpy | ||
545 | 11 | |||
546 | 12 | parser = optparse.OptionParser(usage="""%prog logfile | ||
547 | 13 | logfile is a memory log file (possibly gzipped)""") | ||
548 | 14 | parser.add_option('-o', '--output', dest='output', metavar='FILE', | ||
549 | 15 | help='Output to filename rather than interactive') | ||
550 | 16 | parser.add_option('-d', '--dpi', dest='dpi', | ||
551 | 17 | help='Output DPI') | ||
552 | 18 | |||
553 | 19 | (options, args) = parser.parse_args() | ||
554 | 20 | |||
555 | 21 | if len(args) != 1: | ||
556 | 22 | sys.stderr.write("Log file required\n") | ||
557 | 23 | sys.exit(2) | ||
558 | 24 | |||
559 | 25 | f = args[0].endswith('.gz') and gzip.GzipFile(args[0], 'r') or file(args[0], 'r') | ||
560 | 26 | header = f.readline().strip() | ||
561 | 27 | f.close() | ||
562 | 28 | if not header.startswith('Ibid Memory Log v2: '): | ||
563 | 29 | sys.stderr.write("Incorrect file format\n") | ||
564 | 30 | sys.exit(1) | ||
565 | 31 | |||
566 | 32 | botname = header.split(':', 1)[1].strip() | ||
567 | 33 | |||
568 | 34 | data = numpy.loadtxt(args[0], | ||
569 | 35 | dtype=float, | ||
570 | 36 | delimiter=',', | ||
571 | 37 | skiprows=1, | ||
572 | 38 | converters={0: lambda x: date2num(dateutil.parser.parse(x))}, | ||
573 | 39 | ) | ||
574 | 40 | |||
575 | 41 | fig = pyplot.figure() | ||
576 | 42 | ax_obj = fig.add_subplot(111) | ||
577 | 43 | ax_obj.set_xlabel('time (s)') | ||
578 | 44 | ax_mem = ax_obj.twinx() | ||
579 | 45 | ax_mem.grid(True) | ||
580 | 46 | |||
581 | 47 | ax_obj.plot_date(data[:,0], data[:,1]/1000, 'b-', label='Objects (k)') | ||
582 | 48 | ax_obj.set_ylabel('Objects (k)', color='b') | ||
583 | 49 | |||
584 | 50 | for tl in ax_obj.get_yticklabels(): | ||
585 | 51 | tl.set_color('b') | ||
586 | 52 | |||
587 | 53 | ax_mem.plot_date(data[:,0], data[:,2]/1024, 'r-', label='VM Size') | ||
588 | 54 | ax_mem.plot_date(data[:,0], data[:,3]/1024, 'g-', label='VM RSS') | ||
589 | 55 | |||
590 | 56 | ax_mem.set_ylabel('Memory (MiB)') | ||
591 | 57 | |||
592 | 58 | pyplot.legend(loc='best') | ||
593 | 59 | pyplot.title(botname + ' Memory Usage') | ||
594 | 60 | |||
595 | 61 | fig.autofmt_xdate() | ||
596 | 62 | |||
597 | 63 | if options.output: | ||
598 | 64 | pyplot.savefig(options.output, dpi=options.dpi) | ||
599 | 65 | else: | ||
600 | 66 | pyplot.show() | ||
601 | 67 | |||
602 | 68 | # vi: set et sta sw=4 ts=4: | ||
603 | 0 | 69 | ||
604 | === added file 'scripts/ibid-objgraph' | |||
605 | --- scripts/ibid-objgraph 1970-01-01 00:00:00 +0000 | |||
606 | +++ scripts/ibid-objgraph 2009-06-27 22:50:48 +0000 | |||
607 | @@ -0,0 +1,77 @@ | |||
608 | 1 | #!/usr/bin/env python | ||
609 | 2 | |||
610 | 3 | import gzip | ||
611 | 4 | import optparse | ||
612 | 5 | import sys | ||
613 | 6 | |||
614 | 7 | import dateutil | ||
615 | 8 | import simplejson | ||
616 | 9 | |||
617 | 10 | import matplotlib.pyplot as pyplot | ||
618 | 11 | from matplotlib.dates import date2num | ||
619 | 12 | import numpy | ||
620 | 13 | |||
621 | 14 | parser = optparse.OptionParser(usage="""%prog [arguments] logfile types... | ||
622 | 15 | logfile is an object log file (possibly gzipped) | ||
623 | 16 | types are a list of object types to graph""") | ||
624 | 17 | parser.add_option('-o', '--output', dest='output', metavar='FILE', | ||
625 | 18 | help='Output to filename rather than interactive') | ||
626 | 19 | parser.add_option('-d', '--dpi', dest='dpi', | ||
627 | 20 | help='Output DPI') | ||
628 | 21 | |||
629 | 22 | (options, args) = parser.parse_args() | ||
630 | 23 | |||
631 | 24 | if len(args) < 1: | ||
632 | 25 | sys.stderr.write("Log file required\n") | ||
633 | 26 | sys.exit(2) | ||
634 | 27 | if len(args) < 2: | ||
635 | 28 | sys.stderr.write("At least one type required\n") | ||
636 | 29 | sys.exit(2) | ||
637 | 30 | |||
638 | 31 | f = args[0].endswith('.gz') and gzip.GzipFile(args[0], 'r') or file(args[0], 'r') | ||
639 | 32 | header = f.readline().strip() | ||
640 | 33 | |||
641 | 34 | if not header.startswith('Ibid Object Log v1: '): | ||
642 | 35 | sys.stderr.write("Incorrect file format\n") | ||
643 | 36 | sys.exit(1) | ||
644 | 37 | |||
645 | 38 | botname = header.split(':', 1)[1].strip() | ||
646 | 39 | |||
647 | 40 | types = args[1:] | ||
648 | 41 | |||
649 | 42 | times = [] | ||
650 | 43 | data = [] | ||
651 | 44 | |||
652 | 45 | for line in f: | ||
653 | 46 | timestamp, json = line.split(' ', 1) | ||
654 | 47 | |||
655 | 48 | times.append(date2num(dateutil.parser.parse(timestamp))) | ||
656 | 49 | |||
657 | 50 | json = simplejson.loads(json) | ||
658 | 51 | data.append([json.get(type, 0) for type in types]) | ||
659 | 52 | |||
660 | 53 | times = numpy.array(times, dtype=float) | ||
661 | 54 | data = numpy.array(data, dtype=int) | ||
662 | 55 | |||
663 | 56 | fig = pyplot.figure() | ||
664 | 57 | ax = fig.add_subplot(111) | ||
665 | 58 | ax.set_xlabel('time (s)') | ||
666 | 59 | ax.set_ylabel('Objects (k)', color='b') | ||
667 | 60 | ax.grid(True) | ||
668 | 61 | |||
669 | 62 | ax.set_color_cycle(list('brgycmk')) | ||
670 | 63 | |||
671 | 64 | for i, type in enumerate(types): | ||
672 | 65 | ax.plot_date(times, data[:,i], '-', label=type) | ||
673 | 66 | |||
674 | 67 | pyplot.legend(loc='best') | ||
675 | 68 | pyplot.title(botname + ' Object Stats') | ||
676 | 69 | |||
677 | 70 | fig.autofmt_xdate() | ||
678 | 71 | |||
679 | 72 | if options.output: | ||
680 | 73 | pyplot.savefig(options.output, dpi=options.dpi) | ||
681 | 74 | else: | ||
682 | 75 | pyplot.show() | ||
683 | 76 | |||
684 | 77 | # vi: set et sta sw=4 ts=4: |
Some of this may not be useful. Do we want this kind of thing in the mainline?