Skip to content

Commit

Permalink
Merge pull request #35 from xybaby/master
Browse files Browse the repository at this point in the history
Add new function: growth().
  • Loading branch information
mgedmin authored Dec 28, 2017
2 parents d717856 + d3939f2 commit 054187d
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 20 deletions.
2 changes: 1 addition & 1 deletion CHANGES.rst
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Changes
3.2.1 (unreleased)
------------------

- Nothing changed yet.
- New function: :func:`growth`.


3.2.0 (2017-12-20)
Expand Down
66 changes: 47 additions & 19 deletions objgraph.py
Original file line number Diff line number Diff line change
Expand Up @@ -270,9 +270,11 @@ def show_most_common_types(
file.write('%-*s %i\n' % (width, name, count))


def show_growth(limit=10, peak_stats={}, shortnames=True, file=None,
filter=None):
"""Show the increase in peak object counts since last call.
def growth(limit=10, peak_stats={}, shortnames=True, filter=None):
    """Count the increase in peak object counts since the last call.

    Returns a list of ``(type_name, total_count, increase_delta)``
    tuples, sorted in descending order by ``increase_delta``.

    Limits the output to ``limit`` largest deltas.  You may set
    ``limit`` to None to see all of them.

    Uses and updates ``peak_stats``, a dictionary from type names to
    previously seen peak object counts.  Usually you don't need to pay
    attention to this argument.

    NOTE(review): the mutable default for ``peak_stats`` is deliberate
    here — it serves as module-level state shared across calls, so each
    call reports growth since the previous one.  Do not "fix" it.

    The caveats documented in :func:`typestats` apply.

    Example:

        >>> growth(2)
        [(tuple, 12282, 10), (dict, 1922, 7)]

    .. versionadded:: 3.2.1
    """
    # Collect garbage first so freed-but-uncollected objects don't
    # inflate the counts.
    gc.collect()
    stats = typestats(shortnames=shortnames, filter=filter)
    deltas = {}
    for name, count in iteritems(stats):
        old_count = peak_stats.get(name, 0)
        # Only record growth past the previously seen peak; shrinkage
        # is ignored and the recorded peak is never lowered.
        if count > old_count:
            deltas[name] = count - old_count
            peak_stats[name] = count
    deltas = sorted(deltas.items(), key=operator.itemgetter(1),
                    reverse=True)
    if limit:
        deltas = deltas[:limit]

    return [(name, stats[name], delta) for name, delta in deltas]


def show_growth(limit=10, peak_stats=None, shortnames=True, file=None,
                filter=None):
    """Show the increase in peak object counts since the last call.

    If ``peak_stats`` is None, peak object counts are recorded in the
    module-level state kept by :func:`growth`; pass your own dictionary
    as ``peak_stats`` if you want to keep an independent record of the
    counts.

    Output is written to ``file``, which defaults to ``sys.stdout``.
    Nothing is printed when there was no growth.

    The caveats documented in :func:`growth` apply.

    Example:

        >>> show_growth()
        wrapper_descriptor       970       +14
        tuple                  12282       +10
        dict                    1922        +7
        ...

    .. versionchanged:: 3.1.3
        New parameter: ``filter``.
    """
    if peak_stats is None:
        # Rely on growth()'s own (module-level) default peak-stats dict.
        result = growth(limit, shortnames=shortnames, filter=filter)
    else:
        result = growth(limit, peak_stats, shortnames, filter)
    if result:
        if file is None:
            file = sys.stdout
        # Pad the type-name column to the widest name for alignment.
        width = max(len(name) for name, _, _ in result)
        for name, count, delta in result:
            file.write('%-*s%9d %+9d\n' % (width, name, count, delta))


def get_leaking_objects(objects=None):
Expand Down
12 changes: 12 additions & 0 deletions tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -304,6 +304,18 @@ def test_with_filter(self):
self.assertEqual(1, stats['mymodule.MyClass'])


class GrowthTest(GarbageCollectedMixin, unittest.TestCase):
    """Tests for the growth function."""

    def test_growth(self):
        # The first call establishes the baseline peak counts.
        objgraph.growth(limit=None)
        # Create exactly one instance of a brand-new type.
        obj = type('MyClass', (), {'__module__': 'mymodule'})()  # noqa
        records = [r for r in objgraph.growth(limit=None)
                   if r[0] == 'MyClass']
        self.assertEqual(1, len(records))
        # The delta (third element) must report the single new object.
        self.assertEqual(1, records[0][2])


class ByTypeTest(GarbageCollectedMixin, unittest.TestCase):
"""Tests for the by_test function."""

Expand Down

0 comments on commit 054187d

Please sign in to comment.