Remove old normalising code from plt.hist #9121

Merged · 4 commits · Oct 19, 2017
Changes from 1 commit
Clear up variable naming
dstansby committed Aug 29, 2017
commit 02cfe85fe7cccc6a9694f5455657e76eb0ca02a5
lib/matplotlib/axes/_axes.py: 28 changes (16 additions, 12 deletions)
@@ -6080,8 +6080,10 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
         else:
             hist_kwargs = dict(range=bin_range)
 
-        n = []
+        # List to store all the top coordinates of the histograms
+        tops = []
         mlast = None
+        # Loop through datasets
         for i in xrange(nx):
             # this will automatically overwrite bins,
             # so that each histogram uses the same bins
@@ -6097,21 +6099,23 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
                     mlast = np.zeros(len(bins)-1, m.dtype)
                 m += mlast
                 mlast[:] = m
-            n.append(m)
+            tops.append(m)
 
+        # If a stacked density plot, normalize so the area of all the stacked
+        # histograms together is 1
         if stacked and density:
             db = np.diff(bins)
-            for m in n:
-                m[:] = (m.astype(float) / db) / n[-1].sum()
+            for m in tops:
+                m[:] = (m.astype(float) / db) / tops[-1].sum()
         if cumulative:
             slc = slice(None)
             if cbook.is_numlike(cumulative) and cumulative < 0:
                 slc = slice(None, None, -1)
 
             if density:
-                n = [(m * np.diff(bins))[slc].cumsum()[slc] for m in n]
+                tops = [(m * np.diff(bins))[slc].cumsum()[slc] for m in tops]
             else:
-                n = [m[slc].cumsum()[slc] for m in n]
+                tops = [m[slc].cumsum()[slc] for m in tops]
 
         patches = []
 
@@ -6129,7 +6133,7 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
 
             if rwidth is not None:
                 dr = np.clip(rwidth, 0, 1)
-            elif (len(n) > 1 and
+            elif (len(tops) > 1 and
                   ((not stacked) or rcParams['_internal.classic_mode'])):
                 dr = 0.8
             else:
@@ -6155,7 +6159,7 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
                 _barfunc = self.bar
                 bottom_kwarg = 'bottom'
 
-            for m, c in zip(n, color):
+            for m, c in zip(tops, color):
                 if bottom is None:
                     bottom = np.zeros(len(m))
                 if stacked:
@@ -6199,7 +6203,7 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
                     # For data that is normed to form a probability density,
                     # set to minimum data value / logbase
                     # (gives 1 full tick-label unit for the lowest filled bin)
-                    ndata = np.array(n)
+                    ndata = np.array(tops)
                     minimum = (np.min(ndata[ndata > 0])) / logbase
                 else:
                     # For non-normed (density = False) data,
@@ -6222,7 +6226,7 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
             fill = (histtype == 'stepfilled')
 
             xvals, yvals = [], []
-            for m in n:
+            for m in tops:
                 if stacked:
                     # starting point for drawing polygon
                     y[0] = y[1]
@@ -6285,9 +6289,9 @@ def hist(self, x, bins=None, range=None, density=None, weights=None,
                     p.set_label('_nolegend_')
 
         if nx == 1:
-            return n[0], bins, cbook.silent_list('Patch', patches[0])
+            return tops[0], bins, cbook.silent_list('Patch', patches[0])
         else:
-            return n, bins, cbook.silent_list('Lists of Patches', patches)
+            return tops, bins, cbook.silent_list('Lists of Patches', patches)
 
     @_preprocess_data(replace_names=["x", "y", "weights"], label_namer=None)
     def hist2d(self, x, y, bins=10, range=None, normed=False, weights=None,
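For reference (not part of the diff), a minimal sketch of the behaviour exercised by the stacked/density and cumulative branches above, assuming a matplotlib version whose `hist` accepts the `density` keyword shown in the signature; the sample data and bin count are arbitrary:

```python
# Illustrative sketch only: checks the normalization done by the
# "stacked and density" branch and the cumulative branch above.
import numpy as np
import matplotlib.pyplot as plt

np.random.seed(0)
data = [np.random.normal(0, 1, 1000), np.random.normal(3, 1, 500)]

# Stacked density: the topmost returned array times the bin widths sums to ~1,
# i.e. the combined stacked area integrates to 1.
tops, bins, _ = plt.hist(data, bins=30, stacked=True, density=True)
print(np.sum(tops[-1] * np.diff(bins)))  # ~1.0

# Cumulative stacked density: the topmost curve ends at ~1.
tops_c, bins_c, _ = plt.hist(data, bins=30, stacked=True, density=True,
                             cumulative=True)
print(tops_c[-1][-1])  # ~1.0
```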