Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit 23c042e

Browse files
amueller authored and GaelVaroquaux committed
ENH added random_state to Gaussian Process
1 parent e75dbc9 commit 23c042e

File tree

2 files changed

+11
-4
lines changed

2 files changed

+11
-4
lines changed

doc/modules/gaussian_process.rst

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -76,7 +76,7 @@ parameters or alternatively it uses the given parameters.
7676
>>> gp.fit(X, y) # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
7777
GaussianProcess(beta0=None, corr=<function squared_exponential at 0x...>,
7878
normalize=True, nugget=array(2.22...-15),
79-
optimizer='fmin_cobyla', random_start=1,
79+
optimizer='fmin_cobyla', random_start=1, random_state=...
8080
regr=<function constant at 0x...>, storage_mode='full',
8181
theta0=array([[ 0.01]]), thetaL=array([[ 0.0001]]),
8282
thetaU=array([[ 0.1]]), verbose=False)

sklearn/gaussian_process/gaussian_process.py

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010

1111
from ..base import BaseEstimator, RegressorMixin
1212
from ..metrics.pairwise import manhattan_distances
13-
from ..utils import array2d
13+
from ..utils import array2d, check_random_state
1414
from . import regression_models as regression
1515
from . import correlation_models as correlation
1616

@@ -163,6 +163,11 @@ class GaussianProcess(BaseEstimator, RegressorMixin):
163163
exponential distribution (log-uniform on [thetaL, thetaU]).
164164
Default does not use random starting point (random_start = 1).
165165
166+
random_state: integer or numpy.RandomState, optional
167+
The generator used to shuffle the sequence of coordinates of theta in
168+
the Welch optimizer. If an integer is given, it fixes the seed.
169+
Defaults to the global numpy random number generator.
170+
166171
Examples
167172
--------
168173
>>> import numpy as np
@@ -212,7 +217,7 @@ def __init__(self, regr='constant', corr='squared_exponential', beta0=None,
212217
storage_mode='full', verbose=False, theta0=1e-1,
213218
thetaL=None, thetaU=None, optimizer='fmin_cobyla',
214219
random_start=1, normalize=True,
215-
nugget=10. * MACHINE_EPSILON):
220+
nugget=10. * MACHINE_EPSILON, random_state=None):
216221

217222
self.regr = regr
218223
self.corr = corr
@@ -226,6 +231,7 @@ def __init__(self, regr='constant', corr='squared_exponential', beta0=None,
226231
self.nugget = nugget
227232
self.optimizer = optimizer
228233
self.random_start = random_start
234+
self.random_state = random_state
229235

230236
# Run input checks
231237
self._check_params()
@@ -250,6 +256,7 @@ def fit(self, X, y):
250256
A fitted Gaussian Process model object awaiting data to perform
251257
predictions.
252258
"""
259+
self.random_state = check_random_state(self.random_state)
253260

254261
# Force data to 2D numpy.array
255262
X = array2d(np.asarray(X))
@@ -748,7 +755,7 @@ def minus_reduced_likelihood_function(log10t):
748755
# Iterate over all dimensions of theta allowing for anisotropy
749756
if verbose:
750757
print("Now improving allowing for anisotropy...")
751-
for i in np.random.permutation(range(theta0.size)):
758+
for i in self.random_state.shuffle(range(theta0.size)):
752759
if verbose:
753760
print "Proceeding along dimension %d..." % (i + 1)
754761
self.theta0 = array2d(theta_iso)

0 commit comments

Comments
 (0)