Skip to content

Commit abb0ea2

Browse files
Christian Lorentzen authored and viclafargue committed
DOC avoid FutureWarnings for deprecations examples (scikit-learn#17264)
* MNT keyword only in examples * MNT pandas 1.0.0 deprecation See pandas-dev/pandas#23566 * MNT new keyword in 0.23
1 parent 9737b7e commit abb0ea2

6 files changed

+20
-16
lines changed

examples/applications/plot_stock_market.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -173,7 +173,8 @@
173173
# #############################################################################
174174
# Cluster using affinity propagation
175175

176-
_, labels = cluster.affinity_propagation(edge_model.covariance_)
176+
_, labels = cluster.affinity_propagation(edge_model.covariance_,
177+
random_state=0)
177178
n_labels = labels.max()
178179

179180
for i in range(n_labels + 1):

examples/calibration/plot_calibration.py

+7-5
Original file line numberDiff line numberDiff line change
@@ -65,23 +65,25 @@
6565

6666
# Gaussian Naive-Bayes with isotonic calibration
6767
clf_isotonic = CalibratedClassifierCV(clf, cv=2, method='isotonic')
68-
clf_isotonic.fit(X_train, y_train, sw_train)
68+
clf_isotonic.fit(X_train, y_train, sample_weight=sw_train)
6969
prob_pos_isotonic = clf_isotonic.predict_proba(X_test)[:, 1]
7070

7171
# Gaussian Naive-Bayes with sigmoid calibration
7272
clf_sigmoid = CalibratedClassifierCV(clf, cv=2, method='sigmoid')
73-
clf_sigmoid.fit(X_train, y_train, sw_train)
73+
clf_sigmoid.fit(X_train, y_train, sample_weight=sw_train)
7474
prob_pos_sigmoid = clf_sigmoid.predict_proba(X_test)[:, 1]
7575

7676
print("Brier scores: (the smaller the better)")
7777

78-
clf_score = brier_score_loss(y_test, prob_pos_clf, sw_test)
78+
clf_score = brier_score_loss(y_test, prob_pos_clf, sample_weight=sw_test)
7979
print("No calibration: %1.3f" % clf_score)
8080

81-
clf_isotonic_score = brier_score_loss(y_test, prob_pos_isotonic, sw_test)
81+
clf_isotonic_score = brier_score_loss(y_test, prob_pos_isotonic,
82+
sample_weight=sw_test)
8283
print("With isotonic calibration: %1.3f" % clf_isotonic_score)
8384

84-
clf_sigmoid_score = brier_score_loss(y_test, prob_pos_sigmoid, sw_test)
85+
clf_sigmoid_score = brier_score_loss(y_test, prob_pos_sigmoid,
86+
sample_weight=sw_test)
8587
print("With sigmoid calibration: %1.3f" % clf_sigmoid_score)
8688

8789
# #############################################################################

examples/cluster/plot_ward_structured_vs_unstructured.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
# Generate data (swiss roll dataset)
3838
n_samples = 1500
3939
noise = 0.05
40-
X, _ = make_swiss_roll(n_samples, noise)
40+
X, _ = make_swiss_roll(n_samples, noise=noise)
4141
# Make it thinner
4242
X[:, 1] *= .5
4343

examples/linear_model/plot_sparse_logistic_regression_20newsgroups.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@
4242
# Turn down for faster run time
4343
n_samples = 10000
4444

45-
X, y = fetch_20newsgroups_vectorized('all', return_X_y=True)
45+
X, y = fetch_20newsgroups_vectorized(subset='all', return_X_y=True)
4646
X = X[:n_samples]
4747
y = y[:n_samples]
4848

examples/linear_model/plot_tweedie_regression_insurance_claims.py

+3-3
Original file line numberDiff line numberDiff line change
@@ -115,7 +115,7 @@ def plot_obs_pred(df, feature, weight, observed, predicted, y_label=None,
115115
df_["observed"] = df[observed] * df[weight]
116116
df_["predicted"] = predicted * df[weight]
117117
df_ = (
118-
df_.groupby([feature])[weight, "observed", "predicted"]
118+
df_.groupby([feature])[[weight, "observed", "predicted"]]
119119
.sum()
120120
.assign(observed=lambda x: x["observed"] / x[weight])
121121
.assign(predicted=lambda x: x["predicted"] / x[weight])
@@ -173,9 +173,9 @@ def score_estimator(
173173
if metric is None:
174174
if not hasattr(estimator, "score"):
175175
continue
176-
score = estimator.score(X, y, _weights)
176+
score = estimator.score(X, y, sample_weight=_weights)
177177
else:
178-
score = metric(y, y_pred, _weights)
178+
score = metric(y, y_pred, sample_weight=_weights)
179179

180180
res.append(
181181
{"subset": subset_label, "metric": score_label, "score": score}

examples/manifold/plot_lle_digits.py

+6-5
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,8 @@ def plot_embedding(X, title=None):
126126
# Isomap projection of the digits dataset
127127
print("Computing Isomap projection")
128128
t0 = time()
129-
X_iso = manifold.Isomap(n_neighbors, n_components=2).fit_transform(X)
129+
X_iso = manifold.Isomap(n_neighbors=n_neighbors, n_components=2
130+
).fit_transform(X)
130131
print("Done.")
131132
plot_embedding(X_iso,
132133
"Isomap projection of the digits (time %.2fs)" %
@@ -136,7 +137,7 @@ def plot_embedding(X, title=None):
136137
# ----------------------------------------------------------------------
137138
# Locally linear embedding of the digits dataset
138139
print("Computing LLE embedding")
139-
clf = manifold.LocallyLinearEmbedding(n_neighbors, n_components=2,
140+
clf = manifold.LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2,
140141
method='standard')
141142
t0 = time()
142143
X_lle = clf.fit_transform(X)
@@ -149,7 +150,7 @@ def plot_embedding(X, title=None):
149150
# ----------------------------------------------------------------------
150151
# Modified Locally linear embedding of the digits dataset
151152
print("Computing modified LLE embedding")
152-
clf = manifold.LocallyLinearEmbedding(n_neighbors, n_components=2,
153+
clf = manifold.LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2,
153154
method='modified')
154155
t0 = time()
155156
X_mlle = clf.fit_transform(X)
@@ -162,7 +163,7 @@ def plot_embedding(X, title=None):
162163
# ----------------------------------------------------------------------
163164
# HLLE embedding of the digits dataset
164165
print("Computing Hessian LLE embedding")
165-
clf = manifold.LocallyLinearEmbedding(n_neighbors, n_components=2,
166+
clf = manifold.LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2,
166167
method='hessian')
167168
t0 = time()
168169
X_hlle = clf.fit_transform(X)
@@ -175,7 +176,7 @@ def plot_embedding(X, title=None):
175176
# ----------------------------------------------------------------------
176177
# LTSA embedding of the digits dataset
177178
print("Computing LTSA embedding")
178-
clf = manifold.LocallyLinearEmbedding(n_neighbors, n_components=2,
179+
clf = manifold.LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2,
179180
method='ltsa')
180181
t0 = time()
181182
X_ltsa = clf.fit_transform(X)

0 commit comments

Comments
 (0)