bvk1ng committed on
Commit 571eb00 · Parent: 140db7a

Initial Commit

Files changed (2)
  1. app.py +83 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,83 @@
import numpy as np
import gradio as gr
import matplotlib.pyplot as plt

from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score

plt.switch_backend("agg")


def true_fn(X):
    return np.cos(1.5 * np.pi * X)

def modelData(n_samples: int, degree: int, cv: int) -> "plt.Figure":
    """
    Demonstrates the principle of overfitting vs underfitting by modeling a
    dataset with polynomial features and Linear Regression.

    :param n_samples: the number of samples to generate for the dataset.
    :param degree: the degree of the polynomial features.
    :param cv: the number of cross-validation folds.

    :returns: the matplotlib figure
    """

    X = np.sort(np.random.rand(n_samples))
    y = true_fn(X) + np.random.randn(n_samples) * .1

    fig, ax = plt.subplots(1, 1, figsize=(24, 15))

    poly_feats = PolynomialFeatures(degree=degree, include_bias=False)
    model = LinearRegression()

    pipeline = Pipeline([
        ("polynomial_feats", poly_feats),
        ("lr", model)
    ])

    pipeline.fit(X[:, np.newaxis], y)
    scores = cross_val_score(
        pipeline, X[:, np.newaxis], y, scoring="neg_mean_squared_error", cv=cv
    )

    X_test = np.linspace(0, 1, 1000)

    ax.plot(X_test, pipeline.predict(X_test[:, np.newaxis]), "--", linewidth=2.5, color="#C73E1D", label="Model")
    ax.plot(X_test, true_fn(X_test), linewidth=2.5, color="#2E86AB", label="True function")
    ax.scatter(X, y, s=20, alpha=.75, edgecolors="#3B1F2B", label="Samples")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.set_xticks(())
    ax.set_yticks(())
    ax.set_xlim((0, 1))
    ax.set_ylim((-2, 2))
    ax.legend(loc="best")
    ax.set_title(f"Degree: {degree}\nMSE: {-scores.mean():.2e} (+/- {scores.std():.2e})")

    return fig

with gr.Blocks() as demo:

    gr.Markdown("""
    # Underfitting vs Overfitting

    This space is a re-implementation of the original scikit-learn example [Underfitting vs Overfitting](https://scikit-learn.org/stable/auto_examples/model_selection/plot_underfitting_overfitting.html#sphx-glr-auto-examples-model-selection-plot-underfitting-overfitting-py).
    In this space you can vary the sliders to see what an **underfitted** model looks like and what an **overfitted** model looks like.
    If you want more details, you can always head over to the scikit-learn documentation linked above.

    Have fun with the tool 🤗
    """)

    n_samples = gr.Slider(30, 10_000, label="n_samples", info="number of samples", step=1, value=100)
    degree = gr.Slider(1, 20, label="degree", info="the polynomial features degree", step=1, value=4)
    cv = gr.Slider(2, 10, label="cv", info="number of cross-validation folds", step=1, value=5)

    output = gr.Plot(label="Plot")

    btn = gr.Button("Show")
    btn.click(fn=modelData, inputs=[n_samples, degree, cv], outputs=output, api_name="overfitunderfit")
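To make the underfitting vs overfitting effect described above concrete, here is a small standalone sketch (not part of the commit) that reuses the same pipeline pieces and compares the cross-validated MSE at a few degrees; the exact numbers will vary with the random data.

import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.model_selection import cross_val_score

# Same synthetic data as the app: noisy samples of cos(1.5 * pi * x).
rng = np.random.RandomState(0)
X = np.sort(rng.rand(30))
y = np.cos(1.5 * np.pi * X) + rng.randn(30) * 0.1

for degree in (1, 4, 15):  # underfit, reasonable fit, overfit
    pipeline = Pipeline([
        ("polynomial_feats", PolynomialFeatures(degree=degree, include_bias=False)),
        ("lr", LinearRegression()),
    ])
    scores = cross_val_score(
        pipeline, X[:, np.newaxis], y, scoring="neg_mean_squared_error", cv=5
    )
    print(f"degree={degree:2d}  CV MSE={-scores.mean():.2e} (+/- {scores.std():.2e})")

Degree 1 typically shows a large error (underfitting), degree 4 a small one, and degree 15 a large, high-variance error again (overfitting), which is exactly what the degree slider lets you explore visually.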
requirements.txt ADDED
@@ -0,0 +1,5 @@
scikit-learn
scipy
numpy
pandas
matplotlib
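With these dependencies installed (for example via `pip install -r requirements.txt`), a minimal way to try the demo outside of Spaces could look like the sketch below. It assumes the file from this commit is saved as app.py next to the script; the output filename is hypothetical. Note that on Hugging Face Spaces the Gradio runtime launches `demo` automatically, which is why app.py itself does not call demo.launch().

# Minimal local-run sketch (assumption: the committed file is saved as app.py
# in the working directory).
from app import modelData, demo

# Render one figure directly: 100 samples, a degree-4 polynomial, 5-fold CV.
fig = modelData(n_samples=100, degree=4, cv=5)
fig.savefig("underfit_overfit.png")  # hypothetical output filename

# Serve the interactive UI locally.
demo.launch()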