# Code source: Gaël Varoquaux
# License: BSD 3 clause

# This example has been modified to include a Gradio demo
import numpy as np
import plotly.graph_objects as go

from sklearn import decomposition
from sklearn import datasets

import gradio as gr

np.random.seed(5)

## PCA
def PCA_Pred(x1, x2, x3, x4):
    # Load the iris dataset:
    iris = datasets.load_iris()
    X = iris.data
    Y = iris.target
    label_data = [("Setosa", 0), ("Versicolour", 1), ("Virginica", 2)]

    # Create the PCA model with 3 principal components:
    pca = decomposition.PCA(n_components=3)
    
    # Fit the model and project the iris data onto the three principal components:
    pca.fit(X)
    X = pca.transform(X)

    # Create a 3D scatter figure with plotly
    fig = go.Figure()

    for name, label in label_data:
        fig.add_trace(go.Scatter3d(
            x=X[Y == label, 0],
            y=X[Y == label, 1],
            z=X[Y == label, 2],
            mode='markers',
            marker=dict(
                size=8,
                color=label,            
                colorscale='Viridis',   
                opacity=0.8),
            name=name
            ))
    
    user_iris_data = np.array([[x1, x2, x3, x4]])  # shape (1, 4): one sample, four features

    # Project the user's sample into the same PCA space
    pc_output = pca.transform(user_iris_data)
    fig.add_traces([go.Scatter3d(
            x=[pc_output[0, 0]],
            y=[pc_output[0, 1]],
            z=[pc_output[0, 2]],
            mode='markers',
            marker=dict(
                size=12,
                color=4,                # distinct color for the user point
                colorscale='Viridis',   # choose a colorscale
                opacity=0.8),
            name="User data"
            )])
    fig.update_layout(scene = dict(
                            xaxis_title="1st PCA Axis",
                            yaxis_title="2nd PCA Axis",
                            zaxis_title="3rd PCA Axis"),
                     legend_title="Species"
                    )

    return [pc_output, fig]
    
title = "PCA example with Iris Dataset 🌺"
with gr.Blocks(title=title) as demo:
    gr.Markdown(f"## {title}")
    gr.Markdown(
        """
        This app is a demo of PCA decomposition. It takes the four iris measurements shown in the \
        image below as input and returns their projection onto the first three principal components \
        (dimensionality reduction) of a PCA model fitted on the Iris dataset.
        """)
    html = (
        "<div>"
        "<img src='file/iris_dataset_info.png' alt='Iris dataset measurements'>"
        "</div>"
        )
    gr.HTML(html)
    with gr.Row():
        with gr.Column():
            inp1 = gr.Slider(0, 7, value=1, step=0.1, label="Sepal Length (cm)")
            inp2 = gr.Slider(0, 5, value=1, step=0.1, label="Sepal Width (cm)")
            inp3 = gr.Slider(0, 7, value=1, step=0.1, label="Petal Length (cm)")
            inp4 = gr.Slider(0, 5, value=1, step=0.1, label="Petal Width (cm)")
            output = gr.Textbox(label="PCA Axes")
        with gr.Column():
            plot = gr.Plot(label="PCA 3D Space")

    Reduction = gr.Button("PCA Transform")
    Reduction.click(fn=PCA_Pred, inputs=[inp1, inp2, inp3, inp4], outputs=[output, plot])
    demo.load(fn=PCA_Pred, inputs=[inp1, inp2, inp3, inp4], outputs=[output, plot])

demo.launch()
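
# Illustrative usage sketch (not part of the original demo): PCA_Pred can also be
# called directly, e.g. for a quick sanity check outside the Gradio UI. It returns
# the 1x3 array of principal-component coordinates and the plotly figure:
#
#     coords, figure = PCA_Pred(5.1, 3.5, 1.4, 0.2)   # a typical Setosa-like sample
#     print(coords)      # projected coordinates on the three PCA axes
#     figure.show()      # open the 3D scatter plot in a browser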