-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathexample_rice.py
More file actions
102 lines (86 loc) · 3.47 KB
/
example_rice.py
File metadata and controls
102 lines (86 loc) · 3.47 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
import numpy as np
from scipy.spatial.distance import euclidean
from scipy.spatial.distance import mahalanobis
from diffusionmap import DiffusionMap
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sklearn.manifold import SpectralEmbedding
import matplotlib.pyplot as plt
import matplotlib.cm as cm
def _kmeans_image(embedding, out_path, n_clusters=3, shape=(93, 56)):
    """Cluster embedding coordinates with k-means and save the label image.

    Parameters
    ----------
    embedding : array-like of shape (n_pixels, n_coords)
        Diffusion-map (or other) embedding, one row per image pixel.
    out_path : str
        Filename for the saved PNG.
    n_clusters : int, optional
        Number of k-means clusters (default 3).
    shape : tuple of int, optional
        (rows, cols) used to reshape the flat label vector into an image;
        must satisfy rows * cols == n_pixels (default (93, 56)).

    Returns
    -------
    numpy.ndarray
        Flat array of cluster labels, one per pixel.
    """
    kmeans = KMeans(n_clusters=n_clusters, n_init=100)
    kmeans.fit(embedding)
    labels = kmeans.predict(embedding)
    plt.imshow(labels.reshape(shape), origin='lower')
    plt.axis('off')
    plt.savefig(out_path, bbox_inches='tight')
    plt.show()
    return labels


if __name__ == '__main__':
    # Load data: one row per pixel of a 93x56 image, columns are samples/bands.
    # NOTE(review): exact column semantics not visible here — confirm against
    # how rice.txt was generated.
    data = np.loadtxt('rice.txt')

    # Plot mean luminance of the image.
    plt.imshow(data.mean(axis=1).reshape((93, 56)), cmap='gray', origin='lower')
    plt.axis('off')
    plt.savefig('rice_luminance.png', bbox_inches='tight')
    plt.show()

    # 1) Diffusion map clustering based on Euclidean distances (the
    #    DiffusionMap default when no 'distance' kernel param is given).
    e_dm = DiffusionMap(data, kernel_params={'eps': 1e3}, neighbors=500)
    e_w, e_v = e_dm.map(3, 5)
    e_y = _kmeans_image(e_v, 'rice_euclidean.png')

    # 2) Diffusion map clustering based on Mahalanobis distances with a
    #    single covariance estimated from the whole data set.
    inv_cov = np.linalg.inv(np.cov(data, rowvar=False))

    def mdistance(x, y):
        # Mahalanobis distance using the precomputed global inverse covariance.
        return mahalanobis(x, y, VI=inv_cov)

    m_dm = DiffusionMap(data, kernel_params={'eps': 1e6, 'distance': mdistance}, neighbors=500)
    m_w, m_v = m_dm.map(3, 5)
    m_y = _kmeans_image(m_v, 'rice_mahalanobis.png')

    # 3) Diffusion map clustering based on Mahalanobis distances with local
    #    (per-cluster) covariances, handled internally by DiffusionMap.map.
    lm_dm = DiffusionMap(data, kernel_params={'eps': 1e6}, neighbors=500)
    lm_w, lm_v = lm_dm.map(3, 5, local_mahalanobis=True, clusters=10)
    lm_y = _kmeans_image(lm_v, 'rice_local_gmm_mahalanobis.png')

    # # Diffusion map clustering based on Mahalanobis distances with local
    # # covariances with PCA preprocessing (kept for reference, not run).
    # pca = PCA(n_components=25)
    # pca.fit(data)
    # data_pca = pca.transform(data)
    #
    # plm_dm = DiffusionMap(data_pca, kernel_params={'eps': 1}, neighbors=500)
    # plm_w, plm_v = plm_dm.map(3, local_mahalanobis=True, clusters=25)
    # plm_y = _kmeans_image(plm_v, 'rice_pca_local_gmm_mahalanobis.png')

    # # Diffusion map clustering based on Mahalanobis distances with local
    # # covariances with Laplacian eigenmaps preprocessing (kept for reference).
    # le = SpectralEmbedding(n_components=50)
    # data_le = le.fit_transform(data)
    #
    # llm_dm = DiffusionMap(data_le, kernel_params={'eps': 1}, neighbors=500)
    # llm_w, llm_v = llm_dm.map(3, local_mahalanobis=True, clusters=25)
    # llm_y = _kmeans_image(llm_v, 'rice_laplacian_eigenmaps_local_gmm_mahalanobis.png')