Error "RGBA should have length 3 or 4" shows when trying to visualize SOM using matplotlib


I'm doing some identification with a self-organizing map (SOM) in Python. I load a raster file with GDAL and convert it to a NumPy array. When I try to visualize the trained map with matplotlib, it raises the error quoted in the title.
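
As far as I can tell, matplotlib colours must be RGB triplets (length 3) or RGBA 4-tuples (length 4); here is a minimal sketch of my own that reproduces the same ValueError:

import numpy as np
from matplotlib import colors

colors.to_rgba(np.random.random(3))    #fine: interpreted as an RGB triplet
colors.to_rgba(np.random.random(274))  #ValueError: RGBA sequence should have length 3 or 4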

And here is the Python code:

from __future__ import division
import gdal

import numpy as np
import matplotlib.pyplot as plt 
from matplotlib.patches import Rectangle


fn = "D:/kuliah/bismillahsatu/djengkol/ndwigabung.tif"
ds = gdal.Open(fn)

band1 = ds.GetRasterBand(1).ReadAsArray()

#print(band1.shape)
#print(band1)

network_dimensions = np.array([5,5])
n_iterations = 10000
init_learning_rate = 0.01

normalise_data = True

#if False, normalise the entire dataset at once (common scale)
#if True, normalise to the [0, 1] range along each column
normalise_by_column = False

#establish size variables based on the data
m = band1.shape[0]  #rows: the dimension of each training sample
n = band1.shape[1]  #columns: the number of training samples

#initial neighborhood radius
init_radius = max(network_dimensions[0], network_dimensions[1]) / 2

#radius decay parameter
time_constant = n_iterations / np.log(init_radius)
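#(with this choice the radius decays from init_radius to ~1 by the final
# iteration, since exp(-n_iterations/time_constant) = 1/init_radius)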

#keep a reference to the raw data for later (note: this is not an actual copy)
data = band1


#check if data needs to be normalised
if normalise_data:
    if normalise_by_column:
        #normalise along each column
        col_maxes = band1.max(axis=0)
        data = band1/col_maxes[np.newaxis, :]
    else:
        #normalise the entire dataset by its global maximum
        data = band1 / band1.max()

net = np.random.random((network_dimensions[0], network_dimensions[1],m))
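#note: each of the 5x5 nodes gets an m-dimensional weight vector, so
#net[x, y, :] has length m (the number of raster rows), not 3 or 4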

def find_bmu(t, net, m):
    #return the best matching unit (BMU) and its grid index for sample t
    bmu_idx = np.array([0,0])
    min_dist = np.inf  #np.iinfo(np.int).max breaks on recent NumPy, where np.int was removed
    for x in range(net.shape[0]):
        for y in range(net.shape[1]):
            w = net[x, y, :].reshape(m,1)
            sq_dist = np.sum((w - t) ** 2)
            if sq_dist < min_dist:
                min_dist = sq_dist
                bmu_idx = np.array([x,y])
    bmu = net[bmu_idx[0], bmu_idx[1], :].reshape(m,1)
    return (bmu, bmu_idx)

def decay_radius(initial_radius, i, time_constant):
    return initial_radius * np.exp(-i / time_constant)

def decay_learning_rate(initial_learning_rate,i, n_iterations):
    return initial_learning_rate * np.exp(-i / n_iterations)

def calculate_influence(distance, radius):
    return np.exp(-distance /(2*(radius**2)))
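
#(for reference, the update applied below is w_new = w + L(i)*theta(d, r)*(t - w),
# where L is the decayed learning rate and theta the neighbourhood influence)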

for i in range(n_iterations):

    t = data[:, np.random.randint(0, n)].reshape(m, 1)  #pick a random column as the training sample
    bmu, bmu_idx = find_bmu(t, net, m)

    r = decay_radius(init_radius, i, time_constant)
    l = decay_learning_rate(init_learning_rate, i, n_iterations)

    for x in range(net.shape[0]):
        for y in range(net.shape[1]):
            w = net[x, y, :].reshape(m,1)
            w_dist = np.sum((np.array([x, y]) - bmu_idx) ** 2)
            if w_dist <= r**2:
                influence = calculate_influence(w_dist, r)
                new_w = w + (l * influence * (t-w))
                net[x, y, :] = new_w.reshape(m)  #m is 274 for this raster; hard-coding it breaks other inputs

fig = plt.figure()
ax = fig.add_subplot(111, aspect='equal')
ax.set_xlim((0, net.shape[0]+1))
ax.set_ylim((0, net.shape[1]+1))
ax.set_title('Self-Organising Map after %d iterations' % n_iterations)

for x in range(1, net.shape[0] + 1):
    for y in range(1, net.shape[1] + 1):
        ax.add_patch(Rectangle((x-0.5, y-0.5),1,1,
            facecolor=net[x-1,y-1,:],  #length-m vector, not RGB/RGBA; this line raises the error
            edgecolor='none'))

plt.show()

Is there something wrong with my code? I would appreciate any help. Thanks in advance!
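
For what it's worth, one direction I've considered (just a sketch, assuming a per-node scalar summary is acceptable, since an m-dimensional weight vector cannot be a colour): collapse each weight vector to its mean and map it through a colormap, which yields a valid RGBA 4-tuple:

import matplotlib.cm as cm
from matplotlib.colors import Normalize

#sketch only: summarise each node's weight vector as its mean and
#colour the cell via a colormap (any scalar summary would do)
node_means = net.mean(axis=2)  #shape (5, 5)
norm = Normalize(node_means.min(), node_means.max())

fig, ax = plt.subplots()
ax.set_xlim((0, net.shape[0] + 1))
ax.set_ylim((0, net.shape[1] + 1))
for x in range(1, net.shape[0] + 1):
    for y in range(1, net.shape[1] + 1):
        ax.add_patch(Rectangle((x - 0.5, y - 0.5), 1, 1,
            facecolor=cm.viridis(norm(node_means[x - 1, y - 1])),  #RGBA 4-tuple
            edgecolor='none'))
plt.show()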
