# HASIL ANALISIS (analysis results)
# Load one sample image and shrink it to 10% of its original size.
# os.path.join keeps the path portable (the original hard-coded Windows
# backslashes).
data_path = os.path.join('datasheet', 'data_training', '1_apel_matang',
                         'train.MATANG_09.jpg')
data = cv2.imread(data_path)
# cv2.imread returns None instead of raising when the file is missing or
# unreadable; fail loudly here rather than with AttributeError on .shape below.
if data is None:
    raise FileNotFoundError(f"Could not read image: {data_path}")
scale_percent = 10  # percent of original size
width = int(data.shape[1] * scale_percent / 100)
height = int(data.shape[0] * scale_percent / 100)
dim = (width, height)
# INTER_AREA is the recommended interpolation for downscaling.
image = cv2.resize(data, dim, interpolation=cv2.INTER_AREA)
# Extract texture/intensity features for every image in the scan folder and
# assemble them into a feature matrix X and a label vector Y.
# (The original wrapped this in `while True:` with an unindented body — a
# syntax error that, if indented, would loop forever; the loop is removed.)
path_ds = "datasheet/cvt"  # NOTE(review): unused here — presumably used later in the file
final = []
# NOTE(review): INPUT_SCAN_FOLDER must be defined earlier in the file.
for image_name in os.listdir(INPUT_SCAN_FOLDER):
    abc = cv2.imread(os.path.join(INPUT_SCAN_FOLDER, image_name))
    if abc is None:
        # Skip non-image files / unreadable entries instead of crashing.
        continue
    # The original referenced `gray_image` without ever assigning it; the
    # obvious missing step is a grayscale conversion of the loaded image.
    gray_image = cv2.cvtColor(abc, cv2.COLOR_BGR2GRAY)
    val_mean = np.mean(gray_image)
    val_std = np.std(gray_image)
    # NOTE(review): mean/std is not true skewness (third standardized moment);
    # kept as-is to preserve the original feature definition.
    val_skewness = val_mean / val_std
    # NOTE(review): `entropy` is assumed to be a helper/import defined
    # elsewhere in this file that returns a scalar — confirm.
    val_entropy = entropy(gray_image)
    # Gray-level co-occurrence matrix at distance 2, angle 0.
    glcm_matrix = graycomatrix(gray_image, distances=[2], angles=[0])
    # graycoprops returns a (len(distances), len(angles)) array; take the
    # single scalar explicitly instead of relying on implicit coercion.
    val_energy = graycoprops(glcm_matrix, prop='energy')[0, 0]
    val_homogeneity = graycoprops(glcm_matrix, prop='homogeneity')[0, 0]
    # NOTE(review): `labell` (class label) must be defined earlier — confirm.
    # Build the feature row directly; the original filled a scratch
    # `properties` array and then copied it element-by-element.
    features = np.array([val_mean, val_std, val_skewness,
                         val_entropy, val_energy, val_homogeneity,
                         labell])
    final.append(features)

df = pd.DataFrame(final)
Y = df[6]                   # column 6 holds the class label
X = df.drop([6], axis=1)    # remaining columns are the features