from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import Activation
from keras.optimizers import SGD
from keras.layers import Dense
from keras.utils import np_utils
from imutils import paths
import numpy as np
import argparse
import cv2
import os

def image_to_feature_vector(image, size=(32, 32)):
    # resize the image to a fixed size, then flatten it into a list
    # of raw pixel intensities (32 x 32 x 3 = 3,072 values)
    return cv2.resize(image, size).flatten()

# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-d", "--dataset", required=True,
    help="path to input dataset")
ap.add_argument("-m", "--model", required=True,
    help="path to output model file")
args = vars(ap.parse_args())

# grab the list of image paths in the dataset
print("[INFO] describing images...")
imagePaths = list(paths.list_images(args["dataset"]))

# initialize the data matrix and labels list
data = []
labels = []

# loop over the input images
for (i, imagePath) in enumerate(imagePaths):
    # load the image and extract the class label, assuming the filename
    # has the format {class}.{image_num}.jpg
    image = cv2.imread(imagePath)
    label = imagePath.split(os.path.sep)[-1].split(".")[0]

    # construct a feature vector of raw pixel intensities and update
    # the data matrix and labels list
    features = image_to_feature_vector(image)
    data.append(features)
    labels.append(label)

    # show an update every 1,000 images
    if i > 0 and i % 1000 == 0:
        print("[INFO] processed {}/{}".format(i, len(imagePaths)))

# encode the labels, converting them from strings to integers
le = LabelEncoder()
labels = le.fit_transform(labels)

# scale the raw pixel intensities to the range [0, 1], then one-hot
# encode the integer labels as 2-element vectors
data = np.array(data) / 255.0
labels = np_utils.to_categorical(labels, 2)

# partition the data into training and testing splits, using 75% for
# training and the remaining 25% for testing
print("[INFO] constructing training/testing split...")
(trainData, testData, trainLabels, testLabels) = train_test_split(
    data, labels, test_size=0.25, random_state=42)

# define the 3072-768-384-2 feedforward network architecture
model = Sequential()
model.add(Dense(768, input_dim=3072, kernel_initializer="uniform",
    activation="relu"))
model.add(Dense(384, activation="relu", kernel_initializer="uniform"))
model.add(Dense(2))
model.add(Activation("softmax"))

# compile the model using SGD, then train it
print("[INFO] compiling model...")
sgd = SGD(lr=0.01)
# categorical cross-entropy matches the one-hot encoded, 2-class
# softmax output defined above
model.compile(loss="categorical_crossentropy", optimizer=sgd,
    metrics=["accuracy"])
model.fit(trainData, trainLabels, epochs=50, batch_size=128,
    verbose=1)

print("[INFO] evaluating on testing set...")
(loss, accuracy) = model.evaluate(testData, testLabels,
    batch_size=128, verbose=1)
print("[INFO] loss={:.4f}, accuracy: {:.4f}%".format(loss,
    accuracy * 100))

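# optional: a minimal sketch (not part of the original script) showing how
# the 2-node softmax output maps back to string class names via the fitted
# LabelEncoder; purely illustrative, sampling a handful of test vectors
print("[INFO] sampling predictions on the testing set...")
probs = model.predict(testData[:5], batch_size=128)
for (i, prob) in enumerate(probs):
    # argmax over the softmax output, then map back through le.classes_
    predictedLabel = le.classes_[np.argmax(prob)]
    actualLabel = le.classes_[np.argmax(testLabels[i])]
    print("[INFO] predicted: {}, actual: {}".format(predictedLabel,
        actualLabel))
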
print("[INFO] dumping architecture and weights to file...")
model.save(args["model"])
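
# a minimal sketch of how a separate script might reload the serialized
# model and classify a single image; the image path below is hypothetical
#
#   from keras.models import load_model
#   model = load_model(args["model"])
#   features = image_to_feature_vector(cv2.imread("path/to/image.jpg")) / 255.0
#   probs = model.predict(np.array([features]))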