
Training on CIFAR-10. Convolutional Networks

Posted by Konst2016 on 26.09.2021 at 20:54
Updated by mik-a-el on 27.09.2021 at 10:32
Tags: nn, python3, cnn

Good afternoon! In this article I am going to present code for working with the CIFAR-10 images. Here is what they look like:
[Image: habr_cifar10.png]
They are colour images.
I worked from this article:
https://habr.com/ru/company/wu... og/314872/
I trained in Google Colab with a GPU runtime.
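To check that Colab actually attached a GPU (this snippet is my own addition, not from the original post), you can run:
Python
import tensorflow as tf

# An empty list here means the runtime has no GPU attached.
print(tf.config.list_physical_devices('GPU'))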
So:
Python
import keras
from keras.datasets import cifar10 # subroutines for fetching the CIFAR-10 dataset
from keras.models import Model # basic class for specifying and training a neural network
from keras.layers import Input, Convolution2D, MaxPooling2D, Dense, Dropout, Flatten
from keras.utils import np_utils # utilities for one-hot encoding of ground truth values
import numpy as np
import pickle
 
 
batch_size = 32 # in each iteration, we consider 32 training examples at once
num_epochs = 200 # we iterate 200 times over the entire training set
kernel_size = 3 # we will use 3x3 kernels throughout
pool_size = 2 # we will use 2x2 pooling throughout
conv_depth_1 = 32 # we will initially have 32 kernels per conv. layer...
conv_depth_2 = 64 # ...switching to 64 after the first pooling layer
drop_prob_1 = 0.25 # dropout after pooling with probability 0.25
drop_prob_2 = 0.5 # dropout in the FC layer with probability 0.5
hidden_size = 512 # the FC layer will have 512 neurons
 
fpickle='learned.pkl'
 
 
def save_model(model):
    len_layers=len(model.layers)
    weights={}
    for i in range(len_layers):
        weights[i]=model.layers[i].get_weights()
 
    with open(fpickle, 'wb') as f:
        pickle.dump(weights, f) 
        print(f'saved learned to {fpickle}')   
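
def load_model_weights(model):
    # A possible counterpart to save_model (my own sketch, not in the original
    # post): build a model with the same architecture, then restore the
    # pickled per-layer weights into it.
    with open(fpickle, 'rb') as f:
        weights = pickle.load(f)
    for i, layer_weights in weights.items():
        model.layers[i].set_weights(layer_weights)
    print(f'loaded learned weights from {fpickle}')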
 
 
def main():
  
  print('data format:', keras.backend.image_data_format()) # public API; _image_data_format is private
  """
  ->
  channels_last
  It seems that with the tensorflow backend the depth-channel format cannot be
  changed via keras.backend.set_image_data_format("channels_first")
  """
  (X_train, y_train), (X_test, y_test) = cifar10.load_data() # fetch CIFAR-10 data
 
  # Keras serves CIFAR-10 in channels_last order: (num, height, width, depth).
  # The habr article (written for the old channels_first layout) unpacked this
  # as (num, depth, height, width), which mixes up the axes here.
  num_train, height, width, depth = X_train.shape # there are 50_000 training examples in CIFAR-10

  num_test = X_test.shape[0] # there are 10_000 test examples in CIFAR-10

  # No reshape is needed: the arrays already have shape (num, height, width, depth).
 
 
  for pair in locals().items():
    if isinstance(pair[1], np.ndarray):
      print("name: ",pair[0],end=' ')
      print("shape", pair[1].shape)
 
  """
  ->
  name:  X_train shape (50000, 32, 32, 3)
  name:  y_train shape (50000, 1)
  name:  X_test shape (10000, 32, 32, 3)
  name:  y_test shape (10000, 1)
  """    
  
  X_train = X_train.astype('float32') 
  X_test = X_test.astype('float32')
  # Normalise data to the [0, 1] range. NB: the habr article divided X_test by
  # np.max(X_train) *after* X_train had already been rescaled to max 1.0, which
  # left the test set effectively unnormalised; dividing both by 255 avoids that.
  X_train /= 255.0
  X_test /= 255.0
  
  num_classes = np.unique(y_train).shape[0] # there are 10 image classes
  Y_train = np_utils.to_categorical(y_train, num_classes) # One-hot encode the labels
  Y_test = np_utils.to_categorical(y_test, num_classes) # One-hot encode the labels
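  # Illustrative example (my own note): to_categorical turns a class index into
  # a length-10 indicator vector, e.g.
  #   np_utils.to_categorical([3], 10) -> [[0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]]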
 
  
  inp = Input(shape=(height, width, depth)) # N.B. with channels_last, depth goes last
  
  # Conv [32] -> Conv [32] -> Pool (with dropout on the pooling layer)
  # NB: in current Keras the kernel size must be one (h, w) tuple; passing
  # kernel_size twice as separate positional args would set strides instead.
  conv_1 = Convolution2D(conv_depth_1, (kernel_size, kernel_size), padding='same', activation='relu')(inp)
  conv_2 = Convolution2D(conv_depth_1, (kernel_size, kernel_size), padding='same', activation='relu')(conv_1)
  pool_1 = MaxPooling2D(pool_size=(pool_size, pool_size), padding='same')(conv_2)
  drop_1 = Dropout(drop_prob_1)(pool_1)
  # Conv [64] -> Conv [64] -> Pool (with dropout on the pooling layer)
  conv_3 = Convolution2D(conv_depth_2, (kernel_size, kernel_size), padding='same', activation='relu')(drop_1)
  conv_4 = Convolution2D(conv_depth_2, (kernel_size, kernel_size), padding='same', activation='relu')(conv_3)
  pool_2 = MaxPooling2D(pool_size=(pool_size, pool_size), padding='same')(conv_4)
  drop_2 = Dropout(drop_prob_1)(pool_2)
  # Now flatten to 1D, apply FC -> ReLU (with dropout) -> softmax
  flat = Flatten()(drop_2)
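  # Shape check (my own note): two 2x2 poolings take 32x32 -> 16x16 -> 8x8, so
  # Flatten yields 8 * 8 * conv_depth_2 = 8 * 8 * 64 = 4096 features.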
  hidden = Dense(hidden_size, activation='relu')(flat)
  drop_3 = Dropout(drop_prob_2)(hidden)
  out = Dense(num_classes, activation='softmax')(drop_3)
 
  model = Model(inputs=inp, outputs=out) # To define a model, just specify its input and output layers
 
  model.compile(loss='categorical_crossentropy', # using the cross-entropy loss function
              optimizer='adam', # using the Adam optimiser
              metrics=['accuracy']) # reporting the accuracy
  try: # We can interrupt training with Ctrl+C and still save the weights
    model.fit(X_train, Y_train, # Train the model using the training set...
          batch_size=batch_size, epochs=num_epochs,
          verbose=1, validation_split=0.1) # ...holding out 10% of the data for validation
  except KeyboardInterrupt:
    print('training interrupted')
  finally:
    """
    ->
    After 200 epochs we achieved:
    loss: 1.4153 - accuracy: 0.4948 - val_loss: 1.3901 - val_accuracy: 0.5004
    """
    # finally always runs, so save/evaluate only here; the original except
    # block repeated these calls, causing a double save after an interrupt.
    save_model(model)
    model.evaluate(X_test, Y_test, verbose=1) # Evaluate the trained model on the test set!
    """
    ->
    accuracy ~ 0.5
    """
 
main()

It seems the KeyboardInterrupt handling didn't quite work there: finally runs even after the except block, so the original version saved the weights and evaluated twice after an interrupt (the version above saves only in finally).
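A more robust option (my suggestion, not from the original post; it assumes the model and data defined above) is Keras's ModelCheckpoint callback, which saves progress during training:
Python
from keras.callbacks import ModelCheckpoint

# Save the full model whenever val_loss improves, so an interrupted run
# loses at most one epoch of progress. The filename is arbitrary.
checkpoint = ModelCheckpoint('cifar10_best.h5', monitor='val_loss',
                             save_best_only=True, verbose=1)
model.fit(X_train, Y_train, batch_size=batch_size, epochs=num_epochs,
          verbose=1, validation_split=0.1, callbacks=[checkpoint])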
Link to the Colab notebook:
https://colab.research.google.... sp=sharing