Modifs archi données

This commit is contained in:
Clémentine Bonneau 2021-12-22 20:45:07 +01:00
parent 4312087dc9
commit fbea931fba
13 changed files with 76 additions and 63 deletions

View file

@ -46,7 +46,7 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 1,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
@ -81,9 +81,31 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 5,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[name: \"/device:CPU:0\"\n",
"device_type: \"CPU\"\n",
"memory_limit: 268435456\n",
"locality {\n",
"}\n",
"incarnation: 6212331521511793857\n",
"]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2021-12-22 20:34:12.391220: I tensorflow/core/platform/cpu_feature_guard.cc:145] This TensorFlow binary is optimized with Intel(R) MKL-DNN to use the following CPU instructions in performance critical operations: SSE4.1 SSE4.2\n",
"To enable them in non-MKL-DNN operations, rebuild TensorFlow with the appropriate compiler flags.\n"
]
}
],
"source": [ "source": [
"from tensorflow.python.client import device_lib\n", "from tensorflow.python.client import device_lib\n",
"print(device_lib.list_local_devices())" "print(device_lib.list_local_devices())"
@ -91,9 +113,17 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": null, "execution_count": 6,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"CPU\n"
]
}
],
"source": [ "source": [
"MODE = \"GPU\" if \"GPU\" in [k.device_type for k in device_lib.list_local_devices()] else \"CPU\"\n", "MODE = \"GPU\" if \"GPU\" in [k.device_type for k in device_lib.list_local_devices()] else \"CPU\"\n",
"print(MODE)" "print(MODE)"
@ -132,25 +162,24 @@
"\n", "\n",
"```\n", "```\n",
"data_dir\n", "data_dir\n",
"└───sample/\n", "└───train/\n",
"│ └───train/\n", "│ └───absence/\n",
"│ │ └───absence/\n", "│ │ │ absence_t_0001.jpg\n",
"│ │ │ │ absence_t_0001.jpg\n", "│ │ │ absence_t_0002.jpg\n",
"│ │ │ │ absence_t_0002.jpg\n", "│ │ │ ...\n",
"│ │ │ │ ...\n", "│ └───presence/\n",
"│ │ └───presence/\n", "│ │ │ presence_t_0001.jpg\n",
"│ │ │ │ presence_t_0001.jpg\n", "│ │ │ presence_t_0002.jpg\n",
"│ │ │ │ presence_t_0002.jpg\n", "│ │ │ ...\n",
"│ │ │ │ ...\n", "└───validation/\n",
"│ └───validation/\n", "│ └───absence/\n",
"│ │ └───absence/\n", "│ │ │ absence_v_0001.jpg\n",
"│ │ │ │ absence_v_0001.jpg\n", "│ │ │ absence_v_0002.jpg\n",
"│ │ │ │ absence_v_0002.jpg\n", "│ │ │ ...\n",
"│ │ │ │ ...\n", "│ └───presence/\n",
"│ │ └───presence/\n", "│ │ │ presence_v_0001.jpg\n",
"│ │ │ │ presence_v_0001.jpg\n", "│ │ │ presence_v_0002.jpg\n",
"│ │ │ │ presence_v_0002.jpg\n", "│ │ │ ...\n",
"│ │ │ │ ...\n",
"```" "```"
] ]
}, },
@ -158,23 +187,19 @@
"cell_type": "markdown", "cell_type": "markdown",
"metadata": {}, "metadata": {},
"source": [ "source": [
"### Parameter" "### Reading the data"
] ]
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 2, "execution_count": 7,
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"data_dir = 'data/' # data path\n", "data_dir = 'data/' # data path\n",
"\n", "\n",
"# Sample directory path\n", "# Sample directory path\n",
"# That way we can use this algorithm with another sample if we have more pictures in the future.\n", "# That way we can use this algorithm with another sample if we have more pictures in the future.\n"
"\n",
"N_train = 160\n",
"N_val = 40\n",
"data_dir_sub = data_dir+'sample_%d_Ntrain_%d_Nval' %(N_train, N_val)"
] ]
}, },
{ {
@ -188,23 +213,11 @@
}, },
{ {
"cell_type": "code", "cell_type": "code",
"execution_count": 1, "execution_count": null,
"metadata": {}, "metadata": {},
"outputs": [ "outputs": [],
{
"ename": "NameError",
"evalue": "name 'kpi' is not defined",
"output_type": "error",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m/var/folders/83/bqq_97cs083fpgkn46bd5k5r0000gn/T/ipykernel_61636/3158407634.py\u001b[0m in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0mimg\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mkpi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mload_img\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mdata_dir_sub\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0;34m'/train/presence/presence_t_0001.jpg'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mimg\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'kpi' is not defined"
]
}
],
"source": [ "source": [
"img = kpi.load_img(data_dir_sub+'/train/presence/presence_t_0001.jpg') \n", "img = kpi.load_img(data_dir+'/train/presence/presence_t_0001.jpg') \n",
"img" "img"
] ]
}, },
@ -281,15 +294,15 @@
"metadata": {}, "metadata": {},
"outputs": [], "outputs": [],
"source": [ "source": [
"img = kpi.load_img(data_dir_sub+\"/train/presence/presence_t_0001.jpg\")\n", "img = kpi.load_img(data_dir+\"/train/presence/presence_t_0001.jpg\")\n",
"x = kpi.img_to_array(img) \n", "x = kpi.img_to_array(img) \n",
"x_ = np.expand_dims(x, axis=0)\n", "x_ = np.expand_dims(x, axis=0)\n",
"\n", "\n",
"if not(os.path.isdir(data_dir_sub+\"/preprocessing_example\")):\n", "if not(os.path.isdir(data_dir+\"/preprocessing_example\")):\n",
" os.mkdir(data_dir_sub+\"/preprocessing_example\")\n", " os.mkdir(data_dir+\"/preprocessing_example\")\n",
"\n", "\n",
" i = 0\n", " i = 0\n",
" for batch in datagen.flow(x_, batch_size=1,save_to_dir=data_dir_sub+\"/preprocessing_example\", save_prefix='bird', save_format='jpg'):\n", " for batch in datagen.flow(x_, batch_size=1,save_to_dir=data_dir+\"/preprocessing_example\", save_prefix='bird', save_format='jpg'):\n",
" i += 1\n", " i += 1\n",
" if i > 7:\n", " if i > 7:\n",
" break " " break "
@ -330,8 +343,8 @@
], ],
"source": [ "source": [
"X_list=[]\n", "X_list=[]\n",
"for f in os.listdir(data_dir_sub+\"/preprocessing_example\"):\n", "for f in os.listdir(data_dir+\"/preprocessing_example\"):\n",
" X_list.append(kpi.img_to_array(kpi.load_img(data_dir_sub+\"/preprocessing_example/\"+f)))\n", " X_list.append(kpi.img_to_array(kpi.load_img(data_dir+\"/preprocessing_example/\"+f)))\n",
"\n", "\n",
"\n", "\n",
"fig=plt.figure(figsize=(16,8))\n", "fig=plt.figure(figsize=(16,8))\n",
@ -411,8 +424,8 @@
} }
], ],
"source": [ "source": [
"x_0 = kpi.img_to_array(kpi.load_img(data_dir_sub+\"/train/presence/presence_t_0001.jpg\"))\n", "x_0 = kpi.img_to_array(kpi.load_img(data_dir+\"/train/presence/presence_t_0001.jpg\"))\n",
"x_1 = kpi.img_to_array(kpi.load_img(data_dir_sub+\"/train/presence/presence_t_0002.jpg\"))\n", "x_1 = kpi.img_to_array(kpi.load_img(data_dir+\"/train/presence/presence_t_0002.jpg\"))\n",
"x_0.shape, x_1.shape" "x_0.shape, x_1.shape"
] ]
}, },
@ -461,14 +474,14 @@
"# subfolders of 'data/train', and indefinitely generate\n", "# subfolders of 'data/train', and indefinitely generate\n"
"# batches of augmented image data\n", "# batches of augmented image data\n",
"train_generator = train_datagen.flow_from_directory(\n", "train_generator = train_datagen.flow_from_directory(\n",
" data_dir_sub+\"/train/\", # this is the target directory\n", " data_dir+\"/train/\", # this is the target directory\n",
" target_size=(img_width, img_height), \n", " target_size=(img_width, img_height), \n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode='binary') # since we use binary_crossentropy loss, we need binary labels\n", " class_mode='binary') # since we use binary_crossentropy loss, we need binary labels\n",
"\n", "\n",
"# this is a similar generator, for validation data\n", "# this is a similar generator, for validation data\n",
"validation_generator = valid_datagen.flow_from_directory(\n", "validation_generator = valid_datagen.flow_from_directory(\n",
" data_dir_sub+\"/validation/\",\n", " data_dir+\"/validation/\",\n",
" target_size=(img_width, img_height),\n", " target_size=(img_width, img_height),\n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode='binary')" " class_mode='binary')"
@ -840,7 +853,7 @@
"datagen = kpi.ImageDataGenerator(rescale=1. / 255)\n", "datagen = kpi.ImageDataGenerator(rescale=1. / 255)\n",
"\n", "\n",
"generator = datagen.flow_from_directory(\n", "generator = datagen.flow_from_directory(\n",
" data_dir_sub+\"/train\",\n", " data_dir+\"/train\",\n",
" target_size=(img_width, img_height),\n", " target_size=(img_width, img_height),\n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode=None, # this means our generator will only yield batches of data, no labels\n", " class_mode=None, # this means our generator will only yield batches of data, no labels\n",
@ -849,7 +862,7 @@
"\n", "\n",
"\n", "\n",
"generator = datagen.flow_from_directory(\n", "generator = datagen.flow_from_directory(\n",
" data_dir_sub+\"/validation\",\n", " data_dir+\"/validation\",\n",
" target_size=(img_width, img_height),\n", " target_size=(img_width, img_height),\n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode=None,\n", " class_mode=None,\n",
@ -1044,7 +1057,7 @@
} }
], ],
"source": [ "source": [
"model_VGG_fcm.save_weights(data_dir_sub+'/weights_model_VGG_fully_connected_model_%d_epochs_%d_batch_size.h5' %(epochs, batch_size))" "model_VGG_fcm.save_weights(data_dir+'/weights_model_VGG_fully_connected_model_%d_epochs_%d_batch_size.h5' %(epochs, batch_size))"
] ]
}, },
{ {
@ -1113,7 +1126,7 @@
"top_model.add(kl.Dropout(0.5))\n", "top_model.add(kl.Dropout(0.5))\n",
"top_model.add(kl.Dense(1, activation='sigmoid'))\n", "top_model.add(kl.Dense(1, activation='sigmoid'))\n",
"\n", "\n",
"top_model.load_weights(data_dir_sub+'/weights_model_VGG_fully_connected_model_%d_epochs_%d_batch_size.h5' %(epochs, batch_size))\n" "top_model.load_weights(data_dir+'/weights_model_VGG_fully_connected_model_%d_epochs_%d_batch_size.h5' %(epochs, batch_size))\n"
] ]
}, },
{ {
@ -1247,13 +1260,13 @@
"test_datagen = kpi.ImageDataGenerator(rescale=1. / 255)\n", "test_datagen = kpi.ImageDataGenerator(rescale=1. / 255)\n",
"\n", "\n",
"train_generator = train_datagen.flow_from_directory(\n", "train_generator = train_datagen.flow_from_directory(\n",
" data_dir_sub+\"/train/\",\n", " data_dir+\"/train/\",\n",
" target_size=(img_height, img_width),\n", " target_size=(img_height, img_width),\n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode='binary')\n", " class_mode='binary')\n",
"\n", "\n",
"validation_generator = test_datagen.flow_from_directory(\n", "validation_generator = test_datagen.flow_from_directory(\n",
" data_dir_sub+\"/validation/\",\n", " data_dir+\"/validation/\",\n",
" target_size=(img_height, img_width),\n", " target_size=(img_height, img_width),\n",
" batch_size=batch_size,\n", " batch_size=batch_size,\n",
" class_mode='binary')\n" " class_mode='binary')\n"
@ -1390,7 +1403,7 @@
"name": "python", "name": "python",
"nbconvert_exporter": "python", "nbconvert_exporter": "python",
"pygments_lexer": "ipython3", "pygments_lexer": "ipython3",
"version": "3.9.7" "version": "3.7.11"
}, },
"toc": { "toc": {
"nav_menu": {}, "nav_menu": {},

BIN
data/.DS_Store vendored

Binary file not shown.

View file

Before

Width:  |  Height:  |  Size: 603 KiB

After

Width:  |  Height:  |  Size: 603 KiB

View file

Before

Width:  |  Height:  |  Size: 654 KiB

After

Width:  |  Height:  |  Size: 654 KiB

View file

Before

Width:  |  Height:  |  Size: 586 KiB

After

Width:  |  Height:  |  Size: 586 KiB

View file

Before

Width:  |  Height:  |  Size: 620 KiB

After

Width:  |  Height:  |  Size: 620 KiB

View file

Before

Width:  |  Height:  |  Size: 605 KiB

After

Width:  |  Height:  |  Size: 605 KiB

View file

Before

Width:  |  Height:  |  Size: 626 KiB

After

Width:  |  Height:  |  Size: 626 KiB

View file

Before

Width:  |  Height:  |  Size: 619 KiB

After

Width:  |  Height:  |  Size: 619 KiB

View file

Before

Width:  |  Height:  |  Size: 672 KiB

After

Width:  |  Height:  |  Size: 672 KiB

BIN
data/validation/.DS_Store vendored Normal file

Binary file not shown.