Tools to train and evaluate the models
commit 7b2ac1ef25
parent 63cc3aeb3c
4 changed files with 291115 additions and 117 deletions
589 Apprentissage_initial_dataset.ipynb (new file)
File diff suppressed because one or more lines are too long
114 Benchmark.ipynb
@@ -2,7 +2,7 @@
"cells": [
{
"cell_type": "code",
"execution_count": 2,
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -26,12 +26,14 @@
"import sys\n",
"from glob import glob\n",
"from IPython.display import clear_output\n",
"from datetime import datetime"
"from datetime import datetime\n",
"import json\n",
"from time import time"
]
},
{
"cell_type": "code",
"execution_count": 3,
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
@@ -128,7 +130,7 @@
},
{
"cell_type": "code",
"execution_count": 4,
"execution_count": 3,
"metadata": {},
"outputs": [],
"source": [
@@ -306,7 +308,7 @@
},
{
"cell_type": "code",
"execution_count": 5,
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
@@ -388,7 +390,7 @@
},
{
"cell_type": "code",
"execution_count": 6,
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
@@ -490,7 +492,7 @@
" # ici,le changement de la distance euclidienne sont entre 0.7 et 1.3\n",
" if abs(p-1)<0.3:\n",
" # Ce n'est qu'alors que Ransac renvoie le résultat correct\n",
" return([round(y),round(x),round(rot,3)])\n",
" return([round(x),round(y),round(rot,3)])\n",
" else:\n",
" return ([-2])\n",
" else:\n",
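The hunk above fixes the order of the returned coordinates in the fragment-matching function: the position is now returned as (x, y) instead of (y, x), and a placement is only accepted when the Euclidean distance ratio stays between 0.7 and 1.3. A minimal sketch of that acceptance test, assuming p is the distance ratio computed by the RANSAC step (all names here are hypothetical, not taken from the notebook):

def accept_ransac_result(p, x, y, rot):
    # Accept the placement only if the distance ratio p deviates from 1
    # by less than 0.3, i.e. p lies between 0.7 and 1.3 (see the hunk above).
    if abs(p - 1) < 0.3:
        # Corrected ordering introduced by this commit: x first, then y.
        return [round(x), round(y), round(rot, 3)]
    # Sentinel value used by the notebook when RANSAC fails.
    return [-2]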
@@ -508,13 +510,8 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Fragment 35/500 (7.0%)\n",
"---- Frag ----\n",
"- Total: 3171.483648 -\n",
"- Reserved: 1216.34816 -\n",
"- Allocated: 306.299392 -\n",
"---- ----- ----\n",
"\n"
"Fresque 0, fragment 34/200 (17.0%)\n",
"Temps par fragment: 12.3. ETA = 2.05e+03s\n"
]
}
],
@@ -524,38 +521,51 @@
" # Variable du réseau\n",
" frag_size=16\n",
" using_cuda=True\n",
" net=load_net(\"./net_trainned6000\")\n",
" fresque_id = 4\n",
" \n",
" for fresque_id in range(6):\n",
" #fresque_id = 2\n",
"\n",
" # Variable des données\n",
" base_dir = './training_data/'\n",
" fresque_filename = base_dir+'fresque{}.ppm'\n",
" fresque_filename_wild = base_dir+'fresque*.ppm'\n",
" fragment_filename = base_dir+'fragments/fresque{}/frag_bench_{:03}.ppm'\n",
" base_dir = './training_data_small/'\n",
" fresque_filename = base_dir+'fresque_small{}.ppm'\n",
" fresque_filename_wild = base_dir+'fresque_small*.ppm'\n",
" fragment_filename = base_dir+'fragments/fresque{}/frag_bench_{:05}.ppm'\n",
" fragments_filename_wild = base_dir+'fragments/fresque{}/frag_bench_*.ppm'\n",
" vt_filename = base_dir+'fragments/fresque{}/vt/frag_bench_{:03}_vt.txt'\n",
" vt_filename = base_dir+'fragments/fresque{}/vt/frag_bench_{:05}_vt.txt'\n",
" net_filename = \"./trained_net/net_trainned_from-random_full-dataset-small_01-29_18-14_0001\"\n",
" \n",
" #Charge la fresque en mémoire\n",
" expe_id = int(net_filename.split(\"_\")[-1]) # ID de l'expérience, à ajouter à tout les fichiers écrits pour identifier les résultats d'une même expérience.\n",
" date = datetime.now().strftime(\"%m-%d_%H-%M\")\n",
" results_filename = './results_bench/results_bench_f{}_{}_{:04}'.format(fresque_id,date,expe_id)\n",
"\n",
" # Chargement du réseau\n",
" net=load_net(net_filename)\n",
"\n",
" # Charge la fresque en mémoire\n",
" img=cv2.imread(fresque_filename.format(fresque_id))\n",
" \n",
"\n",
" N_fragments = len(glob(fragments_filename_wild.format(fresque_id)))\n",
" print(fragments_filename_wild.format(fresque_id))\n",
" print(N_fragments)\n",
" \n",
"\n",
" # Crée les tableau de résultats\n",
" distances, matched, positions, verite_terrain = [],[],[],[]\n",
" \n",
" tailles = []\n",
"\n",
" time_old = time()\n",
" # Parcour tout les fragments de bench de cette fresque\n",
" for fragment_id in range(N_fragments):\n",
" clear_output(wait=True)\n",
" print(\"Fragment {}/{} ({:.3}%)\".format(fragment_id,N_fragments,(fragment_id/N_fragments*100)))\n",
" print(\"Fresque {}, fragment {}/{} ({:.3}%)\".format(fresque_id,fragment_id,N_fragments,(fragment_id/N_fragments*100)))\n",
" delta = time()-time_old\n",
" print(\"Temps par fragment: {:.3}. ETA = {:.3}s\".format(delta,(N_fragments-fragment_id)*delta))\n",
" time_old = time()\n",
" frag = cv2.imread(fragment_filename.format(fresque_id,fragment_id))\n",
" \n",
" show_mem(\"Frag\")\n",
" \n",
"\n",
" # Faites pivoter les pièces de 20 degrés à chaque fois pour correspondre, répétez 18 fois\n",
" for i in range(18):\n",
" for i in [0,17]:\n",
" rotation=20*i\n",
" #rotation=0\n",
" #rotation_base=0\n",
" score_list,positions_patchs=run_net_v3(net,img,frag,frag_size,60,0.7,using_cuda,rotation)\n",
" frag_position=frag_match(frag,img,positions_patchs)\n",
" # Lorsque Ransac obtient le bon résultat, sortez de la boucle\n",
@@ -564,55 +574,59 @@
" break\n",
" # Si Ransac trouve une solution, la variable renvoyé est une liste de deux positions et une rotation\n",
" if len(frag_position)==3:\n",
"\n",
" # MATCHED\n",
" matched.append(1)\n",
" \n",
" # Conversion de la position\n",
"\n",
" # POSITION\n",
" frag_position[2]=rotation_base-360-frag_position[2]\n",
" if frag_position[2]>0:\n",
" frag_position[2]=frag_position[2]-360\n",
" positions.append([fragment_id,frag_position[0],frag_position[1],round(frag_position[2],3)])\n",
" \n",
" # Charge la verité terrain et calcule la distance entre la vérité et le placement\n",
" positions.append([frag_position[0],frag_position[1],round(frag_position[2],3)])\n",
"\n",
" # VERITE TERRAIN\n",
" with open(vt_filename.format(fresque_id,fragment_id), 'r') as f:\n",
" data_vt = f.read().splitlines()\n",
" verite_terrain.append([int(data_vt[2]),int(data_vt[3]),frag.shape[0],frag.shape[1]])\n",
"\n",
" verite_terrain.append(data_vt)\n",
" print(frag_position)\n",
" print(data_vt)\n",
" distances.append(np.linalg.norm([float(data_vt[2])-float(frag_position[0]),float(data_vt[3])-float(frag_position[1])]))\n",
" # DISTANCE\n",
" distances.append(np.linalg.norm([float(data_vt[3])-float(frag_position[0]),float(data_vt[2])-float(frag_position[1])]))\n",
" else:\n",
" matched.append(0)\n",
" distances.append(-1)\n",
" positions.append([])\n",
" verite_terrain.append([])\n",
" \n",
"\n",
" del frag\n",
" \n",
"with open('training_done.txt','w') as f:\n",
" f.write(\"Done\")\n",
" f.close()"
"\n",
" meta = {'date':date,'base_dir':base_dir,'fresque_id':fresque_id,'fresque_taille':img.shape,'N_fragments': N_fragments,'expe_id': expe_id}\n",
" res = {'meta':meta, 'matched':matched,'distances':distances,'positions':positions,'vt':verite_terrain}\n",
"\n",
" with open(results_filename,'w') as f:\n",
" f.write(json.dumps(res))\n",
"\n",
" print(\"Sauvegardé dans {}\".format(results_filename))"
]
},
{
"cell_type": "code",
"execution_count": 9,
"execution_count": 13,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Sauvegarder dans results_f4_01-25_23-36\n"
"Sauvegarder dans results_f2_01-31_14-32\n"
]
}
],
"source": [
"import json\n",
"date = datetime.now().strftime(\"%m-%d_%H-%M\")\n",
"meta = {'date':date,'base_dir':base_dir,'fresque_id':fresque_id,'fresque_taille':img.shape,'N_fragments': N_fragments}\n",
"res = {'meta':meta,'fresque_id':fresque_id, 'matched':matched,'distances':distances,'positions':positions,'vt':verite_terrain}\n",
"res = {'meta':meta, 'matched':matched,'distances':distances,'positions':positions,'vt':verite_terrain}\n",
"\n",
"with open('results_f{}_{}'.format(fresque_id,date),'w') as f:\n",
"with open('results_bench/results_bench_from-random_full-dataset-small_MB9_f{}_{}'.format(fresque_id,date),'w') as f:\n",
" f.write(json.dumps(res))\n",
"\n",
"print(\"Sauvegarder dans {}\".format('results_f{}_{}'.format(fresque_id,date)))"
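The benchmark cell above now writes one JSON result file per fresque under ./results_bench/, with the keys 'meta', 'matched', 'distances', 'positions' and 'vt'. A minimal sketch of how such a file could be read back for analysis; the path below is hypothetical, substitute a file actually produced by the benchmark:

import json
import numpy as np

# Hypothetical result file; use the name printed by the benchmark cell.
results_filename = './results_bench/results_bench_f0_01-31_14-32_0001'

with open(results_filename, 'r') as f:
    res = json.load(f)

matched = np.array(res['matched'])      # 1 if RANSAC placed the fragment, 0 otherwise
distances = np.array(res['distances'])  # distance to the ground truth, -1 when unmatched

match_rate = 100 * matched.mean()
mean_error = distances[distances >= 0].mean()  # ignore the -1 placeholders

print("Fresque {}: {}/{} fragments matched ({:.1f}%), mean error {:.1f} px".format(
    res['meta']['fresque_id'], int(matched.sum()), res['meta']['N_fragments'],
    match_rate, mean_error))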
File diff suppressed because one or more lines are too long
289560 view_weights.ipynb (new file)
File diff suppressed because one or more lines are too long