diff --git a/.shippable.yml b/.shippable.yml index 9c73785b..a230cf26 100755 --- a/.shippable.yml +++ b/.shippable.yml @@ -49,17 +49,17 @@ script: - nosetests -v --exe --with-doctest --with-xunit --with-coverage --cover-package=imsegm --xunit-file=$CI_REPORTS/nosetests.xml # ANNOTATION section - - python handling_annotations/run_image_color_quantization.py -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" - - python handling_annotations/run_image_convert_label_color.py -imgs "images/drosophila_ovary_slice/segm/*.png" -out images/drosophila_ovary_slice/segm_rgb - - python handling_annotations/run_overlap_images_segms.py -imgs "images/drosophila_ovary_slice/image/*.jpg" -segs images/drosophila_ovary_slice/segm -out results/overlap_ovary_segment - - python handling_annotations/run_segm_annot_inpaint.py -imgs "images/drosophila_ovary_slice/segm/*.png" --label 0 - - python handling_annotations/run_segm_annot_relabel.py -imgs "images/drosophila_ovary_slice/center_levels/*.png" -out results/relabel_center_levels + - python handling_annotations/run_image_color_quantization.py -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" + - python handling_annotations/run_image_convert_label_color.py -imgs "data_images/drosophila_ovary_slice/segm/*.png" -out data_images/drosophila_ovary_slice/segm_rgb + - python handling_annotations/run_overlap_images_segms.py -imgs "data_images/drosophila_ovary_slice/image/*.jpg" -segs data_images/drosophila_ovary_slice/segm -out results/overlap_ovary_segment + - python handling_annotations/run_segm_annot_inpaint.py -imgs "data_images/drosophila_ovary_slice/segm/*.png" --label 0 + - python handling_annotations/run_segm_annot_relabel.py -imgs "data_images/drosophila_ovary_slice/center_levels/*.png" -out results/relabel_center_levels # SEGMENTATION section - rm -r -f results && mkdir results - - python experiments_segmentation/run_compute-stat_annot-segm.py - - python experiments_segmentation/run_segm_slic_model_graphcut.py --nb_jobs 1 - - python experiments_segmentation/run_segm_slic_classif_graphcut.py --nb_jobs 1 + - python experiments_segmentation/run_compute_stat_annot_segm.py --visual + - python experiments_segmentation/run_segm_slic_model_graphcut.py --path_config experiments_segmentation/sample_config.json --nb_jobs 1 + - python experiments_segmentation/run_segm_slic_classif_graphcut.py --path_config experiments_segmentation/sample_config.json --nb_jobs 1 # CENTER DETECT. section - rm -r -f results && mkdir results diff --git a/MANIFEST.in b/MANIFEST.in index 3d387c34..e149bfc7 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include README.md +include LICENSE include requirements.txt \ No newline at end of file diff --git a/README.md b/README.md index cf1a078a..b2e24e0e 100755 --- a/README.md +++ b/README.md @@ -118,35 +118,34 @@ Short description of our three sets of experiments that together compose single We introduce some useful tools for work with image annotation and segmentation. -* In case you have some smooth color labeling in your images you can remove them with following quantization script. +* **Quantization:** in case you have some smooth color labeling in your images, you can remove it with the following quantization script. ```bash python handling_annotations/run_image_color_quantization.py \ - -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" \ + -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" \ -m position -thr 0.01 --nb_jobs 2 ``` -* Concerting image labels into colour space and other way around.
+* **Paint labels:** converting image labels into colour space and the other way around. ```bash python handling_annotations/run_image_convert_label_color.py \ - -imgs "images/drosophila_ovary_slice/segm/*.png" \ - -out images/drosophila_ovary_slice/segm_rgb + -imgs "data_images/drosophila_ovary_slice/segm/*.png" \ + -out data_images/drosophila_ovary_slice/segm_rgb ``` -* Having input image and its segmentation we can use simple visualisation which overlap the segmentation over input image. +* **Visualisation:** having an input image and its segmentation, we can use a simple visualisation which overlays the segmentation over the input image. ```bash python handling_annotations/run_overlap_images_segms.py \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -segs images/drosophila_ovary_slice/segm \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ + -segs data_images/drosophila_ovary_slice/segm \ -out results/overlap_ovary_segment ``` -* Inpainting selected labels in segmentation. +* **Inpainting** selected labels in segmentation. ```bash python handling_annotations/run_segm_annot_inpaint.py \ - -imgs "images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/segm/*.png" \ --label 4 ``` -* Change labels in input segmentation into another set of lables in 1:1 schema. +* **Replace labels:** change labels in the input segmentation into another set of labels in a 1:1 scheme. ```bash python handling_annotations/run_segm_annot_relabel.py \ - -imgs "images/drosophila_ovary_slice/center_levels/*.png" \ -out results/relabel_center_levels \ --label_old 2 3 --label_new 1 1 ``` @@ -159,31 +158,39 @@ We utilize (un)supervised segmentation according to given training examples or s * Evaluate superpixels (with given SLIC parameters) quality against given segmentation. It helps find out best SLIC configuration. ```bash python experiments_segmentation/run_eval_superpixels.py \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -segm "images/drosophila_ovary_slice/annot_eggs/*.png" \ - --img_type 2d_gray \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ + -segm "data_images/drosophila_ovary_slice/annot_eggs/*.png" \ + --img_type 2d_split \ --slic_size 20 --slic_regul 0.25 --slico 0 ``` -* Perform **Unsupervised** segmentation. +* Perform **Unsupervised** segmentation on images given in a CSV file. ```bash python experiments_segmentation/run_segm_slic_model_graphcut.py \ - -list images/langerhans_islets/list_lang-isl_imgs-annot.csv \ - -imgs "images/langerhans_islets/image/*.jpg" \ - -out results -n langIsl --nb_classes 3 --visual --nb_jobs 2 + -l data_images/langerhans_islets/list_lang-isl_imgs-annot.csv -i "" \ + --path_config experiments_segmentation/sample_config.json \ + -o results -n langIsl --nb_classes 3 --visual --nb_jobs 2 + ``` + OR specified by a particular path: + ```bash + python experiments_segmentation/run_segm_slic_model_graphcut.py \ + -l "" -i "data_images/langerhans_islets/image/*.jpg" \ + --path_config experiments_segmentation/sample_config.json \ + -o results -n langIsl --nb_classes 3 --visual --nb_jobs 2 ``` * Perform **Supervised** segmentation with afterwards evaluation.
```bash python experiments_segmentation/run_segm_slic_classif_graphcut.py \ - -list images/drosophila_ovary_slice/list_imgs-annot-struct.csv \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -out results -n Ovary --img_type 2d_gray --visual --nb_jobs 2 + -l data_images/drosophila_ovary_slice/list_imgs-annot-struct.csv \ + -i "data_images/drosophila_ovary_slice/image/*.jpg" \ + --path_config experiments_segmentation/sample_config.json \ + -o results -n Ovary --img_type 2d_split --visual --nb_jobs 2 ``` * For both experiment you can evaluate segmentation results. ```bash python experiments_segmentation/run_compute-stat_annot-segm.py \ - -annot "images/drosophila_ovary_slice/annot_struct/*.png" \ + -annot "data_images/drosophila_ovary_slice/annot_struct/*.png" \ -segm "results/experiment_segm-supervise_ovary/*.png" \ - -img "images/drosophila_ovary_slice/image/*.jpg" \ + -img "data_images/drosophila_ovary_slice/image/*.jpg" \ -out results/evaluation ``` @@ -203,16 +210,16 @@ In general, the input is a formatted list (CSV file) of input images and annotat 1. With zone annotation, we train a classifier for center candidate prediction. The annotation can be a CSV file with annotated centers as points, and the zone of positive examples is set uniformly as the circular neighborhood around these points. Another way (preferable) is to use annotated image with marked zones for positive, negative and neutral examples. ```bash python experiments_ovary_centres/run_center_candidate_training.py -list none \ - -segs "images/drosophila_ovary_slice/segm/*.png" \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -centers "images/drosophila_ovary_slice/center_levels/*.png" \ + -segs "data_images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ + -centers "data_images/drosophila_ovary_slice/center_levels/*.png" \ -out results -n ovary ``` 1. Having trained classifier we perfom center prediction composed from two steps: i. center candidate clustering and candidate clustering. ```bash python experiments_ovary_centres/run_center_prediction.py -list none \ - -segs "images/drosophila_ovary_slice/segm/*.png" \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ + -segs "data_images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ -centers results/detect-centers-train_ovary/classifier_RandForest.pkl \ -out results -n ovary ``` @@ -269,7 +276,7 @@ python setup.py install 1. Run several segmentation techniques on each image. ```bash python experiments_ovary_detect/run_ovary_egg-segmentation.py \ - -list images/drosophila_ovary_slice/list_imgs-segm-center-points.csv \ + -list data_images/drosophila_ovary_slice/list_imgs-segm-center-points.csv \ -out output -n ovary_image --nb_jobs 1 \ -m ellipse_moments \ ellipse_ransac_mmt \ @@ -289,8 +296,8 @@ python setup.py install 1. In the end, cut individual segmented objects comes as minimal bounding box. ```bash python experiments_ovary_detect/run_cut_segmented_objects.py \ - -annot "images/drosophila_ovary_slice/annot_eggs/*.png" \ - -img "images/drosophila_ovary_slice/segm/*.png" \ + -annot "data_images/drosophila_ovary_slice/annot_eggs/*.png" \ + -img "data_images/drosophila_ovary_slice/segm/*.png" \ -out results/cut_images --padding 50 ``` 1. Finally, performing visualisation of segmentation results toghter with expert annotation. 
diff --git a/circle.yml b/circle.yml index 499578d8..f4347dfd 100755 --- a/circle.yml +++ b/circle.yml @@ -35,18 +35,18 @@ test: - coverage report && coverage xml -o $CIRCLE_TEST_REPORTS/coverage.xml # ANNOTATION section - - unset DISPLAY && python handling_annotations/run_image_color_quantization.py -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" - - unset DISPLAY && python handling_annotations/run_image_color_quantization.py -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" -m position - - unset DISPLAY && python handling_annotations/run_image_convert_label_color.py -imgs "images/drosophila_ovary_slice/segm/*.png" -out images/drosophila_ovary_slice/segm_rgb - - unset DISPLAY && python handling_annotations/run_image_convert_label_color.py -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" -out images/drosophila_ovary_slice/segm - - unset DISPLAY && python handling_annotations/run_overlap_images_segms.py -imgs "images/drosophila_ovary_slice/image/*.jpg" -segs images/drosophila_ovary_slice/segm -out results/overlap_ovary_segment - - unset DISPLAY && python handling_annotations/run_segm_annot_inpaint.py -imgs "images/drosophila_ovary_slice/segm/*.png" --label 0 - - unset DISPLAY && python handling_annotations/run_segm_annot_relabel.py -imgs "images/drosophila_ovary_slice/center_levels/*.png" -out results/relabel_center_levels + - unset DISPLAY && python handling_annotations/run_image_color_quantization.py -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" + - unset DISPLAY && python handling_annotations/run_image_color_quantization.py -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" -m position + - unset DISPLAY && python handling_annotations/run_image_convert_label_color.py -imgs "data_images/drosophila_ovary_slice/segm/*.png" -out data_images/drosophila_ovary_slice/segm_rgb + - unset DISPLAY && python handling_annotations/run_image_convert_label_color.py -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" -out data_images/drosophila_ovary_slice/segm + - unset DISPLAY && python handling_annotations/run_overlap_images_segms.py -imgs "data_images/drosophila_ovary_slice/image/*.jpg" -segs data_images/drosophila_ovary_slice/segm -out results/overlap_ovary_segment + - unset DISPLAY && python handling_annotations/run_segm_annot_inpaint.py -imgs "data_images/drosophila_ovary_slice/segm/*.png" --label 0 + - unset DISPLAY && python handling_annotations/run_segm_annot_relabel.py -imgs "data_images/drosophila_ovary_slice/center_levels/*.png" -out results/relabel_center_levels # SEGMENTATION section - - unset DISPLAY && python experiments_segmentation/run_compute-stat_annot-segm.py - - unset DISPLAY && python experiments_segmentation/run_segm_slic_model_graphcut.py --visual - - unset DISPLAY && python experiments_segmentation/run_segm_slic_classif_graphcut.py --visual + - unset DISPLAY && python experiments_segmentation/run_compute_stat_annot_segm.py --visual + - unset DISPLAY && python experiments_segmentation/run_segm_slic_model_graphcut.py --path_config experiments_segmentation/sample_config.json --visual + - unset DISPLAY && python experiments_segmentation/run_segm_slic_classif_graphcut.py --path_config experiments_segmentation/sample_config.json --visual # CENTER DETECT. 
section - unset DISPLAY && python experiments_ovary_centres/run_create_annotation.py diff --git a/images/drosophila_disc/annot/img_12.png b/data_images/drosophila_disc/annot/img_12.png similarity index 100% rename from images/drosophila_disc/annot/img_12.png rename to data_images/drosophila_disc/annot/img_12.png diff --git a/images/drosophila_disc/annot/img_14.png b/data_images/drosophila_disc/annot/img_14.png similarity index 100% rename from images/drosophila_disc/annot/img_14.png rename to data_images/drosophila_disc/annot/img_14.png diff --git a/images/drosophila_disc/annot/img_15.png b/data_images/drosophila_disc/annot/img_15.png similarity index 100% rename from images/drosophila_disc/annot/img_15.png rename to data_images/drosophila_disc/annot/img_15.png diff --git a/images/drosophila_disc/annot/img_19.png b/data_images/drosophila_disc/annot/img_19.png similarity index 100% rename from images/drosophila_disc/annot/img_19.png rename to data_images/drosophila_disc/annot/img_19.png diff --git a/images/drosophila_disc/annot/img_20.png b/data_images/drosophila_disc/annot/img_20.png similarity index 100% rename from images/drosophila_disc/annot/img_20.png rename to data_images/drosophila_disc/annot/img_20.png diff --git a/images/drosophila_disc/annot/img_24.png b/data_images/drosophila_disc/annot/img_24.png similarity index 100% rename from images/drosophila_disc/annot/img_24.png rename to data_images/drosophila_disc/annot/img_24.png diff --git a/images/drosophila_disc/annot/img_26.png b/data_images/drosophila_disc/annot/img_26.png similarity index 100% rename from images/drosophila_disc/annot/img_26.png rename to data_images/drosophila_disc/annot/img_26.png diff --git a/images/drosophila_disc/annot/img_43.png b/data_images/drosophila_disc/annot/img_43.png similarity index 100% rename from images/drosophila_disc/annot/img_43.png rename to data_images/drosophila_disc/annot/img_43.png diff --git a/images/drosophila_disc/annot/img_5.png b/data_images/drosophila_disc/annot/img_5.png similarity index 100% rename from images/drosophila_disc/annot/img_5.png rename to data_images/drosophila_disc/annot/img_5.png diff --git a/images/drosophila_disc/annot/img_6.png b/data_images/drosophila_disc/annot/img_6.png similarity index 100% rename from images/drosophila_disc/annot/img_6.png rename to data_images/drosophila_disc/annot/img_6.png diff --git a/images/drosophila_disc/annot_rgb/img_12.png b/data_images/drosophila_disc/annot_rgb/img_12.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_12.png rename to data_images/drosophila_disc/annot_rgb/img_12.png diff --git a/images/drosophila_disc/annot_rgb/img_14.png b/data_images/drosophila_disc/annot_rgb/img_14.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_14.png rename to data_images/drosophila_disc/annot_rgb/img_14.png diff --git a/images/drosophila_disc/annot_rgb/img_15.png b/data_images/drosophila_disc/annot_rgb/img_15.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_15.png rename to data_images/drosophila_disc/annot_rgb/img_15.png diff --git a/images/drosophila_disc/annot_rgb/img_19.png b/data_images/drosophila_disc/annot_rgb/img_19.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_19.png rename to data_images/drosophila_disc/annot_rgb/img_19.png diff --git a/images/drosophila_disc/annot_rgb/img_20.png b/data_images/drosophila_disc/annot_rgb/img_20.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_20.png rename to 
data_images/drosophila_disc/annot_rgb/img_20.png diff --git a/images/drosophila_disc/annot_rgb/img_24.png b/data_images/drosophila_disc/annot_rgb/img_24.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_24.png rename to data_images/drosophila_disc/annot_rgb/img_24.png diff --git a/images/drosophila_disc/annot_rgb/img_26.png b/data_images/drosophila_disc/annot_rgb/img_26.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_26.png rename to data_images/drosophila_disc/annot_rgb/img_26.png diff --git a/images/drosophila_disc/annot_rgb/img_43.png b/data_images/drosophila_disc/annot_rgb/img_43.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_43.png rename to data_images/drosophila_disc/annot_rgb/img_43.png diff --git a/images/drosophila_disc/annot_rgb/img_5.png b/data_images/drosophila_disc/annot_rgb/img_5.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_5.png rename to data_images/drosophila_disc/annot_rgb/img_5.png diff --git a/images/drosophila_disc/annot_rgb/img_6.png b/data_images/drosophila_disc/annot_rgb/img_6.png similarity index 100% rename from images/drosophila_disc/annot_rgb/img_6.png rename to data_images/drosophila_disc/annot_rgb/img_6.png diff --git a/images/drosophila_disc/image/img_12.jpg b/data_images/drosophila_disc/image/img_12.jpg similarity index 100% rename from images/drosophila_disc/image/img_12.jpg rename to data_images/drosophila_disc/image/img_12.jpg diff --git a/images/drosophila_disc/image/img_14.jpg b/data_images/drosophila_disc/image/img_14.jpg similarity index 100% rename from images/drosophila_disc/image/img_14.jpg rename to data_images/drosophila_disc/image/img_14.jpg diff --git a/images/drosophila_disc/image/img_15.jpg b/data_images/drosophila_disc/image/img_15.jpg similarity index 100% rename from images/drosophila_disc/image/img_15.jpg rename to data_images/drosophila_disc/image/img_15.jpg diff --git a/images/drosophila_disc/image/img_19.jpg b/data_images/drosophila_disc/image/img_19.jpg similarity index 100% rename from images/drosophila_disc/image/img_19.jpg rename to data_images/drosophila_disc/image/img_19.jpg diff --git a/images/drosophila_disc/image/img_20.jpg b/data_images/drosophila_disc/image/img_20.jpg similarity index 100% rename from images/drosophila_disc/image/img_20.jpg rename to data_images/drosophila_disc/image/img_20.jpg diff --git a/images/drosophila_disc/image/img_24.jpg b/data_images/drosophila_disc/image/img_24.jpg similarity index 100% rename from images/drosophila_disc/image/img_24.jpg rename to data_images/drosophila_disc/image/img_24.jpg diff --git a/images/drosophila_disc/image/img_26.jpg b/data_images/drosophila_disc/image/img_26.jpg similarity index 100% rename from images/drosophila_disc/image/img_26.jpg rename to data_images/drosophila_disc/image/img_26.jpg diff --git a/images/drosophila_disc/image/img_43.jpg b/data_images/drosophila_disc/image/img_43.jpg similarity index 100% rename from images/drosophila_disc/image/img_43.jpg rename to data_images/drosophila_disc/image/img_43.jpg diff --git a/images/drosophila_disc/image/img_5.jpg b/data_images/drosophila_disc/image/img_5.jpg similarity index 100% rename from images/drosophila_disc/image/img_5.jpg rename to data_images/drosophila_disc/image/img_5.jpg diff --git a/images/drosophila_disc/image/img_6.jpg b/data_images/drosophila_disc/image/img_6.jpg similarity index 100% rename from images/drosophila_disc/image/img_6.jpg rename to data_images/drosophila_disc/image/img_6.jpg diff 
--git a/data_images/drosophila_disc/list_imaginal-disks.csv b/data_images/drosophila_disc/list_imaginal-disks.csv new file mode 100644 index 00000000..3e134e25 --- /dev/null +++ b/data_images/drosophila_disc/list_imaginal-disks.csv @@ -0,0 +1,11 @@ +,path_image,path_annot +1,data_images/drosophila_disc/image/img_5.jpg,data_images/drosophila_disc/annot/img_5.png +2,data_images/drosophila_disc/image/img_6.jpg,data_images/drosophila_disc/annot/img_6.png +3,data_images/drosophila_disc/image/img_12.jpg,data_images/drosophila_disc/annot/img_12.png +4,data_images/drosophila_disc/image/img_14.jpg,data_images/drosophila_disc/annot/img_14.png +5,data_images/drosophila_disc/image/img_15.jpg,data_images/drosophila_disc/annot/img_15.png +6,data_images/drosophila_disc/image/img_19.jpg,data_images/drosophila_disc/annot/img_19.png +7,data_images/drosophila_disc/image/img_20.jpg,data_images/drosophila_disc/annot/img_20.png +8,data_images/drosophila_disc/image/img_24.jpg,data_images/drosophila_disc/annot/img_24.png +9,data_images/drosophila_disc/image/img_26.jpg,data_images/drosophila_disc/annot/img_26.png +10,data_images/drosophila_disc/image/img_43.jpg,data_images/drosophila_disc/annot/img_43.png diff --git a/data_images/drosophila_disc/list_imaginal-disks_short.csv b/data_images/drosophila_disc/list_imaginal-disks_short.csv new file mode 100644 index 00000000..8c9248d4 --- /dev/null +++ b/data_images/drosophila_disc/list_imaginal-disks_short.csv @@ -0,0 +1,3 @@ +,path_image,path_annot +1,data_images/drosophila_disc/image/img_6.jpg,data_images/drosophila_disc/annot/img_6.png +2,data_images/drosophila_disc/image/img_43.jpg,data_images/drosophila_disc/annot/img_43.png diff --git a/images/drosophila_ovary_3D/AU10-13_f0011.tif b/data_images/drosophila_ovary_3D/AU10-13_f0011.tif similarity index 100% rename from images/drosophila_ovary_3D/AU10-13_f0011.tif rename to data_images/drosophila_ovary_3D/AU10-13_f0011.tif diff --git a/images/drosophila_ovary_slice/annot_eggs/insitu4174.png b/data_images/drosophila_ovary_slice/annot_eggs/insitu4174.png similarity index 100% rename from images/drosophila_ovary_slice/annot_eggs/insitu4174.png rename to data_images/drosophila_ovary_slice/annot_eggs/insitu4174.png diff --git a/images/drosophila_ovary_slice/annot_eggs/insitu4358.png b/data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/annot_eggs/insitu4358.png rename to data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png diff --git a/images/drosophila_ovary_slice/annot_eggs/insitu7331.png b/data_images/drosophila_ovary_slice/annot_eggs/insitu7331.png similarity index 100% rename from images/drosophila_ovary_slice/annot_eggs/insitu7331.png rename to data_images/drosophila_ovary_slice/annot_eggs/insitu7331.png diff --git a/images/drosophila_ovary_slice/annot_eggs/insitu7544.png b/data_images/drosophila_ovary_slice/annot_eggs/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/annot_eggs/insitu7544.png rename to data_images/drosophila_ovary_slice/annot_eggs/insitu7544.png diff --git a/images/drosophila_ovary_slice/annot_eggs/insitu7545.png b/data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/annot_eggs/insitu7545.png rename to data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png diff --git a/images/drosophila_ovary_slice/annot_struct/insitu4174.png b/data_images/drosophila_ovary_slice/annot_struct/insitu4174.png similarity index 100% 
rename from images/drosophila_ovary_slice/annot_struct/insitu4174.png rename to data_images/drosophila_ovary_slice/annot_struct/insitu4174.png diff --git a/images/drosophila_ovary_slice/annot_struct/insitu4358.png b/data_images/drosophila_ovary_slice/annot_struct/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/annot_struct/insitu4358.png rename to data_images/drosophila_ovary_slice/annot_struct/insitu4358.png diff --git a/images/drosophila_ovary_slice/annot_struct/insitu7331.png b/data_images/drosophila_ovary_slice/annot_struct/insitu7331.png similarity index 100% rename from images/drosophila_ovary_slice/annot_struct/insitu7331.png rename to data_images/drosophila_ovary_slice/annot_struct/insitu7331.png diff --git a/images/drosophila_ovary_slice/annot_struct/insitu7544.png b/data_images/drosophila_ovary_slice/annot_struct/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/annot_struct/insitu7544.png rename to data_images/drosophila_ovary_slice/annot_struct/insitu7544.png diff --git a/images/drosophila_ovary_slice/annot_struct/insitu7545.png b/data_images/drosophila_ovary_slice/annot_struct/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/annot_struct/insitu7545.png rename to data_images/drosophila_ovary_slice/annot_struct/insitu7545.png diff --git a/images/drosophila_ovary_slice/center_levels/insitu4174.csv b/data_images/drosophila_ovary_slice/center_levels/insitu4174.csv similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu4174.csv rename to data_images/drosophila_ovary_slice/center_levels/insitu4174.csv diff --git a/images/drosophila_ovary_slice/center_levels/insitu4174.png b/data_images/drosophila_ovary_slice/center_levels/insitu4174.png similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu4174.png rename to data_images/drosophila_ovary_slice/center_levels/insitu4174.png diff --git a/images/drosophila_ovary_slice/center_levels/insitu4358.csv b/data_images/drosophila_ovary_slice/center_levels/insitu4358.csv similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu4358.csv rename to data_images/drosophila_ovary_slice/center_levels/insitu4358.csv diff --git a/images/drosophila_ovary_slice/center_levels/insitu4358.png b/data_images/drosophila_ovary_slice/center_levels/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu4358.png rename to data_images/drosophila_ovary_slice/center_levels/insitu4358.png diff --git a/images/drosophila_ovary_slice/center_levels/insitu7331.csv b/data_images/drosophila_ovary_slice/center_levels/insitu7331.csv similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7331.csv rename to data_images/drosophila_ovary_slice/center_levels/insitu7331.csv diff --git a/images/drosophila_ovary_slice/center_levels/insitu7331.png b/data_images/drosophila_ovary_slice/center_levels/insitu7331.png similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7331.png rename to data_images/drosophila_ovary_slice/center_levels/insitu7331.png diff --git a/images/drosophila_ovary_slice/center_levels/insitu7544.csv b/data_images/drosophila_ovary_slice/center_levels/insitu7544.csv similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7544.csv rename to data_images/drosophila_ovary_slice/center_levels/insitu7544.csv diff --git a/images/drosophila_ovary_slice/center_levels/insitu7544.png 
b/data_images/drosophila_ovary_slice/center_levels/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7544.png rename to data_images/drosophila_ovary_slice/center_levels/insitu7544.png diff --git a/images/drosophila_ovary_slice/center_levels/insitu7545.csv b/data_images/drosophila_ovary_slice/center_levels/insitu7545.csv similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7545.csv rename to data_images/drosophila_ovary_slice/center_levels/insitu7545.csv diff --git a/images/drosophila_ovary_slice/center_levels/insitu7545.png b/data_images/drosophila_ovary_slice/center_levels/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/center_levels/insitu7545.png rename to data_images/drosophila_ovary_slice/center_levels/insitu7545.png diff --git a/images/drosophila_ovary_slice/egg_ray_shapes.csv b/data_images/drosophila_ovary_slice/egg_ray_shapes.csv similarity index 100% rename from images/drosophila_ovary_slice/egg_ray_shapes.csv rename to data_images/drosophila_ovary_slice/egg_ray_shapes.csv diff --git a/images/drosophila_ovary_slice/ellipse_fitting/insitu4174.csv b/data_images/drosophila_ovary_slice/ellipse_fitting/insitu4174.csv similarity index 100% rename from images/drosophila_ovary_slice/ellipse_fitting/insitu4174.csv rename to data_images/drosophila_ovary_slice/ellipse_fitting/insitu4174.csv diff --git a/images/drosophila_ovary_slice/ellipse_fitting/insitu4358.csv b/data_images/drosophila_ovary_slice/ellipse_fitting/insitu4358.csv similarity index 100% rename from images/drosophila_ovary_slice/ellipse_fitting/insitu4358.csv rename to data_images/drosophila_ovary_slice/ellipse_fitting/insitu4358.csv diff --git a/images/drosophila_ovary_slice/ellipse_fitting/insitu7331.csv b/data_images/drosophila_ovary_slice/ellipse_fitting/insitu7331.csv similarity index 100% rename from images/drosophila_ovary_slice/ellipse_fitting/insitu7331.csv rename to data_images/drosophila_ovary_slice/ellipse_fitting/insitu7331.csv diff --git a/images/drosophila_ovary_slice/ellipse_fitting/insitu7544.csv b/data_images/drosophila_ovary_slice/ellipse_fitting/insitu7544.csv similarity index 100% rename from images/drosophila_ovary_slice/ellipse_fitting/insitu7544.csv rename to data_images/drosophila_ovary_slice/ellipse_fitting/insitu7544.csv diff --git a/images/drosophila_ovary_slice/ellipse_fitting/insitu7545.csv b/data_images/drosophila_ovary_slice/ellipse_fitting/insitu7545.csv similarity index 100% rename from images/drosophila_ovary_slice/ellipse_fitting/insitu7545.csv rename to data_images/drosophila_ovary_slice/ellipse_fitting/insitu7545.csv diff --git a/images/drosophila_ovary_slice/image/insitu4174.jpg b/data_images/drosophila_ovary_slice/image/insitu4174.jpg similarity index 100% rename from images/drosophila_ovary_slice/image/insitu4174.jpg rename to data_images/drosophila_ovary_slice/image/insitu4174.jpg diff --git a/images/drosophila_ovary_slice/image/insitu4358.jpg b/data_images/drosophila_ovary_slice/image/insitu4358.jpg similarity index 100% rename from images/drosophila_ovary_slice/image/insitu4358.jpg rename to data_images/drosophila_ovary_slice/image/insitu4358.jpg diff --git a/images/drosophila_ovary_slice/image/insitu7331.jpg b/data_images/drosophila_ovary_slice/image/insitu7331.jpg similarity index 100% rename from images/drosophila_ovary_slice/image/insitu7331.jpg rename to data_images/drosophila_ovary_slice/image/insitu7331.jpg diff --git 
a/images/drosophila_ovary_slice/image/insitu7544.jpg b/data_images/drosophila_ovary_slice/image/insitu7544.jpg similarity index 100% rename from images/drosophila_ovary_slice/image/insitu7544.jpg rename to data_images/drosophila_ovary_slice/image/insitu7544.jpg diff --git a/images/drosophila_ovary_slice/image/insitu7545.jpg b/data_images/drosophila_ovary_slice/image/insitu7545.jpg similarity index 100% rename from images/drosophila_ovary_slice/image/insitu7545.jpg rename to data_images/drosophila_ovary_slice/image/insitu7545.jpg diff --git a/images/drosophila_ovary_slice/image/insitu7545.tif b/data_images/drosophila_ovary_slice/image/insitu7545.tif similarity index 100% rename from images/drosophila_ovary_slice/image/insitu7545.tif rename to data_images/drosophila_ovary_slice/image/insitu7545.tif diff --git a/images/drosophila_ovary_slice/image_cut-stage-2/insitu4174.png b/data_images/drosophila_ovary_slice/image_cut-stage-2/insitu4174.png similarity index 100% rename from images/drosophila_ovary_slice/image_cut-stage-2/insitu4174.png rename to data_images/drosophila_ovary_slice/image_cut-stage-2/insitu4174.png diff --git a/images/drosophila_ovary_slice/image_cut-stage-2/insitu4358.png b/data_images/drosophila_ovary_slice/image_cut-stage-2/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/image_cut-stage-2/insitu4358.png rename to data_images/drosophila_ovary_slice/image_cut-stage-2/insitu4358.png diff --git a/images/drosophila_ovary_slice/image_cut-stage-2/insitu7331.png b/data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7331.png similarity index 100% rename from images/drosophila_ovary_slice/image_cut-stage-2/insitu7331.png rename to data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7331.png diff --git a/images/drosophila_ovary_slice/image_cut-stage-2/insitu7544.png b/data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/image_cut-stage-2/insitu7544.png rename to data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7544.png diff --git a/images/drosophila_ovary_slice/image_cut-stage-2/insitu7545.png b/data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/image_cut-stage-2/insitu7545.png rename to data_images/drosophila_ovary_slice/image_cut-stage-2/insitu7545.png diff --git a/images/drosophila_ovary_slice/info_ovary_images.txt b/data_images/drosophila_ovary_slice/info_ovary_images.txt similarity index 100% rename from images/drosophila_ovary_slice/info_ovary_images.txt rename to data_images/drosophila_ovary_slice/info_ovary_images.txt diff --git a/images/drosophila_ovary_slice/info_ovary_images_ellipses.csv b/data_images/drosophila_ovary_slice/info_ovary_images_ellipses.csv similarity index 100% rename from images/drosophila_ovary_slice/info_ovary_images_ellipses.csv rename to data_images/drosophila_ovary_slice/info_ovary_images_ellipses.csv diff --git a/data_images/drosophila_ovary_slice/list_imgs-annot-struct.csv b/data_images/drosophila_ovary_slice/list_imgs-annot-struct.csv new file mode 100644 index 00000000..b5645329 --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-annot-struct.csv @@ -0,0 +1,11 @@ +,path_image,path_annot +1,data_images/drosophila_ovary_slice/image/insitu4174.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu4174.png +2,data_images/drosophila_ovary_slice/image/insitu4358.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu4358.png 
+3,data_images/drosophila_ovary_slice/image/insitu7331.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu7331.png +4,data_images/drosophila_ovary_slice/image/insitu7544.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu7544.png +5,data_images/drosophila_ovary_slice/image/insitu7545.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu7545.png +6,data_images/drosophila_ovary_slice/image/insitu4174.tif,data_images/drosophila_ovary_slice/annot_struct/insitu4174.png +7,data_images/drosophila_ovary_slice/image/insitu4358.tif,data_images/drosophila_ovary_slice/annot_struct/insitu4358.png +8,data_images/drosophila_ovary_slice/image/insitu7331.tif,data_images/drosophila_ovary_slice/annot_struct/insitu7331.png +9,data_images/drosophila_ovary_slice/image/insitu7544.tif,data_images/drosophila_ovary_slice/annot_struct/insitu7544.png +10,data_images/drosophila_ovary_slice/image/insitu7545.tif,data_images/drosophila_ovary_slice/annot_struct/insitu7545.png diff --git a/data_images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv b/data_images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv new file mode 100644 index 00000000..67427076 --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv @@ -0,0 +1,3 @@ +,path_image,path_annot +1,data_images/drosophila_ovary_slice/image/insitu4174.jpg,data_images/drosophila_ovary_slice/annot_struct/insitu4174.png +2,data_images/drosophila_ovary_slice/image/insitu7545.tif,data_images/drosophila_ovary_slice/annot_struct/insitu7545.png diff --git a/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv b/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv new file mode 100644 index 00000000..24cc4dec --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv @@ -0,0 +1,6 @@ +,path_image,path_centers,path_annot,path_segm +1,data_images/drosophila_ovary_slice/image/insitu4174.tif,data_images/drosophila_ovary_slice/center_levels/insitu4174.png,data_images/drosophila_ovary_slice/annot_eggs/insitu4174.png,data_images/drosophila_ovary_slice/segm/insitu4174.png +2,data_images/drosophila_ovary_slice/image/insitu4358.tif,data_images/drosophila_ovary_slice/center_levels/insitu4358.png,data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png,data_images/drosophila_ovary_slice/segm/insitu4358.png +3,data_images/drosophila_ovary_slice/image/insitu7331.tif,data_images/drosophila_ovary_slice/center_levels/insitu7331.png,data_images/drosophila_ovary_slice/annot_eggs/insitu7331.png,data_images/drosophila_ovary_slice/segm/insitu7331.png +4,data_images/drosophila_ovary_slice/image/insitu7544.tif,data_images/drosophila_ovary_slice/center_levels/insitu7544.png,data_images/drosophila_ovary_slice/annot_eggs/insitu7544.png,data_images/drosophila_ovary_slice/segm/insitu7544.png +5,data_images/drosophila_ovary_slice/image/insitu7545.tif,data_images/drosophila_ovary_slice/center_levels/insitu7545.png,data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png,data_images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv b/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv new file mode 100644 index 00000000..97455de1 --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv @@ -0,0 +1,3 @@ +,path_image,path_centers,path_annot,path_segm 
+1,data_images/drosophila_ovary_slice/image/insitu4358.jpg,data_images/drosophila_ovary_slice/center_levels/insitu4358.png,data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png,data_images/drosophila_ovary_slice/segm/insitu4358.png +2,data_images/drosophila_ovary_slice/image/insitu7545.tif,data_images/drosophila_ovary_slice/center_levels/insitu7545.png,data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png,data_images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/data_images/drosophila_ovary_slice/list_imgs-segm-center-points.csv b/data_images/drosophila_ovary_slice/list_imgs-segm-center-points.csv new file mode 100644 index 00000000..4f01a95e --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-segm-center-points.csv @@ -0,0 +1,6 @@ +,path_image,path_centers,path_annot,path_segm +1,data_images/drosophila_ovary_slice/image/insitu4174.jpg,data_images/drosophila_ovary_slice/center_levels/insitu4174.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu4174.png,data_images/drosophila_ovary_slice/segm/insitu4174.png +2,data_images/drosophila_ovary_slice/image/insitu4358.jpg,data_images/drosophila_ovary_slice/center_levels/insitu4358.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png,data_images/drosophila_ovary_slice/segm/insitu4358.png +3,data_images/drosophila_ovary_slice/image/insitu7331.jpg,data_images/drosophila_ovary_slice/center_levels/insitu7331.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu7331.png,data_images/drosophila_ovary_slice/segm/insitu7331.png +4,data_images/drosophila_ovary_slice/image/insitu7544.jpg,data_images/drosophila_ovary_slice/center_levels/insitu7544.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu7544.png,data_images/drosophila_ovary_slice/segm/insitu7544.png +5,data_images/drosophila_ovary_slice/image/insitu7545.jpg,data_images/drosophila_ovary_slice/center_levels/insitu7545.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png,data_images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/data_images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv b/data_images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv new file mode 100644 index 00000000..bed54caf --- /dev/null +++ b/data_images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv @@ -0,0 +1,3 @@ +,path_image,path_centers,path_annot,path_segm +1,data_images/drosophila_ovary_slice/image/insitu4358.jpg,data_images/drosophila_ovary_slice/center_levels/insitu4358.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu4358.png,data_images/drosophila_ovary_slice/segm/insitu4358.png +2,data_images/drosophila_ovary_slice/image/insitu7545.tif,data_images/drosophila_ovary_slice/center_levels/insitu7545.csv,data_images/drosophila_ovary_slice/annot_eggs/insitu7545.png,data_images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/drosophila_ovary_slice/segm/insitu4174.png b/data_images/drosophila_ovary_slice/segm/insitu4174.png similarity index 100% rename from images/drosophila_ovary_slice/segm/insitu4174.png rename to data_images/drosophila_ovary_slice/segm/insitu4174.png diff --git a/images/drosophila_ovary_slice/segm/insitu4358.png b/data_images/drosophila_ovary_slice/segm/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/segm/insitu4358.png rename to data_images/drosophila_ovary_slice/segm/insitu4358.png diff --git a/images/drosophila_ovary_slice/segm/insitu7331.png b/data_images/drosophila_ovary_slice/segm/insitu7331.png similarity index 100% rename 
from images/drosophila_ovary_slice/segm/insitu7331.png rename to data_images/drosophila_ovary_slice/segm/insitu7331.png diff --git a/images/drosophila_ovary_slice/segm/insitu7544.png b/data_images/drosophila_ovary_slice/segm/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/segm/insitu7544.png rename to data_images/drosophila_ovary_slice/segm/insitu7544.png diff --git a/images/drosophila_ovary_slice/segm/insitu7545.png b/data_images/drosophila_ovary_slice/segm/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/segm/insitu7545.png rename to data_images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/drosophila_ovary_slice/segm_rgb/insitu4174.png b/data_images/drosophila_ovary_slice/segm_rgb/insitu4174.png similarity index 100% rename from images/drosophila_ovary_slice/segm_rgb/insitu4174.png rename to data_images/drosophila_ovary_slice/segm_rgb/insitu4174.png diff --git a/images/drosophila_ovary_slice/segm_rgb/insitu4358.png b/data_images/drosophila_ovary_slice/segm_rgb/insitu4358.png similarity index 100% rename from images/drosophila_ovary_slice/segm_rgb/insitu4358.png rename to data_images/drosophila_ovary_slice/segm_rgb/insitu4358.png diff --git a/images/drosophila_ovary_slice/segm_rgb/insitu7331.png b/data_images/drosophila_ovary_slice/segm_rgb/insitu7331.png similarity index 100% rename from images/drosophila_ovary_slice/segm_rgb/insitu7331.png rename to data_images/drosophila_ovary_slice/segm_rgb/insitu7331.png diff --git a/images/drosophila_ovary_slice/segm_rgb/insitu7544.png b/data_images/drosophila_ovary_slice/segm_rgb/insitu7544.png similarity index 100% rename from images/drosophila_ovary_slice/segm_rgb/insitu7544.png rename to data_images/drosophila_ovary_slice/segm_rgb/insitu7544.png diff --git a/images/drosophila_ovary_slice/segm_rgb/insitu7545.png b/data_images/drosophila_ovary_slice/segm_rgb/insitu7545.png similarity index 100% rename from images/drosophila_ovary_slice/segm_rgb/insitu7545.png rename to data_images/drosophila_ovary_slice/segm_rgb/insitu7545.png diff --git a/images/histology_CIMA/29-041-Izd2-w35-CD31-3-les1.jpg b/data_images/histology_CIMA/29-041-Izd2-w35-CD31-3-les1.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-CD31-3-les1.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-CD31-3-les1.jpg diff --git a/images/histology_CIMA/29-041-Izd2-w35-CD31-3-les3.jpg b/data_images/histology_CIMA/29-041-Izd2-w35-CD31-3-les3.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-CD31-3-les3.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-CD31-3-les3.jpg diff --git a/images/histology_CIMA/29-041-Izd2-w35-He-les1.jpg b/data_images/histology_CIMA/29-041-Izd2-w35-He-les1.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-He-les1.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-He-les1.jpg diff --git a/images/histology_CIMA/29-041-Izd2-w35-He-les3.jpg b/data_images/histology_CIMA/29-041-Izd2-w35-He-les3.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-He-les3.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-He-les3.jpg diff --git a/images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les1.jpg b/data_images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les1.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les1.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les1.jpg diff --git a/images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les3.jpg 
b/data_images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les3.jpg similarity index 100% rename from images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les3.jpg rename to data_images/histology_CIMA/29-041-Izd2-w35-proSPC-4-les3.jpg diff --git a/images/histology_Flagship/Case001_Cytokeratin.jpg b/data_images/histology_Flagship/Case001_Cytokeratin.jpg similarity index 100% rename from images/histology_Flagship/Case001_Cytokeratin.jpg rename to data_images/histology_Flagship/Case001_Cytokeratin.jpg diff --git a/images/histology_Flagship/Case001_HE.jpg b/data_images/histology_Flagship/Case001_HE.jpg similarity index 100% rename from images/histology_Flagship/Case001_HE.jpg rename to data_images/histology_Flagship/Case001_HE.jpg diff --git a/images/histology_Flagship/Case001_Ki67.jpg b/data_images/histology_Flagship/Case001_Ki67.jpg similarity index 100% rename from images/histology_Flagship/Case001_Ki67.jpg rename to data_images/histology_Flagship/Case001_Ki67.jpg diff --git a/images/histology_Flagship/Rat_Kidney_Section02_HE.jpg b/data_images/histology_Flagship/Rat_Kidney_Section02_HE.jpg similarity index 100% rename from images/histology_Flagship/Rat_Kidney_Section02_HE.jpg rename to data_images/histology_Flagship/Rat_Kidney_Section02_HE.jpg diff --git a/images/histology_Flagship/Rat_Kidney_Section04_Podocin.jpg b/data_images/histology_Flagship/Rat_Kidney_Section04_Podocin.jpg similarity index 100% rename from images/histology_Flagship/Rat_Kidney_Section04_Podocin.jpg rename to data_images/histology_Flagship/Rat_Kidney_Section04_Podocin.jpg diff --git a/images/histology_Flagship/Rat_Kidney_Section06_PanCytokeratin.jpg b/data_images/histology_Flagship/Rat_Kidney_Section06_PanCytokeratin.jpg similarity index 100% rename from images/histology_Flagship/Rat_Kidney_Section06_PanCytokeratin.jpg rename to data_images/histology_Flagship/Rat_Kidney_Section06_PanCytokeratin.jpg diff --git a/data_images/langerhans_islets/annot/Lh05-04.png b/data_images/langerhans_islets/annot/Lh05-04.png new file mode 100644 index 00000000..5f381e52 Binary files /dev/null and b/data_images/langerhans_islets/annot/Lh05-04.png differ diff --git a/data_images/langerhans_islets/annot/Lh05-09.png b/data_images/langerhans_islets/annot/Lh05-09.png new file mode 100644 index 00000000..2421184f Binary files /dev/null and b/data_images/langerhans_islets/annot/Lh05-09.png differ diff --git a/data_images/langerhans_islets/annot/Lh09-07.png b/data_images/langerhans_islets/annot/Lh09-07.png new file mode 100644 index 00000000..d8590909 Binary files /dev/null and b/data_images/langerhans_islets/annot/Lh09-07.png differ diff --git a/data_images/langerhans_islets/annot/Lh10-03.png b/data_images/langerhans_islets/annot/Lh10-03.png new file mode 100644 index 00000000..25aac84d Binary files /dev/null and b/data_images/langerhans_islets/annot/Lh10-03.png differ diff --git a/images/langerhans_islets/annot/gtExoIsl_13.png b/data_images/langerhans_islets/annot/gtExoIsl_13.png similarity index 100% rename from images/langerhans_islets/annot/gtExoIsl_13.png rename to data_images/langerhans_islets/annot/gtExoIsl_13.png diff --git a/images/langerhans_islets/annot/gtExoIsl_21.png b/data_images/langerhans_islets/annot/gtExoIsl_21.png similarity index 100% rename from images/langerhans_islets/annot/gtExoIsl_21.png rename to data_images/langerhans_islets/annot/gtExoIsl_21.png diff --git a/images/langerhans_islets/annot/gtExoIsl_27.png b/data_images/langerhans_islets/annot/gtExoIsl_27.png similarity index 100% rename from 
images/langerhans_islets/annot/gtExoIsl_27.png rename to data_images/langerhans_islets/annot/gtExoIsl_27.png diff --git a/data_images/langerhans_islets/image/Lh05-04.jpg b/data_images/langerhans_islets/image/Lh05-04.jpg new file mode 100644 index 00000000..5af201ff Binary files /dev/null and b/data_images/langerhans_islets/image/Lh05-04.jpg differ diff --git a/data_images/langerhans_islets/image/Lh05-09.jpg b/data_images/langerhans_islets/image/Lh05-09.jpg new file mode 100644 index 00000000..9e9380e2 Binary files /dev/null and b/data_images/langerhans_islets/image/Lh05-09.jpg differ diff --git a/data_images/langerhans_islets/image/Lh09-07.jpg b/data_images/langerhans_islets/image/Lh09-07.jpg new file mode 100644 index 00000000..cf93815d Binary files /dev/null and b/data_images/langerhans_islets/image/Lh09-07.jpg differ diff --git a/data_images/langerhans_islets/image/Lh10-03.jpg b/data_images/langerhans_islets/image/Lh10-03.jpg new file mode 100644 index 00000000..c0cdc32b Binary files /dev/null and b/data_images/langerhans_islets/image/Lh10-03.jpg differ diff --git a/images/langerhans_islets/image/gtExoIsl_13.jpg b/data_images/langerhans_islets/image/gtExoIsl_13.jpg similarity index 100% rename from images/langerhans_islets/image/gtExoIsl_13.jpg rename to data_images/langerhans_islets/image/gtExoIsl_13.jpg diff --git a/images/langerhans_islets/image/gtExoIsl_21.jpg b/data_images/langerhans_islets/image/gtExoIsl_21.jpg similarity index 100% rename from images/langerhans_islets/image/gtExoIsl_21.jpg rename to data_images/langerhans_islets/image/gtExoIsl_21.jpg diff --git a/images/langerhans_islets/image/gtExoIsl_27.jpg b/data_images/langerhans_islets/image/gtExoIsl_27.jpg similarity index 100% rename from images/langerhans_islets/image/gtExoIsl_27.jpg rename to data_images/langerhans_islets/image/gtExoIsl_27.jpg diff --git a/data_images/langerhans_islets/list_lang-isl_imgs-annot-2.csv b/data_images/langerhans_islets/list_lang-isl_imgs-annot-2.csv new file mode 100644 index 00000000..6d0a2c69 --- /dev/null +++ b/data_images/langerhans_islets/list_lang-isl_imgs-annot-2.csv @@ -0,0 +1,5 @@ +,path_image,path_annot +1,data_images/langerhans_islets/image/Lh05-04.jpg,data_images/langerhans_islets/annot/Lh05-04.png +2,data_images/langerhans_islets/image/Lh05-09.jpg,data_images/langerhans_islets/annot/Lh05-09.png +3,data_images/langerhans_islets/image/Lh09-07.jpg,data_images/langerhans_islets/annot/Lh09-07.png +4,data_images/langerhans_islets/image/Lh10-03.jpg,data_images/langerhans_islets/annot/Lh10-03.png diff --git a/data_images/langerhans_islets/list_lang-isl_imgs-annot.csv b/data_images/langerhans_islets/list_lang-isl_imgs-annot.csv new file mode 100644 index 00000000..63518630 --- /dev/null +++ b/data_images/langerhans_islets/list_lang-isl_imgs-annot.csv @@ -0,0 +1,4 @@ +,path_image,path_annot +1,data_images/langerhans_islets/image/gtExoIsl_13.jpg,data_images/langerhans_islets/annot/gtExoIsl_13.png +2,data_images/langerhans_islets/image/gtExoIsl_21.jpg,data_images/langerhans_islets/annot/gtExoIsl_21.png +3,data_images/langerhans_islets/image/gtExoIsl_27.jpg,data_images/langerhans_islets/annot/gtExoIsl_27.png diff --git a/images/others/industry.jpg b/data_images/others/industry.jpg similarity index 100% rename from images/others/industry.jpg rename to data_images/others/industry.jpg diff --git a/images/others/lena.png b/data_images/others/lena.png similarity index 100% rename from images/others/lena.png rename to data_images/others/lena.png diff --git a/images/others/sample.zvi 
b/data_images/others/sample.zvi similarity index 100% rename from images/others/sample.zvi rename to data_images/others/sample.zvi diff --git a/images/see_starfish/star_nb1.jpg b/data_images/others/sea_starfish-1.jpg similarity index 100% rename from images/see_starfish/star_nb1.jpg rename to data_images/others/sea_starfish-1.jpg diff --git a/images/see_starfish/stars_nb2.jpg b/data_images/others/sea_starfish-2.jpg similarity index 100% rename from images/see_starfish/stars_nb2.jpg rename to data_images/others/sea_starfish-2.jpg diff --git a/images/synthetic/moving-affine.jpg b/data_images/synthetic/moving-affine.jpg similarity index 100% rename from images/synthetic/moving-affine.jpg rename to data_images/synthetic/moving-affine.jpg diff --git a/images/synthetic/moving-elastic.jpg b/data_images/synthetic/moving-elastic.jpg similarity index 100% rename from images/synthetic/moving-elastic.jpg rename to data_images/synthetic/moving-elastic.jpg diff --git a/images/synthetic/reference.jpg b/data_images/synthetic/reference.jpg similarity index 100% rename from images/synthetic/reference.jpg rename to data_images/synthetic/reference.jpg diff --git a/images/textures/sample-1.jpg b/data_images/synthetic/texture-1.jpg similarity index 100% rename from images/textures/sample-1.jpg rename to data_images/synthetic/texture-1.jpg diff --git a/images/textures/sample-2_gray.jpg b/data_images/synthetic/texture-2_gray.jpg similarity index 100% rename from images/textures/sample-2_gray.jpg rename to data_images/synthetic/texture-2_gray.jpg diff --git a/images/textures/sample-3-small_annot.png b/data_images/synthetic/texture-3-small_annot.png similarity index 100% rename from images/textures/sample-3-small_annot.png rename to data_images/synthetic/texture-3-small_annot.png diff --git a/images/textures/sample-3-small_gray.jpg b/data_images/synthetic/texture-3-small_gray.jpg similarity index 100% rename from images/textures/sample-3-small_gray.jpg rename to data_images/synthetic/texture-3-small_gray.jpg diff --git a/images/textures/sample_rgb_3cls.jpg b/data_images/synthetic/texture_rgb_3cls.jpg similarity index 100% rename from images/textures/sample_rgb_3cls.jpg rename to data_images/synthetic/texture_rgb_3cls.jpg diff --git a/experiments_ovary_centres/gui_annot_center_correction.py b/experiments_ovary_centres/gui_annot_center_correction.py index 8d0e3d1f..864e0846 100755 --- a/experiments_ovary_centres/gui_annot_center_correction.py +++ b/experiments_ovary_centres/gui_annot_center_correction.py @@ -48,7 +48,7 @@ import imsegm.utils.data_io as tl_data import imsegm.utils.drawing as tl_visu -PATH_BASE = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_BASE = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_IMAGES = os.path.join(PATH_BASE, 'image', '*.jpg') PATH_CSV = os.path.join(PATH_BASE, 'center_levels', '*.csv') NAME_INFO_SHORT = 'ovary_image_info.csv' diff --git a/experiments_ovary_centres/run_center_candidate_training.py b/experiments_ovary_centres/run_center_candidate_training.py index adb43a6a..a57d97f8 100755 --- a/experiments_ovary_centres/run_center_candidate_training.py +++ b/experiments_ovary_centres/run_center_candidate_training.py @@ -14,9 +14,9 @@ SAMPLE run: >> python run_center_candidate_training.py -list none \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -segs "images/drosophila_ovary_slice/segm/*.png" \ - -centers "images/drosophila_ovary_slice/center_levels/*.png" \ + -imgs 
"data_images/drosophila_ovary_slice/image/*.jpg" \ + -segs "data_images/drosophila_ovary_slice/segm/*.png" \ + -centers "data_images/drosophila_ovary_slice/center_levels/*.png" \ -out results -n ovary Copyright (C) 2016-2017 Jiri Borovec @@ -103,7 +103,7 @@ 'center_dist_thr': 50, # distance to from annotated center as a point } -PATH_IMAGES = os.path.join(tl_data.update_path('images'), +PATH_IMAGES = os.path.join(tl_data.update_path('data_images'), 'drosophila_ovary_slice') PATH_RESULTS = tl_data.update_path('results', absolute=True) CENTER_PARAMS.update({ @@ -270,12 +270,12 @@ def load_image_segm_center(idx_row, path_out=None, dict_relabel=None): centers = np.array(LUT_ANNOT_CENTER_RELABEL)[centers] else: logging.warning('not supported file format %s', ext) + centers = None else: centers = None if is_drawing(path_out): - export_visual_input_image_segm(path_out, idx_name, img_rgb, segm, - centers) + export_visual_input_image_segm(path_out, idx_name, img_rgb, segm, centers) return idx_name, img_rgb, segm, centers @@ -459,10 +459,11 @@ def dataset_load_images_segms_compute_features(params, df_paths, dict_imgs, dict_segms, dict_center = dict(), dict(), dict() logging.info('loading input data (images, segmentation and centers)') path_show_in = os.path.join(params['path_expt'], FOLDER_INPUT) - wrapper_load_data = partial(load_image_segm_center, path_out=path_show_in, - dict_relabel=params['dict_relabel']) - iterate = tl_expt.WrapExecuteSequence(wrapper_load_data, df_paths.iterrows(), - nb_jobs=nb_jobs, desc='loading input data') + _wrapper_load = partial(load_image_segm_center, path_out=path_show_in, + dict_relabel=params['dict_relabel']) + iterate = tl_expt.WrapExecuteSequence(_wrapper_load, df_paths.iterrows(), + nb_jobs=nb_jobs, + desc='loading input data') for name, img, seg, center in iterate: dict_imgs[name] = img dict_segms[name] = seg @@ -472,10 +473,10 @@ def dataset_load_images_segms_compute_features(params, df_paths, logging.info('estimate candidate points and compute features') gene_name_img_seg = ((name, dict_imgs[name], dict_segms[name]) for name in dict_imgs) - wrapper_points_features = partial(wrapper_estim_points_compute_features, - params=params) + _wrapper_pnt_features = partial(wrapper_estim_points_compute_features, + params=params) feature_names = None - iterate = tl_expt.WrapExecuteSequence(wrapper_points_features, + iterate = tl_expt.WrapExecuteSequence(_wrapper_pnt_features, gene_name_img_seg, nb_jobs=nb_jobs, desc='estimate candidates & features') for name, slic, points, features, feature_names in iterate: @@ -572,14 +573,14 @@ def detect_center_candidates(name, image, segm, centers_gt, slic, points, :param str name: :param ndarray image: - :param ndarray seg: + :param ndarray segm: :param centers_gt: :param slic: np.array :param [(int, int)] points: :param features: :param [str] feature_names: :param {} params: - :param paths: path + :param str path_out: :param classif: obj :return {}: """ @@ -650,11 +651,11 @@ def experiment_loo(classif, dict_imgs, dict_segms, dict_centers, dict_slics, gener_data = ((n, dict_imgs[n], dict_segms[n], dict_centers[n], dict_slics[n], dict_points[n], dict_features[n], feature_names) for n in dict_imgs) - wrapper_detection = partial(wrapper_detect_center_candidates, - params=params, classif=classif, - path_output=params['path_expt']) + _wrapper_detection = partial(wrapper_detect_center_candidates, + params=params, classif=classif, + path_output=params['path_expt']) df_stat = pd.DataFrame() - iterate = 
tl_expt.WrapExecuteSequence(wrapper_detection, + iterate = tl_expt.WrapExecuteSequence(_wrapper_detection, gener_data, nb_jobs=params['nb_jobs']) for dict_stat in iterate: df_stat = df_stat.append(dict_stat, ignore_index=True) diff --git a/experiments_ovary_centres/run_center_clustering.py b/experiments_ovary_centres/run_center_clustering.py index 253b0a1c..b195eff9 100755 --- a/experiments_ovary_centres/run_center_clustering.py +++ b/experiments_ovary_centres/run_center_clustering.py @@ -211,10 +211,10 @@ def main(params): logging.info('run clustering...') df_paths_new = pd.DataFrame() - wrapper_clustering = partial(cluster_points_draw_export, params=params, - path_out=params['path_expt']) - iterate = tl_expt.WrapExecuteSequence(wrapper_clustering, - (dict(row) for idx, row in df_paths.iterrows()), + _wrapper_clustering = partial(cluster_points_draw_export, params=params, + path_out=params['path_expt']) + rows = (dict(row) for idx, row in df_paths.iterrows()) + iterate = tl_expt.WrapExecuteSequence(_wrapper_clustering, rows, nb_jobs=params['nb_jobs']) for dict_center in iterate: df_paths_new = df_paths_new.append(dict_center, ignore_index=True) diff --git a/experiments_ovary_centres/run_center_evaluation.py b/experiments_ovary_centres/run_center_evaluation.py index d475082a..df626c3d 100755 --- a/experiments_ovary_centres/run_center_evaluation.py +++ b/experiments_ovary_centres/run_center_evaluation.py @@ -4,8 +4,8 @@ SAMPLE run: >> python run_center_evaluation.py -list none \ - -segs "images/drosophila_ovary_slice/segm/*.png" \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ + -segs "data_images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ -centers "results/detect-centers-predict_ovary/centers/*.csv" \ -out results/detect-centers-predict_ovary @@ -79,7 +79,8 @@ def estimate_eggs_from_info(row_slice, mask_shape): """ finds all eggs for particular slice and mask them by ellipse annotated by ant, post and lat in the all info table - :param str path_img: + :param row_slice: + :param mask_shape: :return ndarray: ndarray """ pos_ant, pos_lat, pos_post = tl_visu.parse_annot_rectangles(row_slice) @@ -138,9 +139,10 @@ def load_center_evaluate(idx_row, df_annot, path_annot, path_visu=None, generate points, compute features and using given classifier predict labels :param (int, DF:row) idx_row: - :param {str: ...} params: - :param {str: str} paths: - :param classif: + :param df_annot: + :param str path_annot: + :param str path_visu: + :param str col_prefix: :return {str: float}: """ idx, row = idx_row @@ -223,10 +225,11 @@ def evaluate_detection_stage(df_paths, stage, path_info, path_out, nb_jobs=1): # perfom on new images stage_prefix = '[stage-%s] ' % str_stage logging.info('start section %s - load_center_evaluate ...', stage_prefix) - wrapper_detection = partial(load_center_evaluate, df_annot=df_slices_info, - path_annot=path_annot, path_visu=path_visu, - col_prefix=stage_prefix) - iterate = tl_expt.WrapExecuteSequence(wrapper_detection, df_paths.iterrows(), + _wrapper_detection = partial(load_center_evaluate, df_annot=df_slices_info, + path_annot=path_annot, path_visu=path_visu, + col_prefix=stage_prefix) + iterate = tl_expt.WrapExecuteSequence(_wrapper_detection, + df_paths.iterrows(), nb_jobs=nb_jobs) for dict_eval in iterate: df_eval = df_eval.append(dict_eval, ignore_index=True) @@ -238,8 +241,7 @@ def evaluate_detection_stage(df_paths, stage, path_info, path_out, nb_jobs=1): def main(params): """ PIPELINE for new detections - :param 
{str: str} paths: - :param int nb_jobs: + :param {str: ...} params: """ logging.info('running...') diff --git a/experiments_ovary_centres/run_center_prediction.py b/experiments_ovary_centres/run_center_prediction.py index 4a6803f6..96af52d0 100644 --- a/experiments_ovary_centres/run_center_prediction.py +++ b/experiments_ovary_centres/run_center_prediction.py @@ -4,8 +4,8 @@ SAMPLE run: >> python run_center_prediction.py -list none \ - -segs "images/drosophila_ovary_slice/segm/*.png" \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ + -segs "data_images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ -centers results/detect-centers-train_ovary/classifier_RandForest.pkl \ -out results -n ovary @@ -155,10 +155,10 @@ def main(params): # perform on new images df_stat = pd.DataFrame() - wrapper_detection = partial(load_compute_detect_centers, params=params_clf, - path_classif=params['path_classif'], - path_output=params['path_expt']) - iterate = tl_expt.WrapExecuteSequence(wrapper_detection, df_paths.iterrows(), + _wrapper_detection = partial(load_compute_detect_centers, params=params_clf, + path_classif=params['path_classif'], + path_output=params['path_expt']) + iterate = tl_expt.WrapExecuteSequence(_wrapper_detection, df_paths.iterrows(), nb_jobs=params['nb_jobs']) for dict_center in iterate: df_stat = df_stat.append(dict_center, ignore_index=True) diff --git a/experiments_ovary_centres/run_create_annotation.py b/experiments_ovary_centres/run_create_annotation.py index ab4f3402..4cff110c 100644 --- a/experiments_ovary_centres/run_create_annotation.py +++ b/experiments_ovary_centres/run_create_annotation.py @@ -157,7 +157,6 @@ def main(path_segs, path_out, nb_jobs): :param str path_segs: path with image pattern of images - obj segmentation :param str path_out: :param int nb_jobs: number of processes in parallel - :return ndarray: """ logging.info('running...') @@ -171,10 +170,10 @@ def main(path_segs, path_out, nb_jobs): 'missing: %s' % path_out os.mkdir(path_out) - wrapper_create_annot_centers = partial(create_annot_centers, - path_out_seg=path_out, - path_out_csv=path_out) - iterate = tl_expt.WrapExecuteSequence(wrapper_create_annot_centers, + _wrapper_create_annot_centers = partial(create_annot_centers, + path_out_seg=path_out, + path_out_csv=path_out) + iterate = tl_expt.WrapExecuteSequence(_wrapper_create_annot_centers, list_imgs, nb_jobs=nb_jobs, desc='annotating images') list(iterate) diff --git a/experiments_ovary_detect/run_RG2Sp_estim_shape-models.py b/experiments_ovary_detect/run_RG2Sp_estim_shape-models.py index dfad44e9..b7dc5f03 100644 --- a/experiments_ovary_detect/run_RG2Sp_estim_shape-models.py +++ b/experiments_ovary_detect/run_RG2Sp_estim_shape-models.py @@ -24,7 +24,7 @@ import imsegm.region_growing as tl_rg PATH_DATA = tl_data.update_path('data', absolute=True) -PATH_IMAGES = os.path.join(tl_data.update_path('images'), 'drosophila_ovary_slice') +PATH_IMAGES = os.path.join(tl_data.update_path('data_images'), 'drosophila_ovary_slice') PATH_ANNOT = os.path.join(PATH_IMAGES, 'annot_eggs', '*.png') RAY_STEP = 10 # names of default files for models diff --git a/experiments_ovary_detect/run_cut_segmented_objects.py b/experiments_ovary_detect/run_cut_segmented_objects.py index 2eab5688..5e930676 100644 --- a/experiments_ovary_detect/run_cut_segmented_objects.py +++ b/experiments_ovary_detect/run_cut_segmented_objects.py @@ -3,8 +3,8 @@ SAMPLE run: >> python run_cut_segmented_objects.py \ - -annot 
"images/drosophila_ovary_slice/annot_eggs/*.png" \ - -img "images/drosophila_ovary_slice/segm/*.png" \ + -annot "data_images/drosophila_ovary_slice/annot_eggs/*.png" \ + -img "data_images/drosophila_ovary_slice/segm/*.png" \ -out results/cut_images --padding 20 """ @@ -23,7 +23,7 @@ import imsegm.utils.experiments as tl_expt NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) PATHS = { 'annot': os.path.join(PATH_IMAGES, 'annot_eggs', '*.png'), @@ -113,9 +113,9 @@ def main(dict_paths, padding=0, use_mask=False, bg_color=None, df_paths = tl_data.find_files_match_names_across_dirs(list_dirs) logging.info('start cutting images') - wrapper_cutting = partial(export_cut_objects, path_out=dict_paths['output'], - padding=padding, use_mask=use_mask, bg_color=bg_color) - iterate = tl_expt.WrapExecuteSequence(wrapper_cutting, + _wrapper_cutting = partial(export_cut_objects, path_out=dict_paths['output'], + padding=padding, use_mask=use_mask, bg_color=bg_color) + iterate = tl_expt.WrapExecuteSequence(_wrapper_cutting, (row for idx, row in df_paths.iterrows()), nb_jobs=nb_jobs) list(iterate) diff --git a/experiments_ovary_detect/run_egg_swap_orientation.py b/experiments_ovary_detect/run_egg_swap_orientation.py index 1db5c74c..2a3cde15 100644 --- a/experiments_ovary_detect/run_egg_swap_orientation.py +++ b/experiments_ovary_detect/run_egg_swap_orientation.py @@ -16,7 +16,6 @@ import multiprocessing as mproc from functools import partial -import tqdm import numpy as np sys.path += [os.path.abspath('.'), os.path.abspath('..')] # Add path to root @@ -27,7 +26,7 @@ IMAGE_CHANNEL = 0 # image channel for mass extraction NB_THREADS = max(1, int(mproc.cpu_count() * 0.8)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) PARAMS = { @@ -77,10 +76,10 @@ def main(params): if not os.path.isdir(params['path_output']): os.mkdir(params['path_output']) - wrapper_object = partial(perform_orientation_swap, - path_out=params['path_output']) + _wrapper_object = partial(perform_orientation_swap, + path_out=params['path_output']) dir_name = os.path.dirname(params['path_images']) - iterate = tl_expt.WrapExecuteSequence(wrapper_object, list_imgs, + iterate = tl_expt.WrapExecuteSequence(_wrapper_object, list_imgs, nb_jobs=params['nb_jobs'], desc=dir_name) list(iterate) diff --git a/experiments_ovary_detect/run_ellipse_annot_match.py b/experiments_ovary_detect/run_ellipse_annot_match.py index 9df390bd..6c98fbbb 100644 --- a/experiments_ovary_detect/run_ellipse_annot_match.py +++ b/experiments_ovary_detect/run_ellipse_annot_match.py @@ -33,7 +33,7 @@ OVERLAP_THRESHOLD = 0. 
NB_THREADS = max(1, int(mproc.cpu_count() * 0.8)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PARAMS = { 'path_ellipses': os.path.join(PATH_IMAGES, 'ellipse_fitting', '*.csv'), @@ -155,9 +155,9 @@ def main(params): list_evals = [] # get the folder path_dir_csv = os.path.dirname(params['path_ellipses']) - wrapper_match = partial(select_optimal_ellipse, - path_dir_csv=path_dir_csv) - iterate = tl_expt.WrapExecuteSequence(wrapper_match, df_info.iterrows(), + _wrapper_match = partial(select_optimal_ellipse, + path_dir_csv=path_dir_csv) + iterate = tl_expt.WrapExecuteSequence(_wrapper_match, df_info.iterrows(), nb_jobs=params['nb_jobs']) for i, dict_row in enumerate(iterate): list_evals.append(dict_row) diff --git a/experiments_ovary_detect/run_ellipse_cut_scale.py b/experiments_ovary_detect/run_ellipse_cut_scale.py index 829ff429..99fbfd0d 100644 --- a/experiments_ovary_detect/run_ellipse_cut_scale.py +++ b/experiments_ovary_detect/run_ellipse_cut_scale.py @@ -34,7 +34,7 @@ NORM_FUNC = np.median # other options - mean, max, ... NB_THREADS = max(1, int(mproc.cpu_count() * 0.8)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) PARAMS = { @@ -93,10 +93,10 @@ def perform_stage(df_group, stage, path_images, path_out): if not os.path.isdir(path_out_stage): os.mkdir(path_out_stage) - wrapper_object = partial(extract_ellipse_object, path_images=path_images, - path_out=path_out_stage, norm_size=norm_size) + _wrapper_object = partial(extract_ellipse_object, path_images=path_images, + path_out=path_out_stage, norm_size=norm_size) desc = 'stage %i - size %s' % (stage, norm_size) - iterate = tl_expt.WrapExecuteSequence(wrapper_object, df_group.iterrows(), + iterate = tl_expt.WrapExecuteSequence(_wrapper_object, df_group.iterrows(), nb_jobs=params['nb_jobs'], desc=desc) list(iterate) diff --git a/experiments_ovary_detect/run_export_user-annot-segm.py b/experiments_ovary_detect/run_export_user-annot-segm.py index 3664ea03..5e244e4e 100644 --- a/experiments_ovary_detect/run_export_user-annot-segm.py +++ b/experiments_ovary_detect/run_export_user-annot-segm.py @@ -40,7 +40,7 @@ import imsegm.annotation as seg_annot NB_THREADS = max(1, int(mproc.cpu_count() * 0.8)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) PARAMS = { 'path_images': os.path.join(PATH_IMAGES, 'image', '*.jpg'), @@ -215,9 +215,9 @@ def main(params): df_slices_info = seg_annot.load_info_group_by_slices(params['path_infofile'], params['stages']) - wrapper_export = partial(export_figure, df_slices_info=df_slices_info, - path_out=params['path_output']) - iterate = tl_expt.WrapExecuteSequence(wrapper_export, df_paths.iterrows(), + _wrapper_export = partial(export_figure, df_slices_info=df_slices_info, + path_out=params['path_output']) + iterate = tl_expt.WrapExecuteSequence(_wrapper_export, df_paths.iterrows(), nb_jobs=params['nb_jobs']) list(iterate) logging.info('DONE') diff --git a/experiments_ovary_detect/run_ovary_egg-segmentation.py b/experiments_ovary_detect/run_ovary_egg-segmentation.py index 017f12fb..218391fa 100755 --- 
a/experiments_ovary_detect/run_ovary_egg-segmentation.py +++ b/experiments_ovary_detect/run_ovary_egg-segmentation.py @@ -9,7 +9,7 @@ SAMPLE run: >> python run_ovary_egg-segmentation.py \ - -list images/drosophila_ovary_slice/list_imgs-segm-center-points.csv \ + -list data_images/drosophila_ovary_slice/list_imgs-segm-center-points.csv \ -out results -n ovary_slices --nb_jobs 1 \ -m ellipse_moments \ ellipse_ransac_mmt \ @@ -96,7 +96,7 @@ } PATH_DATA = tl_data.update_path('data', absolute=True) -PATH_IMAGES = os.path.join(tl_data.update_path('images'), +PATH_IMAGES = os.path.join(tl_data.update_path('data_images'), 'drosophila_ovary_slice') # sample segmentation methods LIST_SAMPLE_METHODS = ( @@ -807,8 +807,8 @@ def main(params, debug_export=DEBUG_EXPORT): list_dirs = [n + DIR_DEBUG_POSIX for n in dict_segment if 'rg2sp' in n] tl_expt.create_subfolders(params['path_exp'], list_dirs) - wrapper_segment = partial(image_segmentation, params=params) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, df_paths.iterrows(), + _wrapper_segment = partial(image_segmentation, params=params) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, df_paths.iterrows(), nb_jobs=params['nb_jobs']) list(iterate) diff --git a/experiments_ovary_detect/run_ovary_segm_evaluation.py b/experiments_ovary_detect/run_ovary_segm_evaluation.py index e007d694..057d6ce5 100755 --- a/experiments_ovary_detect/run_ovary_segm_evaluation.py +++ b/experiments_ovary_detect/run_ovary_segm_evaluation.py @@ -46,7 +46,7 @@ SKIP_DIRS = ['input', 'simple', NAME_DIR_VISUAL_1, NAME_DIR_VISUAL_2, NAME_DIR_VISUAL_3] NAME_CSV_STAT = 'segmented-eggs_%s.csv' -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) PATHS = { 'images': os.path.join(PATH_IMAGES, 'image', '*.jpg'), @@ -245,7 +245,7 @@ def evaluate_folder(path_dir, dict_paths, export_visual=EXPORT_VUSIALISATION): for n in ['mean', 'std']: names = ['%s (%s)' % (c, n) for c in cols] dict_eval.update(zip(names, df_summary.T[n].values.tolist())) - dict_eval.update(zip(['%s (median)' % (c) for c in cols], + dict_eval.update(zip(['%s (median)' % c for c in cols], df_eval.median(axis=0).values.tolist())) return dict_eval @@ -271,9 +271,9 @@ def main(dict_paths, export_visual=EXPORT_VUSIALISATION, nb_jobs=NB_THREADS): [NAME_DIR_VISUAL_1, NAME_DIR_VISUAL_2, NAME_DIR_VISUAL_3]) df_all = pd.DataFrame() - wrapper_eval = partial(evaluate_folder, dict_paths=dict_paths, - export_visual=export_visual) - iterate = tl_expt.WrapExecuteSequence(wrapper_eval, list_results, + _wrapper_eval = partial(evaluate_folder, dict_paths=dict_paths, + export_visual=export_visual) + iterate = tl_expt.WrapExecuteSequence(_wrapper_eval, list_results, nb_jobs=nb_jobs) for dict_eval in iterate: df_all = df_all.append(dict_eval, ignore_index=True) diff --git a/experiments_segmentation/run_compute-stat_annot-segm.py b/experiments_segmentation/run_compute_stat_annot_segm.py similarity index 75% rename from experiments_segmentation/run_compute-stat_annot-segm.py rename to experiments_segmentation/run_compute_stat_annot_segm.py index aa433631..e02f3d12 100644 --- a/experiments_segmentation/run_compute-stat_annot-segm.py +++ b/experiments_segmentation/run_compute_stat_annot_segm.py @@ -2,12 +2,12 @@ With two given folder find image match and compute segmentation statistic >> python run_compute_stat_annot_segm.py \ - -annot 
"images/drosophila_ovary_slice/annot_struct/*.png" \ + -annot "data_images/drosophila_ovary_slice/annot_struct/*.png" \ -segm "results/experiment_segm-supervise_ovary/*.png" \ - -img "images/drosophila_ovary_slice/image/*.jpg" \ - -out results/evaluation + -img "data_images/drosophila_ovary_slice/image/*.jpg" \ + -out results/evaluation --visual -Copyright (C) 2016-2017 Jiri Borovec +Copyright (C) 2016-2018 Jiri Borovec """ import os @@ -30,8 +30,9 @@ NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) NAME_CVS_OVERALL = 'segm-STATISTIC_%s_stat-overall.csv' NAME_CVS_PER_IMAGE = 'segm-STATISTIC_%s_stat-per-images.csv' -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) +SUFFIX_VISUAL = '__visual' PATHS = { 'annot': os.path.join(PATH_IMAGES, 'annot_struct', '*.png'), 'segm': os.path.join(PATH_IMAGES, 'segm', '*.png'), @@ -58,6 +59,11 @@ def aparse_params(dict_paths): parser.add_argument('-out', '--path_out', type=str, required=False, help='path to the output directory', default=dict_paths['output']) + parser.add_argument('--nb_jobs', type=int, required=False, + default=NB_THREADS, + help='number of processes in parallel') + parser.add_argument('--visual', required=False, action='store_true', + help='export visualisations', default=False) args = parser.parse_args() logging.info('ARG PARAMETERS: \n %s', repr(args)) dict_paths = { @@ -106,7 +112,7 @@ def wrapper_relabel_segm(annot_segm): return segm -def main(dict_paths, nb_jobs=NB_THREADS, relabel=True): +def main(dict_paths, nb_jobs=NB_THREADS, visual=True, relabel=True): """ main evaluation :param {str: str} dict_paths: @@ -127,35 +133,45 @@ def main(dict_paths, nb_jobs=NB_THREADS, relabel=True): path_csv = os.path.join(dict_paths['output'], NAME_CVS_PER_IMAGE % name) df_paths.to_csv(path_csv) + assert len(df_paths) > 0, 'nothing to compare' + annots, _ = tl_data.load_images_list(df_paths['path_1'].values.tolist()) segms, names = tl_data.load_images_list(df_paths['path_2'].values.tolist()) logging.info('loaded %i annots and %i segms', len(annots), len(segms)) if relabel: + logging.info('reabel annotations and segmentations') annots = [relabel_sequential(annot)[0] for annot in annots] - segms = list(map(wrapper_relabel_segm, zip(annots, segms))) + iterate = tl_expt.WrapExecuteSequence(wrapper_relabel_segm, + zip(annots, segms), + nb_jobs=nb_jobs, ordered=True) + segms = list(iterate) + logging.info('compute statistic per image') path_csv = os.path.join(dict_paths['output'], NAME_CVS_PER_IMAGE % name) logging.debug('export to "%s"', path_csv) df_stat = seg_clf.compute_stat_per_image(segms, annots, names, nb_jobs) df_stat.to_csv(path_csv) + logging.info('sumarise statistic') path_csv = os.path.join(dict_paths['output'], NAME_CVS_OVERALL % name) logging.debug('export to "%s"', path_csv) df_desc = df_stat.describe() logging.info(df_desc.T[['count', 'mean', 'std']]) df_desc.to_csv(path_csv) - path_visu = os.path.join(dict_paths['output'], '%s__visual' % name) - if not os.path.isdir(path_visu): - os.mkdir(path_visu) - # for idx, row in df_paths.iterrows(): - # export_visual(row, path_visu) - wrapper_visual = partial(export_visual, path_out=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_visual, - (row for idx, row in df_paths.iterrows()), - nb_jobs=nb_jobs) - list(iterate) + if visual: + path_visu = os.path.join(dict_paths['output'], + '%s%s' % (name, 
SUFFIX_VISUAL)) + if not os.path.isdir(path_visu): + os.mkdir(path_visu) + # for idx, row in df_paths.iterrows(): + # export_visual(row, path_visu) + _wrapper_visual = partial(export_visual, path_out=path_visu) + iterate = tl_expt.WrapExecuteSequence(_wrapper_visual, + (row for idx, row in df_paths.iterrows()), + nb_jobs=nb_jobs) + list(iterate) logging.info('DONE') @@ -163,4 +179,4 @@ def main(dict_paths, nb_jobs=NB_THREADS, relabel=True): if __name__ == '__main__': logging.basicConfig(level=logging.INFO) dict_paths, args = aparse_params(PATHS) - main(dict_paths) + main(dict_paths, nb_jobs=args.nb_jobs, visual=args.visual) diff --git a/experiments_segmentation/run_eval_superpixels.py b/experiments_segmentation/run_eval_superpixels.py index 6cff50ad..2b6ea7d6 100644 --- a/experiments_segmentation/run_eval_superpixels.py +++ b/experiments_segmentation/run_eval_superpixels.py @@ -3,9 +3,9 @@ SAMPLE run: >> python run_eval_superpixels.py \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -segm "images/drosophila_ovary_slice/annot_eggs/*.png" \ - --img_type 2d_gray \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ + -segm "data_images/drosophila_ovary_slice/annot_eggs/*.png" \ + --img_type 2d_split \ --slic_size 20 --slic_regul 0.25 --slico 0 Copyright (C) 2017 Jiri Borovec @@ -32,14 +32,14 @@ NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) -PATH_IMAGES = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice')) +PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice')) PATH_RESULTS = tl_data.update_path('results', absolute=True) NAME_CSV_DISTANCES = 'measured_boundary_distances.csv' PARAMS = { 'path_images': os.path.join(PATH_IMAGES, 'image', '*.jpg'), 'path_segms': os.path.join(PATH_IMAGES, 'annot_eggs', '*.png'), 'path_out': os.path.join(PATH_RESULTS, 'compute_boundary_distances'), - 'img_type': '2d_gray', + 'img_type': '2d_split', } @@ -93,7 +93,7 @@ def compute_boundary_distance(idx_row, params, path_out=''): _, row = idx_row name = os.path.splitext(os.path.basename(row['path_image']))[0] img = load_image(row['path_image'], params['img_type']) - segm = load_image(row['path_segm'], 'segm') + segm = load_image(row['path_segm'], '2d_segm') logging.debug('segment SLIC...') slic = seg_spx.segment_slic_img2d(img, @@ -126,9 +126,9 @@ def main(params): df_dist = pd.DataFrame() - wrapper_eval = partial(compute_boundary_distance, params=params, - path_out=params['path_out']) - iterate = tl_expt.WrapExecuteSequence(wrapper_eval, df_paths.iterrows(), + _wrapper_eval = partial(compute_boundary_distance, params=params, + path_out=params['path_out']) + iterate = tl_expt.WrapExecuteSequence(_wrapper_eval, df_paths.iterrows(), nb_jobs=params['nb_jobs'], desc='evaluate SLIC') for name, dist in iterate: diff --git a/experiments_segmentation/run_segm_slic_classif_graphcut.py b/experiments_segmentation/run_segm_slic_classif_graphcut.py index e45bab69..d9275dee 100644 --- a/experiments_segmentation/run_segm_slic_classif_graphcut.py +++ b/experiments_segmentation/run_segm_slic_classif_graphcut.py @@ -18,11 +18,12 @@ SAMPLE run: >> python run_segm_slic_classif_graphcut.py \ - -list images/langerhans_islets/list_lang-isl_imgs-annot.csv \ - -imgs "images/langerhans_islets/image/*.jpg" \ - -out results -n LangIsl --img_type 2d_rgb --visual 1 --nb_jobs 2 + -l data_images/drosophila_ovary_slice/list_imgs-annot-struct.csv \ + -i "data_images/drosophila_ovary_slice/image/*.jpg" \ + --path_config experiments_segmentation/sample_config.json \ + -o results -n 
Ovary --img_type 2d_split --visual -Copyright (C) 2016-2017 Jiri Borovec +Copyright (C) 2016-2018 Jiri Borovec """ import os @@ -47,6 +48,7 @@ import matplotlib.pyplot as plt # from llvmpy._api.llvm.CmpInst import FCMP_OLE from skimage import segmentation +import skimage.color as sk_color from sklearn import metrics sys.path += [os.path.abspath('.'), os.path.abspath('..')] # Add path to root @@ -65,7 +67,7 @@ NAME_EXPERIMENT = 'experiment_segm-Supervised' NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) -TYPES_LOAD_IMAGE = ['2d_rgb', '2d_gray'] +TYPES_LOAD_IMAGE = ['2d_rgb', '2d_split'] NAME_FIG_LABEL_HISTO = 'fig_histogram_annot_segments.png' NAME_CSV_SEGM_STAT_SLIC_ANNOT = 'statistic_segm_slic_annot.csv' NAME_CSV_SEGM_STAT_RESULT_LOO = 'statistic_segm_LOO.csv' @@ -81,11 +83,12 @@ FOLDER_SLIC = 'slic' FOLDER_SLIC_ANNOT = 'annot_slic' FOLDER_SEGM = 'segmentation_trained' -FOLDER_SEGM_VISU = FOLDER_SEGM + '___visual' +SUFFIX_VISUAL = '___visual' +FOLDER_SEGM_VISU = FOLDER_SEGM + SUFFIX_VISUAL FOLDER_LOO = 'segmentation_leave-one-out' -FOLDER_LOO_VISU = FOLDER_LOO + '___visual' +FOLDER_LOO_VISU = FOLDER_LOO + SUFFIX_VISUAL FOLDER_LPO = 'segmentation_leave-P-out' -FOLDER_LPO_VISU = FOLDER_LPO + '___visual' +FOLDER_LPO_VISU = FOLDER_LPO + SUFFIX_VISUAL LIST_FOLDERS_BASE = (FOLDER_IMAGE, FOLDER_ANNOT, FOLDER_SLIC, FOLDER_SLIC_ANNOT, FOLDER_SEGM, FOLDER_LOO, FOLDER_LPO) LIST_FOLDERS_DEBUG = (FOLDER_SEGM_VISU, FOLDER_LOO_VISU, FOLDER_LPO_VISU) @@ -93,15 +96,15 @@ # unique experiment means adding timestemp on the end of folder name EACH_UNIQUE_EXPERIMENT = False # showing some intermediate debug images from segmentation -SHOW_DEBUG_IMAGES = False +SHOW_DEBUG_IMAGES = True # relabel annotation such that labels are in sequence no gaps in between them ANNOT_RELABEL_SEQUENCE = False # whether skip loading config from previous fun -FORCE_RELOAD = False +FORCE_RELOAD = True # even you have dumped data from previous time, all wil be recomputed -FORCE_RECOMP_DATA = False +FORCE_RECOMP_DATA = True # even you have saved classif. 
data from previous time, all wil be retrained -FORCE_RETRAIN_CLASSIF = False +FORCE_RETRAIN_CLASSIF = True # ration of fold size for LPO for hyper-parameter search CROSS_VAL_LEAVE_OUT_SEARCH = 0.2 # ration of fold size for LPO for evaluation @@ -112,20 +115,20 @@ RUN_CROSS_VAL_LPO = True -FEATURES_SET_COLOR = {'color': ('mean', 'std', 'eng')} -FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'eng')} +FEATURES_SET_COLOR = {'color': ('mean', 'std', 'energy')} +FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'energy')} FEATURES_SET_ALL = {'color': ('mean', 'std', 'median'), - 'tLM': ('mean', 'std', 'eng', 'mG')} + 'tLM': ('mean', 'std', 'energy', 'meanGrad')} FEATURES_SET_MIN = {'color': ('mean', 'std', 'energy'), 'tLM_s': ('mean', )} -FEATURES_SET_MIX = {'color': ('mean', 'std', 'eng', 'median'), +FEATURES_SET_MIX = {'color': ('mean', 'std', 'energy', 'median'), 'tLM': ('mean', 'std')} # Default parameter configuration SEGM_PARAMS = { 'name': 'ovary', 'nb_classes': None, 'clr_space': 'rgb', - 'img_type': '2d_gray', + 'img_type': '2d_split', 'slic_size': 35, 'slic_regul': 0.3, # 'spacing': (12, 1, 1), @@ -139,7 +142,7 @@ 'gc_edge_type': 'model', 'gc_use_trans': False, } -PATH_IMAGES = os.path.join(tl_data.update_path('images'), +PATH_IMAGES = os.path.join(tl_data.update_path('data_images'), 'drosophila_ovary_slice') PATH_RESULTS = tl_data.update_path('results', absolute=True) SEGM_PARAMS.update({ @@ -165,6 +168,17 @@ def visu_histogram_labels(params, dict_label_hist, fig_name=NAME_FIG_LABEL_HISTO plt.close(fig) +def use_rgb_image(img, clr='rgb'): + # clr = params.get('clr_space', 'rgb').lower() + if img.ndim == 3 and img.shape[-1] in (3, 4): + img_rgb = seg_pipe.convert_img_color_to_rgb(img, clr) + elif img.ndim == 2: + img_rgb = sk_color.gray2rgb(img) + else: + img_rgb = img.copy() + return img_rgb + + def load_image_annot_compute_features_labels(idx_row, params, show_debug_imgs=SHOW_DEBUG_IMAGES): """ load image and annotation, and compute superpixel features and labels @@ -174,42 +188,47 @@ def load_image_annot_compute_features_labels(idx_row, params, :param bool show_debug_imgs: whether show debug images :return (...): """ - def path_out_img(params, dir_name, name): + def _path_out_img(params, dir_name, name): return os.path.join(params['path_exp'], dir_name, name + '.png') idx, row = idx_row idx_name = get_idx_name(idx, row['path_image']) img = load_image(row['path_image'], params['img_type']) - annot = load_image(row['path_annot'], 'segm') + annot = load_image(row['path_annot'], '2d_segm') logging.debug('.. 
processing: %s', idx_name) assert img.shape[:2] == annot.shape[:2], \ 'individual size of image %s and seg_pipe %s for "%s" - "%s"' % \ (repr(img.shape), repr(annot.shape), row['path_image'], row['path_annot']) if show_debug_imgs: - plt.imsave(path_out_img(params, FOLDER_IMAGE, idx_name), img, + plt.imsave(_path_out_img(params, FOLDER_IMAGE, idx_name), img, cmap=plt.cm.gray) - plt.imsave(path_out_img(params, FOLDER_ANNOT, idx_name), annot) + plt.imsave(_path_out_img(params, FOLDER_ANNOT, idx_name), annot) # duplicate gray band to be as rgb # if img.ndim == 2: # img = np.rollaxis(np.tile(img, (3, 1, 1)), 0, 3) slic = seg_spx.segment_slic_img2d(img, sp_size=params['slic_size'], rltv_compact=params['slic_regul']) - img = seg_pipe.convert_img_color_space(img, params.get('clr_space', 'rgb')) + img = seg_pipe.convert_img_color_from_rgb(img, params.get('clr_space', 'rgb')) logging.debug('computed SLIC with %i labels', slic.max()) if show_debug_imgs: - img_slic = segmentation.mark_boundaries(img / float(img.max()), slic, - color=(1, 0, 0), mode='subpixel') - plt.imsave(path_out_img(params, FOLDER_SLIC, idx_name), img_slic) - features, ft_names = seg_fts.compute_selected_features_img2d(img, slic, - params['features']) - - label_hist = seg_label.histogram_regions_labels_norm(slic, annot) - labels = np.argmax(label_hist, axis=1) + img_rgb = use_rgb_image(img, params.get('clr_space', 'rgb').lower()) + img_slic = segmentation.mark_boundaries(img_rgb, slic, + color=(1, 0, 0), + mode='subpixel') + plt.imsave(_path_out_img(params, FOLDER_SLIC, idx_name), + np.clip(img_slic, 0, 1)) + slic_label_hist = seg_label.histogram_regions_labels_norm(slic, annot) + labels = np.argmax(slic_label_hist, axis=1) slic_annot = labels[slic] if show_debug_imgs: - plt.imsave(path_out_img(params, FOLDER_SLIC_ANNOT, idx_name), slic_annot) - return idx_name, img, annot, slic, features, labels, label_hist, ft_names + plt.imsave(_path_out_img(params, FOLDER_SLIC_ANNOT, idx_name), + np.clip(slic_annot, 0, slic_annot.max())) + + features, feature_names = seg_fts.compute_selected_features_img2d( + img, slic, params['features']) + return idx_name, img, annot, slic, features, labels, \ + slic_label_hist, feature_names def dataset_load_images_annot_compute_features(params, @@ -231,9 +250,9 @@ def dataset_load_images_annot_compute_features(params, df_paths = pd.read_csv(params['path_train_list'], index_col=0) assert all(n in df_paths.columns for n in ['path_image', 'path_annot']), \ 'missing required columns in loaded csv file' - wrapper_load_compute = partial(load_image_annot_compute_features_labels, - params=params, show_debug_imgs=show_debug_imgs) - iterate = tl_expt.WrapExecuteSequence(wrapper_load_compute, df_paths.iterrows(), + _wrapper_load_compute = partial(load_image_annot_compute_features_labels, + params=params, show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_load_compute, df_paths.iterrows(), nb_jobs=params['nb_jobs'], desc='extract training data') for name, img, annot, slic, features, labels, label_hist, feature_names in iterate: @@ -307,15 +326,17 @@ def segment_image(imgs_idx_path, params, classif, path_out, path_visu=None, :param obj classif: trained classifier :param str path_out: path for output :param str path_visu: the existing patch means export also visualisation + :param bool show_debug_imgs: whether show debug images :return (str, ndarray, ndarray): """ idx, path_img = parse_imgs_idx_path(imgs_idx_path) logging.debug('segmenting image: "%s"', path_img) idx_name = 
get_idx_name(idx, path_img) - img = load_image(path_img, params['img_type']) - slic = seg_spx.segment_slic_img2d(img, sp_size=params['slic_size'], - rltv_compact=params['slic_regul']) - img = seg_pipe.convert_img_color_space(img, params.get('clr_space', 'rgb')) + img_rgb = load_image(path_img, params['img_type']) + slic = seg_spx.segment_slic_img2d(img_rgb, sp_size=params['slic_size'], + rltv_compact=params['slic_regul']) + img = seg_pipe.convert_img_color_from_rgb(img_rgb, params.get('clr_space', + 'rgb')) features, _ = seg_fts.compute_selected_features_img2d(img, slic, params['features']) labels = classif.predict(features) @@ -328,7 +349,7 @@ def segment_image(imgs_idx_path, params, classif, path_out, path_visu=None, # plt.imsave(os.path.join(path_out, idx_name + '_rgb.png'), seg_pipe) if path_visu is not None and os.path.isdir(path_visu): - export_draw_image_segm_contour(img, segm, path_visu, idx_name) + export_draw_image_segm_contour(img_rgb, segm, path_visu, idx_name) try: # in case some classiefier do not support predict_proba proba = classif.predict_proba(features) @@ -336,7 +357,7 @@ def segment_image(imgs_idx_path, params, classif, path_out, path_visu=None, path_npz = os.path.join(path_out, idx_name + '.npz') np.savez_compressed(path_npz, segm_soft) except Exception: - logging.warning('classif: %s not support predict_proba(.)', + logging.warning('classif: %s not support predict_proba(...)', repr(classif)) proba = None segm_soft = None @@ -362,9 +383,8 @@ def segment_image(imgs_idx_path, params, classif, path_out, path_visu=None, # io.imsave(path_img, segm_gc) if path_visu is not None and os.path.isdir(path_visu): - export_draw_image_segm_contour(img, segm_gc, path_visu, + export_draw_image_segm_contour(img_rgb, segm_gc, path_visu, idx_name, '_gc') - if show_debug_imgs: labels_map = np.argmax(proba, axis=1) plt.imsave(os.path.join(path_visu, idx_name + '_map.png'), @@ -415,14 +435,16 @@ def eval_segment_with_annot(params, dict_annot, dict_segm, dict_label_hist=None, def retrain_loo_segment_image(imgs_idx_path, path_classif, path_dump, - path_out, path_visu): + path_out, path_visu, + show_debug_imgs=SHOW_DEBUG_IMAGES): """ load the classifier, and dumped data, subtract the image, retrain the classif. 
without it and do the segmentation - :param str path_img: path to input image + :param () imgs_idx_path: path to input image :param str path_classif: path to saved classifier :param str path_dump: path to dumped data :param, str path_out: path to segmentation outputs + :param bool show_debug_imgs: whether show debug images :return (str, ndarray, ndarray): """ idx, path_img = parse_imgs_idx_path(imgs_idx_path) @@ -443,20 +465,23 @@ def retrain_loo_segment_image(imgs_idx_path, path_classif, path_dump, classif.fit(features, labels) idx_name, segm, segm_gc = segment_image(imgs_idx_path, params, classif, - path_out, path_visu) + path_out, path_visu, + show_debug_imgs=show_debug_imgs) # gc.collect(), time.sleep(1) return idx_name, segm, segm_gc def retrain_lpo_segment_image(list_imgs_idx_path, path_classif, path_dump, - path_out, path_visu): + path_out, path_visu, + show_debug_imgs=SHOW_DEBUG_IMAGES): """ load the classifier, and dumped data, subtract the image, retrain the classif without it and do the segmentation - :param str path_img: path to input image + :param [str] list_imgs_idx_path: path to input image :param str path_classif: path to saved classifier :param str path_dump: path to dumped data :param, str path_out: path to segmentation outputs + :param bool show_debug_imgs: whether show debug images :return (str, ndarray, ndarray): """ dict_imgs, _, _, dict_features, dict_labels, _, _ = \ @@ -481,7 +506,8 @@ def retrain_lpo_segment_image(list_imgs_idx_path, path_classif, path_dump, dict_segm, dict_segm_gc = {}, {} for imgs_idx_path in list_imgs_idx_path: idx_name, segm, segm_gc = segment_image(imgs_idx_path, params, classif, - path_out, path_visu) + path_out, path_visu, + show_debug_imgs=show_debug_imgs) dict_segm[idx_name] = segm dict_segm_gc[idx_name] = segm_gc # gc.collect(), time.sleep(1) @@ -509,16 +535,18 @@ def get_summary(df, name, list_stat=('mean', 'std', 'median')): return dict_state -def perform_predictions(params, paths_img, classif): +def perform_predictions(params, paths_img, classif, + show_debug_imgs=SHOW_DEBUG_IMAGES): logging.info('run prediction on training images...') imgs_idx_path = list(zip(range(1, len(paths_img) + 1), paths_img)) dict_segms, dict_segms_gc = dict(), dict() path_out = os.path.join(params['path_exp'], FOLDER_SEGM) path_visu = os.path.join(params['path_exp'], FOLDER_SEGM_VISU) - wrapper_segment = partial(segment_image, params=params, classif=classif, - path_out=path_out, path_visu=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, imgs_idx_path, + _wrapper_segment = partial(segment_image, params=params, classif=classif, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, imgs_idx_path, nb_jobs=params['nb_jobs'], desc='image segm: prediction') for name, segm, segm_gc in iterate: @@ -528,16 +556,28 @@ def perform_predictions(params, paths_img, classif): def experiment_loo(params, df_stat, dict_annot, paths_img, path_classif, - path_dump): + path_dump, show_debug_imgs=SHOW_DEBUG_IMAGES): + """ experiment Leave-One-Out + + :param {str: ...} params: + :param DF df_stat: + :param {str: ndarray} dict_annot: + :param [str] paths_img: + :param str path_classif: + :param str path_dump: + :param bool show_debug_imgs: whether show debug images + :return {}: + """ imgs_idx_path = list(zip(range(1, len(paths_img) + 1), paths_img)) logging.info('run prediction on training images as Leave-One-Out...') dict_segms, dict_segms_gc = dict(), dict() path_out = 
os.path.join(params['path_exp'], FOLDER_LOO) path_visu = os.path.join(params['path_exp'], FOLDER_LOO_VISU) - wrapper_segment = partial(retrain_loo_segment_image, - path_classif=path_classif, path_dump=path_dump, - path_out=path_out, path_visu=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, imgs_idx_path, + _wrapper_segment = partial(retrain_loo_segment_image, + path_classif=path_classif, path_dump=path_dump, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, imgs_idx_path, nb_jobs=params['nb_jobs'], desc='experiment LOO') for name, segm, segm_gc in iterate: @@ -562,7 +602,19 @@ def experiment_loo(params, df_stat, dict_annot, paths_img, path_classif, def experiment_lpo(params, df_stat, dict_annot, paths_img, path_classif, - path_dump, nb_holdout): + path_dump, nb_holdout, show_debug_imgs=SHOW_DEBUG_IMAGES): + """ experiment Leave-P-samples-Out + + :param {str: ...} params: + :param DF df_stat: + :param {str: ndarray} dict_annot: + :param [str] paths_img: + :param str path_classif: + :param str path_dump: + :param int nb_holdout: + :param bool show_debug_imgs: whether show debug images + :return {}: + """ imgs_idx_path = list(zip(range(1, len(paths_img) + 1), paths_img)) logging.info('run prediction on training images as Leave-%i-Out...', nb_holdout) @@ -571,10 +623,11 @@ def experiment_lpo(params, df_stat, dict_annot, paths_img, path_classif, test_imgs_idx_path = [[imgs_idx_path[i] for i in ids] for _, ids in cv] path_out = os.path.join(params['path_exp'], FOLDER_LPO) path_visu = os.path.join(params['path_exp'], FOLDER_LPO_VISU) - wrapper_segment = partial(retrain_lpo_segment_image, - path_classif=path_classif, path_dump=path_dump, - path_out=path_out, path_visu=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, test_imgs_idx_path, + _wrapper_segment = partial(retrain_lpo_segment_image, + path_classif=path_classif, path_dump=path_dump, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, test_imgs_idx_path, nb_jobs=params['nb_jobs'], desc='experiment LPO') for dict_seg, dict_seg_gc in iterate: @@ -620,7 +673,7 @@ def load_train_classifier(params, features, labels, feature_names, sizes, classif, path_classif = seg_clf.create_classif_train_export( params['classif'], features, labels, cross_val=cv, params=params, feature_names=feature_names, - nb_search_iter=params['nb_classif_search'], + nb_search_iter=params.get('nb_classif_search', 1), nb_jobs=params['nb_jobs'], pca_coef=params['pca_coef'], path_out=params['path_exp']) params['path_classif'] = path_classif @@ -640,19 +693,21 @@ def main_train(params): 4) perform Leave-One-Out and Leave-P-Out experiments on images :param {str: ...} params: - :return{str: ...} : + :return {str: ...}: """ logging.getLogger().setLevel(logging.DEBUG) logging.info('running TRAINING...') + show_debug_imgs = params.get('visual', False) or SHOW_DEBUG_IMAGES - reload_dir_config = (os.path.isfile(params['path_config']) or FORCE_RELOAD) + reload_dir_config = (os.path.isfile(params.get('path_config', '')) + or FORCE_RELOAD) params = tl_expt.create_experiment_folder(params, dir_name=NAME_EXPERIMENT, - stamp_unique=EACH_UNIQUE_EXPERIMENT, + stamp_unique=params.get('unique', EACH_UNIQUE_EXPERIMENT), skip_load=reload_dir_config) tl_expt.set_experiment_logger(params['path_exp']) logging.info(tl_expt.string_dict(params, desc='PARAMETERS')) 
tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_BASE) - if params['visual']: + if params.get('visual', False): tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_DEBUG) df_stat = pd.DataFrame() @@ -663,7 +718,8 @@ def main_train(params): else: dict_imgs, dict_annot, dict_slics, dict_features, dict_labels, \ dict_label_hist, feature_names = \ - dataset_load_images_annot_compute_features(params) + dataset_load_images_annot_compute_features(params, + show_debug_imgs) save_dump_data(path_dump, dict_imgs, dict_annot, dict_slics, dict_features, dict_labels, dict_label_hist, feature_names) @@ -704,18 +760,22 @@ def main_train(params): # test classif on images df_paths = pd.read_csv(params['path_train_list'], index_col=0) paths_img = df_paths['path_image'].tolist() - perform_predictions(params, paths_img, classif) + perform_predictions(params, paths_img, classif, + show_debug_imgs=show_debug_imgs) # LEAVE ONE OUT if RUN_CROSS_VAL_LOO: df_stat = experiment_loo(params, df_stat, dict_annot, paths_img, - path_classif, path_dump) + path_classif, path_dump, + show_debug_imgs=show_debug_imgs) # LEAVE P OUT if RUN_CROSS_VAL_LPO: df_stat = experiment_lpo(params, df_stat, dict_annot, paths_img, - path_classif, path_dump, nb_holdout) + path_classif, path_dump, nb_holdout, + show_debug_imgs=show_debug_imgs) + logging.info('Statistic: \n %s', repr(df_stat.describe())) logging.info('training DONE') return params @@ -734,14 +794,14 @@ def prepare_output_dir(path_pattern_imgs, path_out, name): path_out = os.path.join(path_out, name) if not os.path.isdir(path_out): os.mkdir(path_out) - path_visu = path_out + '___visual' + path_visu = path_out + SUFFIX_VISUAL if not os.path.isdir(path_visu): os.mkdir(path_visu) return path_out, path_visu def try_segment_image(img_idx_path, params, classif, path_out, path_visu, - show_debug_imgs=False): + show_debug_imgs=SHOW_DEBUG_IMAGES): try: return segment_image(img_idx_path, params, classif, path_out, path_visu, @@ -759,7 +819,6 @@ def main_predict(path_classif, path_pattern_imgs, path_out, name='segment_', :param str path_pattern_imgs: :param str path_out: :param str name: - :return: """ logging.getLogger().setLevel(logging.INFO) logging.info('running PREDICTION...') @@ -780,10 +839,11 @@ def main_predict(path_classif, path_pattern_imgs, path_out, name='segment_', path_pattern_imgs) logging.debug('run prediction...') - wrapper_segment = partial(try_segment_image, params=params, classif=classif, - path_out=path_out, path_visu=path_visu) + _wrapper_segment = partial(try_segment_image, params=params, classif=classif, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=SHOW_DEBUG_IMAGES) list_img_path = list(zip([None] * len(paths_img), paths_img)) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, list_img_path, + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, list_img_path, nb_jobs=params['nb_jobs'], desc='segmenting images') for _ in iterate: diff --git a/experiments_segmentation/run_segm_slic_model_graphcut.py b/experiments_segmentation/run_segm_slic_model_graphcut.py index 23a5718a..c16eed31 100644 --- a/experiments_segmentation/run_segm_slic_model_graphcut.py +++ b/experiments_segmentation/run_segm_slic_model_graphcut.py @@ -12,16 +12,17 @@ SAMPLE run: >> python run_segm_slic_model_graphcut.py \ - -list images/langerhans_islets/list_lang-isl_imgs-annot.csv \ - -imgs "images/langerhans_islets/image/*.jpg" \ - -out results -n LangIsl --nb_classes 3 --visual 1 --nb_jobs 2 + -l data_images/langerhans_islets/list_lang-isl_imgs-annot.csv \ 
+ -i "data_images/langerhans_islets/image/*.jpg" \ + -o results -n LangIsl --nb_classes 3 --visual --nb_jobs 2 -Copyright (C) 2016-2017 Jiri Borovec +Copyright (C) 2016-2018 Jiri Borovec """ import os import sys import json +import glob import pickle import argparse import logging @@ -36,7 +37,6 @@ logging.warning('No display found. Using non-interactive Agg backend.') matplotlib.use('Agg') -import tqdm from PIL import Image import numpy as np import pandas as pd @@ -50,20 +50,21 @@ import imsegm.utils.experiments as tl_expt import imsegm.utils.drawing as tl_visu import imsegm.pipelines as seg_pipe +import imsegm.labeling as seg_lbs import imsegm.descriptors as seg_fts # sometimes it freeze in "Cython: computing Colour means for image" seg_fts.USE_CYTHON = False NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) -TYPES_LOAD_IMAGE = ['2d_rgb', '2d_gray'] +TYPES_LOAD_IMAGE = ['2d_rgb', '2d_split'] NAME_DUMP_MODEL = 'estimated_model.npz' NAME_CSV_ARS_CORES = 'metric_ARS.csv' # setting experiment sub-folders FOLDER_IMAGE = 'images' FOLDER_ANNOT = 'annotations' -FOLDER_SEGM_GMM = 'segmentation_GaussMixModel' +FOLDER_SEGM_GMM = 'segmentation_MixtureModel' FOLDER_SEGM_GMM_VISU = FOLDER_SEGM_GMM + '___visual' -FOLDER_SEGM_GROUP = 'segmentation_GroupGMM' +FOLDER_SEGM_GROUP = 'segmentation_GroupMM' FOLDER_SEGM_GROUP_VISU = FOLDER_SEGM_GROUP + '___visual' LIST_FOLDERS_BASE = (FOLDER_IMAGE, FOLDER_SEGM_GMM, FOLDER_SEGM_GROUP) LIST_FOLDERS_DEBUG = (FOLDER_SEGM_GMM_VISU, FOLDER_SEGM_GROUP_VISU) @@ -71,23 +72,22 @@ # unique experiment means adding timestemp on the end of folder name EACH_UNIQUE_EXPERIMENT = False # showing some intermediate debug images from segmentation -SHOW_DEBUG_IMAGES = False +SHOW_DEBUG_IMAGES = True # relabel annotation such that labels are in sequence no gaps in between them ANNOT_RELABEL_SEQUENCE = False # whether skip loading config from previous fun -FORCE_RELOAD = False +FORCE_RELOAD = True # even you have dumped data from previous time, all wil be recomputed -FORCE_RECOMP_DATA = False +FORCE_RECOMP_DATA = True -FEATURES_SET_COLOR = {'color': ('mean', 'std', 'eng')} -FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'eng')} +FEATURES_SET_COLOR = {'color': ('mean', 'std', 'energy')} +FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'energy')} FEATURES_SET_ALL = {'color': ('mean', 'std', 'median'), - 'tLM': ('mean', 'std', 'eng', 'mG')} + 'tLM': ('mean', 'std', 'energy', 'meanGrad')} FEATURES_SET_MIN = {'color': ('mean', 'std', 'energy'), 'tLM_s': ('mean', )} -FEATURES_SET_MIX = {'color': ('mean', 'std', 'eng', 'median'), +FEATURES_SET_MIX = {'color': ('mean', 'std', 'energy', 'median'), 'tLM': ('mean', 'std')} -TYPE_GMM = ('GMM', 'Group') # Default parameter configuration SEGM_PARAMS = { 'name': 'imgDisk', @@ -98,15 +98,14 @@ 'slic_regul': 0.2, # 'spacing': (12, 1, 1), 'features': FEATURES_SET_COLOR, - 'prob_type': 'GMM', + 'estim_model': 'GMM', 'pca_coef': None, 'gc_regul': 2.0, 'gc_edge_type': 'model', 'gc_use_trans': False, - 'estimate': TYPE_GMM[0], } -PATH_IMAGES = os.path.join(tl_data.update_path('images'), 'drosophila_disc') -# PATH_IMAGES = tl_data.update_path(os.path.join('images', 'langerhans_islets')) +PATH_IMAGES = os.path.join(tl_data.update_path('data_images'), 'drosophila_disc') +# PATH_IMAGES = tl_data.update_path(os.path.join('data_images', 'langerhans_islets')) PATH_RESULTS = tl_data.update_path('results', absolute=True) NAME_EXPERIMENT = 'experiment_segm-unSupervised' SEGM_PARAMS.update({ @@ -124,13 +123,13 @@ def arg_parse_params(params): :return {str: ...}: """ 
parser = argparse.ArgumentParser() - parser.add_argument('-list', '--path_train_list', type=str, required=False, + parser.add_argument('-l', '--path_train_list', type=str, required=False, help='path to the list of image', default=params['path_train_list']) - parser.add_argument('-imgs', '--path_predict_imgs', type=str, + parser.add_argument('-i', '--path_predict_imgs', type=str, help='path to folder & name pattern with new image', required=False, default=params['path_predict_imgs']) - parser.add_argument('-out', '--path_out', type=str, required=False, + parser.add_argument('-o', '--path_out', type=str, required=False, help='path to the output directory', default=params['path_out']) parser.add_argument('-n', '--name', type=str, required=False, @@ -148,6 +147,9 @@ help='number of processes in parallel') parser.add_argument('--visual', required=False, action='store_true', help='export debug visualisations', default=False) + parser.add_argument('--unique', required=False, action='store_true', + help='each experiment has a unique stamp', + default=EACH_UNIQUE_EXPERIMENT) args = vars(parser.parse_args()) logging.info('ARG PARAMETERS: \n %s', repr(args)) for k in (k for k in args if 'path' in k): @@ -157,7 +159,7 @@ assert os.path.exists(p), 'missing: (%s) "%s"' % (k, p) # args['visual'] = bool(args['visual']) # if the config path is set load the it otherwise use default - if os.path.isfile(args['path_config']): + if os.path.isfile(args.get('path_config', '')): with open(args['path_config'], 'r') as fd: config = json.load(fd) params.update(config) @@ -174,7 +176,7 @@ def load_image(path_img, img_type=TYPES_LOAD_IMAGE[0]): """ path_img = tl_data.update_path(path_img) assert os.path.isfile(path_img), 'missing: "%s"' % path_img - if img_type == '2d_gray': + if img_type == '2d_split': img, _ = tl_data.load_img_double_band_split(path_img) assert img.ndim == 2, 'image dims: %s' % repr(img.shape) # img = np.rollaxis(np.tile(img, (3, 1, 1)), 0, 3) @@ -184,8 +186,10 @@ img, _ = tl_data.load_image_2d(path_img) # if img.max() > 1: # img = (img / 255.) 
- elif img_type == 'segm': + elif img_type == '2d_segm': img, _ = tl_data.load_image_2d(path_img) + if img.ndim == 3: + img = img[:, :, 0] if ANNOT_RELABEL_SEQUENCE: img, _, _ = segmentation.relabel_sequential(img) else: @@ -204,15 +208,13 @@ def load_model(path_model): with open(path_model, 'rb') as f: dict_data = pickle.load(f) # npz_file = np.load(path_model) - scaler = dict_data['scaler'] - pca = dict_data['pca'] model = dict_data['model'] params = dict_data['params'] feature_names = dict_data['feature_names'] - return scaler, pca, model, params, feature_names + return model, params, feature_names -def save_model(path_model, scaler, pca, model, params=None, feature_names=None): +def save_model(path_model, model, params=None, feature_names=None): """ save model on specific destination :param str path_model: @@ -225,7 +227,7 @@ def save_model(path_model, scaler, pca, model, params=None, feature_names=None): logging.info('save (dump) model to "%s"', path_model) # np.savez_compressed(path_model, scaler=scaler, pca=pca, # model=model, params=params, feature_names=feature_names) - dict_data = dict(scaler=scaler, pca=pca, model=model, params=params, + dict_data = dict(model=model, params=params, feature_names=feature_names) with open(path_model, 'wb') as f: pickle.dump(dict_data, f) @@ -265,7 +267,7 @@ def export_visual(idx_name, img, segm, dict_debug_imgs=None, path_out=None, path_visu=None): """ export visualisations - :param (int, str) idx_name: + :param str idx_name: :param ndarray img: input image :param ndarray segm: resulting segmentation :param dict_debug_imgs: dictionary with debug images @@ -273,6 +275,9 @@ def export_visual(idx_name, img, segm, dict_debug_imgs=None, :param str path_visu: path to dir with debug images """ logging.info('export results and visualization...') + if set(np.unique(segm)) <= set([0, 1]): + segm *= 255 + path_img = os.path.join(path_out, str(idx_name) + '.png') logging.debug('exporting segmentation: %s', path_img) im_seg = Image.fromarray(segm.astype(np.uint8)) @@ -295,7 +300,8 @@ def export_visual(idx_name, img, segm, dict_debug_imgs=None, plt.close(fig) -def segment_image_independent(img_idx_path, params, path_out, path_visu=None): +def segment_image_independent(img_idx_path, params, path_out, path_visu=None, + show_debug_imgs=SHOW_DEBUG_IMAGES): """ segment image indecently (estimate model just for this model) :param (int, str) img_idx_path: @@ -312,12 +318,12 @@ def segment_image_independent(img_idx_path, params, path_out, path_visu=None): path_img = os.path.join(params['path_exp'], FOLDER_IMAGE, idx_name + '.png') tl_data.io_imsave(path_img, img.astype(np.uint8)) - dict_debug_imgs = dict() if SHOW_DEBUG_IMAGES else None + dict_debug_imgs = dict() if show_debug_imgs else None try: segm = seg_pipe.pipe_color2d_slic_features_gmm_graphcut( img, nb_classes=params['nb_classes'], clr_space=params['clr_space'], sp_size=params['slic_size'], sp_regul=params['slic_regul'], - dict_features=params['features'], proba_type=params['prob_type'], + dict_features=params['features'], estim_model=params['estim_model'], pca_coef=params['pca_coef'], gc_regul=params['gc_regul'], gc_edge_type=params['gc_edge_type'], dict_debug_imgs=dict_debug_imgs) @@ -325,14 +331,18 @@ def segment_image_independent(img_idx_path, params, path_out, path_visu=None): logging.error(traceback.format_exc()) segm = np.zeros(img.shape[:2]) + boundary_size = int(np.sqrt(np.prod(segm.shape)) * 0.01) + segm = seg_lbs.assume_bg_on_boundary(segm, bg_label=0, + boundary_size=boundary_size) + 
export_visual(idx_name, img, segm, dict_debug_imgs, path_out, path_visu) # gc.collect(), time.sleep(1) return idx_name, segm -def segment_image_model(imgs_idx_path, params, scaler, pca, model, path_out=None, - path_visu=None): +def segment_image_model(imgs_idx_path, params, model, path_out=None, + path_visu=None, show_debug_imgs=SHOW_DEBUG_IMAGES): """ segment image with already estimated model :param (int, str) imgs_idx_path: @@ -342,6 +352,7 @@ def segment_image_model(imgs_idx_path, params, scaler, pca, model, path_out=None :param obj model: :param str path_out: path to dir with segmentation :param str path_visu: path to dir with debug images + :param bool show_debug_imgs: whether show debug images :return (str, ndarray): """ idx, path_img = parse_imgs_idx_path(imgs_idx_path) @@ -352,11 +363,11 @@ def segment_image_model(imgs_idx_path, params, scaler, pca, model, path_out=None path_img = os.path.join(params['path_exp'], FOLDER_IMAGE, idx_name + '.png') tl_data.io_imsave(path_img, img.astype(np.uint8)) - dict_debug_imgs = dict() if SHOW_DEBUG_IMAGES else None + dict_debug_imgs = dict() if show_debug_imgs else None try: segm = seg_pipe.segment_color2d_slic_features_model_graphcut( - img, scaler, pca, model, clr_space=params['clr_space'], + img, model, clr_space=params['clr_space'], sp_size=params['slic_size'], sp_regul=params['slic_regul'], dict_features=params['features'], gc_regul=params['gc_regul'], gc_edge_type=params['gc_edge_type'], @@ -365,6 +376,10 @@ def segment_image_model(imgs_idx_path, params, scaler, pca, model, path_out=None logging.error(traceback.format_exc()) segm = np.zeros(img.shape[:2]) + boundary_size = int(np.sqrt(np.prod(segm.shape)) * 0.01) + segm = seg_lbs.assume_bg_on_boundary(segm, bg_label=0, + boundary_size=boundary_size) + export_visual(idx_name, img, segm, dict_debug_imgs, path_out, path_visu) # gc.collect(), time.sleep(1) @@ -393,21 +408,25 @@ def compare_segms_metric_ars(dict_segm_a, dict_segm_b, suffix=''): return df_ars -def experiment_single_gmm(params, paths_img, path_out, path_visu): +def experiment_single_gmm(params, paths_img, path_out, path_visu, + show_debug_imgs=SHOW_DEBUG_IMAGES): imgs_idx_path = list(zip([None] * len(paths_img), paths_img)) logging.info('Perform image segmentation as single image in each time') - dict_segms_gmm = {} - wrapper_segment = partial(segment_image_independent, params=params, - path_out=path_out, path_visu=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, imgs_idx_path, + _wrapper_segment = partial(segment_image_independent, params=params, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, imgs_idx_path, nb_jobs=params['nb_jobs'], desc='experiment single GMM') - for name, segm in iterate: - dict_segms_gmm[name] = segm + # dict_segms_gmm = {} + # for name, segm in iterate: + # dict_segms_gmm[name] = segm + dict_segms_gmm = dict(iterate) return dict_segms_gmm -def experiment_group_gmm(params, paths_img, path_out, path_visu): +def experiment_group_gmm(params, paths_img, path_out, path_visu, + show_debug_imgs=SHOW_DEBUG_IMAGES): logging.info('load all images') list_images = [load_image(path_img, params['img_type']) for path_img in paths_img] @@ -415,28 +434,45 @@ def experiment_group_gmm(params, paths_img, path_out, path_visu): logging.info('Estimate image segmentation from whole sequence of images') params['path_model'] = os.path.join(params['path_exp'], NAME_DUMP_MODEL) if os.path.isfile(params['path_model']) and not 
FORCE_RECOMP_DATA: - scaler, pca, model, _, _ = load_model(params['path_model']) + model, _, _ = load_model(params['path_model']) else: - scaler, pca, model = seg_pipe.estim_model_classes_group( + model, _ = seg_pipe.estim_model_classes_group( list_images, nb_classes=params['nb_classes'], clr_space=params['clr_space'], sp_size=params['slic_size'], sp_regul=params['slic_regul'], dict_features=params['features'], - proba_type=params['prob_type'], pca_coef=params['pca_coef']) - save_model(params['path_model'], scaler, pca, model) + proba_type=params['estim_model'], pca_coef=params['pca_coef']) + save_model(params['path_model'], model) logging.info('Perform image segmentation from group model') - dict_segms_group = {} - wrapper_segment = partial(segment_image_model, params=params, - scaler=scaler, pca=pca, model=model, - path_out=path_out, path_visu=path_visu) - iterate = tl_expt.WrapExecuteSequence(wrapper_segment, imgs_idx_path, + _wrapper_segment = partial(segment_image_model, params=params, model=model, + path_out=path_out, path_visu=path_visu, + show_debug_imgs=show_debug_imgs) + iterate = tl_expt.WrapExecuteSequence(_wrapper_segment, imgs_idx_path, nb_jobs=params['nb_jobs'], desc='experiment group GMM') - for name, segm in iterate: - dict_segms_group[name] = segm + # dict_segms_group = {} + # for name, segm in iterate: + # dict_segms_group[name] = segm + dict_segms_group = dict(iterate) return dict_segms_group +def load_path_images(params): + if os.path.isfile(params.get('path_train_list', '')): + logging.info('loading images from CSV: %s', params['path_train_list']) + df_paths = pd.read_csv(params['path_train_list'], index_col=0) + paths_img = df_paths['path_image'].tolist() + elif 'path_predict_imgs' in params: + logging.info('loading images from path: %s', params['path_predict_imgs']) + paths_img = glob.glob(params['path_predict_imgs']) + if len(paths_img) == 0: + logging.warning('no images found on given path...') + else: + logging.warning('no images to load!') + paths_img = [] + return paths_img + + def main(params): """ the main body containgn two approches: 1) segment each image indecently @@ -447,42 +483,43 @@ def main(params): """ logging.getLogger().setLevel(logging.DEBUG) logging.info('running...') + show_debug_imgs = params.get('visual', False) or SHOW_DEBUG_IMAGES reload_dir_config = (os.path.isfile(params['path_config']) or FORCE_RELOAD) params = tl_expt.create_experiment_folder(params, dir_name=NAME_EXPERIMENT, - stamp_unique=EACH_UNIQUE_EXPERIMENT, + stamp_unique=params.get('unique', EACH_UNIQUE_EXPERIMENT), skip_load=reload_dir_config) tl_expt.set_experiment_logger(params['path_exp']) logging.info(tl_expt.string_dict(params, desc='PARAMETERS')) tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_BASE) - if params['visual']: + if show_debug_imgs: tl_expt.create_subfolders(params['path_exp'], LIST_FOLDERS_DEBUG) - assert os.path.isfile(params['path_train_list']), \ - 'missing %s' % params['path_train_list'] - dict_segms_gmm, dict_segms_group = {}, {} - df_paths = pd.read_csv(params['path_train_list'], index_col=0) - paths_img = df_paths['path_image'].tolist() + paths_img = load_path_images(params) + assert len(paths_img) > 0, 'missing images' - def path_expt(n): + def _path_expt(n): return os.path.join(params['path_exp'], n) # Segment as single model per image dict_segms_gmm = experiment_single_gmm(params, paths_img, - path_expt(FOLDER_SEGM_GMM), - path_expt(FOLDER_SEGM_GMM_VISU)) + _path_expt(FOLDER_SEGM_GMM), + _path_expt(FOLDER_SEGM_GMM_VISU), + 
show_debug_imgs=show_debug_imgs) gc.collect() time.sleep(1) + # Segment as model ober set of images dict_segms_group = experiment_group_gmm(params, paths_img, - path_expt(FOLDER_SEGM_GROUP), - path_expt(FOLDER_SEGM_GROUP_VISU)) + _path_expt(FOLDER_SEGM_GROUP), + _path_expt(FOLDER_SEGM_GROUP_VISU), + show_debug_imgs=show_debug_imgs) gc.collect() time.sleep(1) df_ars = compare_segms_metric_ars(dict_segms_gmm, dict_segms_group, suffix='_gmm-group') - df_ars.to_csv(path_expt(NAME_CSV_ARS_CORES)) + df_ars.to_csv(_path_expt(NAME_CSV_ARS_CORES)) logging.info(df_ars.describe()) logging.info('DONE') diff --git a/experiments_segmentation/sample_config.json b/experiments_segmentation/sample_config.json new file mode 100755 index 00000000..1a84c051 --- /dev/null +++ b/experiments_segmentation/sample_config.json @@ -0,0 +1,13 @@ +{ + "clr_space": "hsv", + "slic_size": 35, + "slic_regul": 0.2, + "features": {"color": ["mean", "std", "eng"]}, + "pca_coef": null, + "estim_model": "GMM", + "balance": "unique", + "classif": "RandForest", + "nb_classif_search": 250, + "gc_edge_type": "model", + "gc_regul": 3.0 +} \ No newline at end of file diff --git a/handling_annotations/run_image_color_quantization.py b/handling_annotations/run_image_color_quantization.py index f4296f91..bc8192aa 100644 --- a/handling_annotations/run_image_color_quantization.py +++ b/handling_annotations/run_image_color_quantization.py @@ -7,7 +7,7 @@ SAMPLE run: >> python run_image_color_quantization.py \ - -imgs "images/drosophila_ovary_slice/segm_rgb/*.png" \ + -imgs "data_images/drosophila_ovary_slice/segm_rgb/*.png" \ -m position Copyright (C) 2014-2016 Jiri Borovec @@ -29,7 +29,7 @@ import imsegm.utils.experiments as tl_expt import imsegm.annotation as seg_annot -PATH_IMAGES = os.path.join('images', 'drosophila_ovary_slice', 'segm_rgb', '*.png') +PATH_IMAGES = os.path.join('data_images', 'drosophila_ovary_slice', 'segm_rgb', '*.png') NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) THRESHOLD_INVALID_PIXELS = 5e-3 @@ -61,8 +61,7 @@ def parse_arg_params(): def see_images_color_info(path_images, px_thr=THRESHOLD_INVALID_PIXELS): """ look to the folder on all images and estimate most frequent colours - :param path_dir: str - :param im_pattern: str + :param [str] path_images: list of images :param px_th: float, percentage of nb clr pixels to be assumed as important :return {}: """ @@ -83,7 +82,9 @@ def perform_quantize_image(path_image, list_colors, method='color'): """ logging.debug('quantize img: "%s"', path_image) im = tl_data.io_imread(path_image) - assert im.ndim == 3, 'not valid color image of dims %s' % repr(im.shape) + if not im.ndim == 3: + logging.warning('not valid color image of dims %s', repr(im.shape)) + return im = im[:, :, :3] # im = io.imread(path_image)[:, :, :3] if method == 'color': @@ -92,6 +93,7 @@ def perform_quantize_image(path_image, list_colors, method='color'): im_q = seg_annot.quantize_image_nearest_pixel(im, list_colors) else: logging.error('not implemented method "%s"', method) + im_q = np.zeros(im.shape) path_image = os.path.splitext(path_image)[0] + '.png' tl_data.io_imsave(path_image, im_q.astype(np.uint8)) # io.imsave(path_image, im_q) @@ -118,10 +120,11 @@ def quantize_folder_images(path_images, list_colors=None, method='color', dict_colors = see_images_color_info(path_images, px_thr=px_threshold) list_colors = [c for c in dict_colors] - wrapper_quantize_img = partial(perform_quantize_image, - method=method, list_colors=list_colors) - iterate = tl_expt.WrapExecuteSequence(wrapper_quantize_img, 
path_imgs, - nb_jobs=nb_jobs, desc='quantize images') + _wrapper_quantize_img = partial(perform_quantize_image, + method=method, list_colors=list_colors) + iterate = tl_expt.WrapExecuteSequence(_wrapper_quantize_img, path_imgs, + nb_jobs=nb_jobs, + desc='quantize images') list(iterate) diff --git a/handling_annotations/run_image_convert_label_color.py b/handling_annotations/run_image_convert_label_color.py index 77ef24b8..1631c538 100644 --- a/handling_annotations/run_image_convert_label_color.py +++ b/handling_annotations/run_image_convert_label_color.py @@ -3,9 +3,9 @@ SAMPLE run: >> python run_image_convert_label_color.py \ - -imgs "images/drosophila_ovary_slice/segm/*.png" \ - -out images/drosophila_ovary_slice/segm_rgb \ - -clrs images/drosophila_ovary_slice/segm_rgb/dict_label-color.json + -imgs "data_images/drosophila_ovary_slice/segm/*.png" \ + -out data_images/drosophila_ovary_slice/segm_rgb \ + -clrs data_images/drosophila_ovary_slice/segm_rgb/dict_label-color.json Copyright (C) 2014-2016 Jiri Borovec """ @@ -27,8 +27,8 @@ import imsegm.utils.experiments as tl_expt import imsegm.annotation as seg_annot -PATH_INPUT = os.path.join('images', 'drosophila_ovary_slice', 'segm', '*.png') -PATH_OUTPUT = os.path.join('images', 'drosophila_ovary_slice', 'segm_rgb') +PATH_INPUT = os.path.join('data_images', 'drosophila_ovary_slice', 'segm', '*.png') +PATH_OUTPUT = os.path.join('data_images', 'drosophila_ovary_slice', 'segm_rgb') NAME_JSON_DICT = 'dictionary_label-color.json' NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) @@ -143,10 +143,11 @@ def convert_folder_images(path_images, path_out, path_json=None, nb_jobs=1): dict_colors = load_dict_colours(path_json) logging.debug('loaded dictionary %s', repr(dict_colors)) - wrapper_img_convert = partial(perform_img_convert, path_out=path_out, - dict_colors=dict_colors) - iterate = tl_expt.WrapExecuteSequence(wrapper_img_convert, path_imgs, - nb_jobs=nb_jobs, desc='convert images') + _wrapper_img_convert = partial(perform_img_convert, path_out=path_out, + dict_colors=dict_colors) + iterate = tl_expt.WrapExecuteSequence(_wrapper_img_convert, path_imgs, + nb_jobs=nb_jobs, + desc='convert images') list(iterate) diff --git a/handling_annotations/run_overlap_images_segms.py b/handling_annotations/run_overlap_images_segms.py index 9b6fd4bb..88b6a2d5 100644 --- a/handling_annotations/run_overlap_images_segms.py +++ b/handling_annotations/run_overlap_images_segms.py @@ -4,8 +4,8 @@ SAMPLE run: >> python run_overlap_images_segms.py \ - -imgs "images/drosophila_ovary_slice/image/*.jpg" \ - -segs images/drosophila_ovary_slice/segm \ + -imgs "data_images/drosophila_ovary_slice/image/*.jpg" \ + -segs data_images/drosophila_ovary_slice/segm \ -out results/overlap_ovary_segment Copyright (C) 2014-2016 Jiri Borovec @@ -48,7 +48,6 @@ def parse_arg_params(): """ create simple arg parser with default values (input, output, dataset) - :param dict_params: {str: ...} :return obj: object argparse """ parser = argparse.ArgumentParser() diff --git a/handling_annotations/run_segm_annot_inpaint.py b/handling_annotations/run_segm_annot_inpaint.py index e0337154..57183d2e 100644 --- a/handling_annotations/run_segm_annot_inpaint.py +++ b/handling_annotations/run_segm_annot_inpaint.py @@ -3,7 +3,7 @@ SAMPLE run: >> python run_image_annot_inpaint.py \ - -imgs "images/drosophila_ovary_slice/segm/*.png" \ + -imgs "data_images/drosophila_ovary_slice/segm/*.png" \ --label 4 --nb_jobs 2 Copyright (C) 2014-2016 Jiri Borovec @@ -25,7 +25,7 @@ import imsegm.utils.experiments as 
tl_expt import imsegm.annotation as seg_annot -PATH_IMAGES = os.path.join('images', 'drosophila_ovary_slice', 'segm', '*.png') +PATH_IMAGES = os.path.join('data_images', 'drosophila_ovary_slice', 'segm', '*.png') NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) @@ -74,18 +74,18 @@ def perform_img_inpaint(path_img, labels): def quantize_folder_images(path_images, label, nb_jobs=1): """ perform single or multi thread image quantisation - :param path_dir: str, input directory - :param im_pattern: str, image pattern for loading - :param nb_jobs: int + :param [str] path_images: list of image paths + :param int nb_jobs: """ assert os.path.isdir(os.path.dirname(path_images)), \ 'input folder does not exist: %s' % os.path.dirname(path_images) path_imgs = sorted(glob.glob(path_images)) logging.info('found %i images', len(path_imgs)) - wrapper_img_inpaint = partial(perform_img_inpaint, labels=label) - iterate = tl_expt.WrapExecuteSequence(wrapper_img_inpaint, path_imgs, - nb_jobs=nb_jobs, desc='quantise images') + _wrapper_img_inpaint = partial(perform_img_inpaint, labels=label) + iterate = tl_expt.WrapExecuteSequence(_wrapper_img_inpaint, path_imgs, + nb_jobs=nb_jobs, + desc='quantise images') list(iterate) diff --git a/handling_annotations/run_segm_annot_relabel.py b/handling_annotations/run_segm_annot_relabel.py index c702ba44..78888ea5 100644 --- a/handling_annotations/run_segm_annot_relabel.py +++ b/handling_annotations/run_segm_annot_relabel.py @@ -3,7 +3,7 @@ SAMPLE run: >> python run_segm_annot_relabel.py \ - -imgs "images/drosophila_ovary_slice/center_levels/*.png" \ + -imgs "data_images/drosophila_ovary_slice/center_levels/*.png" \ -out results/relabel_center_levels \ --label_old 2 3 --label_new 1 1 --nb_jobs 2 @@ -25,7 +25,7 @@ import imsegm.utils.data_io as tl_data import imsegm.utils.experiments as tl_expt -PATH_IMAGES = os.path.join('images', 'drosophila_ovary_slice', 'center_levels', '*.png') +PATH_IMAGES = os.path.join('data_images', 'drosophila_ovary_slice', 'center_levels', '*.png') PATH_OUTPUT = os.path.join('results', 'relabel_center_levels') NB_THREADS = max(1, int(mproc.cpu_count() * 0.9)) @@ -106,10 +106,11 @@ def relabel_folder_images(path_images, path_out, labels_old, labels_new, path_imgs = sorted(glob.glob(path_images)) logging.info('found %i images', len(path_imgs)) - wrapper_img_relabel = partial(perform_image_relabel, path_out=path_out, - labels_old=labels_old, labels_new=labels_new) - iterate = tl_expt.WrapExecuteSequence(wrapper_img_relabel, path_imgs, - nb_jobs=nb_jobs, desc='relabel images') + _wrapper_img_relabel = partial(perform_image_relabel, path_out=path_out, + labels_old=labels_old, labels_new=labels_new) + iterate = tl_expt.WrapExecuteSequence(_wrapper_img_relabel, path_imgs, + nb_jobs=nb_jobs, + desc='relabel images') list(iterate) diff --git a/images/drosophila_disc/list_imaginal-disks.csv b/images/drosophila_disc/list_imaginal-disks.csv deleted file mode 100644 index 55eb5c45..00000000 --- a/images/drosophila_disc/list_imaginal-disks.csv +++ /dev/null @@ -1,11 +0,0 @@ -,path_image,path_annot -1,images/drosophila_disc/image/img_5.jpg,images/drosophila_disc/annot/img_5.png -2,images/drosophila_disc/image/img_6.jpg,images/drosophila_disc/annot/img_6.png -3,images/drosophila_disc/image/img_12.jpg,images/drosophila_disc/annot/img_12.png -4,images/drosophila_disc/image/img_14.jpg,images/drosophila_disc/annot/img_14.png -5,images/drosophila_disc/image/img_15.jpg,images/drosophila_disc/annot/img_15.png 
-6,images/drosophila_disc/image/img_19.jpg,images/drosophila_disc/annot/img_19.png -7,images/drosophila_disc/image/img_20.jpg,images/drosophila_disc/annot/img_20.png -8,images/drosophila_disc/image/img_24.jpg,images/drosophila_disc/annot/img_24.png -9,images/drosophila_disc/image/img_26.jpg,images/drosophila_disc/annot/img_26.png -10,images/drosophila_disc/image/img_43.jpg,images/drosophila_disc/annot/img_43.png diff --git a/images/drosophila_disc/list_imaginal-disks_short.csv b/images/drosophila_disc/list_imaginal-disks_short.csv deleted file mode 100644 index 19f60844..00000000 --- a/images/drosophila_disc/list_imaginal-disks_short.csv +++ /dev/null @@ -1,3 +0,0 @@ -,path_image,path_annot -1,images/drosophila_disc/image/img_6.jpg,images/drosophila_disc/annot/img_6.png -2,images/drosophila_disc/image/img_43.jpg,images/drosophila_disc/annot/img_43.png diff --git a/images/drosophila_ovary_slice/list_imgs-annot-struct.csv b/images/drosophila_ovary_slice/list_imgs-annot-struct.csv deleted file mode 100644 index b621b29b..00000000 --- a/images/drosophila_ovary_slice/list_imgs-annot-struct.csv +++ /dev/null @@ -1,11 +0,0 @@ -,path_image,path_annot -1,images/drosophila_ovary_slice/image/insitu4174.jpg,images/drosophila_ovary_slice/annot_struct/insitu4174.png -2,images/drosophila_ovary_slice/image/insitu4358.jpg,images/drosophila_ovary_slice/annot_struct/insitu4358.png -3,images/drosophila_ovary_slice/image/insitu7331.jpg,images/drosophila_ovary_slice/annot_struct/insitu7331.png -4,images/drosophila_ovary_slice/image/insitu7544.jpg,images/drosophila_ovary_slice/annot_struct/insitu7544.png -5,images/drosophila_ovary_slice/image/insitu7545.jpg,images/drosophila_ovary_slice/annot_struct/insitu7545.png -6,images/drosophila_ovary_slice/image/insitu4174.tif,images/drosophila_ovary_slice/annot_struct/insitu4174.png -7,images/drosophila_ovary_slice/image/insitu4358.tif,images/drosophila_ovary_slice/annot_struct/insitu4358.png -8,images/drosophila_ovary_slice/image/insitu7331.tif,images/drosophila_ovary_slice/annot_struct/insitu7331.png -9,images/drosophila_ovary_slice/image/insitu7544.tif,images/drosophila_ovary_slice/annot_struct/insitu7544.png -10,images/drosophila_ovary_slice/image/insitu7545.tif,images/drosophila_ovary_slice/annot_struct/insitu7545.png diff --git a/images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv b/images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv deleted file mode 100644 index d14ddd56..00000000 --- a/images/drosophila_ovary_slice/list_imgs-annot-struct_short.csv +++ /dev/null @@ -1,3 +0,0 @@ -,path_image,path_annot -1,images/drosophila_ovary_slice/image/insitu4174.jpg,images/drosophila_ovary_slice/annot_struct/insitu4174.png -2,images/drosophila_ovary_slice/image/insitu7545.tif,images/drosophila_ovary_slice/annot_struct/insitu7545.png diff --git a/images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv b/images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv deleted file mode 100644 index 4e0818fc..00000000 --- a/images/drosophila_ovary_slice/list_imgs-segm-center-levels.csv +++ /dev/null @@ -1,6 +0,0 @@ -,path_image,path_centers,path_annot,path_segm -1,images/drosophila_ovary_slice/image/insitu4174.tif,images/drosophila_ovary_slice/center_levels/insitu4174.png,images/drosophila_ovary_slice/annot_eggs/insitu4174.png,images/drosophila_ovary_slice/segm/insitu4174.png 
-2,images/drosophila_ovary_slice/image/insitu4358.tif,images/drosophila_ovary_slice/center_levels/insitu4358.png,images/drosophila_ovary_slice/annot_eggs/insitu4358.png,images/drosophila_ovary_slice/segm/insitu4358.png -3,images/drosophila_ovary_slice/image/insitu7331.tif,images/drosophila_ovary_slice/center_levels/insitu7331.png,images/drosophila_ovary_slice/annot_eggs/insitu7331.png,images/drosophila_ovary_slice/segm/insitu7331.png -4,images/drosophila_ovary_slice/image/insitu7544.tif,images/drosophila_ovary_slice/center_levels/insitu7544.png,images/drosophila_ovary_slice/annot_eggs/insitu7544.png,images/drosophila_ovary_slice/segm/insitu7544.png -5,images/drosophila_ovary_slice/image/insitu7545.tif,images/drosophila_ovary_slice/center_levels/insitu7545.png,images/drosophila_ovary_slice/annot_eggs/insitu7545.png,images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv b/images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv deleted file mode 100644 index 40cae6a5..00000000 --- a/images/drosophila_ovary_slice/list_imgs-segm-center-levels_short.csv +++ /dev/null @@ -1,3 +0,0 @@ -,path_image,path_centers,path_annot,path_segm -1,images/drosophila_ovary_slice/image/insitu4358.jpg,images/drosophila_ovary_slice/center_levels/insitu4358.png,images/drosophila_ovary_slice/annot_eggs/insitu4358.png,images/drosophila_ovary_slice/segm/insitu4358.png -2,images/drosophila_ovary_slice/image/insitu7545.tif,images/drosophila_ovary_slice/center_levels/insitu7545.png,images/drosophila_ovary_slice/annot_eggs/insitu7545.png,images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/drosophila_ovary_slice/list_imgs-segm-center-points.csv b/images/drosophila_ovary_slice/list_imgs-segm-center-points.csv deleted file mode 100644 index 109e15a2..00000000 --- a/images/drosophila_ovary_slice/list_imgs-segm-center-points.csv +++ /dev/null @@ -1,6 +0,0 @@ -,path_image,path_centers,path_annot,path_segm -1,images/drosophila_ovary_slice/image/insitu4174.jpg,images/drosophila_ovary_slice/center_levels/insitu4174.csv,images/drosophila_ovary_slice/annot_eggs/insitu4174.png,images/drosophila_ovary_slice/segm/insitu4174.png -2,images/drosophila_ovary_slice/image/insitu4358.jpg,images/drosophila_ovary_slice/center_levels/insitu4358.csv,images/drosophila_ovary_slice/annot_eggs/insitu4358.png,images/drosophila_ovary_slice/segm/insitu4358.png -3,images/drosophila_ovary_slice/image/insitu7331.jpg,images/drosophila_ovary_slice/center_levels/insitu7331.csv,images/drosophila_ovary_slice/annot_eggs/insitu7331.png,images/drosophila_ovary_slice/segm/insitu7331.png -4,images/drosophila_ovary_slice/image/insitu7544.jpg,images/drosophila_ovary_slice/center_levels/insitu7544.csv,images/drosophila_ovary_slice/annot_eggs/insitu7544.png,images/drosophila_ovary_slice/segm/insitu7544.png -5,images/drosophila_ovary_slice/image/insitu7545.jpg,images/drosophila_ovary_slice/center_levels/insitu7545.csv,images/drosophila_ovary_slice/annot_eggs/insitu7545.png,images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv b/images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv deleted file mode 100644 index 8fe908d8..00000000 --- a/images/drosophila_ovary_slice/list_imgs-segm-center-points_short.csv +++ /dev/null @@ -1,3 +0,0 @@ -,path_image,path_centers,path_annot,path_segm 
-1,images/drosophila_ovary_slice/image/insitu4358.jpg,images/drosophila_ovary_slice/center_levels/insitu4358.csv,images/drosophila_ovary_slice/annot_eggs/insitu4358.png,images/drosophila_ovary_slice/segm/insitu4358.png -2,images/drosophila_ovary_slice/image/insitu7545.tif,images/drosophila_ovary_slice/center_levels/insitu7545.csv,images/drosophila_ovary_slice/annot_eggs/insitu7545.png,images/drosophila_ovary_slice/segm/insitu7545.png diff --git a/images/langerhans_islets/list_lang-isl_imgs-annot.csv b/images/langerhans_islets/list_lang-isl_imgs-annot.csv deleted file mode 100644 index f100e541..00000000 --- a/images/langerhans_islets/list_lang-isl_imgs-annot.csv +++ /dev/null @@ -1,4 +0,0 @@ -,path_image,path_annot -1,images/langerhans_islets/image/gtExoIsl_13.jpg,images/langerhans_islets/annot/gtExoIsl_13.png -2,images/langerhans_islets/image/gtExoIsl_21.jpg,images/langerhans_islets/annot/gtExoIsl_21.png -3,images/langerhans_islets/image/gtExoIsl_27.jpg,images/langerhans_islets/annot/gtExoIsl_27.png diff --git a/imsegm/annotation.py b/imsegm/annotation.py index 589d1375..eaed9755 100755 --- a/imsegm/annotation.py +++ b/imsegm/annotation.py @@ -1,17 +1,17 @@ """ Framework for handling annotations -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ -import os, sys +import os import logging import tqdm import numpy as np import pandas as pd from PIL import Image -from skimage import io +# from skimage import io from scipy import interpolate # sys.path += [os.path.abspath('.'), os.path.abspath('..')] # Add path to root @@ -321,7 +321,7 @@ def load_info_group_by_slices(path_txt, stages, pos_columns=COLUMNS_POSITION, :param [str] pos_columns: :return: DF - >>> path_txt = os.path.join(tl_data.update_path('images'), + >>> path_txt = os.path.join(tl_data.update_path('data_images'), ... 'drosophila_ovary_slice', 'info_ovary_images.txt') >>> load_info_group_by_slices(path_txt, [4]) # doctest: +NORMALIZE_WHITESPACE ant_x ant_y lat_x lat_y post_x post_y diff --git a/imsegm/classification.py b/imsegm/classification.py index c70f8a12..f446ea01 100755 --- a/imsegm/classification.py +++ b/imsegm/classification.py @@ -2,7 +2,7 @@ Supporting file to create and set parameters for scikit-learn classifiers and some prepossessing functions that support classification -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os @@ -11,8 +11,7 @@ import random import collections import traceback -# import gc -# import time +import itertools # import multiprocessing as mproc import numpy as np @@ -261,8 +260,8 @@ def compute_classif_metrics(y_true, y_pred, metric_averages=METRIC_AVERAGES): :return {str: float}: >>> np.random.seed(0) - >>> y_true = np.random.randint(0, 3, 25) - >>> y_pred = np.random.randint(0, 2, 25) + >>> y_true = np.random.randint(0, 3, 25) * 2 + >>> y_pred = np.random.randint(0, 2, 25) * 2 >>> d = compute_classif_metrics(y_true, y_true) >>> d['accuracy'] # doctest: +ELLIPSIS 1.0 @@ -273,6 +272,9 @@ def compute_classif_metrics(y_true, y_pred, metric_averages=METRIC_AVERAGES): 0.32... 
     >>> d['confusion']
     [[3, 7, 0], [5, 5, 0], [1, 4, 0]]
+    >>> d = compute_classif_metrics(y_pred, y_pred)
+    >>> d['accuracy']  # doctest: +ELLIPSIS
+    1.0
     """
     y_true = np.array(y_true)
     y_pred = np.array(y_pred)
@@ -282,18 +284,13 @@
     logging.debug('unique lbs true: %s, predict %s',
                   repr(np.unique(y_true)), repr(np.unique(y_pred)))
-    uq_y_true = np.unique(y_true)
-    # in case the are just two classes relabel them as [0, 1] only
-    # solving sklearn error:
+    uq_labels = np.unique(np.hstack((y_true, y_pred)))
+    # in case there are just two classes, relabel them as [0, 1], sklearn error:
     # "ValueError: pos_label=1 is not a valid label: array([ 0, 255])"
-    if np.array_equal(sorted(uq_y_true), sorted(np.unique(y_pred))) \
-            and len(uq_y_true) <= 2:
-        logging.debug('relabeling original %s to [0, 1]', repr(uq_y_true))
-        lut = np.zeros(uq_y_true.max() + 1)
-        if len(uq_y_true) == 2:
-            lut[uq_y_true[1]] = 1
-        y_true = lut[y_true]
-        y_pred = lut[y_pred]
+    if len(uq_labels) <= 2:
+        # NOTE, this is temporary just for purposes of computing statistic
+        y_true = relabel_sequential(y_true, uq_labels)
+        y_pred = relabel_sequential(y_pred, uq_labels)

     # http://scikit-learn.org/stable/modules/generated/sklearn.metrics.precision_recall_fscore_support.html
     EVAL_STR = 'EVALUATION: {:<2} PRE: {:.3f} REC: {:.3f} F1: {:.3f} S: {:>6}'
@@ -538,6 +535,27 @@
         f.write('\n'.join(rows))


+def relabel_sequential(labels, uq_lbs=None):
+    """ relabel a vector sequentially, starting from 0
+
+    :param [] labels:
+    :return []:
+
+    >>> relabel_sequential([0, 0, 0, 5, 5, 5, 0, 5])
+    [0, 0, 0, 1, 1, 1, 0, 1]
+    """
+    labels = np.asarray(labels)
+    if uq_lbs is None:
+        uq_lbs = np.unique(labels)
+    lut = np.zeros(np.max(uq_lbs) + 1)
+    logging.debug('relabeling original %s to %s', repr(uq_lbs),
+                  range(len(uq_lbs)))
+    for i, lb in enumerate(uq_lbs):
+        lut[lb] = i
+    labels_new = lut[labels].astype(labels.dtype).tolist()
+    return labels_new
+
+
 def create_classif_train_export(clf_name, features, labels, cross_val=10,
                                 nb_search_iter=1, search_type='random',
                                 nb_jobs=NB_JOBS_CLASSIF_SEARCH,
@@ -600,7 +618,9 @@
     clf_search = create_classif_search(clf_name, clf_pipeline, nb_labels,
                                        search_type, cross_val,
                                        nb_search_iter, nb_jobs)
-    clf_search.fit(features, labels)
+
+    # NOTE, this is temporary just for purposes of computing statistic
+    clf_search.fit(features, relabel_sequential(labels))
     logging.info('Best score: %s', repr(clf_search.best_score_))

     clf_pipeline = clf_search.best_estimator_
@@ -609,9 +629,9 @@
     logging.info('Best parameters set: \n %s', repr(best_parameters))
     if path_out is not None and os.path.isdir(path_out):
         export_results_clf_search(path_out, clf_name, clf_search)
-    else:
-        # while there is no search, just train the best one
-        clf_pipeline.fit(features, labels)
+
+    # while there is no search, just train the best one
+    clf_pipeline.fit(features, labels)

     if path_out is not None and os.path.isdir(path_out):
         path_classif = save_classifier(path_out, clf_pipeline, clf_name,
@@ -673,7 +693,11 @@
     df_scoring = pd.DataFrame()
     for scoring in scorings:
         try:
+            uq_labels = np.unique(labels)
             # ValueError: pos_label=1 is not a valid label: array([0, 2])
+            if len(uq_labels) <= 2:
+                # NOTE, this is temporary just
for purposes of computing stat. + labels = relabel_sequential(labels, uq_labels) scores = model_selection.cross_val_score(classif, features, labels, cv=cross_val, scoring=scoring) @@ -682,7 +706,6 @@ def eval_classif_cross_val_scores(clf_name, classif, features, labels, df_scoring[scoring] = scores except Exception: logging.error(traceback.format_exc()) - df_stat = df_scoring.describe() if path_out is not None: assert os.path.exists(path_out), 'missing: "%s"' % path_out @@ -690,10 +713,16 @@ def eval_classif_cross_val_scores(clf_name, classif, features, labels, path_csv = os.path.join(path_out, name_csv) df_scoring.to_csv(path_csv) - name_csv = NAME_CSV_CLASSIF_CV_SCORES.format(clf_name, 'statistic') - path_csv = os.path.join(path_out, name_csv) - df_stat.to_csv(path_csv) - logging.info('cross_val scores: \n %s', repr(df_stat)) + if len(df_scoring) > 1: + df_stat = df_scoring.describe() + logging.info('cross_val scores: \n %s', repr(df_stat)) + if path_out is not None: + assert os.path.exists(path_out), 'missing: "%s"' % path_out + name_csv = NAME_CSV_CLASSIF_CV_SCORES.format(clf_name, 'statistic') + path_csv = os.path.join(path_out, name_csv) + df_stat.to_csv(path_csv) + else: + logging.warning('no statistic collected') return df_scoring @@ -709,6 +738,7 @@ def eval_classif_cross_val_roc(clf_name, classif, features, labels, :param [int] labels: annotation for samples :param object cross_val: :param str path_out: path for exporting statistic + :param int nb_thr: number of thresholds :return: >>> np.random.seed(0) @@ -1136,7 +1166,7 @@ class HoldOut: Parameters ---------- - n : total number of samples + nb : total number of samples hold_idx : int index where the test starts random_state : Seed for the random number generator. @@ -1328,12 +1358,10 @@ def __iter__(self): """ for i in range(0, len(self.set_sizes), self.nb_hold_out): test = self.sets_order[i:i + self.nb_hold_out] - inds_train, inds_test = [], [] - for i in self.sets_order: - if i in test: - inds_test += self.set_indexes[i] - else: - inds_train += self.set_indexes[i] + inds_train = list(itertools.chain.from_iterable( + self.set_indexes[i] for i in self.sets_order if i not in test)) + inds_test = list(itertools.chain.from_iterable( + self.set_indexes[i] for i in self.sets_order if i in test)) yield inds_train, inds_test def __len__(self): diff --git a/imsegm/descriptors.py b/imsegm/descriptors.py index 9220d676..f175744b 100755 --- a/imsegm/descriptors.py +++ b/imsegm/descriptors.py @@ -5,7 +5,7 @@ * Ray features * label histogram -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import logging @@ -28,13 +28,14 @@ logging.warning('descriptors: using pure python libraries') USE_CYTHON = False +NAMES_FEATURE_FLAGS = ('mean', 'std', 'energy', 'median', 'meanGrad') DEFAULT_FILTERS_SIGMAS = (np.sqrt(2), 2, 2 * np.sqrt(2), 4) SHORT_FILTERS_SIGMAS = (np.sqrt(2), 2, 4) -FEATURES_SET_ALL = {'color': ('mean', 'std', 'eng', 'median'), - 'tLM': ('mean', 'std', 'eng', 'mG')} -FEATURES_SET_COLOR = {'color': ('mean', 'std', 'eng')} -FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'eng')} -FEATURES_SET_TEXTURE_SHORT = {'tLM_s': ('mean', 'std', 'eng')} +FEATURES_SET_ALL = {'color': ('mean', 'std', 'energy', 'median', 'meanGrad'), + 'tLM': ('mean', 'std', 'energy', 'median', 'meanGrad')} +FEATURES_SET_COLOR = {'color': ('mean', 'std', 'energy')} +FEATURES_SET_TEXTURE = {'tLM': ('mean', 'std', 'energy')} +FEATURES_SET_TEXTURE_SHORT = {'tLM_s': ('mean', 'std', 'energy')} HIST_CIRCLE_DIAGONALS = (10, 20, 30, 40, 50) # 
maxila reposnse is bounded by fix number to preven overflowing MAX_SIGNAL_RESPONSE = 1.e6 @@ -134,6 +135,21 @@ def _check_color_image(image): return True +def _check_unrecognised_feature_group(dict_feature_flags): + unknown = [k for k in dict_feature_flags + if k not in ('color', 'tLM', 'tLM_s')] + if len(unknown) > 0: + logging.warning('unrecognised following feature groups: %s', + repr(unknown)) + + +def _check_unrecognised_feature_names(list_feature_flags): + unknown = [k for k in list_feature_flags if k not in NAMES_FEATURE_FLAGS] + if len(unknown) > 0: + logging.warning('unrecognised following feature names: %s', + repr(unknown)) + + def cython_img2d_color_mean(im, seg): """ wrapper for fast implementation of colour features @@ -609,8 +625,7 @@ def numpy_img3d_gray_median(im, seg): def compute_image3d_gray_statistic(image, segm, - list_feature_flags=('mean', 'std', 'eng', - 'median', 'mG'), + list_feature_flags=NAMES_FEATURE_FLAGS, ch_name='gray'): """ compute complete descriptors / statistic on gray (3D) images @@ -669,7 +684,7 @@ def compute_image3d_gray_statistic(image, segm, features.append(std) names += ['%s_std' % ch_name] # ENERGY - if 'eng' in list_feature_flags: + if 'energy' in list_feature_flags: if USE_CYTHON: energy = cython_img3d_gray_energy(image, segm) else: @@ -682,7 +697,7 @@ def compute_image3d_gray_statistic(image, segm, features.append(median) names += ['%s_median' % ch_name] # mean Gradient - if 'mG' in list_feature_flags: + if 'meanGrad' in list_feature_flags: grad_matrix = np.zeros_like(image) for i in range(image.shape[0]): grad_matrix[i, :, :] = np.sum(np.gradient(image[i]), axis=0) @@ -692,6 +707,7 @@ def compute_image3d_gray_statistic(image, segm, grad = numpy_img3d_gray_mean(grad_matrix, segm) features.append(grad) names += ['%s_meanGrad' % ch_name] + _check_unrecognised_feature_names(list_feature_flags) features = np.concatenate(tuple([fts] for fts in features), axis=0) features = np.nan_to_num(features).T # normalise +/- zeros as set all as positive @@ -702,8 +718,7 @@ def compute_image3d_gray_statistic(image, segm, def compute_image2d_color_statistic(image, segm, - list_feature_flags=('mean', 'std', 'eng', - 'median'), + list_feature_flags=NAMES_FEATURE_FLAGS, ch_name='color'): """ compute complete descriptors / statistic on color (2D) images @@ -718,18 +733,17 @@ def compute_image2d_color_statistic(image, segm, >>> image[:, 4:9, 2] = 2 >>> segm = np.array([[0, 0, 0, 0, 0, 1, 1, 1, 1, 1], ... [0, 0, 0, 0, 0, 1, 1, 1, 1, 1]]) - >>> features, names = compute_image2d_color_statistic(image, segm, - ... 
['mean', 'std', 'eng', 'median']) + >>> features, names = compute_image2d_color_statistic(image, segm) >>> names # doctest: +NORMALIZE_WHITESPACE ['color-ch1_mean', 'color-ch2_mean', 'color-ch3_mean', 'color-ch1_std', 'color-ch2_std', 'color-ch3_std', 'color-ch1_energy', 'color-ch2_energy', 'color-ch3_energy', - 'color-ch1_median', 'color-ch2_median', 'color-ch3_median'] + 'color-ch1_median', 'color-ch2_median', 'color-ch3_median', + 'color-ch1_meanGrad', 'color-ch2_meanGrad', 'color-ch3_meanGrad'] >>> features.shape - (2, 12) + (2, 15) >>> np.round(features, 1).tolist() # doctest: +NORMALIZE_WHITESPACE - [[0.6, 1.2, 0.4, 0.5, 1.5, 0.8, 0.6, 3.6, 0.8, 1.0, 0.0, 0.0], - [0.2, 1.2, 1.6, 0.4, 1.5, 0.8, 0.2, 3.6, 3.2, 0.0, 0.0, 2.0]] + [[0.6, 1.2, 0.4, 0.5, 1.5, 0.8, 0.6, 3.6, 0.8, 1.0, 0.0, 0.0, 0.2, 0.6, 0.4], [0.2, 1.2, 1.6, 0.4, 1.5, 0.8, 0.2, 3.6, 3.2, 0.0, 0.0, 2.0, -0.2, -0.6, -0.6]] """ _check_color_image(image) _check_color_image_segm(image, segm) @@ -757,7 +771,7 @@ def compute_image2d_color_statistic(image, segm, features = np.hstack((features, std)) names += ['%s_std' % n for n in ch_names] # ENERGY - if 'eng' in list_feature_flags: + if 'energy' in list_feature_flags: if USE_CYTHON: energy = cython_img2d_color_energy(image, segm) else: @@ -770,6 +784,18 @@ def compute_image2d_color_statistic(image, segm, features = np.hstack((features, median)) names += ['%s_median' % n for n in ch_names] # mean Gradient + if 'meanGrad' in list_feature_flags: + grad_matrix = np.zeros_like(image) + for i in range(image.shape[-1]): + grad_matrix[:, :, i] = np.sum(np.gradient(image[:, :, i]), axis=0) + if USE_CYTHON: + grad = cython_img2d_color_mean(grad_matrix, segm) + else: + grad = numpy_img2d_color_mean(grad_matrix, segm) + features = np.hstack((features, grad)) + names += ['%s_meanGrad' % n for n in ch_names] + _check_unrecognised_feature_names(list_feature_flags) + # mean Gradient # G = np.zeros_like(image) # for i in range(image.shape[0]): # G[i,:,:] = np.sum(np.gradient(image[i]), axis=0) @@ -1053,9 +1079,9 @@ def compute_selected_features_gray3d(img, segments, >>> names # doctest: +NORMALIZE_WHITESPACE ['gray_mean', 'gray_std', 'gray_median'] >>> _ = compute_selected_features_gray3d(img, slic, - ... {'tLM': ['median', 'std', 'eng']}) + ... {'tLM': ['median', 'std', 'energy']}) >>> fts, names = compute_selected_features_gray3d(img, slic, - ... {'tLM_s': ['mean', 'std', 'eng']}) + ... {'tLM_s': ['mean', 'std', 'energy']}) >>> fts.shape (4, 45) >>> names # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE @@ -1082,6 +1108,8 @@ def compute_selected_features_gray3d(img, segments, 'short') features.append(fts) names += n + _check_unrecognised_feature_group(dict_feature_flags) + if len(features) == 0: logging.error('not supported features: %s', repr(dict_feature_flags)) features = np.concatenate(tuple(features), axis=1) @@ -1095,7 +1123,7 @@ def compute_selected_features_gray3d(img, segments, def compute_selected_features_gray2d(img, segments, dict_features_flags=FEATURES_SET_ALL): - """ + """ compute selected features for gray image 2D :param ndarray img: :param ndarray segments: @@ -1113,14 +1141,14 @@ def compute_selected_features_gray2d(img, segments, array([[ 0.9 , 1.136, 0.5 ], [ 0.7 , 1.187, 0. ]]) >>> _ = compute_selected_features_gray2d(image, segm, - ... {'tLM': ['mean', 'std', 'median']}) + ... {'tLM': ['mean', 'std', 'median']}) >>> features, names = compute_selected_features_gray2d(image, segm, - ... {'tLM_s': ['mean', 'std', 'eng']}) + ... 
{'tLM_s': ['mean', 'std', 'energy']}) >>> features.shape (2, 45) >>> features, names = compute_selected_features_gray2d(image, segm) >>> features.shape - (2, 84) + (2, 105) """ _check_gray_image_segm(img, segments) @@ -1134,7 +1162,7 @@ def compute_selected_features_gray2d(img, segments, def compute_selected_features_color2d(img, segments, dict_feature_flags=FEATURES_SET_ALL): - """ compute selected features color2d + """ compute selected features color image 2D :param ndarray img: :param ndarray segments: @@ -1153,14 +1181,14 @@ def compute_selected_features_color2d(img, segments, array([[ 0.6 , 1.2 , 0.4 , 0.49, 1.47, 0.8 , 1. , 0. , 0. ], [ 0.2 , 1.2 , 1.6 , 0.4 , 1.47, 0.8 , 0. , 0. , 2. ]]) >>> _ = compute_selected_features_color2d(image, segm, - ... {'tLM': ['mean', 'std', 'eng']}) + ... {'tLM': ['mean', 'std', 'energy']}) >>> features, names = compute_selected_features_color2d(image, segm, - ... {'tLM_s': ['mean', 'std', 'eng']}) + ... {'tLM_s': ['mean', 'std', 'energy']}) >>> features.shape (2, 135) >>> features, names = compute_selected_features_color2d(image, segm) >>> features.shape - (2, 192) + (2, 315) """ _check_color_image(img) features = np.empty((np.max(segments) + 1, 0)) @@ -1181,6 +1209,8 @@ def compute_selected_features_color2d(img, segments, 'short') features = np.concatenate((features, fts), axis=1) names += n + _check_unrecognised_feature_group(dict_feature_flags) + features = np.nan_to_num(features) # normalise +/- zeros as set all as positive features[features == 0] = 0 @@ -1636,7 +1666,7 @@ def compute_ray_features_positions(segm, list_positions, angle_step=5., if isinstance(segm_open, int): seg_binary = morphology.opening(seg_binary, morphology.disk(segm_open)) - pos_rays, pos_shift = list(), list() + pos_rays, pos_shift, ray_dist = [], [], [] for pos in list_positions: # logging.debug('position %s', repr(pos)) ray_dist = compute_ray_features_segm_2d(seg_binary, pos, angle_step, @@ -1701,18 +1731,18 @@ def interpolate_ray_dist(ray_dists, order='spline'): y_train_ext) ray_dists[missing] = uinterp_us(x_space[missing]) elif order == 'cos': - def fn_cos(x, t): + def _fn_cos(x, t): return x[0] + x[1] * np.sin(x[2] + x[3] * t) - def fn_cos_residual(x, t, y): - return fn_cos(x, t) - y + def _fn_cos_residual(x, t, y): + return _fn_cos(x, t) - y x0 = np.array([np.mean(y_train), (y_train.max() - y_train.min()) / 2., 0, len(x_space) / np.pi]) - lsm_res = optimize.least_squares(fn_cos_residual, x0, gtol=1e-1, + lsm_res = optimize.least_squares(_fn_cos_residual, x0, gtol=1e-1, # loss='soft_l1', f_scale=0.1, args=(x_train, y_train)) - ray_dists[missing] = fn_cos(lsm_res.x, x_space[missing]) + ray_dists[missing] = _fn_cos(lsm_res.x, x_space[missing]) return ray_dists diff --git a/imsegm/ellipse_fitting.py b/imsegm/ellipse_fitting.py index d9381a79..4ecaf7c8 100755 --- a/imsegm/ellipse_fitting.py +++ b/imsegm/ellipse_fitting.py @@ -1,7 +1,7 @@ """ Framework for ellipse fitting -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import numpy as np @@ -42,11 +42,6 @@ class EllipseModelSegm(sk_fit.EllipseModel): xc, yc, a, b, theta - Attributes - ---------- - params : tuple - Ellipse model parameters `xc`, `yc`, `a`, `b`, `theta`. - Example ------- >>> params = 20, 30, 12, 16, np.deg2rad(30) @@ -157,7 +152,7 @@ def ransac_segm(points, model_class, points_all, weights, labels, table_prob, ``is_model_valid(model, *random_data)`` and ``is_data_valid(*random_data)`` must all take each points array as separate arguments. 
-    model_class : object
+    model_class : class
         Object with the following object methods:

         * ``success = estimate(*points)``
@@ -171,11 +166,6 @@ def ransac_segm(points, model_class, points_all, weights, labels, table_prob,
         Maximum distance for a points point to be classified as an inlier.
     max_trials : int, optional
         Maximum number of iterations for random sample selection.
-    stop_sample_num : int, optional
-        Stop iteration if at least this number of inliers are found.
-    stop_residuals_sum : float, optional
-        Stop iteration if sum of residuals is less than or equal to this
-        threshold.

     Returns
diff --git a/imsegm/features_cython.pyx b/imsegm/features_cython.pyx
index 7ac6b34b..30ff6b76 100755
--- a/imsegm/features_cython.pyx
+++ b/imsegm/features_cython.pyx
@@ -1,6 +1,6 @@
 """

-Copyright (C) 2014-2016 Jiri Borovec
+Copyright (C) 2014-2018 Jiri Borovec
 """

 cimport cython
diff --git a/imsegm/graph_cuts.py b/imsegm/graph_cuts.py
index 69bfdb45..12316141 100755
--- a/imsegm/graph_cuts.py
+++ b/imsegm/graph_cuts.py
@@ -1,14 +1,16 @@
 """
 Framework for GraphCut

-Copyright (C) 2014-2016 Jiri Borovec
+Copyright (C) 2014-2018 Jiri Borovec
 """

 import logging

 import numpy as np
 from gco import cut_general_graph
-from sklearn import metrics, mixture, cluster, preprocessing
+from skimage import filters
+from sklearn import metrics, preprocessing
+from sklearn import pipeline, cluster, mixture, decomposition

 import imsegm.utils.drawing as tl_visu
 import imsegm.superpixels as seg_spx
@@ -29,10 +31,10 @@ def estim_gmm_params(features, prob):
     >>> np.random.seed(0)
     >>> prob = np.array([[1, 0]] * 30 + [[0, 1]] * 40)
     >>> fts = prob + np.random.random(prob.shape)
-    >>> gmm = estim_gmm_params(fts, prob)
-    >>> gmm['weights']
+    >>> mm = estim_gmm_params(fts, prob)
+    >>> mm['weights']
     [0.42857142857142855, 0.5714285714285714]
-    >>> gmm['means']
+    >>> mm['means']
     array([[ 1.49537196,  0.53745455],
            [ 0.54199936,  1.42606497]])
     """
@@ -49,24 +51,136 @@ def estim_gmm_params(features, prob):
     return gmm_params


-def estim_class_model(features, nb_classes, proba_type):
-    """ wrapper over several options how to cluster samples
+def estim_class_model(features, nb_classes, estim_model='GMM', pca_coef=None,
+                      scaler=True, max_iter=99):
+    """ create pipeline (scaler, PCA, model) over several options how
+    to cluster samples and fit it on data

     :param ndarray features:
-    :param int nb_classes:
-    :param str proba_type:
+    :param int nb_classes: number of expected classes
+    :param str estim_model: type of the used model
+    :param float pca_coef: range (0, 1) or None
+    :param bool scaler: whether to use a scaler
+    :param str init_type: initialisation type, encoded in the estim_model name
     :return:
+
+    >>> np.random.seed(0)
+    >>> fts = np.row_stack([np.random.random((50, 3)) - 1,
+    ...                     np.random.random((50, 3)) + 1])
+    >>> mm = estim_class_model(fts, 2)
+    >>> mm.predict_proba(fts).shape
+    (100, 2)
+    >>> mm = estim_class_model(fts, 2, estim_model='GMM_kmeans',
+    ...                        pca_coef=0.95, max_iter=3)
+    >>> mm.predict_proba(fts).shape
+    (100, 2)
+    >>> mm = estim_class_model(fts, 2, estim_model='GMM_Otsu', max_iter=3)
+    >>> mm.predict_proba(fts).shape
+    (100, 2)
+    >>> mm = estim_class_model(fts, 2, estim_model='kmeans_quantiles',
+    ...
scaler=False, max_iter=3) + >>> mm.predict_proba(fts).shape + (100, 2) + >>> mm = estim_class_model(fts, 2, estim_model='BGM', max_iter=3) + >>> mm.predict_proba(fts).shape + (100, 2) + >>> mm = estim_class_model(fts, 2, estim_model='Otsu', max_iter=3) + >>> mm.predict_proba(fts).shape + (100, 2) """ - if proba_type == 'GMM': - model = estim_class_model_gmm(features, nb_classes) - elif proba_type == 'quantiles': - model = estim_class_model_kmeans(features, nb_classes, - init_type='quantiles') + components = [] + if scaler: + components += [('scaler', preprocessing.StandardScaler())] + if pca_coef is not None: + components += [('reduce_dim', decomposition.PCA(pca_coef))] + + nb_inits = max(1, int(np.sqrt(max_iter))) + # http://scikit-learn.org/stable/modules/generated/sklearn.mixture.GMM.html + mm = mixture.GaussianMixture(n_components=nb_classes, covariance_type='full', + n_init=nb_inits, max_iter=max_iter) + + # split the model and used initilaisation + if '_' in estim_model: + init_type = estim_model.split('_')[-1] + estim_model = estim_model.split('_')[0] else: - model = estim_class_model_kmeans(features, nb_classes) + init_type = '' + + y = None + if estim_model == 'GMM': + # model = estim_class_model_gmm(features, nb_classes) + if init_type == 'kmeans': + mm.set_params(n_init=1) + # http://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html + kmeans = cluster.KMeans(n_clusters=nb_classes, init='k-means++', + n_jobs=-1) + y = kmeans.fit_predict(features) + elif init_type == 'Otsu': + mm.set_params(n_init=1) + y = compute_multivarian_otsu(features) + + elif estim_model == 'kmeans': + # http://scikit-learn.org/stable/modules/generated/sklearn.mixture.GMM.html + mm.set_params(max_iter=1) + init_type = 'quantiles' if init_type == 'quantiles' else 'k-means++' + _, y = estim_class_model_kmeans(features, nb_classes, + init_type=init_type, max_iter=max_iter) + + logging.info('compute probability of each feature to all component') + + elif estim_model == 'BGM': + mm = mixture.BayesianGaussianMixture(n_components=nb_classes, + covariance_type='full', + n_init=nb_inits, max_iter=max_iter) + + elif estim_model == 'Otsu' and nb_classes == 2: + mm.set_params(max_iter=1, n_init=1) + y = compute_multivarian_otsu(features) + + components += [('model', mm)] + # compose the pipeline + model = pipeline.Pipeline(components) + + if y is not None: + # fit with examples + model.fit(features, y) + else: + # fit from scrach + model.fit(features) return model +def compute_multivarian_otsu(features): + """ compute otsu individually over each sample dimension + WARNING: this compute only localy and since it does compare all + combinations of orienting the asign for tight cases it may not decide + + :param ndarray features: + :return [bool]: + + >>> np.random.seed(0) + >>> fts = np.row_stack([np.random.random((5, 3)) - 1, + ... 
np.random.random((5, 3)) + 1]) + >>> fts[:, 1] = - fts[:, 1] + >>> compute_multivarian_otsu(fts).astype(int) + array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1]) + """ + ys = np.zeros(features.shape) + for i in range(features.shape[-1]): + thr = filters.threshold_otsu(features[:, i]) + asign = features[:, i] > thr + if i > 0: + m = np.mean(ys[:, :i], axis=1) + d1 = np.mean(np.abs(asign - m)) + d2 = np.mean(np.abs(~asign - m)) + # check if for this dimension it wount be better to swap it + if d2 < d1: + asign = ~asign + ys[:, i] = asign + y = np.mean(ys, axis=1) > 0.5 + return y + + # def estim_class_model_gmm(features, nb_classes, init='kmeans'): # """ from all features estimate Gaussian Mixture Model and assuming # each cluster is a single class compute probability that each feature @@ -79,16 +193,16 @@ def estim_class_model(features, nb_classes, proba_type): # logging.debug('estimate GMM for all given features %s and %i component', # repr(features.shape), nb_classes) # # http://scikit-learn.org/stable/modules/generated/sklearn.mixture.GMM.html -# gmm = mixture.GMM(n_components=nb_classes, covariance_type='full', n_iter=999) +# mm = mixture.GMM(n_components=nb_classes, covariance_type='full', n_iter=999) # if init == 'kmeans': # # http://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html # kmeans = cluster.KMeans(n_clusters=nb_classes, init='k-means++', n_jobs=-1) # y = kmeans.fit_predict(features) -# gmm.fit(features, y) +# mm.fit(features, y) # else: -# gmm.fit(features) +# mm.fit(features) # logging.info('compute probability of each feature to all component') -# return gmm +# return mm def estim_class_model_gmm(features, nb_classes, init='kmeans'): @@ -103,8 +217,8 @@ def estim_class_model_gmm(features, nb_classes, init='kmeans'): >>> np.random.seed(0) >>> fts = np.row_stack([np.random.random((50, 3)) - 1, ... np.random.random((50, 3)) + 1]) - >>> gmm = estim_class_model_gmm(fts, 2) - >>> gmm.predict_proba(fts).shape + >>> mm = estim_class_model_gmm(fts, 2) + >>> mm.predict_proba(fts).shape (100, 2) """ logging.debug('estimate GMM for all given features %s and %i component', @@ -124,7 +238,8 @@ def estim_class_model_gmm(features, nb_classes, init='kmeans'): return gmm -def estim_class_model_kmeans(features, nb_classes, init_type='k-means++'): +def estim_class_model_kmeans(features, nb_classes, init_type='k-means++', + max_iter=99): """ from all features estimate Gaussian from k-means clustering :param [[float]] features: list of features per segment @@ -134,26 +249,29 @@ def estim_class_model_kmeans(features, nb_classes, init_type='k-means++'): >>> np.random.seed(0) >>> fts = np.row_stack([np.random.random((50, 3)) - 1, ... 
np.random.random((50, 3)) + 1]) - >>> gmm = estim_class_model_kmeans(fts, 2) - >>> gmm.predict_proba(fts).shape + >>> mm, y = estim_class_model_kmeans(fts, 2, max_iter=9) + >>> y.shape + (100,) + >>> mm.predict_proba(fts).shape (100, 2) """ logging.debug('estimate Gaussian from k-means clustering for all given ' - 'features %s and %i component', repr(features.shape), nb_classes) + 'features %s and %i components', repr(features.shape), + nb_classes) # http://scikit-learn.org/stable/modules/generated/sklearn.cluster.KMeans.html if init_type == 'quantiles': quntiles = np.linspace(5, 95, nb_classes).tolist() init_perc = np.array(np.percentile(features, quntiles, axis=0)) kmeans = cluster.KMeans(nb_classes, init=init_perc, max_iter=2, n_jobs=-1) else: - kmeans = cluster.KMeans(nb_classes, init=init_type, n_init=25, n_jobs=-1) + nb_inits = max(1, int(np.sqrt(max_iter))) + kmeans = cluster.KMeans(nb_classes, init=init_type, max_iter=max_iter, + n_init=nb_inits, n_jobs=-1) y = kmeans.fit_predict(features) - logging.info('compute probability of each feature to all component') - # http://scikit-learn.org/stable/modules/generated/sklearn.mixture.GMM.html gmm = mixture.GaussianMixture(n_components=nb_classes, covariance_type='full', max_iter=1) gmm.fit(features, y) - return gmm + return gmm, y def get_vertexes_edges(segments): @@ -263,7 +381,8 @@ def compute_edge_model(edges, proba, metric='l_T'): and so we take the min valus :param [(int, int)] edges: - :param [[float]] features: + :param [[float]] proba: + :param str metric: :return [float]: @@ -302,6 +421,9 @@ def compute_edge_model(edges, proba, metric='l_T'): # setting min weight ~ max difference in proba as weight dist = np.max(diff, axis=1) edge_weights = np.exp(- dist / (2 * np.std(dist) ** 2)) + else: + logging.error('not implemented for: %s', metric) + edge_weights = np.ones(len(edges)) return edge_weights diff --git a/imsegm/labeling.py b/imsegm/labeling.py index 1773f03d..062d60ee 100755 --- a/imsegm/labeling.py +++ b/imsegm/labeling.py @@ -1,7 +1,7 @@ """ Framework for labeling -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import logging @@ -10,6 +10,8 @@ from scipy import ndimage import skimage.segmentation as sk_segm +import imsegm.utils.data_io as tl_data + def contour_binary_map(seg, label=1, include_boundary=False): """ get object boundaries @@ -104,8 +106,8 @@ def contour_coords(seg, label=1, include_boundary=False): def binary_image_from_coords(coords, size): """ create binary image just from point contours - :param ndarray seg: integer images, typically a segmentation - :param int label: selected singe label in segmentation + :param ndarray coords: + :param (int, int) size: :return ndarray: >>> img = np.zeros((6, 6), dtype=int) @@ -156,7 +158,7 @@ def compute_distance_map(seg, label=1): def segm_labels_assignment(segm, segm_gt): """ create labels assign to the particular regions - :param ndarray seg: input segmentation + :param ndarray segm: input segmentation :param ndarray segm_gt: true segmentation :return: @@ -673,3 +675,41 @@ def compute_boundary_distances(segm_ref, segm): assert len(points) == len(dist), \ 'number of points and disntances should be equal' return points, dist + + +def assume_bg_on_boundary(segm, bg_label=0, boundary_size=1): + """ swap labels such that the bacround label will be mostly on image boundary + + :param ndarray segm: + :param int bg_label: + :return: + + >>> segm = np.zeros((6, 12), dtype=int) + >>> segm[1:4, 4:] = 2 + >>> assume_bg_on_boundary(segm, 
boundary_size=1) + array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) + >>> segm[segm == 0] = 1 + >>> assume_bg_on_boundary(segm, boundary_size=1) + array([[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 2, 2, 2, 2, 2, 2, 2, 2], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) + """ + boundary_lb = tl_data.get_image2d_boundary_color(segm, size=boundary_size) + used_lbs = np.unique(segm) + if boundary_lb not in used_lbs: + segm[segm == boundary_lb] = bg_label + else: + lut = list(range(used_lbs.max() + 1)) + lut[boundary_lb] = bg_label + lut[bg_label] = boundary_lb + segm = np.array(lut)[segm] + return segm + diff --git a/imsegm/pipelines.py b/imsegm/pipelines.py index c2964351..14602d1c 100755 --- a/imsegm/pipelines.py +++ b/imsegm/pipelines.py @@ -1,7 +1,7 @@ """ Pipelines for supervised and unsupervised segmentation -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import logging @@ -10,7 +10,7 @@ import numpy as np import skimage.color as sk_color -from sklearn import preprocessing, mixture, decomposition +# from sklearn import mixture import imsegm.utils.experiments as tl_expt import imsegm.graph_cuts as seg_gc @@ -26,25 +26,51 @@ CROSS_VAL_LEAVE_OUT = 2 NB_THREADS = max(1, int(mproc.cpu_count() * 0.6)) -DICT_CONVERT_COLOR = { +DICT_CONVERT_COLOR_FROM_RGB = { 'hsv': sk_color.rgb2hsv, 'luv': sk_color.rgb2luv, 'lab': sk_color.rgb2lab, 'hed': sk_color.rgb2hed, 'xyz': sk_color.rgb2xyz } +DICT_CONVERT_COLOR_TO_RGB = { + 'hsv': sk_color.hsv2rgb, + 'luv': sk_color.luv2rgb, + 'lab': sk_color.lab2rgb, + 'hed': sk_color.hed2rgb, + 'xyz': sk_color.xyz2rgb +} -def convert_img_color_space(image, clr_space): +def convert_img_color_from_rgb(image, clr_space): """ convert image colour space from RGB to xxx - :param image: rgb image - :param clr_space: str - :return: image + :param ndarray image: rgb image + :param str clr_space: + :return ndarray: image + + >>> convert_img_color_from_rgb(np.ones((50, 75, 3)), 'hsv').shape + (50, 75, 3) + """ + if image.ndim == 3 and image.shape[-1] in (3, 4) \ + and clr_space in DICT_CONVERT_COLOR_FROM_RGB: + image = DICT_CONVERT_COLOR_FROM_RGB[clr_space](image) + return image + + +def convert_img_color_to_rgb(image, clr_space): + """ convert image colour space to RGB to xxx + + :param ndarray image: rgb image + :param str clr_space: + :return ndarray: image + + >>> convert_img_color_to_rgb(np.ones((50, 75, 3)), 'hsv').shape + (50, 75, 3) """ - if image.ndim == 3 and image.shape[2] == 3 \ - and clr_space in DICT_CONVERT_COLOR: - image = DICT_CONVERT_COLOR[clr_space](image) + if image.ndim == 3 and image.shape[-1] == 3 \ + and clr_space in DICT_CONVERT_COLOR_TO_RGB: + image = DICT_CONVERT_COLOR_TO_RGB[clr_space](image) return image @@ -53,7 +79,7 @@ def pipe_color2d_slic_features_gmm_graphcut(image, nb_classes=3, sp_size=30, sp_regul=0.2, gc_regul=1., dict_features=FTS_SET_SIMPLE, - proba_type='GMM', + estim_model='GMM', gc_edge_type='model_lT', pca_coef=None, dict_debug_imgs=None): @@ -66,9 +92,10 @@ def pipe_color2d_slic_features_gmm_graphcut(image, nb_classes=3, :param float sp_regul: regularisation in range(0;1) where "0" gives elastic and "1" nearly square slic :param int nb_classes: number of classes to be segmented(indexing 
from 0) - :param dict_features: {clr: [str], ...} + :param {} dict_features: {clr: [str]} :param str clr_space: use color space :param float gc_regul: GC regularisation + :param str estim_model: estimating model :param str gc_edge_type: graphCut edge type :param float pca_coef: range (0, 1) or None :param dict_debug_imgs: {str: ...} @@ -94,18 +121,14 @@ def pipe_color2d_slic_features_gmm_graphcut(image, nb_classes=3, dict_debug_imgs['slic_mean'] = sk_color.label2rgb(slic, image, kind='avg') - if pca_coef is not None: - pca = decomposition.PCA(pca_coef) - features = pca.fit_transform(features) - - model = seg_gc.estim_class_model(features, nb_classes, proba_type) + model = seg_gc.estim_class_model(features, nb_classes, estim_model, pca_coef) proba = model.predict_proba(features) logging.debug('list of probabilities: %s', repr(proba.shape)) - gmm = mixture.GaussianMixture(n_components=nb_classes, - covariance_type='full', max_iter=1) - gmm.fit(features, np.argmax(proba, axis=1)) - proba = gmm.predict_proba(features) + # gmm = mixture.GaussianMixture(n_components=nb_classes, + # covariance_type='full', max_iter=1) + # gmm.fit(features, np.argmax(proba, axis=1)) + # proba = gmm.predict_proba(features) graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image, features, gc_regul, gc_edge_type, dict_debug_imgs=dict_debug_imgs) @@ -116,7 +139,7 @@ def pipe_color2d_slic_features_gmm_graphcut(image, nb_classes=3, def estim_model_classes_group(list_images, nb_classes=4, clr_space='rgb', sp_size=30, sp_regul=0.2, dict_features=FTS_SET_SIMPLE, - pca_coef=None, proba_type='GMM', + pca_coef=None, scaler=True, proba_type='GMM', nb_jobs=NB_THREADS): """ estimate a model from sequence of input images and return it as result @@ -128,16 +151,17 @@ def estim_model_classes_group(list_images, nb_classes=4, clr_space='rgb', and "1" nearly square slic :param {str: [str]} dict_features: list of features to be extracted :param float pca_coef: range (0, 1) or None + :param bool scaler: wheter use a scaler :param str proba_type: model type :param int nb_jobs: number of jobs running in parallel :return: """ list_slic, list_features = list(), list() - wrapper_compute = partial(compute_color2d_superpixels_features, - sp_size=sp_size, sp_regul=sp_regul, - dict_features=dict_features, - clr_space=clr_space, fts_norm=False) - iterate = tl_expt.WrapExecuteSequence(wrapper_compute, list_images, + _wrapper_compute = partial(compute_color2d_superpixels_features, + sp_size=sp_size, sp_regul=sp_regul, + dict_features=dict_features, + clr_space=clr_space, fts_norm=False) + iterate = tl_expt.WrapExecuteSequence(_wrapper_compute, list_images, nb_jobs=nb_jobs) for slic, features in iterate: list_slic.append(slic) @@ -152,21 +176,13 @@ def estim_model_classes_group(list_images, nb_classes=4, clr_space='rgb', features = np.concatenate(tuple(list_features), axis=0) features = np.nan_to_num(features) - # scaling - scaler = preprocessing.StandardScaler() - scaler.fit(features) - features = scaler.transform(features) - - pca = None - if pca_coef is not None: - pca = decomposition.PCA(pca_coef) - features = pca.fit_transform(features) + model = seg_gc.estim_class_model(features, nb_classes, proba_type, + pca_coef, scaler) - model = seg_gc.estim_class_model(features, nb_classes, proba_type) - return scaler, pca, model + return model, list_features -def segment_color2d_slic_features_model_graphcut(image, scaler, pca, model, +def segment_color2d_slic_features_model_graphcut(image, model_pipeline, clr_space='rgb', sp_size=30, 
sp_regul=0.2, gc_regul=1.,
@@ -176,7 +192,8 @@ def segment_color2d_slic_features_model_graphcut(image, scaler, pca, model,
""" complete pipe-line for segmentation using superpixels, extracting features
and graphCut segmentation
- :param ndarry img: input RGB image
+ :param ndarray image: input RGB image
+ :param obj model_pipeline: estimated model or trained classifier
:param str clr_space: chose the color space
:param int sp_size: initial size of a superpixel(meaning edge lenght)
:param float sp_regul: regularisation in range(0;1) where "0" gives elastic
@@ -184,15 +201,28 @@ def segment_color2d_slic_features_model_graphcut(image, scaler, pca, model,
:param {str: [str]} dict_features: list of features to be extracted
:param float gc_regul: GC regularisation
:param str gc_edge_type: select the GC edge type
- :param float pca_coef: range (0, 1) or None
:param dict_debug_imgs: {str: ...}
:return [[int]]: segmentation matrix mapping each pixel into a class
+ UnSupervised:
>>> np.random.seed(0)
+ >>> seg_fts.USE_CYTHON = False
>>> image = np.random.random((125, 150, 3)) / 2.
>>> image[:, :75] += 0.5
- >>> sc, pca, model = estim_model_classes_group([image], nb_classes=2)
- >>> segm = segment_color2d_slic_features_model_graphcut(image, sc, pca, model)
+ >>> model, _ = estim_model_classes_group([image], nb_classes=2)
+ >>> segm = segment_color2d_slic_features_model_graphcut(image, model)
+ >>> segm.shape
+ (125, 150)
+
+ Supervised:
+ >>> np.random.seed(0)
+ >>> seg_fts.USE_CYTHON = False
+ >>> image = np.random.random((125, 150, 3)) / 2.
+ >>> image[:, 75:] += 0.5
+ >>> annot = np.zeros(image.shape[:2], dtype=int)
+ >>> annot[:, 75:] = 1
+ >>> clf, _, _, _ = train_classif_color2d_slic_features([image], [annot])
+ >>> segm = segment_color2d_slic_features_model_graphcut(image, clf)
>>> segm.shape
(125, 150)
"""
@@ -207,23 +237,25 @@ def segment_color2d_slic_features_model_graphcut(image, scaler, pca, model,
image = np.rollaxis(np.tile(image, (3, 1, 1)), 0, 3)
dict_debug_imgs['image'] = image
dict_debug_imgs['slic'] = slic
- dict_debug_imgs['slic_mean'] = sk_color.label2rgb(slic, image, kind='avg')
-
- features = scaler.transform(features)
- if pca is not None:
- features = pca.fit_transform(features)
+ dict_debug_imgs['slic_mean'] = sk_color.label2rgb(slic, image,
+ kind='avg')
- proba = model.predict_proba(features)
+ proba = model_pipeline.predict_proba(features)
logging.debug('list of probabilities: %s', repr(proba.shape))
- gmm = mixture.GaussianMixture(n_components=proba.shape[1],
- covariance_type='full', max_iter=1)
- gmm.fit(features, np.argmax(proba, axis=1))
- proba = gmm.predict_proba(features)
+ # gmm = mixture.GaussianMixture(n_components=proba.shape[1],
+ # covariance_type='full', max_iter=1)
+ # gmm.fit(features, np.argmax(proba, axis=1))
+ # proba = gmm.predict_proba(features)
- graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image, features,
- gc_regul, gc_edge_type, dict_debug_imgs=dict_debug_imgs)
+ graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image,
+ features,
+ gc_regul, gc_edge_type,
+ dict_debug_imgs=dict_debug_imgs)
segm = graph_labels[slic]
+ # relabel according to the classifier classes
+ if hasattr(model_pipeline, 'classes_'):
+ segm = model_pipeline.classes_[segm]
return segm
@@ -249,7 +281,7 @@ def compute_color2d_superpixels_features(image, clr_space='rgb',
# plt.figure(), plt.imshow(slic)
logging.debug('extract slic/superpixels features.')
- image = convert_img_color_space(image, clr_space)
+ image = convert_img_color_from_rgb(image, clr_space)
features, _ =
seg_fts.compute_selected_features_img2d(image, slic, dict_features) logging.debug('list of features RAW: %s', repr(features.shape)) @@ -289,12 +321,15 @@ def wrapper_compute_color2d_slic_features_labels(img_annot, clr_space, return slic, features, labels -def train_classif_color2d_slic_features(list_images, list_annots, clr_space='rgb', +def train_classif_color2d_slic_features(list_images, list_annots, + clr_space='rgb', sp_size=30, sp_regul=0.2, dict_features=FTS_SET_SIMPLE, - clf_name=CLASSIF_NAME, label_purity=0.9, + clf_name=CLASSIF_NAME, + label_purity=0.9, feature_balance='unique', pca_coef=None, nb_classif_search=1, + nb_hold_out=CROSS_VAL_LEAVE_OUT, nb_jobs=1): """ train classifier on list of annotated images @@ -310,6 +345,7 @@ def train_classif_color2d_slic_features(list_images, list_annots, clr_space='rgb :param str feature_balance: set how to balance datasets :param float pca_coef: select PCA coef or None :param int nb_classif_search: number of tries for hyper-parameters seach + :param int nb_hold_out: cross-val leave out :param int nb_jobs: parallelism :return: """ @@ -319,33 +355,18 @@ def train_classif_color2d_slic_features(list_images, list_annots, clr_space='rgb % (len(list_images), len(list_annots)) list_slic, list_features, list_labels = list(), list(), list() - wrapper_compute = partial(wrapper_compute_color2d_slic_features_labels, - clr_space=clr_space, sp_size=sp_size, - sp_regul=sp_regul, dict_features=dict_features, - label_purity=label_purity) + _wrapper_compute = partial(wrapper_compute_color2d_slic_features_labels, + clr_space=clr_space, sp_size=sp_size, + sp_regul=sp_regul, dict_features=dict_features, + label_purity=label_purity) list_imgs_annot = zip(list_images, list_annots) - iterate = tl_expt.WrapExecuteSequence(wrapper_compute, list_imgs_annot, + iterate = tl_expt.WrapExecuteSequence(_wrapper_compute, list_imgs_annot, nb_jobs=nb_jobs) for slic, fts, lbs in iterate: list_slic.append(slic) list_features.append(fts) list_labels.append(lbs) - # for img, annot in zip(list_images, list_annots): - # assert img.shape[:2] == annot.shape[:2] - # slic, features = compute_color2d_superpixels_features(img, clr_space, - # sp_size, sp_regul, - # dict_features, - # fts_norm=False) - # list_slic.append(slic) - # list_features.append(features) - # - # label_hist = seg_lbs.histogram_regions_labels_norm(slic, annot) - # labels = np.argmax(label_hist, axis=1) - # purity = np.max(label_hist, axis=1) - # labels[purity < label_purity] = -1 - # list_labels.append(labels) - logging.debug('concentrate features...') # concentrate features, labels features, labels, sizes = seg_clf.convert_set_features_labels_2_dataset( @@ -359,83 +380,30 @@ def train_classif_color2d_slic_features(list_images, list_annots, clr_space='rgb # clf_pipeline = seg_clf.create_clf_pipeline(clf_name, pca_coef) # clf_pipeline.fit(np.array(features), np.array(labels, dtype=int)) - if len(sizes) > (CROSS_VAL_LEAVE_OUT * 5): - cv = seg_clf.CrossValidatePSetsOut(sizes, nb_hold_out=CROSS_VAL_LEAVE_OUT) + if len(sizes) > (nb_hold_out * 5): + cv = seg_clf.CrossValidatePSetsOut(sizes, nb_hold_out=nb_hold_out) # for small nuber of training images this does not make sence else: cv = 10 classif, _ = seg_clf.create_classif_train_export(clf_name, features, labels, nb_search_iter=nb_classif_search, - cross_val=cv, nb_jobs=nb_jobs, + cross_val=cv, + nb_jobs=nb_jobs, pca_coef=pca_coef) return classif, list_slic, list_features, list_labels -def segment_color2d_slic_features_classif_graphcut(image, classif, - clr_space='rgb', - 
sp_size=30, sp_regul=0.2, - gc_regul=1., - dict_features=FTS_SET_SIMPLE, - gc_edge_type='model', - dict_debug_imgs=None): - """ take trained classifier and apply it on new images - - :param ndarray image: input image - :param classif: trained classifier - :param str clr_space: chose the color space - :param int sp_size: initial size of a superpixel(meaning edge lenght) - :param float sp_regul: regularisation in range(0;1) where "0" gives elastic - and "1" nearly square segments - :param {str: [str]} dict_features: list of features to be extracted - :param gc_regul: regularisation for GC - :param str gc_edge_type: select the GC edge type - :param dict_debug_imgs: - :return: - - >>> np.random.seed(0) - >>> seg_fts.USE_CYTHON = False - >>> image = np.random.random((125, 150, 3)) / 2. - >>> image[:, 75:] += 0.5 - >>> annot = np.zeros(image.shape[:2], dtype=int) - >>> annot[:, 75:] = 1 - >>> clf, _, _, _ = train_classif_color2d_slic_features([image], [annot]) - >>> segm = segment_color2d_slic_features_classif_graphcut(image, clf) - >>> segm.shape - (125, 150) - """ - logging.info('SEGMENTATION Superpixels-Features-Classifier-GraphCut') - slic, features = compute_color2d_superpixels_features(image, clr_space, - sp_size, sp_regul, - dict_features, - fts_norm=False) - - proba = classif.predict_proba(features) - - if dict_debug_imgs is not None: - if image.ndim == 2: # duplicate channels to be like RGB - image = np.rollaxis(np.tile(image, (3, 1, 1)), 0, 3) - dict_debug_imgs['image'] = image - dict_debug_imgs['slic'] = slic - dict_debug_imgs['slic_mean'] = sk_color.label2rgb(slic, image, kind='avg') - - graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image, features, - gc_regul, gc_edge_type, - dict_debug_imgs=dict_debug_imgs) - segm = graph_labels[slic] - # relabel according classif classes - segm = classif.classes_[segm] - return segm - - -def pipe_gray3d_slic_features_gmm_graphcut(image, nb_classes=4, spacing=(12, 1, 1), - sp_size=15, sp_regul=0.2, gc_regul=0.1, +def pipe_gray3d_slic_features_gmm_graphcut(image, nb_classes=4, + spacing=(12, 1, 1), + sp_size=15, sp_regul=0.2, + gc_regul=0.1, dict_features=FTS_SET_SIMPLE): """ complete pipe-line for segmentation using superpixels, extracting features and graphCut segmentation - :param ndarray img: input RGB image + :param ndarray image: input RGB image :param int sp_size: initial size of a superpixel(meaning edge lenght) :param float sp_regul: regularisation in range(0;1) where "0" gives elastic and "1" nearly square segments @@ -457,7 +425,8 @@ def pipe_gray3d_slic_features_gmm_graphcut(image, nb_classes=4, spacing=(12, 1, # plt.imshow(segments) logging.info('extract segments/superpixels features.') # f = features.computeColourMean(image, segments) - features, _ = seg_fts.compute_selected_features_gray3d(image, slic, dict_features) + features, _ = seg_fts.compute_selected_features_gray3d(image, slic, + dict_features) # merge features together logging.debug('list of features RAW: %s', repr(features.shape)) features[np.isnan(features)] = 0 @@ -466,13 +435,13 @@ def pipe_gray3d_slic_features_gmm_graphcut(image, nb_classes=4, spacing=(12, 1, features, _ = seg_fts.norm_features(features) logging.debug('list of features NORM: %s', repr(features.shape)) - model = seg_gc.estim_class_model_gmm(features, nb_classes) + model = seg_gc.estim_class_model(features, nb_classes) proba = model.predict_proba(features) logging.debug('list of probabilities: %s', repr(proba.shape)) # resultGraph = graphCut.segment_graph_cut_int_vals(segments, prob, gcReg) 
- graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image, features, - gc_regul) + graph_labels = seg_gc.segment_graph_cut_general(slic, proba, image, + features, gc_regul) return graph_labels[slic] diff --git a/imsegm/region_growing.py b/imsegm/region_growing.py index 9abfcedb..f01430b3 100755 --- a/imsegm/region_growing.py +++ b/imsegm/region_growing.py @@ -3,7 +3,7 @@ * general GraphCut segmentation with and without shape model * region growing with shape prior - greedy & GraphCut -Copyright (C) 2016-2017 Jiri Borovec +Copyright (C) 2016-2018 Jiri Borovec """ import logging @@ -146,7 +146,7 @@ def object_segmentation_graphcut_pixels(segm, centres, dict_debug_imgs=None): """ object segmentation using Graph Cut directly on pixel level - :param ndarray slic: superpixel pre-segmentation + :param ndarray centres: :param ndarray segm: input structure segmentation :param [(int, int)] centres: superpixel centres :param [float] labels_fg_prob: set how much particular label belongs to foreground @@ -582,7 +582,7 @@ def compute_shape_prior_table_cdf(point, cum_distribution, centre, :param (int, int) point: single points :param (int, int) centre: center of model - :param [[float]] cum_hist: cumulative histogram + :param [[float]] cum_distribution: cumulative histogram :return float: >>> chist = [[1.0, 1.0, 0.8, 0.7, 0.6, 0.5, 0.3, 0.0, 0.0], @@ -1562,7 +1562,7 @@ def region_growing_shape_slic_graphcut(segm, slic, centres, shape_model, if len(gc_edges) > 0: graph_labels = cut_general_graph(np.array(gc_edges), edge_weights, unary, pairwise, n_iter=999) - labels_gc[gc_vestexes] = graph_labels + labels_gc[gc_vestexes] = graph_labels else: for i in range(len(centres)): diff --git a/imsegm/superpixels.py b/imsegm/superpixels.py index cc231369..06195830 100755 --- a/imsegm/superpixels.py +++ b/imsegm/superpixels.py @@ -6,7 +6,7 @@ SEE: * http://scikit-image.org/docs/dev/auto_examples/plot_segmentations.html -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ @@ -22,7 +22,7 @@ def segment_slic_img2d(img, sp_size=50, rltv_compact=0.1, slico=False): """ segmentation by SLIC superpixels using original SLIC implementation - :param ndarray im: input color image + :param ndarray img: input color image :param int sp_size: superpixel initial size :param float rltv_compact: relative regularisation in range (0, 1) where 0 is for free form and 1 for nearly rectangular superpixels diff --git a/imsegm/tests/test-classification.py b/imsegm/tests/test-classification.py index 0b78ab96..278d44c3 100644 --- a/imsegm/tests/test-classification.py +++ b/imsegm/tests/test-classification.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os diff --git a/imsegm/tests/test-descriptors.py b/imsegm/tests/test-descriptors.py index 32bbc76d..ddfc4ca2 100644 --- a/imsegm/tests/test-descriptors.py +++ b/imsegm/tests/test-descriptors.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os diff --git a/imsegm/tests/test-ellipse_fitting.py b/imsegm/tests/test-ellipse_fitting.py index 698bdc20..6fa4438b 100644 --- a/imsegm/tests/test-ellipse_fitting.py +++ b/imsegm/tests/test-ellipse_fitting.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os @@ -20,7 +20,7 @@ # set some default paths 
PATH_OUTPUT = tl_data.update_path('output', absolute=True) -PATH_OVARY = os.path.join(tl_data.update_path('images', absolute=True), +PATH_OVARY = os.path.join(tl_data.update_path('data_images', absolute=True), 'drosophila_ovary_slice') PATH_IMAGES = os.path.join(PATH_OVARY, 'image') PATH_SEGM = os.path.join(PATH_OVARY, 'segm') diff --git a/imsegm/tests/test-graph_cut.py b/imsegm/tests/test-graph_cut.py index a5af9e13..689d41d6 100644 --- a/imsegm/tests/test-graph_cut.py +++ b/imsegm/tests/test-graph_cut.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os diff --git a/imsegm/tests/test-labels.py b/imsegm/tests/test-labels.py index 1cdd49e1..58cdbea8 100644 --- a/imsegm/tests/test-labels.py +++ b/imsegm/tests/test-labels.py @@ -2,7 +2,7 @@ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os diff --git a/imsegm/tests/test-pipelines.py b/imsegm/tests/test-pipelines.py index 56d4d234..4417f77e 100644 --- a/imsegm/tests/test-pipelines.py +++ b/imsegm/tests/test-pipelines.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import logging @@ -79,8 +79,9 @@ def run_segm2d_gmm_gc(img2d, dir_name, types_edge=('model', 'const'), if not os.path.isdir(path_dir): os.mkdir(path_dir) - scaler, pca, model = pipelines.estim_model_classes_group( - [img2d], proba_type='GMM', **dict_params) + model, _ = pipelines.estim_model_classes_group([img2d], + proba_type='GMM', + **dict_params) dict_params.pop('nb_classes', None) dict_params.pop('pca_coef', None) @@ -92,13 +93,13 @@ def run_segm2d_gmm_gc(img2d, dir_name, types_edge=('model', 'const'), # dict_debug_imgs=dict_imgs, **dict_params) seg = pipelines.segment_color2d_slic_features_model_graphcut( - img2d, scaler, pca, model, gc_regul=regul, gc_edge_type=edge, + img2d, model, gc_regul=regul, gc_edge_type=edge, dict_debug_imgs=dict_imgs, **dict_params) show_segm_debugs_2d(dict_imgs, path_dir, 'fig_regul-%.2f_edge-%s_debug.png' % (regul, edge)) show_segm_results_2d(img2d, seg, path_dir, - 'fig_regul-%.2f_edge-%s.png' % (regul, edge)) + 'fig_regul-%.2f_edge-%s.png' % (regul, edge)) dict_imgs = None @@ -198,7 +199,7 @@ def test_segm_supervised(self): classif, _, _, _ = pipelines.train_classif_color2d_slic_features( [img], [annot], sp_size, dict_features=FEATURES_TEXTURE) - _ = pipelines.segment_color2d_slic_features_classif_graphcut( + _ = pipelines.segment_color2d_slic_features_model_graphcut( img, classif, sp_size=sp_size, gc_regul=0., dict_features=FEATURES_TEXTURE, dict_debug_imgs=dict_imgs) show_segm_debugs_2d(dict_imgs, path_dir, name % (1, 0, '_debug')) @@ -206,12 +207,12 @@ def test_segm_supervised(self): for edge in tp_edge: dict_imgs = dict() for regul in list_regul: - seg = pipelines.segment_color2d_slic_features_classif_graphcut( + seg = pipelines.segment_color2d_slic_features_model_graphcut( img, classif, sp_size=sp_size, gc_regul=regul, gc_edge_type=edge, dict_features=FEATURES_TEXTURE) show_segm_results_2d(img, seg, path_dir, name % (1, regul, edge)) - seg = pipelines.segment_color2d_slic_features_classif_graphcut( + seg = pipelines.segment_color2d_slic_features_model_graphcut( img2, classif, sp_size=sp_size, gc_regul=regul, gc_edge_type=edge, dict_features=FEATURES_TEXTURE, dict_debug_imgs=dict_imgs) show_segm_results_2d(img2, seg, path_dir, name % (2, regul, 
edge)) diff --git a/imsegm/tests/test-region_growing.py b/imsegm/tests/test-region_growing.py index f5af5f5e..0da458cf 100644 --- a/imsegm/tests/test-region_growing.py +++ b/imsegm/tests/test-region_growing.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import logging @@ -22,7 +22,7 @@ import imsegm.superpixels as seg_spx import imsegm.region_growing as seg_rg -PATH_OVARY = os.path.join(tl_data.update_path('images', absolute=True), +PATH_OVARY = os.path.join(tl_data.update_path('data_images', absolute=True), 'drosophila_ovary_slice') PATH_IMAGE = os.path.join(PATH_OVARY, 'image') PATH_SEGM = os.path.join(PATH_OVARY, 'segm') diff --git a/imsegm/tests/test-superpixels.py b/imsegm/tests/test-superpixels.py index b2f0a5fd..a171c7d4 100644 --- a/imsegm/tests/test-superpixels.py +++ b/imsegm/tests/test-superpixels.py @@ -1,7 +1,7 @@ """ Unit testing for particular segmentation module -Copyright (C) 2014-2017 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os diff --git a/imsegm/utils/data_io.py b/imsegm/utils/data_io.py index 69fe50ae..b25875ef 100755 --- a/imsegm/utils/data_io.py +++ b/imsegm/utils/data_io.py @@ -1,7 +1,7 @@ """ Framework for handling input/output -Copyright (C) 2015-2016 Jiri Borovec +Copyright (C) 2015-2018 Jiri Borovec """ import os @@ -236,7 +236,7 @@ def scale_image_intensity(img, im_range=1., quantiles=(2, 98)): in_range=(p_low, p_high), out_range='float') if im_range == 255: - img = (img * im_range).astype(np.uint8) + img = np.array(img * im_range).astype(np.uint8) return img @@ -582,12 +582,12 @@ def load_image_tiff_volume(path_img, im_range=None): :param float im_range: range to scale image values (1. or 255) :return ndarray: - >>> p_img = os.path.join(update_path('images'), 'drosophila_ovary_3D', + >>> p_img = os.path.join(update_path('data_images'), 'drosophila_ovary_3D', ... 'AU10-13_f0011.tif') >>> img = load_image_tiff_volume(p_img) >>> img.shape (30, 323, 512) - >>> p_img = os.path.join(update_path('images'), + >>> p_img = os.path.join(update_path('data_images'), ... 'drosophila_ovary_slice', 'image', 'insitu7545.tif') >>> img = load_image_tiff_volume(p_img) >>> img.shape @@ -633,14 +633,14 @@ def load_tiff_volume_split_double_band(path_img, im_range=None): :param float im_range: range to scale image values (1. or 255) :return ndarray, ndarray: - >>> p_img = os.path.join(update_path('images'), 'drosophila_ovary_3D', + >>> p_img = os.path.join(update_path('data_images'), 'drosophila_ovary_3D', ... 'AU10-13_f0011.tif') >>> img_b1, img_b2 = load_tiff_volume_split_double_band(p_img) >>> img_b1.shape (15, 323, 512) >>> img_b2.shape (15, 323, 512) - >>> p_img = os.path.join(update_path('images'), + >>> p_img = os.path.join(update_path('data_images'), ... 'drosophila_ovary_slice', 'image', 'insitu7545.tif') >>> img_b1, img_b2 = load_tiff_volume_split_double_band(p_img) >>> img_b1.shape @@ -678,7 +678,7 @@ def load_zvi_volume_double_band_split(path_img): :param str path_img: path to the image :return ndarray, ndarray: - >>> p_img = os.path.join(update_path('images'), + >>> p_img = os.path.join(update_path('data_images'), ... 
'others', 'sample.zvi') >>> img_b1, img_b2 = load_zvi_volume_double_band_split(p_img) >>> img_b1.shape @@ -700,7 +700,7 @@ def load_img_double_band_split(path_img, im_range=1., quantiles=(2, 98)): :param (int, int) quantiles: scale image values in certain quantile range :return: - >>> p_imgs = os.path.join(update_path('images'), + >>> p_imgs = os.path.join(update_path('data_images'), ... 'drosophila_ovary_slice', 'image') >>> p_img = os.path.join(p_imgs, 'insitu7545.jpg') >>> img_b1, img_b2 = load_img_double_band_split(p_img) @@ -710,7 +710,7 @@ def load_img_double_band_split(path_img, im_range=1., quantiles=(2, 98)): >>> img_b1, img_b2 = load_img_double_band_split(p_img) >>> img_b1.shape (647, 1024) - >>> p_img = os.path.join(update_path('images'), + >>> p_img = os.path.join(update_path('data_images'), ... 'drosophila_ovary_3D', 'AU10-13_f0011.tif') >>> img_b1, img_b2 = load_img_double_band_split(p_img) >>> img_b1.shape @@ -772,7 +772,7 @@ def load_complete_image_folder(path_dir, img_name_pattern='*.png', :param [str] skip: skip some prticular images by name :return: - >>> p_imgs = os.path.join(update_path('images'), + >>> p_imgs = os.path.join(update_path('data_images'), ... 'drosophila_ovary_slice', 'image') >>> l_imgs, l_names = load_complete_image_folder(p_imgs, '*.jpg') >>> len(l_imgs) @@ -881,20 +881,19 @@ def find_files_match_names_across_dirs(list_path_pattern, drop_none=True): :param bool drop_none: drop if there are some none - missing values in rows :return: DF - >>> def mpath(d, n): - ... p = os.path.join(update_path('images'), - ... 'drosophila_ovary_slice', d, n) - ... return p - >>> df = find_files_match_names_across_dirs([mpath('image', '*.jpg'), - ... mpath('segm', '*.png'), - ... mpath('center_levels', '*.csv')]) + >>> def _mp(d, n): + ... return os.path.join(update_path('data_images'), + ... 'drosophila_ovary_slice', d, n) + >>> df = find_files_match_names_across_dirs([_mp('image', '*.jpg'), + ... _mp('segm', '*.png'), + ... _mp('center_levels', '*.csv')]) >>> len(df) > 0 True >>> df.columns.tolist() ['path_1', 'path_2', 'path_3'] - >>> df = find_files_match_names_across_dirs([mpath('image', '*.png'), - ... mpath('segm', '*.jpg'), - ... mpath('center_levels', '*.csv')]) + >>> df = find_files_match_names_across_dirs([_mp('image', '*.png'), + ... _mp('segm', '*.jpg'), + ... 
_mp('center_levels', '*.csv')]) >>> len(df) 0 """ @@ -904,21 +903,21 @@ def find_files_match_names_across_dirs(list_path_pattern, drop_none=True): assert os.path.exists(os.path.dirname(p)), \ 'missing "%s"' % os.path.dirname(p) - def get_name(path, pattern='*'): + def _get_name(path, pattern='*'): name = os.path.splitext(os.path.basename(path))[0] for s in pattern.split('*'): name = name.replace(s, '') return name - def get_paths_names(path_pattern): + def _get_paths_names(path_pattern): paths_ = glob.glob(path_pattern) if len(paths_) == 0: return [None], [None] - names_ = [get_name(p, os.path.basename(path_pattern)) for p in paths_] + names_ = [_get_name(p, os.path.basename(path_pattern)) for p in paths_] return paths_, names_ logging.info('find match files...') - paths_0, names_0 = get_paths_names(list_path_pattern[0]) + paths_0, names_0 = _get_paths_names(list_path_pattern[0]) list_paths = [paths_0] for path_pattern_n in list_path_pattern[1:]: @@ -927,7 +926,7 @@ def get_paths_names(path_pattern): list_files = glob.glob(path_pattern_n) logging.debug('found %i files in %s', len(list_files), path_pattern_n) for path_n in list_files: - name_n = get_name(path_n, name_pattern) + name_n = _get_name(path_n, name_pattern) if name_n in names_0: idx = names_0.index(name_n) paths_n[idx] = path_n @@ -942,7 +941,7 @@ def get_paths_names(path_pattern): return df_paths -def get_background_color(image): +def get_image2d_boundary_color(image, size=1): """ extract background color as median along image boundaries :param image: @@ -956,20 +955,22 @@ def get_background_color(image): [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]) - >>> get_background_color(img) + >>> get_image2d_boundary_color(img) 0 - >>> get_background_color(np.ones((5, 15, 3), dtype=int)) + >>> get_image2d_boundary_color(np.ones((5, 15, 3), dtype=int), size=2) array([1, 1, 1]) - >>> get_background_color(np.ones((5, 15, 3, 1), dtype=int)) + >>> get_image2d_boundary_color(np.ones((5, 15, 3, 1), dtype=int)) array(0) """ + size = int(size) if image.ndim == 2: - bg_pixels = np.hstack([image[0, :], image[:, 0], - image[-1, :], image[:, -1]]) - bg_color = np.argmax(np.bincount(bg_pixels)) + bg_pixels = np.hstack([image[:size, :], image[:, :size].T, + image[-size:, :], image[:, -size:].T]) + bg_color = np.argmax(np.bincount(bg_pixels.ravel())) elif image.ndim == 3: - bg_pixels = np.vstack([image[0, :, ...], image[:, 0, ...], - image[-1, :, ...], image[:, -1, ...]]) + bounds = [image[:size, :, ...], image[:, :size, ...], + image[-size:, :, ...], image[:, -size:, ...]] + bg_pixels = np.vstack([b.reshape(-1, image.shape[-1]) for b in bounds]) bg_color = np.median(bg_pixels, axis=0) else: logging.error('not supported image dim: %s' % repr(image.shape)) @@ -1035,7 +1036,7 @@ def cut_object(img, mask, padding, use_mask=False, bg_color=None): bg_mask = np.argmax(np.bincount(bg_pixels)) if bg_color is None: - bg_color = get_background_color(img) + bg_color = get_image2d_boundary_color(img) rotate = np.rad2deg(prop.orientation) shift = prop.centroid - (np.array(mask.shape) / 2.) 
diff --git a/imsegm/utils/data_samples.py b/imsegm/utils/data_samples.py index 7eba923f..50ae5e69 100755 --- a/imsegm/utils/data_samples.py +++ b/imsegm/utils/data_samples.py @@ -2,13 +2,13 @@ """ Sandbox with some sample images -Copyright (C) 2015-2016 Jiri Borovec +Copyright (C) 2015-2018 Jiri Borovec """ import os import logging -from PIL import Image +# from PIL import Image import numpy as np import imsegm.utils.data_io as tl_data @@ -18,12 +18,12 @@ SAMPLE_SEG_NB_CLASSES = 3 SAMPLE_SEG_SIZE_3D_SMALL = (10, 5, 6) -PATH_IMAGES = tl_data.update_path('images') +PATH_IMAGES = tl_data.update_path('data_images') IMAGE_LENNA = os.path.join('others', 'lena.png') IMAGE_OBJECTS = os.path.join('synthetic', 'reference.jpg') -IMAGE_3CLS = os.path.join('textures', 'sample_rgb_3cls.jpg') -IMAGE_STAR_1 = os.path.join('see_starfish', 'star_nb1.jpg') -IMAGE_STAR_2 = os.path.join('see_starfish', 'stars_nb2.jpg') +IMAGE_3CLS = os.path.join('synthetic', 'texture_rgb_3cls.jpg') +IMAGE_STAR_1 = os.path.join('others', 'sea_starfish-1.jpg') +IMAGE_STAR_2 = os.path.join('others', 'sea_starfish-2.jpg') IMAGE_HISTOL_CIMA = \ os.path.join('histology_CIMA', '29-041-Izd2-w35-CD31-3-les1.jpg') IMAGE_HISTOL_FLAGSHIP = \ diff --git a/imsegm/utils/drawing.py b/imsegm/utils/drawing.py index a06cb55c..0289aba9 100755 --- a/imsegm/utils/drawing.py +++ b/imsegm/utils/drawing.py @@ -1,7 +1,7 @@ """ Framework for visualisations -Copyright (C) 2016-2017 Jiri Borovec +Copyright (C) 2016-2018 Jiri Borovec """ import os @@ -248,7 +248,7 @@ def figure_image_segm_results(img, seg, subfig_size=9): axarr[1].set_title('original image w. segment overlap') axarr[1].imshow(color.rgb2gray(img), cmap=plt.cm.Greys_r) axarr[1].imshow(seg, alpha=0.2, cmap=plt.cm.jet) - axarr[1].contour(seg, levels=np.unique(seg), linewidth=2, cmap=plt.cm.jet) + axarr[1].contour(seg, levels=np.unique(seg), linewidths=2, cmap=plt.cm.jet) axarr[2].set_title('segmentation of all labels') axarr[2].imshow(seg, cmap=plt.cm.jet) @@ -289,12 +289,12 @@ def figure_overlap_annot_segm_image(annot, segm, img=None, subfig_size=9): axarr[0].set_title('Annotation') axarr[0].imshow(img) axarr[0].imshow(annot, alpha=0.2) - axarr[0].contour(annot, levels=np.unique(annot), linewidth=2) + axarr[0].contour(annot, levels=np.unique(annot), linewidths=2) axarr[1].set_title('Segmentation') axarr[1].imshow(img) axarr[1].imshow(segm, alpha=0.2) - axarr[1].contour(segm, levels=np.unique(segm), linewidth=2) + axarr[1].contour(segm, levels=np.unique(segm), linewidths=2) # visualise the 3th label axarr[2].set_title('difference annot & segment') @@ -307,8 +307,8 @@ def figure_overlap_annot_segm_image(annot, segm, img=None, subfig_size=9): boundaries=np.linspace(-max_val - 0.5, max_val + 0.5, max_val * 2 + 2)) # plt.clim(-max_val - 0.5, max_val - 0.5) - # axarr[2].contour(annot, levels=np.unique(annot), linewidth=1, colors='g') - # axarr[2].contour(segm, levels=np.unique(segm), linewidth=1, colors='b') + # axarr[2].contour(annot, levels=np.unique(annot), linewidths=1, colors='g') + # axarr[2].contour(segm, levels=np.unique(segm), linewidths=1, colors='b') for i in range(len(axarr)): axarr[i].axis('off') @@ -427,13 +427,21 @@ def figure_annot_slic_histogram_labels(dict_label_hist, slic_size=-1, :param int slic_size: :param float slic_regul: :return Figure: + + >>> np.random.seed(0) + >>> dict_label_hist = {'a': np.tile([1, 0, 0, 0, 1], (25, 1)), + ... 
'b': np.tile([0, 1, 0, 0, 1], (30, 1))} + >>> figure_annot_slic_histogram_labels(dict_label_hist) # doctest: +ELLIPSIS + """ matrix_hist_all = np.concatenate(tuple(dict_label_hist.values()), axis=0) - nb_labels = matrix_hist_all.shape[1] + lb_sums = np.sum(matrix_hist_all, axis=0) fig = plt.figure(figsize=(10, 5)) ax = fig.gca() - for i in range(nb_labels): + for i, nb in enumerate(lb_sums): + if nb == 0: + continue patches, bin_edges = np.histogram(matrix_hist_all[:, i], bins=50, density=True) bins = [(a + b) / 2. for a, b in zip(bin_edges[:-1], bin_edges[1:])] @@ -592,7 +600,7 @@ def draw_eggs_ellipse(mask_shape, pos_ant, pos_lat, pos_post, def parse_annot_rectangles(rows_slice): """ parse annotation fromDF to lists - :param row_slice: + :param rows_slice: :return: >>> import pandas as pd @@ -732,13 +740,13 @@ def draw_image_segm_points(ax, img, points, labels=None, slic=None, if slic is not None: ax.contour(slic, levels=np.unique(slic), alpha=0.5, colors=clr_slic, - linewidth=0.5) + linewidths=0.5) # fig.gca().imshow(mark_boundaries(img, slic)) if seg_contour is not None and isinstance(seg_contour, np.ndarray): assert img.shape[:2] == seg_contour.shape[:2], \ 'image size %s and segm. %s should match' \ % (repr(img.shape), repr(seg_contour.shape)) - ax.contour(seg_contour, linewidth=3, levels=np.unique(seg_contour)) + ax.contour(seg_contour, linewidths=3, levels=np.unique(seg_contour)) if labels is not None: assert len(points) == len(labels), \ 'number of points (%i) and labels (%i) should match' \ @@ -776,7 +784,7 @@ def figure_image_segm_centres(img, segm, centers=None, segm_show = segm if segm.ndim > 2: segm_show = np.argmax(segm, axis=2) - ax.contour(segm_show, cmap=cmap_contour, linewidth=0.5) + ax.contour(segm_show, cmap=cmap_contour, linewidths=0.5) if isinstance(centers, list): ax.plot(np.array(centers)[:, 1], np.array(centers)[:, 0], 'o', color=COLOR_ORANGE) @@ -878,7 +886,7 @@ def figure_rg2sp_debug_complete(seg, slic, dict_rg2sp_debug, iter_index=-1, :param ndarray seg: :param ndarray slic: :param dict_rg2sp_debug: - :param int iter: + :param int iter_index: :param int max_size: :return Figure: @@ -991,7 +999,7 @@ def make_overlap_images_chess(imgs, chess_field=SIZE_CHESS_FIELD): # copy images to the maximal image for i, im in enumerate(imgs): imgs_w[i][:im.shape[0], :im.shape[1]] = im - img = np.zeros(max_size, dtype=im.dtype) + img = np.zeros(max_size, dtype=imgs[0].dtype) idx_row = 0 for i in range(int(max_size[0] / chess_field)): idx = idx_row diff --git a/imsegm/utils/experiments.py b/imsegm/utils/experiments.py index 50a3f632..ef50e1ef 100755 --- a/imsegm/utils/experiments.py +++ b/imsegm/utils/experiments.py @@ -1,7 +1,7 @@ """ Framework for general experiments -Copyright (C) 2014-2016 Jiri Borovec +Copyright (C) 2014-2018 Jiri Borovec """ import os @@ -81,7 +81,8 @@ def __create_folder(self, time_stamp): """ # create results folder for experiments if not os.path.exists(self.params.get('path_out')): - logging.error('no results folder: %s', repr(self.p.get('path_out'))) + logging.error('no results folder: %s', + repr(self.params.get('path_out'))) self.params['path_exp'] = '' return self.params = create_experiment_folder(self.params, @@ -137,15 +138,15 @@ def create_experiment_folder(params, dir_name, stamp_unique=True, skip_load=True if not os.path.exists(path_expt): os.mkdir(path_expt) path_config = os.path.join(path_expt, CONFIG_JSON) - params.update({'computer': os.uname(), - 'path_exp': path_expt}) if os.path.exists(path_config) and not skip_load: + params_in = 
params
logging.debug('loading saved params from file "%s"', CONFIG_JSON)
with open(path_config, 'r') as fp:
params = json.load(fp)
- params.update({'computer': os.uname(),
- 'path_exp': path_expt})
+ params.update({k: params_in[k] for k in params_in if 'path' in k})
logging.info('loaded following PARAMETERS: %s', string_dict(params))
+ params.update({'computer': os.uname(),
+ 'path_exp': path_expt})
logging.debug('saving params to file "%s"', CONFIG_JSON)
with open(path_config, 'w') as f:
json.dump(params, f)
@@ -297,7 +298,8 @@ def create_subfolders(path_out, list_folders):
class WrapExecuteSequence:
""" wrapper for execution paralle of single thread as for...
- >>> it = WrapExecuteSequence(lambda x: (x, x ** 2), range(5), 1)
+ >>> it = WrapExecuteSequence(lambda x: (x, x ** 2), range(5),
+ ... nb_jobs=1, ordered=True)
>>> list(it)
[(0, 0), (1, 1), (2, 4), (3, 9), (4, 16)]
>>> it = WrapExecuteSequence(sum, [[0, 1]] * 5, 2, desc=None)
>>> [o for o in it]
[1, 1, 1, 1, 1]
>>> it = WrapExecuteSequence(min, range(5))
>>> [o for o in it]
[0, 0, 0, 0, 0]
"""
- def __init__(self, wrap_func, iterate_vals, nb_jobs=NB_THREADS, desc=''):
+ def __init__(self, wrap_func, iterate_vals, nb_jobs=NB_THREADS, desc='',
+ ordered=False):
+ """ initialise this wrapper for parallel execution
+
+ :param wrap_func: function which will be executed in the iterations
+ :param [] iterate_vals: list or iterator which will be iterated over
+ :param int nb_jobs: number of jobs running in parallel
+ :param str desc: description for the progress bar,
+ if it is set to None, the bar is suppressed
+ :param bool ordered: whether to enforce ordering in the parallelism
+ """
self.wrap_func = wrap_func
self.iterate_vals = list(iterate_vals)
self.nb_jobs = nb_jobs
self.desc = desc
+ self.ordered = ordered
def __iter__(self):
if self.desc is not None:
@@ -323,7 +336,10 @@ def __iter__(self):
if self.nb_jobs > 1:
logging.debug('perform sequential in %i threads', self.nb_jobs)
pool = mproc.Pool(self.nb_jobs)
- for out in pool.imap_unordered(self.wrap_func, self.iterate_vals):
+
+ pooling = pool.imap if self.ordered else pool.imap_unordered
+
+ for out in pooling(self.wrap_func, self.iterate_vals):
yield out
if tqdm_bar is not None:
tqdm_bar.update()
@@ -339,7 +355,8 @@ def __len__(self):
return len(self.iterate_vals)
-# def wrap_execute_parallel(wrap_func, iterate_vals, nb_jobs=NB_THREADS, desc=''):
+# def wrap_execute_parallel(wrap_func, iterate_vals,
+# nb_jobs=NB_THREADS, desc=''):
# """ wrapper for execution paralle of single thread as for...
# # :param func wrap_func: diff --git a/imsegm/utils/read_zvi.py b/imsegm/utils/read_zvi.py index 166db15a..78b14412 100755 --- a/imsegm/utils/read_zvi.py +++ b/imsegm/utils/read_zvi.py @@ -14,7 +14,7 @@ >>> import os, sys >>> sys.path += [os.path.abspath(os.path.join('..', '..'))] >>> import imsegm.utils.data_io as tl_io ->>> path_file = os.path.join('images', 'others', 'sample.zvi') +>>> path_file = os.path.join('data_images', 'others', 'sample.zvi') >>> path_file = tl_io.update_path(path_file) >>> n = get_layer_count(path_file) >>> get_dir(path_file) # doctest: +ELLIPSIS diff --git a/notebooks/RG2SP_region-growing.ipynb b/notebooks/RG2Sp_region-growing.ipynb similarity index 99% rename from notebooks/RG2SP_region-growing.ipynb rename to notebooks/RG2Sp_region-growing.ipynb index 3c7cbbd0..6d6654ac 100755 --- a/notebooks/RG2SP_region-growing.ipynb +++ b/notebooks/RG2Sp_region-growing.ipynb @@ -77,8 +77,8 @@ "source": [ "COLORS = 'bgrmyck'\n", "RG2SP_THRESHOLDS = seg_rg.DEFAULT_RG2SP_THRESHOLDS\n", - "PATH_IMAGES = tl_io.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", - "PATH_DATA = tl_io.update_path('data', absolute=True)\n", + "PATH_IMAGES = tl_io.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", + "PATH_DATA = tl_io.update_path('data_images', absolute=True)\n", "PATH_OUT = tl_io.update_path('output', absolute=True)\n", "print ([os.path.basename(p) for p in glob.glob(os.path.join(PATH_IMAGES, '*')) if os.path.isdir(p)])" ] diff --git a/notebooks/RG2Sp_shape-models.ipynb b/notebooks/RG2Sp_shape-models.ipynb index 9d01b142..8b047c32 100755 --- a/notebooks/RG2Sp_shape-models.ipynb +++ b/notebooks/RG2Sp_shape-models.ipynb @@ -80,8 +80,8 @@ ], "source": [ "COLORS = 'bgrmyck'\n", - "PATH_IMAGES = tl_io.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", - "PATH_DATA = tl_io.update_path('data', absolute=True)\n", + "PATH_IMAGES = tl_io.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", + "PATH_DATA = tl_io.update_path('data_images', absolute=True)\n", "PATH_OUT = tl_io.update_path('output', absolute=True)\n", "print ([os.path.basename(p) for p in glob.glob(os.path.join(PATH_IMAGES, '*')) if os.path.isdir(p)])\n", "dir_annot = os.path.join(PATH_IMAGES, 'annot_eggs')\n", diff --git a/notebooks/egg-center_candidates-clustering.ipynb b/notebooks/egg-center_candidates-clustering.ipynb index 87a3f370..c2910d51 100644 --- a/notebooks/egg-center_candidates-clustering.ipynb +++ b/notebooks/egg-center_candidates-clustering.ipynb @@ -78,7 +78,7 @@ "outputs": [], "source": [ "name = 'insitu7545'\n", - "PATH_BASE = tl_io.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", + "PATH_BASE = tl_io.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", "PATH_IMAGES = os.path.join(PATH_BASE, 'image')\n", "PATH_SEGM = os.path.join(PATH_BASE, 'segm')\n", "PATH_ANNOT = os.path.join(PATH_BASE, 'annot_eggs')\n", diff --git a/notebooks/egg-detect_ellipse-fitting.ipynb b/notebooks/egg-detect_ellipse-fitting.ipynb index cd51700e..6049773d 100755 --- a/notebooks/egg-detect_ellipse-fitting.ipynb +++ b/notebooks/egg-detect_ellipse-fitting.ipynb @@ -74,7 +74,7 @@ }, "outputs": [], "source": [ - "PATH_BASE = tl_io.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", + "PATH_BASE = tl_io.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", "PATH_IMAGES = os.path.join(PATH_BASE, 'image')\n", "PATH_SEGM = os.path.join(PATH_BASE, 'segm')\n", "PATH_ANNOT = os.path.join(PATH_BASE, 'annot_eggs')\n", 
diff --git a/notebooks/egg_segment_graphcut.ipynb b/notebooks/egg_segment_graphcut.ipynb index ede08a3f..ac22a0ed 100755 --- a/notebooks/egg_segment_graphcut.ipynb +++ b/notebooks/egg_segment_graphcut.ipynb @@ -72,7 +72,7 @@ ], "source": [ "COLORS = 'bgrmyck'\n", - "PATH_IMAGES = tl_io.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", + "PATH_IMAGES = tl_io.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", "print ([os.path.basename(p) for p in glob.glob(os.path.join(PATH_IMAGES, '*')) if os.path.isdir(p)])\n", "dir_img = os.path.join(PATH_IMAGES, 'image')\n", "dir_segm = os.path.join(PATH_IMAGES, 'segm')\n", diff --git a/notebooks/segment-2d_slic-fts-classif-gc.ipynb b/notebooks/segment-2d_slic-fts-classif-gc.ipynb index 880cbcb9..1cf523d7 100755 --- a/notebooks/segment-2d_slic-fts-classif-gc.ipynb +++ b/notebooks/segment-2d_slic-fts-classif-gc.ipynb @@ -58,7 +58,7 @@ } ], "source": [ - "path_dir = tl_data.update_path(os.path.join('images', 'drosophila_ovary_slice'))\n", + "path_dir = tl_data.update_path(os.path.join('data_images', 'drosophila_ovary_slice'))\n", "path_images = os.path.join(path_dir, 'image')\n", "print ([os.path.basename(p) for p in glob.glob(os.path.join(path_images, '*.jpg'))])\n", "# loading images\n", @@ -169,7 +169,7 @@ "outputs": [], "source": [ "dict_debug = {}\n", - "seg = segm_pipe.segment_color2d_slic_features_classif_graphcut(img2, classif, clr_space, sp_size, sp_regul, \n", + "seg = segm_pipe.segment_color2d_slic_features_model_graphcut(img2, classif, clr_space, sp_size, sp_regul, \n", " gc_regul=1., dict_features=dict_features, gc_edge_type='model', dict_debug_imgs=dict_debug)" ] }, diff --git a/notebooks/segment-2d_slic-fts-model-gc.ipynb b/notebooks/segment-2d_slic-fts-model-gc.ipynb index 3862a023..2b3f2d6a 100644 --- a/notebooks/segment-2d_slic-fts-model-gc.ipynb +++ b/notebooks/segment-2d_slic-fts-model-gc.ipynb @@ -68,9 +68,9 @@ } ], "source": [ - "path_dir = tl_data.update_path(os.path.join('images', 'see_starfish'))\n", + "path_dir = tl_data.update_path(os.path.join('data_images', 'others'))\n", "print ([os.path.basename(p) for p in glob.glob(os.path.join(path_dir, '*.jpg'))])\n", - "path_img = os.path.join(path_dir, 'stars_nb2.jpg')\n", + "path_img = os.path.join(path_dir, 'sea_starfish-2.jpg')\n", "\n", "img = np.array(Image.open(path_img))\n", "\n", @@ -122,8 +122,8 @@ }, "outputs": [], "source": [ - "scaler, pca, model = segm_pipe.estim_model_classes_group([img], nb_classes, clr_space, sp_size, sp_regul, \n", - " dict_features=dict_features, pca_coef=None, proba_type='GMM')" + "model, _ = segm_pipe.estim_model_classes_group([img], nb_classes, clr_space, sp_size, sp_regul, \n", + " dict_features=dict_features, pca_coef=None, estim_model='GMM')" ] }, { @@ -151,7 +151,7 @@ ], "source": [ "dict_debug = {}\n", - "seg = segm_pipe.segment_color2d_slic_features_model_graphcut(img, scaler, pca, model, clr_space, sp_size, sp_regul, \n", + "seg = segm_pipe.segment_color2d_slic_features_model_graphcut(img, model, clr_space, sp_size, sp_regul, \n", " dict_features=dict_features, gc_regul=5., gc_edge_type='color', dict_debug_imgs=dict_debug)" ] }, diff --git a/notebooks/transform-img-plane_inter-circle.ipynb b/notebooks/transform-img-plane_inter-circle.ipynb index b27a3643..2a2ce549 100755 --- a/notebooks/transform-img-plane_inter-circle.ipynb +++ b/notebooks/transform-img-plane_inter-circle.ipynb @@ -43,7 +43,7 @@ } ], "source": [ - "path_image = os.path.abspath(os.path.join('images', 'other', 'industry.jpg'))\n", + 
"path_image = os.path.abspath(os.path.join('data_images', 'other', 'industry.jpg'))\n", "img = Image.open(path_image)\n", "img = img.resize((int(img.width * 0.5), int(img.height * 0.5)), Image.ANTIALIAS)\n", "img = np.array(img)\n", diff --git a/setup.cfg b/setup.cfg index 224a7795..0c61b02b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,4 @@ [metadata] -description-file = README.md \ No newline at end of file +description-file = README.md +license-file = LICENSE +requirements-file = requirements.txt \ No newline at end of file diff --git a/setup.py b/setup.py index 1c5b7859..65c42012 100644 --- a/setup.py +++ b/setup.py @@ -47,9 +47,11 @@ def finalize_options(self): def _parse_requirements(file_path): - pip_version = list(map(int, pkg_resources.get_distribution('pip').version.split('.')[:2])) + pip_ver = pkg_resources.get_distribution('pip').version + pip_version = list(map(int, pip_ver.split('.')[:2])) if pip_version >= [6, 0]: - raw = pip.req.parse_requirements(file_path, session=pip.download.PipSession()) + raw = pip.req.parse_requirements(file_path, + session=pip.download.PipSession()) else: raw = pip.req.parse_requirements(file_path) return [str(i.req) for i in raw]