From 1d639e9e0e64de2ca3add0af41983bbc8c14ded7 Mon Sep 17 00:00:00 2001 From: "jean-baptiste.durand1" Date: Mon, 12 Jan 2026 11:25:13 +0100 Subject: [PATCH 1/6] Fixing wrapper for class Clusters --- doc/Doxyfile | 2 +- doc/examples/clustering.ipynb | 1433 +++++++++++++++++++----- src/wrapper/export_distance_matrix.cpp | 36 +- test/test_cluster.py | 4 + test/test_distance_matrix.py | 1 + test/test_matrix.py | 30 +- 6 files changed, 1199 insertions(+), 307 deletions(-) diff --git a/doc/Doxyfile b/doc/Doxyfile index 3b908996..7a1ee5d4 100644 --- a/doc/Doxyfile +++ b/doc/Doxyfile @@ -769,7 +769,7 @@ FILE_PATTERNS = # be searched for input files as well. # The default value is: NO. -RECURSIVE = NO +RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a diff --git a/doc/examples/clustering.ipynb b/doc/examples/clustering.ipynb index 0eb0f12e..6c9eac58 100644 --- a/doc/examples/clustering.ipynb +++ b/doc/examples/clustering.ipynb @@ -6,12 +6,20 @@ "metadata": {}, "source": [ "# Clustering with stat_tool \n", - "This notebook illustates how to perform clustering with stat_tool.Cluster" + "## This notebook illustates how to perform clustering of vectors with stat_tool.cluster ##" + ] + }, + { + "cell_type": "markdown", + "id": "b2d3987a-58b5-4568-9ae1-b054ce159ce1", + "metadata": {}, + "source": [ + "Load a data set (chene_sessile.vec) " ] }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 1, "id": "c291d761-82a6-4f0f-a23f-25b2d1adc384", "metadata": {}, "outputs": [ @@ -19,201 +27,7 @@ "name": "stdout", "output_type": "stream", "text": [ - "1 \n", - "2 # marginal histogram - sample size: 138\n", - "3 # mean: 5.19565 variance: 21.341 standard deviation: 4.61963\n", - "4 \n", - "5 # correlation matrix\n", - "6 \n", - "7 # 1 2 3 4 5 6\n", - "8 # 1 1 -0.223663 -0.75155 -0.316395 -0.258309 -0.775112\n", - "9 # 2 -0.223663 1 0.557942 0.649939 
0.883212 0.386496\n", - "10 # 3 -0.75155 0.557942 1 0.446346 0.501469 0.693231\n", - "11 # 4 -0.316395 0.649939 0.446346 1 0.681954 0.378931\n", - "12 # 5 -0.258309 0.883212 0.501469 0.681954 1 0.425416\n", - "13 # 6 -0.775112 0.386496 0.693231 0.378931 0.425416 1\n", - "14 \n", - "15 # reference t-value: 1.978 reference critical probability: 0.05\n", - "16 # limit correlation coefficient: 0.167224\n", - "17 \n", - "18 # reference t-value: 2.61289 reference critical probability: 0.01\n", - "19 # limit correlation coefficient: 0.218633\n", - "20 \n", - "21 1995 110 219 2 52 14 # (1)\n", - "22 1996 17 119 2 24 9 # (2)\n", - "23 1997 57 101 2 33 1 # (3)\n", - "24 1995 88 214 2 50 12 # (4)\n", - "25 1996 35 127 1 18 8 # (5)\n", - "26 1997 77 92 2 40 0 # (6)\n", - "27 1995 59 136 2 35 6 # (7)\n", - "28 1996 14 93 1 17 10 # (8)\n", - "29 1997 68 86 2 40 2 # (9)\n", - "30 1995 79 180 2 55 7 # (10)\n", - "31 1996 26 132 1 18 8 # (11)\n", - "32 1997 59 129 2 30 0 # (12)\n", - "33 1995 65 210 2 40 6 # (13)\n", - "34 1996 31 123 1 33 11 # (14)\n", - "35 1997 69 104 2 36 1 # (15)\n", - "36 1995 35 127 2 28 6 # (16)\n", - "37 1996 18 106 2 27 5 # (17)\n", - "38 1997 60 103 2 33 1 # (18)\n", - "39 1995 23 145 2 16 8 # (19)\n", - "40 1996 24 103 1 9 3 # (20)\n", - "41 1997 71 89 2 54 0 # (21)\n", - "42 1995 45 113 2 33 10 # (22)\n", - "43 1996 8 85 1 12 2 # (23)\n", - "44 1997 35 57 1 22 0 # (24)\n", - "45 1995 83 157 2 35 7 # (25)\n", - "46 1996 4 95 1 10 3 # (26)\n", - "47 1997 37 68 2 29 0 # (27)\n", - "48 1995 33 109 2 24 6 # (28)\n", - "49 1996 12 66 1 17 4 # (29)\n", - "50 1997 53 63 2 40 1 # (30)\n", - "51 1995 63 112 2 52 14 # (31)\n", - "52 1996 25 60 1 19 8 # (32)\n", - "53 1997 6 39 1 12 0 # (33)\n", - "54 1995 55 124 3 27 8 # (34)\n", - "55 1996 23 70 1 25 2 # (35)\n", - "56 1997 20 64 1 21 0 # (36)\n", - "57 1995 70 125 2 42 14 # (37)\n", - "58 1996 26 102 1 17 7 # (38)\n", - "59 1997 50 71 2 33 0 # (39)\n", - "60 1995 79 119 2 52 9 # (40)\n", - "61 1996 26 73 1 22 6 
# (41)\n", - "62 1997 9 41 1 13 0 # (42)\n", - "63 1995 69 142 2 33 12 # (43)\n", - "64 1996 26 85 1 22 8 # (44)\n", - "65 1997 21 51 1 14 0 # (45)\n", - "66 1995 50 146 2 40 8 # (46)\n", - "67 1996 22 95 1 22 6 # (47)\n", - "68 1997 26 51 1 20 0 # (48)\n", - "69 1995 81 145 2 53 19 # (49)\n", - "70 1996 15 61 1 23 3 # (50)\n", - "71 1997 19 53 2 21 0 # (51)\n", - "72 1995 74 160 2 45 8 # (52)\n", - "73 1996 38 102 1 22 5 # (53)\n", - "74 1997 34 68 1 33 0 # (54)\n", - "75 1995 59 177 2 35 1 # (55)\n", - "76 1996 29 140 1 22 5 # (56)\n", - "77 1997 78 101 2 53 2 # (57)\n", - "78 1995 49 172 2 37 9 # (58)\n", - "79 1996 50 144 3 42 9 # (59)\n", - "80 1997 62 70 2 50 2 # (60)\n", - "81 1995 14 109 1 16 1 # (61)\n", - "82 1996 33 100 2 21 5 # (62)\n", - "83 1997 57 79 2 45 0 # (63)\n", - "84 1995 70 142 2 61 12 # (64)\n", - "85 1996 26 89 1 29 5 # (65)\n", - "86 1997 28 79 1 32 0 # (66)\n", - "87 1995 66 129 2 58 7 # (67)\n", - "88 1996 38 90 1 29 5 # (68)\n", - "89 1997 24 43 1 21 0 # (69)\n", - "90 1995 19 90 2 22 3 # (70)\n", - "91 1996 6 84 1 8 1 # (71)\n", - "92 1997 63 71 2 37 2 # (72)\n", - "93 1995 56 116 2 47 8 # (73)\n", - "94 1996 16 76 1 20 5 # (74)\n", - "95 1997 21 45 1 19 0 # (75)\n", - "96 1995 73 169 2 35 11 # (76)\n", - "97 1996 27 126 1 16 9 # (77)\n", - "98 1997 35 93 1 16 0 # (78)\n", - "99 1995 53 141 2 29 17 # (79)\n", - "100 1996 33 93 1 20 4 # (80)\n", - "101 1997 24 60 1 23 0 # (81)\n", - "102 1995 51 132 3 51 14 # (82)\n", - "103 1996 28 117 1 23 4 # (83)\n", - "104 1997 90 122 2 53 7 # (84)\n", - "105 1995 58 152 4 50 15 # (85)\n", - "106 1996 15 96 1 11 2 # (86)\n", - "107 1997 75 86 2 42 2 # (87)\n", - "108 1995 39 138 2 37 9 # (88)\n", - "109 1996 35 106 1 22 7 # (89)\n", - "110 1997 38 85 1 26 0 # (90)\n", - "111 1995 37 163 2 28 8 # (91)\n", - "112 1996 29 116 2 38 6 # (92)\n", - "113 1997 58 102 2 44 3 # (93)\n", - "114 1995 70 136 2 54 10 # (94)\n", - "115 1996 25 75 1 23 4 # (95)\n", - "116 1997 9 50 1 15 0 # (96)\n", - "117 1995 42 
135 2 33 6 # (97)\n", - "118 1996 23 111 1 17 2 # (98)\n", - "119 1997 79 96 2 37 2 # (99)\n", - "120 1995 63 131 2 40 15 # (100)\n", - "121 1996 29 79 1 17 4 # (101)\n", - "122 1997 23 86 2 20 2 # (102)\n", - "123 1995 34 130 2 27 6 # (103)\n", - "124 1996 12 102 1 13 4 # (104)\n", - "125 1997 57 85 2 39 1 # (105)\n", - "126 1995 72 176 2 41 5 # (106)\n", - "127 1996 30 99 2 31 7 # (107)\n", - "128 1997 28 70 1 19 0 # (108)\n", - "129 1995 61 119 2 45 5 # (109)\n", - "130 1996 30 116 1 23 7 # (110)\n", - "131 1997 40 87 1 38 0 # (111)\n", - "132 1995 95 210 2 61 17 # (112)\n", - "133 1996 20 120 1 18 5 # (113)\n", - "134 1997 66 99 2 46 0 # (114)\n", - "135 1995 54 150 2 38 9 # (115)\n", - "136 1996 36 94 1 23 8 # (116)\n", - "137 1997 31 63 2 24 0 # (117)\n", - "138 1995 93 134 3 59 14 # (118)\n", - "139 1996 16 90 1 15 2 # (119)\n", - "140 1997 38 69 2 29 0 # (120)\n", - "141 1995 72 190 2 51 15 # (121)\n", - "142 1996 33 116 2 27 7 # (122)\n", - "143 1997 76 85 2 50 1 # (123)\n", - "144 1995 38 95 2 27 12 # (124)\n", - "145 1996 11 73 1 11 3 # (125)\n", - "146 1997 42 61 2 24 0 # (126)\n", - "147 1995 20 138 2 24 4 # (127)\n", - "148 1996 19 113 1 15 3 # (128)\n", - "149 1997 42 94 1 33 0 # (129)\n", - "150 1995 65 175 2 63 12 # (130)\n", - "151 1996 32 122 1 27 8 # (131)\n", - "152 1997 34 81 1 25 0 # (132)\n", - "153 1995 58 145 2 42 9 # (133)\n", - "154 1996 19 95 1 18 3 # (134)\n", - "155 1997 85 86 3 57 2 # (135)\n", - "156 1995 46 155 2 28 6 # (136)\n", - "157 1996 24 107 2 26 9 # (137)\n", - "158 1997 31 70 2 21 0 # (138)\n", - "1 # 138 vectors\n", - "2 \n", - "3 6 VARIABLES\n", - "4 \n", - "5 VARIABLE 1 : INT # (minimum value: 1995, maximum value: 1997)\n", - "6 \n", - "7 # marginal histogram - sample size: 138\n", - "8 # mean: 1996 variance: 0.671533 standard deviation: 0.819471\n", - "9 \n", - "10 VARIABLE 2 : INT # (minimum value: 4, maximum value: 110)\n", - "11 \n", - "12 # marginal histogram - sample size: 138\n", - "13 # mean: 42.8043 variance: 
539.064 standard deviation: 23.2177\n", - "14 \n", - "15 VARIABLE 3 : INT # (minimum value: 39, maximum value: 219)\n", - "16 \n", - "17 # marginal histogram - sample size: 138\n", - "18 # mean: 107.725 variance: 1424.04 standard deviation: 37.7365\n", - "19 \n", - "20 VARIABLE 4 : INT # (minimum value: 1, maximum value: 4)\n", - "21 \n", - "22 # marginal histogram - sample size: 138\n", - "23 # mean: 1.63768 variance: 0.349519 standard deviation: 0.591201\n", - "24 \n", - "25 # | marginal histogram\n", - "26 # 0 0\n", - "27 # 1 57\n", - "28 # 2 75\n", - "29 # 3 5\n", - "30 # 4 1\n", - "31 \n", - "32 VARIABLE 5 : INT # (minimum value: 8, maximum value: 63)\n", - "33 \n", - "34 # marginal histogram - sample size: 138\n", - "35 # mean: 30.9783 variance: 179.934 standard deviation: 13.4139\n", - "36 \n", - "37 VARIABLE 6 : INT # (minimum value: 0, maximum value: 19)\n" + "Running cmake --build & --install in /home/jdurand/devlp/Git/openalea/StructureAnalysis/stat_tool/build\n" ] } ], @@ -225,184 +39,1211 @@ "\n", "from openalea.stat_tool.vectors import VectorDistance, Vectors\n", "\n", - "# from pathlib import Path\n", - "\n", - "# import pytest\n", - "\n", - "# \n", - "vec10 = Vectors(get_shared_data(\"chene_sessile.vec\"))" + "vec = Vectors(get_shared_data(\"chene_sessile.vec\"))" ] }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 2, + "id": "5241e12e-5985-43af-8955-7718c1978033", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "138" + ] + }, + "execution_count": 2, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vec.nb_vector" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "id": "660abfde-3c80-4e19-88d7-96ec56328bbb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "6" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vec.nb_variable" + ] + }, + { + "cell_type": "markdown", + "id": 
"20241e5a-bccf-466f-9f87-47e42a6c3dbc", + "metadata": {}, + "source": [ + "vec contains 138 vectors in dimension 6" + ] + }, + { + "cell_type": "markdown", + "id": "af85b5b7-177e-4ceb-b9bf-9481dd43a20c", + "metadata": {}, + "source": [ + "Discard variables 1, 3 and 6 in vec" + ] + }, + { + "cell_type": "code", + "execution_count": 4, "id": "57aaf4c2-559a-4cd9-b488-2cb64ccad54b", "metadata": {}, "outputs": [], "source": [ "from openalea.stat_tool.data_transform import SelectVariable\n", - "vec15 = SelectVariable(vec10, [1, 3, 6], Mode=\"Reject\")" + "vec2 = SelectVariable(vec, [1, 3, 6], Mode=\"Reject\")" + ] + }, + { + "cell_type": "markdown", + "id": "64686b57-8c35-40e8-a030-137b3f1f5ad0", + "metadata": {}, + "source": [ + "Computation of a 138x138 distance matrix using a standardization procedure \n", + "Arguments \"N\" specify that vector components are Numerical" ] }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 5, "id": "95550f46-928b-416b-a322-5827182dc127", "metadata": {}, + "outputs": [], + "source": [ + "matrix = Compare(vec2, VectorDistance(\"N\", \"N\", \"N\"))" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "id": "8fc0d3d7-7e2b-44fd-8762-1611b8d33753", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(138, 138)" + ] + }, + "execution_count": 6, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "matrix.nb_column, matrix.nb_row" + ] + }, + { + "cell_type": "markdown", + "id": "2401a7d5-8aac-4071-8e82-c375a602d7c5", + "metadata": {}, + "source": [ + "## Clustering using a partitioning method" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "id": "edf0f509-23a6-4eed-a576-23225f423270", + "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ + "cluster 1 (69 vectors): 69, 48, 41, 44, 32, 47, 81, 95, 11, 36, 75, 108, 56, 83, 38, 98, 113, 134, 110, 101, 77, 35, 74, 80, 50, 24, 89, 128, 5, 45, 8, 116, 119, 132, 61, 78, 53, 29, 131, 
65, 90, 96, 104, 20, 86, 66, 42, 68, 125, 14, 23, 54, 33, 26, 71, 129, 102, 51, 70, 111, 138, 19, 127, 62, 117, 137, 2, 28, 17\n", + "cluster 2 (69 vectors): 100, 13, 133, 105, 72, 9, 93, 109, 30, 115, 63, 7, 55, 37, 15, 114, 106, 46, 73, 18, 3, 87, 58, 43, 60, 76, 52, 6, 39, 31, 12, 99, 121, 123, 22, 79, 94, 88, 21, 97, 25, 40, 57, 136, 67, 49, 10, 4, 120, 92, 27, 91, 64, 124, 16, 130, 84, 107, 126, 103, 122, 112, 59, 1, 82, 34, 135, 118, 85\n", "\n", - "variable 0 dispersion: 26.3549 | 26.3549 | sqrt(2) * mean absolute deviation: 28.1644\n", - "\n", - "variable 1 dispersion: 0.586692 | 0.586692 | sqrt(2) * mean absolute deviation: 0.74498\n", - "\n", - "variable 2 dispersion: 15.2374 | 15.2374 | sqrt(2) * mean absolute deviation: 15.9119\n", - "3 VARIABLES\n", - "\n", - "DISTANCE : ABSOLUTE_VALUE\n", - "\n", - "VARIABLE 1 : NUMERIC WEIGHT : 0.333333\n", + "cluster distance matrix\n", "\n", - "VARIABLE 2 : NUMERIC WEIGHT : 0.333333\n", + " | cluster 1 | cluster 2\n", + "cluster 1 0.444986 1.46646\n", + "cluster 2 1.46646 0.608382\n", "\n", - "VARIABLE 3 : NUMERIC WEIGHT : 0.333333\n", + " | within-cluster distance | between-cluster distance | diameter | separation\n", + "cluster 1 0.444986 1.46646 1.26444 0.0656281\n", + "cluster 2 0.608382 1.46646 2.02706 0.0656281\n", "\n", - "variable 0 mean absolute difference: 26.3549\n", - "\n", - "variable 1 mean absolute difference: 0.586692\n", - "\n", - "variable 2 mean absolute difference: 15.2374\n", - "\n", - "Average distances\n", - "variable 0: 1\n", - "variable 1: 1\n", - "variable 2: 1\n" + "cluster 1: non-isolated\n", + "cluster 2: non-isolated\n", + "\n" ] } ], "source": [ - "# computation of a distance matrix using a standardization procedure\n", - "\n", - "matrix10 = Compare(vec15, VectorDistance(\"N\", \"N\", \"N\"))" + "clust1 = Clustering(matrix, \"Partition\", 2)\n", + "print(clust1)" + ] + }, + { + "cell_type": "markdown", + "id": "1f45c864-72a1-4318-971c-b21159289738", + "metadata": {}, + "source": [ + 
"Number of clusters" ] }, { "cell_type": "code", - "execution_count": 18, - "id": "edf0f509-23a6-4eed-a576-23225f423270", + "execution_count": 8, + "id": "668c60c2-c896-41d1-af1b-c23132a0159d", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "\n", - "cluster 1: 2 5 8 11 14 16 17 19 20 22 23 24 26 27 28 29 32 33 34 35 36 38 39 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 79 80 81 83 86 88 89 90 91 92 95 96 97 98 101 102 103 104 107 108 110 111 113 116 117 119 120 122 124 125 126 127 128 129 131 132 134 136 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 18 21 25 30 31 37 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 82 84 85 87 93 94 99 100 105 106 109 112 114 115 118 121 123 130 133 135\n", - "\n", - "iteration 1: 8\n", - "cluster 1: 2 5 8 11 14 16 17 19 20 23 24 26 27 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 91 95 96 98 101 102 103 104 107 108 110 111 113 116 117 119 120 122 124 125 126 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 18 21 22 25 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 92 93 94 97 99 100 105 106 109 112 114 115 118 121 123 130 133 135 136\n", - "\n", - "within-cluster distance: 0.534215 between-cluster distance: 1.47504 ratio: 0.362169\n", - "\n", - "\n", - "iteration 2: 2\n", - "cluster 1: 2 5 8 11 14 16 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 91 95 96 98 101 102 103 104 107 108 110 111 113 116 117 119 122 124 125 126 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 92 93 94 97 99 100 105 106 109 112 114 115 118 120 121 123 130 133 135 136\n", - "\n", - "within-cluster distance: 0.531784 between-cluster distance: 1.4711 ratio: 0.361488\n", - "\n", 
- "\n", - "iteration 3: 2\n", - "cluster 1: 2 5 8 11 14 16 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 95 96 98 101 102 103 104 107 108 110 111 113 116 117 119 122 125 126 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 91 92 93 94 97 99 100 105 106 109 112 114 115 118 120 121 123 124 130 133 135 136\n", - "\n", - "within-cluster distance: 0.530074 between-cluster distance: 1.46804 ratio: 0.361076\n", - "\n", - "\n", - "iteration 4: 3\n", - "cluster 1: 2 5 8 11 14 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 95 96 98 101 102 103 104 108 110 111 113 116 117 119 122 125 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 16 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 91 92 93 94 97 99 100 105 106 107 109 112 114 115 118 120 121 123 124 126 130 133 135 136\n", - "\n", - "within-cluster distance: 0.527278 between-cluster distance: 1.46666 ratio: 0.35951\n", - "\n", - "\n", - "iteration 5: 1\n", - "cluster 1: 2 5 8 11 14 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 95 96 98 101 102 104 108 110 111 113 116 117 119 122 125 127 128 129 131 132 134 1" + "2\n" ] - }, + } + ], + "source": [ + "nb_clusters = clust1.get_nb_cluster()\n", + "print(nb_clusters)" + ] + }, + { + "cell_type": "markdown", + "id": "b16f81c4-61bc-49bb-96eb-5694812cdcb0", + "metadata": {}, + "source": [ + "Cluster of individual (\"pattern\") 2" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "id": "b21c41d7-1cf2-4453-9db7-745d1cbabf36", + "metadata": {}, + "outputs": [ { "data": { "text/plain": [ - "'cluster 1 (69 vectors): 69, 48, 41, 44, 32, 47, 81, 95, 11, 
36, 75, 108, 56, 83, 38, 98, 113, 134, 110, 101, 77, 35, 74, 80, 50, 24, 89, 128, 5, 45, 8, 116, 119, 132, 61, 78, 53, 29, 131, 65, 90, 96, 104, 20, 86, 66, 42, 68, 125, 14, 23, 54, 33, 26, 71, 129, 102, 51, 70, 111, 138, 19, 127, 62, 117, 137, 2, 28, 17\\ncluster 2 (69 vectors): 100, 13, 133, 105, 72, 9, 93, 109, 30, 115, 63, 7, 55, 37, 15, 114, 106, 46, 73, 18, 3, 87, 58, 43, 60, 76, 52, 6, 39, 31, 12, 99, 121, 123, 22, 79, 94, 88, 21, 97, 25, 40, 57, 136, 67, 49, 10, 4, 120, 92, 27, 91, 64, 124, 16, 130, 84, 107, 126, 103, 122, 112, 59, 1, 82, 34, 135, 118, 85\\n\\ncluster distance matrix\\n\\n | cluster 1 | cluster 2\\ncluster 1 0.444986 1.46646\\ncluster 2 1.46646 0.608382\\n\\n | within-cluster distance | between-cluster distance | diameter | separation\\ncluster 1 0.444986 1.46646 1.26444 0.0656281\\ncluster 2 0.608382 1.46646 2.02706 0.0656281\\n\\ncluster 1: non-isolated\\ncluster 2: non-isolated\\n'" + "1" ] }, - "execution_count": 18, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" - }, + } + ], + "source": [ + "clust1.get_assignment(2)" + ] + }, + { + "cell_type": "markdown", + "id": "0fcaa4df-4a1e-4cc0-bb28-bfe4615d059a", + "metadata": {}, + "source": [ + "To get the partition:" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "id": "4ae475b6-f782-4f65-a45b-317dbbb410a8", + "metadata": {}, + "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ - "37 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 16 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 91 92 93 94 97 99 100 103 105 106 107 109 112 114 115 118 120 121 123 124 126 130 133 135 136\n", - "\n", - "within-cluster distance: 0.526945 between-cluster distance: 1.4664 ratio: 0.359347\n", + "[[2, 5, 8, 11, 14, 17, 19, 20, 23, 24, 26, 28, 29, 32, 33, 35, 36, 38, 41, 42, 44, 45, 47, 48, 50, 51, 53, 54, 56, 61, 62, 65, 66, 68, 69, 70, 71, 74, 75, 77, 78, 80, 81, 83, 86, 89, 90, 95, 96, 98, 101, 102, 104, 
108, 110, 111, 113, 116, 117, 119, 125, 127, 128, 129, 131, 132, 134, 137, 138], [1, 3, 4, 6, 7, 9, 10, 12, 13, 15, 16, 18, 21, 22, 25, 27, 30, 31, 34, 37, 39, 40, 43, 46, 49, 52, 55, 57, 58, 59, 60, 63, 64, 67, 72, 73, 76, 79, 82, 84, 85, 87, 88, 91, 92, 93, 94, 97, 99, 100, 103, 105, 106, 107, 109, 112, 114, 115, 118, 120, 121, 122, 123, 124, 126, 130, 133, 135, 136]]\n" + ] + } + ], + "source": [ + "part1 = [[i for i in range(1,vec.nb_vector+1) if clust1.get_assignment(i) == c] for c in range(1,nb_clusters+1)]\n", + "print(part1)" + ] + }, + { + "cell_type": "markdown", + "id": "8511fea8-2af6-402e-b7f1-1aabd5561f69", + "metadata": {}, + "source": [ + "Recreate clusters from partition" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "id": "fa756645-bbc8-46b9-92ce-6d19282d1f53", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "cluster 1 (69 vectors): 69, 48, 41, 44, 32, 47, 81, 95, 11, 36, 75, 108, 56, 83, 38, 98, 113, 134, 110, 101, 77, 35, 74, 80, 50, 24, 89, 128, 5, 45, 8, 116, 119, 132, 61, 78, 53, 29, 131, 65, 90, 96, 104, 20, 86, 66, 42, 68, 125, 14, 23, 54, 33, 26, 71, 129, 102, 51, 70, 111, 138, 19, 127, 62, 117, 137, 2, 28, 17\n", + "cluster 2 (69 vectors): 100, 13, 133, 105, 72, 9, 93, 109, 30, 115, 63, 7, 55, 37, 15, 114, 106, 46, 73, 18, 3, 87, 58, 43, 60, 76, 52, 6, 39, 31, 12, 99, 121, 123, 22, 79, 94, 88, 21, 97, 25, 40, 57, 136, 67, 49, 10, 4, 120, 92, 27, 91, 64, 124, 16, 130, 84, 107, 126, 103, 122, 112, 59, 1, 82, 34, 135, 118, 85\n", "\n", + "cluster distance matrix\n", "\n", - "iteration 6: 1\n", - "cluster 1: 2 5 8 11 14 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 95 96 98 101 102 104 108 110 111 113 116 117 119 125 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 16 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 91 92 93 94 
97 99 100 103 105 106 107 109 112 114 115 118 120 121 122 123 124 126 130 133 135 136\n", + " | cluster 1 | cluster 2\n", + "cluster 1 0.444986 1.46646\n", + "cluster 2 1.46646 0.608382\n", "\n", - "within-cluster distance: 0.526684 between-cluster distance: 1.46646 ratio: 0.359154\n", + " | within-cluster distance | between-cluster distance | diameter | separation\n", + "cluster 1 0.444986 1.46646 1.26444 0.0656281\n", + "cluster 2 0.608382 1.46646 2.02706 0.0656281\n", "\n", + "cluster 1: non-isolated\n", + "cluster 2: non-isolated\n", + "\n" + ] + } + ], + "source": [ + "print(matrix.partitioning_clusters(part1))" + ] + }, + { + "cell_type": "markdown", + "id": "114178aa-209d-4602-974a-b176c1829671", + "metadata": {}, + "source": [ + "## Hierarchical clustering and dendrogram using an agglomerative algorithm" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "id": "0f5b1c92-751b-4a5c-93f4-041f2297ec82", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "138 vectors\n", "\n", - "iteration 7: 0\n", - "cluster 1: 2 5 8 11 14 17 19 20 23 24 26 28 29 32 33 35 36 38 41 42 44 45 47 48 50 51 53 54 56 61 62 65 66 68 69 70 71 74 75 77 78 80 81 83 86 89 90 95 96 98 101 102 104 108 110 111 113 116 117 119 125 127 128 129 131 132 134 137 138\n", - "cluster 2: 1 3 4 6 7 9 10 12 13 15 16 18 21 22 25 27 30 31 34 37 39 40 43 46 49 52 55 57 58 59 60 63 64 67 72 73 76 79 82 84 85 87 88 91 92 93 94 97 99 100 103 105 106 107 109 112 114 115 118 120 121 122 123 124 126 130 133 135 136\n", + " | child cluster distance | within-cluster distance | between-cluster distance | diameter | separation | composition\n", + "step 1 0 0 0.845491 0 0.0563999 7, 55\n", + "step 2 0 0 0.883565 0 0.0345239 24, 89\n", + "step 3 0 0 0.911651 0 0.0345239 41, 44\n", + "step 4 0.0126479 0.0126479 1.18625 0.0126479 0.0782759 21, 94\n", + "step 5 0.0126479 0.0126479 0.760368 0.0126479 0.021876 27, 120\n", + "step 6 0.0126479 0.0126479 0.913273 
0.0126479 0.0345239 81, 95\n", + "step 7 0.0126479 0.0126479 0.762643 0.0126479 0.0345239 103, 122\n", + "step 8 0.0126479 0.0126479 1.0078 0.0126479 0.0345239 113, 134\n", + "step 9 0.021876 0.021876 1.08646 0.021876 0.0252957 8, 61\n", + "step 10 0.021876 0.021876 0.967306 0.021876 0.0345239 11, 38\n", + "step 11 0.0282 0.0230159 0.765583 0.0345239 0.0252957 27, 120, 91\n", + "step 12 0.021876 0.021876 0.868665 0.021876 0.0563999 51, 70\n", + "step 13 0.0362337 0.0314478 1.09889 0.0471717 0.0471717 8, 61, 29\n", + "step 14 0.0252957 0.0252957 0.921014 0.0252957 0.0379436 13, 100\n", + "step 15 0.0440957 0.0335558 0.77085 0.0598196 0.0345239 16, 27, 120, 91\n", + "step 16 0.0252957 0.0252957 1.2527 0.0252957 0.0345239 23, 33\n", + "step 17 0.0252957 0.0252957 0.775549 0.0252957 0.0656281 28, 117\n", + "step 18 0.0252957 0.0252957 0.79839 0.0252957 0.0656281 62, 138\n", + "step 19 0.0252957 0.0252957 0.890767 0.0252957 0.0345239 83, 110\n", + "step 20 0.0454619 0.0375999 0.968891 0.0563999 0.0345239 11, 38, 32\n", + "step 21 0.0739196 0.0536893 0.78211 0.106991 0.0345239 16, 27, 120, 91, 103, 122\n", + "step 22 0.0471717 0.0398797 1.24669 0.0598196 0.0379436 23, 33, 42\n", + "step 23 0.0345239 0.0230159 0.887222 0.0345239 0.0379436 24, 89, 116\n", + "step 24 0.047968 0.042784 0.968338 0.0656281 0.0345239 11, 38, 32, 48\n", + "step 25 0.0704999 0.0538703 0.981483 0.100152 0.0379436 11, 38, 32, 48, 77\n", + "step 26 0.0345239 0.0345239 1.23769 0.0345239 0.0379436 40, 57\n", + "step 27 0.0408478 0.0293399 0.925471 0.0471717 0.0379436 41, 44, 81, 95\n", + "step 28 0.0345239 0.0314478 0.899478 0.0345239 0.0379436 56, 83, 110\n", + "step 29 0.0345239 0.0345239 0.938001 0.0345239 0.043752 63, 93\n", + "step 30 0.0408478 0.0314478 1.00594 0.0471717 0.0505915 75, 113, 134\n", + "step 31 0.0513877 0.0530317 0.787684 0.106991 0.0656281 16, 27, 120, 91, 103, 122, 124\n", + "step 32 0.0379436 0.0379436 0.855993 0.0379436 0.0563999 2, 127\n", + "step 33 0.0379436 0.0379436 
0.831399 0.0379436 0.0563999 3, 18\n", + "step 34 0.0505915 0.0421595 0.937767 0.0632393 0.0656281 9, 13, 100\n", + "step 35 0.0379436 0.0379436 0.87456 0.0379436 0.0598196 14, 54\n", + "step 36 0.0379436 0.0379436 0.779505 0.0379436 0.0632393 22, 97\n", + "step 37 0.0410196 0.0320178 0.894159 0.0471717 0.0690478 24, 89, 116, 53\n", + "step 38 0.0379436 0.0379436 0.855449 0.0379436 0.0563999 30, 46\n", + "step 39 0.0559222 0.0545543 0.984221 0.100152 0.0379436 11, 38, 32, 48, 77, 108\n", + "step 40 0.0749736 0.0603884 0.996551 0.106991 0.0379436 11, 38, 32, 48, 77, 108, 98\n", + "step 41 0.0678767 0.0622605 1.0035 0.106991 0.043752 11, 38, 32, 48, 77, 108, 98, 101\n", + "step 42 0.0583675 0.0462282 0.936685 0.0851153 0.043752 41, 44, 81, 95, 56, 83, 110\n", + "step 43 0.0693914 0.0546356 1.239 0.0977632 0.043752 23, 33, 42, 104\n", + "step 44 0.0425577 0.0398797 1.25903 0.0471717 0.0563999 40, 57, 49\n", + "step 45 0.0379436 0.0379436 1.07673 0.0379436 0.0471717 119, 128\n", + "step 46 0.043752 0.043752 0.947424 0.043752 0.0690478 5, 78\n", + "step 47 0.130293 0.0953383 1.0233 0.219791 0.043752 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110\n", + "step 48 0.0768238 0.0635109 1.23707 0.103572 0.0563999 23, 33, 42, 104, 96\n", + "step 49 0.0862301 0.0941997 1.03095 0.219791 0.0471717 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69\n", + "step 50 0.061014 0.0521839 0.933878 0.0782759 0.0505915 63, 93, 133\n", + "step 51 0.0471717 0.0471717 0.95163 0.0471717 0.0471717 36, 47\n", + "step 52 0.0471717 0.0471717 0.995691 0.0471717 0.0598196 37, 106\n", + "step 53 0.0661435 0.0567436 1.07975 0.0851153 0.0471717 45, 119, 128\n", + "step 54 0.118571 0.0989895 1.048 0.219791 0.0505915 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47\n", + "step 55 0.111207 0.0843627 1.11155 0.179459 0.0656281 8, 61, 29, 45, 119, 128\n", + "step 56 0.132286 0.106587 1.08131 0.248507 0.0563999 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 
81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134\n", + "step 57 0.0713276 0.0617558 0.949872 0.103572 0.0563999 63, 93, 133, 109\n", + "step 58 0.0505915 0.0505915 1.20298 0.0505915 0.0563999 86, 125\n", + "step 59 0.0627238 0.0481398 0.850023 0.0690478 0.0782759 3, 18, 7, 55\n", + "step 60 0.0636919 0.0517858 1.27837 0.0690478 0.0816956 10, 40, 57, 49\n", + "step 61 0.0753717 0.0628957 0.857606 0.0943435 0.0598196 30, 46, 115\n", + "step 62 0.0563999 0.0563999 1.06846 0.0563999 0.122028 31, 60\n", + "step 63 0.149733 0.110509 1.08732 0.250895 0.0656281 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35\n", + "step 64 0.0972477 0.0759525 0.962431 0.134676 0.0782759 63, 93, 133, 109, 73\n", + "step 65 0.0736618 0.0590778 0.8741 0.0909238 0.0724675 2, 127, 51, 70\n", + "step 66 0.0994026 0.079987 1.25177 0.163391 0.0690478 23, 33, 42, 104, 96, 86, 125\n", + "step 67 0.0787914 0.0651755 0.884129 0.0977632 0.0909238 14, 54, 66\n", + "step 68 0.0615295 0.0567436 1.01765 0.0632393 0.0690478 37, 106, 87\n", + "step 69 0.0808994 0.0718975 0.867262 0.110411 0.0782759 30, 46, 115, 105\n", + "step 70 0.0822111 0.0674553 0.790057 0.101183 0.088535 22, 97, 39\n", + "step 71 0.0656281 0.0656281 0.914738 0.0656281 0.0724675 15, 43\n", + "step 72 0.0782759 0.0606159 0.797547 0.0909238 0.0656281 28, 117, 62, 138\n", + "step 73 0.167784 0.112279 0.816318 0.263543 0.113831 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138\n", + "step 74 0.0656281 0.0656281 0.861875 0.0656281 0.0724675 68, 90\n", + "step 75 0.0951397 0.0686496 0.932222 0.128867 0.0690478 9, 13, 100, 72\n", + "step 76 0.213303 0.145368 1.16847 0.358918 0.0724675 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35\n", + "step 77 0.0909238 0.0751999 0.940275 0.1128 0.0690478 5, 78, 80\n", + "step 78 0.0954834 0.0761135 1.0311 0.132287 0.0690478 6, 37, 106, 87\n", + "step 79 0.154421 0.119261 1.00711 0.261154 0.0782759 6, 
37, 106, 87, 9, 13, 100, 72\n", + "step 80 0.114053 0.0850638 0.934943 0.1692 0.0690478 5, 78, 80, 24, 89, 116, 53\n", + "step 81 0.129015 0.0922439 1.27621 0.172619 0.0690478 23, 33, 42, 104, 96, 86, 125, 26\n", + "step 82 0.137668 0.102338 1.29949 0.191076 0.0816956 23, 33, 42, 104, 96, 86, 125, 26, 71\n", + "step 83 0.11988 0.0937678 0.936256 0.209532 0.0690478 5, 78, 80, 24, 89, 116, 53, 132\n", + "step 84 0.170781 0.110882 0.937799 0.27858 0.0724675 5, 78, 80, 24, 89, 116, 53, 132, 131\n", + "step 85 0.0834055 0.0774797 0.933912 0.0943435 0.0977632 15, 43, 76\n", + "step 86 0.153907 0.145957 1.17948 0.358918 0.0758872 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74\n", + "step 87 0.113912 0.0810117 0.87747 0.163391 0.0782759 2, 127, 51, 70, 102\n", + "step 88 0.170795 0.129667 0.942781 0.322332 0.0758872 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90\n", + "step 89 0.0724675 0.0724675 1.15619 0.0724675 0.0782759 121, 123\n", + "step 90 0.263482 0.192873 1.24421 0.591357 0.0782759 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74\n", + "step 91 0.130301 0.0974413 0.881249 0.216371 0.0875041 2, 127, 51, 70, 102, 17\n", + "step 92 0.09699 0.0676799 0.852929 0.10938 0.088535 3, 18, 7, 55, 12\n", + "step 93 0.120834 0.0947416 1.1869 0.163391 0.0816956 21, 94, 121, 123\n", + "step 94 0.106136 0.0855931 0.86533 0.144935 0.0782759 30, 46, 115, 105, 58\n", + "step 95 0.224119 0.194397 1.25595 0.591357 0.0816956 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50\n", + "step 96 0.178217 0.132363 1.02524 0.314135 0.0909238 6, 37, 106, 87, 9, 13, 100, 72, 52\n", + "step 97 0.196191 0.144894 0.941916 0.326783 0.0851153 30, 46, 115, 105, 58, 63, 93, 133, 109, 73\n", + 
"step 98 0.404658 0.255027 1.41022 0.864128 0.119639 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71\n", + "step 99 0.132287 0.106991 1.26649 0.176039 0.113831 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 100 0.233774 0.161054 0.958578 0.411898 0.0977632 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114\n", + "step 101 0.199508 0.126603 0.893597 0.303876 0.0943435 2, 127, 51, 70, 102, 17, 19\n", + "step 102 0.196735 0.136793 0.847374 0.280642 0.0943435 3, 18, 7, 55, 12, 22, 97, 39\n", + "step 103 0.0909238 0.0909238 1.34261 0.0909238 0.113831 4, 84\n", + "step 104 0.200689 0.146028 1.0366 0.314135 0.0943435 6, 37, 106, 87, 9, 13, 100, 72, 52, 99\n", + "step 105 0.143451 0.104313 0.891958 0.188687 0.101183 14, 54, 66, 65\n", + "step 106 0.370684 0.261805 0.994816 0.667244 0.0943435 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99\n", + "step 107 0.334707 0.269479 1.00892 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25\n", + "step 108 0.146153 0.131491 0.891046 0.303876 0.123059 2, 127, 51, 70, 102, 17, 19, 137\n", + "step 109 0.347239 0.277255 1.00693 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79\n", + "step 110 0.220267 0.261371 1.03144 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76\n", + "step 111 0.291649 0.265191 1.10851 0.667244 0.110411 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114\n", + "step 112 0.182281 0.135501 0.896034 0.28987 0.119639 14, 54, 66, 65, 129\n", + "step 113 0.106991 0.106991 1.32714 0.106991 0.11622 64, 130\n", + "step 114 0.428133 0.274502 1.10848 0.726032 0.113831 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 
72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136\n", + "step 115 0.159912 0.120218 0.82177 0.263543 0.113831 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126\n", + "step 116 0.211336 0.143735 1.30904 0.315166 0.132287 4, 84, 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 117 0.598735 0.391056 1.10724 1.06888 0.122028 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126\n", + "step 118 0.119124 0.11508 1.30321 0.122028 0.138096 64, 130, 67\n", + "step 119 0.435425 0.284593 1.44209 1.00222 0.134676 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129\n", + "step 120 0.521526 0.4012 1.13007 1.08289 0.123059 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60\n", + "step 121 0.728048 0.476747 1.14449 1.29345 0.125448 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60\n", + "step 122 0.392487 0.473841 1.15256 1.29345 0.125448 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88\n", + "step 123 0.449042 0.473001 1.15882 1.29345 0.132287 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 
109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107\n", + "step 124 0.781825 0.544352 1.29264 1.65682 0.138096 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 125 0.601927 0.295926 1.44608 1.08631 0.147324 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111\n", + "step 126 0.766519 0.56184 1.33938 1.65682 0.148355 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67\n", + "step 127 0.144935 0.144935 1.9959 0.144935 0.561283 118, 135\n", + "step 128 0.347105 0.297722 1.46352 1.08631 0.568157 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20\n", + "step 129 0.546575 0.561422 1.34825 1.65682 0.238247 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92\n", + "step 130 0.209532 0.209532 1.46808 0.209532 0.39138 59, 82\n", + "step 131 1.0355 0.574235 1.36504 1.89507 0.274833 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 
12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112\n", + "step 132 1.03976 0.586649 1.38213 1.89507 0.568157 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112\n", + "step 133 0.483498 0.392176 1.42835 0.575616 0.561283 34, 59, 82\n", + "step 134 0.861739 0.64919 1.67072 1.18065 0.611909 34, 59, 82, 118, 135\n", + "step 135 1.39162 0.931329 1.75421 2.85325 0.611909 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20\n", + "step 136 1.67595 0.983779 2.103 3.35236 0.678568 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 
129, 111, 20, 34, 59, 82, 118, 135\n", + "step 137 2.103 1 3.35236 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20, 34, 59, 82, 118, 135, 85\n", "\n", - "within-cluster distance: 0.526684 between-cluster distance: 1.46646 ratio: 0.359154\n", "\n", + "dendrogram scale: child cluster distance\n", + " 1 2 127 51 70 102 17 19 137 3 18 7 55 12 22 97 39 6 37 106 87 9 13 100 72 52 99 25 79 15 43 76 30 46 115 105 58 63 93 133 109 73 114 136 16 27 120 91 103 122 124 28 117 62 138 126 31 60 88 107 4 84 10 40 57 49 21 94 121 123 64 130 67 92 112 5 78 80 24 89 116 53 132 131 68 90 8 61 29 45 119 128 11 38 32 48 77 108 98 101 41 44 81 95 56 83 110 69 36 47 75 113 134 35 74 50 23 33 42 104 96 86 125 26 71 14 54 66 65 129 111 20 34 59 82 118 135 85\n", + " 1.03976 0.0379436 0.0736618 0.021876 0.113912 0.130301 0.199508 0.146153 0.728048 0.0379436 0.0627238 0 0.09699 0.196735 0.0379436 0.0822111 0.370684 0.0954834 0.0471717 0.0615295 0.154421 0.0505915 0.0252957 0.0951397 0.178217 0.200689 0.334707 0.347239 0.220267 0.0656281 0.0834055 0.291649 0.0379436 0.0753717 0.0808994 0.106136 0.196191 0.0345239 0.061014 0.0713276 0.0972477 0.233774 0.428133 0.598735 0.0440957 0.0126479 0.0282 0.0739196 0.0126479 0.0513877 0.167784 0.0252957 0.0782759 0.0252957 0.159912 0.521526 0.0563999 0.392487 0.449042 0.781825 0.0909238 0.211336 0.0636919 0.0345239 0.0425577 0.132287 0.0126479 0.120834 0.0724675 0.766519 0.106991 0.119124 0.546575 1.0355 1.39162 0.043752 0.0909238 0.114053 0 0.0345239 
0.0410196 0.11988 0.170781 0.170795 0.0656281 0.263482 0.021876 0.0362337 0.111207 0.0661435 0.0379436 0.213303 0.021876 0.0454619 0.047968 0.0704999 0.0559222 0.0749736 0.0678767 0.130293 0 0.0408478 0.0126479 0.0583675 0.0345239 0.0252957 0.0862301 0.118571 0.0471717 0.132286 0.0408478 0.0126479 0.149733 0.153907 0.224119 0.404658 0.0252957 0.0471717 0.0693914 0.0768238 0.0994026 0.0505915 0.129015 0.137668 0.435425 0.0379436 0.0787914 0.143451 0.182281 0.601927 0.347105 1.67595 0.483498 0.209532 0.861739 0.144935 2.103\n", "\n", - "7 iterations\n" + " 1 ___________________________________________________________________________________________________\n", + " | | | |\n", + " 2 _________________________________________________| | | |\n", + " | | | | | | | | | |\n", + "127 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 51 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 70 _| | | | | | | | |\n", + " | | | | | | | |\n", + "102 _____| | | | | | | |\n", + " | | | | | | | |\n", + " 17 _____| | | | | | | |\n", + " | | | | | | |\n", + " 19 _________| | | | | | |\n", + " | | | | | | |\n", + "137 _____| | | | | | |\n", + " | | | | | |\n", + " 3 _________________________________| | | | | |\n", + " | | | | | | | | | | |\n", + " 18 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 7 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 55 | | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 12 ___| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 22 _________| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 97 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 39 ___| | | | | | | | |\n", + " | | | | | | | |\n", + " 6 _________________| | | | | | | |\n", + " | | | | | | | | | | |\n", + " 37 ___| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "106 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 87 _| | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 9 _______| | | | | | | | | |\n", + " | | | | | | | | 
| | | |\n", + " 13 _| | | | | | | | | | | |\n", + " | | | | | | | | | | | |\n", + "100 _| | | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 72 ___| | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 52 _______| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 99 _________| | | | | | | | |\n", + " | | | | | | | |\n", + " 25 _______________| | | | | | | |\n", + " | | | | | | | |\n", + " 79 _______________| | | | | | | |\n", + " | | | | | | | | |\n", + " 15 _________| | | | | | | | |\n", + " | | | | | | | | |\n", + " 43 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 76 ___| | | | | | | | |\n", + " | | | | | | | |\n", + " 30 _____________| | | | | | | |\n", + " | | | | | | | | | | | |\n", + " 46 _| | | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "115 ___| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "105 ___| | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 58 _____| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 63 _________| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 93 _| | | | | | | | | |\n", + " | | | | | | | | | |\n", + "133 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + "109 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 73 ___| | | | | | | | |\n", + " | | | | | | | |\n", + "114 ___________| | | | | | | |\n", + " | | | | | | |\n", + "136 ___________________| | | | | | |\n", + " | | | | | |\n", + " 16 ___________________________| | | | | |\n", + " | | | | | | | | |\n", + " 27 _| | | | | | | | |\n", + " | | | | | | | | | |\n", + "120 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 91 _| | | | | | | | |\n", + " | | | | | | | |\n", + "103 ___| | | | | | | |\n", + " | | | | | | | | |\n", + "122 || | | | | | | |\n", + " | | | | | | | |\n", + "124 _| | | | | | | |\n", + " | | | | | | |\n", + " 28 _______| | | | | | |\n", + " | | | | | | | | |\n", + "117 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 62 ___| | | | | | | |\n", + " | | | | | | | |\n", + "138 _| | | | | | | |\n", + " 
| | | | | | |\n", + "126 _______| | | | | | |\n", + " | | | | | |\n", + " 31 _______________________| | | | | |\n", + " | | | | | | | |\n", + " 60 _| | | | | | | |\n", + " | | | | | | |\n", + " 88 _________________| | | | | | |\n", + " | | | | | |\n", + "107 _____________________| | | | | |\n", + " | | | | |\n", + " 4 _____________________________________| | | | |\n", + " | | | | | | |\n", + " 84 ___| | | | | | |\n", + " | | | | | |\n", + " 10 _________| | | | | |\n", + " | | | | | | |\n", + " 40 ___| | | | | | |\n", + " | | | | | | |\n", + " 57 _| | | | | | |\n", + " | | | | | | |\n", + " 49 _| | | | | | |\n", + " | | | | | |\n", + " 21 _____| | | | | |\n", + " | | | | | | |\n", + " 94 | | | | | | |\n", + " | | | | | |\n", + "121 _____| | | | | |\n", + " | | | | | |\n", + "123 ___| | | | | |\n", + " | | | | |\n", + " 64 ___________________________________| | | | |\n", + " | | | | | |\n", + "130 _____| | | | | |\n", + " | | | | | |\n", + " 67 _____| | | | | |\n", + " | | | | |\n", + " 92 _________________________| | | | |\n", + " | | | |\n", + "112 _________________________________________________| | | |\n", + " | | |\n", + " 5 _________________________________________________________________| | |\n", + " | | | | | | | | |\n", + " 78 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 80 ___| | | | | | | |\n", + " | | | | | | |\n", + " 24 _____| | | | | | |\n", + " || | | | | | | |\n", + " 89 || | | | | | | |\n", + " | | | | | | | |\n", + "116 _| | | | | | | |\n", + " | | | | | | | |\n", + " 53 _| | | | | | | |\n", + " | | | | | | |\n", + "132 _____| | | | | | |\n", + " | | | | | |\n", + "131 _______| | | | | |\n", + " | | | | | |\n", + " 68 _______| | | | | |\n", + " | | | | | |\n", + " 90 ___| | | | | |\n", + " | | | | |\n", + " 8 ___________| | | | |\n", + " | | | | | | |\n", + " 61 _| | | | | | |\n", + " | | | | | | |\n", + " 29 _| | | | | | |\n", + " | | | | | |\n", + " 45 _____| | | | | |\n", + " | | | | | |\n", + "119 ___| | | | | |\n", + " | | | | | |\n", + 
"128 _| | | | | |\n", + " | | | | |\n", + " 11 _________| | | | |\n", + " | | | | | | | | |\n", + " 38 _| | | | | | | | |\n", + " | | | | | | | | |\n", + " 32 _| | | | | | | | |\n", + " | | | | | | | | |\n", + " 48 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 77 ___| | | | | | | |\n", + " | | | | | | | | |\n", + "108 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 98 ___| | | | | | | |\n", + " | | | | | | | |\n", + "101 ___| | | | | | | |\n", + " | | | | | | |\n", + " 41 _____| | | | | | |\n", + " || | | | | | | | |\n", + " 44 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 81 _| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 95 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 56 _| | | | | | | | |\n", + " | | | | | | | | |\n", + " 83 _| | | | | | | | |\n", + " | | | | | | | | |\n", + "110 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 69 ___| | | | | | | |\n", + " | | | | | | |\n", + " 36 _____| | | | | | |\n", + " | | | | | | | |\n", + " 47 _| | | | | | | |\n", + " | | | | | | |\n", + " 75 _____| | | | | | |\n", + " | | | | | | |\n", + "113 _| | | | | | |\n", + " | | | | | | |\n", + "134 | | | | | | |\n", + " | | | | | |\n", + " 35 _______| | | | | |\n", + " | | | | | |\n", + " 74 _______| | | | | |\n", + " | | | | |\n", + " 50 _________| | | | |\n", + " | | | |\n", + " 23 ___________________| | | |\n", + " | | | | | | |\n", + " 33 _| | | | | | |\n", + " | | | | | | |\n", + " 42 _| | | | | | |\n", + " | | | | | |\n", + "104 ___| | | | | |\n", + " | | | | | |\n", + " 96 ___| | | | | |\n", + " | | | | | |\n", + " 86 ___| | | | | |\n", + " | | | | | |\n", + "125 _| | | | | |\n", + " | | | | |\n", + " 26 _____| | | | |\n", + " | | | | |\n", + " 71 _____| | | | |\n", + " | | | |\n", + " 14 ___________________| | | |\n", + " | | | | | | |\n", + " 54 _| | | | | | |\n", + " | | | | | |\n", + " 66 ___| | | | | |\n", + " | | | | |\n", + " 65 _____| | | | |\n", + " | | | |\n", + "129 _______| | | |\n", + " | | |\n", + "111 
___________________________| | |\n", + " | | |\n", + " 20 _______________| | |\n", + " | |\n", + " 34 _______________________________________________________________________________| |\n", + " | | |\n", + " 59 _____________________| | |\n", + " | | |\n", + " 82 _________| | |\n", + " | |\n", + "118 _______________________________________| |\n", + " | |\n", + "135 _____| |\n", + " |\n", + " 85 ___________________________________________________________________________________________________|\n", + " \n", + "child cluster distance scale coefficient: 0.943282\n", + "diameter scale coefficient: 0.940673\n", + "\n" ] } ], "source": [ - "# clustering using a partitioning method\n", - "\n", - "Clustering(matrix10, \"Partition\", 2).display()" + "clust2 = Clustering(matrix, \"Hierarchy\", \"Agglomerative\")\n", + "print(clust2)" ] }, { - "cell_type": "code", - "execution_count": null, - "id": "0f5b1c92-751b-4a5c-93f4-041f2297ec82", + "cell_type": "markdown", + "id": "accad18c-7762-4240-ae3b-24febc02dc4d", "metadata": {}, - "outputs": [], "source": [ - "Clustering(matrix10, \"Hierarchy\", \"Agglomerative\")" + "## Hierarchical clustering and dendrogram using a divise algorithm" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 13, "id": "135ebfc8-e2bd-4a76-8f7d-2273d872a850", "metadata": {}, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "138 vectors\n", + "\n", + " | child cluster distance | within-cluster distance | between-cluster distance | diameter | separation | composition\n", + "step 1 0 0 0.845491 0 0.0563999 7, 55\n", + "step 2 0 0 0.883565 0 0.0345239 24, 89\n", + "step 3 0 0 0.911651 0 0.0345239 41, 44\n", + "step 4 0.0126479 0.0126479 1.18625 0.0126479 0.0782759 21, 94\n", + "step 5 0.0126479 0.0126479 0.760368 0.0126479 0.021876 27, 120\n", + "step 6 0.0126479 0.0126479 0.913273 0.0126479 0.0345239 81, 95\n", + "step 7 0.0126479 0.0126479 0.762643 0.0126479 0.0345239 103, 122\n", + 
"step 8 0.0126479 0.0126479 1.0078 0.0126479 0.0345239 113, 134\n", + "step 9 0.021876 0.021876 1.08646 0.021876 0.0252957 8, 61\n", + "step 10 0.021876 0.021876 0.967306 0.021876 0.0345239 11, 38\n", + "step 11 0.0282 0.0230159 0.765583 0.0345239 0.0252957 27, 120, 91\n", + "step 12 0.021876 0.021876 0.868665 0.021876 0.0563999 51, 70\n", + "step 13 0.0362337 0.0314478 1.09889 0.0471717 0.0471717 8, 61, 29\n", + "step 14 0.0252957 0.0252957 0.921014 0.0252957 0.0379436 13, 100\n", + "step 15 0.0440957 0.0335558 0.77085 0.0598196 0.0345239 16, 27, 120, 91\n", + "step 16 0.0252957 0.0252957 1.2527 0.0252957 0.0345239 23, 33\n", + "step 17 0.0252957 0.0252957 0.775549 0.0252957 0.0656281 28, 117\n", + "step 18 0.0252957 0.0252957 0.79839 0.0252957 0.0656281 62, 138\n", + "step 19 0.0252957 0.0252957 0.890767 0.0252957 0.0345239 83, 110\n", + "step 20 0.0454619 0.0375999 0.968891 0.0563999 0.0345239 11, 38, 32\n", + "step 21 0.0739196 0.0536893 0.78211 0.106991 0.0345239 16, 27, 120, 91, 103, 122\n", + "step 22 0.0471717 0.0398797 1.24669 0.0598196 0.0379436 23, 33, 42\n", + "step 23 0.0345239 0.0230159 0.887222 0.0345239 0.0379436 24, 89, 116\n", + "step 24 0.047968 0.042784 0.968338 0.0656281 0.0345239 11, 38, 32, 48\n", + "step 25 0.0704999 0.0538703 0.981483 0.100152 0.0379436 11, 38, 32, 48, 77\n", + "step 26 0.0345239 0.0345239 1.23769 0.0345239 0.0379436 40, 57\n", + "step 27 0.0408478 0.0293399 0.925471 0.0471717 0.0379436 41, 44, 81, 95\n", + "step 28 0.0345239 0.0314478 0.899478 0.0345239 0.0379436 56, 83, 110\n", + "step 29 0.0345239 0.0345239 0.938001 0.0345239 0.043752 63, 93\n", + "step 30 0.0408478 0.0314478 1.00594 0.0471717 0.0505915 75, 113, 134\n", + "step 31 0.0513877 0.0530317 0.787684 0.106991 0.0656281 16, 27, 120, 91, 103, 122, 124\n", + "step 32 0.0379436 0.0379436 0.855993 0.0379436 0.0563999 2, 127\n", + "step 33 0.0379436 0.0379436 0.831399 0.0379436 0.0563999 3, 18\n", + "step 34 0.0505915 0.0421595 0.937767 0.0632393 0.0656281 9, 13, 
100\n", + "step 35 0.0379436 0.0379436 0.87456 0.0379436 0.0598196 14, 54\n", + "step 36 0.0379436 0.0379436 0.779505 0.0379436 0.0632393 22, 97\n", + "step 37 0.0410196 0.0320178 0.894159 0.0471717 0.0690478 24, 89, 116, 53\n", + "step 38 0.0379436 0.0379436 0.855449 0.0379436 0.0563999 30, 46\n", + "step 39 0.0559222 0.0545543 0.984221 0.100152 0.0379436 11, 38, 32, 48, 77, 108\n", + "step 40 0.0749736 0.0603884 0.996551 0.106991 0.0379436 11, 38, 32, 48, 77, 108, 98\n", + "step 41 0.0678767 0.0622605 1.0035 0.106991 0.043752 11, 38, 32, 48, 77, 108, 98, 101\n", + "step 42 0.0583675 0.0462282 0.936685 0.0851153 0.043752 41, 44, 81, 95, 56, 83, 110\n", + "step 43 0.0693914 0.0546356 1.239 0.0977632 0.043752 23, 33, 42, 104\n", + "step 44 0.0425577 0.0398797 1.25903 0.0471717 0.0563999 40, 57, 49\n", + "step 45 0.0379436 0.0379436 1.07673 0.0379436 0.0471717 119, 128\n", + "step 46 0.043752 0.043752 0.947424 0.043752 0.0690478 5, 78\n", + "step 47 0.130293 0.0953383 1.0233 0.219791 0.043752 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110\n", + "step 48 0.0768238 0.0635109 1.23707 0.103572 0.0563999 23, 33, 42, 104, 96\n", + "step 49 0.0862301 0.0941997 1.03095 0.219791 0.0471717 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69\n", + "step 50 0.061014 0.0521839 0.933878 0.0782759 0.0505915 63, 93, 133\n", + "step 51 0.0471717 0.0471717 0.95163 0.0471717 0.0471717 36, 47\n", + "step 52 0.0471717 0.0471717 0.995691 0.0471717 0.0598196 37, 106\n", + "step 53 0.0661435 0.0567436 1.07975 0.0851153 0.0471717 45, 119, 128\n", + "step 54 0.118571 0.0989895 1.048 0.219791 0.0505915 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47\n", + "step 55 0.111207 0.0843627 1.11155 0.179459 0.0656281 8, 61, 29, 45, 119, 128\n", + "step 56 0.132286 0.106587 1.08131 0.248507 0.0563999 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134\n", + "step 57 0.0713276 0.0617558 0.949872 0.103572 0.0563999 
63, 93, 133, 109\n", + "step 58 0.0505915 0.0505915 1.20298 0.0505915 0.0563999 86, 125\n", + "step 59 0.0627238 0.0481398 0.850023 0.0690478 0.0782759 3, 18, 7, 55\n", + "step 60 0.0636919 0.0517858 1.27837 0.0690478 0.0816956 10, 40, 57, 49\n", + "step 61 0.0753717 0.0628957 0.857606 0.0943435 0.0598196 30, 46, 115\n", + "step 62 0.0563999 0.0563999 1.06846 0.0563999 0.122028 31, 60\n", + "step 63 0.149733 0.110509 1.08732 0.250895 0.0656281 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35\n", + "step 64 0.0972477 0.0759525 0.962431 0.134676 0.0782759 63, 93, 133, 109, 73\n", + "step 65 0.0736618 0.0590778 0.8741 0.0909238 0.0724675 2, 127, 51, 70\n", + "step 66 0.0994026 0.079987 1.25177 0.163391 0.0690478 23, 33, 42, 104, 96, 86, 125\n", + "step 67 0.0787914 0.0651755 0.884129 0.0977632 0.0909238 14, 54, 66\n", + "step 68 0.0615295 0.0567436 1.01765 0.0632393 0.0690478 37, 106, 87\n", + "step 69 0.0808994 0.0718975 0.867262 0.110411 0.0782759 30, 46, 115, 105\n", + "step 70 0.0822111 0.0674553 0.790057 0.101183 0.088535 22, 97, 39\n", + "step 71 0.0656281 0.0656281 0.914738 0.0656281 0.0724675 15, 43\n", + "step 72 0.0782759 0.0606159 0.797547 0.0909238 0.0656281 28, 117, 62, 138\n", + "step 73 0.167784 0.112279 0.816318 0.263543 0.113831 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138\n", + "step 74 0.0656281 0.0656281 0.861875 0.0656281 0.0724675 68, 90\n", + "step 75 0.0951397 0.0686496 0.932222 0.128867 0.0690478 9, 13, 100, 72\n", + "step 76 0.213303 0.145368 1.16847 0.358918 0.0724675 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35\n", + "step 77 0.0909238 0.0751999 0.940275 0.1128 0.0690478 5, 78, 80\n", + "step 78 0.0954834 0.0761135 1.0311 0.132287 0.0690478 6, 37, 106, 87\n", + "step 79 0.154421 0.119261 1.00711 0.261154 0.0782759 6, 37, 106, 87, 9, 13, 100, 72\n", + "step 80 0.114053 0.0850638 0.934943 0.1692 0.0690478 5, 78, 80, 24, 89, 
116, 53\n", + "step 81 0.129015 0.0922439 1.27621 0.172619 0.0690478 23, 33, 42, 104, 96, 86, 125, 26\n", + "step 82 0.137668 0.102338 1.29949 0.191076 0.0816956 23, 33, 42, 104, 96, 86, 125, 26, 71\n", + "step 83 0.11988 0.0937678 0.936256 0.209532 0.0690478 5, 78, 80, 24, 89, 116, 53, 132\n", + "step 84 0.170781 0.110882 0.937799 0.27858 0.0724675 5, 78, 80, 24, 89, 116, 53, 132, 131\n", + "step 85 0.0834055 0.0774797 0.933912 0.0943435 0.0977632 15, 43, 76\n", + "step 86 0.153907 0.145957 1.17948 0.358918 0.0758872 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74\n", + "step 87 0.113912 0.0810117 0.87747 0.163391 0.0782759 2, 127, 51, 70, 102\n", + "step 88 0.170795 0.129667 0.942781 0.322332 0.0758872 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90\n", + "step 89 0.0724675 0.0724675 1.15619 0.0724675 0.0782759 121, 123\n", + "step 90 0.263482 0.192873 1.24421 0.591357 0.0782759 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74\n", + "step 91 0.130301 0.0974413 0.881249 0.216371 0.0875041 2, 127, 51, 70, 102, 17\n", + "step 92 0.09699 0.0676799 0.852929 0.10938 0.088535 3, 18, 7, 55, 12\n", + "step 93 0.120834 0.0947416 1.1869 0.163391 0.0816956 21, 94, 121, 123\n", + "step 94 0.106136 0.0855931 0.86533 0.144935 0.0782759 30, 46, 115, 105, 58\n", + "step 95 0.224119 0.194397 1.25595 0.591357 0.0816956 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50\n", + "step 96 0.178217 0.132363 1.02524 0.314135 0.0909238 6, 37, 106, 87, 9, 13, 100, 72, 52\n", + "step 97 0.196191 0.144894 0.941916 0.326783 0.0851153 30, 46, 115, 105, 58, 63, 93, 133, 109, 73\n", + "step 98 0.404658 0.255027 1.41022 0.864128 0.119639 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 
45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71\n", + "step 99 0.132287 0.106991 1.26649 0.176039 0.113831 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 100 0.233774 0.161054 0.958578 0.411898 0.0977632 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114\n", + "step 101 0.199508 0.126603 0.893597 0.303876 0.0943435 2, 127, 51, 70, 102, 17, 19\n", + "step 102 0.196735 0.136793 0.847374 0.280642 0.0943435 3, 18, 7, 55, 12, 22, 97, 39\n", + "step 103 0.0909238 0.0909238 1.34261 0.0909238 0.113831 4, 84\n", + "step 104 0.200689 0.146028 1.0366 0.314135 0.0943435 6, 37, 106, 87, 9, 13, 100, 72, 52, 99\n", + "step 105 0.143451 0.104313 0.891958 0.188687 0.101183 14, 54, 66, 65\n", + "step 106 0.370684 0.261805 0.994816 0.667244 0.0943435 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99\n", + "step 107 0.334707 0.269479 1.00892 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25\n", + "step 108 0.146153 0.131491 0.891046 0.303876 0.123059 2, 127, 51, 70, 102, 17, 19, 137\n", + "step 109 0.347239 0.277255 1.00693 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79\n", + "step 110 0.220267 0.261371 1.03144 0.667244 0.0977632 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76\n", + "step 111 0.291649 0.265191 1.10851 0.667244 0.110411 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114\n", + "step 112 0.182281 0.135501 0.896034 0.28987 0.119639 14, 54, 66, 65, 129\n", + "step 113 0.106991 0.106991 1.32714 0.106991 0.11622 64, 130\n", + "step 114 0.428133 0.274502 1.10848 0.726032 0.113831 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136\n", + "step 115 0.159912 
0.120218 0.82177 0.263543 0.113831 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126\n", + "step 116 0.211336 0.143735 1.30904 0.315166 0.132287 4, 84, 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 117 0.598735 0.391056 1.10724 1.06888 0.122028 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126\n", + "step 118 0.119124 0.11508 1.30321 0.122028 0.138096 64, 130, 67\n", + "step 119 0.435425 0.284593 1.44209 1.00222 0.134676 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129\n", + "step 120 0.521526 0.4012 1.13007 1.08289 0.123059 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60\n", + "step 121 0.728048 0.476747 1.14449 1.29345 0.125448 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60\n", + "step 122 0.392487 0.473841 1.15256 1.29345 0.125448 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88\n", + "step 123 0.449042 0.473001 1.15882 1.29345 0.132287 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107\n", + "step 124 
0.781825 0.544352 1.29264 1.65682 0.138096 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123\n", + "step 125 0.601927 0.295926 1.44608 1.08631 0.147324 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111\n", + "step 126 0.766519 0.56184 1.33938 1.65682 0.148355 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67\n", + "step 127 0.144935 0.144935 1.9959 0.144935 0.561283 118, 135\n", + "step 128 0.347105 0.297722 1.46352 1.08631 0.568157 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20\n", + "step 129 0.546575 0.561422 1.34825 1.65682 0.238247 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92\n", + "step 130 0.209532 0.209532 1.46808 0.209532 0.39138 59, 82\n", + "step 131 1.0355 0.574235 1.36504 1.89507 0.274833 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 
133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112\n", + "step 132 1.03976 0.586649 1.38213 1.89507 0.568157 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112\n", + "step 133 0.483498 0.392176 1.42835 0.575616 0.561283 34, 59, 82\n", + "step 134 0.861739 0.64919 1.67072 1.18065 0.611909 34, 59, 82, 118, 135\n", + "step 135 1.39162 0.931329 1.75421 2.85325 0.611909 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20\n", + "step 136 1.67595 0.983779 2.103 3.35236 0.678568 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20, 34, 59, 82, 118, 135\n", + "step 137 2.103 1 3.35236 1, 2, 127, 51, 70, 102, 17, 19, 137, 3, 
18, 7, 55, 12, 22, 97, 39, 6, 37, 106, 87, 9, 13, 100, 72, 52, 99, 25, 79, 15, 43, 76, 30, 46, 115, 105, 58, 63, 93, 133, 109, 73, 114, 136, 16, 27, 120, 91, 103, 122, 124, 28, 117, 62, 138, 126, 31, 60, 88, 107, 4, 84, 10, 40, 57, 49, 21, 94, 121, 123, 64, 130, 67, 92, 112, 5, 78, 80, 24, 89, 116, 53, 132, 131, 68, 90, 8, 61, 29, 45, 119, 128, 11, 38, 32, 48, 77, 108, 98, 101, 41, 44, 81, 95, 56, 83, 110, 69, 36, 47, 75, 113, 134, 35, 74, 50, 23, 33, 42, 104, 96, 86, 125, 26, 71, 14, 54, 66, 65, 129, 111, 20, 34, 59, 82, 118, 135, 85\n", + "\n", + "\n", + "dendrogram scale: child cluster distance\n", + " 1 2 127 51 70 102 17 19 137 3 18 7 55 12 22 97 39 6 37 106 87 9 13 100 72 52 99 25 79 15 43 76 30 46 115 105 58 63 93 133 109 73 114 136 16 27 120 91 103 122 124 28 117 62 138 126 31 60 88 107 4 84 10 40 57 49 21 94 121 123 64 130 67 92 112 5 78 80 24 89 116 53 132 131 68 90 8 61 29 45 119 128 11 38 32 48 77 108 98 101 41 44 81 95 56 83 110 69 36 47 75 113 134 35 74 50 23 33 42 104 96 86 125 26 71 14 54 66 65 129 111 20 34 59 82 118 135 85\n", + " 1.03976 0.0379436 0.0736618 0.021876 0.113912 0.130301 0.199508 0.146153 0.728048 0.0379436 0.0627238 0 0.09699 0.196735 0.0379436 0.0822111 0.370684 0.0954834 0.0471717 0.0615295 0.154421 0.0505915 0.0252957 0.0951397 0.178217 0.200689 0.334707 0.347239 0.220267 0.0656281 0.0834055 0.291649 0.0379436 0.0753717 0.0808994 0.106136 0.196191 0.0345239 0.061014 0.0713276 0.0972477 0.233774 0.428133 0.598735 0.0440957 0.0126479 0.0282 0.0739196 0.0126479 0.0513877 0.167784 0.0252957 0.0782759 0.0252957 0.159912 0.521526 0.0563999 0.392487 0.449042 0.781825 0.0909238 0.211336 0.0636919 0.0345239 0.0425577 0.132287 0.0126479 0.120834 0.0724675 0.766519 0.106991 0.119124 0.546575 1.0355 1.39162 0.043752 0.0909238 0.114053 0 0.0345239 0.0410196 0.11988 0.170781 0.170795 0.0656281 0.263482 0.021876 0.0362337 0.111207 0.0661435 0.0379436 0.213303 0.021876 0.0454619 0.047968 0.0704999 0.0559222 0.0749736 0.0678767 0.130293 0 
0.0408478 0.0126479 0.0583675 0.0345239 0.0252957 0.0862301 0.118571 0.0471717 0.132286 0.0408478 0.0126479 0.149733 0.153907 0.224119 0.404658 0.0252957 0.0471717 0.0693914 0.0768238 0.0994026 0.0505915 0.129015 0.137668 0.435425 0.0379436 0.0787914 0.143451 0.182281 0.601927 0.347105 1.67595 0.483498 0.209532 0.861739 0.144935 2.103\n", + "\n", + " 1 ___________________________________________________________________________________________________\n", + " | | | |\n", + " 2 _________________________________________________| | | |\n", + " | | | | | | | | | |\n", + "127 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 51 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 70 _| | | | | | | | |\n", + " | | | | | | | |\n", + "102 _____| | | | | | | |\n", + " | | | | | | | |\n", + " 17 _____| | | | | | | |\n", + " | | | | | | |\n", + " 19 _________| | | | | | |\n", + " | | | | | | |\n", + "137 _____| | | | | | |\n", + " | | | | | |\n", + " 3 _________________________________| | | | | |\n", + " | | | | | | | | | | |\n", + " 18 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 7 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 55 | | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 12 ___| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 22 _________| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 97 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 39 ___| | | | | | | | |\n", + " | | | | | | | |\n", + " 6 _________________| | | | | | | |\n", + " | | | | | | | | | | |\n", + " 37 ___| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "106 _| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 87 _| | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 9 _______| | | | | | | | | |\n", + " | | | | | | | | | | | |\n", + " 13 _| | | | | | | | | | | |\n", + " | | | | | | | | | | | |\n", + "100 _| | | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + " 72 ___| | | | | | | | | | |\n", + " | | | | | | | | | 
|\n", + " 52 _______| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 99 _________| | | | | | | | |\n", + " | | | | | | | |\n", + " 25 _______________| | | | | | | |\n", + " | | | | | | | |\n", + " 79 _______________| | | | | | | |\n", + " | | | | | | | | |\n", + " 15 _________| | | | | | | | |\n", + " | | | | | | | | |\n", + " 43 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 76 ___| | | | | | | | |\n", + " | | | | | | | |\n", + " 30 _____________| | | | | | | |\n", + " | | | | | | | | | | | |\n", + " 46 _| | | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "115 ___| | | | | | | | | | |\n", + " | | | | | | | | | | |\n", + "105 ___| | | | | | | | | | |\n", + " | | | | | | | | | |\n", + " 58 _____| | | | | | | | | |\n", + " | | | | | | | | |\n", + " 63 _________| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 93 _| | | | | | | | | |\n", + " | | | | | | | | | |\n", + "133 _| | | | | | | | | |\n", + " | | | | | | | | |\n", + "109 ___| | | | | | | | |\n", + " | | | | | | | | |\n", + " 73 ___| | | | | | | | |\n", + " | | | | | | | |\n", + "114 ___________| | | | | | | |\n", + " | | | | | | |\n", + "136 ___________________| | | | | | |\n", + " | | | | | |\n", + " 16 ___________________________| | | | | |\n", + " | | | | | | | | |\n", + " 27 _| | | | | | | | |\n", + " | | | | | | | | | |\n", + "120 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 91 _| | | | | | | | |\n", + " | | | | | | | |\n", + "103 ___| | | | | | | |\n", + " | | | | | | | | |\n", + "122 || | | | | | | |\n", + " | | | | | | | |\n", + "124 _| | | | | | | |\n", + " | | | | | | |\n", + " 28 _______| | | | | | |\n", + " | | | | | | | | |\n", + "117 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 62 ___| | | | | | | |\n", + " | | | | | | | |\n", + "138 _| | | | | | | |\n", + " | | | | | | |\n", + "126 _______| | | | | | |\n", + " | | | | | |\n", + " 31 _______________________| | | | | |\n", + " | | | | | | | |\n", + " 60 _| | | | | | | |\n", + " | | | | | | |\n", + " 88 
_________________| | | | | | |\n", + " | | | | | |\n", + "107 _____________________| | | | | |\n", + " | | | | |\n", + " 4 _____________________________________| | | | |\n", + " | | | | | | |\n", + " 84 ___| | | | | | |\n", + " | | | | | |\n", + " 10 _________| | | | | |\n", + " | | | | | | |\n", + " 40 ___| | | | | | |\n", + " | | | | | | |\n", + " 57 _| | | | | | |\n", + " | | | | | | |\n", + " 49 _| | | | | | |\n", + " | | | | | |\n", + " 21 _____| | | | | |\n", + " | | | | | | |\n", + " 94 | | | | | | |\n", + " | | | | | |\n", + "121 _____| | | | | |\n", + " | | | | | |\n", + "123 ___| | | | | |\n", + " | | | | |\n", + " 64 ___________________________________| | | | |\n", + " | | | | | |\n", + "130 _____| | | | | |\n", + " | | | | | |\n", + " 67 _____| | | | | |\n", + " | | | | |\n", + " 92 _________________________| | | | |\n", + " | | | |\n", + "112 _________________________________________________| | | |\n", + " | | |\n", + " 5 _________________________________________________________________| | |\n", + " | | | | | | | | |\n", + " 78 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 80 ___| | | | | | | |\n", + " | | | | | | |\n", + " 24 _____| | | | | | |\n", + " || | | | | | | |\n", + " 89 || | | | | | | |\n", + " | | | | | | | |\n", + "116 _| | | | | | | |\n", + " | | | | | | | |\n", + " 53 _| | | | | | | |\n", + " | | | | | | |\n", + "132 _____| | | | | | |\n", + " | | | | | |\n", + "131 _______| | | | | |\n", + " | | | | | |\n", + " 68 _______| | | | | |\n", + " | | | | | |\n", + " 90 ___| | | | | |\n", + " | | | | |\n", + " 8 ___________| | | | |\n", + " | | | | | | |\n", + " 61 _| | | | | | |\n", + " | | | | | | |\n", + " 29 _| | | | | | |\n", + " | | | | | |\n", + " 45 _____| | | | | |\n", + " | | | | | |\n", + "119 ___| | | | | |\n", + " | | | | | |\n", + "128 _| | | | | |\n", + " | | | | |\n", + " 11 _________| | | | |\n", + " | | | | | | | | |\n", + " 38 _| | | | | | | | |\n", + " | | | | | | | | |\n", + " 32 _| | | | | | | | |\n", + " | | | | | | 
| | |\n", + " 48 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 77 ___| | | | | | | |\n", + " | | | | | | | | |\n", + "108 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 98 ___| | | | | | | |\n", + " | | | | | | | |\n", + "101 ___| | | | | | | |\n", + " | | | | | | |\n", + " 41 _____| | | | | | |\n", + " || | | | | | | | |\n", + " 44 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 81 _| | | | | | | | |\n", + " | | | | | | | | | |\n", + " 95 || | | | | | | | |\n", + " | | | | | | | | |\n", + " 56 _| | | | | | | | |\n", + " | | | | | | | | |\n", + " 83 _| | | | | | | | |\n", + " | | | | | | | | |\n", + "110 _| | | | | | | | |\n", + " | | | | | | | |\n", + " 69 ___| | | | | | | |\n", + " | | | | | | |\n", + " 36 _____| | | | | | |\n", + " | | | | | | | |\n", + " 47 _| | | | | | | |\n", + " | | | | | | |\n", + " 75 _____| | | | | | |\n", + " | | | | | | |\n", + "113 _| | | | | | |\n", + " | | | | | | |\n", + "134 | | | | | | |\n", + " | | | | | |\n", + " 35 _______| | | | | |\n", + " | | | | | |\n", + " 74 _______| | | | | |\n", + " | | | | |\n", + " 50 _________| | | | |\n", + " | | | |\n", + " 23 ___________________| | | |\n", + " | | | | | | |\n", + " 33 _| | | | | | |\n", + " | | | | | | |\n", + " 42 _| | | | | | |\n", + " | | | | | |\n", + "104 ___| | | | | |\n", + " | | | | | |\n", + " 96 ___| | | | | |\n", + " | | | | | |\n", + " 86 ___| | | | | |\n", + " | | | | | |\n", + "125 _| | | | | |\n", + " | | | | |\n", + " 26 _____| | | | |\n", + " | | | | |\n", + " 71 _____| | | | |\n", + " | | | |\n", + " 14 ___________________| | | |\n", + " | | | | | | |\n", + " 54 _| | | | | | |\n", + " | | | | | |\n", + " 66 ___| | | | | |\n", + " | | | | |\n", + " 65 _____| | | | |\n", + " | | | |\n", + "129 _______| | | |\n", + " | | |\n", + "111 ___________________________| | |\n", + " | | |\n", + " 20 _______________| | |\n", + " | |\n", + " 34 _______________________________________________________________________________| |\n", + " | | |\n", + " 59 
_____________________| | |\n", + " | | |\n", + " 82 _________| | |\n", + " | |\n", + "118 _______________________________________| |\n", + " | |\n", + "135 _____| |\n", + " |\n", + " 85 ___________________________________________________________________________________________________|\n", + " \n", + "child cluster distance scale coefficient: 0.943282\n", + "diameter scale coefficient: 0.940673\n", + "\n" + ] + } + ], + "source": [ + "clust3 = Clustering(matrix, \"Hierarchy\", \"Divisive\")\n", + "print(clust3)" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "id": "7a8a6b92-9d75-4280-819c-3a03ffedd5fa", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "138" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "Clustering(matrix10, \"Hierarchy\", \"Divisive\")" + "matrix.nb_row" ] }, { "cell_type": "code", "execution_count": null, - "id": "e7eb8524-b558-41f8-8fb2-26c215e59c53", + "id": "d2652b06-675d-4626-81f9-2aef73867f83", "metadata": {}, "outputs": [], "source": [] diff --git a/src/wrapper/export_distance_matrix.cpp b/src/wrapper/export_distance_matrix.cpp index dc37d5a2..7f7ef8ef 100644 --- a/src/wrapper/export_distance_matrix.cpp +++ b/src/wrapper/export_distance_matrix.cpp @@ -533,7 +533,6 @@ class DistanceMatrixWrap } - }; #define WRAP DistanceMatrixWrap @@ -615,6 +614,31 @@ class_distance_matrix() #undef WRAP #undef CLASS +class ClustersWrap +{ + +public: + + static int + // Return cluster of an (individual ="pattern") + // Indices are between 1 and nb_pattern for individuals + // between 1 and nb_cluster for clusters + get_assignment(const Clusters &cluster, int pattern) + { + StatError error; + + if ((0 < pattern) & (pattern <= cluster.get_nb_pattern())) + return cluster.get_assignment(pattern-1)+1; + else { + error.update(STAT_error[STATR_SAMPLE_INDEX]); + stat_tool::wrap_util::throw_error(error); + } + } +}; + +#define WRAP ClustersWrap +#define CLASS Clusters + 
void class_cluster() { @@ -623,8 +647,11 @@ class_cluster() .def(self_ns::str(self)) // __str__ - ; + .def("get_nb_cluster", &CLASS::get_nb_cluster, "Return number of clusters") + .def("get_assignment", WRAP::get_assignment, + args("pattern"),"Get cluster of a vector (index between 0 and nb_pattern)") + ; /* Clusters(); Clusters(const DistanceMatrix &dist_matrix , int inb_cluster , @@ -659,8 +686,13 @@ class_cluster() int get_pattern_length(int pattern , int cluster) const { return pattern_length[pattern][cluster]; } */ + } + +#undef WRAP +#undef CLASS + void class_dendrogram() { diff --git a/test/test_cluster.py b/test/test_cluster.py index d6734a36..5b9d986c 100644 --- a/test/test_cluster.py +++ b/test/test_cluster.py @@ -152,3 +152,7 @@ def test_clustering(): assert str(c1) == str(matrix10.partitioning_prototype(3, [1, 3, 12], 1, 1)) assert str(c1_bis) == str(matrix10.partitioning_prototype(3, [1, 3, 12], 1, 2)) + c5 = Clustering(matrix10, "Partition", 2) + assert c5 + assert c5.get_nb_cluster() == 2 + c5.get_assignment(2) == 1 diff --git a/test/test_distance_matrix.py b/test/test_distance_matrix.py index d5283ce3..5eaccb86 100644 --- a/test/test_distance_matrix.py +++ b/test/test_distance_matrix.py @@ -19,6 +19,7 @@ @pytest.fixture def data(): + # Inspired from stat_toot_test.aml vec10 = Vectors(get_shared_data("chene_sessile.vec")) vec15 = SelectVariable(vec10, [1, 3, 6], Mode="Reject") matrix10 = Compare(vec15, VectorDistance("N", "N", "N")) diff --git a/test/test_matrix.py b/test/test_matrix.py index d16b716d..8a08ec7b 100644 --- a/test/test_matrix.py +++ b/test/test_matrix.py @@ -12,6 +12,7 @@ from openalea.stat_tool.comparison import Compare from openalea.stat_tool.data_transform import SelectVariable from openalea.stat_tool.vectors import VectorDistance, Vectors +from openalea.stat_tool.cluster import Clustering from pathlib import Path @@ -71,7 +72,6 @@ def test_get_substitution_distance(data): def test_get_insertion_distance(data): assert 
data.get_insertion_distance(0,0) == -1 -# TODO: test_hierarchical_clustering def test_get_shape(data): assert (data.nb_column, data.nb_row) == (138, 138) @@ -95,14 +95,28 @@ def test_select_individual(myi): def test_select_individual(myi): assert myi.data.select_individual([1], False) -# def test_wrong_partitioning_clusters(myi): -# assert myi.data.partitioning_clusters([0]) +def test_wrong_partitioning_clusters(myi): + try: + # TODO: uncomment + #myi.data.partitioning_clusters([0]) + myi.data.partitioning_clusters([]) + myi.data.partitioning_clusters([[1,2], [3,4]]) + assert False + except: + assert True def test_partitioning_clusters(myi): - assert myi.data.partitioning_clusters([[1, 2], [3, 4]]) - -def test_partitioning_prototype(myi): - # TODO: find a test that would make more sense + clust1 = Clustering(myi.data, "Partition", 2) + nb_clusters = clust1.get_nb_cluster() + # partition + part1 = [[i for i in range(1,myi.data.nb_row+1) if clust1.get_assignment(i) == c] for c in range(1,nb_clusters+1)] + assert myi.data.partitioning_clusters(part1) + +def test_wrong_partitioning_prototype(myi): + """ + Testing errors in partitioning_prototype. 
+ Other features of partitioning_prototype are tested in test_cluster.py + """ try: assert myi.data.partitioning_prototype(0, [0], 0, 0) assert False @@ -110,7 +124,7 @@ def test_partitioning_prototype(myi): assert True -def test_symmetrize(myi): +def test_wrong_symmetrize(myi): try: myi.data.symmetrize() assert False From 26dafb292a09956847c437a02f15654f62544b38 Mon Sep 17 00:00:00 2001 From: pradal Date: Mon, 12 Jan 2026 11:54:37 +0100 Subject: [PATCH 2/6] Update rtfd conf --- .readthedocs.yml | 13 +++++++++++++ conda/environment.yml | 2 +- doc/installation.md | 2 +- 3 files changed, 15 insertions(+), 2 deletions(-) create mode 100644 .readthedocs.yml diff --git a/.readthedocs.yml b/.readthedocs.yml new file mode 100644 index 00000000..0842047c --- /dev/null +++ b/.readthedocs.yml @@ -0,0 +1,13 @@ +version: 2 + +build: + os: "ubuntu-24.04" + tools: + python: "miniconda3-3.12-24.9" + +conda: + environment: conda/environment.yml + +sphinx: + # Path to your Sphinx configuration file. + configuration: doc/conf.py diff --git a/conda/environment.yml b/conda/environment.yml index 1020dda7..06c23f86 100644 --- a/conda/environment.yml +++ b/conda/environment.yml @@ -1,7 +1,7 @@ name: stat_tool_dev channels: - - conda-forge - openalea3 + - conda-forge dependencies: - python - scikit-build-core diff --git a/doc/installation.md b/doc/installation.md index 5e718ea8..1021a2de 100644 --- a/doc/installation.md +++ b/doc/installation.md @@ -32,7 +32,7 @@ cd stat_tool/test; pytest ```bash # Install dependency with conda -mamba env create -n phm -f conda/environment.yml +mamba env create -n stat -f conda/environment.yml mamba activate stat_tool # Clone stat_tool and install From 024ad8a6e122595b932bce1333298f336b032528 Mon Sep 17 00:00:00 2001 From: pradal Date: Mon, 12 Jan 2026 12:02:55 +0100 Subject: [PATCH 3/6] Update environment.yaml for building doc --- conda/environment.yml | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/conda/environment.yml 
b/conda/environment.yml index 06c23f86..c8183d73 100644 --- a/conda/environment.yml +++ b/conda/environment.yml @@ -3,13 +3,10 @@ channels: - openalea3 - conda-forge dependencies: - - python - scikit-build-core - - setuptoools-scm - - setuptools - cmake - - pip - boost - matplotlib-base + - pip - pip: - -e '..[test,dev,doc]' From d95d403546463592d7d475ae7fc1235c49653f81 Mon Sep 17 00:00:00 2001 From: "jean-baptiste.durand1" Date: Mon, 12 Jan 2026 14:07:16 +0100 Subject: [PATCH 4/6] Fixing test_matrix.py --- src/wrapper/export_distance_matrix.cpp | 126 +++++++++++++------------ test/README.md | 2 +- test/cpp/test_matrix.cpp | 10 ++ test/test_matrix.py | 6 +- 4 files changed, 80 insertions(+), 64 deletions(-) diff --git a/src/wrapper/export_distance_matrix.cpp b/src/wrapper/export_distance_matrix.cpp index 7f7ef8ef..bc810231 100644 --- a/src/wrapper/export_distance_matrix.cpp +++ b/src/wrapper/export_distance_matrix.cpp @@ -100,88 +100,94 @@ class DistanceMatrixWrap ostringstream os; int nb_cluster = len(clusters); - int* cluster_nb_pattern; - int** cluster_pattern; + int* cluster_nb_pattern = NULL; + int** cluster_pattern = NULL; boost::python::list l; cluster_nb_pattern = new int[nb_cluster]; cluster_pattern = new int*[nb_cluster]; - // Build dynamic 2D array - try - { - for (int i = 0; i < nb_cluster; i++) - { -#ifdef DEBUG - cout << "Processing cluster " << i << endl; -#endif - extract x(clusters[i]); - - if (x.check()) { - l = x; - int nb_item = len(l); - - cluster_nb_pattern[i] = nb_item; - - cluster_pattern[i] = new int[nb_item]; - - for (int j = 0; j < cluster_nb_pattern[i]; j++) - { - cluster_pattern[i][j] = extract ((l)[j]); - } - } else { - status = false; + if (nb_cluster > 1) { + // Build dynamic 2D array + try + { + for (int i = 0; i < nb_cluster; i++) + { + #ifdef DEBUG + cout << "Processing cluster " << i << endl; + #endif + extract x(clusters[i]); + + if (x.check()) { + l = x; + int nb_item = len(l); + + cluster_nb_pattern[i] = nb_item; + + 
cluster_pattern[i] = new int[nb_item]; + + for (int j = 0; j < cluster_nb_pattern[i]; j++) + { + cluster_pattern[i][j] = extract ((l)[j]); + } + } else { + status = false; + } } } - } - catch (...) - { + catch (...) + { + // Free memory + for (int i = 0; i < nb_cluster; i++) + delete[] cluster_pattern[i]; + + delete[] cluster_nb_pattern; + delete[] cluster_pattern; + } + + if (status) { + ret = dm.partitioning(error, &os, nb_cluster, cluster_nb_pattern, + cluster_pattern); + // Free memory for (int i = 0; i < nb_cluster; i++) delete[] cluster_pattern[i]; delete[] cluster_nb_pattern; delete[] cluster_pattern; - } - - if (status) { - ret = dm.partitioning(error, &os, nb_cluster, cluster_nb_pattern, - cluster_pattern); + }} else { + ret = dm.partitioning(error, &os, nb_cluster, cluster_nb_pattern, + cluster_pattern); + delete[] cluster_nb_pattern; + delete[] cluster_pattern; + } - // Free memory - for (int i = 0; i < nb_cluster; i++) - delete[] cluster_pattern[i]; + if (!ret) + stat_tool::wrap_util::throw_error(error); - delete[] cluster_nb_pattern; - delete[] cluster_pattern; + return ret; } - if (!ret) - stat_tool::wrap_util::throw_error(error); + static std::string + hierarchical_clustering(const DistanceMatrix& dm, int ialgorithm, + int icriterion, const std::string path, int iformat) + { + StatError error; + ostringstream os; + hierarchical_strategy algorithm = hierarchical_strategy(ialgorithm); + linkage criterion = linkage(icriterion); + output_format format = output_format(iformat); - return ret; - } + bool ret; - static std::string - hierarchical_clustering(const DistanceMatrix& dm, int ialgorithm, - int icriterion, const std::string path, int iformat) - { - StatError error; - ostringstream os; - hierarchical_strategy algorithm = hierarchical_strategy(ialgorithm); - linkage criterion = linkage(icriterion); - output_format format = output_format(iformat); + ret = dm.hierarchical_clustering(error, &os, algorithm, criterion, + path, format); - bool ret; - - ret 
= dm.hierarchical_clustering(error, &os, algorithm, criterion, - path, format); - - if (!ret) - stat_tool::wrap_util::throw_error(error); + if (!ret) + stat_tool::wrap_util::throw_error(error); - return string(os.str()); + return string(os.str()); } diff --git a/test/README.md b/test/README.md index 740c0a52..067521b0 100644 --- a/test/README.md +++ b/test/README.md @@ -18,7 +18,7 @@ test_distribution.py DONE test_error.py DONE test_estimate.py DONE test_histogram.py DONE -test_matrix.py TOBECLEANED/FINALISED +test_matrix.py DONE test_mixture_functional.py DONE test_mixture.py DONE test_multivariate_mixture_functional.py DONE diff --git a/test/cpp/test_matrix.cpp b/test/cpp/test_matrix.cpp index 4035fda0..1d9b9611 100644 --- a/test/cpp/test_matrix.cpp +++ b/test/cpp/test_matrix.cpp @@ -57,6 +57,16 @@ int main(void) { delete [] cluster_pattern[1]; if (error.get_nb_error() > 0) cout << error; + delete [] cluster_pattern; + delete [] cluster_nb_pattern; + cluster_pattern = new int*[1]; + cluster_pattern[0] = new int[1]; + cluster_pattern[0][0] = 0; + cluster_nb_pattern = new int[1]; + cluster_nb_pattern[0] = 0; + clust = matrix10->partitioning(error, &cout, 1, cluster_nb_pattern, + cluster_pattern); + delete [] cluster_pattern[0]; } else { cout << error; } diff --git a/test/test_matrix.py b/test/test_matrix.py index 8a08ec7b..20a5d9e3 100644 --- a/test/test_matrix.py +++ b/test/test_matrix.py @@ -97,8 +97,7 @@ def test_select_individual(myi): def test_wrong_partitioning_clusters(myi): try: - # TODO: uncomment - #myi.data.partitioning_clusters([0]) + myi.data.partitioning_clusters([0]) myi.data.partitioning_clusters([]) myi.data.partitioning_clusters([[1,2], [3,4]]) assert False @@ -164,4 +163,5 @@ def data(): test_get_column_identifier(data()) test_select_individual(myi) - test_partitioning_clusters(myi) \ No newline at end of file + test_partitioning_clusters(myi) + test_wrong_partitioning_clusters(myi) \ No newline at end of file From 
d365b5d7be33f97d1fb0316720cb9235bce3e414 Mon Sep 17 00:00:00 2001 From: "jean-baptiste.durand1" Date: Mon, 12 Jan 2026 14:48:32 +0100 Subject: [PATCH 5/6] Update README --- doc/README | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/doc/README b/doc/README index 2770a2b7..fc3fb5fd 100644 --- a/doc/README +++ b/doc/README @@ -1,13 +1,6 @@ -To generate sphinx documentation: +To generate Doxygen documentation (C++) - 1. go to root directory of this package - 2. type "python setup.py build_sphinx" - 3. type "python setup.py build_sphinx -b latex" - !! right now (April 2009), there is an issue in pshinx related to utf-8. Because we are using unicode, we need - 2.type "python setup.py build_sphinx" - to fix manually the latex documents, switching from ut88 to utf8x in the preamble section. + type "doxygen Doxyfile" for stat_tool/doc/ + The documentation is generated in stat_tool/doc/html -to generate the epydoc documentation: - - epydoc --html -o stat_tool ../src/openalea/stat_tool/ From 4658d40f42240bdea2e7284732ccb3f7bbdfea3a Mon Sep 17 00:00:00 2001 From: "jean-baptiste.durand1" Date: Mon, 9 Feb 2026 09:47:52 +0100 Subject: [PATCH 6/6] Fixing doc --- doc/conf.py | 2 +- test/test_histogram.py | 19 +++++++++++++++++-- 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/doc/conf.py b/doc/conf.py index d6a22930..67f5d0d6 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -154,4 +154,4 @@ ), ] # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {"python": ("https://docs.python.org/", None)} \ No newline at end of file +intersphinx_mapping = {"python": ("https://docs.python.org/", None)} diff --git a/test/test_histogram.py b/test/test_histogram.py index a8c9bc55..bf64e93f 100644 --- a/test/test_histogram.py +++ b/test/test_histogram.py @@ -1,8 +1,12 @@ """histogram tests""" -from .tools import interface -from .tools import robust_path as get_shared_data +try: + from .tools import interface + from .tools import robust_path as get_shared_data +except ImportError: + from tools import interface + from tools import robust_path as get_shared_data import openalea.stat_tool as stat_tool from openalea.stat_tool.distribution import ( @@ -126,3 +130,14 @@ def test_extract_vec(): h = vs.extract(4) assert h print(h) + +if __name__ == "__main__": + + def data(): + v = Histogram([0, 1, 2, 3]) + return v + + myi = interface(data, "data/peup1.his", Histogram) + myi.data = data() + + test_save(myi)