Perform the training step of the localised multiple kernel k-means.
lmkkmeans(Km, parameters, verbose = FALSE)
Argument | Description
---|---
Km | An array of size N x N x M containing M different N x N kernel matrices.
parameters | A list of parameters containing the desired number of clusters, `cluster_count`, and the number of iterations of the algorithm, `iteration_count`.
verbose | Boolean flag. If TRUE, the iteration number is printed at each iteration. Default is FALSE.
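For illustration, the sketch below builds a kernel array and parameter list of the expected shape from simulated data. The Gaussian (RBF) kernel, the simulated views, the `rbf_kernel` helper, and the chosen cluster/iteration counts are assumptions made for this sketch, not requirements of the function.

```r
# Minimal sketch (assumed data): build an N x N x M kernel array from three
# simulated "views" of the same 100 observations using a Gaussian (RBF) kernel
set.seed(1)
n <- 100
views <- list(matrix(rnorm(n * 2), n, 2),
              matrix(rnorm(n * 2), n, 2),
              matrix(rnorm(n * 2), n, 2))

# Helper defined here for illustration only
rbf_kernel <- function(X, sigma = 1) {
  D2 <- as.matrix(dist(X))^2   # squared Euclidean distances between rows of X
  exp(-D2 / (2 * sigma^2))     # N x N Gaussian kernel matrix
}

Km <- array(NA, c(n, n, length(views)))
for (m in seq_along(views)) Km[, , m] <- rbf_kernel(views[[m]])

# Parameters expected by lmkkmeans
parameters <- list(cluster_count = 4,    # desired number of clusters
                   iteration_count = 10) # number of iterations
```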
This function returns a list containing:

- `clustering`, the cluster labels for each element (i.e. row/column) of the kernel matrices;
- the value of the objective function for the given clustering;
- the same parameters as in the input;
- `Theta`, an N x M matrix of weights, where each row corresponds to an observation and each column to one of the kernels.
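The per-observation weights in `Theta` can be used to form the localised combined kernel described in the reference below, K_Theta = sum_m (theta_m theta_m') * K_m, where theta_m is the m-th column of `Theta` and * denotes the element-wise product. A minimal sketch, assuming `state` is the output of `lmkkmeans` run on the kernel array `Km`:

```r
# Minimal sketch: localised combined kernel, K_Theta = sum_m (theta_m theta_m') * K_m
n <- dim(Km)[1]
K_combined <- matrix(0, n, n)
for (m in seq_len(dim(Km)[3])) {
  theta_m <- state$Theta[, m]                                    # weights for kernel m
  K_combined <- K_combined + (theta_m %*% t(theta_m)) * Km[, , m]
}
```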
Gonen, M. and Margolin, A.A., 2014. Localized data fusion for kernel k-means clustering with application to cancer biology. In Advances in Neural Information Processing Systems (pp. 1305-1313).
```r
if (requireNamespace("Rmosek", quietly = TRUE) &&
    (!is.null(utils::packageDescription("Rmosek")$Configured.MSK_VERSION))) {

  # Initialise 100 x 100 x 3 array containing M kernel matrices
  # representing three different types of similarities between 100 data points
  km <- array(NA, c(100, 100, 3))

  # Load kernel matrices
  km[, , 1] <- as.matrix(read.csv(system.file('extdata', 'kernel_matrix1.csv',
                                              package = 'klic'), row.names = 1))
  km[, , 2] <- as.matrix(read.csv(system.file('extdata', 'kernel_matrix2.csv',
                                              package = 'klic'), row.names = 1))
  km[, , 3] <- as.matrix(read.csv(system.file('extdata', 'kernel_matrix3.csv',
                                              package = 'klic'), row.names = 1))

  # Initialise the parameters of the algorithm
  parameters <- list()

  # Set the number of clusters
  parameters$cluster_count <- 4

  # Set the number of iterations
  parameters$iteration_count <- 10

  # Perform training
  state <- lmkkmeans(km, parameters)

  # Display the clustering
  print(state$clustering)

  # Display the kernel weights
  print(state$Theta)
}
#>   [1] 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 3 3 3 3 3 3 3 3 3 3 3 3
#>  [38] 3 3 3 3 3 3 3 3 3 3 3 3 3 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
#>  [75] 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
#>            [,1]      [,2]      [,3]
#>   [1,] 0.4067231 0.1865538 0.4067231
#>   [2,] 0.4066607 0.1866786 0.4066607
#>   [3,] 0.4069570 0.1860859 0.4069570
#>   [4,] 0.4066010 0.1867981 0.4066010
#>   [5,] 0.4064984 0.1870031 0.4064984
#>   ... (rows 6 to 100 of the 100 x 3 weight matrix omitted)
```
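As a quick follow-up, assuming the example above has been run (i.e. Rmosek is available and `state` exists), the cluster sizes and the average weight assigned to each kernel can be inspected with:

```r
# Number of observations in each cluster
table(state$clustering)

# Average weight given to each of the three kernels across observations
colMeans(state$Theta)
```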