R : Copyright 2005, The R Foundation for Statistical Computing
Version 2.1.1  (2005-06-20), ISBN 3-900051-07-0

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for a HTML browser interface to help.
Type 'q()' to quit R.

> ### *
> ###
> attach(NULL, name = "CheckExEnv")
> assign(".CheckExEnv", as.environment(2), pos = length(search())) # base
> ## add some hooks to label plot pages for base and grid graphics
> setHook("plot.new", ".newplot.hook")
> setHook("persp", ".newplot.hook")
> setHook("grid.newpage", ".gridplot.hook")
> 
> assign("cleanEx",
+        function(env = .GlobalEnv) {
+            rm(list = ls(envir = env, all.names = TRUE), envir = env)
+            RNGkind("default", "default")
+            set.seed(1)
+            options(warn = 1)
+            delayedAssign("T", stop("T used instead of TRUE"),
+                          assign.env = .CheckExEnv)
+            delayedAssign("F", stop("F used instead of FALSE"),
+                          assign.env = .CheckExEnv)
+            sch <- search()
+            newitems <- sch[! sch %in% .oldSearch]
+            for(item in rev(newitems))
+                eval(substitute(detach(item), list(item=item)))
+            missitems <- .oldSearch[! .oldSearch %in% sch]
+            if(length(missitems))
+                warning("items ", paste(missitems, collapse=", "),
+                        " have been removed from the search path")
+        },
+        env = .CheckExEnv)
> assign("..nameEx", "__{must remake R-ex/*.R}__", env = .CheckExEnv) # for now
> assign("ptime", proc.time(), env = .CheckExEnv)
> grDevices::postscript("kernlab-Examples.ps")
> assign("par.postscript", graphics::par(no.readonly = TRUE), env = .CheckExEnv)
> options(contrasts = c(unordered = "contr.treatment", ordered = "contr.poly"))
> options(warn = 1)
> library('kernlab')
> 
> assign(".oldSearch", search(), env = .CheckExEnv)
> assign(".oldNS", loadedNamespaces(), env = .CheckExEnv)
> cleanEx(); ..nameEx <- "chol.reduce"
> 
> ### * chol.reduce
> 
> flush(stderr()); flush(stdout())
> 
> ### Name: chol.reduce
> ### Title: Incomplete Cholesky decomposition
> ### Aliases: chol.reduce
> ### Keywords: algebra array
> 
> ### ** Examples
> 
> 
> data(iris)
> datamatrix <- as.matrix(iris[,-5])
> # initialize kernel function
> rbf <- rbfdot(sigma=0.1)
> rbf
Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.1 > Z <- chol.reduce(datamatrix,kernel=rbf) > dim(Z) [1] 150 32 > pivots(Z) [1] 1 119 107 51 42 101 132 63 135 16 99 142 23 86 25 61 115 130 44 [20] 136 15 69 118 14 110 68 109 19 33 138 123 65 > # calculate kernel matrix > K <- crossprod(t(Z)) > # difference between approximated and real kernel matrix > (K - kernelMatrix(kernel=rbf, datamatrix))[6,] 1 2 3 4 5 0.000000e+00 1.132551e-04 7.731866e-05 1.107470e-04 -1.349728e-05 6 7 8 9 10 -1.464322e-04 9.205722e-05 3.489657e-05 1.008088e-04 1.103489e-04 11 12 13 14 15 -2.086424e-05 4.624494e-05 1.031000e-04 1.110223e-16 0.000000e+00 16 17 18 19 20 0.000000e+00 -1.377033e-04 -1.305057e-05 -1.110223e-16 -8.225680e-05 21 22 23 24 25 6.432200e-05 -7.755956e-05 0.000000e+00 3.236574e-05 -1.110223e-16 26 27 28 29 30 1.343701e-04 1.658122e-05 7.585654e-06 2.339176e-05 9.269078e-05 31 32 33 34 35 1.073047e-04 8.727940e-06 1.110223e-16 -7.961980e-07 1.057407e-04 36 37 38 39 40 3.371944e-05 -2.529371e-06 1.667609e-05 9.637892e-05 3.255567e-05 41 42 43 44 45 -8.810357e-06 0.000000e+00 1.072972e-04 -1.110223e-16 -1.875169e-04 46 47 48 49 50 1.272504e-04 -5.777988e-05 9.768266e-05 -3.209427e-05 5.162077e-05 51 52 53 54 55 0.000000e+00 2.786629e-05 -1.032880e-05 5.369123e-05 1.624213e-05 56 57 58 59 60 2.401640e-05 1.844378e-05 -3.473826e-05 -8.340971e-06 1.084145e-05 61 62 63 64 65 2.220446e-16 4.589269e-05 0.000000e+00 1.843033e-05 -1.110223e-16 66 67 68 69 70 2.685491e-05 1.416371e-05 -1.110223e-16 1.110223e-16 2.731337e-05 71 72 73 74 75 3.504121e-05 3.901270e-05 8.651640e-06 2.572767e-06 1.898081e-05 76 77 78 79 80 2.385343e-05 -2.908636e-05 5.248515e-06 4.283661e-05 7.532601e-06 81 82 83 84 85 3.017414e-05 2.105736e-05 2.259105e-05 2.476659e-05 -4.767113e-06 86 87 88 89 90 -5.551115e-17 9.996712e-06 -1.684499e-05 -2.305674e-05 4.679498e-05 91 92 93 94 95 4.559855e-05 1.494860e-05 3.097577e-05 -1.954734e-05 3.421175e-05 96 97 98 99 100 -3.223092e-05 7.049476e-06 2.017240e-05 0.000000e+00 2.282642e-05 101 102 103 104 105 2.775558e-17 2.193552e-05 9.234305e-06 -2.097010e-06 2.479034e-06 106 107 108 109 110 1.733944e-05 -5.551115e-17 2.510905e-05 -4.163336e-17 -1.387779e-17 111 112 113 114 115 2.685406e-05 7.287607e-06 -8.679472e-07 1.772048e-05 -2.775558e-17 116 117 118 119 120 1.888308e-05 1.865280e-06 6.938894e-18 0.000000e+00 3.438574e-05 121 122 123 124 125 -1.688013e-06 3.180803e-05 2.081668e-17 3.259202e-05 3.210098e-06 126 127 128 129 130 1.522836e-05 5.019023e-05 4.262349e-05 -1.147286e-06 -2.775558e-17 131 132 133 134 135 1.558661e-05 6.938894e-18 -3.560511e-06 6.833184e-06 -5.551115e-17 136 137 138 139 140 -4.163336e-17 9.116818e-07 -1.110223e-16 5.204840e-05 -4.818249e-06 141 142 143 144 145 -1.411330e-06 -8.326673e-17 2.193552e-05 9.834213e-06 -1.605750e-06 146 147 148 149 150 7.765406e-06 1.550517e-05 1.829626e-05 9.628428e-06 1.833282e-05 > > > > > cleanEx(); ..nameEx <- "couple" > > ### * couple > > flush(stderr()); flush(stdout()) > > ### Name: couple > ### Title: Probabilities Coupling function > ### Aliases: couple > ### Keywords: classif > > ### ** Examples > > ## create artificial pairwise probabilities > pairs <- matrix(c(0.82,0.12,0.76,0.1,0.9,0.05),2) > > couple(pairs) [,1] [,2] [,3] [1,] 0.69846365 0.18721937 0.1143170 [2,] 0.05628216 0.05613146 0.8875864 > > couple(pairs, coupler="pkpd") [,1] [,2] [,3] [1,] 0.72069954 0.19526315 0.0840373 [2,] 0.05968271 0.05137474 0.8889425 > > couple(pairs, coupler ="vote") [,1] [,2] [,3] [1,] 0.6666667 0.3333333 0.0000000 [2,] 0.0000000 0.3333333 0.6666667 > > > > 
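## Editor's sketch (not part of the original check run): each row returned by
## couple() is a probability distribution over the classes, so the rows should
## sum to one whichever coupler is used. A minimal, self-contained check,
## re-creating the `pairs` matrix used in the couple() example above:
library(kernlab)
pairs <- matrix(c(0.82, 0.12, 0.76, 0.1, 0.9, 0.05), 2)
stopifnot(all.equal(rowSums(couple(pairs)),                   c(1, 1)))
stopifnot(all.equal(rowSums(couple(pairs, coupler = "pkpd")), c(1, 1)))
stopifnot(all.equal(rowSums(couple(pairs, coupler = "vote")), c(1, 1)))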
cleanEx(); ..nameEx <- "dots" > > ### * dots > > flush(stderr()); flush(stdout()) > > ### Name: dots > ### Title: Kernel Functions > ### Aliases: dots rbfdot polydot tanhdot vanilladot laplacedot besseldot > ### anovadot splinedot kpar show,kernel-method > ### Keywords: symbolmath > > ### ** Examples > > rbfkernel <- rbfdot(sigma = 0.1) > rbfkernel Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 > > kpar(rbfkernel) $sigma [1] 0.1 > > ## create two vectors > x <- rnorm(10) > y <- rnorm(10) > > ## calculate dot product > rbfkernel(x,y) [,1] [1,] 0.1156084 > > > > > cleanEx(); ..nameEx <- "gausspr-class" > > ### * gausspr-class > > flush(stderr()); flush(stdout()) > > ### Name: gausspr-class > ### Title: Class "gausspr" > ### Aliases: gausspr-class alpha,gausspr-method cross,gausspr-method > ### error,gausspr-method fit,gausspr-method kcall,gausspr-method > ### kernelf,gausspr-method kpar,gausspr-method lev,gausspr-method > ### type,gausspr-method alphaindex,gausspr-method xmatrix,gausspr-method > ### ymatrix,gausspr-method > ### Keywords: classes > > ### ** Examples > > > # train model > data(iris) > test <- gausspr(Species~.,data=iris,var=2) > test Gaussian Processes object of class "gausspr" Problem type: classification Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 Number of training instances learned : 3 Train error : 0.04 Cross validation error : -1 > alpha(test) [[1]] [,1] [1,] 0.07667047 [2,] 0.08784235 [3,] 0.07416350 [4,] 0.08889861 [5,] 0.07362196 [6,] 0.10842767 [7,] 0.07687030 [8,] 0.08344944 [9,] 0.09047101 [10,] 0.08860067 [11,] 0.08701721 [12,] 0.08827689 [13,] 0.08445266 [14,] 0.07255311 [15,] 0.08249484 [16,] 0.10370899 [17,] 0.07864725 [18,] 0.07914253 [19,] 0.11930178 [20,] 0.08128314 [21,] 0.11216904 [22,] 0.08544103 [23,] 0.05834912 [24,] 0.11945163 [25,] 0.11849868 [26,] 0.10596706 [27,] 0.09799162 [28,] 0.08510793 [29,] 0.08066751 [30,] 0.09347683 [31,] 0.09816340 [32,] 0.10024647 [33,] 0.07813429 [34,] 0.08254617 [35,] 0.09119956 [36,] 0.07172589 [37,] 0.08080848 [38,] 0.07073635 [39,] 0.08106818 [40,] 0.08523862 [41,] 0.07221242 [42,] 0.12055973 [43,] 0.07518071 [44,] 0.10422683 [45,] 0.12311298 [46,] 0.08960078 [47,] 0.08554478 [48,] 0.07933605 [49,] 0.08416324 [50,] 0.07945883 [51,] -0.07455956 [52,] -0.06674164 [53,] -0.06736089 [54,] -0.08583047 [55,] -0.05812428 [56,] -0.06326711 [57,] -0.06403062 [58,] -0.25083917 [59,] -0.06255433 [60,] -0.10960631 [61,] -0.18113822 [62,] -0.07382902 [63,] -0.08542181 [64,] -0.05637889 [65,] -0.14236637 [66,] -0.07190694 [67,] -0.06609041 [68,] -0.08712277 [69,] -0.05620056 [70,] -0.10036709 [71,] -0.06050392 [72,] -0.08301842 [73,] -0.05224791 [74,] -0.05748447 [75,] -0.06900505 [76,] -0.06806451 [77,] -0.06159557 [78,] -0.06123192 [79,] -0.05968775 [80,] -0.15981070 [81,] -0.11203369 [82,] -0.12963911 [83,] -0.09588380 [84,] -0.05239423 [85,] -0.07046754 [86,] -0.07195580 [87,] -0.06437561 [88,] -0.06020224 [89,] -0.08994491 [90,] -0.08798283 [91,] -0.06949896 [92,] -0.05980965 [93,] -0.08546307 [94,] -0.23570748 [95,] -0.07582134 [96,] -0.08354178 [97,] -0.07807164 [98,] -0.06837111 [99,] -0.31918817 [100,] -0.08179081 [[2]] [,1] [1,] 0.05969335 [2,] 0.06302378 [3,] 0.05942366 [4,] 0.06414324 [5,] 0.05914110 [6,] 0.07590426 [7,] 0.06072566 [8,] 0.06110823 [9,] 0.06790551 [10,] 0.06282437 [11,] 0.06584888 [12,] 0.06290236 [13,] 0.06248892 [14,] 0.06526171 [15,] 0.07396004 [16,] 0.08756046 [17,] 0.06599330 [18,] 0.06051680 [19,] 0.08156982 [20,] 0.06340015 [21,] 0.07184397 [22,] 
0.06401372 [23,] 0.05799323 [24,] 0.07258665 [25,] 0.07300133 [26,] 0.06788224 [27,] 0.06579543 [28,] 0.06251844 [29,] 0.06090599 [30,] 0.06461673 [31,] 0.06564349 [32,] 0.06837942 [33,] 0.06683763 [34,] 0.07237093 [35,] 0.06339556 [36,] 0.05844336 [37,] 0.06390984 [38,] 0.05843326 [39,] 0.06488457 [40,] 0.06179851 [41,] 0.05852217 [42,] 0.08465607 [43,] 0.06237781 [44,] 0.06935259 [45,] 0.07843419 [46,] 0.06382051 [47,] 0.06479361 [48,] 0.06129239 [49,] 0.06417295 [50,] 0.05986631 [51,] -0.05787707 [52,] -0.08023528 [53,] -0.05438893 [54,] -0.05737028 [55,] -0.05223876 [56,] -0.07479636 [57,] -0.20173923 [58,] -0.06239754 [59,] -0.05600946 [60,] -0.06556775 [61,] -0.06691205 [62,] -0.06102489 [63,] -0.05442762 [64,] -0.08830984 [65,] -0.07626791 [66,] -0.06038516 [67,] -0.05730779 [68,] -0.09370606 [69,] -0.09241440 [70,] -0.09579035 [71,] -0.05424946 [72,] -0.09711202 [73,] -0.08178584 [74,] -0.07949522 [75,] -0.05445806 [76,] -0.05877176 [77,] -0.08751737 [78,] -0.08466160 [79,] -0.05409952 [80,] -0.06045264 [81,] -0.06142961 [82,] -0.09307626 [83,] -0.05395755 [84,] -0.07748428 [85,] -0.06975358 [86,] -0.06902778 [87,] -0.05930344 [88,] -0.05863757 [89,] -0.09428285 [90,] -0.05690767 [91,] -0.05408011 [92,] -0.06400287 [93,] -0.08023528 [94,] -0.05340232 [95,] -0.05591011 [96,] -0.06003297 [97,] -0.07400386 [98,] -0.06191444 [99,] -0.06337279 [100,] -0.07970018 [[3]] [,1] [1,] 0.39673641 [2,] 0.31077661 [3,] 0.49218106 [4,] 0.15070047 [5,] 0.35070642 [6,] 0.24337135 [7,] 0.40224193 [8,] 0.09068425 [9,] 0.30975624 [10,] 0.14538400 [11,] 0.10544375 [12,] 0.20737492 [13,] 0.12675159 [14,] 0.34008117 [15,] 0.10059662 [16,] 0.27513456 [17,] 0.28018604 [18,] 0.12799572 [19,] 0.30983970 [20,] 0.11544304 [21,] 0.47365611 [22,] 0.14641966 [23,] 0.45658591 [24,] 0.29527371 [25,] 0.21333217 [26,] 0.26854531 [27,] 0.42006447 [28,] 0.57442719 [29,] 0.29137198 [30,] 0.07920106 [31,] 0.10867668 [32,] 0.09505004 [33,] 0.12064971 [34,] 0.54665394 [35,] 0.27759004 [36,] 0.31690641 [37,] 0.40049608 [38,] 0.24245311 [39,] 0.15700483 [40,] 0.14622245 [41,] 0.20322816 [42,] 0.30539568 [43,] 0.13400438 [44,] 0.08918193 [45,] 0.17535293 [46,] 0.16196931 [47,] 0.17453602 [48,] 0.20544006 [49,] 0.08010621 [50,] 0.15733108 [51,] -0.09412141 [52,] -0.38541326 [53,] -0.12038151 [54,] -0.22339896 [55,] -0.12777533 [56,] -0.08463328 [57,] -0.67663546 [58,] -0.11006496 [59,] -0.17281042 [60,] -0.09023690 [61,] -0.32402549 [62,] -0.28309283 [63,] -0.18371242 [64,] -0.40331510 [65,] -0.27503237 [66,] -0.21190701 [67,] -0.24138502 [68,] -0.09454181 [69,] -0.07893872 [70,] -0.51825990 [71,] -0.12917425 [72,] -0.44730621 [73,] -0.08970570 [74,] -0.46080578 [75,] -0.15222359 [76,] -0.13912268 [77,] -0.50779279 [78,] -0.47367241 [79,] -0.17293400 [80,] -0.19385234 [81,] -0.12038305 [82,] -0.12105137 [83,] -0.16039776 [84,] -0.46337129 [85,] -0.31830402 [86,] -0.10141087 [87,] -0.14780379 [88,] -0.24618406 [89,] -0.52007664 [90,] -0.20455937 [91,] -0.13488052 [92,] -0.24871030 [93,] -0.38541326 [94,] -0.10678690 [95,] -0.11677011 [96,] -0.22585932 [97,] -0.39446816 [98,] -0.28893981 [99,] -0.20030130 [100,] -0.40638008 > error(test) [1] 0.04 > lev(test) [1] "setosa" "versicolor" "virginica" > > > > cleanEx(); ..nameEx <- "gausspr" > > ### * gausspr > > flush(stderr()); flush(stdout()) > > ### Name: gausspr > ### Title: Gaussian processes for regression and classification > ### Aliases: gausspr gausspr,formula-method gausspr,vector-method > ### gausspr,matrix-method show,gausspr-method predict,gausspr-method > ### Keywords: 
classif regression nonlinear > > ### ** Examples > > # train model > data(iris) > test <- gausspr(Species~.,data=iris,var=2) > test Gaussian Processes object of class "gausspr" Problem type: classification Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 Number of training instances learned : 3 Train error : 0.04 Cross validation error : -1 > alpha(test) [[1]] [,1] [1,] 0.07667047 [2,] 0.08784235 [3,] 0.07416350 [4,] 0.08889861 [5,] 0.07362196 [6,] 0.10842767 [7,] 0.07687030 [8,] 0.08344944 [9,] 0.09047101 [10,] 0.08860067 [11,] 0.08701721 [12,] 0.08827689 [13,] 0.08445266 [14,] 0.07255311 [15,] 0.08249484 [16,] 0.10370899 [17,] 0.07864725 [18,] 0.07914253 [19,] 0.11930178 [20,] 0.08128314 [21,] 0.11216904 [22,] 0.08544103 [23,] 0.05834912 [24,] 0.11945163 [25,] 0.11849868 [26,] 0.10596706 [27,] 0.09799162 [28,] 0.08510793 [29,] 0.08066751 [30,] 0.09347683 [31,] 0.09816340 [32,] 0.10024647 [33,] 0.07813429 [34,] 0.08254617 [35,] 0.09119956 [36,] 0.07172589 [37,] 0.08080848 [38,] 0.07073635 [39,] 0.08106818 [40,] 0.08523862 [41,] 0.07221242 [42,] 0.12055973 [43,] 0.07518071 [44,] 0.10422683 [45,] 0.12311298 [46,] 0.08960078 [47,] 0.08554478 [48,] 0.07933605 [49,] 0.08416324 [50,] 0.07945883 [51,] -0.07455956 [52,] -0.06674164 [53,] -0.06736089 [54,] -0.08583047 [55,] -0.05812428 [56,] -0.06326711 [57,] -0.06403062 [58,] -0.25083917 [59,] -0.06255433 [60,] -0.10960631 [61,] -0.18113822 [62,] -0.07382902 [63,] -0.08542181 [64,] -0.05637889 [65,] -0.14236637 [66,] -0.07190694 [67,] -0.06609041 [68,] -0.08712277 [69,] -0.05620056 [70,] -0.10036709 [71,] -0.06050392 [72,] -0.08301842 [73,] -0.05224791 [74,] -0.05748447 [75,] -0.06900505 [76,] -0.06806451 [77,] -0.06159557 [78,] -0.06123192 [79,] -0.05968775 [80,] -0.15981070 [81,] -0.11203369 [82,] -0.12963911 [83,] -0.09588380 [84,] -0.05239423 [85,] -0.07046754 [86,] -0.07195580 [87,] -0.06437561 [88,] -0.06020224 [89,] -0.08994491 [90,] -0.08798283 [91,] -0.06949896 [92,] -0.05980965 [93,] -0.08546307 [94,] -0.23570748 [95,] -0.07582134 [96,] -0.08354178 [97,] -0.07807164 [98,] -0.06837111 [99,] -0.31918817 [100,] -0.08179081 [[2]] [,1] [1,] 0.05969335 [2,] 0.06302378 [3,] 0.05942366 [4,] 0.06414324 [5,] 0.05914110 [6,] 0.07590426 [7,] 0.06072566 [8,] 0.06110823 [9,] 0.06790551 [10,] 0.06282437 [11,] 0.06584888 [12,] 0.06290236 [13,] 0.06248892 [14,] 0.06526171 [15,] 0.07396004 [16,] 0.08756046 [17,] 0.06599330 [18,] 0.06051680 [19,] 0.08156982 [20,] 0.06340015 [21,] 0.07184397 [22,] 0.06401372 [23,] 0.05799323 [24,] 0.07258665 [25,] 0.07300133 [26,] 0.06788224 [27,] 0.06579543 [28,] 0.06251844 [29,] 0.06090599 [30,] 0.06461673 [31,] 0.06564349 [32,] 0.06837942 [33,] 0.06683763 [34,] 0.07237093 [35,] 0.06339556 [36,] 0.05844336 [37,] 0.06390984 [38,] 0.05843326 [39,] 0.06488457 [40,] 0.06179851 [41,] 0.05852217 [42,] 0.08465607 [43,] 0.06237781 [44,] 0.06935259 [45,] 0.07843419 [46,] 0.06382051 [47,] 0.06479361 [48,] 0.06129239 [49,] 0.06417295 [50,] 0.05986631 [51,] -0.05787707 [52,] -0.08023528 [53,] -0.05438893 [54,] -0.05737028 [55,] -0.05223876 [56,] -0.07479636 [57,] -0.20173923 [58,] -0.06239754 [59,] -0.05600946 [60,] -0.06556775 [61,] -0.06691205 [62,] -0.06102489 [63,] -0.05442762 [64,] -0.08830984 [65,] -0.07626791 [66,] -0.06038516 [67,] -0.05730779 [68,] -0.09370606 [69,] -0.09241440 [70,] -0.09579035 [71,] -0.05424946 [72,] -0.09711202 [73,] -0.08178584 [74,] -0.07949522 [75,] -0.05445806 [76,] -0.05877176 [77,] -0.08751737 [78,] -0.08466160 [79,] -0.05409952 [80,] -0.06045264 [81,] -0.06142961 [82,] 
-0.09307626 [83,] -0.05395755 [84,] -0.07748428 [85,] -0.06975358 [86,] -0.06902778 [87,] -0.05930344 [88,] -0.05863757 [89,] -0.09428285 [90,] -0.05690767 [91,] -0.05408011 [92,] -0.06400287 [93,] -0.08023528 [94,] -0.05340232 [95,] -0.05591011 [96,] -0.06003297 [97,] -0.07400386 [98,] -0.06191444 [99,] -0.06337279 [100,] -0.07970018 [[3]] [,1] [1,] 0.39673641 [2,] 0.31077661 [3,] 0.49218106 [4,] 0.15070047 [5,] 0.35070642 [6,] 0.24337135 [7,] 0.40224193 [8,] 0.09068425 [9,] 0.30975624 [10,] 0.14538400 [11,] 0.10544375 [12,] 0.20737492 [13,] 0.12675159 [14,] 0.34008117 [15,] 0.10059662 [16,] 0.27513456 [17,] 0.28018604 [18,] 0.12799572 [19,] 0.30983970 [20,] 0.11544304 [21,] 0.47365611 [22,] 0.14641966 [23,] 0.45658591 [24,] 0.29527371 [25,] 0.21333217 [26,] 0.26854531 [27,] 0.42006447 [28,] 0.57442719 [29,] 0.29137198 [30,] 0.07920106 [31,] 0.10867668 [32,] 0.09505004 [33,] 0.12064971 [34,] 0.54665394 [35,] 0.27759004 [36,] 0.31690641 [37,] 0.40049608 [38,] 0.24245311 [39,] 0.15700483 [40,] 0.14622245 [41,] 0.20322816 [42,] 0.30539568 [43,] 0.13400438 [44,] 0.08918193 [45,] 0.17535293 [46,] 0.16196931 [47,] 0.17453602 [48,] 0.20544006 [49,] 0.08010621 [50,] 0.15733108 [51,] -0.09412141 [52,] -0.38541326 [53,] -0.12038151 [54,] -0.22339896 [55,] -0.12777533 [56,] -0.08463328 [57,] -0.67663546 [58,] -0.11006496 [59,] -0.17281042 [60,] -0.09023690 [61,] -0.32402549 [62,] -0.28309283 [63,] -0.18371242 [64,] -0.40331510 [65,] -0.27503237 [66,] -0.21190701 [67,] -0.24138502 [68,] -0.09454181 [69,] -0.07893872 [70,] -0.51825990 [71,] -0.12917425 [72,] -0.44730621 [73,] -0.08970570 [74,] -0.46080578 [75,] -0.15222359 [76,] -0.13912268 [77,] -0.50779279 [78,] -0.47367241 [79,] -0.17293400 [80,] -0.19385234 [81,] -0.12038305 [82,] -0.12105137 [83,] -0.16039776 [84,] -0.46337129 [85,] -0.31830402 [86,] -0.10141087 [87,] -0.14780379 [88,] -0.24618406 [89,] -0.52007664 [90,] -0.20455937 [91,] -0.13488052 [92,] -0.24871030 [93,] -0.38541326 [94,] -0.10678690 [95,] -0.11677011 [96,] -0.22585932 [97,] -0.39446816 [98,] -0.28893981 [99,] -0.20030130 [100,] -0.40638008 > > # predict on the training set > predict(test,iris[,-5]) [1] setosa setosa setosa setosa setosa setosa [7] setosa setosa setosa setosa setosa setosa [13] setosa setosa setosa setosa setosa setosa [19] setosa setosa setosa setosa setosa setosa [25] setosa setosa setosa setosa setosa setosa [31] setosa setosa setosa setosa setosa setosa [37] setosa setosa setosa setosa setosa setosa [43] setosa setosa setosa setosa setosa setosa [49] setosa setosa versicolor versicolor versicolor versicolor [55] versicolor versicolor versicolor versicolor versicolor versicolor [61] versicolor versicolor versicolor versicolor versicolor versicolor [67] versicolor versicolor versicolor versicolor versicolor versicolor [73] versicolor versicolor versicolor versicolor versicolor virginica [79] versicolor versicolor versicolor versicolor versicolor virginica [85] versicolor versicolor versicolor versicolor versicolor versicolor [91] versicolor versicolor versicolor versicolor versicolor versicolor [97] versicolor versicolor versicolor versicolor virginica virginica [103] virginica virginica virginica virginica versicolor virginica [109] virginica virginica virginica virginica virginica virginica [115] virginica virginica virginica virginica virginica versicolor [121] virginica virginica virginica virginica virginica virginica [127] versicolor virginica virginica virginica virginica virginica [133] virginica virginica virginica virginica virginica virginica 
[139] versicolor virginica virginica virginica virginica virginica [145] virginica virginica virginica virginica virginica virginica Levels: setosa versicolor virginica > > # create regression data > x <- seq(-20,20,0.1) > y <- sin(x)/x + rnorm(401,sd=0.03) > > # regression with gaussian processes > foo <- gausspr(x, y) > foo Gaussian Processes object of class "gausspr" Problem type: regression Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 Number of training instances learned : 400 Train error : 0.001117212 Cross validation error : -1 > > # predict and plot > ytest <- predict(foo, x) > plot(x, y, type ="l") > lines(x, ytest, col="red") > > > > cleanEx(); ..nameEx <- "inc.chol-class" > > ### * inc.chol-class > > flush(stderr()); flush(stdout()) > > ### Name: inc.chol-class > ### Title: Class "inc.chol" > ### Aliases: inc.chol-class diag.residues maxresiduals pivots > ### diag.residues,inc.chol-method maxresiduals,inc.chol-method > ### pivots,inc.chol-method > ### Keywords: classes > > ### ** Examples > > data(iris) > datamatrix <- as.matrix(iris[,-5]) > # initialize kernel function > rbf <- rbfdot(sigma=0.1) > rbf Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 > Z <- chol.reduce(datamatrix,kernel=rbf) > dim(Z) [1] 150 32 > pivots(Z) [1] 1 119 107 51 42 101 132 63 135 16 99 142 23 86 25 61 115 130 44 [20] 136 15 69 118 14 110 68 109 19 33 138 123 65 > diag.residues(Z) [1] 0.000000e+00 2.464693e-04 9.181721e-05 2.315207e-04 2.181632e-05 [6] 1.464322e-04 2.577170e-04 1.900010e-05 3.805212e-04 2.539507e-04 [11] 2.258563e-05 7.107862e-05 2.630239e-04 9.107298e-17 -6.765422e-17 [16] -5.551115e-17 3.888188e-04 2.100552e-05 -5.984796e-17 8.565560e-05 [21] 2.212544e-04 7.188983e-05 -6.938894e-17 1.273665e-04 -1.124101e-15 [26] 3.889868e-04 4.486644e-05 1.099385e-05 5.157376e-05 1.635109e-04 [31] 1.839297e-04 4.212992e-04 -1.257675e-16 9.702818e-05 1.483088e-04 [36] 3.598096e-04 3.629907e-04 8.314437e-05 2.149752e-04 1.887246e-05 [41] 3.450198e-05 1.110223e-16 2.883095e-04 2.255141e-17 4.356473e-04 [46] 2.405312e-04 4.261144e-05 1.650961e-04 2.451689e-05 3.865473e-05 [51] 3.053113e-15 2.306675e-04 9.978788e-05 3.692552e-04 4.349976e-04 [56] 2.445961e-04 1.049560e-04 5.416078e-04 4.347378e-04 5.555297e-04 [61] -1.630640e-16 1.592131e-04 2.775558e-17 2.157307e-04 -1.747734e-16 [66] 2.596623e-04 4.756995e-04 -1.170938e-16 -2.003606e-16 1.295260e-04 [71] 3.762450e-04 3.313683e-04 3.976048e-04 2.297755e-04 4.116302e-04 [76] 3.596942e-04 6.424967e-04 2.358862e-04 2.563907e-04 3.342845e-04 [81] 1.919135e-04 2.011922e-04 9.084172e-05 3.029254e-04 9.824642e-04 [86] -1.595946e-16 1.716496e-04 2.197540e-04 1.992428e-04 2.446316e-04 [91] 4.112113e-04 1.624785e-04 1.019516e-04 1.469477e-04 2.140670e-04 [96] 1.605922e-04 1.217695e-04 2.642621e-04 -1.665335e-16 1.082804e-04 [101] -5.551115e-17 3.943905e-04 2.504815e-04 4.776051e-05 2.573776e-04 [106] 3.916493e-04 -1.110223e-16 3.082613e-04 5.577136e-16 2.420807e-15 [111] 2.179300e-04 1.823805e-04 1.307985e-04 5.685150e-04 1.734723e-17 [116] 2.953430e-04 3.294089e-05 -1.249001e-16 -6.799862e-29 7.987610e-04 [121] 1.872965e-04 3.841609e-04 3.144186e-17 2.635663e-04 1.223977e-04 [126] 3.628621e-04 3.242968e-04 3.114311e-04 2.438363e-04 -3.608225e-16 [131] 1.155399e-04 1.665335e-16 3.570498e-04 2.827425e-04 -9.714451e-17 [136] -3.176279e-15 6.016174e-04 2.038300e-17 3.657883e-04 1.093031e-04 [141] 3.477691e-04 -5.551115e-17 3.943905e-04 2.900822e-04 2.965483e-04 [146] 1.150775e-04 1.435231e-04 1.807511e-04 8.813069e-04 
3.609529e-04 > maxresiduals(Z) [1] 1.000000000 0.999785242 0.865016503 0.483133444 0.293760262 0.268186601 [7] 0.214556411 0.135248736 0.115864926 0.101798001 0.065040753 0.049639787 [13] 0.046246119 0.045276402 0.032908901 0.020471967 0.015419933 0.011816377 [19] 0.009842335 0.009220000 0.006976836 0.006835816 0.006046775 0.004724460 [25] 0.004266078 0.003705733 0.003169034 0.002921725 0.002038116 0.001775570 [31] 0.001406742 0.001172668 > > > > cleanEx(); ..nameEx <- "inlearn" > > ### * inlearn > > flush(stderr()); flush(stdout()) > > ### Name: inlearn > ### Title: Onlearn object initialization > ### Aliases: inlearn inlearn,numeric-method > ### Keywords: classif neural regression ts > > ### ** Examples > > > ## create toy data set > x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) > y <- matrix(c(rep(1,50),rep(-1,50)),,1) > > ## initialize onlearn object > on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),type="classification") > > ## learn one data point at the time > for(i in sample(1:100,100)) + on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) > > sign(predict(on,x)) [1] 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 -1 1 -1 1 1 1 1 1 [26] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 1 [51] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 [76] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 > > > > > cleanEx(); ..nameEx <- "ipop-class" > > ### * ipop-class > > flush(stderr()); flush(stdout()) > > ### Name: ipop-class > ### Title: Class "ipop" > ### Aliases: ipop-class primal,ipop-method dual,ipop-method how,ipop-method > ### primal dual how > ### Keywords: classes > > ### ** Examples > > ## solve the Support Vector Machine optimization problem > data(spam) > > ## sample a scaled part (300 points) of the spam data set > m <- 300 > set <- sample(1:dim(spam)[1],m) > x <- scale(as.matrix(spam[,-58]))[set,] > y <- as.integer(spam[set,58]) > y[y==2] <- -1 > > ##set C parameter and kernel > C <- 5 > rbf <- rbfdot(sigma = 0.1) > > ## create H matrix etc. 
> H <- kernelPol(rbf,x,,y) > c <- matrix(rep(-1,m)) > A <- t(y) > b <- 0 > l <- matrix(rep(0,m)) > u <- matrix(rep(C,m)) > r <- 0 > > sv <- ipop(c,H,A,b,l,u,r) > primal(sv) [1] 9.302488e-01 4.999998e+00 8.592208e-01 4.281006e-01 7.920926e-01 [6] 1.305360e+00 1.639814e+00 1.382185e-08 3.301046e-01 2.023848e-01 [11] 5.000000e+00 9.290775e-01 9.790809e-01 1.007647e+00 9.923527e-01 [16] 1.299290e+00 1.385563e+00 1.297526e-07 3.132231e-01 5.414904e-01 [21] 3.088218e-01 5.843824e-07 1.662076e-01 1.315231e+00 2.692608e+00 [26] 1.001187e+00 1.549588e+00 5.485071e-01 3.681672e-07 5.000000e+00 [31] 9.902042e-01 9.676997e-09 1.256262e-07 1.007343e+00 8.099767e-01 [36] 8.861323e-09 2.827273e-07 4.713044e-01 7.072564e-01 6.145971e-08 [41] 6.410785e-08 8.809477e-08 2.391568e-08 4.254573e-08 1.450171e+00 [46] 2.827273e-07 9.598015e-01 5.587380e-01 4.999997e+00 9.923533e-01 [51] 9.923533e-01 1.297065e-08 9.294322e-01 7.201659e-01 2.402556e+00 [56] 5.815845e-01 1.112403e-01 4.125049e-01 2.303831e-02 4.051889e-01 [61] 9.078061e-01 9.915692e-01 2.203392e+00 9.864216e-01 4.999999e+00 [66] 4.757688e-01 2.045097e+00 9.923533e-01 7.495251e-01 1.162355e-05 [71] 8.000895e-01 8.566056e-01 9.973010e-01 2.466782e+00 7.443971e-01 [76] 9.923390e-01 4.865023e-01 9.977354e-01 3.576980e-01 1.427909e+00 [81] 3.786120e-01 9.739563e-01 9.729606e-01 7.854390e-01 8.617850e-01 [86] 9.773205e-01 4.989188e-01 5.599744e-07 1.007640e+00 1.007645e+00 [91] 2.623636e-08 5.332180e-01 4.331667e-01 9.486023e-01 1.004451e+00 [96] 2.827273e-07 1.053180e-07 7.750351e-01 2.827273e-07 1.217205e-07 [101] 1.399403e+00 4.630925e+00 7.803054e-02 9.407539e-01 5.414904e-01 [106] 9.032215e-01 1.007510e+00 3.786120e-01 5.010548e-04 9.982661e-01 [111] 7.843043e-01 5.880280e-06 3.497554e-08 6.903519e-08 6.271123e-01 [116] 4.560963e-02 6.502465e-01 1.075065e+00 8.730130e-01 7.422110e-01 [121] 3.052544e+00 7.572126e-01 3.216187e-01 8.444883e-01 1.708919e+00 [126] 1.558266e-05 7.679474e-01 3.710126e-01 3.547841e-01 8.900006e-01 [131] 7.950706e-08 8.670826e-08 8.392264e-01 4.291441e-01 1.328632e+00 [136] 9.363373e-01 2.265791e+00 6.613144e-01 9.675973e-01 2.650104e-01 [141] 9.870128e-01 8.216932e-01 4.837204e-01 5.663395e-01 9.923533e-01 [146] 1.958833e+00 4.225951e-08 2.907838e-01 2.321993e-07 7.856841e-08 [151] 3.655514e-08 1.022434e+00 1.538768e+00 1.194442e-08 6.733814e-01 [156] 8.817083e-06 3.449213e-01 3.226714e-08 1.331188e+00 9.290775e-01 [161] 9.327381e-01 6.228250e-08 8.739805e-01 8.913928e-01 2.576225e-01 [166] 9.595144e-01 1.324057e-06 2.459633e-04 1.175348e+00 3.333414e-01 [171] 7.773286e-08 1.850477e-07 1.322736e+00 4.231134e-01 5.699559e-01 [176] 9.923533e-01 8.412490e-01 6.885698e-01 2.505827e-02 1.744881e+00 [181] 8.063010e-01 9.695124e-01 8.590676e-01 9.923091e-01 7.281402e-06 [186] 1.856025e-02 1.433742e+00 7.930227e-02 1.045175e+00 3.023367e-01 [191] 9.923974e-01 1.352586e-07 5.000000e+00 5.819639e-08 9.482337e-01 [196] 7.456930e-01 4.713044e-01 7.154066e-01 1.131685e+00 1.375929e-02 [201] 5.943796e-01 8.352650e-01 1.203622e-07 5.096978e-01 1.502990e-06 [206] 1.497341e-08 9.923531e-01 5.268225e-01 1.051906e+00 9.283800e-09 [211] 2.629546e+00 1.000706e+00 9.923533e-01 1.023907e+00 2.827273e-07 [216] 6.042904e-01 1.401975e+00 7.392445e-07 1.376471e-08 2.258489e-01 [221] 9.874319e-01 7.855671e-01 7.682823e-01 4.290691e-01 7.335040e-02 [226] 1.209814e-07 1.060746e+00 4.946903e-01 1.083030e-01 1.002840e-07 [231] 2.485230e+00 5.347040e-01 4.498497e+00 2.292926e-08 4.728575e-01 [236] 2.892703e-08 1.638934e-01 8.971218e-01 2.497657e-08 
2.235953e+00 [241] 6.102438e-02 1.557195e-05 4.204890e-08 7.088257e-01 4.830663e+00 [246] 7.505092e-01 3.866866e-01 9.274404e-01 1.039253e+00 7.229919e-01 [251] 1.730516e+00 1.652269e-08 8.688205e-01 1.191158e-07 2.231330e-08 [256] 5.674344e-01 9.445822e-02 6.586838e-08 9.313177e-01 1.794131e+00 [261] 2.650104e-01 9.391986e-01 2.181419e+00 1.552551e+00 3.244925e-01 [266] 7.248968e-01 5.597665e-01 1.783517e+00 1.508070e+00 9.514240e-01 [271] 5.332180e-01 1.375685e+00 9.644463e-01 7.044747e-01 7.427823e-01 [276] 1.028208e+00 3.667392e-02 5.277134e-01 8.294790e-08 6.137786e-01 [281] 4.236330e-01 9.335930e-01 2.783927e-08 9.304529e-01 8.222144e-01 [286] 8.614411e-01 1.006772e+00 7.514695e-01 9.663624e-01 7.565942e-01 [291] 9.849352e-01 1.129876e+00 2.345857e-02 8.309793e-01 3.935678e+00 [296] 1.066840e-07 9.010544e-01 5.951615e-01 2.287186e+00 9.922836e-01 > dual(sv) [1] -0.007646697 > how(sv) [1] "converged" > > > > > cleanEx(); ..nameEx <- "ipop" > > ### * ipop > > flush(stderr()); flush(stdout()) > > ### Name: ipop > ### Title: Quadratic Programming Solver > ### Aliases: ipop ipop,ANY,matrix-method > ### Keywords: optimize > > ### ** Examples > > ## solve the Support Vector Machine optimization problem > data(spam) > > ## sample a scaled part (500 points) of the spam data set > m <- 500 > set <- sample(1:dim(spam)[1],m) > x <- scale(as.matrix(spam[,-58]))[set,] > y <- as.integer(spam[set,58]) > y[y==2] <- -1 > > ##set C parameter and kernel > C <- 5 > rbf <- rbfdot(sigma = 0.1) > > ## create H matrix etc. > H <- kernelPol(rbf,x,,y) > c <- matrix(rep(-1,m)) > A <- t(y) > b <- 0 > l <- matrix(rep(0,m)) > u <- matrix(rep(C,m)) > r <- 0 > > sv <- ipop(c,H,A,b,l,u,r) > sv An object of class "ipop" Slot "primal": [1] 9.363915e-01 5.000000e+00 7.374131e-01 1.200982e+00 6.504191e-01 [6] 8.438763e-01 3.428384e-01 6.422163e-09 2.860583e-01 1.834324e-01 [11] 5.000000e+00 9.405976e-01 9.512734e-01 1.020141e+00 9.781062e-01 [16] 2.752804e+00 1.265990e+00 6.322886e-08 2.444465e-01 5.346727e-01 [21] 2.359221e-08 1.623611e-08 3.288451e-08 1.706494e+00 2.582410e+00 [26] 4.009573e-01 1.056518e+00 5.553811e-01 2.389656e-08 5.000000e+00 [31] 9.792772e-01 1.668283e-09 5.951198e-08 1.019558e+00 8.499672e-01 [36] 2.080490e-09 6.813694e-07 4.135102e-01 8.869917e-01 2.697133e-08 [41] 5.280832e-09 4.656160e-09 2.056652e-09 1.545512e-08 1.399251e+00 [46] 6.813694e-07 9.736688e-01 4.532632e-01 1.718370e+00 9.798590e-01 [51] 9.798590e-01 3.160590e-09 8.923977e-01 8.229817e-01 2.472175e+00 [56] 5.898188e-01 2.779127e-05 1.680913e-01 3.014429e-08 5.040027e-01 [61] 8.618868e-01 9.384151e-01 1.330261e+00 1.058916e+00 5.000000e+00 [66] 4.764356e-01 2.101807e+00 9.798590e-01 7.785226e-01 3.662932e-01 [71] 8.122596e-01 7.123972e-01 1.008206e+00 1.578559e+00 7.305303e-01 [76] 9.798393e-01 1.283274e-08 1.010215e+00 5.707745e-01 1.170879e+00 [81] 4.308238e-01 1.950691e-01 9.604318e-01 7.569066e-01 5.042086e-01 [86] 9.898152e-01 1.989232e+00 2.637418e-07 1.019543e+00 9.560201e-01 [91] 2.933906e-09 3.506342e-01 2.397047e-01 9.027867e-01 9.821800e-01 [96] 6.813694e-07 3.723364e-09 1.900881e-07 6.813694e-07 3.979460e-09 [101] 8.317425e-01 5.000000e+00 1.529306e-08 9.070617e-01 5.346727e-01 [106] 9.464386e-01 1.002868e+00 4.308238e-01 1.167300e-08 9.892442e-01 [111] 7.172011e-01 9.776234e-09 5.837045e-09 5.906160e-09 5.784033e-01 [116] 1.031409e-08 5.073217e-01 9.871651e-01 6.935016e-01 7.643927e-01 [121] 1.541607e+00 4.274002e-01 3.080082e+00 9.824786e-01 1.355064e+00 [126] 3.910680e-06 9.960180e-08 2.171519e-01 1.195444e-01 
8.693192e-01 [131] 4.582415e-09 1.034843e-08 1.799212e-07 3.886807e-08 8.085305e-01 [136] 8.990306e-01 4.771533e-08 5.789272e-01 6.825817e-01 2.260535e-01 [141] 9.750356e-01 7.398996e-01 3.122398e-01 5.580760e-01 9.798590e-01 [146] 1.999357e+00 7.334922e-09 3.504949e-08 8.641338e-09 4.420065e-09 [151] 6.390262e-09 6.743906e-01 3.631154e+00 8.953600e-10 7.003559e-01 [156] 1.997767e-08 1.737572e-01 6.410731e-09 1.627962e+00 9.405976e-01 [161] 5.062130e-01 1.219335e-01 7.876280e-01 1.127730e+00 4.550992e-09 [166] 9.242860e-01 9.984978e-09 8.757971e-01 2.229158e+00 7.957425e-08 [171] 4.484092e-09 4.516128e-09 1.185223e+00 1.486584e-07 1.428407e-02 [176] 9.798590e-01 4.131181e-02 5.744701e-01 6.495166e-09 1.335595e+00 [181] 7.879459e-01 9.834658e-01 8.251713e-01 9.798142e-01 2.691629e-08 [186] 1.168366e-08 2.032250e-07 1.415126e-01 7.015782e-02 1.199225e-01 [191] 1.959858e-01 7.529979e-09 5.000000e+00 3.388243e-09 9.657001e-01 [196] 6.768716e-01 4.135102e-01 5.812527e-01 1.152441e+00 4.902172e-09 [201] 5.696488e-01 6.384992e-02 5.779113e-08 4.631369e-01 2.715459e-08 [206] 3.240041e-09 9.798583e-01 5.649087e-01 1.099252e+00 1.526028e-09 [211] 2.652545e+00 1.014689e+00 9.798590e-01 1.171547e+00 6.813694e-07 [216] 6.112231e-01 1.559348e-08 3.604884e-09 1.719321e-09 1.951772e-01 [221] 9.944815e-01 6.940956e-01 7.030611e-01 1.160568e-01 3.372115e-09 [226] 3.221388e-09 1.007753e+00 3.602576e-01 4.138905e-01 5.275646e-09 [231] 2.390482e+00 3.580978e-01 5.000000e+00 2.481037e-09 3.901027e-01 [236] 3.445841e-09 3.007184e-01 5.237096e-01 4.991868e-09 3.107440e+00 [241] 6.072762e-07 1.183052e-05 1.124312e-08 4.504166e-01 5.000000e+00 [246] 5.381688e-01 2.551298e-01 9.404996e-01 1.110013e+00 7.123399e-01 [251] 1.854741e+00 1.151215e-09 7.478942e-01 3.298724e-07 3.136630e-09 [256] 6.343011e-01 3.675162e-08 2.257808e-09 9.168214e-01 1.503865e+00 [261] 2.260535e-01 9.661000e-01 1.592428e+00 1.165679e+00 1.537934e-01 [266] 6.994961e-01 5.711773e-01 2.851671e+00 1.340843e+00 9.398080e-01 [271] 3.506342e-01 8.672266e-01 9.662963e-01 5.546847e-01 1.298308e+00 [276] 4.637749e-01 2.960898e-08 4.666876e-01 7.228825e-09 6.414187e-01 [281] 9.432512e-08 6.182148e-01 5.324465e-09 9.584275e-01 7.931752e-01 [286] 9.485356e-01 1.013968e+00 1.268358e+00 9.795891e-01 6.906589e-01 [291] 5.967486e-01 1.287628e+00 3.694049e-01 7.317682e-01 3.475663e+00 [296] 5.317190e-08 6.844840e-01 6.705651e-01 2.358033e+00 9.807950e-01 [301] 2.112198e-09 2.899816e+00 1.031243e-08 8.224561e-01 8.062884e-01 [306] 1.065148e+00 8.699222e-01 2.212380e-08 1.271713e-01 4.295169e-01 [311] 2.282494e-08 3.484144e-01 6.813694e-07 6.723010e-01 9.348424e-01 [316] 4.183648e-01 6.821218e-08 1.631164e+00 6.217219e-09 2.216477e-08 [321] 6.937089e-01 1.006330e+00 1.221029e+00 7.795417e-01 6.813694e-07 [326] 6.527598e-01 7.657745e-01 2.032250e-07 9.238291e-01 2.137155e-09 [331] 3.111686e-09 6.329315e-01 2.301971e-01 9.414367e-09 1.719200e-01 [336] 6.665908e-01 2.242113e+00 1.090752e-08 1.273953e-07 5.000000e+00 [341] 1.103925e+00 1.019527e+00 6.950113e-09 5.791611e-01 9.477972e-01 [346] 5.000000e+00 2.347652e+00 7.054122e-01 6.469189e-09 3.503547e-01 [351] 9.187011e-01 5.896290e-01 4.884292e-01 4.480515e-01 5.240734e-09 [356] 1.032131e+00 6.813694e-07 5.157514e-01 3.815575e-01 7.456372e-01 [361] 1.365716e+00 1.029007e+00 8.781050e-01 4.663819e-01 3.830494e-09 [366] 7.718269e-01 1.019508e+00 9.798584e-01 6.006599e-02 1.580597e+00 [371] 7.061906e-01 3.964410e-01 7.682209e-09 6.516001e-01 1.181471e+00 [376] 2.752081e+00 9.611101e-01 6.813694e-07 1.013037e+00 
4.564695e-09 [381] 1.365456e+00 9.747707e-01 1.959858e-01 2.608775e-09 2.460286e-08 [386] 8.416744e-01 9.604495e-01 1.055886e+00 3.643189e+00 3.825669e-01 [391] 7.305703e-01 3.128009e-01 2.210706e+00 2.920724e-09 8.807326e-01 [396] 9.245479e-02 2.565441e+00 3.445745e-01 1.385812e-01 5.011762e-08 [401] 6.114886e-09 9.651042e-01 1.183811e-08 3.036885e-09 5.204629e-08 [406] 1.523806e+00 8.874056e-01 1.008364e+00 1.392101e-08 1.195834e-08 [411] 4.788644e-01 8.994605e-01 1.959858e-01 7.877066e-01 6.658015e-01 [416] 9.680237e-01 1.044554e-07 6.160778e-01 9.798606e-01 7.662045e-01 [421] 5.903144e-01 9.798574e-01 1.699274e-08 1.691737e+00 1.016326e+00 [426] 9.730707e-01 9.783603e-01 1.252183e-08 1.376365e-09 6.813694e-07 [431] 1.049756e-07 5.948685e-01 1.508063e-01 2.284626e-08 4.949599e+00 [436] 3.914780e-09 1.959858e-01 9.798590e-01 3.118073e-01 1.612337e-09 [441] 2.560610e+00 1.438658e-08 9.765567e-01 9.141522e-01 9.885939e-01 [446] 4.664009e-09 3.695029e-09 4.988164e-08 9.460014e-01 1.019520e+00 [451] 9.712063e-01 3.655576e-01 1.194594e+00 1.020140e+00 9.776368e-08 [456] 3.073383e-01 9.796237e-01 1.932180e-08 4.096903e-09 6.252302e-01 [461] 2.829439e-01 3.629161e-01 8.145331e-01 2.127817e-05 5.325625e-08 [466] 8.645616e-01 9.756510e-01 1.680234e+00 1.142011e+00 2.382998e+00 [471] 5.327176e-01 1.779407e+00 1.366699e+00 8.747527e-01 1.020141e+00 [476] 1.380391e-07 2.452041e+00 8.599437e-01 9.496702e-01 1.859653e-01 [481] 1.959858e-01 4.252270e-01 1.200823e+00 4.590198e-01 1.379533e+00 [486] 5.641977e-09 7.213646e-01 9.551356e-01 6.813694e-07 1.166658e-07 [491] 6.717933e-01 3.254859e-08 9.559529e-01 1.091021e+00 5.167216e-09 [496] 5.910135e-01 9.083720e-01 5.081046e-01 5.000000e+00 4.124277e-01 Slot "dual": [1] -0.02014103 Slot "how": [1] "converged" > dual(sv) [1] -0.02014103 > > > > > cleanEx(); ..nameEx <- "kcca-class" > > ### * kcca-class > > flush(stderr()); flush(stdout()) > > ### Name: kcca-class > ### Title: Class "kcca" > ### Aliases: kcca-class kcor xcoef ycoef yvar xvar kcor,kcca-method > ### xcoef,kcca-method xvar,kcca-method ycoef,kcca-method yvar,kcca-method > ### Keywords: classes > > ### ** Examples > > > > > > cleanEx(); ..nameEx <- "kcca" > > ### * kcca > > flush(stderr()); flush(stdout()) > > ### Name: kcca > ### Title: Kernel Canonical Correlation Analysis > ### Aliases: kcca kcca,matrix-method > ### Keywords: multivariate > > ### ** Examples > > > > > > cleanEx(); ..nameEx <- "kernel-class" > > ### * kernel-class > > flush(stderr()); flush(stdout()) > > ### Name: kernel-class > ### Title: Class "kernel" "rbfkernel" "polykernel", "tanhkernel", > ### "vanillakernel" > ### Aliases: rbfkernel-class polykernel-class vanillakernel-class > ### tanhkernel-class anovakernel-class besselkernel-class > ### laplacekernel-class splinekernel-class kernel-class > ### kpar,kernel-method > ### Keywords: classes > > ### ** Examples > > > rbfkernel <- rbfdot(sigma = 0.1) > rbfkernel Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.1 > is(rbfkernel) [1] "rbfkernel" "kernel" "function" "OptionalFunction" [5] "PossibleMethod" > kpar(rbfkernel) $sigma [1] 0.1 > > > > > cleanEx(); ..nameEx <- "kernelMatrix" > > ### * kernelMatrix > > flush(stderr()); flush(stdout()) > > ### Name: kernelMatrix > ### Title: Kernel Matrix functions > ### Aliases: kernelMatrix kernelMult kernelPol kernelFast > ### kernelPol,kernel-method kernelMatrix,kernel-method > ### kernelMult,kernel-method kernelFast,kernel-method > ### kernelMatrix,rbfkernel,matrix-method > ### kernelMatrix,polykernel,matrix-method > ### kernelMatrix,vanillakernel,matrix-method > ### kernelMatrix,tanhkernel,matrix-method > ### kernelMatrix,laplacekernel,matrix-method > ### kernelMatrix,anovakernel,matrix-method > ### kernelMatrix,splinekernel,matrix-method > ### kernelMatrix,besselkernel,matrix-method > ### kernelMult,rbfkernel,matrix-method > ### kernelMult,splinekernel,matrix-method > ### kernelMult,polykernel,matrix-method > ### kernelMult,tanhkernel,matrix-method > ### kernelMult,laplacekernel,matrix-method > ### kernelMult,besselkernel,matrix-method > ### kernelMult,anovakernel,matrix-method > ### kernelMult,vanillakernel,matrix-method > ### kernelPol,rbfkernel,matrix-method > ### kernelPol,splinekernel,matrix-method > ### kernelPol,polykernel,matrix-method kernelPol,tanhkernel,matrix-method > ### kernelPol,vanillakernel,matrix-method > ### kernelPol,anovakernel,matrix-method > ### kernelPol,besselkernel,matrix-method > ### kernelPol,laplacekernel,matrix-method > ### kernelFast,rbfkernel,matrix-method > ### kernelFast,splinekernel,matrix-method > ### kernelFast,polykernel,matrix-method > ### kernelFast,tanhkernel,matrix-method > ### kernelFast,vanillakernel,matrix-method > ### kernelFast,anovakernel,matrix-method > ### kernelFast,besselkernel,matrix-method > ### kernelFast,laplacekernel,matrix-method > ### Keywords: algebra array > > ### ** Examples > > ## use the spam data > data(spam) > dt <- as.matrix(spam[c(10:20,3000:3010),-58]) > > ## initialize kernel function > rbf <- rbfdot(sigma = 0.05) > rbf Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.05 > > ## calculate kernel matrix > kernelMatrix(rbf, dt) 10 11 12 13 14 10 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 3.076372e-01 12 0.000000e+00 0.000000e+00 1.000000e+00 5.576760e-184 0.000000e+00 13 0.000000e+00 0.000000e+00 5.576760e-184 1.000000e+00 0.000000e+00 14 0.000000e+00 3.076372e-01 0.000000e+00 0.000000e+00 1.000000e+00 15 0.000000e+00 0.000000e+00 7.620012e-15 2.283705e-99 0.000000e+00 16 0.000000e+00 0.000000e+00 4.354479e-135 6.728227e-05 0.000000e+00 17 0.000000e+00 1.759111e-175 1.580090e-138 0.000000e+00 1.474439e-159 18 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 19 0.000000e+00 3.704851e-53 4.048455e-284 0.000000e+00 5.620206e-45 20 0.000000e+00 0.000000e+00 2.375087e-140 1.144518e-142 0.000000e+00 3000 0.000000e+00 0.000000e+00 0.000000e+00 9.823504e-126 0.000000e+00 3001 0.000000e+00 4.157797e-25 0.000000e+00 0.000000e+00 1.629625e-19 3002 0.000000e+00 1.584893e-07 0.000000e+00 0.000000e+00 1.153939e-04 3003 0.000000e+00 2.485273e-06 0.000000e+00 0.000000e+00 3.459807e-09 3004 0.000000e+00 0.000000e+00 3.802475e-19 5.318413e-295 0.000000e+00 3005 0.000000e+00 6.080875e-113 2.419462e-191 0.000000e+00 2.324676e-100 3006 0.000000e+00 2.141277e-36 0.000000e+00 0.000000e+00 5.957980e-30 3007 0.000000e+00 1.347476e-05 0.000000e+00 0.000000e+00 7.850063e-08 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3010 1.836994e-45 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 15 16 17 18 19 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 0.000000e+00 1.759111e-175 0.000000e+00 3.704851e-53 12 7.620012e-15 4.354479e-135 1.580090e-138 0.000000e+00 4.048455e-284 13 2.283705e-99 6.728227e-05 0.000000e+00 0.000000e+00 0.000000e+00 14 0.000000e+00 0.000000e+00 1.474439e-159 0.000000e+00 5.620206e-45 15 1.000000e+00 1.700705e-64 3.198028e-211 0.000000e+00 0.000000e+00 16 1.700705e-64 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 17 3.198028e-211 0.000000e+00 1.000000e+00 0.000000e+00 1.131156e-46 18 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 19 0.000000e+00 0.000000e+00 1.131156e-46 0.000000e+00 1.000000e+00 20 4.909368e-107 1.406121e-115 7.630353e-215 0.000000e+00 0.000000e+00 3000 1.748673e-300 5.672366e-150 0.000000e+00 0.000000e+00 0.000000e+00 3001 0.000000e+00 0.000000e+00 5.322112e-77 0.000000e+00 8.829844e-07 3002 0.000000e+00 0.000000e+00 3.492160e-118 0.000000e+00 2.787511e-23 3003 0.000000e+00 0.000000e+00 1.069177e-239 0.000000e+00 5.288180e-90 3004 1.076909e-58 1.857644e-231 3.153752e-58 0.000000e+00 1.161063e-159 3005 6.669696e-284 0.000000e+00 1.473126e-08 0.000000e+00 1.980755e-17 3006 0.000000e+00 0.000000e+00 4.958692e-65 0.000000e+00 1.809605e-22 3007 0.000000e+00 0.000000e+00 3.077777e-225 0.000000e+00 1.125459e-80 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 0.000000e+00 0.000000e+00 4.882814e-270 0.000000e+00 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 20 3000 3001 3002 3003 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 0.000000e+00 4.157797e-25 1.584893e-07 2.485273e-06 12 2.375087e-140 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 13 1.144518e-142 9.823504e-126 0.000000e+00 0.000000e+00 0.000000e+00 14 0.000000e+00 0.000000e+00 1.629625e-19 1.153939e-04 3.459807e-09 15 
4.909368e-107 1.748673e-300 0.000000e+00 0.000000e+00 0.000000e+00 16 1.406121e-115 5.672366e-150 0.000000e+00 0.000000e+00 0.000000e+00 17 7.630353e-215 0.000000e+00 5.322112e-77 3.492160e-118 1.069177e-239 18 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 19 0.000000e+00 0.000000e+00 8.829844e-07 2.787511e-23 5.288180e-90 20 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3000 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3001 0.000000e+00 0.000000e+00 1.000000e+00 1.352868e-06 4.823404e-51 3002 0.000000e+00 0.000000e+00 1.352868e-06 1.000000e+00 2.576800e-23 3003 0.000000e+00 0.000000e+00 4.823404e-51 2.576800e-23 1.000000e+00 3004 2.356445e-154 0.000000e+00 7.945641e-224 3.305267e-299 0.000000e+00 3005 9.713541e-301 0.000000e+00 3.387569e-37 1.378134e-67 8.914794e-166 3006 0.000000e+00 0.000000e+00 2.663515e-15 2.320201e-19 1.902969e-65 3007 0.000000e+00 0.000000e+00 3.061563e-44 1.242501e-19 1.643562e-02 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 5.593377e-93 0.000000e+00 0.000000e+00 0.000000e+00 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3004 3005 3006 3007 3008 3009 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 0.000000e+00 11 0.000000e+00 6.080875e-113 2.141277e-36 1.347476e-05 0 0.000000e+00 12 3.802475e-19 2.419462e-191 0.000000e+00 0.000000e+00 0 0.000000e+00 13 5.318413e-295 0.000000e+00 0.000000e+00 0.000000e+00 0 0.000000e+00 14 0.000000e+00 2.324676e-100 5.957980e-30 7.850063e-08 0 0.000000e+00 15 1.076909e-58 6.669696e-284 0.000000e+00 0.000000e+00 0 0.000000e+00 16 1.857644e-231 0.000000e+00 0.000000e+00 0.000000e+00 0 0.000000e+00 17 3.153752e-58 1.473126e-08 4.958692e-65 3.077777e-225 0 0.000000e+00 18 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 4.882814e-270 19 1.161063e-159 1.980755e-17 1.809605e-22 1.125459e-80 0 0.000000e+00 20 2.356445e-154 9.713541e-301 0.000000e+00 0.000000e+00 0 0.000000e+00 3000 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 5.593377e-93 3001 7.945641e-224 3.387569e-37 2.663515e-15 3.061563e-44 0 0.000000e+00 3002 3.305267e-299 1.378134e-67 2.320201e-19 1.242501e-19 0 0.000000e+00 3003 0.000000e+00 8.914794e-166 1.902969e-65 1.643562e-02 0 0.000000e+00 3004 1.000000e+00 1.023004e-91 3.147192e-234 0.000000e+00 0 0.000000e+00 3005 1.023004e-91 1.000000e+00 2.479034e-35 2.698841e-153 0 0.000000e+00 3006 3.147192e-234 2.479034e-35 1.000000e+00 2.494577e-60 0 0.000000e+00 3007 0.000000e+00 2.698841e-153 2.494577e-60 1.000000e+00 0 0.000000e+00 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1 0.000000e+00 3009 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 1.000000e+00 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 0.000000e+00 3010 10 1.836994e-45 11 0.000000e+00 12 0.000000e+00 13 0.000000e+00 14 0.000000e+00 15 0.000000e+00 16 0.000000e+00 17 0.000000e+00 18 0.000000e+00 19 0.000000e+00 20 0.000000e+00 3000 0.000000e+00 3001 0.000000e+00 3002 0.000000e+00 3003 0.000000e+00 3004 0.000000e+00 3005 0.000000e+00 3006 0.000000e+00 3007 0.000000e+00 3008 0.000000e+00 3009 0.000000e+00 3010 1.000000e+00 > > yt <- as.matrix(as.integer(spam[c(10:20,3000:3010),58])) > yt[yt==2] <- -1 > > ## calculate the quadratic kernel expression > kernelPol(rbf, dt, ,yt) 10 11 12 13 14 10 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 3.076372e-01 12 0.000000e+00 0.000000e+00 1.000000e+00 5.576760e-184 0.000000e+00 13 
0.000000e+00 0.000000e+00 5.576760e-184 1.000000e+00 0.000000e+00 14 0.000000e+00 3.076372e-01 0.000000e+00 0.000000e+00 1.000000e+00 15 0.000000e+00 0.000000e+00 7.620012e-15 2.283705e-99 0.000000e+00 16 0.000000e+00 0.000000e+00 4.354479e-135 6.728227e-05 0.000000e+00 17 0.000000e+00 1.759111e-175 1.580090e-138 0.000000e+00 1.474439e-159 18 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 19 0.000000e+00 3.704851e-53 4.048455e-284 0.000000e+00 5.620206e-45 20 0.000000e+00 0.000000e+00 2.375087e-140 1.144518e-142 0.000000e+00 3000 0.000000e+00 0.000000e+00 0.000000e+00 -9.823504e-126 0.000000e+00 3001 0.000000e+00 -4.157797e-25 0.000000e+00 0.000000e+00 -1.629625e-19 3002 0.000000e+00 -1.584893e-07 0.000000e+00 0.000000e+00 -1.153939e-04 3003 0.000000e+00 -2.485273e-06 0.000000e+00 0.000000e+00 -3.459807e-09 3004 0.000000e+00 0.000000e+00 -3.802475e-19 -5.318413e-295 0.000000e+00 3005 0.000000e+00 -6.080875e-113 -2.419462e-191 0.000000e+00 -2.324676e-100 3006 0.000000e+00 -2.141277e-36 0.000000e+00 0.000000e+00 -5.957980e-30 3007 0.000000e+00 -1.347476e-05 0.000000e+00 0.000000e+00 -7.850063e-08 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3010 -1.836994e-45 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 15 16 17 18 19 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 0.000000e+00 1.759111e-175 0.000000e+00 3.704851e-53 12 7.620012e-15 4.354479e-135 1.580090e-138 0.000000e+00 4.048455e-284 13 2.283705e-99 6.728227e-05 0.000000e+00 0.000000e+00 0.000000e+00 14 0.000000e+00 0.000000e+00 1.474439e-159 0.000000e+00 5.620206e-45 15 1.000000e+00 1.700705e-64 3.198028e-211 0.000000e+00 0.000000e+00 16 1.700705e-64 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 17 3.198028e-211 0.000000e+00 1.000000e+00 0.000000e+00 1.131156e-46 18 0.000000e+00 0.000000e+00 0.000000e+00 1.000000e+00 0.000000e+00 19 0.000000e+00 0.000000e+00 1.131156e-46 0.000000e+00 1.000000e+00 20 4.909368e-107 1.406121e-115 7.630353e-215 0.000000e+00 0.000000e+00 3000 -1.748673e-300 -5.672366e-150 0.000000e+00 0.000000e+00 0.000000e+00 3001 0.000000e+00 0.000000e+00 -5.322112e-77 0.000000e+00 -8.829844e-07 3002 0.000000e+00 0.000000e+00 -3.492160e-118 0.000000e+00 -2.787511e-23 3003 0.000000e+00 0.000000e+00 -1.069177e-239 0.000000e+00 -5.288180e-90 3004 -1.076909e-58 -1.857644e-231 -3.153752e-58 0.000000e+00 -1.161063e-159 3005 -6.669696e-284 0.000000e+00 -1.473126e-08 0.000000e+00 -1.980755e-17 3006 0.000000e+00 0.000000e+00 -4.958692e-65 0.000000e+00 -1.809605e-22 3007 0.000000e+00 0.000000e+00 -3.077777e-225 0.000000e+00 -1.125459e-80 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 0.000000e+00 0.000000e+00 -4.882814e-270 0.000000e+00 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 20 3000 3001 3002 3003 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 11 0.000000e+00 0.000000e+00 -4.157797e-25 -1.584893e-07 -2.485273e-06 12 2.375087e-140 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 13 1.144518e-142 -9.823504e-126 0.000000e+00 0.000000e+00 0.000000e+00 14 0.000000e+00 0.000000e+00 -1.629625e-19 -1.153939e-04 -3.459807e-09 15 4.909368e-107 -1.748673e-300 0.000000e+00 0.000000e+00 0.000000e+00 16 1.406121e-115 -5.672366e-150 0.000000e+00 0.000000e+00 0.000000e+00 17 7.630353e-215 0.000000e+00 -5.322112e-77 -3.492160e-118 -1.069177e-239 18 0.000000e+00 0.000000e+00 0.000000e+00 
0.000000e+00 0.000000e+00 19 0.000000e+00 0.000000e+00 -8.829844e-07 -2.787511e-23 -5.288180e-90 20 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3000 0.000000e+00 1.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3001 0.000000e+00 0.000000e+00 1.000000e+00 1.352868e-06 4.823404e-51 3002 0.000000e+00 0.000000e+00 1.352868e-06 1.000000e+00 2.576800e-23 3003 0.000000e+00 0.000000e+00 4.823404e-51 2.576800e-23 1.000000e+00 3004 -2.356445e-154 0.000000e+00 7.945641e-224 3.305267e-299 0.000000e+00 3005 -9.713541e-301 0.000000e+00 3.387569e-37 1.378134e-67 8.914794e-166 3006 0.000000e+00 0.000000e+00 2.663515e-15 2.320201e-19 1.902969e-65 3007 0.000000e+00 0.000000e+00 3.061563e-44 1.242501e-19 1.643562e-02 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3009 0.000000e+00 5.593377e-93 0.000000e+00 0.000000e+00 0.000000e+00 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 3004 3005 3006 3007 3008 10 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 11 0.000000e+00 -6.080875e-113 -2.141277e-36 -1.347476e-05 0 12 -3.802475e-19 -2.419462e-191 0.000000e+00 0.000000e+00 0 13 -5.318413e-295 0.000000e+00 0.000000e+00 0.000000e+00 0 14 0.000000e+00 -2.324676e-100 -5.957980e-30 -7.850063e-08 0 15 -1.076909e-58 -6.669696e-284 0.000000e+00 0.000000e+00 0 16 -1.857644e-231 0.000000e+00 0.000000e+00 0.000000e+00 0 17 -3.153752e-58 -1.473126e-08 -4.958692e-65 -3.077777e-225 0 18 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 19 -1.161063e-159 -1.980755e-17 -1.809605e-22 -1.125459e-80 0 20 -2.356445e-154 -9.713541e-301 0.000000e+00 0.000000e+00 0 3000 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 3001 7.945641e-224 3.387569e-37 2.663515e-15 3.061563e-44 0 3002 3.305267e-299 1.378134e-67 2.320201e-19 1.242501e-19 0 3003 0.000000e+00 8.914794e-166 1.902969e-65 1.643562e-02 0 3004 1.000000e+00 1.023004e-91 3.147192e-234 0.000000e+00 0 3005 1.023004e-91 1.000000e+00 2.479034e-35 2.698841e-153 0 3006 3.147192e-234 2.479034e-35 1.000000e+00 2.494577e-60 0 3007 0.000000e+00 2.698841e-153 2.494577e-60 1.000000e+00 0 3008 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 1 3009 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 3010 0.000000e+00 0.000000e+00 0.000000e+00 0.000000e+00 0 3009 3010 10 0.000000e+00 -1.836994e-45 11 0.000000e+00 0.000000e+00 12 0.000000e+00 0.000000e+00 13 0.000000e+00 0.000000e+00 14 0.000000e+00 0.000000e+00 15 0.000000e+00 0.000000e+00 16 0.000000e+00 0.000000e+00 17 0.000000e+00 0.000000e+00 18 -4.882814e-270 0.000000e+00 19 0.000000e+00 0.000000e+00 20 0.000000e+00 0.000000e+00 3000 5.593377e-93 0.000000e+00 3001 0.000000e+00 0.000000e+00 3002 0.000000e+00 0.000000e+00 3003 0.000000e+00 0.000000e+00 3004 0.000000e+00 0.000000e+00 3005 0.000000e+00 0.000000e+00 3006 0.000000e+00 0.000000e+00 3007 0.000000e+00 0.000000e+00 3008 0.000000e+00 0.000000e+00 3009 1.000000e+00 0.000000e+00 3010 0.000000e+00 1.000000e+00 > > ## calculate the kernel expansion > kernelMult(rbf, dt, ,yt) [,1] [1,] -1.0000000 [2,] -1.3076210 [3,] -1.0000000 [4,] -1.0000673 [5,] -1.3075217 [6,] -1.0000000 [7,] -1.0000673 [8,] -1.0000000 [9,] -1.0000000 [10,] -0.9999991 [11,] -1.0000000 [12,] 1.0000000 [13,] 1.0000005 [14,] 0.9998858 [15,] 1.0164331 [16,] 1.0000000 [17,] 1.0000000 [18,] 1.0000000 [19,] 1.0164221 [20,] 1.0000000 [21,] 1.0000000 [22,] 1.0000000 > > > > cleanEx(); ..nameEx <- "kfa-class" > > ### * kfa-class > > flush(stderr()); flush(stdout()) > > ### Name: kfa-class > ### Title: Class "kfa" > ### Aliases: kfa-class 
alpha,kfa-method alphaindex,kfa-method > ### kcall,kfa-method kernelf,kfa-method predict,kfa-method > ### xmatrix,kfa-method > ### Keywords: classes > > ### ** Examples > > data(promotergene) > f <- kfa(~.,data=promotergene) > > > > cleanEx(); ..nameEx <- "kfa" > > ### * kfa > > flush(stderr()); flush(stdout()) > > ### Name: kfa > ### Title: Kernel Feature Analysis > ### Aliases: kfa kfa,formula-method kfa,matrix-method show,kfa-method > ### Keywords: cluster > > ### ** Examples > > data(promotergene) > f <- kfa(~.,data=promotergene,features=2,kernel="rbfdot",kpar=list(sigma=0.01)) > plot(predict(f,promotergene),col=as.numeric(promotergene[,1])) > > > > cleanEx(); ..nameEx <- "kpca-class" > > ### * kpca-class > > flush(stderr()); flush(stdout()) > > ### Name: kpca-class > ### Title: Class "kpca" > ### Aliases: kpca-class eig pcv rotated eig,kpca-method kcall,kpca-method > ### kernelf,kpca-method pcv,kpca-method rotated,kpca-method > ### xmatrix,kpca-method > ### Keywords: classes > > ### ** Examples > > # another example using the iris > data(iris) > test <- sample(1:50,20) > > kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot",kpar=list(sigma=0.2),features=2) > > #print the principal component vectors > pcv(kpc) [,1] [,2] [1,] -0.1586550113 0.0263868895 [2,] -0.1539447445 0.0210446353 [3,] -0.1521172742 0.0211098889 [4,] -0.1584560182 0.0284789069 [5,] -0.1469287285 0.0150615211 [6,] -0.1584202252 0.0217618665 [7,] -0.1534608446 0.0232779026 [8,] -0.1566033894 0.0193663809 [9,] -0.1299518041 0.0284980537 [10,] -0.1501320300 0.0303789500 [11,] -0.1554031053 0.0254430897 [12,] -0.1516633633 0.0090896209 [13,] -0.1559688681 0.0224068195 [14,] -0.1506196101 0.0412525742 [15,] -0.1524206134 0.0037865843 [16,] -0.1526354702 0.0110530855 [17,] -0.1541039370 0.0173103866 [18,] -0.1537937799 0.0147148335 [19,] -0.1533757501 0.0150614529 [20,] -0.1553549199 0.0181878140 [21,] -0.1566108275 0.0302543981 [22,] -0.1530242647 0.0272724047 [23,] -0.1578939890 0.0305182006 [24,] -0.1461987482 0.0286632818 [25,] -0.1581423065 0.0209141930 [26,] -0.1472720403 0.0059328438 [27,] -0.1531025261 0.0203364163 [28,] -0.1546856463 0.0230963879 [29,] -0.1550826404 0.0239822257 [30,] -0.1583444961 0.0243464661 [31,] 0.0580948776 0.0135662487 [32,] 0.0589634456 -0.0433793019 [33,] 0.0662746019 0.0275277680 [34,] 0.0152384241 -0.1356781053 [35,] 0.0642345100 -0.0384222329 [36,] 0.0458791409 -0.1035580119 [37,] 0.0646255355 -0.0247107957 [38,] -0.0466541710 -0.1194434327 [39,] 0.0602170706 -0.0368113845 [40,] 0.0043222375 -0.1355880529 [41,] -0.0304469291 -0.1220002907 [42,] 0.0412471442 -0.1022281861 [43,] 0.0180826318 -0.1229257669 [44,] 0.0628430970 -0.0598277784 [45,] -0.0062017357 -0.1332385512 [46,] 0.0540527368 -0.0365175590 [47,] 0.0464829849 -0.0950946867 [48,] 0.0221143097 -0.1284982640 [49,] 0.0516004176 -0.0747082726 [50,] 0.0086949596 -0.1397257932 [51,] 0.0644285455 -0.0345078336 [52,] 0.0308635245 -0.1129438878 [53,] 0.0676505609 -0.0301183226 [54,] 0.0581081819 -0.0694704593 [55,] 0.0491506411 -0.0754224891 [56,] 0.0552231499 -0.0469658206 [57,] 0.0650683281 -0.0011119506 [58,] 0.0731761809 0.0308665024 [59,] 0.0566852318 -0.0792585758 [60,] -0.0168347513 -0.1357984555 [61,] 0.0001617763 -0.1413403388 [62,] -0.0083777771 -0.1407804768 [63,] 0.0164759440 -0.1326350990 [64,] 0.0701568372 -0.0216761632 [65,] 0.0399979949 -0.1004582730 [66,] 0.0528262396 -0.0571907088 [67,] 0.0651377662 -0.0062185721 [68,] 0.0476399030 -0.0828426599 [69,] 0.0240968741 -0.1236011647 [70,] 0.0164497751 -0.1369704113 [71,] 
0.0332856490 -0.1212466050 [72,] 0.0595815925 -0.0660198116 [73,] 0.0221682681 -0.1312552535 [74,] -0.0436699718 -0.1219002069 [75,] 0.0301054550 -0.1265217661 [76,] 0.0295964489 -0.1186440885 [77,] 0.0326475561 -0.1195250471 [78,] 0.0474366342 -0.0890795058 [79,] -0.0601160673 -0.1114684853 [80,] 0.0277210907 -0.1263005116 [81,] 0.0534119930 0.1220003532 [82,] 0.0672701197 -0.0186837440 [83,] 0.0573046129 0.1454063868 [84,] 0.0728298911 0.0645628263 [85,] 0.0665055019 0.1125685218 [86,] 0.0246537086 0.1652484371 [87,] 0.0229802074 -0.1016325566 [88,] 0.0419481575 0.1546232924 [89,] 0.0636685724 0.0967951414 [90,] 0.0382885227 0.1599038061 [91,] 0.0739384179 0.0466614270 [92,] 0.0751895080 0.0427212257 [93,] 0.0704327357 0.1039750961 [94,] 0.0611776560 -0.0320001739 [95,] 0.0620293631 0.0104300864 [96,] 0.0704982905 0.0723693675 [97,] 0.0744422968 0.0716662088 [98,] 0.0119510949 0.1548451952 [99,] 0.0107299668 0.1516345791 [100,] 0.0592805656 -0.0429046846 [101,] 0.0619216020 0.1330132776 [102,] 0.0599451304 -0.0410663948 [103,] 0.0191102116 0.1585232842 [104,] 0.0727626818 -0.0110625468 [105,] 0.0660886322 0.1183103768 [106,] 0.0522518481 0.1460310336 [107,] 0.0708895813 -0.0260911115 [108,] 0.0715368847 -0.0178593495 [109,] 0.0714747947 0.0825492285 [110,] 0.0568168511 0.1243302526 [111,] 0.0448367826 0.1515662438 [112,] 0.0148523160 0.1517049069 [113,] 0.0702149001 0.0864250592 [114,] 0.0729230919 -0.0033824614 [115,] 0.0649146161 0.0245114068 [116,] 0.0334684482 0.1623018164 [117,] 0.0625357520 0.0965480073 [118,] 0.0740286799 0.0667996095 [119,] 0.0680902255 -0.0344390714 [120,] 0.0692539009 0.1028187144 [121,] 0.0651359903 0.1171138560 [122,] 0.0667086351 0.0829989594 [123,] 0.0672701197 -0.0186837440 [124,] 0.0591545455 0.1403321032 [125,] 0.0593673112 0.1278210871 [126,] 0.0704611255 0.0785959671 [127,] 0.0713433120 0.0003562007 [128,] 0.0757868715 0.0521320632 [129,] 0.0659595260 0.0710261646 [130,] 0.0702245507 -0.0107349141 > rotated(kpc) [,1] [,2] 1 -5.541389839 0.468021772 2 -5.376872978 0.373266713 4 -5.313044389 0.374424110 5 -5.534439552 0.505127688 6 -5.131822540 0.267144781 8 -5.533189398 0.385988175 11 -5.359971666 0.412877964 12 -5.469732243 0.343499674 16 -4.538864553 0.505467293 17 -5.243705186 0.538828572 20 -5.427809570 0.451281685 21 -5.297190512 0.161221750 22 -5.447570134 0.397427645 23 -5.260735038 0.731693021 24 -5.323639206 0.067162289 26 -5.331143573 0.196047537 30 -5.382433142 0.307032696 31 -5.371600192 0.260995616 32 -5.356999544 0.267143571 35 -5.426126584 0.322595542 36 -5.469992037 0.536619408 37 -5.344723112 0.483728072 38 -5.514809391 0.541298448 39 -5.106326308 0.508397928 40 -5.523482449 0.370953071 45 -5.143813492 0.105230292 46 -5.347456569 0.360705099 47 -5.402750670 0.409658455 49 -5.416616601 0.425370477 50 -5.530544386 0.431830975 51 2.029096728 0.240623275 52 2.059433457 -0.769414589 53 2.314792346 0.488257426 54 0.532236881 -2.406509766 55 2.243537461 -0.681491524 56 1.602434130 -1.836798697 57 2.257194924 -0.438293055 58 -1.629503835 -2.118556909 59 2.103219182 -0.652920057 60 0.150964049 -2.404912516 61 -1.063428772 -2.163907658 62 1.440651031 -1.813211702 63 0.631577352 -2.180322743 64 2.194939174 -1.061159667 65 -0.216609831 -2.363239626 66 1.887915698 -0.647708502 67 1.623524767 -1.686685496 68 0.772392944 -2.279161599 69 1.802262830 -1.325093590 70 0.303691388 -2.478303226 71 2.250314595 -0.612062193 72 1.077979320 -2.003275093 73 2.362850864 -0.534205850 74 2.029561411 -1.232190989 75 1.716698773 -1.337761582 76 1.928795057 
-0.833028333 77 2.272660469 -0.019722563 78 2.555845805 0.547476245 79 1.979861619 -1.405801890 80 -0.587992267 -2.408644408 81 0.005650409 -2.506940268 82 -0.292613065 -2.497010049 83 0.575460099 -2.352536251 84 2.450388309 -0.384468063 85 1.397021630 -1.781818921 86 1.845077473 -1.014386212 87 2.275085754 -0.110298227 88 1.663932783 -1.469372451 89 0.841638548 -2.192302210 90 0.574546091 -2.429431277 91 1.162577569 -2.150539607 92 2.081023654 -1.170987177 93 0.774277565 -2.328062061 94 -1.525273837 -2.162132481 95 1.051502004 -2.244104642 96 1.033723801 -2.104379017 97 1.140290710 -2.120004497 98 1.656833154 -1.579994799 99 -2.099691410 -1.977106019 100 0.968222618 -2.240180272 101 1.865536253 2.163908767 102 2.349563085 -0.331391807 103 2.001494925 2.579059380 104 2.543750843 1.145144766 105 2.322857057 1.996617262 106 0.861087269 2.930995956 107 0.802636405 -1.802647078 108 1.465135528 2.742538765 109 2.223770791 1.716846301 110 1.337314396 2.836198739 111 2.582468681 0.827629332 112 2.626165871 0.757742354 113 2.460024695 1.844196480 114 2.136769828 -0.567584070 115 2.166517650 0.184997459 116 2.462314349 1.283608650 117 2.600067807 1.271136791 118 0.417419376 2.746474634 119 0.374768678 2.689528239 120 2.070509601 -0.760996348 121 2.162753849 2.359243970 122 2.093721047 -0.728390774 123 0.667467932 2.811712552 124 2.541403403 -0.196215351 125 2.308296927 2.098460003 126 1.825015534 2.590138680 127 2.475981075 -0.462775587 128 2.498589631 -0.316769599 129 2.496420996 1.464167887 130 1.984458723 2.205233972 131 1.566027380 2.688316183 132 0.518751170 2.690775636 133 2.452416290 1.532913131 134 2.547006092 -0.059994398 135 2.267291725 0.434756512 136 1.168962247 2.878732021 137 2.184204443 1.712462907 138 2.585621288 1.184818378 139 2.378207159 -0.610842564 140 2.418851188 1.823685848 141 2.275023725 2.077237427 142 2.329951950 1.472144720 143 2.349563085 -0.331391807 144 2.066108058 2.489057289 145 2.073539390 2.267150575 146 2.461016277 1.394049261 147 2.491828659 0.006317898 148 2.647030160 0.924661492 149 2.303787598 1.259784387 150 2.452753359 -0.190404160 > kernelf(kpc) Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.2 > eig(kpc) Comp.1 Comp.2 0.2686715 0.1364377 > > > > cleanEx(); ..nameEx <- "kpca" > > ### * kpca > > flush(stderr()); flush(stdout()) > > ### Name: kpca > ### Title: Kernel Principal Components Analysis > ### Aliases: kpca kpca,formula-method kpca,matrix-method > ### predict,kpca-method > ### Keywords: cluster > > ### ** Examples > > # another example using the iris > data(iris) > test <- sample(1:50,20) > > kpc <- kpca(~.,data=iris[-test,-5],kernel="rbfdot",kpar=list(sigma=0.2),features=2) > > #print the principal component vectors > pcv(kpc) [,1] [,2] [1,] -0.1586550113 0.0263868895 [2,] -0.1539447445 0.0210446353 [3,] -0.1521172742 0.0211098889 [4,] -0.1584560182 0.0284789069 [5,] -0.1469287285 0.0150615211 [6,] -0.1584202252 0.0217618665 [7,] -0.1534608446 0.0232779026 [8,] -0.1566033894 0.0193663809 [9,] -0.1299518041 0.0284980537 [10,] -0.1501320300 0.0303789500 [11,] -0.1554031053 0.0254430897 [12,] -0.1516633633 0.0090896209 [13,] -0.1559688681 0.0224068195 [14,] -0.1506196101 0.0412525742 [15,] -0.1524206134 0.0037865843 [16,] -0.1526354702 0.0110530855 [17,] -0.1541039370 0.0173103866 [18,] -0.1537937799 0.0147148335 [19,] -0.1533757501 0.0150614529 [20,] -0.1553549199 0.0181878140 [21,] -0.1566108275 0.0302543981 [22,] -0.1530242647 0.0272724047 [23,] -0.1578939890 0.0305182006 [24,] -0.1461987482 0.0286632818 [25,] -0.1581423065 0.0209141930 [26,] -0.1472720403 0.0059328438 [27,] -0.1531025261 0.0203364163 [28,] -0.1546856463 0.0230963879 [29,] -0.1550826404 0.0239822257 [30,] -0.1583444961 0.0243464661 [31,] 0.0580948776 0.0135662487 [32,] 0.0589634456 -0.0433793019 [33,] 0.0662746019 0.0275277680 [34,] 0.0152384241 -0.1356781053 [35,] 0.0642345100 -0.0384222329 [36,] 0.0458791409 -0.1035580119 [37,] 0.0646255355 -0.0247107957 [38,] -0.0466541710 -0.1194434327 [39,] 0.0602170706 -0.0368113845 [40,] 0.0043222375 -0.1355880529 [41,] -0.0304469291 -0.1220002907 [42,] 0.0412471442 -0.1022281861 [43,] 0.0180826318 -0.1229257669 [44,] 0.0628430970 -0.0598277784 [45,] -0.0062017357 -0.1332385512 [46,] 0.0540527368 -0.0365175590 [47,] 0.0464829849 -0.0950946867 [48,] 0.0221143097 -0.1284982640 [49,] 0.0516004176 -0.0747082726 [50,] 0.0086949596 -0.1397257932 [51,] 0.0644285455 -0.0345078336 [52,] 0.0308635245 -0.1129438878 [53,] 0.0676505609 -0.0301183226 [54,] 0.0581081819 -0.0694704593 [55,] 0.0491506411 -0.0754224891 [56,] 0.0552231499 -0.0469658206 [57,] 0.0650683281 -0.0011119506 [58,] 0.0731761809 0.0308665024 [59,] 0.0566852318 -0.0792585758 [60,] -0.0168347513 -0.1357984555 [61,] 0.0001617763 -0.1413403388 [62,] -0.0083777771 -0.1407804768 [63,] 0.0164759440 -0.1326350990 [64,] 0.0701568372 -0.0216761632 [65,] 0.0399979949 -0.1004582730 [66,] 0.0528262396 -0.0571907088 [67,] 0.0651377662 -0.0062185721 [68,] 0.0476399030 -0.0828426599 [69,] 0.0240968741 -0.1236011647 [70,] 0.0164497751 -0.1369704113 [71,] 0.0332856490 -0.1212466050 [72,] 0.0595815925 -0.0660198116 [73,] 0.0221682681 -0.1312552535 [74,] -0.0436699718 -0.1219002069 [75,] 0.0301054550 -0.1265217661 [76,] 0.0295964489 -0.1186440885 [77,] 0.0326475561 -0.1195250471 [78,] 0.0474366342 -0.0890795058 [79,] -0.0601160673 -0.1114684853 [80,] 0.0277210907 -0.1263005116 [81,] 0.0534119930 0.1220003532 [82,] 0.0672701197 -0.0186837440 [83,] 0.0573046129 0.1454063868 [84,] 0.0728298911 0.0645628263 [85,] 0.0665055019 0.1125685218 [86,] 0.0246537086 0.1652484371 [87,] 0.0229802074 -0.1016325566 [88,] 0.0419481575 0.1546232924 [89,] 0.0636685724 0.0967951414 [90,] 0.0382885227 0.1599038061 [91,] 
0.0739384179 0.0466614270 [92,] 0.0751895080 0.0427212257 [93,] 0.0704327357 0.1039750961 [94,] 0.0611776560 -0.0320001739 [95,] 0.0620293631 0.0104300864 [96,] 0.0704982905 0.0723693675 [97,] 0.0744422968 0.0716662088 [98,] 0.0119510949 0.1548451952 [99,] 0.0107299668 0.1516345791 [100,] 0.0592805656 -0.0429046846 [101,] 0.0619216020 0.1330132776 [102,] 0.0599451304 -0.0410663948 [103,] 0.0191102116 0.1585232842 [104,] 0.0727626818 -0.0110625468 [105,] 0.0660886322 0.1183103768 [106,] 0.0522518481 0.1460310336 [107,] 0.0708895813 -0.0260911115 [108,] 0.0715368847 -0.0178593495 [109,] 0.0714747947 0.0825492285 [110,] 0.0568168511 0.1243302526 [111,] 0.0448367826 0.1515662438 [112,] 0.0148523160 0.1517049069 [113,] 0.0702149001 0.0864250592 [114,] 0.0729230919 -0.0033824614 [115,] 0.0649146161 0.0245114068 [116,] 0.0334684482 0.1623018164 [117,] 0.0625357520 0.0965480073 [118,] 0.0740286799 0.0667996095 [119,] 0.0680902255 -0.0344390714 [120,] 0.0692539009 0.1028187144 [121,] 0.0651359903 0.1171138560 [122,] 0.0667086351 0.0829989594 [123,] 0.0672701197 -0.0186837440 [124,] 0.0591545455 0.1403321032 [125,] 0.0593673112 0.1278210871 [126,] 0.0704611255 0.0785959671 [127,] 0.0713433120 0.0003562007 [128,] 0.0757868715 0.0521320632 [129,] 0.0659595260 0.0710261646 [130,] 0.0702245507 -0.0107349141 > > #plot the data projection on the components > plot(rotated(kpc),col=as.integer(iris[-test,5]),xlab="1st Principal Component",ylab="2nd Principal Component") > > #embed remaining points > emb <- predict(kpc,iris[test,-5]) > points(emb,col=iris[test,5]) > > > > cleanEx(); ..nameEx <- "ksvm-class" > > ### * ksvm-class > > flush(stderr()); flush(stdout()) > > ### Name: ksvm-class > ### Title: Class "ksvm" > ### Aliases: ksvm-class SVindex cross alpha alphaindex coeff cross error > ### fit prob.model type kernelf xmatrix ymatrix scaling lev kcall prior > ### show b SVindex,ksvm-method alpha,ksvm-method alphaindex,ksvm-method > ### cross,ksvm-method error,ksvm-method fit,ksvm-method prior,ksvm-method > ### prob.model,ksvm-method kernelf,ksvm-method kpar,ksvm-method > ### lev,ksvm-method kcall,ksvm-method scaling,ksvm-method > ### type,ksvm-method xmatrix,ksvm-method ymatrix,ksvm-method > ### b,ksvm-method > ### Keywords: classes > > ### ** Examples > > ## simple example using the promotergene data set > data(promotergene) > > ## train a support vector machine > gene <- ksvm(Class~.,data=promotergene,kernel="rbfdot",kpar=list(sigma=0.015),C=50,cross=4) > gene Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 50 Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.015 Number of Support Vectors : 93 Training error : 0 Cross validation error : 0.122151 > > # the kernel function > kernelf(gene) Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.015 > # the alpha values > alpha(gene) [[1]] [1] 0.8815820 1.3080831 0.8758204 1.2963734 2.2408533 0.0000000 0.4237548 [8] 1.6116198 0.1560587 1.0245949 0.8670750 1.4239192 0.8448965 0.3470563 [15] 1.9802878 2.2855800 1.8057206 1.5412192 0.2002079 1.4633855 0.0000000 [22] 1.0368662 1.3960876 0.0000000 3.3223825 1.2673153 1.6528146 0.0000000 [29] 0.5993937 0.1230099 0.7323349 1.6942574 0.6784733 1.3438250 2.8024940 [36] 0.0000000 1.1498156 0.0000000 0.5621593 0.1013984 0.3328873 1.7805261 [43] 0.9040873 1.3796798 0.3778216 2.0888245 0.0000000 1.2517071 1.1365124 [50] 0.2442585 2.2150818 1.4370602 0.0000000 1.5434503 1.7222269 1.3919765 [57] 0.4770484 1.6313272 1.4441848 0.9020386 1.7904195 0.3726699 1.0453673 [64] 0.8605527 1.2211276 0.9013577 0.7348363 0.4576333 0.6898707 0.0000000 [71] 2.0605997 0.9409606 1.6980524 0.9985140 1.3536632 1.2566780 1.3422393 [78] 2.8373683 1.3715399 0.0000000 2.4442968 1.6078123 0.3546858 0.3824164 [85] 0.1949565 0.6580490 0.9737201 1.3409331 1.9176216 1.1892336 0.8599108 [92] 0.5107445 1.1504897 1.4413166 0.8103452 0.0000000 0.6199419 0.3041910 [99] 0.3685451 1.8535493 0.8699809 1.1066264 0.0000000 1.6195412 0.5645515 [106] 0.0000000 > # the coefficients > coeff(gene) [[1]] [1] 0.8815820 1.3080831 0.8758204 1.2963734 2.2408533 0.4237548 [7] 1.6116198 0.1560587 1.0245949 0.8670750 1.4239192 0.8448965 [13] 0.3470563 1.9802878 2.2855800 1.8057206 1.5412192 0.2002079 [19] 1.4633855 1.0368662 1.3960876 3.3223825 1.2673153 1.6528146 [25] 0.5993937 0.1230099 0.7323349 1.6942574 0.6784733 1.3438250 [31] 2.8024940 1.1498156 0.5621593 0.1013984 0.3328873 1.7805261 [37] 0.9040873 1.3796798 0.3778216 2.0888245 1.2517071 1.1365124 [43] 0.2442585 2.2150818 1.4370602 -1.5434503 -1.7222269 -1.3919765 [49] -0.4770484 -1.6313272 -1.4441848 -0.9020386 -1.7904195 -0.3726699 [55] -1.0453673 -0.8605527 -1.2211276 -0.9013577 -0.7348363 -0.4576333 [61] -0.6898707 -2.0605997 -0.9409606 -1.6980524 -0.9985140 -1.3536632 [67] -1.2566780 -1.3422393 -2.8373683 -1.3715399 -2.4442968 -1.6078123 [73] -0.3546858 -0.3824164 -0.1949565 -0.6580490 -0.9737201 -1.3409331 [79] -1.9176216 -1.1892336 -0.8599108 -0.5107445 -1.1504897 -1.4413166 [85] -0.8103452 -0.6199419 -0.3041910 -0.3685451 -1.8535493 -0.8699809 [91] -1.1066264 -1.6195412 -0.5645515 > # the fitted values > fit(gene) [1] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + [38] + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - [75] - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - Levels: + - > # the cross validation error > cross(gene) [1] 0.122151 > > > > > cleanEx(); ..nameEx <- "ksvm" > > ### * ksvm > > flush(stderr()); flush(stdout()) > > ### Name: ksvm > ### Title: Support Vector Machines > ### Aliases: ksvm ksvm,formula-method ksvm,vector-method ksvm,matrix-method > ### show,ksvm-method coeff,ksvm-method > ### Keywords: methods regression nonlinear classif neural > > ### ** Examples > > > ## simple example using the spam data set > data(spam) > > ## create test and training set > index <- sample(1:dim(spam)[1]) > spamtrain <- spam[index[1:floor(2 * dim(spam)[1]/3)], ] > spamtest <- spam[index[((2 * ceiling(dim(spam)[1]/3)) + 1):dim(spam)[1]], ] > > ## train a support vector machine > filter <- ksvm(type~.,data=spamtrain,kernel="rbfdot",kpar=list(sigma=0.05),C=5,cross=3) > filter Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 5 Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.05 Number of Support Vectors : 1129 Training error : 0.019237 Cross validation error : 0.081838 > > ## predict mail type on the test set > mailtype <- predict(filter,spamtest[,-58]) > > ## Check results > table(mailtype,spamtest[,58]) mailtype nonspam spam nonspam 901 72 spam 32 528 > > ## Another example with the famous iris data > data(iris) > > ## Create a kernel function using the build in rbfdot function > rbf <- rbfdot(sigma=0.1) > rbf Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 > > ## train a bound constraint support vector machine > irismodel <- ksvm(Species~.,data=iris,type="C-bsvc",kernel=rbf,C=10,prob.model=TRUE) > > irismodel Support Vector Machine object of class "ksvm" SV type: C-bsvc (classification) parameter : cost C = 10 Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 Number of Support Vectors : 32 Training error : 0.02 > > ## get fitted values > fit(irismodel) [1] setosa setosa setosa setosa setosa setosa [7] setosa setosa setosa setosa setosa setosa [13] setosa setosa setosa setosa setosa setosa [19] setosa setosa setosa setosa setosa setosa [25] setosa setosa setosa setosa setosa setosa [31] setosa setosa setosa setosa setosa setosa [37] setosa setosa setosa setosa setosa setosa [43] setosa setosa setosa setosa setosa setosa [49] setosa setosa versicolor versicolor versicolor versicolor [55] versicolor versicolor versicolor versicolor versicolor versicolor [61] versicolor versicolor versicolor versicolor versicolor versicolor [67] versicolor versicolor versicolor versicolor versicolor versicolor [73] virginica versicolor versicolor versicolor versicolor virginica [79] versicolor versicolor versicolor versicolor versicolor virginica [85] versicolor versicolor versicolor versicolor versicolor versicolor [91] versicolor versicolor versicolor versicolor versicolor versicolor [97] versicolor versicolor versicolor versicolor virginica virginica [103] virginica virginica virginica virginica virginica virginica [109] virginica virginica virginica virginica virginica virginica [115] virginica virginica virginica virginica virginica virginica [121] virginica virginica virginica virginica virginica virginica [127] virginica virginica virginica virginica virginica virginica [133] virginica virginica virginica virginica virginica virginica [139] virginica virginica virginica virginica virginica virginica [145] virginica virginica virginica virginica virginica virginica Levels: setosa versicolor virginica > > ## Test on the training set with probabilities as output > predict(irismodel, iris[,-5], type="probabilities") setosa versicolor virginica [1,] 0.977203160 1.727796e-02 0.0055188799 [2,] 0.968615417 2.507442e-02 0.0063101623 [3,] 0.980285314 1.430605e-02 0.0054086340 [4,] 0.974938324 1.872443e-02 0.0063372459 [5,] 0.979054478 1.554420e-02 0.0054013172 [6,] 0.957947882 3.423568e-02 0.0078164339 [7,] 0.977870762 1.574429e-02 0.0063849476 [8,] 0.975483111 1.867873e-02 0.0058381601 [9,] 0.974932869 1.852036e-02 0.0065467733 [10,] 0.974713989 1.955837e-02 0.0057276443 [11,] 0.970683549 2.306921e-02 0.0062472421 [12,] 0.976038341 1.781696e-02 0.0061446945 [13,] 0.976620891 1.791324e-02 0.0054658686 [14,] 0.985788737 9.150309e-03 0.0050609534 [15,] 0.960834159 3.155069e-02 0.0076151486 [16,] 0.932481456 5.666238e-02 0.0108561599 [17,] 0.966734421 2.658564e-02 0.0066799339 [18,] 0.973465177 2.047202e-02 0.0060628011 [19,] 0.950805634 4.034183e-02 0.0088525314 [20,] 0.972991951 2.085404e-02 0.0061540086 [21,] 
0.957287482 3.485834e-02 0.0078541804 [22,] 0.969219464 2.404220e-02 0.0067383364 [23,] 0.984821230 9.940054e-03 0.0052387162 [24,] 0.932482518 5.666225e-02 0.0108552343 [25,] 0.967862871 2.467219e-02 0.0074649403 [26,] 0.955768648 3.667879e-02 0.0075525612 [27,] 0.961173014 3.095732e-02 0.0078696689 [28,] 0.973267798 2.074706e-02 0.0059851431 [29,] 0.973676867 2.044934e-02 0.0058737897 [30,] 0.973328022 2.016122e-02 0.0065107558 [31,] 0.968168278 2.506183e-02 0.0067698944 [32,] 0.949154876 4.205233e-02 0.0087927977 [33,] 0.969353113 2.404516e-02 0.0066017292 [34,] 0.959533362 3.295671e-02 0.0075099268 [35,] 0.969199878 2.437538e-02 0.0064247471 [36,] 0.978062788 1.666231e-02 0.0052749054 [37,] 0.969492733 2.405879e-02 0.0064484791 [38,] 0.982027400 1.285964e-02 0.0051129632 [39,] 0.979433721 1.456791e-02 0.0059983662 [40,] 0.973569600 2.045152e-02 0.0059788766 [41,] 0.977094592 1.722735e-02 0.0056780628 [42,] 0.932482124 5.666228e-02 0.0108555986 [43,] 0.981775870 1.229490e-02 0.0059292347 [44,] 0.947696282 4.226487e-02 0.0100388515 [45,] 0.959340400 3.250757e-02 0.0081520340 [46,] 0.963985862 2.890890e-02 0.0071052366 [47,] 0.974146098 1.983806e-02 0.0060158402 [48,] 0.979198569 1.497485e-02 0.0058265785 [49,] 0.972984904 2.104165e-02 0.0059734476 [50,] 0.975815239 1.849524e-02 0.0056895233 [51,] 0.016861794 9.814003e-01 0.0017379040 [52,] 0.009514413 9.893104e-01 0.0011752268 [53,] 0.009803916 9.702675e-01 0.0199285691 [54,] 0.002221619 9.852190e-01 0.0125593825 [55,] 0.003061097 9.761478e-01 0.0207910787 [56,] 0.002403171 9.945234e-01 0.0030733944 [57,] 0.010994389 9.823725e-01 0.0066331568 [58,] 0.023279331 9.716782e-01 0.0050424471 [59,] 0.005227533 9.941468e-01 0.0006256481 [60,] 0.003376996 9.927910e-01 0.0038319707 [61,] 0.017173618 9.753070e-01 0.0075194298 [62,] 0.004289431 9.947484e-01 0.0009622018 [63,] 0.004196623 9.953185e-01 0.0004849179 [64,] 0.002827101 9.873692e-01 0.0098037154 [65,] 0.008682658 9.902320e-01 0.0010853371 [66,] 0.010148732 9.891630e-01 0.0006882317 [67,] 0.003655862 9.837523e-01 0.0125918613 [68,] 0.005237252 9.942038e-01 0.0005588978 [69,] 0.004348061 7.235205e-01 0.2721314512 [70,] 0.003574654 9.960221e-01 0.0004032695 [71,] 0.008152666 5.033470e-01 0.4885003451 [72,] 0.003853526 9.958888e-01 0.0002576507 [73,] 0.005067177 3.303620e-01 0.6645708046 [74,] 0.002828423 9.960907e-01 0.0010808355 [75,] 0.004984568 9.947072e-01 0.0003082308 [76,] 0.006700635 9.927700e-01 0.0005294059 [77,] 0.004765036 9.826560e-01 0.0125789953 [78,] 0.005983977 3.098357e-01 0.6841803322 [79,] 0.002655261 9.840391e-01 0.0133056249 [80,] 0.009439881 9.889918e-01 0.0015682753 [81,] 0.003879735 9.955696e-01 0.0005507065 [82,] 0.005993240 9.931399e-01 0.0008668828 [83,] 0.003821275 9.958517e-01 0.0003269939 [84,] 0.004751870 2.467635e-02 0.9705717834 [85,] 0.004079850 9.765433e-01 0.0193768092 [86,] 0.017563099 9.799673e-01 0.0024696441 [87,] 0.008082266 9.868139e-01 0.0051038371 [88,] 0.002229449 9.915065e-01 0.0062640173 [89,] 0.007129432 9.921644e-01 0.0007061485 [90,] 0.002102574 9.949577e-01 0.0029397524 [91,] 0.002112387 9.936704e-01 0.0042172235 [92,] 0.004048734 9.938645e-01 0.0020867884 [93,] 0.002782999 9.969484e-01 0.0002686137 [94,] 0.018678427 9.773184e-01 0.0040031471 [95,] 0.002346593 9.962892e-01 0.0013641643 [96,] 0.007818070 9.913784e-01 0.0008035576 [97,] 0.004208298 9.953729e-01 0.0004187939 [98,] 0.004295544 9.954034e-01 0.0003010861 [99,] 0.023445928 9.715574e-01 0.0049967121 [100,] 0.003304260 9.963172e-01 0.0003785637 [101,] 0.004494505 1.430957e-04 
0.9953623989 [102,] 0.002970482 1.567893e-04 0.9968727287 [103,] 0.003512834 1.089443e-04 0.9963782222 [104,] 0.003399581 2.284216e-04 0.9963719975 [105,] 0.002766448 4.186433e-05 0.9971916877 [106,] 0.004734202 3.234741e-04 0.9949423236 [107,] 0.007787217 9.754856e-03 0.9824579267 [108,] 0.004429684 1.884187e-04 0.9953818973 [109,] 0.003842076 8.250938e-05 0.9960754147 [110,] 0.006849767 7.709070e-04 0.9923793263 [111,] 0.005852826 9.774740e-03 0.9843724341 [112,] 0.003117253 1.598142e-04 0.9967229329 [113,] 0.003449961 1.051794e-04 0.9964448595 [114,] 0.002603910 3.801802e-05 0.9973580722 [115,] 0.002377256 2.003809e-05 0.9976027063 [116,] 0.004426932 1.389790e-04 0.9954340892 [117,] 0.004136051 1.498205e-03 0.9943657432 [118,] 0.009063698 2.516145e-03 0.9884201572 [119,] 0.008560521 7.770243e-04 0.9906624550 [120,] 0.007381859 4.045454e-02 0.9521635974 [121,] 0.003871320 1.379890e-04 0.9959906909 [122,] 0.003424331 2.416710e-04 0.9963339979 [123,] 0.006248238 4.549192e-04 0.9932968427 [124,] 0.004212013 1.009555e-02 0.9856924371 [125,] 0.004772899 3.428214e-04 0.9948842794 [126,] 0.004812699 1.210859e-03 0.9939764414 [127,] 0.004692374 3.167860e-02 0.9636290216 [128,] 0.005797622 6.644268e-02 0.9277596999 [129,] 0.002478073 2.587333e-05 0.9974960541 [130,] 0.005713149 1.231163e-02 0.9819752173 [131,] 0.004836643 2.175866e-04 0.9949457706 [132,] 0.010836836 1.098921e-02 0.9781739539 [133,] 0.002387091 2.470973e-05 0.9975881995 [134,] 0.004910590 3.575823e-01 0.6375071130 [135,] 0.005153061 9.706220e-03 0.9851407189 [136,] 0.005276102 4.048833e-04 0.9943190151 [137,] 0.006307985 2.730920e-04 0.9934189230 [138,] 0.004836684 3.371076e-03 0.9917922403 [139,] 0.006145890 1.153099e-01 0.8785442459 [140,] 0.004065283 3.392373e-04 0.9955954794 [141,] 0.003408506 8.310137e-05 0.9965083927 [142,] 0.004391131 2.682388e-04 0.9953406303 [143,] 0.002970482 1.567893e-04 0.9968727287 [144,] 0.003617840 1.136251e-04 0.9962685351 [145,] 0.004554388 1.813571e-04 0.9952642553 [146,] 0.003552478 8.542639e-05 0.9963620956 [147,] 0.003565449 5.146569e-04 0.9959198943 [148,] 0.003916027 6.952697e-04 0.9953887033 [149,] 0.007303176 5.498124e-04 0.9921470112 [150,] 0.005376391 1.188166e-02 0.9827419527 > > ## Demo of the plot function > x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) > y <- matrix(c(rep(1,60),rep(-1,60))) > > svp <- ksvm(x,y,type="C-svc") Using automatic sigma estimation (sigest) for RBF or laplace kernel > plot(svp) Loading required package: colorspace Attaching package: 'colorspace' The following object(s) are masked from package:grDevices : hcl > > > #### Use custom kernel > > k <- function(x,y) {(sum(x*y) +1)*exp(0.001*sum((x-y)^2))} > class(k) <- "kernel" > > data(promotergene) > > ## train svm using custom kernel > gene <- ksvm(Class~.,data=promotergene,kernel=k,C=10,cross=5) > > gene Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 10 Number of Support Vectors : 66 Training error : 0 Cross validation error : 0.112987 > > ## regression > # create data > x <- seq(-20,20,0.1) > y <- sin(x)/x + rnorm(401,sd=0.03) > > # train support vector machine > regm <- ksvm(x,y,epsilon=0.01,kpar=list(sigma=16),cross=3) > plot(x,y,type="l") > lines(x,predict(regm,x),col="red") > > > > cleanEx(); ..nameEx <- "musk" > > ### * musk > > flush(stderr()); flush(stdout()) > > ### Name: musk > ### Title: Musk data set > ### Aliases: musk > ### Keywords: datasets > > ### ** Examples > > data(musk) > > muskm <- 
ksvm(Class~.,data=musk,kernel="rbfdot",C=1000) Using automatic sigma estimation (sigest) for RBF or laplace kernel > > muskm Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 1000 Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.00275612107872740 Number of Support Vectors : 187 Training error : 0 > > > > > cleanEx(); ..nameEx <- "onlearn-class" > > ### * onlearn-class > > flush(stderr()); flush(stdout()) > > ### Name: onlearn-class > ### Title: Class "onlearn" > ### Aliases: onlearn-class alpha,onlearn-method b,onlearn-method > ### buffer,onlearn-method fit,onlearn-method kernelf,onlearn-method > ### kpar,onlearn-method predict,onlearn-method rho,onlearn-method rho > ### show,onlearn-method type,onlearn-method xmatrix,onlearn-method buffer > ### Keywords: classes > > ### ** Examples > > > ## create toy data set > x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) > y <- matrix(c(rep(1,50),rep(-1,50)),,1) > > ## initialize onlearn object > on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),type="classification") > > ## learn one data point at the time > for(i in sample(1:100,100)) + on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) > > sign(predict(on,x)) [1] 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 -1 1 -1 1 1 1 1 1 [26] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 1 [51] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 [76] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 > > > > > cleanEx(); ..nameEx <- "onlearn" > > ### * onlearn > > flush(stderr()); flush(stdout()) > > ### Name: onlearn > ### Title: Kernel Online Learning algorithms > ### Aliases: onlearn onlearn,onlearn-method > ### Keywords: classif neural regression ts > > ### ** Examples > > > ## create toy data set > x <- rbind(matrix(rnorm(100),,2),matrix(rnorm(100)+3,,2)) > y <- matrix(c(rep(1,50),rep(-1,50)),,1) > > ## initialize onlearn object > on <- inlearn(2,kernel="rbfdot",kpar=list(sigma=0.2),type="classification") > > ind <- sample(1:100,100) > ## learn one data point at the time > for(i in ind) + on <- onlearn(on,x[i,],y[i],nu=0.03,lambda=0.1) > > ## or learn all the data > on <- onlearn(on,x[ind,],y[ind],nu=0.03,lambda=0.1) > > sign(predict(on,x)) [1] 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 -1 1 -1 1 1 1 1 1 [26] 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 -1 1 1 1 1 1 1 1 [51] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 [76] -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1 > > > > cleanEx(); ..nameEx <- "plot" > > ### * plot > > flush(stderr()); flush(stdout()) > > ### Name: plot > ### Title: plot method for support vector object > ### Aliases: plot.ksvm plot,ksvm,missing-method plot,ksvm-method > ### Keywords: methods regression classif > > ### ** Examples > > ## Demo of the plot function > x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2)) > y <- matrix(c(rep(1,60),rep(-1,60))) > > svp <- ksvm(x,y,type="C-svc") Using automatic sigma estimation (sigest) for RBF or laplace kernel > plot(svp) Loading required package: colorspace Attaching package: 'colorspace' The following object(s) are masked from package:grDevices : hcl > > > > > cleanEx(); ..nameEx <- "predict.ksvm" > > ### * predict.ksvm > > flush(stderr()); flush(stdout()) > > ### Name: predict.ksvm > ### Title: predict method for support vector object > ### Aliases: predict.ksvm predict,ksvm-method > ### Keywords: methods regression classif > > ### ** Examples > > > ## example using the 
promotergene data set > data(promotergene) > > ## create test and training set > ind <- sample(1:dim(promotergene)[1],20) > genetrain <- promotergene[-ind, ] > genetest <- promotergene[ind, ] > > ## train a support vector machine > gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot",kpar=list(sigma=0.015),C=70,cross=4,prob.model=TRUE) > gene Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 70 Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.015 Number of Support Vectors : 78 Training error : 0 Cross validation error : 0.128247 > > ## predict gene type probabilities on the test set > genetype <- predict(gene,genetest,type="probabilities") > genetype + - [1,] 9.999718e-01 2.824313e-05 [2,] 9.995675e-01 4.324720e-04 [3,] 1.724140e-02 9.827586e-01 [4,] 1.068762e-01 8.931238e-01 [5,] 9.999967e-01 3.293274e-06 [6,] 2.370272e-03 9.976297e-01 [7,] 2.160459e-03 9.978395e-01 [8,] 1.138624e-03 9.988614e-01 [9,] 9.733788e-04 9.990266e-01 [10,] 9.999985e-01 1.477460e-06 [11,] 9.693895e-01 3.061055e-02 [12,] 9.104460e-01 8.955401e-02 [13,] 1.225212e-02 9.877479e-01 [14,] 9.999929e-01 7.115575e-06 [15,] 7.918475e-01 2.081525e-01 [16,] 8.555711e-01 1.444289e-01 [17,] 6.837841e-07 9.999993e-01 [18,] 1.233261e-01 8.766739e-01 [19,] 9.906577e-01 9.342292e-03 [20,] 4.035130e-04 9.995965e-01 > > > > cleanEx(); ..nameEx <- "promotergene" > > ### * promotergene > > flush(stderr()); flush(stdout()) > > ### Name: promotergene > ### Title: E. coli promoter gene sequences (DNA) > ### Aliases: promotergene > ### Keywords: datasets > > ### ** Examples > > data(promotergene) > > ## Create classification model using Gaussian Processes > > prom <- gausspr(Class~.,data=promotergene,kernel="rbfdot",kpar=list(sigma=0.02),cross=4) > prom Gaussian Processes object of class "gausspr" Problem type: classification Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.02 Number of training instances learned : 1 Train error : 0 Cross validation error : 0.1317664 > > ## Create model using Support Vector Machines > > promsv <- ksvm(Class~.,data=promotergene,kernel="laplacedot",kpar="automatic",C=60,cross=4) Using automatic sigma estimation (sigest) for RBF or laplace kernel > promsv Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 60 Laplace kernel function. 
Hyperparameter : sigma = 0.0155288154285672 Number of Support Vectors : 102 Training error : 0 Cross validation error : 0.085114 > > > > cleanEx(); ..nameEx <- "ranking-class" > > ### * ranking-class > > flush(stderr()); flush(stdout()) > > ### Name: ranking-class > ### Title: Class "ranking" > ### Aliases: ranking-class edgegraph convergence convergence,ranking-method > ### edgegraph,ranking-method show,ranking-method > ### Keywords: classes > > ### ** Examples > > data(spirals) > > ## create data set to be ranked > ran<-spirals[rowSums(abs(spirals)<0.55)==2,] > > ## rank points according to "relevance" to point 54 (up left) > ranked<-ranking(ran,54,kernel="rbfdot",kpar=list(sigma=100),edgegraph=TRUE) > > ranked Spectral Clustering object of class "specc" [,1] [,2] [,3] [1,] 1 0.501933954 22 [2,] 2 0.003693627 50 [3,] 3 0.002223344 65 [4,] 4 0.003438816 53 [5,] 5 0.003918642 48 [6,] 6 0.003970762 46 [7,] 7 0.590867827 19 [8,] 8 0.003992904 45 [9,] 9 0.003954118 47 [10,] 10 0.003613429 52 [11,] 11 0.003771499 49 [12,] 12 0.708139398 7 [13,] 13 0.003649062 51 [14,] 14 0.003361168 54 [15,] 15 0.003339789 55 [16,] 16 0.003278750 56 [17,] 17 0.003089340 57 [18,] 18 0.003011486 58 [19,] 19 0.300192835 42 [20,] 20 0.002853653 59 [21,] 21 0.001417890 85 [22,] 22 0.002676371 60 [23,] 23 0.002584547 61 [24,] 24 0.002416572 62 [25,] 25 0.698754734 8 [26,] 26 0.002352778 63 [27,] 27 0.002268396 64 [28,] 28 0.002050746 66 [29,] 29 0.629586956 17 [30,] 30 0.002007343 67 [31,] 31 0.805769796 2 [32,] 32 0.001954349 68 [33,] 33 0.001817912 69 [34,] 34 0.001721578 71 [35,] 35 0.680673676 11 [36,] 36 0.001452053 84 [37,] 37 0.424161568 39 [38,] 38 0.001457811 83 [39,] 39 0.001464430 82 [40,] 40 0.001519561 78 [41,] 41 0.001484902 80 [42,] 42 0.001515801 79 [43,] 43 0.001532059 77 [44,] 44 0.766982591 3 [45,] 45 0.001639013 73 [46,] 46 0.001595997 75 [47,] 47 0.001739823 70 [48,] 48 0.454830310 32 [49,] 49 0.001595851 76 [50,] 50 0.496213855 23 [51,] 51 0.001711609 72 [52,] 52 0.001621874 74 [53,] 53 0.001477896 81 [54,] 54 NA NA [55,] 55 0.503425922 21 [56,] 56 0.433022692 36 [57,] 57 0.469598796 29 [58,] 58 0.449585799 33 [59,] 59 0.646713361 13 [60,] 60 0.447782056 35 [61,] 61 0.692159399 10 [62,] 62 0.739738982 4 [63,] 63 0.492684712 25 [64,] 64 0.640261723 16 [65,] 65 0.456652115 31 [66,] 66 0.472051696 27 [67,] 67 0.082113192 44 [68,] 68 0.082459425 43 [69,] 69 0.711411130 6 [70,] 70 0.854180259 1 [71,] 71 0.473332566 26 [72,] 72 0.666963465 12 [73,] 73 0.428522596 38 [74,] 74 0.616040842 18 [75,] 75 0.470121130 28 [76,] 76 0.695360887 9 [77,] 77 0.529795623 20 [78,] 78 0.448211361 34 [79,] 79 0.494033650 24 [80,] 80 0.362389662 40 [81,] 81 0.643533575 15 [82,] 82 0.723582546 5 [83,] 83 0.354941741 41 [84,] 84 0.459391239 30 [85,] 85 0.645278916 14 [86,] 86 0.432438191 37 edgegraph matrix included. 
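## A minimal illustrative sketch (an addition, not part of the example code in
## this transcript), assuming the objects `ran` and `ranked` just created above:
## column 1 of the printed ranking object holds the row index into `ran`,
## column 3 the rank order (NA for the query point 54; rank 1 = most relevant).
## It highlights the ten points ranked most relevant to the query; the original
## transcript resumes with the edgegraph() call below.
top10 <- ranked[order(ranked[, 3]), 1][1:10]              # indices of the 10 best-ranked points
plot(ran, col = "grey")
points(ran[top10, , drop = FALSE], col = "red", pch = 19) # most relevant points
points(ran[54, 1], ran[54, 2], col = "blue", pch = 17)    # the query point itself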
> edgegraph(ranked)[1:10,1:10] [,1] [,2] [,3] [,4] [,5] [,6] [,7] [,8] [,9] [,10] [1,] TRUE TRUE TRUE FALSE FALSE FALSE TRUE FALSE FALSE FALSE [2,] TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE [3,] TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE [4,] FALSE TRUE TRUE TRUE TRUE TRUE FALSE TRUE TRUE TRUE [5,] FALSE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE TRUE [6,] FALSE TRUE TRUE TRUE TRUE TRUE FALSE TRUE TRUE TRUE [7,] TRUE TRUE TRUE FALSE TRUE FALSE TRUE FALSE FALSE FALSE [8,] FALSE TRUE TRUE TRUE TRUE TRUE FALSE TRUE TRUE TRUE [9,] FALSE TRUE TRUE TRUE TRUE TRUE FALSE TRUE TRUE TRUE [10,] FALSE TRUE TRUE TRUE TRUE TRUE FALSE TRUE TRUE TRUE > > > > cleanEx(); ..nameEx <- "ranking" > > ### * ranking > > flush(stderr()); flush(stdout()) > > ### Name: ranking > ### Title: Ranking > ### Aliases: ranking ranking,matrix-method > ### Keywords: cluster classif > > ### ** Examples > > data(spirals) > > ## create data from spirals > ran <- spirals[rowSums(abs(spirals) < 0.55) == 2,] > > ## rank points according to similarity to the most upper left point > ranked <- ranking(ran, 54, kernel = "rbfdot", kpar = list(sigma = 100), edgegraph = TRUE) > ranked[54, 2] <- max(ranked[-54, 2]) > c<-1:86 > op <- par(mfrow = c(1, 2),pty="s") > plot(ran) > plot(ran, cex=c[ranked[,3]]/40) > > > > > graphics::par(get("par.postscript", env = .CheckExEnv)) > cleanEx(); ..nameEx <- "rvm-class" > > ### * rvm-class > > flush(stderr()); flush(stdout()) > > ### Name: rvm-class > ### Title: Class "rvm" > ### Aliases: rvm-class RVindex mlike nvar RVindex,rvm-method > ### alpha,rvm-method cross,rvm-method error,rvm-method fit,rvm-method > ### kcall,rvm-method kernelf,rvm-method kpar,rvm-method lev,rvm-method > ### mlike,rvm-method nvar,rvm-method type,rvm-method xmatrix,rvm-method > ### ymatrix,rvm-method > ### Keywords: classes > > ### ** Examples > > > # create data > x <- seq(-20,20,0.1) > y <- sin(x)/x + rnorm(401,sd=0.05) > > # train relevance vector machine > foo <- rvm(x, y) > foo Relevance Vector Machine object of class "rvm" Problem type: regression Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.1 Number of Relevance Vectors : 14 Variance : 0.002367829 Training error : 0.002291489 > > alpha(foo) [1] 1.5244052 -1.7134139 0.3483344 -0.3504250 0.4919927 -0.5525478 [7] 1.2745060 -0.6605399 0.3078342 -0.2401493 0.1497366 0.1333779 [13] -0.2298243 0.1429664 > RVindex(foo) [1] 33 35 62 96 127 155 203 235 270 313 340 341 368 400 > fit(foo) [,1] [1,] 0.0166049966 [2,] 0.0159130664 [3,] 0.0150542189 [4,] 0.0140231429 [5,] 0.0128161275 [6,] 0.0114312582 [7,] 0.0098686038 [8,] 0.0081303902 [9,] 0.0062211556 [10,] 0.0041478840 [11,] 0.0019201136 [12,] -0.0004499857 [13,] -0.0029475675 [14,] -0.0055551013 [15,] -0.0082524046 [16,] -0.0110167172 [17,] -0.0138228202 [18,] -0.0166431992 [19,] -0.0194482548 [20,] -0.0222065578 [21,] -0.0248851490 [22,] -0.0274498821 [23,] -0.0298658060 [24,] -0.0320975832 [25,] -0.0341099403 [26,] -0.0358681438 [27,] -0.0373384976 [28,] -0.0384888526 [29,] -0.0392891235 [30,] -0.0397118036 [31,] -0.0397324697 [32,] -0.0393302694 [33,] -0.0384883817 [34,] -0.0371944423 [35,] -0.0354409263 [36,] -0.0332254801 [37,] -0.0305511949 [38,] -0.0274268157 [39,] -0.0238668800 [40,] -0.0198917810 [41,] -0.0155277522 [42,] -0.0108067697 [43,] -0.0057663722 [44,] -0.0004493970 [45,] 0.0050963658 [46,] 0.0108185981 [47,] 0.0166609555 [48,] 0.0225636395 [49,] 0.0284640336 [50,] 0.0342973978 [51,] 0.0399976111 [52,] 0.0454979544 [53,] 0.0507319227 [54,] 0.0556340564 [55,] 0.0601407808 [56,] 0.0641912422 [57,] 0.0677281291 [58,] 0.0706984678 [59,] 0.0730543803 [60,] 0.0747537941 [61,] 0.0757610944 [62,] 0.0760477078 [63,] 0.0755926109 [64,] 0.0743827539 [65,] 0.0724133940 [66,] 0.0696883332 [67,] 0.0662200557 [68,] 0.0620297619 [69,] 0.0571472982 [70,] 0.0516109816 [71,] 0.0454673201 [72,] 0.0387706319 [73,] 0.0315825658 [74,] 0.0239715292 [75,] 0.0160120278 [76,] 0.0077839253 [77,] -0.0006283693 [78,] -0.0091367787 [79,] -0.0176504883 [80,] -0.0260769207 [81,] -0.0343227445 [82,] -0.0422949049 [83,] -0.0499016652 [84,] -0.0570536478 [85,] -0.0636648628 [86,] -0.0696537140 [87,] -0.0749439684 [88,] -0.0794656816 [89,] -0.0831560662 [90,] -0.0859602937 [91,] -0.0878322218 [92,] -0.0887350370 [93,] -0.0886418059 [94,] -0.0875359270 [95,] -0.0854114792 [96,] -0.0822734598 [97,] -0.0781379102 [98,] -0.0730319250 [99,] -0.0669935447 [100,] -0.0600715301 [101,] -0.0523250211 [102,] -0.0438230799 [103,] -0.0346441243 [104,] -0.0248752537 [105,] -0.0146114751 [106,] -0.0039548348 [107,] 0.0069865358 [108,] 0.0180994515 [109,] 0.0292667716 [110,] 0.0403685547 [111,] 0.0512832589 [112,] 0.0618889749 [113,] 0.0720646803 [114,] 0.0816915003 [115,] 0.0906539651 [116,] 0.0988412469 [117,] 0.1061483667 [118,] 0.1124773560 [119,] 0.1177383616 [120,] 0.1218506819 [121,] 0.1247437227 [122,] 0.1263578621 [123,] 0.1266452157 [124,] 0.1255702924 [125,] 0.1231105347 [126,] 0.1192567369 [127,] 0.1140133359 [128,] 0.1073985725 [129,] 0.0994445208 [130,] 0.0901969854 [131,] 0.0797152671 [132,] 0.0680718007 [133,] 0.0553516681 [134,] 0.0416519922 [135,] 0.0270812190 [136,] 0.0117582943 [137,] -0.0041882542 [138,] -0.0206213217 [139,] -0.0373963052 [140,] -0.0543622469 [141,] -0.0713630298 [142,] -0.0882386136 [143,] -0.1048262980 [144,] -0.1209620029 [145,] -0.1364815522 [146,] -0.1512219505 [147,] -0.1650226415 [148,] -0.1777267370 [149,] -0.1891822092 [150,] -0.1992430341 [151,] -0.2077702816 [152,] -0.2146331431 [153,] -0.2197098919 [154,] -0.2228887717 [155,] -0.2240688079 [156,] -0.2231605416 [157,] -0.2200866814 [158,] -0.2147826747 [159,] -0.2071971951 [160,] -0.1972925502 
[161,] -0.1850450066 [162,] -0.1704450365 [163,] -0.1534974861 [164,] -0.1342216678 [165,] -0.1126513784 [166,] -0.0888348451 [167,] -0.0628346009 [168,] -0.0347272911 [169,] -0.0046034124 [170,] 0.0274330147 [171,] 0.0612648395 [172,] 0.0967622397 [173,] 0.1337832459 [174,] 0.1721743314 [175,] 0.2117710679 [176,] 0.2523988472 [177,] 0.2938736706 [178,] 0.3360030038 [179,] 0.3785867005 [180,] 0.4214179916 [181,] 0.4642845410 [182,] 0.5069695655 [183,] 0.5492530168 [184,] 0.5909128230 [185,] 0.6317261840 [186,] 0.6714709176 [187,] 0.7099268476 [188,] 0.7468772284 [189,] 0.7821101960 [190,] 0.8154202360 [191,] 0.8466096578 [192,] 0.8754900620 [193,] 0.9018837895 [194,] 0.9256253377 [195,] 0.9465627284 [196,] 0.9645588160 [197,] 0.9794925168 [198,] 0.9912599478 [199,] 0.9997754596 [200,] 1.0049725477 [201,] 1.0052456869 [202,] 1.0002907218 [203,] 0.9919560857 [204,] 0.9802796053 [205,] 0.9653205387 [206,] 0.9471593469 [207,] 0.9258972786 [208,] 0.9016557691 [209,] 0.8745756567 [210,] 0.8448162212 [211,] 0.8125540505 [212,] 0.7779817471 [213,] 0.7413064831 [214,] 0.7027484193 [215,] 0.6625390023 [216,] 0.6209191568 [217,] 0.5781373911 [218,] 0.5344478347 [219,] 0.4901082290 [220,] 0.4453778900 [221,] 0.4005156657 [222,] 0.3557779079 [223,] 0.3114164799 [224,] 0.2676768189 [225,] 0.2247960747 [226,] 0.1830013401 [227,] 0.1425079920 [228,] 0.1035181585 [229,] 0.0662193251 [230,] 0.0307830922 [231,] -0.0026359055 [232,] -0.0339009095 [233,] -0.0628937725 [234,] -0.0895155140 [235,] -0.1136866933 [236,] -0.1353476022 [237,] -0.1544582773 [238,] -0.1709983385 [239,] -0.1849666571 [240,] -0.1963808632 [241,] -0.2052767007 [242,] -0.2117072400 [243,] -0.2157419605 [244,] -0.2174657167 [245,] -0.2169775993 [246,] -0.2143897072 [247,] -0.2098258443 [248,] -0.2034201565 [249,] -0.1953157228 [250,] -0.1856631165 [251,] -0.1746189508 [252,] -0.1623444217 [253,] -0.1490038637 [254,] -0.1347633295 [255,] -0.1197892067 [256,] -0.1042468820 [257,] -0.0882994635 [258,] -0.0721065698 [259,] -0.0558231937 [260,] -0.0395986488 [261,] -0.0235756016 [262,] -0.0078891979 [263,] 0.0073337176 [264,] 0.0219752821 [265,] 0.0359271976 [266,] 0.0490912136 [267,] 0.0613795121 [268,] 0.0727149944 [269,] 0.0830314738 [270,] 0.0922737758 [271,] 0.1003977510 [272,] 0.1073702030 [273,] 0.1131687380 [274,] 0.1177815396 [275,] 0.1212070746 [276,] 0.1234537358 [277,] 0.1245394265 [278,] 0.1244910936 [279,] 0.1233442136 [280,] 0.1211422384 [281,] 0.1179360060 [282,] 0.1137831205 [283,] 0.1087473086 [284,] 0.1028977553 [285,] 0.0963084251 [286,] 0.0890573725 [287,] 0.0812260461 [288,] 0.0728985901 [289,] 0.0641611471 [290,] 0.0551011659 [291,] 0.0458067168 [292,] 0.0363658186 [293,] 0.0268657796 [294,] 0.0173925550 [295,] 0.0080301245 [296,] -0.0011401081 [297,] -0.0100398899 [298,] -0.0185946674 [299,] -0.0267341039 [300,] -0.0343925655 [301,] -0.0415095709 [302,] -0.0480302044 [303,] -0.0539054889 [304,] -0.0590927154 [305,] -0.0635557288 [306,] -0.0672651656 [307,] -0.0701986420 [308,] -0.0723408913 [309,] -0.0736838467 [310,] -0.0742266703 [311,] -0.0739757245 [312,] -0.0729444874 [313,] -0.0711534091 [314,] -0.0686297112 [315,] -0.0654071274 [316,] -0.0615255885 [317,] -0.0570308516 [318,] -0.0519740758 [319,] -0.0464113479 [320,] -0.0404031599 [321,] -0.0340138438 [322,] -0.0273109667 [323,] -0.0203646919 [324,] -0.0132471121 [325,] -0.0060315587 [326,] 0.0012081030 [327,] 0.0083981898 [328,] 0.0154659170 [329,] 0.0223401102 [330,] 0.0289519049 [331,] 0.0352354245 [332,] 0.0411284319 [333,] 0.0465729454 [334,] 0.0515158135 
[335,] 0.0559092402 [336,] 0.0597112568 [337,] 0.0628861328 [338,] 0.0654047224 [339,] 0.0672447415 [340,] 0.0683909721 [341,] 0.0688353917 [342,] 0.0685772258 [343,] 0.0676229234 [344,] 0.0659860545 [345,] 0.0636871321 [346,] 0.0607533603 [347,] 0.0572183104 [348,] 0.0531215312 [349,] 0.0485080964 [350,] 0.0434280942 [351,] 0.0379360676 [352,] 0.0320904095 [353,] 0.0259527214 [354,] 0.0195871430 [355,] 0.0130596603 [356,] 0.0064374013 [357,] -0.0002120736 [358,] -0.0068214775 [359,] -0.0133244911 [360,] -0.0196564376 [361,] -0.0257549307 [362,] -0.0315604878 [363,] -0.0370171034 [364,] -0.0420727753 [365,] -0.0466799790 [366,] -0.0507960855 [367,] -0.0543837191 [368,] -0.0574110511 [369,] -0.0598520288 [370,] -0.0616865375 [371,] -0.0629004950 [372,] -0.0634858791 [373,] -0.0634406900 [374,] -0.0627688483 [375,] -0.0614800323 [376,] -0.0595894569 [377,] -0.0571175999 [378,] -0.0540898778 [379,] -0.0505362783 [380,] -0.0464909533 [381,] -0.0419917792 [382,] -0.0370798897 [383,] -0.0317991879 [384,] -0.0261958434 [385,] -0.0203177803 [386,] -0.0142141628 [387,] -0.0079348843 [388,] -0.0015300654 [389,] 0.0049504346 [390,] 0.0114574856 [391,] 0.0179431318 [392,] 0.0243610079 [393,] 0.0306667241 [394,] 0.0368182157 [395,] 0.0427760557 [396,] 0.0485037275 [397,] 0.0539678565 [398,] 0.0591384000 [399,] 0.0639887944 [400,] 0.0684960603 > kernelf(foo) Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.1 > nvar(foo) [1] 0.002367829 > > ## show slots > slotNames(foo) [1] "tol" "kernelf" "kpar" "kcall" "type" "kterms" [7] "xmatrix" "ymatrix" "fit" "lev" "nclass" "alpha" [13] "nvar" "mlike" "RVindex" "nRV" "cross" "error" [19] "n.action" > > > > > cleanEx(); ..nameEx <- "rvm" > > ### * rvm > > flush(stderr()); flush(stdout()) > > ### Name: rvm > ### Title: Relevance Vector Machine > ### Aliases: rvm rvm-methods rvm,formula-method rvm,vector-method > ### rvm,matrix-method show,rvm-method predict,rvm-method > ### Keywords: regression nonlinear > > ### ** Examples > > # create data > x <- seq(-20,20,0.1) > y <- sin(x)/x + rnorm(401,sd=0.05) > > # train relevance vector machine > foo <- rvm(x, y) > foo Relevance Vector Machine object of class "rvm" Problem type: regression Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 0.1 Number of Relevance Vectors : 14 Variance : 0.002367829 Training error : 0.002291489 > # print relevance vectors > alpha(foo) [1] 1.5244052 -1.7134139 0.3483344 -0.3504250 0.4919927 -0.5525478 [7] 1.2745060 -0.6605399 0.3078342 -0.2401493 0.1497366 0.1333779 [13] -0.2298243 0.1429664 > RVindex(foo) [1] 33 35 62 96 127 155 203 235 270 313 340 341 368 400 > > # predict and plot > ytest <- predict(foo, x) > plot(x, y, type ="l") > lines(x, ytest, col="red") > > > > cleanEx(); ..nameEx <- "sigest" > > ### * sigest > > flush(stderr()); flush(stdout()) > > ### Name: sigest > ### Title: Hyperparameter estimation for the Gaussian Radial Basis kernel > ### Aliases: sigest sigest,formula-method sigest,matrix-method > ### Keywords: classif regression > > ### ** Examples > > > ## estimate good sigma values for promotergene > data(promotergene) > srange <- sigest(Class~.,data = promotergene) > srange [1] 0.01603751 0.01388889 > > s <- sum(srange)/2 > s [1] 0.01496320 > ## create test and training set > ind <- sample(1:dim(promotergene)[1],20) > genetrain <- promotergene[-ind, ] > genetest <- promotergene[ind, ] > > ## train a support vector machine > gene <- ksvm(Class~.,data=genetrain,kernel="rbfdot",kpar=list(sigma = s),C=50,cross=3) > gene Support Vector Machine object of class "ksvm" SV type: C-svc (classification) parameter : cost C = 50 Gaussian Radial Basis kernel function. Hyperparameter : sigma = 0.0149631981831892 Number of Support Vectors : 78 Training error : 0 Cross validation error : 0.1289 > > ## predict gene type on the test set > mailtype <- predict(gene,genetest[,-1]) > > ## Check results > table(mailtype,genetest[,1]) mailtype + - + 8 1 - 0 11 > > > > cleanEx(); ..nameEx <- "specc-class" > > ### * specc-class > > flush(stderr()); flush(stdout()) > > ### Name: specc-class > ### Title: Class "specc" > ### Aliases: specc-class centers size withinss centers,specc-method > ### withinss,specc-method size,specc-method kernelf,specc-method > ### Keywords: classes > > ### ** Examples > > ## Cluster the spirals data set. > data(spirals) > > sc <- specc(spirals, centers=2) > > centers(sc) [,1] [,2] [1,] 0.01997201 -0.1761483 [2,] -0.01770984 0.1775137 > size(sc) [1] 150 150 > > > > cleanEx(); ..nameEx <- "specc" > > ### * specc > > flush(stderr()); flush(stdout()) > > ### Name: specc > ### Title: Spectral Clustering > ### Aliases: specc specc,matrix-method specc,formula-method > ### show,specc-method > ### Keywords: cluster > > ### ** Examples > > ## Cluster the spirals data set. > data(spirals) > > sc <- specc(spirals, centers=2) > > sc Spectral Clustering object of class "specc" Cluster memberships: 1 1 2 2 1 2 2 2 1 2 2 1 1 2 2 1 1 1 1 1 2 2 1 2 2 2 2 1 1 1 2 1 2 2 1 2 1 2 1 1 2 2 2 2 1 1 1 1 1 2 1 2 1 1 2 2 2 1 1 1 1 1 2 2 1 2 1 1 1 2 2 1 2 2 2 1 1 1 1 2 1 2 1 2 1 1 1 1 1 1 2 1 1 2 2 2 1 2 2 2 2 1 1 1 2 2 1 2 2 2 1 2 1 1 1 1 2 2 1 1 2 1 1 1 2 1 2 1 1 1 1 1 2 2 2 2 2 1 1 1 2 2 1 2 1 1 1 2 2 2 1 2 2 2 2 2 2 1 1 1 1 2 1 2 1 1 1 2 1 1 1 1 2 1 2 1 1 1 2 2 1 1 1 2 2 2 1 1 2 2 2 2 2 2 2 2 1 1 1 1 2 1 2 2 1 2 1 2 2 2 2 2 1 2 1 2 1 2 1 1 1 2 2 2 2 1 1 1 2 1 1 2 2 2 2 2 1 1 2 2 2 2 1 1 1 2 2 2 2 2 2 1 1 2 2 1 1 1 1 1 1 1 2 2 2 2 1 2 1 2 1 1 2 2 2 1 2 1 1 1 1 2 2 1 2 1 2 2 2 1 2 1 2 2 1 1 2 2 2 1 Gaussian Radial Basis kernel function. 
Hyperparameter : sigma = 737.378295712293 Centers: [,1] [,2] [1,] 0.01997201 -0.1761483 [2,] -0.01770984 0.1775137 Cluster size: [1] 150 150 Within-cluster sum of squares: [1] 117.3429 118.1182 > centers(sc) [,1] [,2] [1,] 0.01997201 -0.1761483 [2,] -0.01770984 0.1775137 > size(sc) [1] 150 150 > withinss(sc) [1] 117.3429 118.1182 > > plot(spirals, col=sc) > > > > > cleanEx(); ..nameEx <- "spirals" > > ### * spirals > > flush(stderr()); flush(stdout()) > > ### Name: spirals > ### Title: Spirals Dataset > ### Aliases: spirals > ### Keywords: datasets > > ### ** Examples > > data(spirals) > plot(spirals) > > > > ### *
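## A brief illustrative sketch (an addition rather than part of the package's
## example code), assuming kernlab is attached and that specc() accepts a
## kernel-parameter list via kpar as the other kernlab calls in this transcript
## do: rerun spectral clustering on the spirals data with an explicitly supplied
## RBF bandwidth instead of the automatically estimated one, then inspect the
## result with the same accessors used above. sigma = 200 is an arbitrary value
## chosen only for illustration.
data(spirals)
sc2 <- specc(spirals, centers = 2, kernel = "rbfdot", kpar = list(sigma = 200))
size(sc2)                 # points per cluster
withinss(sc2)             # within-cluster sum of squares
plot(spirals, col = sc2)  # colour the spiral points by cluster membership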