Last updated on 2015-11-20 00:47:32.
Flavor | Version | Tinstall (s) | Tcheck (s) | Ttotal (s) | Status | Flags
---|---|---|---|---|---|---
r-devel-linux-x86_64-debian-gcc | 0.9-22 | 11.62 | 82.58 | 94.20 | OK |
r-devel-linux-x86_64-fedora-clang | 0.9-22 | | | 190.89 | OK |
r-devel-linux-x86_64-fedora-gcc | 0.9-22 | | | 180.43 | OK |
r-devel-osx-x86_64-clang | 0.9-22 | | | 198.97 | OK |
r-devel-windows-ix86+x86_64 | 0.9-22 | 73.00 | 224.00 | 297.00 | OK |
r-patched-linux-x86_64 | 0.9-22 | 11.20 | 87.34 | 98.54 | OK |
r-patched-solaris-sparc | 0.9-22 | | | 1178.50 | ERROR |
r-patched-solaris-x86 | 0.9-22 | | | 220.20 | OK |
r-release-linux-x86_64 | 0.9-22 | 11.18 | 86.36 | 97.54 | OK |
r-release-osx-x86_64-mavericks | 0.9-22 | | | | OK |
r-release-windows-ix86+x86_64 | 0.9-22 | 53.00 | 197.00 | 250.00 | OK |
r-oldrel-windows-ix86+x86_64 | 0.9-22 | 61.00 | 235.00 | 296.00 | OK |
Memtest notes: valgrind
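The valgrind memtest note above refers to additional memory-access checking of the package. As a minimal sketch, assuming valgrind is installed and the kernlab 0.9-22 source tarball has been downloaded locally (neither is part of the results above), a comparable run can be started from an R session via the `--use-valgrind` option of `R CMD check`:

```r
## Minimal sketch, assuming valgrind is available and kernlab_0.9-22.tar.gz
## is in the working directory (both are assumptions, not part of this page).
## --use-valgrind makes R CMD check run the examples and tests under valgrind.
system("R CMD check --use-valgrind kernlab_0.9-22.tar.gz")
```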
Version: 0.9-22
Check: examples
Result: ERROR
Running examples in ‘kernlab-Ex.R’ failed
The error most likely occurred in:
> ### Name: ksvm
> ### Title: Support Vector Machines
> ### Aliases: ksvm ksvm,formula-method ksvm,vector-method ksvm,matrix-method
> ### ksvm,kernelMatrix-method ksvm,list-method show,ksvm-method
> ### coef,ksvm-method
> ### Keywords: methods regression nonlinear classif neural
>
> ### ** Examples
>
>
> ## simple example using the spam data set
> data(spam)
>
> ## create test and training set
> index <- sample(1:dim(spam)[1])
> spamtrain <- spam[index[1:floor(dim(spam)[1]/2)], ]
> spamtest <- spam[index[((ceiling(dim(spam)[1]/2)) + 1):dim(spam)[1]], ]
>
> ## train a support vector machine
> filter <- ksvm(type~.,data=spamtrain,kernel="rbfdot",
+ kpar=list(sigma=0.05),C=5,cross=3)
> filter
Support Vector Machine object of class "ksvm"
SV type: C-svc (classification)
parameter : cost C = 5
Gaussian Radial Basis kernel function.
Hyperparameter : sigma = 0.05
Number of Support Vectors : 922
Objective Function Value : -1073.61
Training error : 0.017391
Cross validation error : 0.09348
>
> ## predict mail type on the test set
> mailtype <- predict(filter,spamtest[,-58])
>
> ## Check results
> table(mailtype,spamtest[,58])
mailtype nonspam spam
nonspam 1349 104
spam 55 792
>
>
> ## Another example with the famous iris data
> data(iris)
>
> ## Create a kernel function using the built-in rbfdot function
> rbf <- rbfdot(sigma=0.1)
> rbf
Gaussian Radial Basis kernel function.
Hyperparameter : sigma = 0.1
>
> ## train a bound constraint support vector machine
> irismodel <- ksvm(Species~.,data=iris,type="C-bsvc",
+ kernel=rbf,C=10,prob.model=TRUE)
>
> irismodel
Support Vector Machine object of class "ksvm"
SV type: C-bsvc (classification)
parameter : cost C = 10
Gaussian Radial Basis kernel function.
Hyperparameter : sigma = 0.1
Number of Support Vectors : 32
Objective Function Value : -5.8442 -3.0652 -136.9786
Training error : 0.02
Probability model included.
>
> ## get fitted values
> fitted(irismodel)
[1] setosa setosa setosa setosa setosa setosa
[7] setosa setosa setosa setosa setosa setosa
[13] setosa setosa setosa setosa setosa setosa
[19] setosa setosa setosa setosa setosa setosa
[25] setosa setosa setosa setosa setosa setosa
[31] setosa setosa setosa setosa setosa setosa
[37] setosa setosa setosa setosa setosa setosa
[43] setosa setosa setosa setosa setosa setosa
[49] setosa setosa versicolor versicolor versicolor versicolor
[55] versicolor versicolor versicolor versicolor versicolor versicolor
[61] versicolor versicolor versicolor versicolor versicolor versicolor
[67] versicolor versicolor versicolor versicolor versicolor versicolor
[73] virginica versicolor versicolor versicolor versicolor virginica
[79] versicolor versicolor versicolor versicolor versicolor virginica
[85] versicolor versicolor versicolor versicolor versicolor versicolor
[91] versicolor versicolor versicolor versicolor versicolor versicolor
[97] versicolor versicolor versicolor versicolor virginica virginica
[103] virginica virginica virginica virginica virginica virginica
[109] virginica virginica virginica virginica virginica virginica
[115] virginica virginica virginica virginica virginica virginica
[121] virginica virginica virginica virginica virginica virginica
[127] virginica virginica virginica virginica virginica virginica
[133] virginica virginica virginica virginica virginica virginica
[139] virginica virginica virginica virginica virginica virginica
[145] virginica virginica virginica virginica virginica virginica
Levels: setosa versicolor virginica
>
> ## Test on the training set with probabilities as output
> predict(irismodel, iris[,-5], type="probabilities")
setosa versicolor virginica
[1,] 0.983846258 0.0093172557 0.006836487
[2,] 0.978645002 0.0136052278 0.007749770
[3,] 0.985595610 0.0076995180 0.006704872
[4,] 0.982113992 0.0101230747 0.007762933
[5,] 0.984928589 0.0083717855 0.006699626
[6,] 0.971826655 0.0187151799 0.009458166
[7,] 0.983692067 0.0085001724 0.007807760
[8,] 0.982711888 0.0100873934 0.007200719
[9,] 0.981979606 0.0100189253 0.008001469
[10,] 0.982355257 0.0105661519 0.007078591
[11,] 0.979825873 0.0125007307 0.007673396
[12,] 0.982833202 0.0096232859 0.007543512
[13,] 0.983558214 0.0096630828 0.006778704
[14,] 0.988782361 0.0049129924 0.006304647
[15,] 0.973550163 0.0172190287 0.009230808
[16,] 0.955485351 0.0315096808 0.013004969
[17,] 0.977381624 0.0144474484 0.008170928
[18,] 0.981469167 0.0110723459 0.007458488
[19,] 0.967226516 0.0221546049 0.010618879
[20,] 0.981151103 0.0112845970 0.007564300
[21,] 0.971439252 0.0190617313 0.009499017
[22,] 0.978727843 0.0130459743 0.008226183
[23,] 0.988154939 0.0053392397 0.006505821
[24,] 0.955673058 0.0314567092 0.012870233
[25,] 0.977557181 0.0134083536 0.009034466
[26,] 0.970756269 0.0200715043 0.009172227
[27,] 0.973605496 0.0168919244 0.009502579
[28,] 0.981406810 0.0112214032 0.007371787
[29,] 0.981698417 0.0110561685 0.007245414
[30,] 0.981128093 0.0109116188 0.007960288
[31,] 0.978128169 0.0136077379 0.008264094
[32,] 0.966327183 0.0231136774 0.010559140
[33,] 0.978848454 0.0130561800 0.008095366
[34,] 0.972843038 0.0180108522 0.009146110
[35,] 0.978901552 0.0132224703 0.007875978
[36,] 0.984464471 0.0089774116 0.006558117
[37,] 0.979050478 0.0130484480 0.007901074
[38,] 0.986722925 0.0069107848 0.006366291
[39,] 0.984769854 0.0078553995 0.007374746
[40,] 0.981576714 0.0110594132 0.007363873
[41,] 0.983690798 0.0092926974 0.007016504
[42,] 0.955555569 0.0314899424 0.012954489
[43,] 0.986087895 0.0066221473 0.007289958
[44,] 0.964813918 0.0232669357 0.011919147
[45,] 0.972417749 0.0177609147 0.009821336
[46,] 0.975610321 0.0157396494 0.008650030
[47,] 0.981867827 0.0107258304 0.007406343
[48,] 0.984750095 0.0080707392 0.007179166
[49,] 0.981256923 0.0113829477 0.007360130
[50,] 0.982983191 0.0099843630 0.007032446
[51,] 0.029252548 0.9432104200 0.027537032
[52,] 0.017040714 0.9574335995 0.025525686
[53,] 0.017020176 0.8611546378 0.121825186
[54,] 0.006868112 0.8938155097 0.099316378
[55,] 0.006423721 0.8683449893 0.125231289
[56,] 0.005398649 0.9434571513 0.051144199
[57,] 0.019806843 0.9084043471 0.071788810
[58,] 0.043000208 0.9383465157 0.018653276
[59,] 0.009467780 0.9704681774 0.020064043
[60,] 0.007999976 0.9358774058 0.056122619
[61,] 0.036784196 0.9007548094 0.062460994
[62,] 0.008183214 0.9651360624 0.026680723
[63,] 0.008031775 0.9825588169 0.009409409
[64,] 0.006137838 0.9049620860 0.088900076
[65,] 0.015797579 0.9797664145 0.004436006
[66,] 0.017961262 0.9700646712 0.011974067
[67,] 0.008236733 0.8923225638 0.099440704
[68,] 0.009546346 0.9870752888 0.003378365
[69,] 0.008713214 0.6167995983 0.374487188
[70,] 0.006872366 0.9844126186 0.008715016
[71,] 0.013435904 0.4992946437 0.487269452
[72,] 0.006986019 0.9876023016 0.005411680
[73,] 0.007755773 0.4123867345 0.579857493
[74,] 0.005602458 0.9644623579 0.029935184
[75,] 0.008957570 0.9831986713 0.007843758
[76,] 0.011983418 0.9729047249 0.015111857
[77,] 0.009051875 0.8914741144 0.099474011
[78,] 0.009980970 0.4002409694 0.589778061
[79,] 0.005995640 0.8916738239 0.102330536
[80,] 0.017227237 0.9788248594 0.003947903
[81,] 0.007736746 0.9796577941 0.012605460
[82,] 0.011303741 0.9819048019 0.006791457
[83,] 0.007016664 0.9881172203 0.004866115
[84,] 0.007498820 0.1372280131 0.855273167
[85,] 0.009482424 0.8697147495 0.120802826
[86,] 0.031451830 0.9376416179 0.030906553
[87,] 0.014454203 0.9213480591 0.064197738
[88,] 0.005342455 0.9225469306 0.072110614
[89,] 0.013052157 0.9792918529 0.007655990
[90,] 0.005335193 0.9447675024 0.049897305
[91,] 0.005444922 0.9349869163 0.059568162
[92,] 0.007834906 0.9504791047 0.041685989
[93,] 0.005303167 0.9853804132 0.009316420
[94,] 0.035158731 0.9460700446 0.018771225
[95,] 0.005137955 0.9610656311 0.033796414
[96,] 0.014202706 0.9803778900 0.005419404
[97,] 0.007889492 0.9799842032 0.012126305
[98,] 0.007804632 0.9830533604 0.009142008
[99,] 0.042477484 0.9454068281 0.012115688
[100,] 0.006314068 0.9803855909 0.013300341
[101,] 0.007991647 0.0010109304 0.990997422
[102,] 0.005332873 0.0114868076 0.983180320
[103,] 0.006409118 0.0030929276 0.990497954
[104,] 0.006063001 0.0136974481 0.980239551
[105,] 0.005072606 0.0009385997 0.993988794
[106,] 0.008713017 0.0022574179 0.989029565
[107,] 0.012024086 0.0897338046 0.898242109
[108,] 0.007997014 0.0058757465 0.986127240
[109,] 0.006810189 0.0056123930 0.987577418
[110,] 0.013033674 0.0096081804 0.977358146
[111,] 0.010189594 0.0898858563 0.899924550
[112,] 0.005596535 0.0113254728 0.983077992
[113,] 0.006245464 0.0063787840 0.987375752
[114,] 0.004750465 0.0045486277 0.990700908
[115,] 0.004392511 0.0006269814 0.994980507
[116,] 0.007839240 0.0064511667 0.985709594
[117,] 0.007266075 0.0366560505 0.956077875
[118,] 0.020632163 0.0324084174 0.946959419
[119,] 0.015081915 0.0024997039 0.982418381
[120,] 0.010873209 0.1700223871 0.819104404
[121,] 0.007034392 0.0033684608 0.989597148
[122,] 0.006062250 0.0143784108 0.979559339
[123,] 0.011227172 0.0028504988 0.985922329
[124,] 0.006991988 0.0916547112 0.901353301
[125,] 0.008594602 0.0139305109 0.977474887
[126,] 0.009212634 0.0311918684 0.959595498
[127,] 0.007466557 0.1531821316 0.839351312
[128,] 0.009006663 0.2104328651 0.780560472
[129,] 0.004567903 0.0012580956 0.994174002
[130,] 0.010834395 0.0998162682 0.889349337
[131,] 0.008677507 0.0061921326 0.985130360
[132,] 0.029915312 0.0845832428 0.885501445
[133,] 0.004417017 0.0006952928 0.994887690
[134,] 0.007765247 0.4270054764 0.565229277
[135,] 0.008325895 0.0898943692 0.901779736
[136,] 0.009732419 0.0035379575 0.986729623
[137,] 0.010964820 0.0062585086 0.982776671
[138,] 0.008377115 0.0544356248 0.937187260
[139,] 0.009313238 0.2649907012 0.725696061
[140,] 0.007373053 0.0153894226 0.977237524
[141,] 0.006184894 0.0013085317 0.992506575
[142,] 0.007904553 0.0122058254 0.979889621
[143,] 0.005332873 0.0114868076 0.983180320
[144,] 0.006578351 0.0018115477 0.991610102
[145,] 0.008166540 0.0021704423 0.989663018
[146,] 0.006400448 0.0043670753 0.989232477
[147,] 0.006266524 0.0215252549 0.972208221
[148,] 0.006936193 0.0247416542 0.968322153
[149,] 0.012534914 0.0171025695 0.970362516
[150,] 0.008746406 0.0986049013 0.892648693
>
>
> ## Demo of the plot function
> x <- rbind(matrix(rnorm(120),,2),matrix(rnorm(120,mean=3),,2))
> y <- matrix(c(rep(1,60),rep(-1,60)))
>
> svp <- ksvm(x,y,type="C-svc")
> plot(svp,data=x)
>
>
> ### Use kernelMatrix
> K <- as.kernelMatrix(crossprod(t(x)))
>
> svp2 <- ksvm(K, y, type="C-svc")
>
> svp2
Support Vector Machine object of class "ksvm"
SV type: C-svc (classification)
parameter : cost C = 1
[1] " Kernel matrix used as input."
Number of Support Vectors : 7
Objective Function Value : -4.3822
Training error : 0.008333
>
> # test data
> xtest <- rbind(matrix(rnorm(20),,2),matrix(rnorm(20,mean=3),,2))
> # test kernel matrix i.e. inner/kernel product of test data with
> # Support Vectors
>
> Ktest <- as.kernelMatrix(crossprod(t(xtest),t(x[SVindex(svp2), ])))
>
> predict(svp2, Ktest)
[1] 1 1 1 1 1 1 1 1 1 1 -1 -1 -1 -1 -1 -1 -1 -1 -1 -1
>
>
> #### Use custom kernel
>
> k <- function(x,y) {(sum(x*y) +1)*exp(-0.001*sum((x-y)^2))}
> class(k) <- "kernel"
>
> data(promotergene)
>
> ## train svm using custom kernel
> gene <- ksvm(Class~.,data=promotergene[c(1:20, 80:100),],kernel=k,
+ C=5,cross=5)
>
> gene
Support Vector Machine object of class "ksvm"
SV type: C-svc (classification)
parameter : cost C = 5
Number of Support Vectors : 41
Objective Function Value : -0.5191
Training error : 0
Cross validation error : 0.169444
>
>
> #### Use text with string kernels
> data(reuters)
> is(reuters)
[1] "list" "vector" "input" "listI" "lpinput" "output"
> tsv <- ksvm(reuters,rlabels,kernel="stringdot",
+ kpar=list(length=5),cross=3,C=10)
*** caught segfault ***
address 100d6058, cause 'memory not mapped'
Traceback:
1: .Call("stringtv", as.character(x[i]), as.character(x[i:length(x)]), as.integer(length(x) - i + 1), as.integer(nchar(x[i])), as.integer(nchar(x[i:length(x)])), as.integer(sktype), as.double(kpar(kernel)$lambda))
2: kernelMatrix(kernel, x[c(indexes[[i]], indexes[[j]])])
3: kernelMatrix(kernel, x[c(indexes[[i]], indexes[[j]])])
4: .local(x, ...)
5: ksvm(reuters, rlabels, kernel = "stringdot", kpar = list(length = 5), cross = 3, C = 10)
6: ksvm(reuters, rlabels, kernel = "stringdot", kpar = list(length = 5), cross = 3, C = 10)
aborting ...
Flavor: r-patched-solaris-sparc
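For reference, the crashing call can be isolated from the traceback above. Below is a minimal sketch, assuming kernlab 0.9-22 (whose `data(reuters)` provides the `reuters` texts and their `rlabels`), of the example that segfaulted on r-patched-solaris-sparc; per the results table it completed without error on all other flavors.

```r
## Minimal sketch of the failing example, taken from the traceback above.
## Assumes kernlab 0.9-22; data(reuters) provides 'reuters' and 'rlabels'.
library(kernlab)
data(reuters)
tsv <- ksvm(reuters, rlabels, kernel = "stringdot",
            kpar = list(length = 5), cross = 3, C = 10)
```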