matlab实现svm.zip

  • cyxdl
    Author
  • matlab
    Development tool
  • 289.7KB
    File size
  • zip
    File format
  • 0
    Favorites
  • 5 points
    Download cost
  • 0
    Downloads
  • 2022-04-21 16:36
    Upload date
matlab实现svm
matlab实现svm.zip
  • svm_routine
  • calcError.m
    1.9KB
  • epsSVM.m
    2.9KB
  • modsel_unbalanced.m
    2.9KB
  • getPatterns.m
    1.5KB
  • generateLibSVMcmd.m
    16.2KB
  • scale_func.m
    444B
  • getDefaultParam_libSVM.m
    1009B
  • classify_svm.m
    6.3KB
  • modsel.m
    13.3KB
  • libsvm-3.23
  • svmtrain.c
    11.5KB
  • libsvmread.mexw64
    14KB
  • svm_model_matlab.h
    201B
  • svm.def
    477B
  • svmtrain.mexw64
    67KB
  • svmpredict.c
    9.6KB
  • FAQ.html
    81.3KB
  • libsvmwrite.mexw64
    13KB
  • svm.cpp
    63.5KB
  • libsvmwrite.c
    2.3KB
  • svm-scale.c
    8.5KB
  • COPYRIGHT
    1.5KB
  • Makefile.win
    1.1KB
  • svm-train.c
    8.8KB
  • svm_model_matlab.c
    8KB
  • svmpredict.mexw64
    27.5KB
  • Makefile
    1.2KB
  • libsvmread.c
    4KB
  • svm.h
    3.3KB
  • libsvm.dll
    252KB
  • svm-predict.c
    5.4KB
  • heart_scale
    27KB
  • README
    9.6KB
  • make.m
    888B
  • envi
  • a3.dat
    10.1KB
  • a.dat
    384B
  • enviwrite.m
    701B
  • envihdrread.m
    3.9KB
  • enviinfo.m
    1.5KB
  • envidataread.m
    2.4KB
  • a3.dat.hdr
    170B
  • envidatawrite.m
    2.6KB
  • envihdrwrite.m
    1.2KB
  • envi_test.m
    319B
  • a.dat.hdr
    166B
  • enviread.m
    740B
Description
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include "svm.h"
#include "mex.h"
#include "svm_model_matlab.h"

#ifdef MX_API_VER
#if MX_API_VER < 0x07030000
typedef int mwIndex;
#endif
#endif

#define CMD_LEN 2048
#define Malloc(type,n) (type *)malloc((n)*sizeof(type))

void print_null(const char *s) {}
void print_string_matlab(const char *s) {mexPrintf(s);}

void exit_with_help()
{
    mexPrintf(
    "Usage: model = svmtrain(training_label_vector, training_instance_matrix, 'libsvm_options');\n"
    "libsvm_options:\n"
    "-s svm_type : set type of SVM (default 0)\n"
    "  0 -- C-SVC (multi-class classification)\n"
    "  1 -- nu-SVC (multi-class classification)\n"
    "  2 -- one-class SVM\n"
    "  3 -- epsilon-SVR (regression)\n"
    "  4 -- nu-SVR (regression)\n"
    "-t kernel_type : set type of kernel function (default 2)\n"
    "  0 -- linear: u'*v\n"
    "  1 -- polynomial: (gamma*u'*v + coef0)^degree\n"
    "  2 -- radial basis function: exp(-gamma*|u-v|^2)\n"
    "  3 -- sigmoid: tanh(gamma*u'*v + coef0)\n"
    "  4 -- precomputed kernel (kernel values in training_instance_matrix)\n"
    "-d degree : set degree in kernel function (default 3)\n"
    "-g gamma : set gamma in kernel function (default 1/num_features)\n"
    "-r coef0 : set coef0 in kernel function (default 0)\n"
    "-c cost : set the parameter C of C-SVC, epsilon-SVR, and nu-SVR (default 1)\n"
    "-n nu : set the parameter nu of nu-SVC, one-class SVM, and nu-SVR (default 0.5)\n"
    "-p epsilon : set the epsilon in loss function of epsilon-SVR (default 0.1)\n"
    "-m cachesize : set cache memory size in MB (default 100)\n"
    "-e epsilon : set tolerance of termination criterion (default 0.001)\n"
    "-h shrinking : whether to use the shrinking heuristics, 0 or 1 (default 1)\n"
    "-b probability_estimates : whether to train a SVC or SVR model for probability estimates, 0 or 1 (default 0)\n"
    "-wi weight : set the parameter C of class i to weight*C, for C-SVC (default 1)\n"
    "-v n: n-fold cross validation mode\n"
    "-q : quiet mode (no outputs)\n"
    );
}

// svm arguments
struct svm_parameter param;     // set by parse_command_line
struct svm_problem prob;        // set by read_problem
struct svm_model *model;
struct svm_node *x_space;
int cross_validation;
int nr_fold;

double do_cross_validation()
{
    int i;
    int total_correct = 0;
    double total_error = 0;
    double sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
    double *target = Malloc(double,prob.l);
    double retval = 0.0;

    svm_cross_validation(&prob,&param,nr_fold,target);
    if(param.svm_type == EPSILON_SVR || param.svm_type == NU_SVR)
    {
        for(i=0;i<prob.l;i++)
        {
            double y = prob.y[i];
            double v = target[i];
            total_error += (v-y)*(v-y);
            sumv += v;
            sumy += y;
            sumvv += v*v;
            sumyy += y*y;
            sumvy += v*y;
        }
        mexPrintf("Cross Validation Mean squared error = %g\n",total_error/prob.l);
        mexPrintf("Cross Validation Squared correlation coefficient = %g\n",
            ((prob.l*sumvy-sumv*sumy)*(prob.l*sumvy-sumv*sumy))/
            ((prob.l*sumvv-sumv*sumv)*(prob.l*sumyy-sumy*sumy))
            );
        retval = total_error/prob.l;
    }
    else
    {
        for(i=0;i<prob.l;i++)
            if(target[i] == prob.y[i])
                ++total_correct;
        mexPrintf("Cross Validation Accuracy = %g%%\n",100.0*total_correct/prob.l);
        retval = 100.0*total_correct/prob.l;
    }
    free(target);
    return retval;
}

// nrhs should be 3
int parse_command_line(int nrhs, const mxArray *prhs[], char *model_file_name)
{
    int i, argc = 1;
    char cmd[CMD_LEN];
    char *argv[CMD_LEN/2];
    void (*print_func)(const char *) = print_string_matlab; // default printing to matlab display

    // default values
    param.svm_type = C_SVC;
    param.kernel_type = RBF;
    param.degree = 3;
    param.gamma = 0;    // 1/num_features
    param.coef0 = 0;
    param.nu = 0.5;
    param.cache_size = 100;
    param.C = 1;
    param.eps = 1e-3;
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 0;
    param.nr_weight = 0;
    param.weight_label = NULL;
    param.weight = NULL;
    cross_validation = 0;

    if(nrhs <= 1)
        return 1;

    if(nrhs > 2)
    {
        // put options in argv[]
        mxGetString(prhs[2], cmd, mxGetN(prhs[2]) + 1);
        if((argv[argc] = strtok(cmd, " ")) != NULL)
            while((argv[++argc] = strtok(NULL, " ")) != NULL)
                ;
    }

    // parse options
    for(i=1;i<argc;i++)
    {
        if(argv[i][0] != '-') break;
        ++i;
        if(i>=argc && argv[i-1][1] != 'q')  // since option -q has no parameter
            return 1;
        switch(argv[i-1][1])
        {
            case 's':
                param.svm_type = atoi(argv[i]);
                break;
            case 't':
                param.kernel_type = atoi(argv[i]);
                break;
            case 'd':
                param.degree = atoi(argv[i]);
                break;
            case 'g':
                param.gamma = atof(argv[i]);
                break;
            case 'r':
                param.coef0 = atof(argv[i]);
                break;
            case 'n':
                param.nu = atof(argv[i]);
                break;
            case 'm':
                param.cache_size = atof(argv[i]);
                break;
            case 'c':
                param.C = atof(argv[i]);
                break;
            case 'e':
                param.eps = atof(argv[i]);
                break;
            case 'p':
                param.p = atof(argv[i]);
                break;
            case 'h':
                param.shrinking = atoi(argv[i]);
                break;
            case 'b':
                param.probability = atoi(argv[i]);
                break;
            case 'q':
                print_func = &print_null;
                i--;
                break;
            case 'v':
                cross_validation = 1;
                nr_fold = atoi(argv[i]);
                if(nr_fold < 2)
                {
                    mexPrintf("n-fold cross validation: n must >= 2\n");
                    return 1;
                }
                break;
            case 'w':
                ++param.nr_weight;
                param.weight_label = (int *)realloc(param.weight_label,sizeof(int)*param.nr_weight);
                param.weight = (double *)realloc(param.weight,sizeof(double)*param.nr_weight);
                param.weight_label[param.nr_weight-1] = atoi(&argv[i-1][2]);
                param.weight[param.nr_weight-1] = atof(argv[i]);
                break;
            default:
                mexPrintf("Unknown option -%c\n", argv[i-1][1]);
                return 1;
        }
    }

    svm_set_print_string_function(print_func);

    return 0;
}

// read in a problem (in svmlight format)
int read_problem_dense(const mxArray *label_vec, const mxArray *instance_mat)
{
    // using size_t due to the output type of matlab functions
    size_t i, j, k, l;
    size_t elements, max_index, sc, label_vector_row_num;
    double *samples, *labels;

    prob.x = NULL;
    prob.y = NULL;
    x_space = NULL;

    labels = mxGetPr(label_vec);
    samples = mxGetPr(instance_mat);
    sc = mxGetN(instance_mat);

    elements = 0;
    // number of instances
    l = mxGetM(instance_mat);
    label_vector_row_num = mxGetM(label_vec);
    prob.l = (int)l;

    if(label_vector_row_num!=l)
    {
        mexPrintf("Length of label vector does not match # of instances.\n");
        return -1;
    }

    if(param.kernel_type == PRECOMPUTED)
        elements = l * (sc + 1);
    else
    {
        for(i = 0; i < l; i++)
        {
            for(k = 0; k < sc; k++)
                if(samples[k * l + i] != 0)
                    elements++;
            // count the '-1' element
            elements++;
        }
    }

    prob.y = Malloc(double,l);
    prob.x = Malloc(struct svm_node *,l);
    x_space = Malloc(struct svm_node, elements);

    max_index = sc;
    j = 0;
    for(i = 0; i < l; i++)
    {
        prob.x[i] = &x_space[j];
        prob.y[i] = labels[i];

        for(k = 0; k < sc; k++)
        {
            if(param.kernel_type == PRECOMPUTED || samples[k * l + i] != 0)
            {
                x_space[j].index = (int)k + 1;
                x_space[j].value = samples[k * l + i];
                j++;
            }
        }
        x_space[j++].index = -1;
    }

    if(param.gamma == 0 && max_index > 0)
        param.gamma = (double)(1.0/max_index);

    if(param.kernel_type == PRECOMPUTED)
        for(i=0;i<l;i++)
        {
            if((int)prob.x[i][0].value <= 0 || (int)prob.x[i][0].value > (int)max_index)
            {
                mexPrintf("Wrong input format: sample_serial_number out of range\n");
                return -1;
            }
        }

    return 0;
}

int read_problem_sparse(const mxArray *label_vec, const mxArray *instance_mat)
{
    mwIndex *ir, *jc, low, high, k;
    // using size_t due to the output type of matlab functions
    size_t i, j, l, elements, max_index, label_vector_row_num;
    mwSize num_samples;
    double *samples, *labels;
    mxArray *instance_mat_col; // transposed instance sparse matrix

    prob.x = NULL;
    prob.y = NULL;
    x_space = NULL;

    // transpose instance matrix
    {
        mxArray *prhs[1], *plhs[1];
        prhs[0] = mxDuplicateArray(instance_mat);
        if(mexCallMATLAB
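The usage text in the preview above documents the options accepted by the compiled svmtrain MEX function. As a minimal sketch (not taken from this package's own scripts), training and testing on the bundled heart_scale data from MATLAB could look like the following, assuming the libsvm-3.23 MEX binaries are on the MATLAB path; the -c/-g values are illustrative, not tuned:

% Minimal sketch of calling the libsvm MEX interface; assumes libsvm-3.23
% (svmtrain.mexw64, svmpredict.mexw64, libsvmread.mexw64) is on the path.
[labels, features] = libsvmread('heart_scale');                % sample data shipped with libsvm
model = svmtrain(labels, features, '-s 0 -t 2 -c 1 -g 0.07');  % C-SVC with RBF kernel (illustrative values)
[pred, acc, dec] = svmpredict(labels, features, model);        % acc(1) is the accuracy in percent

In practice the -c and -g parameters are chosen by cross validation (the -v option above), which is what the modsel.m and modsel_unbalanced.m scripts in svm_routine appear to be for.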
    Related downloads
    • Svm.rar
      Libsvm-FarutoUltimate3.0, based on libsvm-mat-2.89-3; written by an expert at Shanghai Jiao Tong University, an excellent learning resource.
    • SVM.rar
      An SVM implemented in Python code; corrections are welcome if there are any errors.
    • RSVM
      RSVM: source code implementing the algorithm of R. Collobert, F. Sinz, J. Weston, L. Bottou, "Trading Convexity for Scalability," Proc. 23rd Int. Conf. Mach. Learn., 2006, pp. 201-208. If you use this code for research, please ...
    • svm opencv
      Sort the images into "intact" and "damaged" folders, rename everything in the intact folder to 1, everything in the damaged folder to 2, and everything in the test folder to 3; after renaming, run the project to train the SVM classifier, produce the model svm.xml, and classify the test images.
    • SVM.rar
      SVM machine learning
    • SVM.rar
      Crop yield prediction using SVM
    • 5.1SVM.rar
      Test code for a support vector machine together with the corresponding run results.
    • SVM.rar
      SVM functions for training and testing; includes a large number of programs to choose from.
    • SVM.rar
      Support vector machine code that can be used to apply an SVM to data for analysis.
    • libiconv-1.1.tar.gz
      A character-set conversion program.