pktools  2.6.3
Processing Kernel for geospatial data
pksvm.cc
1 /**********************************************************************
2 pksvm.cc: classify raster image using Support Vector Machine
3 Copyright (C) 2008-2014 Pieter Kempeneers
4 
5 This file is part of pktools
6 
7 pktools is free software: you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation, either version 3 of the License, or
10 (at your option) any later version.
11 
12 pktools is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16 
17 You should have received a copy of the GNU General Public License
18 along with pktools. If not, see <http://www.gnu.org/licenses/>.
19 ***********************************************************************/
20 #include <stdlib.h>
21 #include <vector>
22 #include <map>
23 #include <algorithm>
24 #include "imageclasses/ImgReaderGdal.h"
25 #include "imageclasses/ImgWriterGdal.h"
26 #include "imageclasses/ImgReaderOgr.h"
27 #include "imageclasses/ImgWriterOgr.h"
28 #include "base/Optionpk.h"
29 #include "base/PosValue.h"
30 #include "algorithms/ConfusionMatrix.h"
31 #include "algorithms/svm.h"
32 
33 #ifdef HAVE_CONFIG_H
34 #include <config.h>
35 #endif
36 
37 namespace svm{
38  enum SVM_TYPE {C_SVC=0, nu_SVC=1,one_class=2, epsilon_SVR=3, nu_SVR=4};
39  enum KERNEL_TYPE {linear=0,polynomial=1,radial=2,sigmoid=3};
40 }
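 //these enumerator values match the libsvm constants defined in svm.h (C_SVC..nu_SVR, linear..sigmoid), so the parsed command line options can be assigned directly to svm_parameter::svm_type and kernel_type below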
41 
42 #define Malloc(type,n) (type *)malloc((n)*sizeof(type))
43 
44 using namespace std;
45 
46 int main(int argc, char *argv[])
47 {
48  vector<double> priors;
49 
50  //--------------------------- command line options ------------------------------------
51  Optionpk<string> input_opt("i", "input", "input image");
52  Optionpk<string> training_opt("t", "training", "Training vector file. A single vector file contains all training features (must be set as: b0, b1, b2,...) for all classes (class numbers identified by label option). Use multiple training files for bootstrap aggregation (alternative to the bag and bsize options, where a random subset is taken from a single training file)");
53  Optionpk<string> tlayer_opt("tln", "tln", "Training layer name(s)");
54  Optionpk<string> label_opt("label", "label", "Attribute name for class label in training vector file.","label");
55  Optionpk<unsigned int> balance_opt("bal", "balance", "Balance the input data to this number of samples for each class", 0);
56  Optionpk<bool> random_opt("random", "random", "Randomize training data for balancing and bagging", true, 2);
57  Optionpk<int> minSize_opt("min", "min", "If number of training pixels is less than min, do not take this class into account (0: consider all classes)", 0);
58  Optionpk<short> band_opt("b", "band", "Band index (starting from 0, either use band option or use start to end)");
59  Optionpk<double> bstart_opt("s", "start", "Start band sequence number",0);
60  Optionpk<double> bend_opt("e", "end", "End band sequence number (set to 0 to include all bands)", 0);
61  Optionpk<double> offset_opt("\0", "offset", "Offset value for each spectral band input features: refl[band]=(DN[band]-offset[band])/scale[band]", 0.0);
62  Optionpk<double> scale_opt("\0", "scale", "Scale value for each spectral band input features: refl=(DN[band]-offset[band])/scale[band] (use 0 to autoscale min and max in each band to -1.0 and 1.0)", 0.0);
63  Optionpk<double> priors_opt("prior", "prior", "Prior probabilities for each class (e.g., -p 0.3 -p 0.3 -p 0.2 ). Used for input only (ignored for cross validation)", 0.0);
64  Optionpk<string> priorimg_opt("pim", "priorimg", "Prior probability image (multi-band img with band for each class)","",2);
65  Optionpk<unsigned short> cv_opt("cv", "cv", "N-fold cross validation mode",0);
66  Optionpk<std::string> svm_type_opt("svmt", "svmtype", "Type of SVM (C_SVC, nu_SVC,one_class, epsilon_SVR, nu_SVR)","C_SVC");
67  Optionpk<std::string> kernel_type_opt("kt", "kerneltype", "Type of kernel function (linear,polynomial,radial,sigmoid) ","radial");
68  Optionpk<unsigned short> kernel_degree_opt("kd", "kd", "Degree in kernel function",3);
69  Optionpk<float> gamma_opt("g", "gamma", "Gamma in kernel function",1.0);
70  Optionpk<float> coef0_opt("c0", "coef0", "Coef0 in kernel function",0);
71  Optionpk<float> ccost_opt("cc", "ccost", "The parameter C of C_SVC, epsilon_SVR, and nu_SVR",1000);
72  Optionpk<float> nu_opt("nu", "nu", "The parameter nu of nu_SVC, one_class SVM, and nu_SVR",0.5);
73  Optionpk<float> epsilon_loss_opt("eloss", "eloss", "The epsilon in loss function of epsilon_SVR",0.1);
74  Optionpk<int> cache_opt("cache", "cache", "Cache memory size in MB",100);
75  Optionpk<float> epsilon_tol_opt("etol", "etol", "The tolerance of termination criterion",0.001);
76  Optionpk<bool> shrinking_opt("shrink", "shrink", "Whether to use the shrinking heuristics",false);
77  Optionpk<bool> prob_est_opt("pe", "probest", "Whether to train a SVC or SVR model for probability estimates",true,2);
78  // Optionpk<bool> weight_opt("wi", "wi", "Set the parameter C of class i to weight*C, for C_SVC",true);
79  Optionpk<unsigned short> comb_opt("comb", "comb", "How to combine bootstrap aggregation classifiers (0: sum rule, 1: product rule, 2: max rule). Also used to aggregate classes with rc option.",0);
80  Optionpk<unsigned short> bag_opt("bag", "bag", "Number of bootstrap aggregations", 1);
81  Optionpk<int> bagSize_opt("bagsize", "bagsize", "Percentage of features used from available training features for each bootstrap aggregation (one size for all classes, or a different size for each class respectively)", 100);
82  Optionpk<string> classBag_opt("cb", "classbag", "Output for each individual bootstrap aggregation");
83  Optionpk<string> mask_opt("m", "mask", "Use the first band of the specified file as a validity mask. Nodata values can be set with the option msknodata.");
84  Optionpk<short> msknodata_opt("msknodata", "msknodata", "Mask value(s) not to consider for classification (use negative values if only these values should be taken into account). These values are copied into the classification image.", 0);
85  Optionpk<unsigned short> nodata_opt("nodata", "nodata", "Nodata value to put where image is masked as nodata", 0);
86  Optionpk<string> output_opt("o", "output", "Output classification image");
87  Optionpk<string> oformat_opt("of", "oformat", "Output image format (see also gdal_translate). Empty string: inherit from input image");
88  Optionpk<string> option_opt("co", "co", "Creation option for output file. Multiple options can be specified.");
89  Optionpk<string> colorTable_opt("ct", "ct", "Color table in ASCII format having 5 columns: id R G B ALFA (0: transparent, 255: solid)");
90  Optionpk<string> prob_opt("prob", "prob", "Probability image.");
91  Optionpk<string> entropy_opt("entropy", "entropy", "Entropy image (measure for uncertainty of classifier output)","",2);
92  Optionpk<string> active_opt("active", "active", "Ogr output for active training sample.","",2);
93  Optionpk<string> ogrformat_opt("f", "f", "Output ogr format for active training sample","SQLite");
94  Optionpk<unsigned int> nactive_opt("na", "nactive", "Number of active training points",1);
95  Optionpk<string> classname_opt("c", "class", "List of class names.");
96  Optionpk<short> classvalue_opt("r", "reclass", "List of class values (use same order as in class opt).");
97  Optionpk<short> verbose_opt("v", "verbose", "Verbose level",0,2);
98 
99  band_opt.setHide(1);
100  bstart_opt.setHide(1);
101  bend_opt.setHide(1);
102  balance_opt.setHide(1);
103  minSize_opt.setHide(1);
104  bag_opt.setHide(1);
105  bagSize_opt.setHide(1);
106  comb_opt.setHide(1);
107  classBag_opt.setHide(1);
108  prob_opt.setHide(1);
109  priorimg_opt.setHide(1);
110  offset_opt.setHide(1);
111  scale_opt.setHide(1);
112  svm_type_opt.setHide(1);
113  kernel_type_opt.setHide(1);
114  kernel_degree_opt.setHide(1);
115  coef0_opt.setHide(1);
116  nu_opt.setHide(1);
117  epsilon_loss_opt.setHide(1);
118  cache_opt.setHide(1);
119  epsilon_tol_opt.setHide(1);
120  shrinking_opt.setHide(1);
121  prob_est_opt.setHide(1);
122  entropy_opt.setHide(1);
123  active_opt.setHide(1);
124  nactive_opt.setHide(1);
125  verbose_opt.setHide(1);
126  random_opt.setHide(1);
127 
128  bool doProcess;//stop process when program was invoked with help option (-h --help)
129  try{
130  doProcess=training_opt.retrieveOption(argc,argv);
131  input_opt.retrieveOption(argc,argv);
132  output_opt.retrieveOption(argc,argv);
133  cv_opt.retrieveOption(argc,argv);
134  tlayer_opt.retrieveOption(argc,argv);
135  classname_opt.retrieveOption(argc,argv);
136  classvalue_opt.retrieveOption(argc,argv);
137  oformat_opt.retrieveOption(argc,argv);
138  ogrformat_opt.retrieveOption(argc,argv);
139  option_opt.retrieveOption(argc,argv);
140  colorTable_opt.retrieveOption(argc,argv);
141  label_opt.retrieveOption(argc,argv);
142  priors_opt.retrieveOption(argc,argv);
143  gamma_opt.retrieveOption(argc,argv);
144  ccost_opt.retrieveOption(argc,argv);
145  mask_opt.retrieveOption(argc,argv);
146  msknodata_opt.retrieveOption(argc,argv);
147  nodata_opt.retrieveOption(argc,argv);
148  // Advanced options
149  band_opt.retrieveOption(argc,argv);
150  bstart_opt.retrieveOption(argc,argv);
151  bend_opt.retrieveOption(argc,argv);
152  balance_opt.retrieveOption(argc,argv);
153  minSize_opt.retrieveOption(argc,argv);
154  bag_opt.retrieveOption(argc,argv);
155  bagSize_opt.retrieveOption(argc,argv);
156  comb_opt.retrieveOption(argc,argv);
157  classBag_opt.retrieveOption(argc,argv);
158  prob_opt.retrieveOption(argc,argv);
159  priorimg_opt.retrieveOption(argc,argv);
160  offset_opt.retrieveOption(argc,argv);
161  scale_opt.retrieveOption(argc,argv);
162  svm_type_opt.retrieveOption(argc,argv);
163  kernel_type_opt.retrieveOption(argc,argv);
164  kernel_degree_opt.retrieveOption(argc,argv);
165  coef0_opt.retrieveOption(argc,argv);
166  nu_opt.retrieveOption(argc,argv);
167  epsilon_loss_opt.retrieveOption(argc,argv);
168  cache_opt.retrieveOption(argc,argv);
169  epsilon_tol_opt.retrieveOption(argc,argv);
170  shrinking_opt.retrieveOption(argc,argv);
171  prob_est_opt.retrieveOption(argc,argv);
172  entropy_opt.retrieveOption(argc,argv);
173  active_opt.retrieveOption(argc,argv);
174  nactive_opt.retrieveOption(argc,argv);
175  verbose_opt.retrieveOption(argc,argv);
176  random_opt.retrieveOption(argc,argv);
177  }
178  catch(string predefinedString){
179  std::cout << predefinedString << std::endl;
180  exit(0);
181  }
182  if(!doProcess){
183  cout << endl;
184  cout << "Usage: pksvm -t training [-i input -o output] [-cv value]" << endl;
185  cout << endl;
186  std::cout << "short option -h shows basic options only, use long option --help to show all options" << std::endl;
187  exit(0);//help was invoked, stop processing
188  }
189 
190  if(entropy_opt[0]=="")
191  entropy_opt.clear();
192  if(active_opt[0]=="")
193  active_opt.clear();
194  if(priorimg_opt[0]=="")
195  priorimg_opt.clear();
196 
197 
198  std::map<std::string, svm::SVM_TYPE> svmMap;
199 
200  svmMap["C_SVC"]=svm::C_SVC;
201  svmMap["nu_SVC"]=svm::nu_SVC;
202  svmMap["one_class"]=svm::one_class;
203  svmMap["epsilon_SVR"]=svm::epsilon_SVR;
204  svmMap["nu_SVR"]=svm::nu_SVR;
205 
206  std::map<std::string, svm::KERNEL_TYPE> kernelMap;
207 
208  kernelMap["linear"]=svm::linear;
209  kernelMap["polynomial"]=svm::polynomial;
210  kernelMap["radial"]=svm::radial;
211  kernelMap["sigmoid"]=svm::sigmoid;
212 
213  assert(training_opt.size());
214 
215  if(verbose_opt[0]>=1){
216  if(input_opt.size())
217  std::cout << "input filename: " << input_opt[0] << std::endl;
218  if(mask_opt.size())
219  std::cout << "mask filename: " << mask_opt[0] << std::endl;
220  std::cout << "training vector file: " << std::endl;
221  for(int ifile=0;ifile<training_opt.size();++ifile)
222  std::cout << training_opt[ifile] << std::endl;
223  std::cout << "verbose: " << verbose_opt[0] << std::endl;
224  }
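 //number of bags: each additional training file acts as its own bag; with a single training file, bag_opt[0] bags are built from random subsets of the training pixels (see the bagsize option)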
225  unsigned short nbag=(training_opt.size()>1)?training_opt.size():bag_opt[0];
226  if(verbose_opt[0]>=1)
227  std::cout << "number of bootstrap aggregations: " << nbag << std::endl;
228 
229  ImgWriterOgr activeWriter;
230  if(active_opt.size()){
231  prob_est_opt[0]=true;
232  ImgReaderOgr trainingReader(training_opt[0]);
233  activeWriter.open(active_opt[0],ogrformat_opt[0]);
234  activeWriter.createLayer(active_opt[0],trainingReader.getProjection(),wkbPoint,NULL);
235  activeWriter.copyFields(trainingReader);
236  }
237  vector<PosValue> activePoints(nactive_opt[0]);
238  for(int iactive=0;iactive<activePoints.size();++iactive){
239  activePoints[iactive].value=1.0;
240  activePoints[iactive].posx=0.0;
241  activePoints[iactive].posy=0.0;
242  }
243 
244  unsigned int totalSamples=0;
245  unsigned int nactive=0;
246  vector<struct svm_model*> svm(nbag);
247  vector<struct svm_parameter> param(nbag);
248 
249  short nclass=0;
250  int nband=0;
251  int startBand=2;//first two bands represent X and Y pos
252 
253  //normalize priors from command line
254  if(priors_opt.size()>1){//priors from argument list
255  priors.resize(priors_opt.size());
256  double normPrior=0;
257  for(short iclass=0;iclass<priors_opt.size();++iclass){
258  priors[iclass]=priors_opt[iclass];
259  normPrior+=priors[iclass];
260  }
261  //normalize
262  for(short iclass=0;iclass<priors_opt.size();++iclass)
263  priors[iclass]/=normPrior;
264  }
265 
266  //sort bands
267  if(band_opt.size())
268  std::sort(band_opt.begin(),band_opt.end());
269 
270  map<string,short> classValueMap;
271  vector<std::string> nameVector;
272  if(classname_opt.size()){
273  assert(classname_opt.size()==classvalue_opt.size());
274  for(int iclass=0;iclass<classname_opt.size();++iclass)
275  classValueMap[classname_opt[iclass]]=classvalue_opt[iclass];
276  }
277 
278  //----------------------------------- Training -------------------------------
279  ConfusionMatrix cm;
280  vector< vector<double> > offset(nbag);
281  vector< vector<double> > scale(nbag);
282  map<string,Vector2d<float> > trainingMap;
283  vector< Vector2d<float> > trainingPixels;//[class][sample][band]
284  vector<string> fields;
285 
286  vector<struct svm_problem> prob(nbag);
287  vector<struct svm_node *> x_space(nbag);
288 
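 //train one SVM per bag; during classification the per-bag results are combined according to comb_opt (sum, product or max rule)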
289  for(int ibag=0;ibag<nbag;++ibag){
290  //organize training data
291  if(ibag<training_opt.size()){//if bag contains new training pixels
292  trainingMap.clear();
293  trainingPixels.clear();
294  if(verbose_opt[0]>=1)
295  std::cout << "reading training vector file " << training_opt[ibag] << std::endl;
296  try{
297  ImgReaderOgr trainingReaderBag(training_opt[ibag]);
298  if(band_opt.size())
299  totalSamples=trainingReaderBag.readDataImageOgr(trainingMap,fields,band_opt,label_opt[0],tlayer_opt,verbose_opt[0]);
300  else
301  totalSamples=trainingReaderBag.readDataImageOgr(trainingMap,fields,bstart_opt[0],bend_opt[0],label_opt[0],tlayer_opt,verbose_opt[0]);
302  if(trainingMap.size()<2){
303  string errorstring="Error: could not read at least two classes from training file, did you provide class labels in training sample (see option label)?";
304  throw(errorstring);
305  }
306  trainingReaderBag.close();
307  }
308  catch(string error){
309  cerr << error << std::endl;
310  exit(1);
311  }
312  catch(std::exception& e){
313  std::cerr << "Error: ";
314  std::cerr << e.what() << std::endl;
315  std::cerr << CPLGetLastErrorMsg() << std::endl;
316  exit(1);
317  }
318  catch(...){
319  cerr << "error caught" << std::endl;
320  exit(1);
321  }
322 
323  //convert map to vector
324  // short iclass=0;
325  if(verbose_opt[0]>1)
326  std::cout << "training pixels: " << std::endl;
327  map<string,Vector2d<float> >::iterator mapit=trainingMap.begin();
328  while(mapit!=trainingMap.end()){
329  //delete small classes
330  if((mapit->second).size()<minSize_opt[0]){
331  trainingMap.erase(mapit++);//post-increment: advance the iterator before erase() invalidates it
332  continue;
333  }
334  trainingPixels.push_back(mapit->second);
335  if(verbose_opt[0]>1)
336  std::cout << mapit->first << ": " << (mapit->second).size() << " samples" << std::endl;
337  ++mapit;
338  }
339  if(!ibag){
340  nclass=trainingPixels.size();
341  if(classname_opt.size())
342  assert(nclass==classname_opt.size());
343  nband=trainingPixels[0][0].size()-2;//first two values in each sample are the X and Y position
344  }
345  else{
346  assert(nclass==trainingPixels.size());
347  assert(nband==trainingPixels[0][0].size()-2);
348  }
349 
350  //do not remove outliers here: could easily be obtained through ogr2ogr -where 'B2<110' output.shp input.shp
351  //balance training data
352  if(balance_opt[0]>0){
353  while(balance_opt.size()<nclass)
354  balance_opt.push_back(balance_opt.back());
355  if(random_opt[0])
356  srand(time(NULL));
357  totalSamples=0;
358  for(short iclass=0;iclass<nclass;++iclass){
359  if(trainingPixels[iclass].size()>balance_opt[iclass]){
360  while(trainingPixels[iclass].size()>balance_opt[iclass]){
361  int index=rand()%trainingPixels[iclass].size();
362  trainingPixels[iclass].erase(trainingPixels[iclass].begin()+index);
363  }
364  }
365  else{
366  int oldsize=trainingPixels[iclass].size();
367  for(int isample=trainingPixels[iclass].size();isample<balance_opt[iclass];++isample){
368  int index = rand()%oldsize;
369  trainingPixels[iclass].push_back(trainingPixels[iclass][index]);
370  }
371  }
372  totalSamples+=trainingPixels[iclass].size();
373  }
374  }
375 
376  //set scale and offset
377  offset[ibag].resize(nband);
378  scale[ibag].resize(nband);
379  if(offset_opt.size()>1)
380  assert(offset_opt.size()==nband);
381  if(scale_opt.size()>1)
382  assert(scale_opt.size()==nband);
383  for(int iband=0;iband<nband;++iband){
384  if(verbose_opt[0]>=1)
385  std::cout << "scaling for band " << iband << std::endl;
386  offset[ibag][iband]=(offset_opt.size()==1)?offset_opt[0]:offset_opt[iband];
387  scale[ibag][iband]=(scale_opt.size()==1)?scale_opt[0]:scale_opt[iband];
388  //search for min and maximum to autoscale this band into [-1,1]
389  if(scale[ibag][iband]<=0){
390  float theMin=trainingPixels[0][0][iband+startBand];
391  float theMax=trainingPixels[0][0][iband+startBand];
392  for(short iclass=0;iclass<nclass;++iclass){
393  for(int isample=0;isample<trainingPixels[iclass].size();++isample){
394  if(theMin>trainingPixels[iclass][isample][iband+startBand])
395  theMin=trainingPixels[iclass][isample][iband+startBand];
396  if(theMax<trainingPixels[iclass][isample][iband+startBand])
397  theMax=trainingPixels[iclass][isample][iband+startBand];
398  }
399  }
400  offset[ibag][iband]=theMin+(theMax-theMin)/2.0;
401  scale[ibag][iband]=(theMax-theMin)/2.0;
402  if(verbose_opt[0]>=1){
403  std::cout << "Extreme image values for band " << iband << ": [" << theMin << "," << theMax << "]" << std::endl;
404  std::cout << "Using offset, scale: " << offset[ibag][iband] << ", " << scale[ibag][iband] << std::endl;
405  std::cout << "scaled values for band " << iband << ": [" << (theMin-offset[ibag][iband])/scale[ibag][iband] << "," << (theMax-offset[ibag][iband])/scale[ibag][iband] << "]" << std::endl;
406  }
407  }
408  }
409  }
410  else{//use same offset and scale
411  offset[ibag].resize(nband);
412  scale[ibag].resize(nband);
413  for(int iband=0;iband<nband;++iband){
414  offset[ibag][iband]=offset[0][iband];
415  scale[ibag][iband]=scale[0][iband];
416  }
417  }
418 
419  if(!ibag){
420  if(priors_opt.size()==1){//default: equal priors for each class
421  priors.resize(nclass);
422  for(short iclass=0;iclass<nclass;++iclass)
423  priors[iclass]=1.0/nclass;
424  }
425  assert(priors_opt.size()==1||priors_opt.size()==nclass);
426 
427  //set bagsize for each class if not done already via command line
428  while(bagSize_opt.size()<nclass)
429  bagSize_opt.push_back(bagSize_opt.back());
430 
431  if(verbose_opt[0]>=1){
432  std::cout << "number of bands: " << nband << std::endl;
433  std::cout << "number of classes: " << nclass << std::endl;
434  if(priorimg_opt.empty()){
435  std::cout << "priors:";
436  for(short iclass=0;iclass<nclass;++iclass)
437  std::cout << " " << priors[iclass];
438  std::cout << std::endl;
439  }
440  }
441  map<string,Vector2d<float> >::iterator mapit=trainingMap.begin();
442  bool doSort=true;
443  try{
444  while(mapit!=trainingMap.end()){
445  nameVector.push_back(mapit->first);
446  if(classValueMap.size()){
447  //check if name in training is covered by classname_opt (values can not be 0)
448  if(classValueMap[mapit->first]>0){
449  if(cm.getClassIndex(type2string<short>(classValueMap[mapit->first]))<0){
450  cm.pushBackClassName(type2string<short>(classValueMap[mapit->first]),doSort);
451  }
452  }
453  else{
454  std::cerr << "Error: names in classname option are not complete, please check names in training vector and make sure classvalue is > 0" << std::endl;
455  exit(1);
456  }
457  }
458  else
459  cm.pushBackClassName(mapit->first,doSort);
460  ++mapit;
461  }
462  }
463  catch(BadConversion conversionString){
464  std::cerr << "Error: did you provide a class name (-c) and integer value (-r) pair for each class in the training vector?" << std::endl;
465  exit(1);
466  }
467  if(classname_opt.empty()){
468  //std::cerr << "Warning: no class name and value pair provided for all " << nclass << " classes, using string2type<int> instead!" << std::endl;
469  for(int iclass=0;iclass<nclass;++iclass){
470  if(verbose_opt[0])
471  std::cout << iclass << " " << cm.getClass(iclass) << " -> " << string2type<short>(cm.getClass(iclass)) << std::endl;
472  classValueMap[cm.getClass(iclass)]=string2type<short>(cm.getClass(iclass));
473  }
474  }
475 
476  // if(priors_opt.size()==nameVector.size()){
477  // std::cerr << "Warning: please check if priors are provided in correct order!!!" << std::endl;
478  // for(int iclass=0;iclass<nameVector.size();++iclass)
479  // std::cerr << nameVector[iclass] << " " << priors_opt[iclass] << std::endl;
480  // }
481  }//if(!ibag)
482 
483  //Calculate features of training set
484  vector< Vector2d<float> > trainingFeatures(nclass);
485  for(short iclass=0;iclass<nclass;++iclass){
486  int nctraining=0;
487  if(verbose_opt[0]>=1)
488  std::cout << "calculating features for class " << iclass << std::endl;
489  if(random_opt[0])
490  srand(time(NULL));
491  nctraining=(bagSize_opt[iclass]<100)? trainingPixels[iclass].size()/100.0*bagSize_opt[iclass] : trainingPixels[iclass].size();//bagSize_opt[iclass] given in % of training size
492  if(nctraining<=0)
493  nctraining=1;
494  assert(nctraining<=trainingPixels[iclass].size());
495  int index=0;
496  if(bagSize_opt[iclass]<100)
497  random_shuffle(trainingPixels[iclass].begin(),trainingPixels[iclass].end());
498  if(verbose_opt[0]>1)
499  std::cout << "nctraining (class " << iclass << "): " << nctraining << std::endl;
500  trainingFeatures[iclass].resize(nctraining);
501  for(int isample=0;isample<nctraining;++isample){
502  //scale pixel values according to scale and offset!!!
503  for(int iband=0;iband<nband;++iband){
504  float value=trainingPixels[iclass][isample][iband+startBand];
505  trainingFeatures[iclass][isample].push_back((value-offset[ibag][iband])/scale[ibag][iband]);
506  }
507  }
508  assert(trainingFeatures[iclass].size()==nctraining);
509  }
510 
511  unsigned int nFeatures=trainingFeatures[0][0].size();
512  if(verbose_opt[0]>=1)
513  std::cout << "number of features: " << nFeatures << std::endl;
514  unsigned int ntraining=0;
515  for(short iclass=0;iclass<nclass;++iclass)
516  ntraining+=trainingFeatures[iclass].size();
517  if(verbose_opt[0]>=1)
518  std::cout << "training size over all classes: " << ntraining << std::endl;
519 
520  prob[ibag].l=ntraining;
521  prob[ibag].y = Malloc(double,prob[ibag].l);
522  prob[ibag].x = Malloc(struct svm_node *,prob[ibag].l);
523  x_space[ibag] = Malloc(struct svm_node,(nFeatures+1)*ntraining);
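 //libsvm expects each training sample as a sparse array of svm_node structs:
 //  {index=1,value=f1},...,{index=nFeatures,value=fN},{index=-1}
 //the node with index -1 terminates the feature vector, hence nFeatures+1 nodes per sample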
524  unsigned long int spaceIndex=0;
525  int lIndex=0;
526  for(short iclass=0;iclass<nclass;++iclass){
527  for(int isample=0;isample<trainingFeatures[iclass].size();++isample){
528  prob[ibag].x[lIndex]=&(x_space[ibag][spaceIndex]);
529  for(int ifeature=0;ifeature<nFeatures;++ifeature){
530  x_space[ibag][spaceIndex].index=ifeature+1;
531  x_space[ibag][spaceIndex].value=trainingFeatures[iclass][isample][ifeature];
532  ++spaceIndex;
533  }
534  x_space[ibag][spaceIndex++].index=-1;
535  prob[ibag].y[lIndex]=iclass;
536  ++lIndex;
537  }
538  }
539  assert(lIndex==prob[ibag].l);
540 
541  //set SVM parameters through command line options
542  param[ibag].svm_type = svmMap[svm_type_opt[0]];
543  param[ibag].kernel_type = kernelMap[kernel_type_opt[0]];
544  param[ibag].degree = kernel_degree_opt[0];
545  param[ibag].gamma = (gamma_opt[0]>0)? gamma_opt[0] : 1.0/nFeatures;
546  param[ibag].coef0 = coef0_opt[0];
547  param[ibag].nu = nu_opt[0];
548  param[ibag].cache_size = cache_opt[0];
549  param[ibag].C = ccost_opt[0];
550  param[ibag].eps = epsilon_tol_opt[0];
551  param[ibag].p = epsilon_loss_opt[0];
552  param[ibag].shrinking = (shrinking_opt[0])? 1 : 0;
553  param[ibag].probability = (prob_est_opt[0])? 1 : 0;
554  param[ibag].nr_weight = 0;//not used: I use priors and balancing
555  param[ibag].weight_label = NULL;
556  param[ibag].weight = NULL;
557  param[ibag].verbose=(verbose_opt[0]>1)? true:false;
558 
559  if(verbose_opt[0]>1)
560  std::cout << "checking parameters" << std::endl;
561  svm_check_parameter(&prob[ibag],&param[ibag]);
562  if(verbose_opt[0])
563  std::cout << "parameters ok, training" << std::endl;
564  svm[ibag]=svm_train(&prob[ibag],&param[ibag]);
565  if(verbose_opt[0]>1)
566  std::cout << "SVM is now trained" << std::endl;
567  if(cv_opt[0]>1){
568  if(verbose_opt[0]>1)
569  std::cout << "Cross validating" << std::endl;
570  double *target = Malloc(double,prob[ibag].l);
571  svm_cross_validation(&prob[ibag],&param[ibag],cv_opt[0],target);
572  assert(param[ibag].svm_type != EPSILON_SVR&&param[ibag].svm_type != NU_SVR);//cross-validation accuracy is only computed for classification (not for regression)
573 
574  for(int i=0;i<prob[ibag].l;i++){
575  string refClassName=nameVector[prob[ibag].y[i]];
576  string className=nameVector[target[i]];
577  if(classValueMap.size())
578  cm.incrementResult(type2string<short>(classValueMap[refClassName]),type2string<short>(classValueMap[className]),1.0/nbag);
579  else
580  cm.incrementResult(cm.getClass(prob[ibag].y[i]),cm.getClass(target[i]),1.0/nbag);
581  }
582  free(target);
583  }
584  // *NOTE* Because svm_model contains pointers to svm_problem, you can
585  // not free the memory used by svm_problem if you are still using the
586  // svm_model produced by svm_train().
587  }//for ibag
588  if(cv_opt[0]>1){
589  assert(cm.nReference());
590  std::cout << cm << std::endl;
591  cout << "class #samples userAcc prodAcc" << endl;
592  double se95_ua=0;
593  double se95_pa=0;
594  double se95_oa=0;
595  double dua=0;
596  double dpa=0;
597  double doa=0;
598  for(short iclass=0;iclass<cm.nClasses();++iclass){
599  dua=cm.ua(cm.getClass(iclass),&se95_ua);
600  dpa=cm.pa(cm.getClass(iclass),&se95_pa);
601  cout << cm.getClass(iclass) << " " << cm.nReference(cm.getClass(iclass)) << " " << dua << " (" << se95_ua << ")" << " " << dpa << " (" << se95_pa << ")" << endl;
602  }
603  std::cout << "Kappa: " << cm.kappa() << std::endl;
604  doa=cm.oa(&se95_oa);
605  std::cout << "Overall Accuracy: " << 100*doa << " (" << 100*se95_oa << ")" << std::endl;
606  }
607 
608  //--------------------------------- end of training -----------------------------------
609  if(input_opt.empty())
610  exit(0);
611 
612  const char* pszMessage=NULL;
613  void* pProgressArg=NULL;
614  GDALProgressFunc pfnProgress=GDALTermProgress;
615  float progress=0;
616  if(!verbose_opt[0])
617  pfnProgress(progress,pszMessage,pProgressArg);
618  //-------------------------------- open image file ------------------------------------
619  bool inputIsRaster=false;
620  ImgReaderOgr imgReaderOgr;
621  try{
622  imgReaderOgr.open(input_opt[0]);
623  imgReaderOgr.close();
624  }
625  catch(string errorString){
626  inputIsRaster=true;
627  }
628  if(inputIsRaster){
629  ImgReaderGdal testImage;
630  try{
631  if(verbose_opt[0]>=1)
632  std::cout << "opening image " << input_opt[0] << std::endl;
633  testImage.open(input_opt[0]);
634  }
635  catch(string error){
636  cerr << error << std::endl;
637  exit(2);
638  }
639  ImgReaderGdal maskReader;
640  if(mask_opt.size()){
641  try{
642  if(verbose_opt[0]>=1)
643  std::cout << "opening mask image file " << mask_opt[0] << std::endl;
644  maskReader.open(mask_opt[0]);
645  }
646  catch(string error){
647  cerr << error << std::endl;
648  exit(2);
649  }
650  catch(...){
651  cerr << "error caught" << std::endl;
652  exit(1);
653  }
654  }
655  ImgReaderGdal priorReader;
656  if(priorimg_opt.size()){
657  try{
658  if(verbose_opt[0]>=1)
659  std::cout << "opening prior image " << priorimg_opt[0] << std::endl;
660  priorReader.open(priorimg_opt[0]);
661  assert(priorReader.nrOfCol()==testImage.nrOfCol());
662  assert(priorReader.nrOfRow()==testImage.nrOfRow());
663  }
664  catch(string error){
665  cerr << error << std::endl;
666  exit(2);
667  }
668  catch(...){
669  cerr << "error caught" << std::endl;
670  exit(1);
671  }
672  }
673 
674  int nrow=testImage.nrOfRow();
675  int ncol=testImage.nrOfCol();
676  if(option_opt.findSubstring("INTERLEAVE=")==option_opt.end()){
677  string theInterleave="INTERLEAVE=";
678  theInterleave+=testImage.getInterleave();
679  option_opt.push_back(theInterleave);
680  }
681  vector<char> classOut(ncol);//classified line for writing to image file
682 
683  // assert(nband==testImage.nrOfBand());
684  ImgWriterGdal classImageBag;
685  ImgWriterGdal classImageOut;
686  ImgWriterGdal probImage;
687  ImgWriterGdal entropyImage;
688 
689  string imageType=testImage.getImageType();
690  if(oformat_opt.size())//default
691  imageType=oformat_opt[0];
692  try{
693  assert(output_opt.size());
694  if(verbose_opt[0]>=1)
695  std::cout << "opening class image for writing output " << output_opt[0] << std::endl;
696  if(classBag_opt.size()){
697  classImageBag.open(classBag_opt[0],ncol,nrow,nbag,GDT_Byte,imageType,option_opt);
698  classImageBag.GDALSetNoDataValue(nodata_opt[0]);
699  classImageBag.copyGeoTransform(testImage);
700  classImageBag.setProjection(testImage.getProjection());
701  }
702  classImageOut.open(output_opt[0],ncol,nrow,1,GDT_Byte,imageType,option_opt);
703  classImageOut.GDALSetNoDataValue(nodata_opt[0]);
704  classImageOut.copyGeoTransform(testImage);
705  classImageOut.setProjection(testImage.getProjection());
706  if(colorTable_opt.size())
707  classImageOut.setColorTable(colorTable_opt[0],0);
708  if(prob_opt.size()){
709  probImage.open(prob_opt[0],ncol,nrow,nclass,GDT_Byte,imageType,option_opt);
710  probImage.GDALSetNoDataValue(nodata_opt[0]);
711  probImage.copyGeoTransform(testImage);
712  probImage.setProjection(testImage.getProjection());
713  }
714  if(entropy_opt.size()){
715  entropyImage.open(entropy_opt[0],ncol,nrow,1,GDT_Byte,imageType,option_opt);
716  entropyImage.GDALSetNoDataValue(nodata_opt[0]);
717  entropyImage.copyGeoTransform(testImage);
718  entropyImage.setProjection(testImage.getProjection());
719  }
720  }
721  catch(string error){
722  cerr << error << std::endl;
723  }
724 
725  for(int iline=0;iline<nrow;++iline){
726  vector<float> buffer(ncol);
727  vector<short> lineMask;
728  Vector2d<float> linePrior;
729  if(priorimg_opt.size())
730  linePrior.resize(nclass,ncol);//prior prob for each class
731  Vector2d<float> hpixel(ncol);
732  Vector2d<float> probOut(nclass,ncol);//posterior prob for each (internal) class
733  vector<float> entropy(ncol);
734  Vector2d<char> classBag;//classified line for writing to image file
735  if(classBag_opt.size())
736  classBag.resize(nbag,ncol);
737  try{
738  if(band_opt.size()){
739  for(int iband=0;iband<band_opt.size();++iband){
740  if(verbose_opt[0]==2)
741  std::cout << "reading band " << band_opt[iband] << std::endl;
742  assert(band_opt[iband]>=0);
743  assert(band_opt[iband]<testImage.nrOfBand());
744  testImage.readData(buffer,GDT_Float32,iline,band_opt[iband]);
745  for(int icol=0;icol<ncol;++icol)
746  hpixel[icol].push_back(buffer[icol]);
747  }
748  }
749  else{
750  for(int iband=bstart_opt[0];iband<bstart_opt[0]+nband;++iband){
751  if(verbose_opt[0]==2)
752  std::cout << "reading band " << iband << std::endl;
753  assert(iband>=0);
754  assert(iband<testImage.nrOfBand());
755  testImage.readData(buffer,GDT_Float32,iline,iband);
756  for(int icol=0;icol<ncol;++icol)
757  hpixel[icol].push_back(buffer[icol]);
758  }
759  }
760  }
761  catch(string theError){
762  cerr << "Error reading " << input_opt[0] << ": " << theError << std::endl;
763  exit(3);
764  }
765  catch(...){
766  cerr << "error caught" << std::endl;
767  exit(3);
768  }
769  assert(nband==hpixel[0].size());
770  if(verbose_opt[0]>1)
771  std::cout << "used bands: " << nband << std::endl;
772  //read prior
773  if(priorimg_opt.size()){
774  try{
775  for(short iclass=0;iclass<nclass;++iclass){
776  if(verbose_opt[0]>1)
777  std::cout << "Reading " << priorimg_opt[0] << " band " << iclass << " line " << iline << std::endl;
778  priorReader.readData(linePrior[iclass],GDT_Float32,iline,iclass);
779  }
780  }
781  catch(string theError){
782  std::cerr << "Error reading " << priorimg_opt[0] << ": " << theError << std::endl;
783  exit(3);
784  }
785  catch(...){
786  cerr << "error caught" << std::endl;
787  exit(3);
788  }
789  }
790  double oldRowMask=-1;//keep track of row mask to optimize number of line readings
791  //process per pixel
792  for(int icol=0;icol<ncol;++icol){
793  assert(hpixel[icol].size()==nband);
794  bool masked=false;
795  if(mask_opt.size()){
796  //read mask
797  double colMask=0;
798  double rowMask=0;
799  double geox=0;
800  double geoy=0;
801 
802  testImage.image2geo(icol,iline,geox,geoy);
803  maskReader.geo2image(geox,geoy,colMask,rowMask);
804  colMask=static_cast<int>(colMask);
805  rowMask=static_cast<int>(rowMask);
806  if(rowMask>=0&&rowMask<maskReader.nrOfRow()&&colMask>=0&&colMask<maskReader.nrOfCol()){
807  if(static_cast<int>(rowMask)!=static_cast<int>(oldRowMask)){
808  assert(rowMask>=0&&rowMask<maskReader.nrOfRow());
809  try{
810  // maskReader.readData(lineMask[imask],GDT_Int32,static_cast<int>(rowMask));
811  maskReader.readData(lineMask,GDT_Int16,static_cast<int>(rowMask));
812  }
813  catch(string errorstring){
814  cerr << errorstring << endl;
815  exit(1);
816  }
817  catch(...){
818  cerr << "error caught" << std::endl;
819  exit(3);
820  }
821  oldRowMask=rowMask;
822  }
823  short theMask=0;
824  for(short ivalue=0;ivalue<msknodata_opt.size();++ivalue){
825  if(msknodata_opt[ivalue]>=0){//values set in msknodata_opt are invalid
826  if(lineMask[colMask]==msknodata_opt[ivalue]){
827  theMask=lineMask[colMask];
828  masked=true;
829  break;
830  }
831  }
832  else{//only values set in msknodata_opt are valid
833  if(lineMask[colMask]!=-msknodata_opt[ivalue]){
834  theMask=lineMask[colMask];
835  masked=true;
836  }
837  else{
838  masked=false;
839  break;
840  }
841  }
842  }
843  if(masked){
844  if(classBag_opt.size())
845  for(int ibag=0;ibag<nbag;++ibag)
846  classBag[ibag][icol]=theMask;
847  classOut[icol]=theMask;
848  continue;
849  }
850  }
851  bool valid=false;
852  for(int iband=0;iband<hpixel[icol].size();++iband){
853  if(hpixel[icol][iband]){
854  valid=true;
855  break;
856  }
857  }
858  if(!valid){
859  if(classBag_opt.size())
860  for(int ibag=0;ibag<nbag;++ibag)
861  classBag[ibag][icol]=nodata_opt[0];
862  classOut[icol]=nodata_opt[0];
863  continue;//next column
864  }
865  }
866  for(short iclass=0;iclass<nclass;++iclass)
867  probOut[iclass][icol]=0;
868  if(verbose_opt[0]>1)
869  std::cout << "begin classification " << std::endl;
870  //----------------------------------- classification -------------------
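 //for each bag: scale the pixel with that bag's offset and scale, build a libsvm feature vector and predict either a hard label (svm_predict) or per-class probabilities (svm_predict_probability)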
871  for(int ibag=0;ibag<nbag;++ibag){
872  vector<double> result(nclass);
873  struct svm_node *x;
874  x = (struct svm_node *) malloc((nband+1)*sizeof(struct svm_node));
875  for(int iband=0;iband<nband;++iband){
876  x[iband].index=iband+1;
877  x[iband].value=(hpixel[icol][iband]-offset[ibag][iband])/scale[ibag][iband];
878  }
879  x[nband].index=-1;//to end svm feature vector
880  double predict_label=0;
881  vector<float> prValues(nclass);
882  float maxP=0;
883  if(!prob_est_opt[0]){
884  predict_label = svm_predict(svm[ibag],x);
885  for(short iclass=0;iclass<nclass;++iclass){
886  if(iclass==static_cast<short>(predict_label))
887  result[iclass]=1;
888  else
889  result[iclass]=0;
890  }
891  }
892  else{
893  assert(svm_check_probability_model(svm[ibag]));
894  predict_label = svm_predict_probability(svm[ibag],x,&(result[0]));
895  }
896  //calculate posterior prob of bag
897  if(classBag_opt.size()){
898  //search for max prob within bag
899  maxP=0;
900  classBag[ibag][icol]=0;
901  }
902  double normPrior=0;
903  if(priorimg_opt.size()){
904  for(short iclass=0;iclass<nclass;++iclass)
905  normPrior+=linePrior[iclass][icol];
906  }
907  for(short iclass=0;iclass<nclass;++iclass){
908  if(priorimg_opt.size())
909  priors[iclass]=linePrior[iclass][icol]/normPrior;//todo: check if correct for all cases... (automatic classValueMap and manual input for names and values)
910  switch(comb_opt[0]){
911  default:
912  case(0)://sum rule
913  probOut[iclass][icol]+=result[iclass]*priors[iclass];//add probabilities for each bag
914  break;
915  case(1)://product rule
916  probOut[iclass][icol]*=pow(static_cast<float>(priors[iclass]),static_cast<float>(1.0-nbag)/nbag)*result[iclass];//multiply probabilities for each bag
917  break;
918  case(2)://max rule
919  if(priors[iclass]*result[iclass]>probOut[iclass][icol])
920  probOut[iclass][icol]=priors[iclass]*result[iclass];
921  break;
922  }
923  if(classBag_opt.size()){
924  //search for max prob within bag
925  // if(prValues[iclass]>maxP){
926  // maxP=prValues[iclass];
927  // classBag[ibag][icol]=iclass;
928  // }
929  if(result[iclass]>maxP){
930  maxP=result[iclass];
931  classBag[ibag][icol]=iclass;
932  }
933  }
934  }
935  free(x);
936  }//ibag
937 
938  //search for max class prob
939  float maxBag1=0;//max probability
940  float maxBag2=0;//second max probability
941  float normBag=0;
942  for(short iclass=0;iclass<nclass;++iclass){
943  if(probOut[iclass][icol]>maxBag1){
944  maxBag1=probOut[iclass][icol];
945  classOut[icol]=classValueMap[nameVector[iclass]];
946  }
947  else if(probOut[iclass][icol]>maxBag2)
948  maxBag2=probOut[iclass][icol];
949  normBag+=probOut[iclass][icol];
950  }
951  //normalize probOut and convert to percentage
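 //entropy per pixel: -sum_c p_c*log2(p_c), divided by log2(nclass) so that 0 means a certain classification and 1 means all classes equally likely, then scaled to 0-100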
952  entropy[icol]=0;
953  for(short iclass=0;iclass<nclass;++iclass){
954  float prv=probOut[iclass][icol];
955  prv/=normBag;
956  entropy[icol]-=prv*log(prv)/log(2.0);
957  prv*=100.0;
958 
959  probOut[iclass][icol]=static_cast<short>(prv+0.5);
960  // assert(classValueMap[nameVector[iclass]]<probOut.size());
961  // assert(classValueMap[nameVector[iclass]]>=0);
962  // probOut[classValueMap[nameVector[iclass]]][icol]=static_cast<short>(prv+0.5);
963  }
964  entropy[icol]/=log(static_cast<double>(nclass))/log(2.0);
965  entropy[icol]=static_cast<short>(100*entropy[icol]+0.5);
966  if(active_opt.size()){
967  if(entropy[icol]>activePoints.back().value){
968  activePoints.back().value=entropy[icol];//replace largest value (last)
969  activePoints.back().posx=icol;
970  activePoints.back().posy=iline;
971  std::sort(activePoints.begin(),activePoints.end(),Decrease_PosValue());//sort in descending order (largest first, smallest last)
972  if(verbose_opt[0])
973  std::cout << activePoints.back().posx << " " << activePoints.back().posy << " " << activePoints.back().value << std::endl;
974  }
975  }
976  }//icol
977  //----------------------------------- write output ------------------------------------------
978  if(classBag_opt.size())
979  for(int ibag=0;ibag<nbag;++ibag)
980  classImageBag.writeData(classBag[ibag],GDT_Byte,iline,ibag);
981  if(prob_opt.size()){
982  for(short iclass=0;iclass<nclass;++iclass)
983  probImage.writeData(probOut[iclass],GDT_Float32,iline,iclass);
984  }
985  if(entropy_opt.size()){
986  entropyImage.writeData(entropy,GDT_Float32,iline);
987  }
988  classImageOut.writeData(classOut,GDT_Byte,iline);
989  if(!verbose_opt[0]){
990  progress=static_cast<float>(iline+1.0)/classImageOut.nrOfRow();
991  pfnProgress(progress,pszMessage,pProgressArg);
992  }
993  }
994  //write active learning points
995  if(active_opt.size()){
996  for(int iactive=0;iactive<activePoints.size();++iactive){
997  std::map<string,double> pointMap;
998  for(int iband=0;iband<testImage.nrOfBand();++iband){
999  double value;
1000  testImage.readData(value,GDT_Float64,static_cast<int>(activePoints[iactive].posx),static_cast<int>(activePoints[iactive].posy),iband);
1001  ostringstream fs;
1002  fs << "B" << iband;
1003  pointMap[fs.str()]=value;
1004  }
1005  pointMap[label_opt[0]]=0;
1006  double x, y;
1007  testImage.image2geo(activePoints[iactive].posx,activePoints[iactive].posy,x,y);
1008  std::string fieldname="id";//number of the point
1009  activeWriter.addPoint(x,y,pointMap,fieldname,++nactive);
1010  }
1011  }
1012 
1013  testImage.close();
1014  if(mask_opt.size())
1015  maskReader.close();
1016  if(priorimg_opt.size())
1017  priorReader.close();
1018  if(prob_opt.size())
1019  probImage.close();
1020  if(entropy_opt.size())
1021  entropyImage.close();
1022  if(classBag_opt.size())
1023  classImageBag.close();
1024  classImageOut.close();
1025  }
1026  else{//classify vector file
1027  cm.clearResults();
1028  //notice that fields have already been set by readDataImageOgr (taking into account appropriate bands)
1029  for(int ivalidation=0;ivalidation<input_opt.size();++ivalidation){
1030  if(output_opt.size())
1031  assert(output_opt.size()==input_opt.size());
1032  if(verbose_opt[0])
1033  std::cout << "opening img reader " << input_opt[ivalidation] << std::endl;
1034  imgReaderOgr.open(input_opt[ivalidation]);
1035  ImgWriterOgr imgWriterOgr;
1036 
1037  if(output_opt.size()){
1038  if(verbose_opt[0])
1039  std::cout << "opening img writer and copying fields from img reader " << output_opt[ivalidation] << std::endl;
1040  imgWriterOgr.open(output_opt[ivalidation],imgReaderOgr);
1041  }
1042  if(verbose_opt[0])
1043  cout << "number of layers in input ogr file: " << imgReaderOgr.getLayerCount() << endl;
1044  for(int ilayer=0;ilayer<imgReaderOgr.getLayerCount();++ilayer){
1045  if(verbose_opt[0])
1046  cout << "processing input layer " << ilayer << endl;
1047  if(output_opt.size()){
1048  if(verbose_opt[0])
1049  std::cout << "creating field class" << std::endl;
1050  if(classValueMap.size())
1051  imgWriterOgr.createField("class",OFTInteger,ilayer);
1052  else
1053  imgWriterOgr.createField("class",OFTString,ilayer);
1054  }
1055  unsigned int nFeatures=imgReaderOgr.getFeatureCount(ilayer);
1056  unsigned int ifeature=0;
1057  progress=0;
1058  pfnProgress(progress,pszMessage,pProgressArg);
1059  OGRFeature *poFeature;
1060  while( (poFeature = imgReaderOgr.getLayer(ilayer)->GetNextFeature()) != NULL ){
1061  if(verbose_opt[0]>1)
1062  std::cout << "feature " << ifeature << std::endl;
1063  if( poFeature == NULL ){
1064  cout << "Warning: could not read feature " << ifeature << " in layer " << imgReaderOgr.getLayerName(ilayer) << endl;
1065  continue;
1066  }
1067  OGRFeature *poDstFeature = NULL;
1068  if(output_opt.size()){
1069  poDstFeature=imgWriterOgr.createFeature(ilayer);
1070  if( poDstFeature->SetFrom( poFeature, TRUE ) != OGRERR_NONE ){
1071  CPLError( CE_Failure, CPLE_AppDefined,
1072  "Unable to translate feature %d from layer %s.\n",
1073  poFeature->GetFID(), imgWriterOgr.getLayerName(ilayer).c_str() );
1074  OGRFeature::DestroyFeature( poFeature );
1075  OGRFeature::DestroyFeature( poDstFeature );
1076  }
1077  }
1078  vector<float> validationPixel;
1079  vector<float> validationFeature;
1080 
1081  imgReaderOgr.readData(validationPixel,OFTReal,fields,poFeature,ilayer);
1082  assert(validationPixel.size()==nband);
1083  vector<float> probOut(nclass);//posterior prob for each class
1084  for(short iclass=0;iclass<nclass;++iclass)
1085  probOut[iclass]=0;
1086  for(int ibag=0;ibag<nbag;++ibag){
1087  for(int iband=0;iband<nband;++iband){
1088  validationFeature.push_back((validationPixel[iband]-offset[ibag][iband])/scale[ibag][iband]);
1089  if(verbose_opt[0]==2)
1090  std::cout << " " << validationFeature.back();
1091  }
1092  if(verbose_opt[0]==2)
1093  std::cout << std::endl;
1094  vector<double> result(nclass);
1095  struct svm_node *x;
1096  x = (struct svm_node *) malloc((validationFeature.size()+1)*sizeof(struct svm_node));
1097  for(int i=0;i<validationFeature.size();++i){
1098  x[i].index=i+1;
1099  x[i].value=validationFeature[i];
1100  }
1101 
1102  x[validationFeature.size()].index=-1;//to end svm feature vector
1103  double predict_label=0;
1104  if(!prob_est_opt[0]){
1105  predict_label = svm_predict(svm[ibag],x);
1106  for(short iclass=0;iclass<nclass;++iclass){
1107  if(iclass==static_cast<short>(predict_label))
1108  result[iclass]=1;
1109  else
1110  result[iclass]=0;
1111  }
1112  }
1113  else{
1114  assert(svm_check_probability_model(svm[ibag]));
1115  predict_label = svm_predict_probability(svm[ibag],x,&(result[0]));
1116  }
1117  if(verbose_opt[0]>1){
1118  std::cout << "predict_label: " << predict_label << std::endl;
1119  for(int iclass=0;iclass<result.size();++iclass)
1120  std::cout << result[iclass] << " ";
1121  std::cout << std::endl;
1122  }
1123 
1124  //calculate posterior prob of bag
1125  for(short iclass=0;iclass<nclass;++iclass){
1126  switch(comb_opt[0]){
1127  default:
1128  case(0)://sum rule
1129  probOut[iclass]+=result[iclass]*priors[iclass];//add probabilities for each bag
1130  break;
1131  case(1)://product rule
1132  probOut[iclass]*=pow(static_cast<float>(priors[iclass]),static_cast<float>(1.0-nbag)/nbag)*result[iclass];//multiply probabilities for each bag
1133  break;
1134  case(2)://max rule
1135  if(priors[iclass]*result[iclass]>probOut[iclass])
1136  probOut[iclass]=priors[iclass]*result[iclass];
1137  break;
1138  }
1139  }
1140  free(x);
1141  }//for ibag
1142 
1143  //search for max class prob
1144  float maxBag=0;
1145  float normBag=0;
1146  string classOut="Unclassified";
1147  for(short iclass=0;iclass<nclass;++iclass){
1148  if(verbose_opt[0]>1)
1149  std::cout << probOut[iclass] << " ";
1150  if(probOut[iclass]>maxBag){
1151  maxBag=probOut[iclass];
1152  classOut=nameVector[iclass];
1153  }
1154  }
1155  //look for class name
1156  if(verbose_opt[0]>1){
1157  if(classValueMap.size())
1158  std::cout << "->" << classValueMap[classOut] << std::endl;
1159  else
1160  std::cout << "->" << classOut << std::endl;
1161  }
1162  if(output_opt.size()){
1163  if(classValueMap.size())
1164  poDstFeature->SetField("class",classValueMap[classOut]);
1165  else
1166  poDstFeature->SetField("class",classOut.c_str());
1167  poDstFeature->SetFID( poFeature->GetFID() );
1168  }
1169  int labelIndex=poFeature->GetFieldIndex(label_opt[0].c_str());
1170  if(labelIndex>=0){
1171  string classRef=poFeature->GetFieldAsString(labelIndex);
1172  if(classRef!="0"){
1173  if(classValueMap.size())
1174  cm.incrementResult(type2string<short>(classValueMap[classRef]),type2string<short>(classValueMap[classOut]),1);
1175  else
1176  cm.incrementResult(classRef,classOut,1);
1177  }
1178  }
1179  CPLErrorReset();
1180  if(output_opt.size()){
1181  if(imgWriterOgr.createFeature(poDstFeature,ilayer) != OGRERR_NONE){
1182  CPLError( CE_Failure, CPLE_AppDefined,
1183  "Unable to translate feature %d from layer %s.\n",
1184  poFeature->GetFID(), imgWriterOgr.getLayerName(ilayer).c_str() );
1185  OGRFeature::DestroyFeature( poDstFeature );
1186  OGRFeature::DestroyFeature( poDstFeature );
1187  }
1188  }
1189  ++ifeature;
1190  if(!verbose_opt[0]){
1191  progress=static_cast<float>(ifeature+1.0)/nFeatures;
1192  pfnProgress(progress,pszMessage,pProgressArg);
1193  }
1194  OGRFeature::DestroyFeature( poFeature );
1195  OGRFeature::DestroyFeature( poDstFeature );
1196  }//get next feature
1197  }//next layer
1198  imgReaderOgr.close();
1199  if(output_opt.size())
1200  imgWriterOgr.close();
1201  }
1202  if(cm.nReference()){
1203  std::cout << cm << std::endl;
1204  cout << "class #samples userAcc prodAcc" << endl;
1205  double se95_ua=0;
1206  double se95_pa=0;
1207  double se95_oa=0;
1208  double dua=0;
1209  double dpa=0;
1210  double doa=0;
1211  for(short iclass=0;iclass<cm.nClasses();++iclass){
1212  dua=cm.ua_pct(cm.getClass(iclass),&se95_ua);
1213  dpa=cm.pa_pct(cm.getClass(iclass),&se95_pa);
1214  cout << cm.getClass(iclass) << " " << cm.nReference(cm.getClass(iclass)) << " " << dua << " (" << se95_ua << ")" << " " << dpa << " (" << se95_pa << ")" << endl;
1215  }
1216  std::cout << "Kappa: " << cm.kappa() << std::endl;
1217  doa=cm.oa(&se95_oa);
1218  std::cout << "Overall Accuracy: " << 100*doa << " (" << 100*se95_oa << ")" << std::endl;
1219  }
1220  }
1221  try{
1222  if(active_opt.size())
1223  activeWriter.close();
1224  }
1225  catch(string errorString){
1226  std::cerr << "Error: " << errorString << std::endl;
1227  }
1228 
1229  for(int ibag=0;ibag<nbag;++ibag){
1230  // svm_destroy_param[ibag](&param[ibag]);
1231  svm_destroy_param(&param[ibag]);
1232  free(prob[ibag].y);
1233  free(prob[ibag].x);
1234  free(x_space[ibag]);
1235  svm_free_and_destroy_model(&(svm[ibag]));
1236  }
1237  return 0;
1238 }