package de.ugoe.cs.cpdp.training;

import java.util.LinkedList;
import java.util.List;

import org.apache.commons.collections4.list.SetUniqueList;

import weka.classifiers.AbstractClassifier;
import weka.classifiers.Classifier;
import weka.core.Instance;
import weka.core.Instances;
import org.apache.commons.lang3.ArrayUtils;
import org.jgap.Configuration;
import org.jgap.InvalidConfigurationException;
import org.jgap.gp.CommandGene;
import org.jgap.gp.GPProblem;

import org.jgap.gp.function.Add;
import org.jgap.gp.function.Multiply;
import org.jgap.gp.function.Log;
import org.jgap.gp.function.Subtract;
import org.jgap.gp.function.Divide;
import org.jgap.gp.function.Sine;
import org.jgap.gp.function.Cosine;
import org.jgap.gp.function.Max;
import org.jgap.gp.function.Exp;

import org.jgap.gp.impl.DeltaGPFitnessEvaluator;
import org.jgap.gp.impl.GPConfiguration;
import org.jgap.gp.impl.GPGenotype;
import org.jgap.gp.impl.TournamentSelector;
import org.jgap.gp.terminal.Terminal;
import org.jgap.gp.GPFitnessFunction;
import org.jgap.gp.IGPProgram;
import org.jgap.gp.terminal.Variable;
import org.jgap.gp.MathCommand;
import org.jgap.util.ICloneable;

import de.ugoe.cs.cpdp.util.WekaUtils;

import org.jgap.gp.impl.ProgramChromosome;
import org.jgap.util.CloneException;

---|
/**
 * Genetic Programming Trainer
 *
 * Implementation (mostly) according to Liu et al.: Evolutionary Optimization of Software Quality Modeling with Multiple Repositories.
 *
 * - GPRun is one run of a complete Genetic Program evolution; we want several complete runs.
 * - GPVClassifier is the Validation Classifier
 * - GPVVClassifier is the Validation-Voting Classifier
 *
 * config: <setwisetrainer name="GPTraining" param="populationSize:1000,numberRuns:10" />
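 *
 * A minimal programmatic usage sketch (assuming traindataSet is a prepared SetUniqueList of Weka Instances):
 * <pre>
 * GPTraining trainer = new GPTraining();
 * trainer.setParameter("populationSize:1000,numberRuns:10");
 * trainer.apply(traindataSet); // trains the internal validation-voting classifier
 * Classifier model = trainer.getClassifier();
 * </pre>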
 */
public class GPTraining implements ISetWiseTrainingStrategy, IWekaCompatibleTrainer {

    private GPVVClassifier classifier = null;

    // default values from the paper
    private int populationSize = 1000;
    private int initMinDepth = 2;
    private int initMaxDepth = 6;
    private int tournamentSize = 7;
    private int maxGenerations = 50;
    private double errorType2Weight = 15;
    private int numberRuns = 20; // in the paper: 20 runs per errorType2Weight, then an additional 20
    private int maxDepth = 20; // max depth within one program
    private int maxNodes = 100; // max nodes within one program

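    /**
     * Parses the configuration string, a comma-separated list of key:value pairs
     * (e.g. "populationSize:1000,numberRuns:10"), and sets up the validation-voting classifier.
     */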
    @Override
    public void setParameter(String parameters) {

        String[] params = parameters.split(",");
        String[] keyvalue = new String[2];

        for(int i=0; i < params.length; i++) {
            keyvalue = params[i].split(":");

            switch(keyvalue[0]) {
                case "populationSize":
                    this.populationSize = Integer.parseInt(keyvalue[1]);
                    break;

                case "initMinDepth":
                    this.initMinDepth = Integer.parseInt(keyvalue[1]);
                    break;

                case "tournamentSize":
                    this.tournamentSize = Integer.parseInt(keyvalue[1]);
                    break;

                case "maxGenerations":
                    this.maxGenerations = Integer.parseInt(keyvalue[1]);
                    break;

                case "errorType2Weight":
                    this.errorType2Weight = Double.parseDouble(keyvalue[1]);
                    break;

                case "numberRuns":
                    this.numberRuns = Integer.parseInt(keyvalue[1]);
                    break;

                case "maxDepth":
                    this.maxDepth = Integer.parseInt(keyvalue[1]);
                    break;

                case "maxNodes":
                    this.maxNodes = Integer.parseInt(keyvalue[1]);
                    break;
            }
        }

        this.classifier = new GPVVClassifier();
        ((GPVClassifier)this.classifier).configure(populationSize, initMinDepth, initMaxDepth, tournamentSize, maxGenerations, errorType2Weight, numberRuns, maxDepth, maxNodes);
    }

    @Override
    public void apply(SetUniqueList<Instances> traindataSet) {
        try {
            classifier.buildClassifier(traindataSet);
        }catch(Exception e) {
            throw new RuntimeException(e);
        }
    }

    @Override
    public String getName() {
        return "GPTraining";
    }

    @Override
    public Classifier getClassifier() {
        return this.classifier;
    }

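    /**
     * Internal representation of the training data: a feature matrix (class attribute excluded)
     * and a boolean label vector, where true marks instances with class value 1.0.
     */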
    public class InstanceData {
        private double[][] instances_x;
        private boolean[] instances_y;

        public InstanceData(Instances instances) {
            this.instances_x = new double[instances.numInstances()][instances.numAttributes()-1];
            this.instances_y = new boolean[instances.numInstances()];

            Instance current;
            for(int i=0; i < this.instances_x.length; i++) {
                current = instances.get(i);
                this.instances_x[i] = WekaUtils.instanceValues(current);
                this.instances_y[i] = 1.0 == current.classValue();
            }
        }

        public double[][] getX() {
            return instances_x;
        }
        public boolean[] getY() {
            return instances_y;
        }
    }

    /**
     * One run of a complete Genetic Program evolution, executed by the GP classifiers.
     */
    public class GPRun extends AbstractClassifier {
        private static final long serialVersionUID = -4250422550107888789L;

        private int populationSize;
        private int initMinDepth;
        private int initMaxDepth;
        private int tournamentSize;
        private int maxGenerations;
        private double errorType2Weight;
        private int maxDepth;
        private int maxNodes;

        private GPGenotype gp;
        private GPProblem problem;

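        /**
         * Sets the GP parameters for this run; called by the surrounding classifiers before training.
         */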
        public void configure(int populationSize, int initMinDepth, int initMaxDepth, int tournamentSize, int maxGenerations, double errorType2Weight, int maxDepth, int maxNodes) {
            this.populationSize = populationSize;
            this.initMinDepth = initMinDepth;
            this.initMaxDepth = initMaxDepth;
            this.tournamentSize = tournamentSize;
            this.maxGenerations = maxGenerations;
            this.errorType2Weight = errorType2Weight;
            this.maxDepth = maxDepth;
            this.maxNodes = maxNodes;
        }

        public GPGenotype getGp() {
            return this.gp;
        }

        public Variable[] getVariables() {
            return ((CrossPareGP)this.problem).getVariables();
        }

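        /**
         * Builds the GP problem from the training data and evolves it for maxGenerations generations.
         */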
        @Override
        public void buildClassifier(Instances traindata) throws Exception {
            InstanceData train = new InstanceData(traindata);
            this.problem = new CrossPareGP(train.getX(), train.getY(), this.populationSize, this.initMinDepth, this.initMaxDepth, this.tournamentSize, this.errorType2Weight, this.maxDepth, this.maxNodes);
            this.gp = problem.create();
            this.gp.evolve(this.maxGenerations);
        }

        /**
         * GPProblem implementation: defines the GP configuration, the terminal variables and the fitness function.
         */
        class CrossPareGP extends GPProblem {
            private double[][] instances;
            private boolean[] output;

            private int maxDepth;
            private int maxNodes;

            private Variable[] x;

            public CrossPareGP(double[][] instances, boolean[] output, int populationSize, int minInitDepth, int maxInitDepth, int tournamentSize, double errorType2Weight, int maxDepth, int maxNodes) throws InvalidConfigurationException {
                super(new GPConfiguration());

                this.instances = instances;
                this.output = output;
                this.maxDepth = maxDepth;
                this.maxNodes = maxNodes;

                Configuration.reset();
                GPConfiguration config = this.getGPConfiguration();

                this.x = new Variable[this.instances[0].length];

                for(int j=0; j < this.x.length; j++) {
                    this.x[j] = Variable.create(config, "X"+j, CommandGene.DoubleClass);
                }

                config.setGPFitnessEvaluator(new DeltaGPFitnessEvaluator()); // smaller fitness is better
                //config.setGPFitnessEvaluator(new DefaultGPFitnessEvaluator()); // bigger fitness is better

                config.setMinInitDepth(minInitDepth);
                config.setMaxInitDepth(maxInitDepth);

                config.setCrossoverProb((float)0.60);
                config.setReproductionProb((float)0.10);
                config.setMutationProb((float)0.30);

                config.setSelectionMethod(new TournamentSelector(tournamentSize));

                config.setPopulationSize(populationSize);

                config.setMaxCrossoverDepth(4);
                config.setFitnessFunction(new CrossPareFitness(this.x, this.instances, this.output, errorType2Weight));
                config.setStrictProgramCreation(true);
            }

            // used for running the fitness function again for testing
            public Variable[] getVariables() {
                return this.x;
            }

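            /**
             * Creates the genotype: a single result-producing chromosome built from the variables,
             * the arithmetic and transcendental functions, GT, Max and a constant terminal.
             */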
            public GPGenotype create() throws InvalidConfigurationException {
                GPConfiguration config = this.getGPConfiguration();

                // return type
                Class[] types = {CommandGene.DoubleClass};

                // Arguments of result-producing chromosome: none
                Class[][] argTypes = { {} };

                // variables + functions; the variables are later bound to the attribute values of each instance by the fitness function
                CommandGene[] vars = new CommandGene[this.instances[0].length];
                for(int j=0; j < this.instances[0].length; j++) {
                    vars[j] = this.x[j];
                }
                CommandGene[] funcs = {
                    new Add(config, CommandGene.DoubleClass),
                    new Subtract(config, CommandGene.DoubleClass),
                    new Multiply(config, CommandGene.DoubleClass),
                    new Divide(config, CommandGene.DoubleClass),
                    new Sine(config, CommandGene.DoubleClass),
                    new Cosine(config, CommandGene.DoubleClass),
                    new Exp(config, CommandGene.DoubleClass),
                    new Log(config, CommandGene.DoubleClass),
                    new GT(config, CommandGene.DoubleClass),
                    new Max(config, CommandGene.DoubleClass),
                    new Terminal(config, CommandGene.DoubleClass, -100.0, 100.0, true), // min, max, whole numbers
                };

                CommandGene[] comb = (CommandGene[])ArrayUtils.addAll(vars, funcs);
                CommandGene[][] nodeSets = {
                    comb,
                };

                // we only have one chromosome so this suffices
                int[] minDepths = {config.getMinInitDepth()};
                int[] maxDepths = {this.maxDepth};
                GPGenotype result = GPGenotype.randomInitialGenotype(config, types, argTypes, nodeSets, minDepths, maxDepths, this.maxNodes, false); // maxNodes limits the program size, verbose output is disabled

                return result;
            }
        }

        /**
         * Fitness function
         */
        class CrossPareFitness extends GPFitnessFunction {

            private static final long serialVersionUID = 75234832484387L;

            private Variable[] x;

            private double[][] instances;
            private boolean[] output;

            private double errorType2Weight = 1.0;

            // needed in evaluate
            //private Object[] NO_ARGS = new Object[0];

            private double sfitness = 0.0f;
            private int errorType1 = 0;
            private int errorType2 = 0;

            public CrossPareFitness(Variable[] x, double[][] instances, boolean[] output, double errorType2Weight) {
                this.x = x;
                this.instances = instances;
                this.output = output;
                this.errorType2Weight = errorType2Weight;
            }

            public int getErrorType1() {
                return this.errorType1;
            }

            public int getErrorType2() {
                return this.errorType2;
            }

            public double getSecondFitness() {
                return this.sfitness;
            }

            public int getNumInstances() {
                return this.instances.length;
            }

            /**
             * The fitness function.
             *
             * The fitness is the weighted misclassification rate (errorType1 + errorType2Weight * errorType2) / numInstances;
             * smaller values are better, and type II errors are weighted with errorType2Weight.
             */
            @Override
            protected double evaluate(final IGPProgram program) {
                double pfitness = 0.0f;
                this.sfitness = 0.0f;
                double value = 0.0f;

                // count classification errors
                this.errorType1 = 0;
                this.errorType2 = 0;

                for(int i=0; i < this.instances.length; i++) {

                    // requires that we have a variable for each column of our dataset (attribute of instance)
                    for(int j=0; j < this.x.length; j++) {
                        this.x[j].set(this.instances[i][j]);
                    }

                    // the program yields a double; if it is < 0.5 the instance is classified as faulty
                    value = program.execute_double(0, this.x);

                    if(value < 0.5) {
                        if(!this.output[i]) {
                            this.errorType1 += 1;
                        }
                    }else {
                        if(this.output[i]) {
                            this.errorType2 += 1;
                        }
                    }
                }

                // now calculate pfitness
                pfitness = (this.errorType1 + this.errorType2Weight * this.errorType2) / this.instances.length;

                // penalize very small programs: if the program has fewer than 10 nodes
                // we store a penalty of 10 as application data (metadata on the program)
                if(program.getChromosome(0).getSize(0) < 10) {
                    program.setApplicationData(10.0f);
                }

                return pfitness;
            }
        }

        /**
         * Custom GT (greater than) function used in the GP algorithm.
         *
         * Returns 0 if the first argument is greater than the second, 1 otherwise.
         */
        public class GT extends MathCommand implements ICloneable {

            private static final long serialVersionUID = 113454184817L;

            public GT(final GPConfiguration a_conf, java.lang.Class a_returnType) throws InvalidConfigurationException {
                super(a_conf, 2, a_returnType);
            }

            public String toString() {
                return "GT(&1, &2)";
            }

            public String getName() {
                return "GT";
            }

            public float execute_float(ProgramChromosome c, int n, Object[] args) {
                float f1 = c.execute_float(n, 0, args);
                float f2 = c.execute_float(n, 1, args);

                float ret = 1.0f;
                if(f1 > f2) {
                    ret = 0.0f;
                }

                return ret;
            }

            public double execute_double(ProgramChromosome c, int n, Object[] args) {
                double f1 = c.execute_double(n, 0, args);
                double f2 = c.execute_double(n, 1, args);

                double ret = 1;
                if(f1 > f2) {
                    ret = 0;
                }
                return ret;
            }

            public Object clone() {
                try {
                    GT result = new GT(getGPConfiguration(), getReturnType());
                    return result;
                }catch(Exception ex) {
                    throw new CloneException(ex);
                }
            }
        }
    }

    /**
     * GP Multiple Data Sets Validation-Voting Classifier
     *
     * Basically the same as the GP Multiple Data Sets Validation Classifier,
     * but here we keep one model candidate per training set and let these models vote later.
     */
    public class GPVVClassifier extends GPVClassifier {

        private static final long serialVersionUID = -654710583852839901L;
        private List<Classifier> classifiers = null;

        @Override
        public void buildClassifier(Instances arg0) throws Exception {
            // not needed here; this classifier is trained set-wise via buildClassifier(SetUniqueList)
        }

        /**
         * Build the GP Multiple Data Sets Validation-Voting Classifier.
         *
         * This is according to Section 6 of the paper by Liu et al.
         * It is basically the Multiple Data Sets Validation Classifier, but here we keep the best models and let them vote.
         *
         * @param traindataSet
         * @throws Exception
         */
        public void buildClassifier(SetUniqueList<Instances> traindataSet) throws Exception {

            // each classifier is trained with one project from the set
            // and then evaluated on the rest
            classifiers = new LinkedList<>();
            for(int i=0; i < traindataSet.size(); i++) {

                // candidates we get out of evaluation
                LinkedList<Classifier> candidates = new LinkedList<>();

                // numberRuns runs; we keep the best of these
                for(int k=0; k < this.numberRuns; k++) {
                    Classifier classifier = new GPRun();
                    ((GPRun)classifier).configure(this.populationSize, this.initMinDepth, this.initMaxDepth, this.tournamentSize, this.maxGenerations, this.errorType2Weight, this.maxDepth, this.maxNodes);

                    // one project is the training data
                    classifier.buildClassifier(traindataSet.get(i));

                    double[] errors;
                    // the rest of the set is evaluation data, which we evaluate now
                    for(int j=0; j < traindataSet.size(); j++) {
                        if(j != i) {
                            // if type1 and type2 errors are < 0.5 we allow the model into the candidate list
                            errors = this.evaluate((GPRun)classifier, traindataSet.get(j));
                            if((errors[0] < 0.5) && (errors[1] < 0.5)) {
                                candidates.add(classifier);
                            }
                        }
                    }
                }

                // after the evaluation we do a model selection where only one model remains for the given training data
                // we select the model which performs best on all evaluation data
                double smallest_error_count = Double.MAX_VALUE;
                double[] errors;
                Classifier best = null;
                for(int ii=0; ii < candidates.size(); ii++) {
                    double[] errors_eval = {0.0, 0.0};

                    // we add up the errors the candidate makes over the evaluation data
                    for(int j=0; j < traindataSet.size(); j++) {
                        if(j != i) {
                            errors = this.evaluate((GPRun)candidates.get(ii), traindataSet.get(j));
                            errors_eval[0] += errors[0];
                            errors_eval[1] += errors[1];
                        }
                    }

                    // if the candidate made fewer errors it is now the best
                    if(errors_eval[0] + errors_eval[1] < smallest_error_count) {
                        best = candidates.get(ii);
                        smallest_error_count = errors_eval[0] + errors_eval[1];
                    }
                }

                // now we have the best classifier for this training data
                classifiers.add(best);
            }
        }

        /**
         * Use the best classifier of each training data set in a majority vote.
         */
        @Override
        public double classifyInstance(Instance instance) {

            int vote_positive = 0;

            for (int i = 0; i < classifiers.size(); i++) {
                Classifier classifier = classifiers.get(i);

                GPGenotype gp = ((GPRun)classifier).getGp();
                Variable[] vars = ((GPRun)classifier).getVariables();

                IGPProgram fittest = gp.getAllTimeBest(); // all-time fittest program
                for(int j = 0; j < instance.numAttributes()-1; j++) {
                    vars[j].set(instance.value(j));
                }

                if(fittest.execute_double(0, vars) < 0.5) {
                    vote_positive += 1;
                }
            }

            if(vote_positive >= (classifiers.size()/2)) {
                return 1.0;
            }else {
                return 0.0;
            }
        }
    }

    /**
     * GP Multiple Data Sets Validation Classifier
     *
     * We train a classifier on one training project, numberRuns times.
     * Then we evaluate the classifier on the rest of the training projects and keep the best classifier.
     * After that we have, for each training project, the best classifier as per the evaluation on the rest of the data set.
     * Then we determine the best classifier from these candidates and keep it to be used later.
     */
    public class GPVClassifier extends AbstractClassifier {

        private List<Classifier> classifiers = new LinkedList<>();
        private Classifier best = null;

        private static final long serialVersionUID = 3708714057579101522L;

        protected int populationSize;
        protected int initMinDepth;
        protected int initMaxDepth;
        protected int tournamentSize;
        protected int maxGenerations;
        protected double errorType2Weight;
        protected int numberRuns;
        protected int maxDepth;
        protected int maxNodes;

        /**
         * Configure the GP parameters and the number of runs.
         *
         * @param populationSize
         * @param initMinDepth
         * @param initMaxDepth
         * @param tournamentSize
         * @param maxGenerations
         * @param errorType2Weight
         * @param numberRuns
         * @param maxDepth
         * @param maxNodes
         */
        public void configure(int populationSize, int initMinDepth, int initMaxDepth, int tournamentSize, int maxGenerations, double errorType2Weight, int numberRuns, int maxDepth, int maxNodes) {
            this.populationSize = populationSize;
            this.initMinDepth = initMinDepth;
            this.initMaxDepth = initMaxDepth;
            this.tournamentSize = tournamentSize;
            this.maxGenerations = maxGenerations;
            this.errorType2Weight = errorType2Weight;
            this.numberRuns = numberRuns;
            this.maxDepth = maxDepth;
            this.maxNodes = maxNodes;
        }

        /**
         * Build the GP Multiple Data Sets Validation Classifier.
         *
         * This is according to Section 6 of the paper by Liu et al., except for the selection of the best model;
         * Section 4 describes a slightly different approach.
         *
         * @param traindataSet
         * @throws Exception
         */
        public void buildClassifier(SetUniqueList<Instances> traindataSet) throws Exception {

            // each classifier is trained with one project from the set
            // and then evaluated on the rest
            for(int i=0; i < traindataSet.size(); i++) {

                // candidates we get out of evaluation
                LinkedList<Classifier> candidates = new LinkedList<>();

                // numberRuns full GP runs; we generate numberRuns models for each training data set
                for(int k=0; k < this.numberRuns; k++) {
                    Classifier classifier = new GPRun();
                    ((GPRun)classifier).configure(this.populationSize, this.initMinDepth, this.initMaxDepth, this.tournamentSize, this.maxGenerations, this.errorType2Weight, this.maxDepth, this.maxNodes);

                    classifier.buildClassifier(traindataSet.get(i));

                    double[] errors;

                    // the rest of the set is evaluation data, which we evaluate now
                    for(int j=0; j < traindataSet.size(); j++) {
                        if(j != i) {
                            // if type1 and type2 errors are < 0.5 we allow the model into the candidate list
                            errors = this.evaluate((GPRun)classifier, traindataSet.get(j));
                            if((errors[0] < 0.5) && (errors[1] < 0.5)) {
                                candidates.add(classifier);
                            }
                        }
                    }
                }

                // after the numberRuns runs we have at most numberRuns candidate models for this training data
                // we now evaluate the candidates
                // finding the best model is not really described in the paper; we go with the fewest errors over all evaluation data
                double smallest_error_count = Double.MAX_VALUE;
                double[] errors;
                Classifier best = null;
                for(int ii=0; ii < candidates.size(); ii++) {
                    double[] errors_eval = {0.0, 0.0};
                    for(int j=0; j < traindataSet.size(); j++) {
                        if(j != i) {
                            errors = this.evaluate((GPRun)candidates.get(ii), traindataSet.get(j));
                            errors_eval[0] += errors[0];
                            errors_eval[1] += errors[1];
                        }
                    }

                    if(errors_eval[0] + errors_eval[1] < smallest_error_count) {
                        best = candidates.get(ii);
                        smallest_error_count = errors_eval[0] + errors_eval[1];
                    }
                }

                // now we have the best classifier for this training data
                classifiers.add(best);
            } /* endfor trainData */

            // now we have one best classifier for each training data set
            // we evaluate again to find the best classifier of them all
            // this selection is according to Section 4 of the paper and not Section 6, where an average of the models is built
            double smallest_error_count = Double.MAX_VALUE;
            double error_count;
            double[] errors;
            for(int j=0; j < classifiers.size(); j++) {
                error_count = 0;
                Classifier current = classifiers.get(j);
                for(int i=0; i < traindataSet.size(); i++) {
                    errors = this.evaluate((GPRun)current, traindataSet.get(i));
                    error_count += errors[0] + errors[1];
                }

                if(error_count < smallest_error_count) {
                    best = current;
                    smallest_error_count = error_count;
                }
            }
        }

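        /**
         * Trains a single GP run on the given training data and adds it to the list of classifiers.
         */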
        @Override
        public void buildClassifier(Instances traindata) throws Exception {
            final Classifier classifier = new GPRun();
            ((GPRun)classifier).configure(populationSize, initMinDepth, initMaxDepth, tournamentSize, maxGenerations, errorType2Weight, this.maxDepth, this.maxNodes);
            classifier.buildClassifier(traindata);
            classifiers.add(classifier);
        }

        /**
         * Evaluation of the classifier.
         *
         * We evaluate the classifier with the Instances of the evalData.
         * It basically assigns the instance attribute values to the variables of the s-expression tree and
         * then counts the misclassifications.
         *
         * @param classifier
         * @param evalData
         * @return the type I and type II error rates on the evaluation data
         */
        public double[] evaluate(GPRun classifier, Instances evalData) {
            GPGenotype gp = classifier.getGp();
            Variable[] vars = classifier.getVariables();

            IGPProgram fittest = gp.getAllTimeBest(); // selects the fittest program of all generations, not just the last one

            double classification;
            int error_type1 = 0;
            int error_type2 = 0;
            int positive = 0;
            int negative = 0;

            for(Instance instance: evalData) {

                // assign the instance attribute values to the variables of the s-expression tree
                double[] tmp = WekaUtils.instanceValues(instance);
                for(int i = 0; i < tmp.length; i++) {
                    vars[i].set(tmp[i]);
                }

                classification = fittest.execute_double(0, vars);

                // count the absolute numbers of positives and negatives for the error rates
                if(instance.classValue() == 1.0) {
                    positive += 1;
                }else {
                    negative += 1;
                }

                // classification < 0.5 means the program classifies the instance as defective
                if(classification < 0.5) {
                    if(instance.classValue() != 1.0) {
                        error_type1 += 1;
                    }
                }else {
                    if(instance.classValue() == 1.0) {
                        error_type2 += 1;
                    }
                }
            }

            // return the error rates for both types; the cast avoids integer division
            double et1_per = (double) error_type1 / negative;
            double et2_per = (double) error_type2 / positive;
            return new double[]{et1_per, et2_per};
        }

        /**
         * Use only the best classifier from our evaluation phase.
         */
        @Override
        public double classifyInstance(Instance instance) {
            GPGenotype gp = ((GPRun)best).getGp();
            Variable[] vars = ((GPRun)best).getVariables();

            IGPProgram fittest = gp.getAllTimeBest(); // all-time fittest program
            for(int i = 0; i < instance.numAttributes()-1; i++) {
                vars[i].set(instance.value(i));
            }

            double classification = fittest.execute_double(0, vars);

            if(classification < 0.5) {
                return 1.0;
            }else {
                return 0.0;
            }
        }
    }
}
---|