Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp;
 
@@ -33,141 +47,150 @@
 
 /**
- * Class that contains all meta information about an experiment, i.e., its configuration. The configuration is loaded from an XML file.
- * <br><br>
- * In the current implementation, the experiment configuration can only be created using an XML file. Programmatic creation of experiment configurations is currently not possibly. 
+ * Class that contains all meta information about an experiment, i.e., its configuration. The
+ * configuration is loaded from an XML file. <br>
+ * <br>
+ * In the current implementation, the experiment configuration can only be created using an XML
+ * file. Programmatic creation of experiment configurations is currently not possible.
+ * 
  * @author Steffen Herbold
  */
-public class ExperimentConfiguration  extends DefaultHandler {
-
-	/**
-	 * handle of the file that contains the configuration
-	 */
-	private final File configFile;
-	
-	/**
-	 * name of the experiment (automatically set to the file name without the .xml ending)
-	 */
-	private String experimentName = "exp";
-	
-	/**
-	 * loads instances
-	 */
-	private List<IVersionLoader> loaders;
-	
-	/**
-	 * path were the results of the experiments are stored
-	 */
-	private String resultsPath = "results";
-	
-	/**
-	 * data set filters applied to all data
-	 */
-	private List<IVersionFilter> versionFilters;
-	
-	/**
-	 * data set filters that decide if a data set is used as test data
-	 */
-	private List<IVersionFilter> testVersionFilters;
-	
-	/**
-	 * data set filters that decide if a data is used as candidate training data
-	 */
-	private List<IVersionFilter> trainingVersionFilters;
-	
-	/**
-	 * setwise data processors that are applied before the setwise data selection
-	 */
-	private List<ISetWiseProcessingStrategy> setwisepreprocessors;
-	
-	/**
-	 * setwise data selection strategies
-	 */
-	private List<ISetWiseDataselectionStrategy> setwiseselectors;
-	
-	/**
-	 * setwise data processors that are applied after the setwise data selection
-	 */
-	private List<ISetWiseProcessingStrategy> setwisepostprocessors;
-	
-	/**
-	 * setwise trainers, i.e., trainers that require the selected training data to be separate from each other
-	 */
-	private List<ISetWiseTrainingStrategy> setwiseTrainers;
-	
-	/**
-	 * data processors that are applied before the pointwise data selection
-	 */
-	private List<IProcessesingStrategy> preprocessors;
-	
-	/**
-	 * pointwise data selection strategies
-	 */
-	private List<IPointWiseDataselectionStrategy> pointwiseselectors;
-	
-	/**
-	 * data processors that are applied before the pointwise data selection
-	 */
-	private List<IProcessesingStrategy> postprocessors;
-	
-	/**
-	 * normal trainers, i.e., trainers that require the selected training data in a single data set
-	 */
-	private List<ITrainingStrategy> trainers;
-	
-	/**
-	 * evaluators used for the the experiment results
-	 */
-	private List<IEvaluationStrategy> evaluators;
-	
-	/**
-	 * indicates, if the classifier should be saved
-	 */
-	private Boolean saveClassifier = null;
-	
-	/**
-	 * indicates, which execution strategy to choose
-	 * (e.g. CrossProjectExperiment, ClassifierCreationExecution).
-	 * Default is CrossProjectExperiment.
-	 */
-	private String executionStrategy = "CrossProjectExperiment";
-	
-	/**
-	 * Constructor. Creates a new configuration from a given file. 
-	 * @param filename name of the file from the configuration is loaded.
-	 * @throws ExperimentConfigurationException thrown if there is an error creating the configuration
-	 */
-	public ExperimentConfiguration(String filename) throws ExperimentConfigurationException {
-		this(new File(filename));
-	}
-	
-	/**
-	 * Constructor. Creates a new configuration from a given file. 
-	 * @param filename handle of the file from the configuration is loaded.
-	 * @throws ExperimentConfigurationException thrown if there is an error creating the configuration
-	 */
-	public ExperimentConfiguration(File file) throws ExperimentConfigurationException {
-		loaders = new LinkedList<>();
-		versionFilters = new LinkedList<>();
-		testVersionFilters = new LinkedList<>();
-		trainingVersionFilters = new LinkedList<>();
-		setwisepreprocessors = new LinkedList<>();
-		setwiseselectors = new LinkedList<>();
-		setwisepostprocessors = new LinkedList<>();
-		setwiseTrainers = new LinkedList<>();
-		preprocessors = new LinkedList<>();
-		pointwiseselectors = new LinkedList<>();
-		postprocessors = new LinkedList<>();		
-		trainers = new LinkedList<>();
-		evaluators = new LinkedList<>();
-		
-		if (file == null) {
+public class ExperimentConfiguration extends DefaultHandler {
+
+    /**
+     * handle of the file that contains the configuration
+     */
+    private final File configFile;
+
+    /**
+     * name of the experiment (automatically set to the file name without the .xml ending)
+     */
+    private String experimentName = "exp";
+
+    /**
+     * loads instances
+     */
+    private List<IVersionLoader> loaders;
+
+    /**
+     * path where the results of the experiments are stored
+     */
+    private String resultsPath = "results";
+
+    /**
+     * data set filters applied to all data
+     */
+    private List<IVersionFilter> versionFilters;
+
+    /**
+     * data set filters that decide if a data set is used as test data
+     */
+    private List<IVersionFilter> testVersionFilters;
+
+    /**
+     * data set filters that decide if a data set is used as candidate training data
+     */
+    private List<IVersionFilter> trainingVersionFilters;
+
+    /**
+     * setwise data processors that are applied before the setwise data selection
+     */
+    private List<ISetWiseProcessingStrategy> setwisepreprocessors;
+
+    /**
+     * setwise data selection strategies
+     */
+    private List<ISetWiseDataselectionStrategy> setwiseselectors;
+
+    /**
+     * setwise data processors that are applied after the setwise data selection
+     */
+    private List<ISetWiseProcessingStrategy> setwisepostprocessors;
+
+    /**
+     * setwise trainers, i.e., trainers that require the selected training data to be separate from
+     * each other
+     */
+    private List<ISetWiseTrainingStrategy> setwiseTrainers;
+
+    /**
+     * data processors that are applied before the pointwise data selection
+     */
+    private List<IProcessesingStrategy> preprocessors;
+
+    /**
+     * pointwise data selection strategies
+     */
+    private List<IPointWiseDataselectionStrategy> pointwiseselectors;
+
+    /**
+     * data processors that are applied after the pointwise data selection
+     */
+    private List<IProcessesingStrategy> postprocessors;
+
+    /**
+     * normal trainers, i.e., trainers that require the selected training data in a single data set
+     */
+    private List<ITrainingStrategy> trainers;
+
+    /**
+     * evaluators used for the experiment results
+     */
+    private List<IEvaluationStrategy> evaluators;
+
+    /**
+     * indicates, if the classifier should be saved
+     */
+    private Boolean saveClassifier = null;
+
+    /**
+     * indicates, which execution strategy to choose (e.g. CrossProjectExperiment,
+     * ClassifierCreationExecution). Default is CrossProjectExperiment.
+     */
+    private String executionStrategy = "CrossProjectExperiment";
+
+    /**
+     * Constructor. Creates a new configuration from a given file.
+     * 
+     * @param filename
+     *            name of the file from which the configuration is loaded.
+     * @throws ExperimentConfigurationException
+     *             thrown if there is an error creating the configuration
+     */
+    public ExperimentConfiguration(String filename) throws ExperimentConfigurationException {
+        this(new File(filename));
+    }
+
+    /**
+     * Constructor. Creates a new configuration from a given file.
+     * 
+     * @param file
+     *            handle of the file from which the configuration is loaded.
+     * @throws ExperimentConfigurationException
+     *             thrown if there is an error creating the configuration
+     */
+    public ExperimentConfiguration(File file) throws ExperimentConfigurationException {
+        loaders = new LinkedList<>();
+        versionFilters = new LinkedList<>();
+        testVersionFilters = new LinkedList<>();
+        trainingVersionFilters = new LinkedList<>();
+        setwisepreprocessors = new LinkedList<>();
+        setwiseselectors = new LinkedList<>();
+        setwisepostprocessors = new LinkedList<>();
+        setwiseTrainers = new LinkedList<>();
+        preprocessors = new LinkedList<>();
+        pointwiseselectors = new LinkedList<>();
+        postprocessors = new LinkedList<>();
+        trainers = new LinkedList<>();
+        evaluators = new LinkedList<>();
+
+        if (file == null) {
             throw new IllegalArgumentException("file must not be null");
         }
-		if (file.isDirectory()) {
-			throw new IllegalArgumentException("file must not be a directory");
-		}
-		configFile = file;
-		
-		experimentName = file.getName().split("\\.")[0];
+        if (file.isDirectory()) {
+            throw new IllegalArgumentException("file must not be a directory");
+        }
+        configFile = file;
+
+        experimentName = file.getName().split("\\.")[0];
 
         final SAXParserFactory spf = SAXParserFactory.newInstance();
@@ -177,342 +200,425 @@
         InputSource inputSource = null;
         try {
-			saxParser = spf.newSAXParser();
-		} catch (ParserConfigurationException | SAXException e) {
-			throw new ExperimentConfigurationException(e);
-		}
-		
+            saxParser = spf.newSAXParser();
+        }
+        catch (ParserConfigurationException | SAXException e) {
+            throw new ExperimentConfigurationException(e);
+        }
+
         InputStreamReader reader = null;
-		try {
-			reader = new InputStreamReader(new FileInputStream(file), "UTF-8");
-			inputSource = new InputSource(reader);
-		} catch (UnsupportedEncodingException | FileNotFoundException e) {
-			throw new ExperimentConfigurationException("Could not open configuration file.", e);
-		}
-		
+        try {
+            reader = new InputStreamReader(new FileInputStream(file), "UTF-8");
+            inputSource = new InputSource(reader);
+        }
+        catch (UnsupportedEncodingException | FileNotFoundException e) {
+            throw new ExperimentConfigurationException("Could not open configuration file.", e);
+        }
+
         if (inputSource != null) {
             inputSource.setSystemId("file://" + file.getAbsolutePath());
-			try {
-				saxParser.parse(inputSource, this);
-			} catch (SAXException | IOException e) {
-				throw new ExperimentConfigurationException("Error parsing configuration.", e);
-			}
-		}
-        if( reader!=null ) {
-        	try {
-				reader.close();
-			} catch (IOException e) {
-				throw new ExperimentConfigurationException("Error closing reader.", e);
-			}
-        }
-	}
-	
-	/**
-	 * returns the name of the experiment
-	 * @return name of the experiment
-	 */
-	public String getExperimentName() {
-		return experimentName;
-	}
-	
-	/**
-	 * returns the loaders for instances
-	 * @return data loaders
-	 */
-	public List<IVersionLoader> getLoaders() {
-		return loaders;
-	}
-	
-	/**
-	 * returns the results path
-	 * @return results path
-	 */
-	public String getResultsPath() {
-		return resultsPath;
-	}
-	
-	/**
-	 * returns the data set filters of the experiment
-	 * @return data set filters of the experiment
-	 */
-	public List<IVersionFilter> getVersionFilters() {
-		return versionFilters;
-	}
-	
-	/**
-	 * returns the test set filters of the experiment
-	 * @return test set filters of the experiment
-	 */
-	public List<IVersionFilter> getTestVersionFilters() {
-		return testVersionFilters;
-	}
-	
-	/**
-	 * returns the candidate training version filters of the experiment 
-	 * @return candidate training version filters of the experiment
-	 */
-	public List<IVersionFilter> getTrainingVersionFilters() {
-		return trainingVersionFilters;
-	}
-	
-	/**
-	 * returns the setwise processors applied before the setwise data selection
-	 * @return setwise processors applied before the setwise data selection
-	 */
-	public List<ISetWiseProcessingStrategy> getSetWisePreprocessors() {
-		return setwisepreprocessors;
-	}
-	
-	/**
-	 * returns the setwise data selection strategies
-	 * @return setwise data selection strategies
-	 */
-	public List<ISetWiseDataselectionStrategy> getSetWiseSelectors() {
-		return setwiseselectors;
-	}
-	
-	/**
-	 * returns the setwise processors applied after the setwise data selection
-	 * @return setwise processors applied after the setwise data selection
-	 */
-	public List<ISetWiseProcessingStrategy> getSetWisePostprocessors() {
-		return setwisepostprocessors;
-	}
-	
-	/**
-	 * returns the setwise training algorithms
-	 * @return setwise training algorithms
-	 */
-	public List<ISetWiseTrainingStrategy> getSetWiseTrainers() {
-		return setwiseTrainers;
-	}
-	
-	/**
-	 * returns the processors applied before the pointwise data selection
-	 * @return processors applied before the pointwise data selection
-	 */
-	public List<IProcessesingStrategy> getPreProcessors() {
-		return preprocessors;
-	}
-	
-	/**
-	 * returns the pointwise data selection strategies
-	 * @return pointwise data selection strategies
-	 */
-	public List<IPointWiseDataselectionStrategy> getPointWiseSelectors() {
-		return pointwiseselectors;
-	}
-	
-	/**
-	 * returns the processors applied after the pointwise data selection
-	 * @return processors applied after the pointwise data selection
-	 */
-	public List<IProcessesingStrategy> getPostProcessors() {
-		return postprocessors;
-	}
-	
-	/**
-	 * returns the normal training algorithm 
-	 * @return normal training algorithms
-	 */
-	public List<ITrainingStrategy> getTrainers() {
-		return trainers;
-	}
-	
-	/**
-	 * returns the evaluation strategies
-	 * @return evaluation strategies
-	 */
-	public List<IEvaluationStrategy> getEvaluators() {
-		return evaluators;
-	}
-	
-	/**
-	 * returns boolean, if classifier should be saved
-	 * @return boolean
-	 */
-	public boolean getSaveClassifier() {
-		return saveClassifier;
-	}
-	
-	/**
-	 * returns the execution strategy
-	 * @return String execution strategy
-	 */
-	public String getExecutionStrategy() {
-		return executionStrategy;
-	}
-	
-	/* (non-Javadoc)
-	 * @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
-	 */
-	@Override
-	public void startElement(String uri, String localName, String qName,
-			Attributes attributes) throws SAXException {
-		try {
-			if( qName.equals("config") ) {
-				// ingore
-			}
-			else if( qName.equals("loader") ) {
-				final IVersionLoader loader = (IVersionLoader) Class.forName("de.ugoe.cs.cpdp.loader." + attributes.getValue("name")).newInstance();
-				loader.setLocation(attributes.getValue("datalocation"));
-				loaders.add(loader);
-				
-				// TODO location as relative
-			} 
-			else if( qName.equals("resultspath") ) {
-				resultsPath = attributes.getValue("path");
-			}
-			else if( qName.equals("versionfilter") ) {
-				final IVersionFilter filter = (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." + attributes.getValue("name")).newInstance();
-				filter.setParameter(attributes.getValue("param"));
-				versionFilters.add(filter);
-			}
-			else if( qName.equals("testVersionfilter") ) {
-				final IVersionFilter filter = (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." + attributes.getValue("name")).newInstance();
-				filter.setParameter(attributes.getValue("param"));
-				testVersionFilters.add(filter);
-			}
-			else if( qName.equals("trainVersionfilter") ) {
-				final IVersionFilter filter = (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." + attributes.getValue("name")).newInstance();
-				filter.setParameter(attributes.getValue("param"));
-				trainingVersionFilters.add(filter);
-			}
-			else if( qName.equals("setwisepreprocessor") ) {
-				final ISetWiseProcessingStrategy processor = (ISetWiseProcessingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." + attributes.getValue("name")).newInstance(); 
-				processor.setParameter(attributes.getValue("param"));
-				setwisepreprocessors.add(processor);
-			}
-			else if( qName.equals("setwiseselector") ) {
-				final ISetWiseDataselectionStrategy selection = (ISetWiseDataselectionStrategy) Class.forName("de.ugoe.cs.cpdp.dataselection." +  attributes.getValue("name")).newInstance();
-				selection.setParameter(attributes.getValue("param"));
-				setwiseselectors.add(selection);
-			}
-			else if( qName.equals("setwisepostprocessor") ) {
-				final ISetWiseProcessingStrategy processor = (ISetWiseProcessingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." + attributes.getValue("name")).newInstance(); 
-				processor.setParameter(attributes.getValue("param"));
-				setwisepostprocessors.add(processor);
-			}
-			else if( qName.equals("setwisetrainer") ) {
-				final ISetWiseTrainingStrategy trainer = (ISetWiseTrainingStrategy) Class.forName("de.ugoe.cs.cpdp.training." +  attributes.getValue("name")).newInstance();
-				trainer.setParameter(attributes.getValue("param"));
-				setwiseTrainers.add(trainer);
-			}
-			else if( qName.equals("preprocessor") ) {
-				final IProcessesingStrategy processor = (IProcessesingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +  attributes.getValue("name")).newInstance();
-				processor.setParameter( attributes.getValue("param"));
-				preprocessors.add(processor);
-			}
-			else if( qName.equals("pointwiseselector") ) {
-				final IPointWiseDataselectionStrategy selection = (IPointWiseDataselectionStrategy) Class.forName("de.ugoe.cs.cpdp.dataselection." +  attributes.getValue("name")).newInstance();
-				selection.setParameter( attributes.getValue("param"));
-				pointwiseselectors.add(selection);
-			}
-			else if( qName.equals("postprocessor") ) {
-				final IProcessesingStrategy processor = (IProcessesingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +  attributes.getValue("name")).newInstance();
-				processor.setParameter( attributes.getValue("param"));
-				postprocessors.add(processor);
-			}
-			else if( qName.equals("trainer") ) {
-				final ITrainingStrategy trainer = (ITrainingStrategy) Class.forName("de.ugoe.cs.cpdp.training." +  attributes.getValue("name")).newInstance();
-				trainer.setParameter(attributes.getValue("param"));
-				trainers.add(trainer);
-			}
-			else if( qName.equals("eval") ) {
-				final IEvaluationStrategy evaluator = (IEvaluationStrategy) Class.forName("de.ugoe.cs.cpdp.eval." + attributes.getValue("name")).newInstance();
-				evaluators.add(evaluator);
-			}
-			else if( qName.equals("saveClassifier")) {
-				saveClassifier = true;
-			}
-			else if( qName.equals("executionStrategy")) {
-				executionStrategy = attributes.getValue("name");
-			}
-			else if( qName.equals("partialconfig") ) {
-				String path = attributes.getValue("path");
-				try {
-					boolean relative = true;
-					if( attributes.getValue("relative")!=null ) {
-						relative = Boolean.parseBoolean(attributes.getValue("relative"));
-					}
-					
-					if( relative ) {
-						path = configFile.getParentFile().getPath() + "/" + path;
-					}
-					addConfigurations(new ExperimentConfiguration(path));
-				} catch (ExperimentConfigurationException e) {
-					throw new SAXException("Could not load partial configuration: " + path, e);
-				}	
-			} else {
-				Console.traceln(Level.WARNING, "element in config-file " +  configFile.getName() + " ignored: " + qName);
-			}
-		}
-        catch (NoClassDefFoundError | ClassNotFoundException | IllegalAccessException | InstantiationException | ClassCastException e) {
-        	throw new SAXException("Could not initialize class correctly", (Exception) e);
-        }
-	}
-	
-	/**
-	 * Adds the information of another experiment configuration to this configuration. This mechanism allows the usage of partial configuration files. The name of the other configuration is lost.
-	 * <br><br>
-	 * If the current data path is the empty string (&quot;&quot;), it is override by the datapath of the other configuration. Otherwise, the current data path is kept.
-	 * @param other experiment whose information is added
-	 * @throws ExperimentConfigurationException 
-	 */
-	private void addConfigurations(ExperimentConfiguration other) throws ExperimentConfigurationException {
-		if( "results".equals(resultsPath) ) {
-			resultsPath = other.resultsPath;
-		}
-		loaders.addAll(other.loaders);
-		versionFilters.addAll(other.versionFilters);
-		testVersionFilters.addAll(other.testVersionFilters);
-		trainingVersionFilters.addAll(other.trainingVersionFilters);
-		setwisepreprocessors.addAll(other.setwisepreprocessors);
-		setwiseselectors.addAll(other.setwiseselectors);
-		setwisepostprocessors.addAll(other.setwisepostprocessors);
-		setwiseTrainers.addAll(other.setwiseTrainers);
-		preprocessors.addAll(other.preprocessors);
-		pointwiseselectors.addAll(other.pointwiseselectors);
-		postprocessors.addAll(other.postprocessors);
-		trainers.addAll(other.trainers);
-		evaluators.addAll(other.evaluators);
-		
-		if(!executionStrategy.equals(other.executionStrategy)) {
-			throw new ExperimentConfigurationException("Executionstrategies must be the same, if config files should be added.");
-		}
-		
-		/* Only if saveClassifier is not set in the main config and
-		 * the other configs saveClassifier is true, it must be set.
-		 */
-		if(saveClassifier == null && other.saveClassifier == true) {
-			saveClassifier = other.saveClassifier;
-		}
-
-	}
-	
-	/* (non-Javadoc)
-	 * @see java.lang.Object#toString()
-	 */
-	@Override
-	public String toString() {
-		final StringBuilder builder = new StringBuilder();
-		builder.append("Experiment name: " + experimentName + StringTools.ENDLINE);
-		builder.append("Loaders: " + loaders + StringTools.ENDLINE);
-		builder.append("Results path: " + resultsPath + StringTools.ENDLINE);
-		builder.append("Version filters: " + versionFilters.toString() + StringTools.ENDLINE);
-		builder.append("Test version filters: " + testVersionFilters.toString() + StringTools.ENDLINE);
-		builder.append("Training version filters: " + trainingVersionFilters.toString() + StringTools.ENDLINE);
-		builder.append("Setwise preprocessors: " + setwisepreprocessors.toString() + StringTools.ENDLINE);
-		builder.append("Setwise selectors: " + setwiseselectors.toString() + StringTools.ENDLINE);
-		builder.append("Setwise postprocessors: " + setwisepostprocessors.toString() + StringTools.ENDLINE);
-		builder.append("Setwise trainers: " + setwiseTrainers.toString() + StringTools.ENDLINE);
-		builder.append("Pointwise preprocessors: " + preprocessors.toString() + StringTools.ENDLINE);
-		builder.append("Pointwise selectors: " + pointwiseselectors.toString() + StringTools.ENDLINE);
-		builder.append("Pointwise postprocessors: " + postprocessors.toString() + StringTools.ENDLINE);
-		builder.append("Pointwise trainers: " + trainers.toString() + StringTools.ENDLINE);
-		builder.append("Evaluators: " + evaluators.toString() + StringTools.ENDLINE);
-		builder.append("Save Classifier?: " + saveClassifier + StringTools.ENDLINE);
-		builder.append("Execution Strategy: " + executionStrategy + StringTools.ENDLINE);
-				
-		return builder.toString();
-	}
+            try {
+                saxParser.parse(inputSource, this);
+            }
+            catch (SAXException | IOException e) {
+                throw new ExperimentConfigurationException("Error parsing configuration.", e);
+            }
+        }
+        if (reader != null) {
+            try {
+                reader.close();
+            }
+            catch (IOException e) {
+                throw new ExperimentConfigurationException("Error closing reader.", e);
+            }
+        }
+    }
+
+    /**
+     * returns the name of the experiment
+     * 
+     * @return name of the experiment
+     */
+    public String getExperimentName() {
+        return experimentName;
+    }
+
+    /**
+     * returns the loaders for instances
+     * 
+     * @return data loaders
+     */
+    public List<IVersionLoader> getLoaders() {
+        return loaders;
+    }
+
+    /**
+     * returns the results path
+     * 
+     * @return results path
+     */
+    public String getResultsPath() {
+        return resultsPath;
+    }
+
+    /**
+     * returns the data set filters of the experiment
+     * 
+     * @return data set filters of the experiment
+     */
+    public List<IVersionFilter> getVersionFilters() {
+        return versionFilters;
+    }
+
+    /**
+     * returns the test set filters of the experiment
+     * 
+     * @return test set filters of the experiment
+     */
+    public List<IVersionFilter> getTestVersionFilters() {
+        return testVersionFilters;
+    }
+
+    /**
+     * returns the candidate training version filters of the experiment
+     * 
+     * @return candidate training version filters of the experiment
+     */
+    public List<IVersionFilter> getTrainingVersionFilters() {
+        return trainingVersionFilters;
+    }
+
+    /**
+     * returns the setwise processors applied before the setwise data selection
+     * 
+     * @return setwise processors applied before the setwise data selection
+     */
+    public List<ISetWiseProcessingStrategy> getSetWisePreprocessors() {
+        return setwisepreprocessors;
+    }
+
+    /**
+     * returns the setwise data selection strategies
+     * 
+     * @return setwise data selection strategies
+     */
+    public List<ISetWiseDataselectionStrategy> getSetWiseSelectors() {
+        return setwiseselectors;
+    }
+
+    /**
+     * returns the setwise processors applied after the setwise data selection
+     * 
+     * @return setwise processors applied after the setwise data selection
+     */
+    public List<ISetWiseProcessingStrategy> getSetWisePostprocessors() {
+        return setwisepostprocessors;
+    }
+
+    /**
+     * returns the setwise training algorithms
+     * 
+     * @return setwise training algorithms
+     */
+    public List<ISetWiseTrainingStrategy> getSetWiseTrainers() {
+        return setwiseTrainers;
+    }
+
+    /**
+     * returns the processors applied before the pointwise data selection
+     * 
+     * @return processors applied before the pointwise data selection
+     */
+    public List<IProcessesingStrategy> getPreProcessors() {
+        return preprocessors;
+    }
+
+    /**
+     * returns the pointwise data selection strategies
+     * 
+     * @return pointwise data selection strategies
+     */
+    public List<IPointWiseDataselectionStrategy> getPointWiseSelectors() {
+        return pointwiseselectors;
+    }
+
+    /**
+     * returns the processors applied after the pointwise data selection
+     * 
+     * @return processors applied after the pointwise data selection
+     */
+    public List<IProcessesingStrategy> getPostProcessors() {
+        return postprocessors;
+    }
+
+    /**
+     * returns the normal training algorithm
+     * 
+     * @return normal training algorithms
+     */
+    public List<ITrainingStrategy> getTrainers() {
+        return trainers;
+    }
+
+    /**
+     * returns the evaluation strategies
+     * 
+     * @return evaluation strategies
+     */
+    public List<IEvaluationStrategy> getEvaluators() {
+        return evaluators;
+    }
+
+    /**
+     * returns boolean, if classifier should be saved
+     * 
+     * @return boolean
+     */
+    public boolean getSaveClassifier() {
+        return saveClassifier;
+    }
+
+    /**
+     * returns the execution strategy
+     * 
+     * @return String execution strategy
+     */
+    public String getExecutionStrategy() {
+        return executionStrategy;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String, java.lang.String,
+     * java.lang.String, org.xml.sax.Attributes)
+     */
+    @Override
+    public void startElement(String uri, String localName, String qName, Attributes attributes)
+        throws SAXException
+    {
+        try {
+            if (qName.equals("config")) {
+                // ignore
+            }
+            else if (qName.equals("loader")) {
+                final IVersionLoader loader =
+                    (IVersionLoader) Class.forName("de.ugoe.cs.cpdp.loader." +
+                                                       attributes.getValue("name")).newInstance();
+                loader.setLocation(attributes.getValue("datalocation"));
+                loaders.add(loader);
+
+                // TODO location as relative
+            }
+            else if (qName.equals("resultspath")) {
+                resultsPath = attributes.getValue("path");
+            }
+            else if (qName.equals("versionfilter")) {
+                final IVersionFilter filter =
+                    (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." +
+                                                       attributes.getValue("name")).newInstance();
+                filter.setParameter(attributes.getValue("param"));
+                versionFilters.add(filter);
+            }
+            else if (qName.equals("testVersionfilter")) {
+                final IVersionFilter filter =
+                    (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." +
+                                                       attributes.getValue("name")).newInstance();
+                filter.setParameter(attributes.getValue("param"));
+                testVersionFilters.add(filter);
+            }
+            else if (qName.equals("trainVersionfilter")) {
+                final IVersionFilter filter =
+                    (IVersionFilter) Class.forName("de.ugoe.cs.cpdp.versions." +
+                                                       attributes.getValue("name")).newInstance();
+                filter.setParameter(attributes.getValue("param"));
+                trainingVersionFilters.add(filter);
+            }
+            else if (qName.equals("setwisepreprocessor")) {
+                final ISetWiseProcessingStrategy processor =
+                    (ISetWiseProcessingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +
+                                                                   attributes.getValue("name"))
+                        .newInstance();
+                processor.setParameter(attributes.getValue("param"));
+                setwisepreprocessors.add(processor);
+            }
+            else if (qName.equals("setwiseselector")) {
+                final ISetWiseDataselectionStrategy selection =
+                    (ISetWiseDataselectionStrategy) Class.forName("de.ugoe.cs.cpdp.dataselection." +
+                                                                      attributes.getValue("name"))
+                        .newInstance();
+                selection.setParameter(attributes.getValue("param"));
+                setwiseselectors.add(selection);
+            }
+            else if (qName.equals("setwisepostprocessor")) {
+                final ISetWiseProcessingStrategy processor =
+                    (ISetWiseProcessingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +
+                                                                   attributes.getValue("name"))
+                        .newInstance();
+                processor.setParameter(attributes.getValue("param"));
+                setwisepostprocessors.add(processor);
+            }
+            else if (qName.equals("setwisetrainer")) {
+                final ISetWiseTrainingStrategy trainer =
+                    (ISetWiseTrainingStrategy) Class.forName("de.ugoe.cs.cpdp.training." +
+                                                                 attributes.getValue("name"))
+                        .newInstance();
+                trainer.setParameter(attributes.getValue("param"));
+                setwiseTrainers.add(trainer);
+            }
+            else if (qName.equals("preprocessor")) {
+                final IProcessesingStrategy processor =
+                    (IProcessesingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +
+                                                              attributes.getValue("name"))
+                        .newInstance();
+                processor.setParameter(attributes.getValue("param"));
+                preprocessors.add(processor);
+            }
+            else if (qName.equals("pointwiseselector")) {
+                final IPointWiseDataselectionStrategy selection =
+                    (IPointWiseDataselectionStrategy) Class
+                        .forName("de.ugoe.cs.cpdp.dataselection." + attributes.getValue("name"))
+                        .newInstance();
+                selection.setParameter(attributes.getValue("param"));
+                pointwiseselectors.add(selection);
+            }
+            else if (qName.equals("postprocessor")) {
+                final IProcessesingStrategy processor =
+                    (IProcessesingStrategy) Class.forName("de.ugoe.cs.cpdp.dataprocessing." +
+                                                              attributes.getValue("name"))
+                        .newInstance();
+                processor.setParameter(attributes.getValue("param"));
+                postprocessors.add(processor);
+            }
+            else if (qName.equals("trainer")) {
+                final ITrainingStrategy trainer =
+                    (ITrainingStrategy) Class.forName("de.ugoe.cs.cpdp.training." +
+                                                          attributes.getValue("name"))
+                        .newInstance();
+                trainer.setParameter(attributes.getValue("param"));
+                trainers.add(trainer);
+            }
+            else if (qName.equals("eval")) {
+                final IEvaluationStrategy evaluator =
+                    (IEvaluationStrategy) Class.forName("de.ugoe.cs.cpdp.eval." +
+                                                            attributes.getValue("name"))
+                        .newInstance();
+                evaluators.add(evaluator);
+            }
+            else if (qName.equals("saveClassifier")) {
+                saveClassifier = true;
+            }
+            else if (qName.equals("executionStrategy")) {
+                executionStrategy = attributes.getValue("name");
+            }
+            else if (qName.equals("partialconfig")) {
+                String path = attributes.getValue("path");
+                try {
+                    boolean relative = true;
+                    if (attributes.getValue("relative") != null) {
+                        relative = Boolean.parseBoolean(attributes.getValue("relative"));
+                    }
+
+                    if (relative) {
+                        path = configFile.getParentFile().getPath() + "/" + path;
+                    }
+                    addConfigurations(new ExperimentConfiguration(path));
+                }
+                catch (ExperimentConfigurationException e) {
+                    throw new SAXException("Could not load partial configuration: " + path, e);
+                }
+            }
+            else {
+                Console.traceln(Level.WARNING, "element in config-file " + configFile.getName() +
+                    " ignored: " + qName);
+            }
+        }
+        catch (NoClassDefFoundError | ClassNotFoundException | IllegalAccessException
+                | InstantiationException | ClassCastException e)
+        {
+            throw new SAXException("Could not initialize class correctly", (Exception) e);
+        }
+    }
+
+    /**
+     * Adds the information of another experiment configuration to this configuration. This
+     * mechanism allows the usage of partial configuration files. The name of the other
+     * configuration is lost. <br>
+     * <br>
+     * If the current results path is still the default (&quot;results&quot;), it is overridden by
+     * the results path of the other configuration. Otherwise, the current results path is kept.
+     * 
+     * @param other
+     *            experiment whose information is added
+     * @throws ExperimentConfigurationException if the execution strategies of the two configurations differ
+     */
+    private void addConfigurations(ExperimentConfiguration other)
+        throws ExperimentConfigurationException
+    {
+        if ("results".equals(resultsPath)) {
+            resultsPath = other.resultsPath;
+        }
+        loaders.addAll(other.loaders);
+        versionFilters.addAll(other.versionFilters);
+        testVersionFilters.addAll(other.testVersionFilters);
+        trainingVersionFilters.addAll(other.trainingVersionFilters);
+        setwisepreprocessors.addAll(other.setwisepreprocessors);
+        setwiseselectors.addAll(other.setwiseselectors);
+        setwisepostprocessors.addAll(other.setwisepostprocessors);
+        setwiseTrainers.addAll(other.setwiseTrainers);
+        preprocessors.addAll(other.preprocessors);
+        pointwiseselectors.addAll(other.pointwiseselectors);
+        postprocessors.addAll(other.postprocessors);
+        trainers.addAll(other.trainers);
+        evaluators.addAll(other.evaluators);
+
+        if (!executionStrategy.equals(other.executionStrategy)) {
+            throw new ExperimentConfigurationException(
+                                                       "Executionstrategies must be the same, if config files should be added.");
+        }
+
+        /*
+         * The saveClassifier flag is only adopted from the other configuration if it is unset in
+         * the main configuration and the other configuration has it set to true.
+         */
+        if (saveClassifier == null && other.saveClassifier == true) {
+            saveClassifier = other.saveClassifier;
+        }
+
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see java.lang.Object#toString()
+     */
+    @Override
+    public String toString() {
+        final StringBuilder builder = new StringBuilder();
+        builder.append("Experiment name: " + experimentName + StringTools.ENDLINE);
+        builder.append("Loaders: " + loaders + StringTools.ENDLINE);
+        builder.append("Results path: " + resultsPath + StringTools.ENDLINE);
+        builder.append("Version filters: " + versionFilters.toString() + StringTools.ENDLINE);
+        builder.append("Test version filters: " + testVersionFilters.toString() +
+            StringTools.ENDLINE);
+        builder.append("Training version filters: " + trainingVersionFilters.toString() +
+            StringTools.ENDLINE);
+        builder.append("Setwise preprocessors: " + setwisepreprocessors.toString() +
+            StringTools.ENDLINE);
+        builder.append("Setwise selectors: " + setwiseselectors.toString() + StringTools.ENDLINE);
+        builder.append("Setwise postprocessors: " + setwisepostprocessors.toString() +
+            StringTools.ENDLINE);
+        builder.append("Setwise trainers: " + setwiseTrainers.toString() + StringTools.ENDLINE);
+        builder
+            .append("Pointwise preprocessors: " + preprocessors.toString() + StringTools.ENDLINE);
+        builder.append("Pointwise selectors: " + pointwiseselectors.toString() +
+            StringTools.ENDLINE);
+        builder.append("Pointwise postprocessors: " + postprocessors.toString() +
+            StringTools.ENDLINE);
+        builder.append("Pointwise trainers: " + trainers.toString() + StringTools.ENDLINE);
+        builder.append("Evaluators: " + evaluators.toString() + StringTools.ENDLINE);
+        builder.append("Save Classifier?: " + saveClassifier + StringTools.ENDLINE);
+        builder.append("Execution Strategy: " + executionStrategy + StringTools.ENDLINE);
+
+        return builder.toString();
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfigurationException.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfigurationException.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfigurationException.java	(revision 41)
@@ -1,42 +1,57 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp;
 
 /**
  * Thrown if there is an error creating an experiment configuration.
+ * 
  * @author Steffen Herbold
  */
 public class ExperimentConfigurationException extends Exception {
 
-	/**
-	 * Standard serialization ID. 
-	 */
-	private static final long serialVersionUID = 1L;
-	
-	/**
-	 * @see Exception#Exception() 
-	 */
-	public ExperimentConfigurationException() {
-		super();
-	}
-	
-	/**
-	 * @see Exception#Exception(String)
-	 */
-	public ExperimentConfigurationException(String message) {
-		super(message);
-	}
-	
-	/**
-	 * @see Exception#Exception(String, Throwable)
-	 */
-	public ExperimentConfigurationException(String message, Throwable e) {
-		super(message, e);
-	}
-	
-	/**
-	 * @see Exception#Exception(Throwable)
-	 */
-	public ExperimentConfigurationException(Throwable e) {
-		super(e);
-	}
+    /**
+     * Standard serialization ID.
+     */
+    private static final long serialVersionUID = 1L;
+
+    /**
+     * @see Exception#Exception()
+     */
+    public ExperimentConfigurationException() {
+        super();
+    }
+
+    /**
+     * @see Exception#Exception(String)
+     */
+    public ExperimentConfigurationException(String message) {
+        super(message);
+    }
+
+    /**
+     * @see Exception#Exception(String, Throwable)
+     */
+    public ExperimentConfigurationException(String message, Throwable e) {
+        super(message, e);
+    }
+
+    /**
+     * @see Exception#Exception(Throwable)
+     */
+    public ExperimentConfigurationException(Throwable e) {
+        super(e);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/IParameterizable.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/IParameterizable.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/IParameterizable.java	(revision 41)
@@ -1,15 +1,33 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp;
 
 /**
- * Interface that defines that an entity excepts a parameter string. Can be used to configure parts of an experiment. How (and if) this parameter is interpreted depends entirely on the entity.
+ * Interface that defines that an entity accepts a parameter string. Can be used to configure parts
+ * of an experiment. How (and if) this parameter is interpreted depends entirely on the entity.
+ * 
  * @author Steffen Herbold
- *
+ * 
  */
 public interface IParameterizable {
 
-	/**
-	 * Sets the parameters of an entity.
-	 * @param parameters parameters as string
-	 */
-	void setParameter(String parameters);
+    /**
+     * Sets the parameters of an entity.
+     * 
+     * @param parameters
+     *            parameters as string
+     */
+    void setParameter(String parameters);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp;
 
@@ -15,89 +29,113 @@
 /**
  * Executable that can be used to run experiments.
+ * 
  * @author Steffen Herbold
- *
+ * 
  */
 public class Runner {
-	
-	/**
-	 * Main class. The arguments are {@link ExperimentConfiguration} files. Each experiment is started in a separate thread. The number of concurrently running threads is the number of logical processors of the host system. 
-	 * @param args experiment configuration files
-	 */
-	public static void main(String[] args) {
-		new TextConsole(Level.FINE);
-		final int concurrentThreads = Runtime.getRuntime().availableProcessors()-2;
-		final ExecutorService threadPool = Executors.newFixedThreadPool(concurrentThreads);
-		for( String arg : args ) {
-			File file = new File(arg);
-			if( file.isFile() ) {
-				createConfig(threadPool, file.getAbsolutePath());
-			}
-			else if( file.isDirectory() ) {
-				for( File subfile : file.listFiles() ) {
-					if( subfile.isFile() ) {
-						createConfig(threadPool, subfile.getAbsolutePath());
-					}
-				}
-			}
-		}
-		threadPool.shutdown();
-		try {
-			threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
-		} catch (InterruptedException e) {
-			e.printStackTrace();
-		}
-	}
-	
-	/**
-	 * Creates the config and starts the corresponding experiment
-	 * @param threadPool 
-	 * @param configFile location of the config file
-	 */
-	public static void createConfig(ExecutorService threadPool, String configFile) {
-		ExperimentConfiguration config = null;
-		try {
-			config = new ExperimentConfiguration(configFile);
-		} catch (Exception e) {
-			Console.printerrln("Failure initializing the experiment configuration for configuration file " + configFile);
-			e.printStackTrace();
-		}
 
-		if( config!=null ) {
-			Console.trace(Level.FINE, config.toString());
-			// Instantiate the class like it was given as parameter in the config file and cast it to the interface
-			try {
-				// Because we need to pass a parameter, a normal new Instance call is not possible
-				Class<?> executionStrategyClass = Class.forName("de.ugoe.cs.cpdp.execution."+config.getExecutionStrategy());
-				Constructor<?> executionStrategyConstructor = 
-						executionStrategyClass.getConstructor(ExperimentConfiguration.class);
-			
-				IExecutionStrategy experiment = (IExecutionStrategy) executionStrategyConstructor.newInstance(config);
-				threadPool.execute(experiment);
-			} catch (NoSuchMethodException e) {
-				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" does not have the right Constructor");
-				e.printStackTrace();
-			} catch (SecurityException e) {
-				Console.printerrln("Security manager prevents reflection");
-				e.printStackTrace();
-			} catch (IllegalArgumentException e) {
-				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" does not have a Constructor, which"
-						+ "matches the given arguments");
-				e.printStackTrace();
-			} catch (InvocationTargetException e) {
-				Console.printerrln("Constructor in Class \"" + config.getExecutionStrategy()+ "\" is not public");
-				e.printStackTrace();
-			} catch (InstantiationException e) {
-				Console.printerrln("Cannot instantiate Class \"" + config.getExecutionStrategy()+"\"");
-				e.printStackTrace();
-			} catch (IllegalAccessException e) {
-				Console.printerrln("Cannot access Class \"" + config.getExecutionStrategy()+"\"");
-				e.printStackTrace();
-			} catch (ClassNotFoundException e) {
-				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" was not found");
-				e.printStackTrace();
-			}
-			
-		}
-		
-	}
+    /**
+     * Main class. The arguments are {@link ExperimentConfiguration} files. Each experiment is
+     * started in a separate thread. The number of concurrently running threads is the number of
+     * logical processors of the host system minus two.
+     * 
+     * @param args
+     *            experiment configuration files
+     */
+    public static void main(String[] args) {
+        new TextConsole(Level.FINE);
+        final int concurrentThreads = Runtime.getRuntime().availableProcessors() - 2;
+        final ExecutorService threadPool = Executors.newFixedThreadPool(concurrentThreads);
+        for (String arg : args) {
+            File file = new File(arg);
+            if (file.isFile()) {
+                createConfig(threadPool, file.getAbsolutePath());
+            }
+            else if (file.isDirectory()) {
+                for (File subfile : file.listFiles()) {
+                    if (subfile.isFile()) {
+                        createConfig(threadPool, subfile.getAbsolutePath());
+                    }
+                }
+            }
+        }
+        threadPool.shutdown();
+        try {
+            threadPool.awaitTermination(Long.MAX_VALUE, TimeUnit.SECONDS);
+        }
+        catch (InterruptedException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * Creates the config and starts the corresponding experiment
+     * 
+     * @param threadPool
+     * @param configFile
+     *            location of the config file
+     */
+    public static void createConfig(ExecutorService threadPool, String configFile) {
+        ExperimentConfiguration config = null;
+        try {
+            config = new ExperimentConfiguration(configFile);
+        }
+        catch (Exception e) {
+            Console
+                .printerrln("Failure initializing the experiment configuration for configuration file " +
+                    configFile);
+            e.printStackTrace();
+        }
+
+        if (config != null) {
+            Console.trace(Level.FINE, config.toString());
+            // Instantiate the class like it was given as parameter in the config file and cast it
+            // to the interface
+            try {
+                // Because we need to pass a parameter, a normal new Instance call is not possible
+                Class<?> executionStrategyClass =
+                    Class.forName("de.ugoe.cs.cpdp.execution." + config.getExecutionStrategy());
+                Constructor<?> executionStrategyConstructor =
+                    executionStrategyClass.getConstructor(ExperimentConfiguration.class);
+
+                IExecutionStrategy experiment =
+                    (IExecutionStrategy) executionStrategyConstructor.newInstance(config);
+                threadPool.execute(experiment);
+            }
+            catch (NoSuchMethodException e) {
+                Console.printerrln("Class \"" + config.getExecutionStrategy() +
+                    "\" does not have the right Constructor");
+                e.printStackTrace();
+            }
+            catch (SecurityException e) {
+                Console.printerrln("Security manager prevents reflection");
+                e.printStackTrace();
+            }
+            catch (IllegalArgumentException e) {
+                Console.printerrln("Class \"" + config.getExecutionStrategy() +
+                    "\" does not have a Constructor, which" + "matches the given arguments");
+                e.printStackTrace();
+            }
+            catch (InvocationTargetException e) {
+                Console.printerrln("Constructor in Class \"" + config.getExecutionStrategy() +
+                    "\" is not public");
+                e.printStackTrace();
+            }
+            catch (InstantiationException e) {
+                Console.printerrln("Cannot instantiate Class \"" + config.getExecutionStrategy() +
+                    "\"");
+                e.printStackTrace();
+            }
+            catch (IllegalAccessException e) {
+                Console.printerrln("Cannot access Class \"" + config.getExecutionStrategy() + "\"");
+                e.printStackTrace();
+            }
+            catch (ClassNotFoundException e) {
+                Console.printerrln("Class \"" + config.getExecutionStrategy() + "\" was not found");
+                e.printStackTrace();
+            }
+
+        }
+
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,58 +22,64 @@
 
 /**
- * Removes attributes from all data sets, except the one defined, using their name. 
+ * Removes attributes from all data sets, except the ones specified, using their names.
+ * 
  * @author Fabian Trautsch
  */
 public class AttributeNonRemoval implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * names of the attributes to be kept (determined by {@link #setParameter(String)}) 
-	 */
-	private ArrayList<String> attributeNames = new ArrayList<String>();
-	
-	/**
-	 * Sets that attributes that will be kept. The string contains the blank-separated names of the attributes to be kept.
-	 * <br><br>
-	 * Note, that keeping of attributes with blanks is currently not supported!
-	 * @param parameters string with the blank-separated attribute names
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( parameters!=null ) {
-			String[] attributeNamesArray = parameters.split(" ");
-			for(String attributeName : attributeNamesArray) {
-				attributeNames.add(attributeName);
-			}
-		}
-	}
+    /**
+     * names of the attributes to be kept (determined by {@link #setParameter(String)})
+     */
+    private ArrayList<String> attributeNames = new ArrayList<String>();
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( String attributeName : attributeNames ) {
-			for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-				if(!attributeName.equals(testdata.attribute(i).name()) ) {
-					testdata.deleteAttributeAt(i);
-					for( Instances traindata : traindataSet ) {
-						traindata.deleteAttributeAt(i);
-					}
-				}
-			}
-		}
-	}
+    /**
+     * Sets the attributes that will be kept. The string contains the blank-separated names of the
+     * attributes to be kept. <br>
+     * <br>
+     * Note that keeping attributes whose names contain blanks is currently not supported!
+     * 
+     * @param parameters
+     *            string with the blank-separated attribute names
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (parameters != null) {
+            String[] attributeNamesArray = parameters.split(" ");
+            for (String attributeName : attributeNamesArray) {
+                attributeNames.add(attributeName);
+            }
+        }
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		for(int i=testdata.numAttributes()-1; i>=0; i--) {
-			if(!attributeNames.contains(testdata.attribute(i).name())) {
-				testdata.deleteAttributeAt(i);
-				traindata.deleteAttributeAt(i);
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (String attributeName : attributeNames) {
+            for (int i = 0; i < testdata.numAttributes(); i++) {
+                if (!attributeName.equals(testdata.attribute(i).name())) {
+                    testdata.deleteAttributeAt(i);
+                    for (Instances traindata : traindataSet) {
+                        traindata.deleteAttributeAt(i);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        for (int i = testdata.numAttributes() - 1; i >= 0; i--) {
+            if (!attributeNames.contains(testdata.attribute(i).name())) {
+                testdata.deleteAttributeAt(i);
+                traindata.deleteAttributeAt(i);
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeRemoval.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeRemoval.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeRemoval.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -6,57 +20,63 @@
 
 /**
- * Removes an attributes from all data sets using their name. 
+ * Removes attributes from all data sets using their names.
+ * 
  * @author Steffen Herbold
  */
 public class AttributeRemoval implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * names of the attributes to be removed (determined by {@link #setParameter(String)}) 
-	 */
-	private String[] attributeNames = new String[]{};
-	
-	/**
-	 * Sets that attributes that will be removed. The string contains the blank-separated names of the attributes to be removed.
-	 * <br><br>
-	 * Note, that removal of attributes with blanks is currently not supported!
-	 * @param parameters string with the blank-separated attribute names
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( parameters!=null ) {
-			attributeNames = parameters.split(" ");
-		}
-	}
+    /**
+     * names of the attributes to be removed (determined by {@link #setParameter(String)})
+     */
+    private String[] attributeNames = new String[] { };
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( String attributeName : attributeNames ) {
-			for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-				if( attributeName.equals(testdata.attribute(i).name()) ) {
-					testdata.deleteAttributeAt(i);
-					for( Instances traindata : traindataSet ) {
-						traindata.deleteAttributeAt(i);
-					}
-				}
-			}
-		}
-	}
+    /**
+     * Sets the attributes that will be removed. The string contains the blank-separated names of
+     * the attributes to be removed. <br>
+     * <br>
+     * Note, that removal of attributes with blanks is currently not supported!
+     * 
+     * @param parameters
+     *            string with the blank-separated attribute names
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (parameters != null) {
+            attributeNames = parameters.split(" ");
+        }
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		for( String attributeName : attributeNames ) {
-			for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-				if( attributeName.equals(testdata.attribute(i).name()) ) {
-					testdata.deleteAttributeAt(i);
-					traindata.deleteAttributeAt(i);
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (String attributeName : attributeNames) {
+            for (int i = 0; i < testdata.numAttributes(); i++) {
+                if (attributeName.equals(testdata.attribute(i).name())) {
+                    testdata.deleteAttributeAt(i);
+                    for (Instances traindata : traindataSet) {
+                        traindata.deleteAttributeAt(i);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        for (String attributeName : attributeNames) {
+            for (int i = 0; i < testdata.numAttributes(); i++) {
+                if (attributeName.equals(testdata.attribute(i).name())) {
+                    testdata.deleteAttributeAt(i);
+                    traindata.deleteAttributeAt(i);
+                }
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AverageStandardization.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AverageStandardization.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AverageStandardization.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,89 +22,96 @@
 
 /**
- * Standardization procedure after Watanabe et al.: Adapting a Fault Prediction Model to Allow Inter Language Reuse.
- * <br><br>
- * In comparison to Watanabe et al., we transform training data instead of the test data. Otherwise, this approach would not be feasible with multiple projects.
+ * Standardization procedure after Watanabe et al.: Adapting a Fault Prediction Model to Allow Inter
+ * Language Reuse. <br>
+ * <br>
+ * In comparison to Watanabe et al., we transform training data instead of the test data. Otherwise,
+ * this approach would not be feasible with multiple projects.
+ * 
  * @author Steffen Herbold
  */
 public class AverageStandardization implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		final double[] meanTest = new double[testdata.numAttributes()];
-		
-		// get means of testdata
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				meanTest[j] = testdata.meanOrMode(j);
-			}
-		}
-		
-		// preprocess training data
-		for( Instances traindata : traindataSet ) {
-			double[] meanTrain = new double[testdata.numAttributes()];
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					meanTrain[j] = traindata.meanOrMode(j);
-				}
-			}
-			
-			for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-				Instance instance = traindata.instance(i);
-				for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-					if( testdata.attribute(j)!=classAttribute ) {
-						instance.setValue(j, instance.value(j)*meanTest[j]/meanTrain[j]);
-					}
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Attribute classAttribute = testdata.classAttribute();
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		final double[] meanTest = new double[testdata.numAttributes()];
-		
-		// get means of testdata
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				meanTest[j] = testdata.meanOrMode(j);
-			}
-		}
-		
-		// preprocess training data
-		final double[] meanTrain = new double[testdata.numAttributes()];
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				meanTrain[j] = traindata.meanOrMode(j);
-			}
-		}
-		
-		for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					instance.setValue(j, instance.value(j)*meanTest[j]/meanTrain[j]);
-				}
-			}
-		}
-	}
+        final double[] meanTest = new double[testdata.numAttributes()];
+
+        // get means of testdata
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                meanTest[j] = testdata.meanOrMode(j);
+            }
+        }
+
+        // preprocess training data
+        for (Instances traindata : traindataSet) {
+            double[] meanTrain = new double[testdata.numAttributes()];
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    meanTrain[j] = traindata.meanOrMode(j);
+                }
+            }
+
+            for (int i = 0; i < traindata.numInstances(); i++) {
+                Instance instance = traindata.instance(i);
+                for (int j = 0; j < testdata.numAttributes(); j++) {
+                    if (testdata.attribute(j) != classAttribute) {
+                        instance.setValue(j, instance.value(j) * meanTest[j] / meanTrain[j]);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+
+        final double[] meanTest = new double[testdata.numAttributes()];
+
+        // get means of testdata
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                meanTest[j] = testdata.meanOrMode(j);
+            }
+        }
+
+        // preprocess training data
+        final double[] meanTrain = new double[testdata.numAttributes()];
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                meanTrain[j] = traindata.meanOrMode(j);
+            }
+        }
+
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    instance.setValue(j, instance.value(j) * meanTest[j] / meanTrain[j]);
+                }
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/BiasedWeights.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/BiasedWeights.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/BiasedWeights.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -7,71 +21,75 @@
 
 /**
- * Sets the bias of the weights of the training data. By using a bias of 0.5 (default value) the total weight of the positive instances (i.e.
- * fault-prone) is equal to the total weight of the negative instances (i.e. non-fault-prone). Otherwise the weights between the two will be 
- * distributed according to the bias, where &lt;0.5 means in favor of the negative instances and &gt;0.5 in favor of the positive instances. 
- * equal to the total weight of the test 
+ * Sets the bias of the weights of the training data. By using a bias of 0.5 (default value) the
+ * total weight of the positive instances (i.e. fault-prone) is equal to the total weight of the
+ * negative instances (i.e. non-fault-prone). Otherwise the weights between the two will be
+ * distributed according to the bias, where &lt;0.5 means in favor of the negative instances and
+ * &gt;0.5 in favor of the positive instances.
+ * 
  * @author Steffen Herbold
  */
 public class BiasedWeights implements IProcessesingStrategy, ISetWiseProcessingStrategy {
 
-	/**
-	 * bias used for the weighting
-	 */
-	private double bias = 0.5;
-	
-	
-	/**
-	 * Sets the bias to be used for weighting.
-	 * @param parameters string with the bias
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		bias = Double.parseDouble(parameters);
-	}
+    /**
+     * bias used for the weighting
+     */
+    private double bias = 0.5;
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		//setBiasedWeights(testdata);
-		setBiasedWeights(traindata);
-	}
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( Instances traindata : traindataSet ) {
-			setBiasedWeights(traindata);
-		}
-	}
-	
-	/**
-	 * Helper method that sets the weights for a given data set.
-	 * @param data data set whose weights are set
-	 */
-	private void setBiasedWeights(Instances data) {
-		final int classIndex = data.classIndex();
-		
-		final int[] counts = data.attributeStats(classIndex).nominalCounts;
-		
-		final double weightNegatives = ((1-bias)*data.numInstances()) / counts[0];
-		final double weightPositives = (bias*data.numInstances()) / counts[1];
-		
-		
-		for( int i=0 ; i<data.numInstances() ; i++ ) {
-			Instance instance = data.instance(i);
-			if( instance.value(classIndex)==0 ) {
-				instance.setWeight(weightNegatives);
-			}
-			if( instance.value(classIndex)==1 ) {
-				instance.setWeight(weightPositives);
-			}
-		}
-	}
+    /**
+     * Sets the bias to be used for weighting.
+     * 
+     * @param parameters
+     *            string with the bias
+     */
+    @Override
+    public void setParameter(String parameters) {
+        bias = Double.parseDouble(parameters);
+    }
 
-	
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        // setBiasedWeights(testdata);
+        setBiasedWeights(traindata);
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            setBiasedWeights(traindata);
+        }
+    }
+
+    /**
+     * Helper method that sets the weights for a given data set.
+     * 
+     * @param data
+     *            data set whose weights are set
+     */
+    private void setBiasedWeights(Instances data) {
+        final int classIndex = data.classIndex();
+
+        final int[] counts = data.attributeStats(classIndex).nominalCounts;
+
+        final double weightNegatives = ((1 - bias) * data.numInstances()) / counts[0];
+        final double weightPositives = (bias * data.numInstances()) / counts[1];
+
+        for (int i = 0; i < data.numInstances(); i++) {
+            Instance instance = data.instance(i);
+            if (instance.value(classIndex) == 0) {
+                instance.setWeight(weightNegatives);
+            }
+            if (instance.value(classIndex) == 1) {
+                instance.setWeight(weightPositives);
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/DataGravitation.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/DataGravitation.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/DataGravitation.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,65 +22,75 @@
 
 /**
- * Implements an approach for data weighting suggested after Y. Ma, G. Luo, X. Zeng, and A. Chen: Transfer learning for
- * cross-company software defect prediction. The instances are weighted higher, the more attributes are within the range they are in the training data. 
+ * Implements an approach for data weighting suggested after Y. Ma, G. Luo, X. Zeng, and A. Chen:
+ * Transfer learning for cross-company software defect prediction. Training instances are weighted
+ * higher the more of their attribute values lie within the value ranges observed in the test data.
+ * 
  * @author Steffen Herbold
  */
 public class DataGravitation implements IProcessesingStrategy, ISetWiseProcessingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
-	
-	/* (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( Instances traindata : traindataSet ) {
-			apply(testdata, traindata);
-		}
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/* (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		Attribute classAtt = testdata.classAttribute();
-		
-		double[] minAttValues = new double[testdata.numAttributes()];
-		double[] maxAttValues = new double[testdata.numAttributes()];
-		double[] weights = new double[traindata.numInstances()];
-		double weightsum = 0.0;
-		
-		for( int j=0; j<testdata.numAttributes(); j++) {
-			if( testdata.attribute(j)!=classAtt ) {
-				minAttValues[j] = testdata.attributeStats(j).numericStats.min;
-				maxAttValues[j] = testdata.attributeStats(j).numericStats.max;
-			}
-		}
-		
-		for( int i=0; i<traindata.numInstances(); i++ ) {
-			Instance inst = traindata.instance(i);
-			int similar = 0;
-			for( int j=0; j<testdata.numAttributes(); j++ ) {
-				if( testdata.attribute(j)!=classAtt ) {
-					if( inst.value(j)>=minAttValues[j] && inst.value(j)<=maxAttValues[j] )  {
-						similar++;
-					}
-				}
-			}
-			weights[i] = similar/Math.sqrt(testdata.numAttributes()-similar);
-			weightsum += weights[i];
-		}
-		for( int i=0; i<traindata.numInstances(); i++ ) {
-			traindata.instance(i).setWeight(weights[i]*traindata.numInstances()/weightsum);
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances,
+     * org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            apply(testdata, traindata);
+        }
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        Attribute classAtt = testdata.classAttribute();
+
+        double[] minAttValues = new double[testdata.numAttributes()];
+        double[] maxAttValues = new double[testdata.numAttributes()];
+        double[] weights = new double[traindata.numInstances()];
+        double weightsum = 0.0;
+
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAtt) {
+                minAttValues[j] = testdata.attributeStats(j).numericStats.min;
+                maxAttValues[j] = testdata.attributeStats(j).numericStats.max;
+            }
+        }
+
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance inst = traindata.instance(i);
+            int similar = 0;
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAtt) {
+                    if (inst.value(j) >= minAttValues[j] && inst.value(j) <= maxAttValues[j]) {
+                        similar++;
+                    }
+                }
+            }
+            weights[i] = similar / Math.sqrt(testdata.numAttributes() - similar);
+            weightsum += weights[i];
+        }
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            traindata.instance(i).setWeight(weights[i] * traindata.numInstances() / weightsum);
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/IProcessesingStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/IProcessesingStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/IProcessesingStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -5,14 +19,18 @@
 
 /**
- * A data processing strategy that is applied to the test data and a single set of training data. 
+ * A data processing strategy that is applied to the test data and a single set of training data.
+ * 
  * @author Steffen Herbold
  */
 public interface IProcessesingStrategy extends IParameterizable {
-	
-	/**
-	 * Applies the processing strategy. 
-	 * @param testdata test data
-	 * @param traindata training data
-	 */
-	void apply(Instances testdata, Instances traindata);
+
+    /**
+     * Applies the processing strategy.
+     * 
+     * @param testdata
+     *            test data
+     * @param traindata
+     *            training data
+     */
+    void apply(Instances testdata, Instances traindata);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ISetWiseProcessingStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ISetWiseProcessingStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ISetWiseProcessingStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,15 +22,19 @@
 
 /**
- * A data processing strategy that is applied to the test data and a multiple sets of training data. 
+ * A data processing strategy that is applied to the test data and multiple sets of training data.
+ * 
  * @author Steffen Herbold
  */
 public interface ISetWiseProcessingStrategy extends IParameterizable {
 
-	/**
-	 * Applies the processing strategy. 
-	 * @param testdata test data
-	 * @param traindataSet training data sets
-	 */
-	void apply(Instances testdata, SetUniqueList<Instances> traindataSet);
-	
+    /**
+     * Applies the processing strategy.
+     * 
+     * @param testdata
+     *            test data
+     * @param traindataSet
+     *            training data sets
+     */
+    void apply(Instances testdata, SetUniqueList<Instances> traindataSet);
+
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/InformationGainFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/InformationGainFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/InformationGainFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -12,110 +26,126 @@
 
 /**
- * Implements an attribute filter that is based on the information gain of each attribute after Z. He, F. Peters, T. Menzies, Y. Yang: Learning from Open-Source Projects: An Empirical Study on Defect Prediction.
- * A logistic classifier is trained to separate a random sample of the training data from a random sample of the test data. As standard, the best 50% of attributes are retained. 
- * This ratio can be adjusted using the parameter of the filter (0.5 = 50%). 
- * <br><br>
- * Best means the least information gain, because this means that the attribute is similar in both test and training data.
+ * Implements an attribute filter that is based on the information gain of each attribute after Z.
+ * He, F. Peters, T. Menzies, Y. Yang: Learning from Open-Source Projects: An Empirical Study on
+ * Defect Prediction. A logistic classifier is trained to separate a random sample of the training
+ * data from a random sample of the test data. As standard, the best 50% of attributes are retained.
+ * This ratio can be adjusted using the parameter of the filter (0.5 = 50%). <br>
+ * <br>
+ * Best means the least information gain, because this means that the attribute is similar in both
+ * test and training data.
+ * 
  * @author Steffen Herbold
  */
 public class InformationGainFilter implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * size of the random sample that is drawn from both test data and training data 
-	 */
-	private final int sampleSize = 500;
-	
-	/**
-	 * ratio of features that is kept
-	 */
-	private double featureRatio = 0.5;
-	
-	/**
-	 * Sets the feature ratio. 
-	 * @param parameters feature ratio
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( !"".equals(parameters) ) {
-			featureRatio = Double.parseDouble(parameters);
-		}
-	}
+    /**
+     * size of the random sample that is drawn from both test data and training data
+     */
+    private final int sampleSize = 500;
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( Instances traindata : traindataSet ) {
-			apply(testdata, traindata, false);
-		}
-		
-	}
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		apply(testdata, traindata, true);
-	}
-	
-	/**
-	 * Internal helper function for the application of the filter to both all data set as well as a single data set.
-	 * @param testdata data of the target product
-	 * @param traindata data of the training product
-	 * @param removeFromTest defines whether the attributes shall be removed from the test data as well or not
-	 */
-	private void apply(Instances testdata, Instances traindata, boolean removeFromTest) {
-		final Random rand = new Random(1);
-		final int removalNumber = (int) (featureRatio*(testdata.numAttributes()-1));
-		
-		final int classIndex = testdata.classIndex();
-		
-		// sample instances
-		final Instances sample = new Instances(testdata);
-		for( int j=0; j<sampleSize; j++ ) {
-			Instance inst = new DenseInstance(testdata.instance(rand.nextInt(testdata.numInstances())));
-			inst.setDataset(sample);
-			inst.setClassValue(1.0);
-			sample.add(inst);
-			inst = new DenseInstance(traindata.instance(rand.nextInt(traindata.numInstances())));
-			inst.setDataset(sample);
-			inst.setClassValue(0.0);
-			sample.add(inst);
-		}
-		
-		final double[] gain = new double[sample.numAttributes()];
-		
-		final InfoGainAttributeEval gainEval = new InfoGainAttributeEval();
-		try {
-			gainEval.buildEvaluator(sample);
-			for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-				//if( sample.classAttribute().equals(sample.attribute(i)) ) {
-				//	gain[i] = 0.0;
-				//} else {
-				if( !sample.classAttribute().equals(sample.attribute(i)) ) {
-					gain[i] = gainEval.evaluateAttribute(i);
-				}
-			}
-		} catch (Exception e) {
-			//throw new RuntimeException("could not determine information gain for all attributes", e);
-			// ignore exception; it is caused by attributes that are extremely 
-		}
-		
-		// select best attributes
-		final double[] gainCopy = Arrays.copyOf(gain, gain.length);
-		Arrays.sort(gainCopy);
-		final double cutoffGain = gainCopy[testdata.numAttributes()-removalNumber];
-		
-		for( int i=testdata.numAttributes()-1; i>=0 ; i-- ) {
-			if( gain[i]>=cutoffGain && i!=classIndex) {
-				traindata.deleteAttributeAt(i);
-				if( removeFromTest ) {
-					testdata.deleteAttributeAt(i);
-				}
-			}
-		}
-	}
+    /**
+     * ratio of features that is kept
+     */
+    private double featureRatio = 0.5;
+
+    /**
+     * Sets the feature ratio.
+     * 
+     * @param parameters
+     *            feature ratio
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (!"".equals(parameters)) {
+            featureRatio = Double.parseDouble(parameters);
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            apply(testdata, traindata, false);
+        }
+
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        apply(testdata, traindata, true);
+    }
+
+    /**
+     * Internal helper function for the application of the filter to both all data set as well as a
+     * single data set.
+     * 
+     * @param testdata
+     *            data of the target product
+     * @param traindata
+     *            data of the training product
+     * @param removeFromTest
+     *            defines whether the attributes shall be removed from the test data as well or not
+     */
+    private void apply(Instances testdata, Instances traindata, boolean removeFromTest) {
+        final Random rand = new Random(1);
+        final int removalNumber = (int) (featureRatio * (testdata.numAttributes() - 1));
+
+        final int classIndex = testdata.classIndex();
+
+        // sample instances
+        final Instances sample = new Instances(testdata);
+        for (int j = 0; j < sampleSize; j++) {
+            Instance inst =
+                new DenseInstance(testdata.instance(rand.nextInt(testdata.numInstances())));
+            inst.setDataset(sample);
+            inst.setClassValue(1.0);
+            sample.add(inst);
+            inst = new DenseInstance(traindata.instance(rand.nextInt(traindata.numInstances())));
+            inst.setDataset(sample);
+            inst.setClassValue(0.0);
+            sample.add(inst);
+        }
+
+        final double[] gain = new double[sample.numAttributes()];
+
+        final InfoGainAttributeEval gainEval = new InfoGainAttributeEval();
+        try {
+            gainEval.buildEvaluator(sample);
+            for (int i = 0; i < testdata.numAttributes(); i++) {
+                // if( sample.classAttribute().equals(sample.attribute(i)) ) {
+                // gain[i] = 0.0;
+                // } else {
+                if (!sample.classAttribute().equals(sample.attribute(i))) {
+                    gain[i] = gainEval.evaluateAttribute(i);
+                }
+            }
+        }
+        catch (Exception e) {
+            // throw new RuntimeException("could not determine information gain for all attributes",
+            // e);
+            // ignore exception; it is caused by attributes that are extremely
+        }
+
+        // select best attributes
+        final double[] gainCopy = Arrays.copyOf(gain, gain.length);
+        Arrays.sort(gainCopy);
+        final double cutoffGain = gainCopy[testdata.numAttributes() - removalNumber];
+
+        for (int i = testdata.numAttributes() - 1; i >= 0; i--) {
+            if (gain[i] >= cutoffGain && i != classIndex) {
+                traindata.deleteAttributeAt(i);
+                if (removeFromTest) {
+                    testdata.deleteAttributeAt(i);
+                }
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/LogarithmTransform.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/LogarithmTransform.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/LogarithmTransform.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,92 +22,105 @@
 
 /**
- * Logarithm transformation after Carmargo Cruz and Ochimizu: Towards Logistic Regression Models for Predicting Fault-prone Code across Software Projects.
- * <br><br>
- * Transform each attribute value x into log(x+1). 
+ * Logarithm transformation after Carmargo Cruz and Ochimizu: Towards Logistic Regression Models for
+ * Predicting Fault-prone Code across Software Projects. <br>
+ * <br>
+ * Transform each attribute value x into log(x+1).
+ * 
  * @author Steffen Herbold
  */
 public class LogarithmTransform implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		// preprocess testdata
-		for( int i=0 ; i<testdata.numInstances() ; i++ ) {
-			Instance instance = testdata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute && testdata.attribute(j).isNumeric() ) {
-					if( instance.value(j) < 0 ) {
-						instance.setValue(j, (-1*(Math.log(-1*instance.value(j)))));
-					}else {
-						instance.setValue(j, Math.log(1+instance.value(j)));
-					}
-				}
-			}
-		}
-		
-		// preprocess training data
-		for( Instances traindata : traindataSet ) {
-			for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-				Instance instance = traindata.instance(i);
-				for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-					if( traindata.attribute(j)!=classAttribute && traindata.attribute(j).isNumeric() ) {
-						if( instance.value(j) < 0 ) {
-							instance.setValue(j, (-1*(Math.log(-1*instance.value(j)))));
-						}else {
-							instance.setValue(j, Math.log(1+instance.value(j)));
-						}
-					}
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Attribute classAttribute = testdata.classAttribute();
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		// preprocess testdata
-		for( int i=0 ; i<testdata.numInstances() ; i++ ) {
-			Instance instance = testdata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute && testdata.attribute(j).isNumeric() ) {
-					if( instance.value(j) < 0 ) {
-						instance.setValue(j, (-1*(Math.log(-1*instance.value(j)))));
-					}else {
-						instance.setValue(j, Math.log(1+instance.value(j)));
-					}
-				}
-			}
-		}
-		
-		// preprocess training data
-		for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( traindata.attribute(j)!=classAttribute && traindata.attribute(j).isNumeric() ) {
-					if( instance.value(j) < 0 ) {
-						instance.setValue(j, (-1*(Math.log(-1*instance.value(j)))));
-					}else {
-						instance.setValue(j, Math.log(1+instance.value(j)));
-					}
-				}
-			}
-		}
-	}
+        // preprocess testdata
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance instance = testdata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute && testdata.attribute(j).isNumeric()) {
+                    if (instance.value(j) < 0) {
+                        instance.setValue(j, (-1 * (Math.log(-1 * instance.value(j)))));
+                    }
+                    else {
+                        instance.setValue(j, Math.log(1 + instance.value(j)));
+                    }
+                }
+            }
+        }
+
+        // preprocess training data
+        for (Instances traindata : traindataSet) {
+            for (int i = 0; i < traindata.numInstances(); i++) {
+                Instance instance = traindata.instance(i);
+                for (int j = 0; j < testdata.numAttributes(); j++) {
+                    if (traindata.attribute(j) != classAttribute &&
+                        traindata.attribute(j).isNumeric())
+                    {
+                        if (instance.value(j) < 0) {
+                            instance.setValue(j, (-1 * (Math.log(-1 * instance.value(j)))));
+                        }
+                        else {
+                            instance.setValue(j, Math.log(1 + instance.value(j)));
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+
+        // preprocess testdata
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance instance = testdata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute && testdata.attribute(j).isNumeric()) {
+                    if (instance.value(j) < 0) {
+                        instance.setValue(j, (-1 * (Math.log(-1 * instance.value(j)))));
+                    }
+                    else {
+                        instance.setValue(j, Math.log(1 + instance.value(j)));
+                    }
+                }
+            }
+        }
+
+        // preprocess training data
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (traindata.attribute(j) != classAttribute && traindata.attribute(j).isNumeric())
+                {
+                    if (instance.value(j) < 0) {
+                        instance.setValue(j, (-1 * (Math.log(-1 * instance.value(j)))));
+                    }
+                    else {
+                        instance.setValue(j, Math.log(1 + instance.value(j)));
+                    }
+                }
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/MedianAsReference.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/MedianAsReference.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/MedianAsReference.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,95 +22,118 @@
 
 /**
- * Median as reference transformation after Carmargo Cruz and Ochimizu: Towards Logistic Regression Models for Predicting Fault-prone Code across Software Projects
- * <br><br>
- * For each attribute value x, the new value is x + (median of the test data - median of the current project)
+ * Median as reference transformation after Carmargo Cruz and Ochimizu: Towards Logistic Regression
+ * Models for Predicting Fault-prone Code across Software Projects <br>
+ * <br>
+ * For each attribute value x, the new value is x + (median of the test data - median of the current
+ * project)
+ * 
  * @author Steffen Herbold
  */
 public class MedianAsReference implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Attribute classAttribute = testdata.classAttribute();
-		final double[] median = new double[testdata.numAttributes()];
-		
-		// test and train have the same number of attributes
-		Attribute traindataClassAttribute;
-		double[] currentmedian = new double[testdata.numAttributes()];
-		
-		// get medians
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				median[j] = testdata.kthSmallestValue(j, (testdata.numInstances()+1)>>1); // (>>2 -> /2)
-			}
-		}
-		
-		// preprocess training data
-		for( Instances traindata : traindataSet ) {
-			// get median of current training set 
-			traindataClassAttribute = traindata.classAttribute();
-			for( int j=0 ; j<traindata.numAttributes() ; j++ ) {
-				if( traindata.attribute(j)!=traindataClassAttribute && traindata.attribute(j).isNumeric()) {
-					currentmedian[j] = traindata.kthSmallestValue(j, (traindata.numInstances()+1)>>1); // (>>2 -> /2)
-				}
-			}
-			for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-				Instance instance = traindata.instance(i);
-				for( int j=0 ; j<traindata.numAttributes() ; j++ ) {
-					if( traindata.attribute(j)!=classAttribute && traindata.attribute(j).isNumeric() ) {
-						instance.setValue(j, instance.value(j) + (median[j] - currentmedian[j]));
-					}
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Attribute classAttribute = testdata.classAttribute();
+        final double[] median = new double[testdata.numAttributes()];
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-		final Attribute traindataClassAttribute = traindata.classAttribute();
-		final double[] median = new double[testdata.numAttributes()];
+        // test and train have the same number of attributes
+        Attribute traindataClassAttribute;
+        double[] currentmedian = new double[testdata.numAttributes()];
 
-		// test and train have the same number of attributes
-		double[] currentmedian = new double[testdata.numAttributes()];
-		
-		// get medians
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				median[j] = testdata.kthSmallestValue(j, (testdata.numInstances()+1)>>1); // (>>2 -> /2)
-			}
-		}
+        // get medians
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                median[j] = testdata.kthSmallestValue(j, (testdata.numInstances() + 1) >> 1); // (>>1
+                                                                                              // ->
+                                                                                              // /2)
+            }
+        }
 
-		// get median of current training set 
-		for( int j=0 ; j<traindata.numAttributes() ; j++ ) {
-			if( traindata.attribute(j)!=traindataClassAttribute && traindata.attribute(j).isNumeric() ) {
-				currentmedian[j] = traindata.kthSmallestValue(j, (traindata.numInstances()+1)>>1); // (>>2 -> /2)
-			}
-		}
-		
-		// preprocess training data
-		for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			for( int j=0 ; j<traindata.numAttributes() ; j++ ) {
-				if( traindata.attribute(j)!=classAttribute  && traindata.attribute(j).isNumeric() ) {
-					instance.setValue(j, instance.value(j) + (median[j] - currentmedian[j]));
-				}
-			}
-		}
-	}
+        // preprocess training data
+        for (Instances traindata : traindataSet) {
+            // get median of current training set
+            traindataClassAttribute = traindata.classAttribute();
+            for (int j = 0; j < traindata.numAttributes(); j++) {
+                if (traindata.attribute(j) != traindataClassAttribute &&
+                    traindata.attribute(j).isNumeric())
+                {
+                    currentmedian[j] =
+                        traindata.kthSmallestValue(j, (traindata.numInstances() + 1) >> 1); // (>>1
+                                                                                            // ->
+                                                                                            // /2)
+                }
+            }
+            for (int i = 0; i < traindata.numInstances(); i++) {
+                Instance instance = traindata.instance(i);
+                for (int j = 0; j < traindata.numAttributes(); j++) {
+                    if (traindata.attribute(j) != classAttribute &&
+                        traindata.attribute(j).isNumeric())
+                    {
+                        instance.setValue(j, instance.value(j) + (median[j] - currentmedian[j]));
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+        final Attribute traindataClassAttribute = traindata.classAttribute();
+        final double[] median = new double[testdata.numAttributes()];
+
+        // test and train have the same number of attributes
+        double[] currentmedian = new double[testdata.numAttributes()];
+
+        // get medians
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                median[j] = testdata.kthSmallestValue(j, (testdata.numInstances() + 1) >> 1); // (>>1
+                                                                                              // ->
+                                                                                              // /2)
+            }
+        }
+
+        // get median of current training set
+        for (int j = 0; j < traindata.numAttributes(); j++) {
+            if (traindata.attribute(j) != traindataClassAttribute &&
+                traindata.attribute(j).isNumeric())
+            {
+                currentmedian[j] =
+                    traindata.kthSmallestValue(j, (traindata.numInstances() + 1) >> 1); // (>>1 ->
+                                                                                        // /2)
+            }
+        }
+
+        // preprocess training data
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            for (int j = 0; j < traindata.numAttributes(); j++) {
+                if (traindata.attribute(j) != classAttribute && traindata.attribute(j).isNumeric())
+                {
+                    instance.setValue(j, instance.value(j) + (median[j] - currentmedian[j]));
+                }
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -10,79 +24,82 @@
 
 /**
- * Filters the given dataset for an nominal attribute.
- * Every instance that has a value of the defined values of the given nominal attribute is removed.
+ * Filters the given dataset for a nominal attribute. Every instance that has a value of the
+ * defined values of the given nominal attribute is removed.
  * 
- * 	 
- * (e.g. param="CONFIDECNE low middle"; all instances where the "CONFIDENCE" attribute
- * value is "low" or "middle" are removed from the dataset)
+ * 
+ * (e.g. param="CONFIDENCE low middle"; all instances where the "CONFIDENCE" attribute value is
+ * "low" or "middle" are removed from the dataset)
  */
 
-public class NominalAttributeFilter implements IProcessesingStrategy{
+public class NominalAttributeFilter implements IProcessesingStrategy {
 
-	private String nominalAttributeName = "";
-	private String[] nominalAttributeValues = new String[]{};
-	
-	/**
-	 * Sets the nominal attribute name (first parameter) and the nominal attribute values (other 
-	 * parameters), which should be removed from the dataset.
-	 * 
-	 * @param parameters string with the blank-separated parameters (first parameter 
-	 * is the name of the nominal attribute, everything else are the values)
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( parameters!=null ) {
-			String[] parameter  = parameters.split(" ");
-			nominalAttributeName = parameter[0];
-			nominalAttributeValues = Arrays.copyOfRange(parameter, 1, parameter.length);
-		}
-	}
-	
-	/* (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		int indexOfConfidenceAttribute = -1;
-		
-		// Find index of the named confidence attribute to filter for
-		for(int i=0; i<traindata.numAttributes(); i++) {
-			if(traindata.attribute(i).name().equals(nominalAttributeName)) {
-				indexOfConfidenceAttribute = i;
-			}
-		}
-		
-		// if it was not found return
-		if(indexOfConfidenceAttribute == -1) {
-			return;
-		}
-		
-		// Find index of nominal values
-		Attribute confidenceAttribute = traindata.attribute(indexOfConfidenceAttribute);
-		ArrayList<Object> nominalValuesOfConfidenceAttribute = Collections.list(confidenceAttribute.enumerateValues());
-		ArrayList<Double> indexOfnominalAttributeValues = new ArrayList<Double>();
-		
-		
-		for(int k=0; k<nominalValuesOfConfidenceAttribute.size(); k++) {
-			for(String attributeValue : nominalAttributeValues) {
-				if(((String)nominalValuesOfConfidenceAttribute.get(k)).equals(attributeValue)) {
-					indexOfnominalAttributeValues.add((double) k);
-				}
-			}
-		}
+    private String nominalAttributeName = "";
+    private String[] nominalAttributeValues = new String[] { };
 
-		
-		
-		
-		// Go through all instances and check if nominal attribute equals 
-		for(int j=traindata.numInstances()-1; j>=0; j--) {
-			Instance wekaInstance = traindata.get(j);
-			
-			// delete all instances where nominal attribute has the value of one of the parameter
-			if(indexOfnominalAttributeValues.contains(wekaInstance.value(indexOfConfidenceAttribute))) {
-				traindata.delete(j);
-			}
-		}
-	}
+    /**
+     * Sets the nominal attribute name (first parameter) and the nominal attribute values (other
+     * parameters), which should be removed from the dataset.
+     * 
+     * @param parameters
+     *            string with the blank-separated parameters (first parameter is the name of the
+     *            nominal attribute, everything else are the values)
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (parameters != null) {
+            String[] parameter = parameters.split(" ");
+            nominalAttributeName = parameter[0];
+            nominalAttributeValues = Arrays.copyOfRange(parameter, 1, parameter.length);
+        }
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        int indexOfConfidenceAttribute = -1;
+
+        // Find index of the named confidence attribute to filter for
+        for (int i = 0; i < traindata.numAttributes(); i++) {
+            if (traindata.attribute(i).name().equals(nominalAttributeName)) {
+                indexOfConfidenceAttribute = i;
+            }
+        }
+
+        // if it was not found return
+        if (indexOfConfidenceAttribute == -1) {
+            return;
+        }
+
+        // Find index of nominal values
+        Attribute confidenceAttribute = traindata.attribute(indexOfConfidenceAttribute);
+        ArrayList<Object> nominalValuesOfConfidenceAttribute =
+            Collections.list(confidenceAttribute.enumerateValues());
+        ArrayList<Double> indexOfnominalAttributeValues = new ArrayList<Double>();
+
+        for (int k = 0; k < nominalValuesOfConfidenceAttribute.size(); k++) {
+            for (String attributeValue : nominalAttributeValues) {
+                if (((String) nominalValuesOfConfidenceAttribute.get(k)).equals(attributeValue)) {
+                    indexOfnominalAttributeValues.add((double) k);
+                }
+            }
+        }
+
+        // Go through all instances and check if nominal attribute equals
+        for (int j = traindata.numInstances() - 1; j >= 0; j--) {
+            Instance wekaInstance = traindata.get(j);
+
+            // delete all instances where nominal attribute has the value of one of the parameter
+            if (indexOfnominalAttributeValues.contains(wekaInstance
+                .value(indexOfConfidenceAttribute)))
+            {
+                traindata.delete(j);
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Normalization.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Normalization.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Normalization.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -9,95 +23,100 @@
 
 /**
- * Normalizes each attribute of each data set separately. 
+ * Normalizes each attribute of each data set separately.
+ * 
  * @author Steffen Herbold
  */
 public class Normalization implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Attribute classAtt = testdata.classAttribute();
-		
-		for( int i=0 ; i<testdata.numAttributes(); i++) {
-			if( !testdata.attribute(i).equals(classAtt) ) {
-				Stats teststats = testdata.attributeStats(i).numericStats;
-				
-				double minVal = teststats.min;
-				double maxVal = teststats.max;
-				
-				for( Instances traindata : traindataSet ) {
-					Stats trainstats = traindata.attributeStats(i).numericStats;
-					if( minVal>trainstats.min ) {
-						minVal = trainstats.min;
-					}
-					if( maxVal<trainstats.max ) {
-						maxVal = trainstats.max;
-					}
-				}
-	
-				for( int j=0 ; j<testdata.numInstances() ; j++ ) {
-					Instance inst = testdata.instance(j);
-					double newValue = (inst.value(i)-minVal)/(maxVal-minVal);
-					inst.setValue(i, newValue);
-				}
-				
-				for( Instances traindata : traindataSet ) {
-					for( int j=0 ; j<traindata.numInstances() ; j++ ) {
-						Instance inst = traindata.instance(j);
-						double newValue = (inst.value(i)-minVal)/(maxVal-minVal);
-						inst.setValue(i, newValue);
-					}
-				}
-			}
-		}
-		
-	}
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		final Attribute classAtt = testdata.classAttribute();
-		
-		for( int i=0 ; i<testdata.numAttributes(); i++) {
-			if( !testdata.attribute(i).equals(classAtt) ) {
-				Stats teststats = testdata.attributeStats(i).numericStats;
-				
-				double minVal = teststats.min;
-				double maxVal = teststats.max;
-				
-				Stats trainstats = traindata.attributeStats(i).numericStats;
-				if( minVal>trainstats.min ) {
-					minVal = trainstats.min;
-				}
-				if( maxVal<trainstats.max ) {
-					maxVal = trainstats.max;
-				}
-	
-				for( int j=0 ; j<testdata.numInstances() ; j++ ) {
-					Instance inst = testdata.instance(j);
-					double newValue = (inst.value(i)-minVal)/(maxVal-minVal);
-					inst.setValue(i, newValue);
-				}
-				
-				for( int j=0 ; j<traindata.numInstances() ; j++ ) {
-					Instance inst = traindata.instance(j);
-					double newValue = (inst.value(i)-minVal)/(maxVal-minVal);
-					inst.setValue(i, newValue);
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Attribute classAtt = testdata.classAttribute();
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// no parameters
-	}
+        for (int i = 0; i < testdata.numAttributes(); i++) {
+            if (!testdata.attribute(i).equals(classAtt)) {
+                Stats teststats = testdata.attributeStats(i).numericStats;
+
+                double minVal = teststats.min;
+                double maxVal = teststats.max;
+
+                for (Instances traindata : traindataSet) {
+                    Stats trainstats = traindata.attributeStats(i).numericStats;
+                    if (minVal > trainstats.min) {
+                        minVal = trainstats.min;
+                    }
+                    if (maxVal < trainstats.max) {
+                        maxVal = trainstats.max;
+                    }
+                }
+
+                for (int j = 0; j < testdata.numInstances(); j++) {
+                    Instance inst = testdata.instance(j);
+                    double newValue = (inst.value(i) - minVal) / (maxVal - minVal);
+                    inst.setValue(i, newValue);
+                }
+
+                for (Instances traindata : traindataSet) {
+                    for (int j = 0; j < traindata.numInstances(); j++) {
+                        Instance inst = traindata.instance(j);
+                        double newValue = (inst.value(i) - minVal) / (maxVal - minVal);
+                        inst.setValue(i, newValue);
+                    }
+                }
+            }
+        }
+
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        final Attribute classAtt = testdata.classAttribute();
+
+        for (int i = 0; i < testdata.numAttributes(); i++) {
+            if (!testdata.attribute(i).equals(classAtt)) {
+                Stats teststats = testdata.attributeStats(i).numericStats;
+
+                double minVal = teststats.min;
+                double maxVal = teststats.max;
+
+                Stats trainstats = traindata.attributeStats(i).numericStats;
+                if (minVal > trainstats.min) {
+                    minVal = trainstats.min;
+                }
+                if (maxVal < trainstats.max) {
+                    maxVal = trainstats.max;
+                }
+
+                for (int j = 0; j < testdata.numInstances(); j++) {
+                    Instance inst = testdata.instance(j);
+                    double newValue = (inst.value(i) - minVal) / (maxVal - minVal);
+                    inst.setValue(i, newValue);
+                }
+
+                for (int j = 0; j < traindata.numInstances(); j++) {
+                    Instance inst = traindata.instance(j);
+                    double newValue = (inst.value(i) - minVal) / (maxVal - minVal);
+                    inst.setValue(i, newValue);
+                }
+            }
+        }
+    }
+
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // no parameters
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Oversampling.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Oversampling.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Oversampling.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,86 +22,82 @@
 
 /**
- * Implements oversampling, a strategy for
- * handling bias in data. In case there are less positive samples (i.e.
- * defect-prone) samples in the data than negative samples (i.e.
- * non-defect-prone), the defect-prone entities are over-sampled such that the
- * number of defect-prone and non-defect-prone instances is the same afterwards.
- * This means, that some of the defect-prone entities will be more than once
- * within the data.
+ * Implements oversampling, a strategy for handling bias in data. In case there are less positive
+ * samples (i.e. defect-prone) samples in the data than negative samples (i.e. non-defect-prone),
+ * the defect-prone entities are over-sampled such that the number of defect-prone and
+ * non-defect-prone instances is the same afterwards. This means, that some of the defect-prone
+ * entities will be more than once within the data.
  * 
  * @author Steffen Herbold
  */
-public class Oversampling implements IProcessesingStrategy,
-		ISetWiseProcessingStrategy {
+public class Oversampling implements IProcessesingStrategy, ISetWiseProcessingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * 
-	 * @param parameters
-	 *            ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.
-	 * core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for (Instances traindata : traindataSet) {
-			apply(testdata, traindata);
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka. core.Instances,
+     * org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            apply(testdata, traindata);
+        }
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.
-	 * Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core. Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
 
-		final int[] counts = traindata.attributeStats(traindata.classIndex()).nominalCounts;
-		if (counts[1] < counts[0]) {
-			Instances negatives = new Instances(traindata);
-			Instances positives = new Instances(traindata);
+        final int[] counts = traindata.attributeStats(traindata.classIndex()).nominalCounts;
+        if (counts[1] < counts[0]) {
+            Instances negatives = new Instances(traindata);
+            Instances positives = new Instances(traindata);
 
-			for (int i = traindata.size() - 1; i >= 0; i--) {
-				if (Double.compare(1.0, negatives.get(i).classValue()) == 0) {
-					negatives.remove(i);
-				}
-				if (Double.compare(0.0, positives.get(i).classValue()) == 0) {
-					positives.remove(i);
-				}
-			}
+            for (int i = traindata.size() - 1; i >= 0; i--) {
+                if (Double.compare(1.0, negatives.get(i).classValue()) == 0) {
+                    negatives.remove(i);
+                }
+                if (Double.compare(0.0, positives.get(i).classValue()) == 0) {
+                    positives.remove(i);
+                }
+            }
 
-			Resample resample = new Resample();
-			// TODO: resample.setSampleSizePercent((100.0*counts[1])/100+0.01);
-			// Ohne +0.01 wird bei tomcat, xerces-1.2 und jedit-4.0 ein negative
-			// weniger zurückgegeben
-			resample.setSampleSizePercent((100.0 * counts[0]) / counts[1]);
-			try {
-				resample.setInputFormat(traindata);
-				positives = Filter.useFilter(positives, resample);
-			} catch (Exception e) {
-				throw new RuntimeException(e);
-			}
-			traindata.clear();
-			for (int i = 0; i < negatives.size(); i++) {
-				traindata.add(negatives.get(i));
-			}
-			for (int i = 0; i < positives.size(); i++) {
-				traindata.add(positives.get(i));
-			}
-		}
-	}
+            Resample resample = new Resample();
+            // TODO: resample.setSampleSizePercent((100.0*counts[1])/100+0.01);
+            // Ohne +0.01 wird bei tomcat, xerces-1.2 und jedit-4.0 ein negative
+            // weniger zurückgegeben
+            resample.setSampleSizePercent((100.0 * counts[0]) / counts[1]);
+            try {
+                resample.setInputFormat(traindata);
+                positives = Filter.useFilter(positives, resample);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            traindata.clear();
+            for (int i = 0; i < negatives.size(); i++) {
+                traindata.add(negatives.get(i));
+            }
+            for (int i = 0; i < positives.size(); i++) {
+                traindata.add(positives.get(i));
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Resampling.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Resampling.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Resampling.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,53 +22,59 @@
 
 /**
- * Resamples the data with WEKA {@link Resample} to have a uniform distribution among all classes.   
+ * Resamples the data with WEKA {@link Resample} to have a uniform distribution among all classes.
+ * 
  * @author Steffen Herbold
  */
-public class Resampling implements IProcessesingStrategy,
-		ISetWiseProcessingStrategy {
+public class Resampling implements IProcessesingStrategy, ISetWiseProcessingStrategy {
 
-	
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( Instances traindata : traindataSet ) {
-			apply(testdata, traindata);
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances,
+     * org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            apply(testdata, traindata);
+        }
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		Resample resample = new Resample();
-		resample.setSampleSizePercent(100);
-		resample.setBiasToUniformClass(1.0);
-		
-		Instances traindataSample;
-		try {
-			resample.setInputFormat(traindata);
-			traindataSample = Filter.useFilter(traindata, resample);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		}
-		traindata.clear();
-		for( int i=0 ; i<traindataSample.size() ; i++ ) {
-			traindata.add(traindataSample.get(i));
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        Resample resample = new Resample();
+        resample.setSampleSizePercent(100);
+        resample.setBiasToUniformClass(1.0);
+
+        Instances traindataSample;
+        try {
+            resample.setInputFormat(traindata);
+            traindataSample = Filter.useFilter(traindata, resample);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        traindata.clear();
+        for (int i = 0; i < traindataSample.size(); i++) {
+            traindata.add(traindataSample.get(i));
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -12,110 +26,111 @@
  * Filter for the Repast Simulation of Software Projects.
  * 
- * Filters the training dataset in the following way: If 0 is no bug
- * and 1 means there is a bug in this artifact, then this filter
- * filters the dataset in this way:
+ * Filters the training dataset in the following way: If 0 is no bug and 1 means there is a bug in
+ * this artifact, then this filter filters the dataset in this way:
  * 
- * 10010111000101110101111011101
- * x--x-x-----x-x---x-x----x---x
+ * 10010111000101110101111011101 x--x-x-----x-x---x-x----x---x
  * 
- * The instances, which are marked with x in this graphic are included
- * in the newly created dataset and form the trainingsdataset.
+ * The instances, which are marked with x in this graphic are included in the newly created dataset
+ * and form the trainingsdataset.
  * 
  * @author Fabian Trautsch
- *
+ * 
  */
 
-public class SimulationFilter implements IProcessesingStrategy{
+public class SimulationFilter implements IProcessesingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-		
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
 
-	
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		Instances newDataSet = new Instances(traindata);
-		traindata.delete();
-		
-		HashMap<Double, Instance> artifactNames = new HashMap<Double, Instance>();
-		
-		// This is to add all data, where the first occurence of the file has a bug
-		ArrayList<Double> firstOccurenceArtifactNames = new ArrayList<Double>();
-		
-		// Sort dataset (StateID is connected to the date of commit: Lower StateID
-		// means earlier commit than a higher stateID)
-		Attribute wekaAttribute = newDataSet.attribute("Artifact.Target.StateID");
-		newDataSet.sort(wekaAttribute);
-		
-		
-		/*
-		 * Logical summary:
-		 * If there is an instance that dont have a bug, put it into the hashmap (only unique values in there)
-		 * 
-		 * If there is an instance, that hava a bug look up if it is in the hashmap already (this means:
-		 * it does not had a bug before!): If this is true add it to a new dataset and remove it from
-		 * the hashmap, so that new changes from "nonBug" -> "bug" for this file can be found.
-		 * 
-		 * If the instance has a bug and is not in the hashmap (this means: The file has a bug with its
-		 * first occurence or this file only has bugs and not an instance with no bug), then (if it is
-		 * not in the arrayList above) add it to the new dataset. This way it is possible to get
-		 * the first occurence of a file, which has a bug
-		 * 
-		 */
-		for(int i=0; i<newDataSet.numInstances(); i++) {
-			Instance wekaInstance = newDataSet.instance(i);
+    }
 
-			double newBugLabel = wekaInstance.classValue();
-			Attribute wekaArtifactName = newDataSet.attribute("Artifact.Name");
-			Double artifactName = wekaInstance.value(wekaArtifactName);
-			
-			if(newBugLabel == 0.0 && artifactNames.keySet().contains(artifactName)) {
-				artifactNames.put(artifactName, wekaInstance);
-			} else if(newBugLabel == 0.0 && !artifactNames.keySet().contains(artifactName)) {
-				artifactNames.put(artifactName, wekaInstance);
-			} else if(newBugLabel == 1.0 && artifactNames.keySet().contains(artifactName)) {
-				traindata.add(wekaInstance);
-				artifactNames.remove(artifactName);
-			} else if(newBugLabel == 1.0 && !artifactNames.keySet().contains(artifactName)) {
-				if(!firstOccurenceArtifactNames.contains(artifactName)) {
-					traindata.add(wekaInstance);
-					firstOccurenceArtifactNames.add(artifactName);
-				}
-			}
-		}
-		
-		
-		// If we have a file, that never had a bug (this is, when it is NOT in the
-		// new created dataset, but it is in the HashMap from above) add it to
-		// the new dataset
-		
-		double[] artifactNamesinNewDataSet = traindata.attributeToDoubleArray(0);
-		HashMap<Double, Instance> artifactNamesCopy = new HashMap<Double, Instance>(artifactNames);
-		
-		
-		for(Double artifactName : artifactNames.keySet()) {
-	
-			for(int i=0; i<artifactNamesinNewDataSet.length; i++) {
-				if(artifactNamesinNewDataSet[i] == artifactName) {
-					artifactNamesCopy.remove(artifactName);
-				}
-			}
-		}
-		
-		for(Double artifact: artifactNamesCopy.keySet()) {
-			traindata.add(artifactNamesCopy.get(artifact));
-		}
-		
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        Instances newDataSet = new Instances(traindata);
+        traindata.delete();
+
+        HashMap<Double, Instance> artifactNames = new HashMap<Double, Instance>();
+
+        // This is to add all data, where the first occurrence of the file has a bug
+        ArrayList<Double> firstOccurenceArtifactNames = new ArrayList<Double>();
+
+        // Sort dataset (StateID is connected to the date of commit: Lower StateID
+        // means earlier commit than a higher stateID)
+        Attribute wekaAttribute = newDataSet.attribute("Artifact.Target.StateID");
+        newDataSet.sort(wekaAttribute);
+
+        /*
+         * Logical summary: If there is an instance that doesn't have a bug, put it into the hashmap
+         * (only unique values in there)
+         * 
+         * If there is an instance that has a bug, look up if it is in the hashmap already (this
+         * means: it does not had a bug before!): If this is true add it to a new dataset and remove
+         * it from the hashmap, so that new changes from "nonBug" -> "bug" for this file can be
+         * found.
+         * 
+         * If the instance has a bug and is not in the hashmap (this means: The file has a bug with
+         * its first occurence or this file only has bugs and not an instance with no bug), then (if
+         * it is not in the arrayList above) add it to the new dataset. This way it is possible to
+         * get the first occurence of a file, which has a bug
+         */
+        for (int i = 0; i < newDataSet.numInstances(); i++) {
+            Instance wekaInstance = newDataSet.instance(i);
+
+            double newBugLabel = wekaInstance.classValue();
+            Attribute wekaArtifactName = newDataSet.attribute("Artifact.Name");
+            Double artifactName = wekaInstance.value(wekaArtifactName);
+
+            if (newBugLabel == 0.0 && artifactNames.keySet().contains(artifactName)) {
+                artifactNames.put(artifactName, wekaInstance);
+            }
+            else if (newBugLabel == 0.0 && !artifactNames.keySet().contains(artifactName)) {
+                artifactNames.put(artifactName, wekaInstance);
+            }
+            else if (newBugLabel == 1.0 && artifactNames.keySet().contains(artifactName)) {
+                traindata.add(wekaInstance);
+                artifactNames.remove(artifactName);
+            }
+            else if (newBugLabel == 1.0 && !artifactNames.keySet().contains(artifactName)) {
+                if (!firstOccurenceArtifactNames.contains(artifactName)) {
+                    traindata.add(wekaInstance);
+                    firstOccurenceArtifactNames.add(artifactName);
+                }
+            }
+        }
+
+        // If we have a file, that never had a bug (this is, when it is NOT in the
+        // new created dataset, but it is in the HashMap from above) add it to
+        // the new dataset
+
+        double[] artifactNamesinNewDataSet = traindata.attributeToDoubleArray(0);
+        HashMap<Double, Instance> artifactNamesCopy = new HashMap<Double, Instance>(artifactNames);
+
+        for (Double artifactName : artifactNames.keySet()) {
+
+            for (int i = 0; i < artifactNamesinNewDataSet.length; i++) {
+                if (artifactNamesinNewDataSet[i] == artifactName) {
+                    artifactNamesCopy.remove(artifactName);
+                }
+            }
+        }
+
+        for (Double artifact : artifactNamesCopy.keySet()) {
+            traindata.add(artifactNamesCopy.get(artifact));
+        }
+
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Undersampling.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Undersampling.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/Undersampling.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -8,73 +22,82 @@
 
 /**
- * Implements undersampling, a strategy for handling bias in data. In case there are less positive samples (i.e. defect-prone) samples in the
- * data than negative samples (i.e. non-defect-prone), the non-defect-prone entities are sampled such thatthe number of defect-prone and non-defect-prone instances is the same afterwards.  
+ * Implements undersampling, a strategy for handling bias in data. In case there are less positive
+ * samples (i.e. defect-prone) samples in the data than negative samples (i.e. non-defect-prone),
+ * the non-defect-prone entities are sampled such that the number of defect-prone and
+ * non-defect-prone instances is the same afterwards.
+ * 
  * @author Steffen Herbold
  */
-public class Undersampling implements IProcessesingStrategy,
-		ISetWiseProcessingStrategy {
+public class Undersampling implements IProcessesingStrategy, ISetWiseProcessingStrategy {
 
-	
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		for( Instances traindata : traindataSet ) {
-			apply(testdata, traindata);
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy#apply(weka.core.Instances,
+     * org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        for (Instances traindata : traindataSet) {
+            apply(testdata, traindata);
+        }
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		
-		final int[] counts = traindata.attributeStats(traindata.classIndex()).nominalCounts;
-		
-		if( counts[1]<counts[0] ) {
-			Instances negatives = new Instances(traindata);
-			Instances positives = new Instances(traindata);
-			
-			for( int i=traindata.size()-1 ; i>=0 ; i-- ) {
-				if( Double.compare(1.0, negatives.get(i).classValue())==0 ) {
-					negatives.remove(i);
-				}
-				if( Double.compare(0.0, positives.get(i).classValue())==0 ) {
-					positives.remove(i);
-				}
-			}
-			
-			Resample resample = new Resample();
-			// TODO: resample.setSampleSizePercent((100.0*counts[1])/100+0.01);
-			// Ohne +0.01 wird bei tomcat, xerces-1.2 und jedit-4.0 ein negative weniger zurückgegeben
-			resample.setSampleSizePercent((100.0* counts[1])/counts[0]);
-			try {
-				resample.setInputFormat(traindata);
-				negatives = Filter.useFilter(negatives, resample);
-			} catch (Exception e) {
-				throw new RuntimeException(e);
-			}
-			traindata.clear();
-			for( int i=0 ; i<negatives.size() ; i++ ) {
-				traindata.add(negatives.get(i));
-			}
-			for( int i=0 ; i<positives.size() ; i++ ) {
-				traindata.add(positives.get(i));
-			}
-		}
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances,
+     * weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+
+        final int[] counts = traindata.attributeStats(traindata.classIndex()).nominalCounts;
+
+        if (counts[1] < counts[0]) {
+            Instances negatives = new Instances(traindata);
+            Instances positives = new Instances(traindata);
+
+            for (int i = traindata.size() - 1; i >= 0; i--) {
+                if (Double.compare(1.0, negatives.get(i).classValue()) == 0) {
+                    negatives.remove(i);
+                }
+                if (Double.compare(0.0, positives.get(i).classValue()) == 0) {
+                    positives.remove(i);
+                }
+            }
+
+            Resample resample = new Resample();
+            // TODO: resample.setSampleSizePercent((100.0*counts[1])/100+0.01);
+            // Ohne +0.01 wird bei tomcat, xerces-1.2 und jedit-4.0 ein negative weniger
+            // zurückgegeben
+            resample.setSampleSizePercent((100.0 * counts[1]) / counts[0]);
+            try {
+                resample.setInputFormat(traindata);
+                negatives = Filter.useFilter(negatives, resample);
+            }
+            catch (Exception e) {
+                throw new RuntimeException(e);
+            }
+            traindata.clear();
+            for (int i = 0; i < negatives.size(); i++) {
+                traindata.add(negatives.get(i));
+            }
+            for (int i = 0; i < positives.size(); i++) {
+                traindata.add(positives.get(i));
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreNormalization.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreNormalization.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreNormalization.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -14,60 +28,64 @@
 public class ZScoreNormalization implements ISetWiseProcessingStrategy, IProcessesingStrategy {
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		normalize(testdata);
-		for( Instances instances : traindataSet ) {
-			normalize(instances);
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        normalize(testdata);
+        for (Instances instances : traindataSet) {
+            normalize(instances);
+        }
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		normalize(testdata);
-		normalize(traindata);
-	}
-	
-	private void normalize(Instances instances) {
-		instances.toString();
-		final Attribute classAttribute = instances.classAttribute();
-		
-		final double[] means = new double[instances.numAttributes()];
-		final double[] stddevs = new double[instances.numAttributes()];
-		
-		// get means and stddevs of data
-		for( int j=0 ; j<instances.numAttributes() ; j++ ) {
-			if( instances.attribute(j)!=classAttribute ) {
-				means[j] = instances.meanOrMode(j);
-				stddevs[j] = Math.sqrt(instances.variance(j));
-			}
-		}
-		for( int i=0 ; i<instances.numAttributes(); i++) {
-			if( !instances.attribute(i).equals(classAttribute) ) {
-				for( int j=0 ; j<instances.numInstances() ; j++ ) {
-					Instance inst = instances.get(i);
-					double newValue = (inst.value(i)-means[i])/stddevs[i];
-					if( newValue==Double.NaN ) {
-						System.out.println("foooooo");
-					}
-					inst.setValue(i, newValue);
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        normalize(testdata);
+        normalize(traindata);
+    }
+
+    private void normalize(Instances instances) {
+        // z-score normalization: (x - mean) / stddev for every non-class attribute
+        final Attribute classAttribute = instances.classAttribute();
+
+        final double[] means = new double[instances.numAttributes()];
+        final double[] stddevs = new double[instances.numAttributes()];
+
+        // get means and stddevs of data
+        for (int j = 0; j < instances.numAttributes(); j++) {
+            if (instances.attribute(j) != classAttribute) {
+                means[j] = instances.meanOrMode(j);
+                stddevs[j] = Math.sqrt(instances.variance(j));
+            }
+        }
+        for (int i = 0; i < instances.numAttributes(); i++) {
+            if (!instances.attribute(i).equals(classAttribute)) {
+                for (int j = 0; j < instances.numInstances(); j++) {
+                    Instance inst = instances.get(j); // bugfix: was get(i), normalizing only the diagonal
+                    double newValue = (inst.value(i) - means[i]) / stddevs[i];
+                    if (Double.isNaN(newValue)) { // bugfix: newValue==Double.NaN is always false in Java
+                        newValue = 0.0; // constant attribute (stddev 0): define its z-score as 0
+                    }
+                    inst.setValue(i, newValue);
+                }
+            }
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreTargetNormalization.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreTargetNormalization.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/ZScoreTargetNormalization.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataprocessing;
 
@@ -12,93 +26,98 @@
  * @author Steffen Herbold
  */
-public class ZScoreTargetNormalization implements ISetWiseProcessingStrategy, IProcessesingStrategy {
+public class ZScoreTargetNormalization implements ISetWiseProcessingStrategy, IProcessesingStrategy
+{
 
-	/**
-	 * Does not have parameters. String is ignored.
-	 * @param parameters ignored
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * Does not have parameters. String is ignored.
+     * 
+     * @param parameters
+     *            ignored
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		final double[] meanTest = new double[testdata.numAttributes()];
-		final double[] stddevTest = new double[testdata.numAttributes()];
-		
-		// get means of testdata
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				meanTest[j] = testdata.meanOrMode(j);
-				stddevTest[j] = Math.sqrt(testdata.variance(j));
-			}
-		}
-		
-		// preprocess test data
-		for( int i=0 ; i<testdata.numInstances() ; i++ ) {
-			Instance instance = testdata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					instance.setValue(j, instance.value(j)-meanTest[j]/stddevTest[j]);
-				}
-			}
-		}
-		
-		// preprocess training data
-		for( Instances traindata : traindataSet ) {			
-			for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-				Instance instance = traindata.instance(i);
-				for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-					if( testdata.attribute(j)!=classAttribute ) {
-						instance.setValue(j, instance.value(j)-meanTest[j]/stddevTest[j]);
-					}
-				}
-			}
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Attribute classAttribute = testdata.classAttribute();
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		final double[] meanTest = new double[testdata.numAttributes()];
-		final double[] stddevTest = new double[testdata.numAttributes()];
-		
-		// get means of testdata
-		for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-			if( testdata.attribute(j)!=classAttribute ) {
-				meanTest[j] = testdata.meanOrMode(j);
-				stddevTest[j] = Math.sqrt(testdata.variance(j));
-			}
-		}
-		
-		// preprocess test data
-		for( int i=0 ; i<testdata.numInstances() ; i++ ) {
-			Instance instance = testdata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					instance.setValue(j, instance.value(j)-meanTest[j]/stddevTest[j]);
-				}
-			}
-		}
-		
-		// preprocess training data
-		for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			for( int j=0 ; j<testdata.numAttributes() ; j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					instance.setValue(j, instance.value(j)-meanTest[j]/stddevTest[j]);
-				}
-			}
-		}
-	}
+        final double[] meanTest = new double[testdata.numAttributes()];
+        final double[] stddevTest = new double[testdata.numAttributes()];
+
+        // get means of testdata
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                meanTest[j] = testdata.meanOrMode(j);
+                stddevTest[j] = Math.sqrt(testdata.variance(j));
+            }
+        }
+
+        // preprocess test data
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance instance = testdata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    instance.setValue(j, (instance.value(j) - meanTest[j]) / stddevTest[j]); // bugfix: precedence, was x - mean/std
+                }
+            }
+        }
+
+        // preprocess training data
+        for (Instances traindata : traindataSet) {
+            for (int i = 0; i < traindata.numInstances(); i++) {
+                Instance instance = traindata.instance(i);
+                for (int j = 0; j < testdata.numAttributes(); j++) {
+                    if (testdata.attribute(j) != classAttribute) {
+                        instance.setValue(j, (instance.value(j) - meanTest[j]) / stddevTest[j]); // bugfix: precedence
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public void apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+
+        final double[] meanTest = new double[testdata.numAttributes()];
+        final double[] stddevTest = new double[testdata.numAttributes()];
+
+        // get means of testdata
+        for (int j = 0; j < testdata.numAttributes(); j++) {
+            if (testdata.attribute(j) != classAttribute) {
+                meanTest[j] = testdata.meanOrMode(j);
+                stddevTest[j] = Math.sqrt(testdata.variance(j));
+            }
+        }
+
+        // preprocess test data
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance instance = testdata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    instance.setValue(j, (instance.value(j) - meanTest[j]) / stddevTest[j]); // bugfix: precedence, was x - mean/std
+                }
+            }
+        }
+
+        // preprocess training data
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    instance.setValue(j, (instance.value(j) - meanTest[j]) / stddevTest[j]); // bugfix: precedence
+                }
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/AbstractCharacteristicSelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/AbstractCharacteristicSelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/AbstractCharacteristicSelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -14,110 +28,135 @@
 
 /**
- * Abstract class that implements the foundation of setwise data selection strategies using distributional characteristics.
- * This class provides the means to transform the data sets into their characteristic vectors.  
+ * Abstract class that implements the foundation of setwise data selection strategies using
+ * distributional characteristics. This class provides the means to transform the data sets into
+ * their characteristic vectors.
+ * 
  * @author Steffen Herbold
  */
-public abstract class AbstractCharacteristicSelection implements
-		ISetWiseDataselectionStrategy {
+public abstract class AbstractCharacteristicSelection implements ISetWiseDataselectionStrategy {
 
-	/**
-	 * vector with the distributional characteristics 
-	 */
-	private String[] characteristics = new String[]{"mean","stddev"};
-	
-	/**
-	 * Sets the distributional characteristics. The names of the characteristics are separated by blanks. 
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( !"".equals(parameters) ) {
-			characteristics = parameters.split(" ");
-		}
-	}
-	
-	/**
-	 * Transforms the data into the distributional characteristics. The first instance is the test data, followed by the training data. 
-	 * @param testdata test data
-	 * @param traindataSet training data sets
-	 * @return distributional characteristics of the data
-	 */
-	protected Instances characteristicInstances(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		// setup weka Instances for clustering
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
-		
-		final Attribute classAtt = testdata.classAttribute();
-		for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-			Attribute dataAtt = testdata.attribute(i);
-			if( !dataAtt.equals(classAtt) ) {
-				for( String characteristic : characteristics ) {
-					atts.add(new Attribute(dataAtt.name() + "_" + characteristic));
-				}
-			}
-		}
-		final Instances data = new Instances("distributional_characteristics", atts, 0);
-		
-		// setup data for clustering
-		double[] instanceValues = new double[atts.size()];
-		for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
-			Attribute dataAtt = testdata.attribute(i);
-			if( !dataAtt.equals(classAtt) ) {
-				Stats stats = testdata.attributeStats(i).numericStats;
-				for( int j=0; j<characteristics.length; j++ ) {
-					if( "mean".equals(characteristics[j]) ) {
-						instanceValues[i*characteristics.length+j] = stats.mean;
-					} else if( "stddev".equals(characteristics[j])) {
-						instanceValues[i*characteristics.length+j] = stats.stdDev;
-					} else if( "var".equals(characteristics[j])) {
-						instanceValues[i*characteristics.length+j] = testdata.variance(j);
-					} else {
-						throw new RuntimeException("Unkown distributional characteristic: " + characteristics[j]);
-					}
-				}
-			}
-		}		
-		data.add(new DenseInstance(1.0, instanceValues));
-		
-		for( Instances traindata : traindataSet ) {
-			instanceValues = new double[atts.size()];
-			for( int i=0 ; i<traindata.numAttributes() ; i++ ) {
-				Attribute dataAtt = traindata.attribute(i);
-				if( !dataAtt.equals(classAtt) ) {
-					Stats stats = traindata.attributeStats(i).numericStats;
-					for( int j=0; j<characteristics.length; j++ ) {
-						if( "mean".equals(characteristics[j]) ) {
-							instanceValues[i*characteristics.length+j] = stats.mean;
-						} else if( "stddev".equals(characteristics[j])) {
-							instanceValues[i*characteristics.length+j] = stats.stdDev;
-						} else if( "var".equals(characteristics[j])) {
-							instanceValues[i*characteristics.length+j] = testdata.variance(j);
-						} else {
-							throw new RuntimeException("Unkown distributional characteristic: " + characteristics[j]);
-						}
-					}
-				}
-			}		
-			Instance instance = new DenseInstance(1.0, instanceValues);
-			
-			data.add(instance);
-		}
-		return data;
-	}
-	
-	/**
-	 * Returns the normalized distributional characteristics of the training data. 
-	 * @param testdata test data
-	 * @param traindataSet training data sets
-	 * @return normalized distributional characteristics of the data
-	 */
-	protected Instances normalizedCharacteristicInstances(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		Instances data = characteristicInstances(testdata, traindataSet);
-		try {
-			final Normalize normalizer = new Normalize();
-			normalizer.setInputFormat(data);
-			data = Filter.useFilter(data, normalizer);
-		} catch (Exception e) {
-			throw new RuntimeException("Unexpected exception during normalization of distributional characteristics.", e);
-		}
-		return data;
-	}
+    /**
+     * vector with the distributional characteristics
+     */
+    private String[] characteristics = new String[]
+        { "mean", "stddev" };
+
+    /**
+     * Sets the distributional characteristics. The names of the characteristics are separated by
+     * blanks.
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (!"".equals(parameters)) {
+            characteristics = parameters.split(" ");
+        }
+    }
+
+    /**
+     * Transforms the data into the distributional characteristics. The first instance is the test
+     * data, followed by the training data.
+     * 
+     * @param testdata
+     *            test data
+     * @param traindataSet
+     *            training data sets
+     * @return distributional characteristics of the data
+     */
+    protected Instances characteristicInstances(Instances testdata,
+                                                SetUniqueList<Instances> traindataSet)
+    {
+        // setup weka Instances for clustering
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+
+        final Attribute classAtt = testdata.classAttribute();
+        for (int i = 0; i < testdata.numAttributes(); i++) {
+            Attribute dataAtt = testdata.attribute(i);
+            if (!dataAtt.equals(classAtt)) {
+                for (String characteristic : characteristics) {
+                    atts.add(new Attribute(dataAtt.name() + "_" + characteristic));
+                }
+            }
+        }
+        final Instances data = new Instances("distributional_characteristics", atts, 0);
+
+        // setup data for clustering
+        double[] instanceValues = new double[atts.size()];
+        for (int i = 0; i < testdata.numAttributes(); i++) {
+            Attribute dataAtt = testdata.attribute(i);
+            if (!dataAtt.equals(classAtt)) {
+                Stats stats = testdata.attributeStats(i).numericStats;
+                for (int j = 0; j < characteristics.length; j++) {
+                    if ("mean".equals(characteristics[j])) {
+                        instanceValues[i * characteristics.length + j] = stats.mean;
+                    }
+                    else if ("stddev".equals(characteristics[j])) {
+                        instanceValues[i * characteristics.length + j] = stats.stdDev;
+                    }
+                    else if ("var".equals(characteristics[j])) {
+                        instanceValues[i * characteristics.length + j] = testdata.variance(i); // bugfix: was variance(j); j indexes characteristics, not attributes
+                    }
+                    else {
+                        throw new RuntimeException("Unknown distributional characteristic: " +
+                            characteristics[j]);
+                    }
+                }
+            }
+        }
+        data.add(new DenseInstance(1.0, instanceValues));
+
+        for (Instances traindata : traindataSet) {
+            instanceValues = new double[atts.size()];
+            for (int i = 0; i < traindata.numAttributes(); i++) {
+                Attribute dataAtt = traindata.attribute(i);
+                if (!dataAtt.equals(classAtt)) {
+                    Stats stats = traindata.attributeStats(i).numericStats;
+                    for (int j = 0; j < characteristics.length; j++) {
+                        if ("mean".equals(characteristics[j])) {
+                            instanceValues[i * characteristics.length + j] = stats.mean;
+                        }
+                        else if ("stddev".equals(characteristics[j])) {
+                            instanceValues[i * characteristics.length + j] = stats.stdDev;
+                        }
+                        else if ("var".equals(characteristics[j])) {
+                            instanceValues[i * characteristics.length + j] = traindata.variance(i); // bugfix: was testdata.variance(j) — wrong dataset and wrong index
+                        }
+                        else {
+                            throw new RuntimeException("Unknown distributional characteristic: " +
+                                characteristics[j]);
+                        }
+                    }
+                }
+            }
+            Instance instance = new DenseInstance(1.0, instanceValues);
+
+            data.add(instance);
+        }
+        return data;
+    }
+
+    /**
+     * Returns the normalized distributional characteristics of the training data.
+     * 
+     * @param testdata
+     *            test data
+     * @param traindataSet
+     *            training data sets
+     * @return normalized distributional characteristics of the data
+     */
+    protected Instances normalizedCharacteristicInstances(Instances testdata,
+                                                          SetUniqueList<Instances> traindataSet)
+    {
+        Instances data = characteristicInstances(testdata, traindataSet);
+        try {
+            final Normalize normalizer = new Normalize();
+            normalizer.setInputFormat(data);
+            data = Filter.useFilter(data, normalizer);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(
+                                       "Unexpected exception during normalization of distributional characteristics.",
+                                       e);
+        }
+        return data;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/IPointWiseDataselectionStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/IPointWiseDataselectionStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/IPointWiseDataselectionStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -6,15 +20,19 @@
 
 /**
- * Interface for pointwise data selection strategies. 
+ * Interface for pointwise data selection strategies.
+ * 
  * @author Steffen Herbold
  */
 public interface IPointWiseDataselectionStrategy extends IParameterizable {
 
-	/**
-	 * Applies the data selection strategy. 
-	 * @param testdata test data
-	 * @param traindata candidate training data
-	 * @return the selected training data
-	 */
-	Instances apply(Instances testdata, Instances traindata);
+    /**
+     * Applies the data selection strategy.
+     * 
+     * @param testdata
+     *            test data
+     * @param traindata
+     *            candidate training data
+     * @return the selected training data
+     */
+    Instances apply(Instances testdata, Instances traindata);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/ISetWiseDataselectionStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/ISetWiseDataselectionStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/ISetWiseDataselectionStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -9,13 +23,17 @@
 /**
  * Interface for setwise data selection strategies.
+ * 
  * @author Steffen Herbold
  */
 public interface ISetWiseDataselectionStrategy extends IParameterizable {
 
-	/**
-	 * Applies a setwise data selection strategy. 
-	 * @param testdata test data for which the training data is selected
-	 * @param traindataSet candidate training data
-	 */
-	void apply(Instances testdata, SetUniqueList<Instances> traindataSet);
+    /**
+     * Applies a setwise data selection strategy.
+     * 
+     * @param testdata
+     *            test data for which the training data is selected
+     * @param traindataSet
+     *            candidate training data
+     */
+    void apply(Instances testdata, SetUniqueList<Instances> traindataSet);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PetersFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PetersFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PetersFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -13,7 +27,8 @@
 
 /**
- * Filter according to F. Peters, T. Menzies, and A. Marcus: Better Cross Company Defect Prediction
- * <br><br>
- * This filter does not work, the paper has been withdrawn. 
+ * Filter according to F. Peters, T. Menzies, and A. Marcus: Better Cross Company Defect Prediction <br>
+ * <br>
+ * This filter does not work, the paper has been withdrawn.
+ * 
  * @author Steffen Herbold
  */
@@ -21,83 +36,86 @@
 public class PetersFilter implements IPointWiseDataselectionStrategy {
 
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.IParameterizable#setParameter(java.lang.String)
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.IParameterizable#setParameter(java.lang.String)
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataselection.IPointWiseDataselectionStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public Instances apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-			
-		final double[][] testDoubles = new double[testdata.numInstances()][testdata.numAttributes()];
-		for( int i=0; i<testdata.numInstances() ; i++ ) {
-			Instance instance = testdata.instance(i);
-			int tmp = 0;
-			for( int j=0 ; j<testdata.numAttributes(); j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					testDoubles[i][tmp++] = instance.value(j);
-				}
-			}
-		}
-		
-		final double[][] trainDoubles = new double[traindata.numInstances()][testdata.numAttributes()];
-		for( int i=0; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			int tmp = 0;
-			for( int j=0 ; j<testdata.numAttributes(); j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					trainDoubles[i][tmp++] = instance.value(j);
-				}
-			}
-		}
-		
-		final List<List<Integer>> fanList = new ArrayList<List<Integer>>(testdata.numInstances());
-		for( int i=0; i<testdata.numInstances(); i++ ) {
-			fanList.add(new LinkedList<Integer>());
-		}
-		
-		for( int i=0; i<traindata.numInstances(); i++ ) {
-			double minDistance = Double.MAX_VALUE;
-			int minIndex = 0;
-			for( int j=0; j<testdata.numInstances(); j++ ) {
-				double distance = MathArrays.distance(trainDoubles[i], testDoubles[j]);
-				if( distance<minDistance ) {
-					minDistance = distance;
-					minIndex = j;
-				}
-			}
-			fanList.get(minIndex).add(i);
-		}
-		
-		final SetUniqueList<Integer> selectedIndex = SetUniqueList.setUniqueList(new LinkedList<Integer>());
-		for( int i=0; i<testdata.numInstances(); i++ ) {
-			double minDistance = Double.MAX_VALUE;
-			int minIndex = -1;
-			for( Integer j : fanList.get(i) ) {
-				double distance = MathArrays.distance(testDoubles[i], trainDoubles[j]);
-				if( distance<minDistance && distance>0.0d ) {
-					minDistance = distance;
-					minIndex = j;
-				}
-			}
-			if( minIndex!=-1 ) {
-				selectedIndex.add(minIndex);
-			}
-		}
-		
-		final Instances selected = new Instances(testdata);
-		selected.delete();
-		for( Integer i : selectedIndex) {
-			selected.add(traindata.instance(i));
-		}
-		return selected;
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.dataselection.IPointWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public Instances apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+
+        final double[][] testDoubles =
+            new double[testdata.numInstances()][testdata.numAttributes()];
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance instance = testdata.instance(i);
+            int tmp = 0;
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    testDoubles[i][tmp++] = instance.value(j);
+                }
+            }
+        }
+
+        final double[][] trainDoubles =
+            new double[traindata.numInstances()][testdata.numAttributes()];
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            int tmp = 0;
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    trainDoubles[i][tmp++] = instance.value(j);
+                }
+            }
+        }
+
+        final List<List<Integer>> fanList = new ArrayList<List<Integer>>(testdata.numInstances());
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            fanList.add(new LinkedList<Integer>());
+        }
+
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            double minDistance = Double.MAX_VALUE;
+            int minIndex = 0;
+            for (int j = 0; j < testdata.numInstances(); j++) {
+                double distance = MathArrays.distance(trainDoubles[i], testDoubles[j]);
+                if (distance < minDistance) {
+                    minDistance = distance;
+                    minIndex = j;
+                }
+            }
+            fanList.get(minIndex).add(i);
+        }
+
+        final SetUniqueList<Integer> selectedIndex =
+            SetUniqueList.setUniqueList(new LinkedList<Integer>());
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            double minDistance = Double.MAX_VALUE;
+            int minIndex = -1;
+            for (Integer j : fanList.get(i)) {
+                double distance = MathArrays.distance(testDoubles[i], trainDoubles[j]);
+                if (distance < minDistance && distance > 0.0d) {
+                    minDistance = distance;
+                    minIndex = j;
+                }
+            }
+            if (minIndex != -1) {
+                selectedIndex.add(minIndex);
+            }
+        }
+
+        final Instances selected = new Instances(testdata);
+        selected.delete();
+        for (Integer i : selectedIndex) {
+            selected.add(traindata.instance(i));
+        }
+        return selected;
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PointWiseEMClusterSelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PointWiseEMClusterSelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/PointWiseEMClusterSelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -14,128 +28,132 @@
 import de.ugoe.cs.util.console.Console;
 
-
 /**
  * Use in Config:
  * 
- * Specify number of clusters
- * -N = Num Clusters
- * <pointwiseselector name="PointWiseEMClusterSelection" param="-N 10"/>
- *
- * Try to determine the number of clusters:
- * -I 10 = max iterations
- * -X 5 = 5 folds for cross evaluation
- * -max = max number of clusters
- * <pointwiseselector name="PointWiseEMClusterSelection" param="-I 10 -X 5 -max 300"/>
+ * Specify number of clusters -N = Num Clusters <pointwiseselector
+ * name="PointWiseEMClusterSelection" param="-N 10"/>
  * 
- * Don't forget to add:
- * <preprocessor name="Normalization" param=""/>
+ * Try to determine the number of clusters: -I 10 = max iterations -X 5 = 5 folds for cross
+ * evaluation -max = max number of clusters <pointwiseselector name="PointWiseEMClusterSelection"
+ * param="-I 10 -X 5 -max 300"/>
+ * 
+ * Don't forget to add: <preprocessor name="Normalization" param=""/>
  */
 public class PointWiseEMClusterSelection implements IPointWiseDataselectionStrategy {
-	
-	private String[] params; 
-	
-	@Override
-	public void setParameter(String parameters) {
-		params = parameters.split(" ");
-	}
 
-	
-	/**
-	 * 1. Cluster the traindata
-	 * 2. for each instance in the testdata find the assigned cluster
-	 * 3. select only traindata from the clusters we found in our testdata
-	 * 
-	 * @returns the selected training data
-	 */
-	@Override
-	public Instances apply(Instances testdata, Instances traindata) {
-		//final Attribute classAttribute = testdata.classAttribute();
-		
-		final List<Integer> selectedCluster = SetUniqueList.setUniqueList(new LinkedList<Integer>());
+    private String[] params;
 
-		// 1. copy train- and testdata
-		Instances train = new Instances(traindata);
-		Instances test = new Instances(testdata);
-		
-		Instances selected = null;
-		
-		try {
-			// remove class attribute from traindata
-			Remove filter = new Remove();
-			filter.setAttributeIndices("" + (train.classIndex() + 1));
-			filter.setInputFormat(train);
-			train = Filter.useFilter(train, filter);
-			
-			Console.traceln(Level.INFO, String.format("starting clustering"));
-			
-			// 3. cluster data
-			EM clusterer = new EM();
-			clusterer.setOptions(params);
-			clusterer.buildClusterer(train);
-			int numClusters = clusterer.getNumClusters();
-			if ( numClusters == -1) {
-				Console.traceln(Level.INFO, String.format("we have unlimited clusters"));
-			}else {
-				Console.traceln(Level.INFO, String.format("we have: "+numClusters+" clusters"));
-			}
-			
-			
-			// 4. classify testdata, save cluster int
-			
-			// remove class attribute from testdata?
-			Remove filter2 = new Remove();
-			filter2.setAttributeIndices("" + (test.classIndex() + 1));
-			filter2.setInputFormat(test);
-			test = Filter.useFilter(test, filter2);
-			
-			int cnum;
-			for( int i=0; i < test.numInstances(); i++ ) {
-				cnum = ((EM)clusterer).clusterInstance(test.get(i));
+    @Override
+    public void setParameter(String parameters) {
+        params = parameters.split(" ");
+    }
 
-				// we dont want doubles (maybe use a hashset instead of list?)
-				if ( !selectedCluster.contains(cnum) ) {
-					selectedCluster.add(cnum);
-					//Console.traceln(Level.INFO, String.format("assigned to cluster: "+cnum));
-				}
-			}
-			
-			Console.traceln(Level.INFO, String.format("our testdata is in: "+selectedCluster.size()+" different clusters"));
-			
-			// 5. get cluster membership of our traindata
-			AddCluster cfilter = new AddCluster();
-			cfilter.setClusterer(clusterer);
-			cfilter.setInputFormat(train);
-			Instances ctrain = Filter.useFilter(train, cfilter);
-			
-			
-			// 6. for all traindata get the cluster int, if it is in our list of testdata cluster int add the traindata
-			// of this cluster to our returned traindata
-			int cnumber;
-			selected = new Instances(traindata);
-			selected.delete();
-			
-			for ( int j=0; j < ctrain.numInstances(); j++ ) {
-				// get the cluster number from the attributes
-				cnumber = Integer.parseInt(ctrain.get(j).stringValue(ctrain.get(j).numAttributes()-1).replace("cluster", ""));
-				
-				//Console.traceln(Level.INFO, String.format("instance "+j+" is in cluster: "+cnumber));
-				if ( selectedCluster.contains(cnumber) ) {
-					// this only works if the index does not change
-					selected.add(traindata.get(j));
-					// check for differences, just one attribute, we are pretty sure the index does not change
-					if ( traindata.get(j).value(3) != ctrain.get(j).value(3) ) {
-						Console.traceln(Level.WARNING, String.format("we have a difference between train an ctrain!"));
-					}
-				}
-			}
-			
-			Console.traceln(Level.INFO, String.format("that leaves us with: "+selected.numInstances()+" traindata instances from "+traindata.numInstances()));
-		}catch( Exception e ) {
-			Console.traceln(Level.WARNING, String.format("ERROR"));
-			throw new RuntimeException("error in pointwise em", e);
-		}
-	
-		return selected;
-	}
+    /**
+     * 1. Cluster the traindata 2. for each instance in the testdata find the assigned cluster 3.
+     * select only traindata from the clusters we found in our testdata
+     * 
+     * @returns the selected training data
+     */
+    @Override
+    public Instances apply(Instances testdata, Instances traindata) {
+        // final Attribute classAttribute = testdata.classAttribute();
+
+        final List<Integer> selectedCluster =
+            SetUniqueList.setUniqueList(new LinkedList<Integer>());
+
+        // 1. copy train- and testdata
+        Instances train = new Instances(traindata);
+        Instances test = new Instances(testdata);
+
+        Instances selected = null;
+
+        try {
+            // remove class attribute from traindata
+            Remove filter = new Remove();
+            filter.setAttributeIndices("" + (train.classIndex() + 1));
+            filter.setInputFormat(train);
+            train = Filter.useFilter(train, filter);
+
+            Console.traceln(Level.INFO, String.format("starting clustering"));
+
+            // 3. cluster data
+            EM clusterer = new EM();
+            clusterer.setOptions(params);
+            clusterer.buildClusterer(train);
+            int numClusters = clusterer.getNumClusters();
+            if (numClusters == -1) {
+                Console.traceln(Level.INFO, String.format("we have unlimited clusters"));
+            }
+            else {
+                Console.traceln(Level.INFO, String.format("we have: " + numClusters + " clusters"));
+            }
+
+            // 4. classify testdata, save cluster int
+
+            // remove class attribute from testdata?
+            Remove filter2 = new Remove();
+            filter2.setAttributeIndices("" + (test.classIndex() + 1));
+            filter2.setInputFormat(test);
+            test = Filter.useFilter(test, filter2);
+
+            int cnum;
+            for (int i = 0; i < test.numInstances(); i++) {
+                cnum = ((EM) clusterer).clusterInstance(test.get(i));
+
+                // we dont want doubles (maybe use a hashset instead of list?)
+                if (!selectedCluster.contains(cnum)) {
+                    selectedCluster.add(cnum);
+                    // Console.traceln(Level.INFO, String.format("assigned to cluster: "+cnum));
+                }
+            }
+
+            Console.traceln(Level.INFO,
+                            String.format("our testdata is in: " + selectedCluster.size() +
+                                " different clusters"));
+
+            // 5. get cluster membership of our traindata
+            AddCluster cfilter = new AddCluster();
+            cfilter.setClusterer(clusterer);
+            cfilter.setInputFormat(train);
+            Instances ctrain = Filter.useFilter(train, cfilter);
+
+            // 6. for all traindata get the cluster int, if it is in our list of testdata cluster
+            // int add the traindata
+            // of this cluster to our returned traindata
+            int cnumber;
+            selected = new Instances(traindata);
+            selected.delete();
+
+            for (int j = 0; j < ctrain.numInstances(); j++) {
+                // get the cluster number from the attributes
+                cnumber =
+                    Integer.parseInt(ctrain.get(j).stringValue(ctrain.get(j).numAttributes() - 1)
+                        .replace("cluster", ""));
+
+                // Console.traceln(Level.INFO,
+                // String.format("instance "+j+" is in cluster: "+cnumber));
+                if (selectedCluster.contains(cnumber)) {
+                    // this only works if the index does not change
+                    selected.add(traindata.get(j));
+                    // check for differences, just one attribute, we are pretty sure the index does
+                    // not change
+                    if (traindata.get(j).value(3) != ctrain.get(j).value(3)) {
+                        Console.traceln(Level.WARNING, String
+                            .format("we have a difference between train an ctrain!"));
+                    }
+                }
+            }
+
+            Console.traceln(Level.INFO,
+                            String.format("that leaves us with: " + selected.numInstances() +
+                                " traindata instances from " + traindata.numInstances()));
+        }
+        catch (Exception e) {
+            Console.traceln(Level.WARNING, String.format("ERROR"));
+            throw new RuntimeException("error in pointwise em", e);
+        }
+
+        return selected;
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SeparatabilitySelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SeparatabilitySelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SeparatabilitySelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -13,86 +27,97 @@
 
 /**
- * A setwise data selection strategy based on the separatability of the training data from the test data after Z. He, F. Peters, T. Menzies, Y. Yang: Learning from Open-Source Projects: An Empirical Study on Defect Prediction.
- * <br><br>
- * This is calculated through the error of a logistic regression classifier that tries to separate the sets. 
+ * A setwise data selection strategy based on the separatability of the training data from the test
+ * data after Z. He, F. Peters, T. Menzies, Y. Yang: Learning from Open-Source Projects: An
+ * Empirical Study on Defect Prediction. <br>
+ * <br>
+ * This is calculated through the error of a logistic regression classifier that tries to separate
+ * the sets.
+ * 
  * @author Steffen Herbold
  */
 public class SeparatabilitySelection implements ISetWiseDataselectionStrategy {
 
-	/**
-	 * size of the random sample that is drawn from both test data and training data 
-	 */
-	private final int sampleSize = 500;
-	
-	/**
-	 * number of repetitions of the sample drawing
-	 */
-	private final int maxRep = 10;
-	
-	/**
-	 * number of neighbors that are selected
-	 */
-	private int neighbors = 10;
-	
-	/**
-	 * Sets the number of neighbors that are selected.
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( !"".equals(parameters) ) {
-			neighbors = Integer.parseInt(parameters);
-		}
-	}
+    /**
+     * size of the random sample that is drawn from both test data and training data
+     */
+    private final int sampleSize = 500;
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Random rand = new Random(1);
-		
-		// calculate distances between testdata and traindata
-		final double[] distances = new double[traindataSet.size()]; 
-		
-		int i=0;
-		for( Instances traindata : traindataSet ) {
-			double distance = 0.0;
-			for( int rep=0; rep<maxRep ; rep++ ) {
-				// sample instances
-				Instances sample = new Instances(testdata);
-				for( int j=0; j<sampleSize; j++ ) {
-					Instance inst = new DenseInstance(testdata.instance(rand.nextInt(testdata.numInstances())));
-					inst.setDataset(sample);
-					inst.setClassValue(1.0);
-					sample.add(inst);
-					inst = new DenseInstance(traindata.instance(rand.nextInt(traindata.numInstances())));
-					inst.setDataset(sample);
-					inst.setClassValue(0.0);
-					sample.add(inst);
-				}
-				
-				// calculate separation
-				Evaluation eval;
-				try {
-					eval = new Evaluation(sample);
-					eval.crossValidateModel(new Logistic(), sample, 5, rand);
-				} catch (Exception e) {
-					throw new RuntimeException("cross-validation during calculation of separatability failed", e);
-				}
-				distance += eval.pctCorrect()/100.0;
-			}
-			distances[i++] = 2*((distance/maxRep)-0.5);
-		}
-		
-		// select closest neighbors
-		final double[] distancesCopy = Arrays.copyOf(distances, distances.length);
-		Arrays.sort(distancesCopy);
-		final double cutoffDistance = distancesCopy[neighbors];
-		
-		for( i=traindataSet.size()-1; i>=0 ; i-- ) {
-			if( distances[i]>cutoffDistance ) {
-				traindataSet.remove(i);
-			}
-		}
-	}
+    /**
+     * number of repetitions of the sample drawing
+     */
+    private final int maxRep = 10;
+
+    /**
+     * number of neighbors that are selected
+     */
+    private int neighbors = 10;
+
+    /**
+     * Sets the number of neighbors that are selected.
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (!"".equals(parameters)) {
+            neighbors = Integer.parseInt(parameters);
+        }
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Random rand = new Random(1);
+
+        // calculate distances between testdata and traindata
+        final double[] distances = new double[traindataSet.size()];
+
+        int i = 0;
+        for (Instances traindata : traindataSet) {
+            double distance = 0.0;
+            for (int rep = 0; rep < maxRep; rep++) {
+                // sample instances
+                Instances sample = new Instances(testdata);
+                for (int j = 0; j < sampleSize; j++) {
+                    Instance inst =
+                        new DenseInstance(testdata.instance(rand.nextInt(testdata.numInstances())));
+                    inst.setDataset(sample);
+                    inst.setClassValue(1.0);
+                    sample.add(inst);
+                    inst =
+                        new DenseInstance(
+                                          traindata.instance(rand.nextInt(traindata.numInstances())));
+                    inst.setDataset(sample);
+                    inst.setClassValue(0.0);
+                    sample.add(inst);
+                }
+
+                // calculate separation
+                Evaluation eval;
+                try {
+                    eval = new Evaluation(sample);
+                    eval.crossValidateModel(new Logistic(), sample, 5, rand);
+                }
+                catch (Exception e) {
+                    throw new RuntimeException(
+                                               "cross-validation during calculation of separatability failed",
+                                               e);
+                }
+                distance += eval.pctCorrect() / 100.0;
+            }
+            distances[i++] = 2 * ((distance / maxRep) - 0.5);
+        }
+
+        // select closest neighbors
+        final double[] distancesCopy = Arrays.copyOf(distances, distances.length);
+        Arrays.sort(distancesCopy);
+        final double cutoffDistance = distancesCopy[neighbors];
+
+        for (i = traindataSet.size() - 1; i >= 0; i--) {
+            if (distances[i] > cutoffDistance) {
+                traindataSet.remove(i);
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMClusterSelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMClusterSelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMClusterSelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -11,49 +25,57 @@
 
 /**
- * Filter based on EM clustering after S. Herbold: Training data selection for cross-project defect prediction
+ * Filter based on EM clustering after S. Herbold: Training data selection for cross-project defect
+ * prediction
+ * 
  * @author Steffen Herbold
  */
 public class SetWiseEMClusterSelection extends AbstractCharacteristicSelection {
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Instances data = normalizedCharacteristicInstances(testdata, traindataSet); 
-		final Instance targetInstance = data.instance(0);
-		final List<Instance> candidateInstances = new LinkedList<Instance>();
-		for( int i=1; i<data.numInstances(); i++ ) {
-			candidateInstances.add(data.instance(i));
-		}
-		
-		// cluster and select
-		try {
-			final EM emeans = new EM();
-			boolean onlyTarget = true;
-			int targetCluster;
-			int maxNumClusters = candidateInstances.size();
-			do { // while(onlyTarget)
-				emeans.setMaximumNumberOfClusters(maxNumClusters);
-				emeans.buildClusterer(data);
-							
-				targetCluster = emeans.clusterInstance(targetInstance);
-				
-				// check if cluster only contains target project
-				for( int i=0 ; i<candidateInstances.size() && onlyTarget; i++ ) {
-					onlyTarget &= !(emeans.clusterInstance(candidateInstances.get(i))==targetCluster);
-				}
-				maxNumClusters = emeans.numberOfClusters()-1;
-			} while(onlyTarget);
-			
-			int numRemoved = 0;
-			for( int i=0 ; i<candidateInstances.size() ; i++ ) {
-				if( emeans.clusterInstance(candidateInstances.get(i))!=targetCluster ) {
-					traindataSet.remove(i-numRemoved++);
-				}
-			}
-		} catch(Exception e) {
-			throw new RuntimeException("error applying setwise EM clustering training data selection", e);
-		}
-	}	
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Instances data = normalizedCharacteristicInstances(testdata, traindataSet);
+        final Instance targetInstance = data.instance(0);
+        final List<Instance> candidateInstances = new LinkedList<Instance>();
+        for (int i = 1; i < data.numInstances(); i++) {
+            candidateInstances.add(data.instance(i));
+        }
+
+        // cluster and select
+        try {
+            final EM emeans = new EM();
+            boolean onlyTarget = true;
+            int targetCluster;
+            int maxNumClusters = candidateInstances.size();
+            do { // while(onlyTarget)
+                emeans.setMaximumNumberOfClusters(maxNumClusters);
+                emeans.buildClusterer(data);
+
+                targetCluster = emeans.clusterInstance(targetInstance);
+
+                // check if cluster only contains target project
+                for (int i = 0; i < candidateInstances.size() && onlyTarget; i++) {
+                    onlyTarget &=
+                        !(emeans.clusterInstance(candidateInstances.get(i)) == targetCluster);
+                }
+                maxNumClusters = emeans.numberOfClusters() - 1;
+            }
+            while (onlyTarget);
+
+            int numRemoved = 0;
+            for (int i = 0; i < candidateInstances.size(); i++) {
+                if (emeans.clusterInstance(candidateInstances.get(i)) != targetCluster) {
+                    traindataSet.remove(i - numRemoved++);
+                }
+            }
+        }
+        catch (Exception e) {
+            throw new RuntimeException(
+                                       "error applying setwise EM clustering training data selection",
+                                       e);
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMContextSelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMContextSelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseEMContextSelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -22,176 +36,191 @@
  * Selects training data by clustering project context factors.
  * 
- * The project context factors used for the clustering are configured in 
- * the XML param attribute, Example:
- * <setwiseselector name="SetWiseEMContextSelection" param="AFS TND TNC" />
+ * The project context factors used for the clustering are configured in the XML param attribute,
+ * Example: {@code <setwiseselector name="SetWiseEMContextSelection" param="AFS TND TNC" />}
  */
 public class SetWiseEMContextSelection implements ISetWiseDataselectionStrategy {
-	
-	private String[] project_context_factors; // = new String[]{"TND", "TNC", "TNF", "TLOC"};
-	
-	@Override
-	public void setParameter(String parameters) {
-		if( parameters!=null ) {
-			project_context_factors = parameters.split(" ");
-		}
-	}
-	
-	/**
-	 * Uses the Weka EM-Clustering algorithm to cluster the projects
-	 * by their project context factors. 
-	 * The project context factors are first normalized and then used for clustering.
-	 * They can be configured in the configuration param.
-	 *  
-	 * @param testdata
-	 * @param traindataSet
-	 */
-	protected void cluster(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		// now do the clustering, normalizedCharacteristicInstances ruft getContextFactors auf
-		final Instances data = this.normalizedCharacteristicInstances(testdata, traindataSet); 
-		
-		final Instance targetInstance = data.instance(0);
-		final List<Instance> candidateInstances = new LinkedList<Instance>();
-		for( int i=1; i<data.numInstances(); i++ ) {
-			candidateInstances.add(data.instance(i));
-		}
-		
-		// cluster and select
-		try {
-			final EM emeans = new EM();
-			boolean onlyTarget = true;
-			int targetCluster;
-			int maxNumClusters = candidateInstances.size();
-			
-			do { // while(onlyTarget)
-				emeans.setMaximumNumberOfClusters(maxNumClusters);
-				emeans.buildClusterer(data);
-							
-				targetCluster = emeans.clusterInstance(targetInstance);
-				
-				// check if cluster only contains target project
-				for( int i=0 ; i<candidateInstances.size() && onlyTarget; i++ ) {
-					onlyTarget &= !(emeans.clusterInstance(candidateInstances.get(i))==targetCluster);
-				}
-				maxNumClusters = emeans.numberOfClusters()-1;
-				
-				//Console.traceln(Level.INFO, "number of clusters: " + emeans.numberOfClusters());
-			} while(onlyTarget);
-			
-			Console.traceln(Level.INFO, "clusters: " + maxNumClusters);
-			Console.traceln(Level.INFO, "instances vor dem clustern: " + traindataSet.size());
-			int numRemoved = 0;
-			for( int i=0 ; i<candidateInstances.size() ; i++ ) {
-				if( emeans.clusterInstance(candidateInstances.get(i))!=targetCluster ) {
-					traindataSet.remove(i-numRemoved++);
-				}
-			}
-			Console.traceln(Level.INFO, "instances nach dem clustern: " + traindataSet.size());
-		} catch(Exception e) {
-			throw new RuntimeException("error applying setwise EM clustering training data selection", e);
-		}
-	}
-
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		// issuetracking und pl muss passen
-		/*
-		int s = traindataSet.size();
-		Console.traceln(Level.INFO, "remove non matching PL and IssueTracking projects, size now: " + s);
-		this.removeWrongContext(testdata, traindataSet, "PL");
-		this.removeWrongContext(testdata, traindataSet, "IssueTracking");
-		s = traindataSet.size();
-		Console.traceln(Level.INFO, "size after removal: " + s);
-		*/
-		// now cluster
-		this.cluster(testdata, traindataSet);
-	}
-
-	/**
-	 * Returns test- and training data with only the project context factors
-	 * which were chosen in the configuration.
-	 * This is later used for clustering.
-	 * 
-	 * @param testdata
-	 * @param traindataSet
-	 * @return
-	 */
-	protected Instances getContextFactors(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		// setup weka Instances for clustering
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
-		
-		// we only want the project context factors
-		for( String pcf : this.project_context_factors ) {
-			atts.add(new Attribute(pcf));
-		}
-		
-		// set up the data
-		final Instances data = new Instances("project_context_factors", atts, 0);
-		double[] instanceValues = new double[atts.size()];
-		
-		// only project context factors + only one instance per project needed 
-		int i = 0;
-		for( String pcf : this.project_context_factors ) {
-			instanceValues[i] = testdata.instance(0).value(testdata.attribute(pcf));
-			//Console.traceln(Level.INFO, "adding attribute: " + pcf + " value: " + instanceValues[i]);
-			i++;
-		}
-		data.add(new DenseInstance(1.0, instanceValues));
-		
-		// now for the projects of the training stet
-		for( Instances traindata : traindataSet ) {
-			instanceValues = new double[atts.size()];  // ohne das hier immer dieselben werte?!
-			i = 0;
-			for( String pcf : this.project_context_factors ) {
-				instanceValues[i] = traindata.instance(0).value(traindata.attribute(pcf));
-				//Console.traceln(Level.INFO, "adding attribute: " + pcf + " value: " + instanceValues[i]);
-				i++;
-			}
-
-			data.add(new DenseInstance(1.0, instanceValues));
-		}
-
-		return data;
-	}
-
-	/**
-	 * Delete projects where the project context does not match the training project
-	 * 
-	 * @param testdata
-	 * @param traindataSet
-	 * @param attribute
-	 */
-	protected void removeWrongContext(Instances testdata, SetUniqueList<Instances> traindataSet, String attribute) {
-		Set<Instances> remove = new HashSet<Instances>();	
-		for( Instances traindata : traindataSet ) {
-			if( traindata.firstInstance().value(traindata.attribute(attribute)) != testdata.firstInstance().value(testdata.attribute(attribute)) ) {
-				remove.add(traindata);
-				//Console.traceln(Level.WARNING, "rmove attribute "+attribute+" test: "+testdata.firstInstance().value(testdata.attribute(attribute))+" train: "+traindata.firstInstance().value(traindata.attribute(attribute)));
-			}
-		}
-		
-		// now delete the projects from set
-		for( Instances i : remove ) {
-			traindataSet.remove(i);
-			//Console.traceln(Level.INFO, "removing training project from set");
-		}
-	}
-
-	/**
-	 * Normalizes the data before it gets used for clustering
-	 * 
-	 * @param testdata
-	 * @param traindataSet
-	 * @return
-	 */
-	protected Instances normalizedCharacteristicInstances(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		Instances data = this.getContextFactors(testdata, traindataSet);
-		try {
-			final Normalize normalizer = new Normalize();
-			normalizer.setInputFormat(data);
-			data = Filter.useFilter(data, normalizer);
-		} catch (Exception e) {
-			throw new RuntimeException("Unexpected exception during normalization of distributional characteristics.", e);
-		}
-		return data;
-	}
+
+    private String[] project_context_factors; // = new String[]{"TND", "TNC", "TNF", "TLOC"};
+
+    @Override
+    public void setParameter(String parameters) {
+        if (parameters != null) {
+            project_context_factors = parameters.split(" ");
+        }
+    }
+
+    /**
+     * Uses the Weka EM-Clustering algorithm to cluster the projects by their project context
+     * factors. The project context factors are first normalized and then used for clustering. They
+     * can be configured in the configuration param.
+     * 
+     * @param testdata
+     * @param traindataSet
+     */
+    protected void cluster(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        // now do the clustering; normalizedCharacteristicInstances calls getContextFactors
+        final Instances data = this.normalizedCharacteristicInstances(testdata, traindataSet);
+
+        final Instance targetInstance = data.instance(0);
+        final List<Instance> candidateInstances = new LinkedList<Instance>();
+        for (int i = 1; i < data.numInstances(); i++) {
+            candidateInstances.add(data.instance(i));
+        }
+
+        // cluster and select
+        try {
+            final EM emeans = new EM();
+            boolean onlyTarget = true;
+            int targetCluster;
+            int maxNumClusters = candidateInstances.size();
+
+            do { // while(onlyTarget)
+                emeans.setMaximumNumberOfClusters(maxNumClusters);
+                emeans.buildClusterer(data);
+
+                targetCluster = emeans.clusterInstance(targetInstance);
+
+                // check if cluster only contains target project
+                for (int i = 0; i < candidateInstances.size() && onlyTarget; i++) {
+                    onlyTarget &=
+                        !(emeans.clusterInstance(candidateInstances.get(i)) == targetCluster);
+                }
+                maxNumClusters = emeans.numberOfClusters() - 1;
+
+                // Console.traceln(Level.INFO, "number of clusters: " + emeans.numberOfClusters());
+            }
+            while (onlyTarget);
+
+            Console.traceln(Level.INFO, "clusters: " + maxNumClusters);
+            Console.traceln(Level.INFO, "instances vor dem clustern: " + traindataSet.size());
+            int numRemoved = 0;
+            for (int i = 0; i < candidateInstances.size(); i++) {
+                if (emeans.clusterInstance(candidateInstances.get(i)) != targetCluster) {
+                    traindataSet.remove(i - numRemoved++);
+                }
+            }
+            Console.traceln(Level.INFO, "instances nach dem clustern: " + traindataSet.size());
+        }
+        catch (Exception e) {
+            throw new RuntimeException(
+                                       "error applying setwise EM clustering training data selection",
+                                       e);
+        }
+    }
+
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        // issue tracking and programming language (PL) must match
+        /*
+         * int s = traindataSet.size(); Console.traceln(Level.INFO,
+         * "remove non matching PL and IssueTracking projects, size now: " + s);
+         * this.removeWrongContext(testdata, traindataSet, "PL"); this.removeWrongContext(testdata,
+         * traindataSet, "IssueTracking"); s = traindataSet.size(); Console.traceln(Level.INFO,
+         * "size after removal: " + s);
+         */
+        // now cluster
+        this.cluster(testdata, traindataSet);
+    }
+
+    /**
+     * Returns test- and training data with only the project context factors which were chosen in
+     * the configuration. This is later used for clustering.
+     * 
+     * @param testdata
+     * @param traindataSet
+     * @return
+     */
+    protected Instances getContextFactors(Instances testdata, SetUniqueList<Instances> traindataSet)
+    {
+        // setup weka Instances for clustering
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+
+        // we only want the project context factors
+        for (String pcf : this.project_context_factors) {
+            atts.add(new Attribute(pcf));
+        }
+
+        // set up the data
+        final Instances data = new Instances("project_context_factors", atts, 0);
+        double[] instanceValues = new double[atts.size()];
+
+        // only project context factors + only one instance per project needed
+        int i = 0;
+        for (String pcf : this.project_context_factors) {
+            instanceValues[i] = testdata.instance(0).value(testdata.attribute(pcf));
+            // Console.traceln(Level.INFO, "adding attribute: " + pcf + " value: " +
+            // instanceValues[i]);
+            i++;
+        }
+        data.add(new DenseInstance(1.0, instanceValues));
+
+        // now for the projects of the training set
+        for (Instances traindata : traindataSet) {
+            instanceValues = new double[atts.size()]; // without re-allocating here, the same values recur?!
+            i = 0;
+            for (String pcf : this.project_context_factors) {
+                instanceValues[i] = traindata.instance(0).value(traindata.attribute(pcf));
+                // Console.traceln(Level.INFO, "adding attribute: " + pcf + " value: " +
+                // instanceValues[i]);
+                i++;
+            }
+
+            data.add(new DenseInstance(1.0, instanceValues));
+        }
+
+        return data;
+    }
+
+    /**
+     * Delete projects where the project context does not match the training project
+     * 
+     * @param testdata
+     * @param traindataSet
+     * @param attribute
+     */
+    protected void removeWrongContext(Instances testdata,
+                                      SetUniqueList<Instances> traindataSet,
+                                      String attribute)
+    {
+        Set<Instances> remove = new HashSet<Instances>();
+        for (Instances traindata : traindataSet) {
+            if (traindata.firstInstance().value(traindata.attribute(attribute)) != testdata
+                .firstInstance().value(testdata.attribute(attribute)))
+            {
+                remove.add(traindata);
+                // Console.traceln(Level.WARNING,
+                // "rmove attribute "+attribute+" test: "+testdata.firstInstance().value(testdata.attribute(attribute))+" train: "+traindata.firstInstance().value(traindata.attribute(attribute)));
+            }
+        }
+
+        // now delete the projects from set
+        for (Instances i : remove) {
+            traindataSet.remove(i);
+            // Console.traceln(Level.INFO, "removing training project from set");
+        }
+    }
+
+    /**
+     * Normalizes the data before it gets used for clustering
+     * 
+     * @param testdata
+     * @param traindataSet
+     * @return
+     */
+    protected Instances normalizedCharacteristicInstances(Instances testdata,
+                                                          SetUniqueList<Instances> traindataSet)
+    {
+        Instances data = this.getContextFactors(testdata, traindataSet);
+        try {
+            final Normalize normalizer = new Normalize();
+            normalizer.setInputFormat(data);
+            data = Filter.useFilter(data, normalizer);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(
+                                       "Unexpected exception during normalization of distributional characteristics.",
+                                       e);
+        }
+        return data;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseKNNSelection.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseKNNSelection.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/SetWiseKNNSelection.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -10,72 +24,82 @@
 
 /**
- * Filter based on the k-nearest neighbor (KNN) algorithm S. Herbold: Training data selection for cross-project defect prediction
+ * Filter based on the k-nearest neighbor (KNN) algorithm; see S. Herbold: Training data selection for
+ * cross-project defect prediction
+ * 
  * @author Steffen Herbold
  */
 public class SetWiseKNNSelection extends AbstractCharacteristicSelection {
-	
-	/**
-	 * number of neighbors selected
-	 */
-	private int k = 1;
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		final Instances data = normalizedCharacteristicInstances(testdata, traindataSet);
-		
-		final Set<Integer> selected = new HashSet<Integer>();		
-		for( int i=0 ; i<k ; i++ ) {
-			int closestIndex = getClosest(data);
-			
-			selected.add(closestIndex);
-			data.delete(closestIndex);
-		}
-		
-		for( int i=traindataSet.size()-1; i>=0 ; i-- ) {
-			if( selected.contains(i) ) {
-				traindataSet.remove(i);
-			}
-		}
-	}
-	
-	/**
-	 * Helper method that determines the index of the instance with the smallest distance to the first instance (index 0).
-	 * @param data data set
-	 * @return index of the closest instance
-	 */
-	private int getClosest(Instances data) {
-		double closestDistance = Double.MAX_VALUE;
-		int closestIndex = 1;
-		for( int i=1 ; i<data.numInstances() ; i++ ) {
-			double distance = MathArrays.distance(data.instance(0).toDoubleArray(), data.instance(i).toDoubleArray());
-			if( distance < closestDistance) {
-				closestDistance = distance;
-				closestIndex = i;
-			}
-		}
-		return closestIndex;
-	}
 
-	/**
-	 * Sets the number of neighbors followed by the distributional characteristics, the values are separated by blanks.
-	 * @see AbstractCharacteristicSelection#setParameter(String) 
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( !"".equals(parameters) ) {
-			final String[] split = parameters.split(" ");
-			k = Integer.parseInt(split[0]);
-			String str = "";
-			for( int i=1 ; i<split.length; i++ ) {
-				str += split[i];
-				if( i<split.length-1 )  {
-					str += " ";
-				}
-			}
-			super.setParameter(str);
-		}
-	}
+    /**
+     * number of neighbors selected
+     */
+    private int k = 1;
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataselection.SetWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        final Instances data = normalizedCharacteristicInstances(testdata, traindataSet);
+
+        final Set<Integer> selected = new HashSet<Integer>();
+        for (int i = 0; i < k; i++) {
+            int closestIndex = getClosest(data);
+
+            selected.add(closestIndex);
+            data.delete(closestIndex);
+        }
+
+        for (int i = traindataSet.size() - 1; i >= 0; i--) {
+            if (selected.contains(i)) {
+                traindataSet.remove(i);
+            }
+        }
+    }
+
+    /**
+     * Helper method that determines the index of the instance with the smallest distance to the
+     * first instance (index 0).
+     * 
+     * @param data
+     *            data set
+     * @return index of the closest instance
+     */
+    private int getClosest(Instances data) {
+        double closestDistance = Double.MAX_VALUE;
+        int closestIndex = 1;
+        for (int i = 1; i < data.numInstances(); i++) {
+            double distance =
+                MathArrays.distance(data.instance(0).toDoubleArray(), data.instance(i)
+                    .toDoubleArray());
+            if (distance < closestDistance) {
+                closestDistance = distance;
+                closestIndex = i;
+            }
+        }
+        return closestIndex;
+    }
+
+    /**
+     * Sets the number of neighbors followed by the distributional characteristics, the values are
+     * separated by blanks.
+     * 
+     * @see AbstractCharacteristicSelection#setParameter(String)
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (!"".equals(parameters)) {
+            final String[] split = parameters.split(" ");
+            k = Integer.parseInt(split[0]);
+            String str = "";
+            for (int i = 1; i < split.length; i++) {
+                str += split[i];
+                if (i < split.length - 1) {
+                    str += " ";
+                }
+            }
+            super.setParameter(str);
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TestAsTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TestAsTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TestAsTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -7,25 +21,29 @@
 /**
  * Uses the test data as training data.
+ * 
  * @author Steffen Herbold
- *
+ * 
  */
 public class TestAsTraining implements ISetWiseDataselectionStrategy {
 
-	/**
-	 * no parameters
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		// dummy
-	}
+    /**
+     * no parameters
+     */
+    @Override
+    public void setParameter(String parameters) {
+        // dummy
+    }
 
-	/**(non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.dataselection.ISetWiseDataselectionStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
-	 */
-	@Override
-	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
-		traindataSet.clear();
-		traindataSet.add(new Instances(testdata));
-	}
+    /**
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.dataselection.ISetWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      org.apache.commons.collections4.list.SetUniqueList)
+     */
+    @Override
+    public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+        traindataSet.clear();
+        traindataSet.add(new Instances(testdata));
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TurhanFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TurhanFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataselection/TurhanFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.dataselection;
 
@@ -13,85 +27,91 @@
 
 /**
- * Filter according to B. Turhan, T. Menzies, A. Bener, and J. Die Stefano: On the relative value of cross-company and within company defect prediction
+ * Filter according to B. Turhan, T. Menzies, A. Bener, and J. Di Stefano: On the relative value of
+ * cross-company and within company defect prediction
+ * 
  * @author Steffen Herbold
  */
 public class TurhanFilter implements IPointWiseDataselectionStrategy {
 
-	/**
-	 * number of neighbors that are selected
-	 */
-	private int k = 10;
-	
-	/**
-	 * Sets the number of neighbors.
-	 * @param parameters number of neighbors
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		k = Integer.parseInt(parameters);
-	}
+    /**
+     * number of neighbors that are selected
+     */
+    private int k = 10;
 
-	/**
-	 * @see de.ugoe.cs.cpdp.dataselection.PointWiseDataselectionStrategy#apply(weka.core.Instances, weka.core.Instances)
-	 */
-	@Override
-	public Instances apply(Instances testdata, Instances traindata) {
-		final Attribute classAttribute = testdata.classAttribute();
-		
-		final List<Integer> selectedIndex = SetUniqueList.setUniqueList(new LinkedList<Integer>());
-		
-		final double[][] trainDoubles = new double[traindata.numInstances()][testdata.numAttributes()];
-		
-		for( int i=0; i<traindata.numInstances() ; i++ ) {
-			Instance instance = traindata.instance(i);
-			int tmp = 0;
-			for( int j=0 ; j<testdata.numAttributes(); j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					trainDoubles[i][tmp++] = instance.value(j);
-				}
-			}
-		}
-		
-		for( int i=0; i<testdata.numInstances() ; i++ ) {
-			Instance testIntance = testdata.instance(i);
-			double[] targetVector = new double[testdata.numAttributes()-1];
-			int tmp = 0;
-			for( int j=0 ; j<testdata.numAttributes(); j++ ) {
-				if( testdata.attribute(j)!=classAttribute ) {
-					targetVector[tmp++] = testIntance.value(j);
-				}
-			}
-			
-			double farthestClosestDistance = Double.MAX_VALUE;
-			int farthestClosestIndex = 0;
-			double[] closestDistances = new double[k];
-			for( int m=0 ; m<closestDistances.length ; m++ ) {
-				closestDistances[m] = Double.MAX_VALUE;
-			}
-			int[] closestIndex = new int[k];
-			
-			for( int n=0; n<traindata.numInstances() ; n++ ) {
-				double distance = MathArrays.distance(targetVector, trainDoubles[n]);
-				
-				if( distance<farthestClosestDistance ) {
-					closestIndex[farthestClosestIndex] = n;
-					closestDistances[farthestClosestIndex] = distance;
-					
-					farthestClosestIndex = ArrayTools.findMax(closestDistances);
-					farthestClosestDistance = closestDistances[farthestClosestIndex];
-				}
-			}
-			for( int index : closestIndex ) {
-				selectedIndex.add(index);
-			}
-		}
-		
-		final Instances selected = new Instances(testdata);
-		selected.delete();
-		for( Integer i : selectedIndex) {
-			selected.add(traindata.instance(i));
-		}
-		return selected;
-	}
+    /**
+     * Sets the number of neighbors.
+     * 
+     * @param parameters
+     *            number of neighbors
+     */
+    @Override
+    public void setParameter(String parameters) {
+        k = Integer.parseInt(parameters);
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.dataselection.PointWiseDataselectionStrategy#apply(weka.core.Instances,
+     *      weka.core.Instances)
+     */
+    @Override
+    public Instances apply(Instances testdata, Instances traindata) {
+        final Attribute classAttribute = testdata.classAttribute();
+
+        final List<Integer> selectedIndex = SetUniqueList.setUniqueList(new LinkedList<Integer>());
+
+        final double[][] trainDoubles =
+            new double[traindata.numInstances()][testdata.numAttributes()];
+
+        for (int i = 0; i < traindata.numInstances(); i++) {
+            Instance instance = traindata.instance(i);
+            int tmp = 0;
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    trainDoubles[i][tmp++] = instance.value(j);
+                }
+            }
+        }
+
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            Instance testIntance = testdata.instance(i);
+            double[] targetVector = new double[testdata.numAttributes() - 1];
+            int tmp = 0;
+            for (int j = 0; j < testdata.numAttributes(); j++) {
+                if (testdata.attribute(j) != classAttribute) {
+                    targetVector[tmp++] = testIntance.value(j);
+                }
+            }
+
+            double farthestClosestDistance = Double.MAX_VALUE;
+            int farthestClosestIndex = 0;
+            double[] closestDistances = new double[k];
+            for (int m = 0; m < closestDistances.length; m++) {
+                closestDistances[m] = Double.MAX_VALUE;
+            }
+            int[] closestIndex = new int[k];
+
+            for (int n = 0; n < traindata.numInstances(); n++) {
+                double distance = MathArrays.distance(targetVector, trainDoubles[n]);
+
+                if (distance < farthestClosestDistance) {
+                    closestIndex[farthestClosestIndex] = n;
+                    closestDistances[farthestClosestIndex] = distance;
+
+                    farthestClosestIndex = ArrayTools.findMax(closestDistances);
+                    farthestClosestDistance = closestDistances[farthestClosestIndex];
+                }
+            }
+            for (int index : closestIndex) {
+                selectedIndex.add(index);
+            }
+        }
+
+        final Instances selected = new Instances(testdata);
+        selected.delete();
+        for (Integer i : selectedIndex) {
+            selected.add(traindata.instance(i));
+        }
+        return selected;
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.decentApp;
 
@@ -11,30 +25,29 @@
  * 
  * @author Philip Makedonski, Fabian Trautsch
- *
+ * 
  */
 public class ARFFxResourceTool extends ResourceTool {
-	
-	/**
-	 * Initializes the Tool Factory, from which the models can be loaded and
-	 * inizializes the validator.
-	 */
-	public ARFFxResourceTool(){
-		super(ARFFxResourceTool.class.getName());
-		ARFFxPackageImpl.init();
-		
-		// Commented, because simulation has problems with this
-		initializeValidator();
-	}
-	
-	/**
-	 * Inizializes the model validator
-	 */
-	@Override
-	protected void initializeValidator(){
-		super.initializeValidator();
-		EObjectValidator validator = new EObjectValidator();
-	    EValidator.Registry.INSTANCE.put(ARFFxPackage.eINSTANCE, validator);
-	}	
-	
+
+    /**
+     * Initializes the Tool Factory, from which the models can be loaded and initializes the
+     * validator.
+     */
+    public ARFFxResourceTool() {
+        super(ARFFxResourceTool.class.getName());
+        ARFFxPackageImpl.init();
+
+        // NOTE(review): stale comment? It said "Commented, because simulation has problems with this", but initializeValidator() is actually called — confirm intent
+        initializeValidator();
+    }
+
+    /**
+     * Initializes the model validator
+     */
+    @Override
+    protected void initializeValidator() {
+        super.initializeValidator();
+        EObjectValidator validator = new EObjectValidator();
+        EValidator.Registry.INSTANCE.put(ARFFxPackage.eINSTANCE, validator);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.decentApp;
 
@@ -20,285 +34,341 @@
  * 
  * @author Philip Makedonski, Fabian Trautsch
- *
+ * 
  */
 
 public class DECENTEpsilonModelHandler {
-	private HashMap<String, Object> metaModelCache = new HashMap<>();
-	private boolean useDECENTBinary = false;
-	private boolean useARFFxBinary = false;
-
-	public static String metaPath = "./decent/models/";
-
-	/**
-	 * Returns the decent model as IModel instance
-	 * 
-	 * @param decentModelLocation location of the decent model file
-	 * @param read indicates if the model should be read from
-	 * @param write indicates if data should be written in the model
-	 * @return EmFModel (IModel) instance from the decent model, which was loaded
-	 * @throws Exception
-	 */
-	public IModel getDECENTModel(String decentModelLocation, boolean read, boolean write) throws Exception { 
-
-		EmfModel model;
-		
-		if (isUseDECENTBinary()) {
-			unregisterMetaModels("");
-			if (!read) {
-				new File(decentModelLocation).delete();
-				new File(decentModelLocation+"bin").delete();
-			}
-			DECENTResourceTool tool = new DECENTResourceTool();
-			if (new File(decentModelLocation).exists() && !new File(decentModelLocation+"bin").exists()) {
-				Resource resource = tool.loadResourceFromXMI(decentModelLocation,"decent", DECENTPackage.eINSTANCE);
-				tool.storeBinaryResourceContents(resource.getContents(), decentModelLocation+"bin", "decentbin");
-			}
-			
-			Resource resourceBin = tool.loadResourceFromBinary(decentModelLocation+"bin","decentbin", DECENTPackage.eINSTANCE);
-			//alternative pattern
-//			model = createInMemoryEmfModel("DECENT", resourceLocation, "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin, DECENTPackage.eINSTANCE);
-//			restoreMetaModels();
-
-			//NOTE: Adding the package is essential as otherwise epsilon breaks
-			model = new InMemoryEmfModel("DECENT", resourceBin, DECENTPackage.eINSTANCE);
-			model.setStoredOnDisposal(write);
-			model.setReadOnLoad(read);
-			model.setCachingEnabled(true);
-			restoreMetaModels();		
-		} else {
-			model = createEmfModel("DECENT", decentModelLocation, metaPath+"DECENTv3.ecore", read, write);
-		}
-
-		return model;
-	}
-
-	/**
-	 * Converts the decent model to a binary form
-	 * 
-	 * @param location of the decent model file
-	 */
-	public void convertDECENTModelToBinary(String location) {
-		unregisterMetaModels("");
-		DECENTResourceTool tool = new DECENTResourceTool();
-		Resource resource = tool.loadResourceFromXMI(location+"/model.decent","decent", DECENTPackage.eINSTANCE);
-		tool.storeBinaryResourceContents(resource.getContents(), location+"/model.decent"+"bin", "decentbin");
-		restoreMetaModels();		
-	}
-
-	/**
-	 * Converts the decent model to a xmi form
-	 * 
-	 * @param location of the decent model file
-	 */
-	
-	public void convertDECENTModelToXMI(String location) {
-		unregisterMetaModels("");
-		DECENTResourceTool tool = new DECENTResourceTool(); 
-		Resource resource = tool.loadResourceFromBinary(location+"/model.decentbin","decentbin", DECENTPackage.eINSTANCE);
-		restoreMetaModels();		
-		tool.storeResourceContents(resource.getContents(), location+"/model.decent", "decent");
-	}
-
-	/**
-	 * Returns the arffx model as IModel instance
-	 * 
-	 * @param arffxModelLocation location of the arffx model file
-	 * @param read indicates if the model should be read from
-	 * @param write indicates if data should be written in the model
-	 * @return EmFModel (IModel) instance from the arffx model, which was loaded
-	 * @throws Exception
-	 */
-	
-	public IModel getARFFxModel(String arffxModelLocation, boolean read, boolean write) throws Exception {
-		
-		EmfModel model;
-		
-		if (isUseARFFxBinary()) {
-			unregisterMetaModels("");
-			if (!read) {
-				new File(arffxModelLocation).delete();
-				new File(arffxModelLocation+"bin").delete();
-			}
-			ARFFxResourceTool tool = new ARFFxResourceTool();
-			if (new File(arffxModelLocation).exists() && !new File(arffxModelLocation+"bin").exists()) {
-				Resource resource = tool.loadResourceFromXMI(arffxModelLocation,"arffx", ARFFxPackage.eINSTANCE);
-				tool.storeBinaryResourceContents(resource.getContents(), arffxModelLocation+"bin", "arffxbin");
-			}
-			
-			Resource resourceBin = tool.loadResourceFromBinary(arffxModelLocation+"bin","arffxbin", ARFFxPackage.eINSTANCE);
-			//alternative pattern
-//			model = createInMemoryEmfModel("DECENT", resourceLocation, "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin, DECENTPackage.eINSTANCE);
-//			restoreMetaModels();
-
-			//NOTE: Adding the package is essential as otherwise epsilon breaks
-			model = new InMemoryEmfModel("ARFFx", resourceBin, ARFFxPackage.eINSTANCE);
-//			model.getModelImpl().getURI().toFileString()
-			model.setStoredOnDisposal(write);
-			model.setReadOnLoad(read);
-			model.setCachingEnabled(true);
-			restoreMetaModels();		
-		} else {
-			model = createEmfModel("ARFFx", arffxModelLocation, metaPath+"ARFFx.ecore", read, write);
-		}
-		
-		return model;
-	}
-
-
-	/**
-	 * Converts an arffx model to a binary version
-	 * 
-	 * @param location of the arffx model
-	 */
-	public void convertARFFxModelToBinary(String location) {
-		unregisterMetaModels("");
-		ARFFxResourceTool tool = new ARFFxResourceTool();
-		Resource resource = tool.loadResourceFromXMI(location+"/model.arffx","arffx", ARFFxPackage.eINSTANCE);
-		tool.storeBinaryResourceContents(resource.getContents(), location+"/model.arffx"+"bin", "arffxbin");
-		restoreMetaModels();		
-	}
-	
-	/**
-	 * Converts an arffx model to xmi
-	 * 
-	 * @param location of the arffx model
-	 */
-
-	public void convertARFFxModelToXMI(String location) {
-		unregisterMetaModels("");
-		ARFFxResourceTool tool = new ARFFxResourceTool(); 
-		Resource resource = tool.loadResourceFromBinary(location+"/model.arffxbin","arffxbin", DECENTPackage.eINSTANCE);
-		restoreMetaModels();		
-		tool.storeResourceContents(resource.getContents(), location+"/model.arffx", "arffx");
-	}
-
-
-	/**
-	 * Returns the log model as IModel instance
-	 * 
-	 * @param logModelLocation location of the log model file
-	 * @param read indicates if the model should be read from
-	 * @param write indicates if data should be written in the model
-	 * @return EmFModel (IModel) instance from the log model, which was loaded
-	 * @throws Exception
-	 */
-	
-	public IModel getLOGModel(String logModelLocation, boolean read, boolean write) throws Exception {
-		if (!new File(logModelLocation).exists()) {
-			read = false;
-		}
-		IModel model = createEmfModel("LOG", logModelLocation, metaPath +"LOG.ecore", read, write);
-		System.setProperty("epsilon.logFileAvailable", "true");
-		return model;
-	}
-
-	/**
-	 * Creates an EMF Model
-	 * 
-	 * @param name of the  emf model
-	 * @param model  name of the model
-	 * @param metamodel name of the metamodel
-	 * @param readOnLoad indicates if the model should be read on load
-	 * @param storeOnDisposal indicates if the model should be stored on disposal
-	 * @return
-	 * @throws EolModelLoadingException
-	 * @throws URISyntaxException
-	 */
-	
-	@SuppressWarnings("deprecation")
-	protected EmfModel createEmfModel(String name, String model, 
-			String metamodel, boolean readOnLoad, boolean storeOnDisposal) 
-					throws EolModelLoadingException, URISyntaxException {
-		EmfModel emfModel = new EmfModel();
-		StringProperties properties = new StringProperties();
-		properties.put(EmfModel.PROPERTY_NAME, name);
-		properties.put(EmfModel.PROPERTY_ALIASES, name);
-		properties.put(EmfModel.PROPERTY_FILE_BASED_METAMODEL_URI, 
-				"file:/" + getFile(metamodel).getAbsolutePath());
-		properties.put(EmfModel.PROPERTY_MODEL_URI, 
-				"file:/" + getFile(model).getAbsolutePath());
-		properties.put(EmfModel.PROPERTY_IS_METAMODEL_FILE_BASED, "true");
-		properties.put(EmfModel.PROPERTY_READONLOAD, readOnLoad + "");
-		properties.put(EmfModel.PROPERTY_CACHED, "true");
-		properties.put(EmfModel.PROPERTY_STOREONDISPOSAL, 
-				storeOnDisposal + "");
-		emfModel.load(properties, "");
-		//System.out.println(emfModel.allContents());
-		return emfModel;
-	}
-
-	/**
-	 * Returns a new File instance from the given filename
-	 * 
-	 * @param fileName of the file
-	 * @return
-	 * @throws URISyntaxException
-	 */
-	public File getFile(String fileName) throws URISyntaxException {;
-		return new File(fileName);
-	}
-
-	/**
-	 * Restores the metamodels, so that they are registered in the
-	 * EPackage registry
-	 */
-	private void restoreMetaModels() {
-		for (String key : metaModelCache .keySet()) {
-			EPackage.Registry.INSTANCE.put(key, metaModelCache.get(key));
-		};
-	}
-
-	/**
-	 * Unregister the metamodels from the EPackage registry
-	 * 
-	 * @param filter for filtering out certain instances
-	 */
-	private void unregisterMetaModels(String filter) {
-		for (String key : EPackage.Registry.INSTANCE.keySet()) {
-			if (key.contains(filter)) {
-				metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
-			}
-		};
-		for (String key : metaModelCache .keySet()) {
-			EPackage.Registry.INSTANCE.remove(key);
-		};
-	}
-	
-	/**
-	 * Returns true if decent binary model is used
-	 * @return
-	 */
-
-	public boolean isUseDECENTBinary() {
-		return useDECENTBinary;
-	}
-
-	/**
-	 * Sets the boolean which indicates, if the decent binary
-	 * model is used
-	 * @param useDECENTBinary
-	 */
-	public void setUseDECENTBinary(boolean useDECENTBinary) {
-		this.useDECENTBinary = useDECENTBinary;
-	}
-
-	/**
-	 * Returns true if arffx binary model is used
-	 * @return
-	 */
-	public boolean isUseARFFxBinary() {
-		return useARFFxBinary;
-	}
-	
-	/**
-	 * Sets the boolean which indicates, if the arffx binary
-	 * model is used
-	 * @param useARFFxBinary
-	 */
-
-	public void setUseARFFxBinary(boolean useARFFxBinary) {
-		this.useARFFxBinary = useARFFxBinary;
-	}
-
-	
+    private HashMap<String, Object> metaModelCache = new HashMap<>();
+    private boolean useDECENTBinary = false;
+    private boolean useARFFxBinary = false;
+
+    public static String metaPath = "./decent/models/";
+
+    /**
+     * Returns the decent model as IModel instance
+     * 
+     * @param decentModelLocation
+     *            location of the decent model file
+     * @param read
+     *            indicates if the model should be read from
+     * @param write
+     *            indicates if data should be written in the model
+     * @return EmfModel (IModel) instance from the decent model, which was loaded
+     * @throws Exception
+     */
+    public IModel getDECENTModel(String decentModelLocation, boolean read, boolean write)
+        throws Exception
+    {
+
+        EmfModel model;
+
+        if (isUseDECENTBinary()) {
+            unregisterMetaModels("");
+            if (!read) {
+                new File(decentModelLocation).delete();
+                new File(decentModelLocation + "bin").delete();
+            }
+            DECENTResourceTool tool = new DECENTResourceTool();
+            if (new File(decentModelLocation).exists() &&
+                !new File(decentModelLocation + "bin").exists())
+            {
+                Resource resource =
+                    tool.loadResourceFromXMI(decentModelLocation, "decent", DECENTPackage.eINSTANCE);
+                tool.storeBinaryResourceContents(resource.getContents(), decentModelLocation +
+                    "bin", "decentbin");
+            }
+
+            Resource resourceBin =
+                tool.loadResourceFromBinary(decentModelLocation + "bin", "decentbin",
+                                            DECENTPackage.eINSTANCE);
+            // alternative pattern
+            // model = createInMemoryEmfModel("DECENT", resourceLocation,
+            // "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin,
+            // DECENTPackage.eINSTANCE);
+            // restoreMetaModels();
+
+            // NOTE: Adding the package is essential as otherwise epsilon breaks
+            model = new InMemoryEmfModel("DECENT", resourceBin, DECENTPackage.eINSTANCE);
+            model.setStoredOnDisposal(write);
+            model.setReadOnLoad(read);
+            model.setCachingEnabled(true);
+            restoreMetaModels();
+        }
+        else {
+            model =
+                createEmfModel("DECENT", decentModelLocation, metaPath + "DECENTv3.ecore", read,
+                               write);
+        }
+
+        return model;
+    }
+
+    /**
+     * Converts the decent model to a binary form
+     * 
+     * @param location
+     *            of the decent model file
+     */
+    public void convertDECENTModelToBinary(String location) {
+        unregisterMetaModels("");
+        DECENTResourceTool tool = new DECENTResourceTool();
+        Resource resource =
+            tool.loadResourceFromXMI(location + "/model.decent", "decent", DECENTPackage.eINSTANCE);
+        tool.storeBinaryResourceContents(resource.getContents(),
+                                         location + "/model.decent" + "bin", "decentbin");
+        restoreMetaModels();
+    }
+
+    /**
+     * Converts the decent model to an XMI form
+     * 
+     * @param location
+     *            of the decent model file
+     */
+
+    public void convertDECENTModelToXMI(String location) {
+        unregisterMetaModels("");
+        DECENTResourceTool tool = new DECENTResourceTool();
+        Resource resource =
+            tool.loadResourceFromBinary(location + "/model.decentbin", "decentbin",
+                                        DECENTPackage.eINSTANCE);
+        restoreMetaModels();
+        tool.storeResourceContents(resource.getContents(), location + "/model.decent", "decent");
+    }
+
+    /**
+     * Returns the arffx model as IModel instance
+     * 
+     * @param arffxModelLocation
+     *            location of the arffx model file
+     * @param read
+     *            indicates if the model should be read from
+     * @param write
+     *            indicates if data should be written in the model
+     * @return EmfModel (IModel) instance from the arffx model, which was loaded
+     * @throws Exception
+     */
+
+    public IModel getARFFxModel(String arffxModelLocation, boolean read, boolean write)
+        throws Exception
+    {
+
+        EmfModel model;
+
+        if (isUseARFFxBinary()) {
+            unregisterMetaModels("");
+            if (!read) {
+                new File(arffxModelLocation).delete();
+                new File(arffxModelLocation + "bin").delete();
+            }
+            ARFFxResourceTool tool = new ARFFxResourceTool();
+            if (new File(arffxModelLocation).exists() &&
+                !new File(arffxModelLocation + "bin").exists())
+            {
+                Resource resource =
+                    tool.loadResourceFromXMI(arffxModelLocation, "arffx", ARFFxPackage.eINSTANCE);
+                tool.storeBinaryResourceContents(resource.getContents(),
+                                                 arffxModelLocation + "bin", "arffxbin");
+            }
+
+            Resource resourceBin =
+                tool.loadResourceFromBinary(arffxModelLocation + "bin", "arffxbin",
+                                            ARFFxPackage.eINSTANCE);
+            // alternative pattern
+            // model = createInMemoryEmfModel("DECENT", resourceLocation,
+            // "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin,
+            // DECENTPackage.eINSTANCE);
+            // restoreMetaModels();
+
+            // NOTE: Adding the package is essential as otherwise epsilon breaks
+            model = new InMemoryEmfModel("ARFFx", resourceBin, ARFFxPackage.eINSTANCE);
+            // model.getModelImpl().getURI().toFileString()
+            model.setStoredOnDisposal(write);
+            model.setReadOnLoad(read);
+            model.setCachingEnabled(true);
+            restoreMetaModels();
+        }
+        else {
+            model =
+                createEmfModel("ARFFx", arffxModelLocation, metaPath + "ARFFx.ecore", read, write);
+        }
+
+        return model;
+    }
+
+    /**
+     * Converts an arffx model to a binary version
+     * 
+     * @param location
+     *            of the arffx model
+     */
+    public void convertARFFxModelToBinary(String location) {
+        unregisterMetaModels("");
+        ARFFxResourceTool tool = new ARFFxResourceTool();
+        Resource resource =
+            tool.loadResourceFromXMI(location + "/model.arffx", "arffx", ARFFxPackage.eINSTANCE);
+        tool.storeBinaryResourceContents(resource.getContents(), location + "/model.arffx" + "bin",
+                                         "arffxbin");
+        restoreMetaModels();
+    }
+
+    /**
+     * Converts an arffx model to xmi
+     * 
+     * @param location
+     *            of the arffx model
+     */
+
+    public void convertARFFxModelToXMI(String location) {
+        unregisterMetaModels("");
+        ARFFxResourceTool tool = new ARFFxResourceTool();
+        Resource resource =
+            tool.loadResourceFromBinary(location + "/model.arffxbin", "arffxbin",
+                                        DECENTPackage.eINSTANCE);
+        restoreMetaModels();
+        tool.storeResourceContents(resource.getContents(), location + "/model.arffx", "arffx");
+    }
+
+    /**
+     * Returns the log model as IModel instance
+     * 
+     * @param logModelLocation
+     *            location of the log model file
+     * @param read
+     *            indicates if the model should be read from
+     * @param write
+     *            indicates if data should be written in the model
+     * @return EmfModel (IModel) instance from the log model, which was loaded
+     * @throws Exception
+     */
+
+    public IModel getLOGModel(String logModelLocation, boolean read, boolean write)
+        throws Exception
+    {
+        if (!new File(logModelLocation).exists()) {
+            read = false;
+        }
+        IModel model = createEmfModel("LOG", logModelLocation, metaPath + "LOG.ecore", read, write);
+        System.setProperty("epsilon.logFileAvailable", "true");
+        return model;
+    }
+
+    /**
+     * Creates an EMF Model
+     * 
+     * @param name
+     *            of the emf model
+     * @param model
+     *            name of the model
+     * @param metamodel
+     *            name of the metamodel
+     * @param readOnLoad
+     *            indicates if the model should be read on load
+     * @param storeOnDisposal
+     *            indicates if the model should be stored on disposal
+     * @return
+     * @throws EolModelLoadingException
+     * @throws URISyntaxException
+     */
+
+    @SuppressWarnings("deprecation")
+    protected EmfModel createEmfModel(String name,
+                                      String model,
+                                      String metamodel,
+                                      boolean readOnLoad,
+                                      boolean storeOnDisposal) throws EolModelLoadingException,
+        URISyntaxException
+    {
+        EmfModel emfModel = new EmfModel();
+        StringProperties properties = new StringProperties();
+        properties.put(EmfModel.PROPERTY_NAME, name);
+        properties.put(EmfModel.PROPERTY_ALIASES, name);
+        properties.put(EmfModel.PROPERTY_FILE_BASED_METAMODEL_URI, "file:/" +
+            getFile(metamodel).getAbsolutePath());
+        properties.put(EmfModel.PROPERTY_MODEL_URI, "file:/" + getFile(model).getAbsolutePath());
+        properties.put(EmfModel.PROPERTY_IS_METAMODEL_FILE_BASED, "true");
+        properties.put(EmfModel.PROPERTY_READONLOAD, readOnLoad + "");
+        properties.put(EmfModel.PROPERTY_CACHED, "true");
+        properties.put(EmfModel.PROPERTY_STOREONDISPOSAL, storeOnDisposal + "");
+        emfModel.load(properties, "");
+        // System.out.println(emfModel.allContents());
+        return emfModel;
+    }
+
+    /**
+     * Returns a new File instance from the given filename
+     * 
+     * @param fileName
+     *            of the file
+     * @return
+     * @throws URISyntaxException
+     */
+    public File getFile(String fileName) throws URISyntaxException {
+        ;
+        return new File(fileName);
+    }
+
+    /**
+     * Restores the metamodels, so that they are registered in the EPackage registry
+     */
+    private void restoreMetaModels() {
+        for (String key : metaModelCache.keySet()) {
+            EPackage.Registry.INSTANCE.put(key, metaModelCache.get(key));
+        };
+    }
+
+    /**
+     * Unregister the metamodels from the EPackage registry
+     * 
+     * @param filter
+     *            for filtering out certain instances
+     */
+    private void unregisterMetaModels(String filter) {
+        for (String key : EPackage.Registry.INSTANCE.keySet()) {
+            if (key.contains(filter)) {
+                metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
+            }
+        };
+        for (String key : metaModelCache.keySet()) {
+            EPackage.Registry.INSTANCE.remove(key);
+        };
+    }
+
+    /**
+     * Returns true if decent binary model is used
+     * 
+     * @return
+     */
+
+    public boolean isUseDECENTBinary() {
+        return useDECENTBinary;
+    }
+
+    /**
+     * Sets the boolean which indicates, if the decent binary model is used
+     * 
+     * @param useDECENTBinary
+     */
+    public void setUseDECENTBinary(boolean useDECENTBinary) {
+        this.useDECENTBinary = useDECENTBinary;
+    }
+
+    /**
+     * Returns true if arffx binary model is used
+     * 
+     * @return
+     */
+    public boolean isUseARFFxBinary() {
+        return useARFFxBinary;
+    }
+
+    /**
+     * Sets the boolean which indicates, if the arffx binary model is used
+     * 
+     * @param useARFFxBinary
+     */
+
+    public void setUseARFFxBinary(boolean useARFFxBinary) {
+        this.useARFFxBinary = useARFFxBinary;
+    }
+
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.decentApp;
 
@@ -12,30 +26,28 @@
  * 
  * @author Philip Makedonski, Fabian Trautsch
- *
+ * 
  */
 public class DECENTResourceTool extends ResourceTool {
-	
-	/**
-	 * Initializes the Tool Factory, from which the models can be loaded and
-	 * inizializes the validator.
-	 */
-	public DECENTResourceTool(){
-		super(DECENTResourceTool.class.getName());
-		DECENTPackageImpl.init();
-		this.resourceFactory = new DECENTResourceFactoryImpl();
-		initializeValidator();
-	}
-	
-	/**
-	 * Inizializes the model validator
-	 */
-	@Override
-	protected void initializeValidator(){
-		super.initializeValidator();
-		EObjectValidator validator = new EObjectValidator();
-	    EValidator.Registry.INSTANCE.put(DECENTPackage.eINSTANCE, validator);
-	}
-	
-	
+
+    /**
+     * Initializes the Tool Factory, from which the models can be loaded and initializes the
+     * validator.
+     */
+    public DECENTResourceTool() {
+        super(DECENTResourceTool.class.getName());
+        DECENTPackageImpl.init();
+        this.resourceFactory = new DECENTResourceFactoryImpl();
+        initializeValidator();
+    }
+
+    /**
+     * Initializes the model validator
+     */
+    @Override
+    protected void initializeValidator() {
+        super.initializeValidator();
+        EObjectValidator validator = new EObjectValidator();
+        EValidator.Registry.INSTANCE.put(DECENTPackage.eINSTANCE, validator);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.decentApp;
 
@@ -8,35 +22,35 @@
  * 
  * @author Philip Makedonski
- *
+ * 
  */
 public abstract class FileWatcher extends TimerTask {
-	// Last timestamp
-	private long timeStamp;
-	
-	// File to watch
-	private File file;
+    // Last timestamp
+    private long timeStamp;
 
-	/**
-	 * Constructor
-	 * @param file
-	 */
-	public FileWatcher(File file) {
-		this.file = file;
-		this.timeStamp = file.lastModified();
-	}
+    // File to watch
+    private File file;
 
-	/**
-	 * Watches a file and executes the onChange Method
-	 * if a file is changed
-	 */
-	public final void run() {
-		long timeStamp = file.lastModified();
+    /**
+     * Constructor
+     * 
+     * @param file
+     */
+    public FileWatcher(File file) {
+        this.file = file;
+        this.timeStamp = file.lastModified();
+    }
 
-		if (this.timeStamp != timeStamp) {
-			this.timeStamp = timeStamp;
-			onChange(file);
-		}
-	}
+    /**
+     * Watches a file and executes the onChange Method if a file is changed
+     */
+    public final void run() {
+        long timeStamp = file.lastModified();
 
-	protected abstract void onChange(File file);
+        if (this.timeStamp != timeStamp) {
+            this.timeStamp = timeStamp;
+            onChange(file);
+        }
+    }
+
+    protected abstract void onChange(File file);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.decentApp;
 
@@ -32,289 +46,335 @@
  * 
  * @author Philip Makedonski
- *
+ * 
  */
 public class ResourceTool {
 
-	protected ResourceFactoryImpl resourceFactory = new XMIResourceFactoryImpl();
-
-	/**
-	 * Constructor
-	 * @param loggedClass
-	 */
-	public ResourceTool(String loggedClass) {
-		System.setProperty("org.slf4j.simpleLogger.logFile","validation.log");
-		System.setProperty("org.slf4j.simpleLogger.logFile","System.out");
-	}
-
-	/**
-	 * Initializes the validator
-	 */
-	protected void initializeValidator() {
-	//		OCL.initialize(null);
-			String oclDelegateURI = OCLConstants.OCL_DELEGATE_URI+"/Pivot";
-			
-		    EOperation.Internal.InvocationDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI,
-		        new OCLInvocationDelegateFactory(oclDelegateURI));
-		    EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI,
-		        new OCLSettingDelegateFactory(oclDelegateURI));
-		    EValidator.ValidationDelegate.Registry.INSTANCE.put(oclDelegateURI,
-		        new OCLValidationDelegateFactory(oclDelegateURI));
-		    
-	//	    EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI, 
-	//	    	new OCLSettingDelegateFactory.Global());
-	//	    QueryDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI, new OCLQueryDelegateFactory.Global());
-		    
-		}
-
-	/**
-	 * Validates the ressource
-	 * @param resource to validate
-	 */
-	public void validateResource(Resource resource) {
-	    BasicDiagnostic diagnostics = new BasicDiagnostic();
-	    boolean valid = true;
-	    for (EObject eo : resource.getContents())
-	    {
-	    	Map<Object, Object> context = new HashMap<Object, Object>();
-	    	boolean validationResult = Diagnostician.INSTANCE.validate(eo, diagnostics, context);
-	    	showDiagnostics(diagnostics, "");
-			valid &= validationResult;
-	    }
-	    
-	    if (!valid){
-	    	System.out.println("Problem with validation!");
-	    }
-	}
-
-	/**
-	 * Output method for showing diagnostics for different ressources
-	 * @param diagnostics
-	 * @param indent
-	 */
-	protected void showDiagnostics(Diagnostic diagnostics, String indent) {
-		indent+="  ";
-		for (Diagnostic d : diagnostics.getChildren()){
-			System.out.println(indent+d.getSource());
-			System.out.println(indent+"  "+d.getMessage());
-			showDiagnostics(d,indent);
-		}
-	}
-
-	
-	/**
-	 * Loads a ressource from XMI
-	 * @param inputPath path to the xmi
-	 * @param extension of the ressource to load
-	 * @param p the given EPackage
-	 * @return
-	 */
-	//TODO: workarounds copied from respective methods without EPackage parameter
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	public Resource loadResourceFromXMI(String inputPath, String extension, EPackage p) {
-	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-		m.put(extension, resourceFactory);
-	    ResourceSet resSetIn = new ResourceSetImpl();
-	    //critical part
-	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
-
-	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
-	    try {
-	    	Map options = new HashMap<>();
-	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
-//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-			inputResource.load(options);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		return inputResource;
-	}
-	
-	/**
-	 * Loads a ressource from XMI
-	 * @param inputPath path to the xmi
-	 * @param extension of the ressource to load
-	 * @return
-	 */
-
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	public Resource loadResourceFromXMI(String inputPath, String extension) {
-	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-		m.put(extension, resourceFactory);
-	    ResourceSet resSetIn = new ResourceSetImpl();
-	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
-	    try {
-	    	Map options = new HashMap<>();
-	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
-//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-			inputResource.load(options);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		return inputResource;
-	}
-
-	/**
-	 * Gets a ressource from a binary form
-	 * @param inputPath path to the binary
-	 * @param extension of the model to load
-	 * @param p EPackage to put the loaded ressource in
-	 * @return
-	 */
-	public Resource getResourceFromBinary(String inputPath, String extension, EPackage p) {
-	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-	    m.put(extension, new Resource.Factory() {
-
-			@Override
-			public Resource createResource(URI uri) {
-				return new BinaryResourceImpl(uri);
-			}
-			
-		});	    
-	    
-	    ResourceSet resSetIn = new ResourceSetImpl();
-	    //critical part
-	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
-
-	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
-		return inputResource;
-	}
-
-	
-	/**
-	 * Loads a ressource from a binary form
-	 * @param inputPath path to the binary
-	 * @param extension of the model to load
-	 * @param p EPackage to put the loaded ressource in
-	 * @return
-	 */
-	//TODO: workarounds copied from respective methods without EPackage parameter
-	@SuppressWarnings({ "rawtypes" })
-	public Resource loadResourceFromBinary(String inputPath, String extension, EPackage p) {
-	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-	    m.put(extension, new Resource.Factory() {
-
-			@Override
-			public Resource createResource(URI uri) {
-				return new BinaryResourceImpl(uri);
-			}
-			
-		});	    
-	    
-	    ResourceSet resSetIn = new ResourceSetImpl();
-	    //critical part
-	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
-
-	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
-	    if (new File(inputPath).exists()) {
-	    	
-		    try {
-		    	Map options = new HashMap<>();
-//		    	options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
-//		    	options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
-	//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-				inputResource.load(options);
-			} catch (IOException e) {
-				e.printStackTrace();
-			}
-	    }
-		return inputResource;
-	}
-
-	/**
-	 * Loads a ressource from a binary form
-	 * 
-	 * @param inputPath path to the binary
-	 * @param extension of the model to load
-	 * @return
-	 */
-	@SuppressWarnings({ "rawtypes" })
-	public Resource loadResourceFromBinary(String inputPath, String extension) {
-	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-	    m.put(extension, new Resource.Factory() {
-
-			@Override
-			public Resource createResource(URI uri) {
-				return new BinaryResourceImpl(uri);
-			}
-			
-		});	    
-	    
-	    ResourceSet resSetIn = new ResourceSetImpl();
-	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
-	    try {
-	    	Map options = new HashMap<>();
-//	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
-//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-			inputResource.load(options);
-		} catch (IOException e) {
-			e.printStackTrace();
-		}
-		return inputResource;
-	}
-
-	/**
-	 * Stores the binary resource contents to a given path
-	 * 
-	 * @param contents EList of different EObjects to store
-	 * @param outputPath path to store to
-	 * @param extension of the model to store
-	 */
-	@SuppressWarnings({ "rawtypes" })
-	public void storeBinaryResourceContents(EList<EObject> contents, String outputPath, String extension) {
-		Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-		m.put(extension, new Resource.Factory() {
-
-			@Override
-			public Resource createResource(URI uri) {
-				return new BinaryResourceImpl(uri);
-			}
-			
-		});
-		
-	    ResourceSet resSet = new ResourceSetImpl();
-		Resource outputResource = resSet.createResource(URI.createURI(outputPath));
-	    outputResource.getContents().addAll(contents);
-	    try {
-	      Map options = new HashMap<>();
-//	      options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-	      outputResource.save(options);
-	    } catch (IOException e) {
-	      e.printStackTrace();
-	    }
-	}
-
-	/**
-	 * Stores the resource contents to a given path
-	 * 
-	 * @param contents EList of different EObjects to store
-	 * @param outputPath path to store to
-	 * @param extension of the model to store
-	 */
-	@SuppressWarnings({ "unchecked", "rawtypes" })
-	public void storeResourceContents(EList<EObject> contents, String outputPath, String extension) {
-		//TODO: duplicated from loadResourceFromXMI => move to a more appropriate location
-		Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
-	    Map<String, Object> m = reg.getExtensionToFactoryMap();
-		m.put(extension, resourceFactory);
-		
-	    ResourceSet resSet = new ResourceSetImpl();
-		Resource outputResource = resSet.createResource(URI.createURI(outputPath));
-	    outputResource.getContents().addAll(contents);
-	    try {
-	      Map options = new HashMap<>();
-	      options.put(XMIResourceImpl.OPTION_ENCODING, "UTF-8");
-//	      options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
-	      outputResource.save(options);
-	    } catch (IOException e) {
-	      e.printStackTrace();
-	    }
-	}
-
+    protected ResourceFactoryImpl resourceFactory = new XMIResourceFactoryImpl();
+
+    /**
+     * Constructor
+     * 
+     * @param loggedClass
+     */
+    public ResourceTool(String loggedClass) {
+        System.setProperty("org.slf4j.simpleLogger.logFile", "validation.log");
+        System.setProperty("org.slf4j.simpleLogger.logFile", "System.out");
+    }
+
+    /**
+     * Initializes the validator
+     */
+    protected void initializeValidator() {
+        // OCL.initialize(null);
+        String oclDelegateURI = OCLConstants.OCL_DELEGATE_URI + "/Pivot";
+
+        EOperation.Internal.InvocationDelegate.Factory.Registry.INSTANCE
+            .put(oclDelegateURI, new OCLInvocationDelegateFactory(oclDelegateURI));
+        EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE
+            .put(oclDelegateURI, new OCLSettingDelegateFactory(oclDelegateURI));
+        EValidator.ValidationDelegate.Registry.INSTANCE
+            .put(oclDelegateURI, new OCLValidationDelegateFactory(oclDelegateURI));
+
+        // EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI,
+        // new OCLSettingDelegateFactory.Global());
+        // QueryDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI, new
+        // OCLQueryDelegateFactory.Global());
+
+    }
+
+    /**
+     * Validates the resource
+     * 
+     * @param resource
+     *            to validate
+     */
+    public void validateResource(Resource resource) {
+        BasicDiagnostic diagnostics = new BasicDiagnostic();
+        boolean valid = true;
+        for (EObject eo : resource.getContents()) {
+            Map<Object, Object> context = new HashMap<Object, Object>();
+            boolean validationResult = Diagnostician.INSTANCE.validate(eo, diagnostics, context);
+            showDiagnostics(diagnostics, "");
+            valid &= validationResult;
+        }
+
+        if (!valid) {
+            System.out.println("Problem with validation!");
+        }
+    }
+
+    /**
+     * Output method for showing diagnostics for different resources
+     * 
+     * @param diagnostics
+     * @param indent
+     */
+    protected void showDiagnostics(Diagnostic diagnostics, String indent) {
+        indent += "  ";
+        for (Diagnostic d : diagnostics.getChildren()) {
+            System.out.println(indent + d.getSource());
+            System.out.println(indent + "  " + d.getMessage());
+            showDiagnostics(d, indent);
+        }
+    }
+
+    /**
+     * Loads a resource from XMI
+     * 
+     * @param inputPath
+     *            path to the xmi
+     * @param extension
+     *            of the resource to load
+     * @param p
+     *            the given EPackage
+     * @return
+     */
+    // TODO: workarounds copied from respective methods without EPackage parameter
+    @SuppressWarnings(
+        { "rawtypes", "unchecked" })
+    public Resource loadResourceFromXMI(String inputPath, String extension, EPackage p) {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, resourceFactory);
+        ResourceSet resSetIn = new ResourceSetImpl();
+        // critical part
+        resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+        Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+        try {
+            Map options = new HashMap<>();
+            options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+            // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+            // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+            inputResource.load(options);
+        }
+        catch (IOException e) {
+            e.printStackTrace();
+        }
+        return inputResource;
+    }
+
+    /**
+     * Loads a resource from XMI
+     * 
+     * @param inputPath
+     *            path to the xmi
+     * @param extension
+     *            of the resource to load
+     * @return
+     */
+
+    @SuppressWarnings(
+        { "rawtypes", "unchecked" })
+    public Resource loadResourceFromXMI(String inputPath, String extension) {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, resourceFactory);
+        ResourceSet resSetIn = new ResourceSetImpl();
+        Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+        try {
+            Map options = new HashMap<>();
+            options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+            // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+            // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+            inputResource.load(options);
+        }
+        catch (IOException e) {
+            e.printStackTrace();
+        }
+        return inputResource;
+    }
+
+    /**
+     * Gets a resource from a binary form
+     * 
+     * @param inputPath
+     *            path to the binary
+     * @param extension
+     *            of the model to load
+     * @param p
+     *            EPackage to put the loaded resource in
+     * @return
+     */
+    public Resource getResourceFromBinary(String inputPath, String extension, EPackage p) {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, new Resource.Factory() {
+
+            @Override
+            public Resource createResource(URI uri) {
+                return new BinaryResourceImpl(uri);
+            }
+
+        });
+
+        ResourceSet resSetIn = new ResourceSetImpl();
+        // critical part
+        resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+        Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+        return inputResource;
+    }
+
+    /**
+     * Loads a resource from a binary form
+     * 
+     * @param inputPath
+     *            path to the binary
+     * @param extension
+     *            of the model to load
+     * @param p
+     *            EPackage to put the loaded resource in
+     * @return
+     */
+    // TODO: workarounds copied from respective methods without EPackage parameter
+    @SuppressWarnings(
+        { "rawtypes" })
+    public Resource loadResourceFromBinary(String inputPath, String extension, EPackage p) {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, new Resource.Factory() {
+
+            @Override
+            public Resource createResource(URI uri) {
+                return new BinaryResourceImpl(uri);
+            }
+
+        });
+
+        ResourceSet resSetIn = new ResourceSetImpl();
+        // critical part
+        resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+        Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+        if (new File(inputPath).exists()) {
+
+            try {
+                Map options = new HashMap<>();
+                // options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+                // options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+                // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+                // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+                inputResource.load(options);
+            }
+            catch (IOException e) {
+                e.printStackTrace();
+            }
+        }
+        return inputResource;
+    }
+
+    /**
+     * Loads a resource from a binary form
+     * 
+     * @param inputPath
+     *            path to the binary
+     * @param extension
+     *            of the model to load
+     * @return
+     */
+    @SuppressWarnings(
+        { "rawtypes" })
+    public Resource loadResourceFromBinary(String inputPath, String extension) {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, new Resource.Factory() {
+
+            @Override
+            public Resource createResource(URI uri) {
+                return new BinaryResourceImpl(uri);
+            }
+
+        });
+
+        ResourceSet resSetIn = new ResourceSetImpl();
+        Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+        try {
+            Map options = new HashMap<>();
+            // options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+            // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+            // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+            inputResource.load(options);
+        }
+        catch (IOException e) {
+            e.printStackTrace();
+        }
+        return inputResource;
+    }
+
+    /**
+     * Stores the binary resource contents to a given path
+     * 
+     * @param contents
+     *            EList of different EObjects to store
+     * @param outputPath
+     *            path to store to
+     * @param extension
+     *            of the model to store
+     */
+    @SuppressWarnings(
+        { "rawtypes" })
+    public void storeBinaryResourceContents(EList<EObject> contents,
+                                            String outputPath,
+                                            String extension)
+    {
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, new Resource.Factory() {
+
+            @Override
+            public Resource createResource(URI uri) {
+                return new BinaryResourceImpl(uri);
+            }
+
+        });
+
+        ResourceSet resSet = new ResourceSetImpl();
+        Resource outputResource = resSet.createResource(URI.createURI(outputPath));
+        outputResource.getContents().addAll(contents);
+        try {
+            Map options = new HashMap<>();
+            // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+            // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+            outputResource.save(options);
+        }
+        catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * Stores the resource contents to a given path
+     * 
+     * @param contents
+     *            EList of different EObjects to store
+     * @param outputPath
+     *            path to store to
+     * @param extension
+     *            of the model to store
+     */
+    @SuppressWarnings(
+        { "unchecked", "rawtypes" })
+    public void storeResourceContents(EList<EObject> contents, String outputPath, String extension)
+    {
+        // TODO: duplicated from loadResourceFromXMI => move to a more appropriate location
+        Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+        Map<String, Object> m = reg.getExtensionToFactoryMap();
+        m.put(extension, resourceFactory);
+
+        ResourceSet resSet = new ResourceSetImpl();
+        Resource outputResource = resSet.createResource(URI.createURI(outputPath));
+        outputResource.getContents().addAll(contents);
+        try {
+            Map options = new HashMap<>();
+            options.put(XMIResourceImpl.OPTION_ENCODING, "UTF-8");
+            // options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF,
+            // XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+            outputResource.save(options);
+        }
+        catch (IOException e) {
+            e.printStackTrace();
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/AbstractWekaEvaluation.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/AbstractWekaEvaluation.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/AbstractWekaEvaluation.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.eval;
 
@@ -17,264 +31,295 @@
 
 /**
- * Base class for the evaluation of results of classifiers compatible with the {@link Classifier} interface.
- * For each classifier, the following metrics are calculated:
+ * Base class for the evaluation of results of classifiers compatible with the {@link Classifier}
+ * interface. For each classifier, the following metrics are calculated:
  * <ul>
- *  <li>succHe: Success with recall>0.7, precision>0.5</li>
- *  <li>succZi: Success with recall>0.7, precision>0.7</li>
- *  <li>succG75: Success with gscore>0.75</li>
- *  <li>succG60: Success with gscore>0.6</li>
- *  <li>error</li>
- *  <li>recall</li>
- *  <li>precision</li>
- *  <li>fscore</li>
- *  <li>gscore</li>
- *  <li>AUC</li>
- *  <li>AUCEC (weighted by LOC, if applicable; 0.0 if LOC not available)</li>
- *  <li>tpr: true positive rate</li>
- *  <li>tnr: true negative rate</li>
- *  <li>tp: true positives</li>
- *  <li>fp: false positives</li>
- *  <li>tn: true negatives</li>
- *  <li>fn: false negatives</li>
- *  <li>errortrain: training error</li>
- *  <li>recalltrain: training recall</li>
- *  <li>precisiontrain: training precision</li>
- *  <li>succHetrain: training success with recall>0.7 and precision>0.5
- * </ul> 
+ * <li>succHe: Success with recall>0.7, precision>0.5</li>
+ * <li>succZi: Success with recall>0.7, precision>0.7</li>
+ * <li>succG75: Success with gscore>0.75</li>
+ * <li>succG60: Success with gscore>0.6</li>
+ * <li>error</li>
+ * <li>recall</li>
+ * <li>precision</li>
+ * <li>fscore</li>
+ * <li>gscore</li>
+ * <li>AUC</li>
+ * <li>AUCEC (weighted by LOC, if applicable; 0.0 if LOC not available)</li>
+ * <li>tpr: true positive rate</li>
+ * <li>tnr: true negative rate</li>
+ * <li>tp: true positives</li>
+ * <li>fp: false positives</li>
+ * <li>tn: true negatives</li>
+ * <li>fn: false negatives</li>
+ * <li>errortrain: training error</li>
+ * <li>recalltrain: training recall</li>
+ * <li>precisiontrain: training precision</li>
+ * <li>succHetrain: training success with recall>0.7 and precision>0.5
+ * </ul>
+ * 
  * @author Steffen Herbold
  */
 public abstract class AbstractWekaEvaluation implements IEvaluationStrategy {
 
-	/**
-	 * writer for the evaluation results
-	 */
-	private PrintWriter output = new PrintWriter(System.out);
-	
-	private boolean outputIsSystemOut = true;
-	
-	/**
-	 * Creates the weka evaluator. Allows the creation of the evaluator in different ways, e.g., for cross-validation
-	 * or evaluation on the test data.
-	 * @param testdata test data
-	 * @param classifier classifier used
-	 * @return evaluator
-	 */
-	protected abstract Evaluation createEvaluator(Instances testdata, Classifier classifier);
-	
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.eval.EvaluationStrategy#apply(weka.core.Instances, weka.core.Instances, java.util.List, boolean)
-	 */
-	@Override
-	public void apply(Instances testdata, Instances traindata, List<ITrainer> trainers,
-			boolean writeHeader) {
-		final List<Classifier> classifiers = new LinkedList<Classifier>();
-		for( ITrainer trainer : trainers ) {
-			if( trainer instanceof IWekaCompatibleTrainer ) {
-				classifiers.add(((IWekaCompatibleTrainer) trainer).getClassifier());
-			} else {
-				throw new RuntimeException("The selected evaluator only support Weka classifiers");
-			}
-		}
-		
-		if( writeHeader ) {
-			output.append("version,size_test,size_training");
-			for( ITrainer trainer : trainers ) {
-				output.append(",succHe_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",succZi_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",succG75_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",succG60_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",error_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",recall_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",precision_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",fscore_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",gscore_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",mcc_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",auc_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",aucec_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",tpr_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",tnr_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",tp_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",fn_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",tn_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",fp_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",trainerror_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",trainrecall_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",trainprecision_" + ((IWekaCompatibleTrainer) trainer).getName());
-				output.append(",trainsuccHe_" + ((IWekaCompatibleTrainer) trainer).getName());
-			}
-			output.append(StringTools.ENDLINE);
-		}
-		
-		output.append(testdata.relationName());
-		output.append("," + testdata.numInstances());	
-		output.append("," + traindata.numInstances());
-		
-		Evaluation eval = null;
-		Evaluation evalTrain = null;
-		for( Classifier classifier : classifiers ) {
-			eval = createEvaluator(testdata, classifier);
-			evalTrain = createEvaluator(traindata, classifier);
-			
-			double pf = eval.numFalsePositives(1)/(eval.numFalsePositives(1)+eval.numTrueNegatives(1));
-			double gmeasure = 2*eval.recall(1)*(1.0-pf)/(eval.recall(1)+(1.0-pf));
-			double mcc = (eval.numTruePositives(1)*eval.numTrueNegatives(1)-eval.numFalsePositives(1)*eval.numFalseNegatives(1))/Math.sqrt((eval.numTruePositives(1)+eval.numFalsePositives(1))*(eval.numTruePositives(1)+eval.numFalseNegatives(1))*(eval.numTrueNegatives(1)+eval.numFalsePositives(1))*(eval.numTrueNegatives(1)+eval.numFalseNegatives(1)));
-			double aucec = calculateReviewEffort(testdata, classifier);
-			
-			if( eval.recall(1)>=0.7 && eval.precision(1) >= 0.5 ) {
-				output.append(",1");
-			} else {
-				output.append(",0");
-			}
-			
-			if( eval.recall(1)>=0.7 && eval.precision(1) >= 0.7 ) {
-				output.append(",1");
-			} else {
-				output.append(",0");
-			}
-			
-			if( gmeasure>0.75 ) {
-				output.append(",1");
-			} else {
-				output.append(",0");
-			}
-			
-			if( gmeasure>0.6 ) {
-				output.append(",1");
-			} else {
-				output.append(",0");
-			}
-			
-			output.append("," + eval.errorRate());
-			output.append("," + eval.recall(1));
-			output.append("," + eval.precision(1));
-			output.append("," + eval.fMeasure(1));
-			output.append("," + gmeasure);
-			output.append("," + mcc);
-			output.append("," + eval.areaUnderROC(1));
-			output.append("," + aucec);
-			output.append("," + eval.truePositiveRate(1));
-			output.append("," + eval.trueNegativeRate(1));
-			output.append("," + eval.numTruePositives(1));
-			output.append("," + eval.numFalseNegatives(1));
-			output.append("," + eval.numTrueNegatives(1));
-			output.append("," + eval.numFalsePositives(1));
-			output.append("," + evalTrain.errorRate());
-			output.append("," + evalTrain.recall(1));
-			output.append("," + evalTrain.precision(1));
-			if( evalTrain.recall(1)>=0.7 && evalTrain.precision(1) >= 0.5 ) {
-				output.append(",1");
-			} else {
-				output.append(",0");
-			}
-		}
-		
-		output.append(StringTools.ENDLINE);
-		output.flush();
-	}
-	
-	private double calculateReviewEffort(Instances testdata, Classifier classifier) {
-		
-		final Attribute loc = testdata.attribute("loc");
-		if( loc==null ) {
-			return 0.0;
-		}
-				
-		final List<Integer> bugPredicted = new ArrayList<>();
-		final List<Integer> nobugPredicted = new ArrayList<>(); 
-		double totalLoc = 0.0d;
-		int totalBugs = 0;
-		for( int i=0 ; i<testdata.numInstances() ; i++ ) {
-			try {
-				if( Double.compare(classifier.classifyInstance(testdata.instance(i)),0.0d)==0 ) {
-					nobugPredicted.add(i);
-				} else {
-					bugPredicted.add(i);
-				}
-			} catch (Exception e) {
-				throw new RuntimeException("unexpected error during the evaluation of the review effort", e);
-			}
-			if(Double.compare(testdata.instance(i).classValue(),1.0d)==0) {
-				totalBugs++;
-			}
-			totalLoc += testdata.instance(i).value(loc);
-		}
-		
-		final List<Double> reviewLoc = new ArrayList<>(testdata.numInstances());
-		final List<Double> bugsFound = new ArrayList<>(testdata.numInstances());
-		
-		double currentBugsFound = 0;
-		
-		while( !bugPredicted.isEmpty() ) {
-			double minLoc = Double.MAX_VALUE;
-			int minIndex = -1;
-			for( int i=0 ; i<bugPredicted.size() ; i++ ) {
-				double currentLoc = testdata.instance(bugPredicted.get(i)).value(loc);
-				if( currentLoc<minLoc ) {
-					minIndex = i;
-					minLoc = currentLoc;
-				}
-			}
-			if( minIndex!=-1 ) {
-				reviewLoc.add(minLoc/totalLoc);
-				
-				currentBugsFound += testdata.instance(bugPredicted.get(minIndex)).classValue();
-				bugsFound.add(currentBugsFound);
-				
-				bugPredicted.remove(minIndex);
-			} else {
-				throw new RuntimeException("Shouldn't happen!");
-			}
-		}
-		
-		while( !nobugPredicted.isEmpty() ) {
-			double minLoc = Double.MAX_VALUE;
-			int minIndex = -1;
-			for( int i=0 ; i<nobugPredicted.size() ; i++ ) {
-				double currentLoc = testdata.instance(nobugPredicted.get(i)).value(loc);
-				if( currentLoc<minLoc ) {
-					minIndex = i;
-					minLoc = currentLoc;
-				}
-			}
-			if( minIndex!=-1 ) {				
-				reviewLoc.add(minLoc/totalLoc);
-				
-				currentBugsFound += testdata.instance(nobugPredicted.get(minIndex)).classValue();
-				bugsFound.add(currentBugsFound);
-				nobugPredicted.remove(minIndex);
-			} else {
-				throw new RuntimeException("Shouldn't happen!");
-			}
-		}
-		
-		double auc = 0.0;
-		for( int i=0 ; i<bugsFound.size() ; i++ ) {
-			auc += reviewLoc.get(i)*bugsFound.get(i)/totalBugs;
-		}
-		
-		return auc;
-	}
-
-	/*
-	 * (non-Javadoc)
-	 * @see de.ugoe.cs.cpdp.Parameterizable#setParameter(java.lang.String)
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		if( output!=null && !outputIsSystemOut ) {
-			output.close();
-		}
-		if( "system.out".equals(parameters) || "".equals(parameters) ) {
-			output = new PrintWriter(System.out);
-			outputIsSystemOut = true;
-		} else {
-			try {
-				output = new PrintWriter(new FileOutputStream(parameters));
-				outputIsSystemOut = false;
-			} catch (FileNotFoundException e) {
-				throw new RuntimeException(e);
-			}
-		}
-	}
+    /**
+     * writer for the evaluation results
+     */
+    private PrintWriter output = new PrintWriter(System.out);
+
+    private boolean outputIsSystemOut = true;
+
+    /**
+     * Creates the weka evaluator. Allows the creation of the evaluator in different ways, e.g., for
+     * cross-validation or evaluation on the test data.
+     * 
+     * @param testdata
+     *            test data
+     * @param classifier
+     *            classifier used
+     * @return evaluator
+     */
+    protected abstract Evaluation createEvaluator(Instances testdata, Classifier classifier);
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.eval.EvaluationStrategy#apply(weka.core.Instances, weka.core.Instances,
+     * java.util.List, boolean)
+     */
+    @Override
+    public void apply(Instances testdata,
+                      Instances traindata,
+                      List<ITrainer> trainers,
+                      boolean writeHeader)
+    {
+        final List<Classifier> classifiers = new LinkedList<Classifier>();
+        for (ITrainer trainer : trainers) {
+            if (trainer instanceof IWekaCompatibleTrainer) {
+                classifiers.add(((IWekaCompatibleTrainer) trainer).getClassifier());
+            }
+            else {
+                throw new RuntimeException("The selected evaluator only support Weka classifiers");
+            }
+        }
+
+        if (writeHeader) {
+            output.append("version,size_test,size_training");
+            for (ITrainer trainer : trainers) {
+                output.append(",succHe_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",succZi_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",succG75_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",succG60_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",error_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",recall_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",precision_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",fscore_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",gscore_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",mcc_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",auc_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",aucec_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",tpr_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",tnr_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",tp_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",fn_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",tn_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",fp_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",trainerror_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",trainrecall_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",trainprecision_" + ((IWekaCompatibleTrainer) trainer).getName());
+                output.append(",trainsuccHe_" + ((IWekaCompatibleTrainer) trainer).getName());
+            }
+            output.append(StringTools.ENDLINE);
+        }
+
+        output.append(testdata.relationName());
+        output.append("," + testdata.numInstances());
+        output.append("," + traindata.numInstances());
+
+        Evaluation eval = null;
+        Evaluation evalTrain = null;
+        for (Classifier classifier : classifiers) {
+            eval = createEvaluator(testdata, classifier);
+            evalTrain = createEvaluator(traindata, classifier);
+
+            double pf =
+                eval.numFalsePositives(1) / (eval.numFalsePositives(1) + eval.numTrueNegatives(1));
+            double gmeasure = 2 * eval.recall(1) * (1.0 - pf) / (eval.recall(1) + (1.0 - pf));
+            double mcc =
+                (eval.numTruePositives(1) * eval.numTrueNegatives(1) - eval.numFalsePositives(1) *
+                    eval.numFalseNegatives(1)) /
+                    Math.sqrt((eval.numTruePositives(1) + eval.numFalsePositives(1)) *
+                        (eval.numTruePositives(1) + eval.numFalseNegatives(1)) *
+                        (eval.numTrueNegatives(1) + eval.numFalsePositives(1)) *
+                        (eval.numTrueNegatives(1) + eval.numFalseNegatives(1)));
+            double aucec = calculateReviewEffort(testdata, classifier);
+
+            if (eval.recall(1) >= 0.7 && eval.precision(1) >= 0.5) {
+                output.append(",1");
+            }
+            else {
+                output.append(",0");
+            }
+
+            if (eval.recall(1) >= 0.7 && eval.precision(1) >= 0.7) {
+                output.append(",1");
+            }
+            else {
+                output.append(",0");
+            }
+
+            if (gmeasure > 0.75) {
+                output.append(",1");
+            }
+            else {
+                output.append(",0");
+            }
+
+            if (gmeasure > 0.6) {
+                output.append(",1");
+            }
+            else {
+                output.append(",0");
+            }
+
+            output.append("," + eval.errorRate());
+            output.append("," + eval.recall(1));
+            output.append("," + eval.precision(1));
+            output.append("," + eval.fMeasure(1));
+            output.append("," + gmeasure);
+            output.append("," + mcc);
+            output.append("," + eval.areaUnderROC(1));
+            output.append("," + aucec);
+            output.append("," + eval.truePositiveRate(1));
+            output.append("," + eval.trueNegativeRate(1));
+            output.append("," + eval.numTruePositives(1));
+            output.append("," + eval.numFalseNegatives(1));
+            output.append("," + eval.numTrueNegatives(1));
+            output.append("," + eval.numFalsePositives(1));
+            output.append("," + evalTrain.errorRate());
+            output.append("," + evalTrain.recall(1));
+            output.append("," + evalTrain.precision(1));
+            if (evalTrain.recall(1) >= 0.7 && evalTrain.precision(1) >= 0.5) {
+                output.append(",1");
+            }
+            else {
+                output.append(",0");
+            }
+        }
+
+        output.append(StringTools.ENDLINE);
+        output.flush();
+    }
+
+    private double calculateReviewEffort(Instances testdata, Classifier classifier) {
+
+        final Attribute loc = testdata.attribute("loc");
+        if (loc == null) {
+            return 0.0;
+        }
+
+        final List<Integer> bugPredicted = new ArrayList<>();
+        final List<Integer> nobugPredicted = new ArrayList<>();
+        double totalLoc = 0.0d;
+        int totalBugs = 0;
+        for (int i = 0; i < testdata.numInstances(); i++) {
+            try {
+                if (Double.compare(classifier.classifyInstance(testdata.instance(i)), 0.0d) == 0) {
+                    nobugPredicted.add(i);
+                }
+                else {
+                    bugPredicted.add(i);
+                }
+            }
+            catch (Exception e) {
+                throw new RuntimeException(
+                                           "unexpected error during the evaluation of the review effort",
+                                           e);
+            }
+            if (Double.compare(testdata.instance(i).classValue(), 1.0d) == 0) {
+                totalBugs++;
+            }
+            totalLoc += testdata.instance(i).value(loc);
+        }
+
+        final List<Double> reviewLoc = new ArrayList<>(testdata.numInstances());
+        final List<Double> bugsFound = new ArrayList<>(testdata.numInstances());
+
+        double currentBugsFound = 0;
+
+        while (!bugPredicted.isEmpty()) {
+            double minLoc = Double.MAX_VALUE;
+            int minIndex = -1;
+            for (int i = 0; i < bugPredicted.size(); i++) {
+                double currentLoc = testdata.instance(bugPredicted.get(i)).value(loc);
+                if (currentLoc < minLoc) {
+                    minIndex = i;
+                    minLoc = currentLoc;
+                }
+            }
+            if (minIndex != -1) {
+                reviewLoc.add(minLoc / totalLoc);
+
+                currentBugsFound += testdata.instance(bugPredicted.get(minIndex)).classValue();
+                bugsFound.add(currentBugsFound);
+
+                bugPredicted.remove(minIndex);
+            }
+            else {
+                throw new RuntimeException("Shouldn't happen!");
+            }
+        }
+
+        while (!nobugPredicted.isEmpty()) {
+            double minLoc = Double.MAX_VALUE;
+            int minIndex = -1;
+            for (int i = 0; i < nobugPredicted.size(); i++) {
+                double currentLoc = testdata.instance(nobugPredicted.get(i)).value(loc);
+                if (currentLoc < minLoc) {
+                    minIndex = i;
+                    minLoc = currentLoc;
+                }
+            }
+            if (minIndex != -1) {
+                reviewLoc.add(minLoc / totalLoc);
+
+                currentBugsFound += testdata.instance(nobugPredicted.get(minIndex)).classValue();
+                bugsFound.add(currentBugsFound);
+                nobugPredicted.remove(minIndex);
+            }
+            else {
+                throw new RuntimeException("Shouldn't happen!");
+            }
+        }
+
+        double auc = 0.0;
+        for (int i = 0; i < bugsFound.size(); i++) {
+            auc += reviewLoc.get(i) * bugsFound.get(i) / totalBugs;
+        }
+
+        return auc;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.Parameterizable#setParameter(java.lang.String)
+     */
+    @Override
+    public void setParameter(String parameters) {
+        if (output != null && !outputIsSystemOut) {
+            output.close();
+        }
+        if ("system.out".equals(parameters) || "".equals(parameters)) {
+            output = new PrintWriter(System.out);
+            outputIsSystemOut = true;
+        }
+        else {
+            try {
+                output = new PrintWriter(new FileOutputStream(parameters));
+                outputIsSystemOut = false;
+            }
+            catch (FileNotFoundException e) {
+                throw new RuntimeException(e);
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/CVWekaEvaluation.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/CVWekaEvaluation.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/CVWekaEvaluation.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.eval;
 
@@ -12,25 +26,29 @@
 /**
  * Implements the {@link AbstractWekaEvaluation} for 10-fold cross validation.
+ * 
  * @author Steffen Herbold
  */
 public class CVWekaEvaluation extends AbstractWekaEvaluation {
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.eval.AbstractWekaEvaluation#createEvaluator(weka.core.Instances, weka.classifiers.Classifier)
-	 */
-	@Override
-	protected Evaluation createEvaluator(Instances testdata, Classifier classifier) {
-		PrintStream errStr	= System.err;
-		System.setErr(new PrintStream(new NullOutputStream()));
-		try {
-			final Evaluation eval = new Evaluation(testdata);
-			eval.crossValidateModel(classifier, testdata, 10, new Random(1));
-			return eval;
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		} finally {
-			System.setErr(errStr);
-		}
-	}
+
+    /**
+     * @see de.ugoe.cs.cpdp.eval.AbstractWekaEvaluation#createEvaluator(weka.core.Instances,
+     *      weka.classifiers.Classifier)
+     */
+    @Override
+    protected Evaluation createEvaluator(Instances testdata, Classifier classifier) {
+        PrintStream errStr = System.err;
+        System.setErr(new PrintStream(new NullOutputStream()));
+        try {
+            final Evaluation eval = new Evaluation(testdata);
+            eval.crossValidateModel(classifier, testdata, 10, new Random(1));
+            return eval;
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        finally {
+            System.setErr(errStr);
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/IEvaluationStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/IEvaluationStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/IEvaluationStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.eval;
 
@@ -9,16 +23,22 @@
 
 /**
- * Interface for evaluation strategies to evaluate the performance of classifiers. 
+ * Interface for evaluation strategies to evaluate the performance of classifiers.
+ * 
  * @author Steffen Herbold
  */
 public interface IEvaluationStrategy extends IParameterizable {
 
-	/**
-	 * Applies the evaluation strategy. 
-	 * @param testdata test data for the evaluation
-	 * @param traindata training data used
-	 * @param trainers list of training algorithms used to train the classifiers
-	 * @param writeHeader if true, a header line for the results file is written (may not be applicable)
-	 */
-	void apply(Instances testdata, Instances traindata, List<ITrainer> trainers, boolean writeHeader);
+    /**
+     * Applies the evaluation strategy.
+     * 
+     * @param testdata
+     *            test data for the evaluation
+     * @param traindata
+     *            training data used
+     * @param trainers
+     *            list of training algorithms used to train the classifiers
+     * @param writeHeader
+     *            if true, a header line for the results file is written (may not be applicable)
+     */
+    void apply(Instances testdata, Instances traindata, List<ITrainer> trainers, boolean writeHeader);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/NormalWekaEvaluation.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/NormalWekaEvaluation.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/eval/NormalWekaEvaluation.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.eval;
 
@@ -7,21 +21,24 @@
 /**
  * Implements the {@link AbstractWekaEvaluation} for evaluation on the test data.
+ * 
  * @author Steffen Herbold
- *
+ * 
  */
 public class NormalWekaEvaluation extends AbstractWekaEvaluation {
 
-	/**
-	 * @see de.ugoe.cs.cpdp.eval.AbstractWekaEvaluation#createEvaluator(weka.core.Instances, weka.classifiers.Classifier)
-	 */
-	@Override
-	protected Evaluation createEvaluator(Instances testdata, Classifier classifier) {
-		try {
-			final Evaluation eval = new Evaluation(testdata);
-			eval.evaluateModel(classifier, testdata);
-			return eval;
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		}
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.eval.AbstractWekaEvaluation#createEvaluator(weka.core.Instances,
+     *      weka.classifiers.Classifier)
+     */
+    @Override
+    protected Evaluation createEvaluator(Instances testdata, Classifier classifier) {
+        try {
+            final Evaluation eval = new Evaluation(testdata);
+            eval.evaluateModel(classifier, testdata);
+            return eval;
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.execution;
 
@@ -19,25 +33,25 @@
 
 /**
- * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps 
- * of this ClassifierCreationExperiment are as follows:
+ * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}.
+ * The steps of this ClassifierCreationExperiment are as follows:
  * <ul>
- *  <li>load the data from the provided data path</li>
- *  <li>check if given resultsdir exists, if not create one</li>
- *  <li>execute the following steps for each data set:
- *  <ul>
- *   <li>load the dataset</li>
- *   <li>set testdata == traindata</li>
- *   <li>preprocess the data</li>
- *   <li>postprocess the data</li>
- *   <li>for each configured trainer do the following:</li>
- *   <ul>
- *   	<li>if the classifier should be saved, train it with the dataset</li>
- *   	<li>save it in the results dir</li>
- *   	<li>For each configured evaluator: Do the evaluation and save results</li>
- *   </ul>
- *  </ul>
+ * <li>load the data from the provided data path</li>
+ * <li>check if given resultsdir exists, if not create one</li>
+ * <li>execute the following steps for each data set:
+ * <ul>
+ * <li>load the dataset</li>
+ * <li>set testdata == traindata</li>
+ * <li>preprocess the data</li>
+ * <li>postprocess the data</li>
+ * <li>for each configured trainer do the following:</li>
+ * <ul>
+ * <li>if the classifier should be saved, train it with the dataset</li>
+ * <li>save it in the results dir</li>
+ * <li>For each configured evaluator: Do the evaluation and save results</li>
  * </ul>
- *   
- * Note that this class implements {@link IExectuionStrategy}, i.e., each experiment can be started 
+ * </ul>
+ * </ul>
+ * 
+ * Note that this class implements {@link IExecutionStrategy}, i.e., each experiment can be started
  * in its own thread.
  * 
@@ -46,110 +60,125 @@
 public class ClassifierCreationExperiment implements IExecutionStrategy {
 
-	/**
-	 * configuration of the experiment
-	 */
-	private final ExperimentConfiguration config;
-	
-	/**
-	 * Constructor. Creates a new experiment based on a configuration.
-	 * @param config configuration of the experiment
-	 */
-	public ClassifierCreationExperiment(ExperimentConfiguration config) {
-		this.config = config;
-	}
-	
-	/**
-	 * Executes the experiment with the steps as described in the class comment.
-	 * @see Runnable#run() 
-	 */
-	@Override
-	public void run() {
-		final List<SoftwareVersion> versions = new LinkedList<>();
-		
-		boolean writeHeader = true;
-		
-		for(IVersionLoader loader : config.getLoaders()) {
-			versions.addAll(loader.load());
-		}
-	
+    /**
+     * configuration of the experiment
+     */
+    private final ExperimentConfiguration config;
 
-		File resultsDir = new File(config.getResultsPath());
-		if (!resultsDir.exists()) {
-			resultsDir.mkdir();
-		}
-		
-		
-		int versionCount = 1;
-		for( SoftwareVersion testVersion : versions ) {
-			
-			// At first: traindata == testdata
-			Instances testdata = testVersion.getInstances();
-			Instances traindata = new Instances(testdata);
-			
-			// Give the dataset a new name
-			testdata.setRelationName(testVersion.getProject());
-			
-			for( IProcessesingStrategy processor : config.getPreProcessors() ) {
-				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), processor.getClass().getName()));
-				processor.apply(testdata, traindata);
-			}
-			
-			for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
-				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), dataselector.getClass().getName()));
-				traindata = dataselector.apply(testdata, traindata);
-			}
-			
-			for( IProcessesingStrategy processor : config.getPostProcessors() ) {
-				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), processor.getClass().getName()));
-				processor.apply(testdata, traindata);
-			}
-			
-		
-			
-			
-			// Trainerlist for evaluation later on
-			List<ITrainer> allTrainers = new LinkedList<>();
-			
-			for( ITrainingStrategy trainer : config.getTrainers() ) {
+    /**
+     * Constructor. Creates a new experiment based on a configuration.
+     * 
+     * @param config
+     *            configuration of the experiment
+     */
+    public ClassifierCreationExperiment(ExperimentConfiguration config) {
+        this.config = config;
+    }
 
-				// Add trainer to list for evaluation
-				allTrainers.add(trainer);
-				
-				// Train classifier
-				trainer.apply(traindata);
-				
-				if(config.getSaveClassifier()) {
-					// If classifier should be saved, train him and save him
-					// be careful with typecasting here!
-					IWekaCompatibleTrainer trainerToSave = (IWekaCompatibleTrainer) trainer;
-					//Console.println(trainerToSave.getClassifier().toString());
-					try {
-						weka.core.SerializationHelper.write(resultsDir.getAbsolutePath()+"/"+trainer.getName()+"-"+testVersion.getProject(), trainerToSave.getClassifier());
-					} catch (Exception e) {
-						e.printStackTrace();
-					}
-					
-				}
-			}
-			
-			
-			
-			for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
-				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), evaluator.getClass().getName()));
+    /**
+     * Executes the experiment with the steps as described in the class comment.
+     * 
+     * @see Runnable#run()
+     */
+    @Override
+    public void run() {
+        final List<SoftwareVersion> versions = new LinkedList<>();
 
-				if( writeHeader ) {
-					evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
-				}
-				evaluator.apply(testdata, traindata, allTrainers, writeHeader);
-				writeHeader = false;
-			}
-			
-			versionCount++;
-			
-			Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject()));
-			
-		}
-		
-	}
-	
+        boolean writeHeader = true;
+
+        for (IVersionLoader loader : config.getLoaders()) {
+            versions.addAll(loader.load());
+        }
+
+        File resultsDir = new File(config.getResultsPath());
+        if (!resultsDir.exists()) {
+            resultsDir.mkdir();
+        }
+
+        int versionCount = 1;
+        for (SoftwareVersion testVersion : versions) {
+
+            // At first: traindata == testdata
+            Instances testdata = testVersion.getInstances();
+            Instances traindata = new Instances(testdata);
+
+            // Give the dataset a new name
+            testdata.setRelationName(testVersion.getProject());
+
+            for (IProcessesingStrategy processor : config.getPreProcessors()) {
+                Console.traceln(Level.FINE, String
+                    .format("[%s] [%02d/%02d] %s: applying preprocessor %s",
+                            config.getExperimentName(), versionCount, versions.size(),
+                            testVersion.getProject(), processor.getClass().getName()));
+                processor.apply(testdata, traindata);
+            }
+
+            for (IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors()) {
+                Console.traceln(Level.FINE, String
+                    .format("[%s] [%02d/%02d] %s: applying pointwise selection %s",
+                            config.getExperimentName(), versionCount, versions.size(),
+                            testVersion.getProject(), dataselector.getClass().getName()));
+                traindata = dataselector.apply(testdata, traindata);
+            }
+
+            for (IProcessesingStrategy processor : config.getPostProcessors()) {
+                Console.traceln(Level.FINE, String
+                    .format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s",
+                            config.getExperimentName(), versionCount, versions.size(),
+                            testVersion.getProject(), processor.getClass().getName()));
+                processor.apply(testdata, traindata);
+            }
+
+            // Trainer list for evaluation later on
+            List<ITrainer> allTrainers = new LinkedList<>();
+
+            for (ITrainingStrategy trainer : config.getTrainers()) {
+
+                // Add trainer to list for evaluation
+                allTrainers.add(trainer);
+
+                // Train classifier
+                trainer.apply(traindata);
+
+                if (config.getSaveClassifier()) {
+                    // If the classifier should be saved, train it and save it
+                    // be careful with typecasting here!
+                    IWekaCompatibleTrainer trainerToSave = (IWekaCompatibleTrainer) trainer;
+                    // Console.println(trainerToSave.getClassifier().toString());
+                    try {
+                        weka.core.SerializationHelper.write(resultsDir.getAbsolutePath() + "/" +
+                                                                trainer.getName() + "-" +
+                                                                testVersion.getProject(),
+                                                            trainerToSave.getClassifier());
+                    }
+                    catch (Exception e) {
+                        e.printStackTrace();
+                    }
+
+                }
+            }
+
+            for (IEvaluationStrategy evaluator : config.getEvaluators()) {
+                Console.traceln(Level.FINE, String
+                    .format("[%s] [%02d/%02d] %s: applying evaluator %s",
+                            config.getExperimentName(), versionCount, versions.size(),
+                            testVersion.getProject(), evaluator.getClass().getName()));
+
+                if (writeHeader) {
+                    evaluator.setParameter(config.getResultsPath() + "/" +
+                        config.getExperimentName() + ".csv");
+                }
+                evaluator.apply(testdata, traindata, allTrainers, writeHeader);
+                writeHeader = false;
+            }
+
+            versionCount++;
+
+            Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished",
+                                                      config.getExperimentName(), versionCount,
+                                                      versions.size(), testVersion.getProject()));
+
+        }
+
+    }
+
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.execution;
 
@@ -25,181 +39,233 @@
 
 /**
- * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps of an experiment are as follows:
+ * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}.
+ * The steps of an experiment are as follows:
  * <ul>
- *  <li>load the data from the provided data path</li>
- *  <li>filter the data sets according to the provided version filters</li>
- *  <li>execute the following steps for each data sets as test data that is not ignored through the test version filter:
- *  <ul>
- *   <li>filter the data sets to setup the candidate training data:
- *   <ul>
- *    <li>remove all data sets from the same project</li>
- *    <li>filter all data sets according to the training data filter
- *   </ul></li>
- *   <li>apply the setwise preprocessors</li>
- *   <li>apply the setwise data selection algorithms</li>
- *   <li>apply the setwise postprocessors</li>
- *   <li>train the setwise training classifiers</li>
- *   <li>unify all remaining training data into one data set</li>
- *   <li>apply the preprocessors</li>
- *   <li>apply the pointwise data selection algorithms</li>
- *   <li>apply the postprocessors</li>
- *   <li>train the normal classifiers</li>
- *   <li>evaluate the results for all trained classifiers on the training data</li>
- *  </ul></li>
+ * <li>load the data from the provided data path</li>
+ * <li>filter the data sets according to the provided version filters</li>
+ * <li>execute the following steps for each data set as test data that is not ignored through the
+ * test version filter:
+ * <ul>
+ * <li>filter the data sets to setup the candidate training data:
+ * <ul>
+ * <li>remove all data sets from the same project</li>
+ * <li>filter all data sets according to the training data filter
+ * </ul>
+ * </li>
+ * <li>apply the setwise preprocessors</li>
+ * <li>apply the setwise data selection algorithms</li>
+ * <li>apply the setwise postprocessors</li>
+ * <li>train the setwise training classifiers</li>
+ * <li>unify all remaining training data into one data set</li>
+ * <li>apply the preprocessors</li>
+ * <li>apply the pointwise data selection algorithms</li>
+ * <li>apply the postprocessors</li>
+ * <li>train the normal classifiers</li>
+ * <li>evaluate the results for all trained classifiers on the training data</li>
+ * </ul>
+ * </li>
  * </ul>
  * 
- * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own thread.
+ * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own
+ * thread.
+ * 
  * @author Steffen Herbold
  */
 public class CrossProjectExperiment implements IExecutionStrategy {
 
-	/**
-	 * configuration of the experiment
-	 */
-	private final ExperimentConfiguration config;
-	
-	/**
-	 * Constructor. Creates a new experiment based on a configuration.
-	 * @param config configuration of the experiment
-	 */
-	public CrossProjectExperiment(ExperimentConfiguration config) {
-		this.config = config;
-	}
-	
-	/**
-	 * Executes the experiment with the steps as described in the class comment.
-	 * @see Runnable#run() 
-	 */
-	@Override
-	public void run() {
-		final List<SoftwareVersion> versions = new LinkedList<>();
-		
-		for(IVersionLoader loader : config.getLoaders()) {
-			versions.addAll(loader.load());
-		}
-		
-		for( IVersionFilter filter : config.getVersionFilters() ) {
-			filter.apply(versions);
-		}
-		boolean writeHeader = true;
-		int versionCount = 1;
-		int testVersionCount = 0;
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				testVersionCount++;
-			}
-		}
-		
-		// sort versions
-		Collections.sort(versions);
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				
-				// Setup testdata and training data
-				Instances testdata = testVersion.getInstances();
-				String testProject = testVersion.getProject();
-				SetUniqueList<Instances> traindataSet = SetUniqueList.setUniqueList(new LinkedList<Instances>());
-				for( SoftwareVersion trainingVersion : versions ) {
-					if( isVersion(trainingVersion, config.getTrainingVersionFilters()) ) {
-						if( trainingVersion!=testVersion ) {
-							if( !trainingVersion.getProject().equals(testProject) ) {
-								traindataSet.add(trainingVersion.getInstances());
-							}
-						}
-					}
-				}
-				
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					dataselector.apply(testdata, traindataSet);
-				}
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), setwiseTrainer.getName()));
-					setwiseTrainer.apply(traindataSet);
-				}
-				Instances traindata = makeSingleTrainingSet(traindataSet);
-				for( IProcessesingStrategy processor : config.getPreProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					traindata = dataselector.apply(testdata, traindata);
-				}
-				for( IProcessesingStrategy processor : config.getPostProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( ITrainingStrategy trainer : config.getTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), trainer.getName()));
-					trainer.apply(traindata);
-				}
-				File resultsDir = new File(config.getResultsPath());
-				if (!resultsDir.exists()) {
-					resultsDir.mkdir();
-				}
-				for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), evaluator.getClass().getName()));
-					List<ITrainer> allTrainers = new LinkedList<>();
-					for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-						allTrainers.add(setwiseTrainer);
-					}
-					for( ITrainingStrategy trainer : config.getTrainers() ) {
-						allTrainers.add(trainer);
-					}
-					if( writeHeader ) {
-						evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
-					}
-					evaluator.apply(testdata, traindata, allTrainers, writeHeader);
-					writeHeader = false;
-				}
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				versionCount++;
-			}
-		}
-	}
-	
-	/**
-	 * Helper method that checks if a version passes all filters.
-	 * @param version version that is checked
-	 * @param filters list of the filters
-	 * @return true, if the version passes all filters, false otherwise
-	 */
-	private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
-		boolean result = true;
-		for( IVersionFilter filter : filters) {
-			result &= !filter.apply(version);
-		}
-		return result;
-	}
-
-	/**
-	 * Helper method that combines a set of Weka {@link Instances} sets into a single {@link Instances} set.
-	 * @param traindataSet set of {@link Instances} to be combines
-	 * @return single {@link Instances} set
-	 */
-	public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
-		Instances traindataFull = null;
-		for( Instances traindata : traindataSet) {
-			if( traindataFull==null ) {
-				traindataFull = new Instances(traindata);
-			} else {
-				for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-					traindataFull.add(traindata.instance(i));
-				}
-			}
-		}
-		return traindataFull;
-	}
+    /**
+     * configuration of the experiment
+     */
+    private final ExperimentConfiguration config;
+
+    /**
+     * Constructor. Creates a new experiment based on a configuration.
+     * 
+     * @param config
+     *            configuration of the experiment
+     */
+    public CrossProjectExperiment(ExperimentConfiguration config) {
+        this.config = config;
+    }
+
+    /**
+     * Executes the experiment with the steps as described in the class comment.
+     * 
+     * @see Runnable#run()
+     */
+    @Override
+    public void run() {
+        final List<SoftwareVersion> versions = new LinkedList<>();
+
+        for (IVersionLoader loader : config.getLoaders()) {
+            versions.addAll(loader.load());
+        }
+
+        for (IVersionFilter filter : config.getVersionFilters()) {
+            filter.apply(versions);
+        }
+        boolean writeHeader = true;
+        int versionCount = 1;
+        int testVersionCount = 0;
+
+        for (SoftwareVersion testVersion : versions) {
+            if (isVersion(testVersion, config.getTestVersionFilters())) {
+                testVersionCount++;
+            }
+        }
+
+        // sort versions
+        Collections.sort(versions);
+
+        for (SoftwareVersion testVersion : versions) {
+            if (isVersion(testVersion, config.getTestVersionFilters())) {
+                Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting",
+                                                          config.getExperimentName(), versionCount,
+                                                          testVersionCount,
+                                                          testVersion.getVersion()));
+
+                // Setup testdata and training data
+                Instances testdata = testVersion.getInstances();
+                String testProject = testVersion.getProject();
+                SetUniqueList<Instances> traindataSet =
+                    SetUniqueList.setUniqueList(new LinkedList<Instances>());
+                for (SoftwareVersion trainingVersion : versions) {
+                    if (isVersion(trainingVersion, config.getTrainingVersionFilters())) {
+                        if (trainingVersion != testVersion) {
+                            if (!trainingVersion.getProject().equals(testProject)) {
+                                traindataSet.add(trainingVersion.getInstances());
+                            }
+                        }
+                    }
+                }
+
+                for (ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindataSet);
+                }
+                for (ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise selection %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), dataselector.getClass().getName()));
+                    dataselector.apply(testdata, traindataSet);
+                }
+                for (ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindataSet);
+                }
+                for (ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise trainer %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), setwiseTrainer.getName()));
+                    setwiseTrainer.apply(traindataSet);
+                }
+                Instances traindata = makeSingleTrainingSet(traindataSet);
+                for (IProcessesingStrategy processor : config.getPreProcessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying preprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindata);
+                }
+                for (IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors())
+                {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying pointwise selection %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), dataselector.getClass().getName()));
+                    traindata = dataselector.apply(testdata, traindata);
+                }
+                for (IProcessesingStrategy processor : config.getPostProcessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindata);
+                }
+                for (ITrainingStrategy trainer : config.getTrainers()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying trainer %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), trainer.getName()));
+                    trainer.apply(traindata);
+                }
+                File resultsDir = new File(config.getResultsPath());
+                if (!resultsDir.exists()) {
+                    resultsDir.mkdir();
+                }
+                for (IEvaluationStrategy evaluator : config.getEvaluators()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying evaluator %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), evaluator.getClass().getName()));
+                    List<ITrainer> allTrainers = new LinkedList<>();
+                    for (ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers()) {
+                        allTrainers.add(setwiseTrainer);
+                    }
+                    for (ITrainingStrategy trainer : config.getTrainers()) {
+                        allTrainers.add(trainer);
+                    }
+                    if (writeHeader) {
+                        evaluator.setParameter(config.getResultsPath() + "/" +
+                            config.getExperimentName() + ".csv");
+                    }
+                    evaluator.apply(testdata, traindata, allTrainers, writeHeader);
+                    writeHeader = false;
+                }
+                Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished",
+                                                          config.getExperimentName(), versionCount,
+                                                          testVersionCount,
+                                                          testVersion.getVersion()));
+                versionCount++;
+            }
+        }
+    }
+
+    /**
+     * Helper method that checks if a version passes all filters.
+     * 
+     * @param version
+     *            version that is checked
+     * @param filters
+     *            list of the filters
+     * @return true, if the version passes all filters, false otherwise
+     */
+    private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
+        boolean result = true;
+        for (IVersionFilter filter : filters) {
+            result &= !filter.apply(version);
+        }
+        return result;
+    }
+
+    /**
+     * Helper method that combines a set of Weka {@link Instances} sets into a single
+     * {@link Instances} set.
+     * 
+     * @param traindataSet
+     *            set of {@link Instances} to be combined
+     * @return single {@link Instances} set
+     */
+    public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
+        Instances traindataFull = null;
+        for (Instances traindata : traindataSet) {
+            if (traindataFull == null) {
+                traindataFull = new Instances(traindata);
+            }
+            else {
+                for (int i = 0; i < traindata.numInstances(); i++) {
+                    traindataFull.add(traindata.instance(i));
+                }
+            }
+        }
+        return traindataFull;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.execution;
 
@@ -4,11 +18,11 @@
 
 /**
- * Interface that must be implemented from the different experiments 
- * (e.g. ClassifierCreationExeperiment) to be runnable by {@link Runner}
+ * Interface that must be implemented from the different experiments (e.g.
+ * ClassifierCreationExperiment) to be runnable by {@link Runner}
  * 
  * @author Fabian Trautsch
- *
+ * 
  */
-public interface IExecutionStrategy extends Runnable{
+public interface IExecutionStrategy extends Runnable {
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/RelaxedCrossProjectExperiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/RelaxedCrossProjectExperiment.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/RelaxedCrossProjectExperiment.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.execution;
 
@@ -25,185 +39,238 @@
 
 /**
- * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps of an experiment are as follows:
+ * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}.
+ * The steps of an experiment are as follows:
  * <ul>
- *  <li>load the data from the provided data path</li>
- *  <li>filter the data sets according to the provided version filters</li>
- *  <li>execute the following steps for each data sets as test data that is not ignored through the test version filter:
- *  <ul>
- *   <li>filter the data sets to setup the candidate training data:
- *   <ul>
- *    <li>filter all data sets according to the training data filter
- *   </ul></li>
- *   <li>apply the setwise preprocessors</li>
- *   <li>apply the setwise data selection algorithms</li>
- *   <li>apply the setwise postprocessors</li>
- *   <li>train the setwise training classifiers</li>
- *   <li>unify all remaining training data into one data set</li>
- *   <li>apply the preprocessors</li>
- *   <li>apply the pointwise data selection algorithms</li>
- *   <li>apply the postprocessors</li>
- *   <li>train the normal classifiers</li>
- *   <li>evaluate the results for all trained classifiers on the training data</li>
- *  </ul></li>
+ * <li>load the data from the provided data path</li>
+ * <li>filter the data sets according to the provided version filters</li>
+ * <li>execute the following steps for each data set as test data that is not ignored through the
+ * test version filter:
+ * <ul>
+ * <li>filter the data sets to setup the candidate training data:
+ * <ul>
+ * <li>filter all data sets according to the training data filter
+ * </ul>
+ * </li>
+ * <li>apply the setwise preprocessors</li>
+ * <li>apply the setwise data selection algorithms</li>
+ * <li>apply the setwise postprocessors</li>
+ * <li>train the setwise training classifiers</li>
+ * <li>unify all remaining training data into one data set</li>
+ * <li>apply the preprocessors</li>
+ * <li>apply the pointwise data selection algorithms</li>
+ * <li>apply the postprocessors</li>
+ * <li>train the normal classifiers</li>
+ * <li>evaluate the results for all trained classifiers on the training data</li>
+ * </ul>
+ * </li>
  * </ul>
  * 
- * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own thread.
+ * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own
+ * thread.
+ * 
  * @author Steffen Herbold
  */
 public class RelaxedCrossProjectExperiment implements IExecutionStrategy {
 
-	/**
-	 * configuration of the experiment
-	 */
-	private final ExperimentConfiguration config;
-	
-	/**
-	 * Constructor. Creates a new experiment based on a configuration.
-	 * @param config configuration of the experiment
-	 */
-	public RelaxedCrossProjectExperiment(ExperimentConfiguration config) {
-		this.config = config;
-	}
-	
-	/**
-	 * Executes the experiment with the steps as described in the class comment.
-	 * @see Runnable#run() 
-	 */
-	@Override
-	public void run() {
-		final List<SoftwareVersion> versions = new LinkedList<>();
-		
-		for(IVersionLoader loader : config.getLoaders()) {
-			versions.addAll(loader.load());
-		}
-		
-		for( IVersionFilter filter : config.getVersionFilters() ) {
-			filter.apply(versions);
-		}
-		boolean writeHeader = true;
-		int versionCount = 1;
-		int testVersionCount = 0;
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				testVersionCount++;
-			}
-		}
-		
-		// sort versions
-		Collections.sort(versions);
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				
-				// Setup testdata and training data
-				Instances testdata = testVersion.getInstances();
-				String testProject = testVersion.getProject();
-				SetUniqueList<Instances> traindataSet = SetUniqueList.setUniqueList(new LinkedList<Instances>());
-				for( SoftwareVersion trainingVersion : versions ) {
-					if( isVersion(trainingVersion, config.getTrainingVersionFilters()) ) {
-						if( trainingVersion!=testVersion ) {
-							if( trainingVersion.getProject().equals(testProject) ) {
-								if( trainingVersion.compareTo(testVersion)<0 ) {
-									// only add if older
-									traindataSet.add(trainingVersion.getInstances());
-								}
-							} else {
-								traindataSet.add(trainingVersion.getInstances());
-							}
-						}
-					}
-				}
-				
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					dataselector.apply(testdata, traindataSet);
-				}
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), setwiseTrainer.getName()));
-					setwiseTrainer.apply(traindataSet);
-				}
-				Instances traindata = makeSingleTrainingSet(traindataSet);
-				for( IProcessesingStrategy processor : config.getPreProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					traindata = dataselector.apply(testdata, traindata);
-				}
-				for( IProcessesingStrategy processor : config.getPostProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( ITrainingStrategy trainer : config.getTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), trainer.getName()));
-					trainer.apply(traindata);
-				}
-				File resultsDir = new File(config.getResultsPath());
-				if (!resultsDir.exists()) {
-					resultsDir.mkdir();
-				}
-				for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), evaluator.getClass().getName()));
-					List<ITrainer> allTrainers = new LinkedList<>();
-					for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-						allTrainers.add(setwiseTrainer);
-					}
-					for( ITrainingStrategy trainer : config.getTrainers() ) {
-						allTrainers.add(trainer);
-					}
-					if( writeHeader ) {
-						evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
-					}
-					evaluator.apply(testdata, traindata, allTrainers, writeHeader);
-					writeHeader = false;
-				}
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				versionCount++;
-			}
-		}
-	}
-	
-	/**
-	 * Helper method that checks if a version passes all filters.
-	 * @param version version that is checked
-	 * @param filters list of the filters
-	 * @return true, if the version passes all filters, false otherwise
-	 */
-	private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
-		boolean result = true;
-		for( IVersionFilter filter : filters) {
-			result &= !filter.apply(version);
-		}
-		return result;
-	}
-
-	/**
-	 * Helper method that combines a set of Weka {@link Instances} sets into a single {@link Instances} set.
-	 * @param traindataSet set of {@link Instances} to be combines
-	 * @return single {@link Instances} set
-	 */
-	public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
-		Instances traindataFull = null;
-		for( Instances traindata : traindataSet) {
-			if( traindataFull==null ) {
-				traindataFull = new Instances(traindata);
-			} else {
-				for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-					traindataFull.add(traindata.instance(i));
-				}
-			}
-		}
-		return traindataFull;
-	}
+    /**
+     * configuration of the experiment
+     */
+    private final ExperimentConfiguration config;
+
+    /**
+     * Constructor. Creates a new experiment based on a configuration.
+     * 
+     * @param config
+     *            configuration of the experiment
+     */
+    public RelaxedCrossProjectExperiment(ExperimentConfiguration config) {
+        this.config = config;
+    }
+
+    /**
+     * Executes the experiment with the steps as described in the class comment.
+     * 
+     * @see Runnable#run()
+     */
+    @Override
+    public void run() {
+        final List<SoftwareVersion> versions = new LinkedList<>();
+
+        for (IVersionLoader loader : config.getLoaders()) {
+            versions.addAll(loader.load());
+        }
+
+        for (IVersionFilter filter : config.getVersionFilters()) {
+            filter.apply(versions);
+        }
+        boolean writeHeader = true;
+        int versionCount = 1;
+        int testVersionCount = 0;
+
+        for (SoftwareVersion testVersion : versions) {
+            if (isVersion(testVersion, config.getTestVersionFilters())) {
+                testVersionCount++;
+            }
+        }
+
+        // sort versions
+        Collections.sort(versions);
+
+        for (SoftwareVersion testVersion : versions) {
+            if (isVersion(testVersion, config.getTestVersionFilters())) {
+                Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting",
+                                                          config.getExperimentName(), versionCount,
+                                                          testVersionCount,
+                                                          testVersion.getVersion()));
+
+                // Setup testdata and training data
+                Instances testdata = testVersion.getInstances();
+                String testProject = testVersion.getProject();
+                SetUniqueList<Instances> traindataSet =
+                    SetUniqueList.setUniqueList(new LinkedList<Instances>());
+                for (SoftwareVersion trainingVersion : versions) {
+                    if (isVersion(trainingVersion, config.getTrainingVersionFilters())) {
+                        if (trainingVersion != testVersion) {
+                            if (trainingVersion.getProject().equals(testProject)) {
+                                if (trainingVersion.compareTo(testVersion) < 0) {
+                                    // only add if older
+                                    traindataSet.add(trainingVersion.getInstances());
+                                }
+                            }
+                            else {
+                                traindataSet.add(trainingVersion.getInstances());
+                            }
+                        }
+                    }
+                }
+
+                for (ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindataSet);
+                }
+                for (ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise selection %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), dataselector.getClass().getName()));
+                    dataselector.apply(testdata, traindataSet);
+                }
+                for (ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindataSet);
+                }
+                for (ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise trainer %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), setwiseTrainer.getName()));
+                    setwiseTrainer.apply(traindataSet);
+                }
+                Instances traindata = makeSingleTrainingSet(traindataSet);
+                for (IProcessesingStrategy processor : config.getPreProcessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying preprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindata);
+                }
+                for (IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors())
+                {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying pointwise selection %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), dataselector.getClass().getName()));
+                    traindata = dataselector.apply(testdata, traindata);
+                }
+                for (IProcessesingStrategy processor : config.getPostProcessors()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), processor.getClass().getName()));
+                    processor.apply(testdata, traindata);
+                }
+                for (ITrainingStrategy trainer : config.getTrainers()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying trainer %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), trainer.getName()));
+                    trainer.apply(traindata);
+                }
+                File resultsDir = new File(config.getResultsPath());
+                if (!resultsDir.exists()) {
+                    resultsDir.mkdir();
+                }
+                for (IEvaluationStrategy evaluator : config.getEvaluators()) {
+                    Console.traceln(Level.FINE, String
+                        .format("[%s] [%02d/%02d] %s: applying evaluator %s",
+                                config.getExperimentName(), versionCount, testVersionCount,
+                                testVersion.getVersion(), evaluator.getClass().getName()));
+                    List<ITrainer> allTrainers = new LinkedList<>();
+                    for (ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers()) {
+                        allTrainers.add(setwiseTrainer);
+                    }
+                    for (ITrainingStrategy trainer : config.getTrainers()) {
+                        allTrainers.add(trainer);
+                    }
+                    if (writeHeader) {
+                        evaluator.setParameter(config.getResultsPath() + "/" +
+                            config.getExperimentName() + ".csv");
+                    }
+                    evaluator.apply(testdata, traindata, allTrainers, writeHeader);
+                    writeHeader = false;
+                }
+                Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished",
+                                                          config.getExperimentName(), versionCount,
+                                                          testVersionCount,
+                                                          testVersion.getVersion()));
+                versionCount++;
+            }
+        }
+    }
+
+    /**
+     * Helper method that checks if a version passes all filters.
+     * 
+     * @param version
+     *            version that is checked
+     * @param filters
+     *            list of the filters
+     * @return true, if the version passes all filters, false otherwise
+     */
+    private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
+        boolean result = true;
+        for (IVersionFilter filter : filters) {
+            result &= !filter.apply(version);
+        }
+        return result;
+    }
+
+    /**
+     * Helper method that combines a set of Weka {@link Instances} sets into a single
+     * {@link Instances} set.
+     * 
+     * @param traindataSet
+     *            set of {@link Instances} to be combined
+     * @return single {@link Instances} set
+     */
+    public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
+        Instances traindataFull = null;
+        for (Instances traindata : traindataSet) {
+            if (traindataFull == null) {
+                traindataFull = new Instances(traindata);
+            }
+            else {
+                for (int i = 0; i < traindata.numInstances(); i++) {
+                    traindataFull.add(traindata.instance(i));
+                }
+            }
+        }
+        return traindataFull;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFFolderLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -9,12 +23,12 @@
 public class ARFFFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new ARFFLoader();
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new ARFFLoader();
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/ARFFLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -15,38 +29,37 @@
 public class ARFFLoader implements SingleVersionLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.SingleVersionLoader#load(java.io.File)
-	 */
-	@Override
-	public Instances load(File file) {
-		BufferedReader reader;
-		Instances data;
-		try {
-			reader = new BufferedReader(new FileReader(file));
-			data = new Instances(reader);
-			reader.close();
-		} catch (IOException e) {
-			throw new RuntimeException("error reading file: " + file.getName(), e);
-		}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.SingleVersionLoader#load(java.io.File)
+     */
+    @Override
+    public Instances load(File file) {
+        BufferedReader reader;
+        Instances data;
+        try {
+            reader = new BufferedReader(new FileReader(file));
+            data = new Instances(reader);
+            reader.close();
+        }
+        catch (IOException e) {
+            throw new RuntimeException("error reading file: " + file.getName(), e);
+        }
 
-		// setting class attribute
-		data.setClassIndex(data.numAttributes() - 1);
+        // setting class attribute
+        data.setClassIndex(data.numAttributes() - 1);
 
-		return data;
-	}
+        return data;
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.loader.SingleVersionLoader#filenameFilter(java.lang.String
-	 * )
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith(".arff");
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.SingleVersionLoader#filenameFilter(java.lang.String)
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith(".arff");
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeFolderLoader.java	(revision 41)
@@ -1,14 +1,28 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
 public class AUDIChangeFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new AUDIChangeLoader();
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new AUDIChangeLoader();
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIChangeLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -15,270 +29,279 @@
 /**
  * TODO
+ * 
  * @author sherbold
- *
+ * 
  */
 class AUDIChangeLoader implements SingleVersionLoader {
-	
-	private class EntityRevisionPair implements Comparable<EntityRevisionPair> {
-		private final String entity;
-		private final int revision;
-		
-		public EntityRevisionPair(String entity, int revision) {
-			this.entity = entity;
-			this.revision = revision;
-		}
-		
-		@Override
-		public boolean equals(Object other) {
-			if( !(other instanceof EntityRevisionPair) ) {
-				return false;
-			} else {
-				return compareTo((EntityRevisionPair) other)==0;
-			}
-		}
-		
-		@Override
-		public int hashCode() {
-			return entity.hashCode()+revision;
-		}
-
-		@Override
-		public int compareTo(EntityRevisionPair other) {
-			int strCmp = this.entity.compareTo(other.entity);
-			if( strCmp!=0 ) {
-				return strCmp;
-			}
-			return Integer.compare(revision, other.revision);
-		}
-		
-		@Override
-		public String toString() {
-			return entity+"@"+revision;
-		}
-	}
-
-	@Override
-	public Instances load(File file) {
-		final String[] lines;
-		String[] lineSplit;
-		String[] lineSplitBug;
-		
-		try {
-			lines = FileTools.getLinesFromFile(file.getAbsolutePath());
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		// information about bugs are in another file
-		String path = file.getAbsolutePath();
-		path = path.substring(0, path.length()-14) + "repro.csv";
-		final String[] linesBug;
-		try {
-			linesBug = FileTools.getLinesFromFile(path);
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		int revisionIndex=-1;
-		int bugIndex=-1;
-		lineSplitBug = linesBug[0].split(";");
-		for( int j=0; j<lineSplitBug.length ; j++ ) {
-			if( lineSplitBug[j].equals("svnrev") ) {
-				revisionIndex=j;
-			}
-			if( lineSplitBug[j].equals("num_bugs_trace") ) {
-				bugIndex=j;
-			}
-		}
-		if( revisionIndex<0 ) {
-			throw new RuntimeException("could not find SVN revisions");
-		}
-		if( bugIndex<0 ) {
-			throw new RuntimeException("could not find bug information");
-		}
-		
-		int metricsStartIndex=-1;
-		int metricsEndIndex=-1;
-		lineSplit = lines[0].split(";");
-		for( int j=0; j<lineSplit.length ; j++ ) {
-			if( lineSplit[j].equals("lm_LOC") ) {
-				metricsStartIndex=j;
-			}
-			if( lineSplit[j].equals("h_E") ) {
-				metricsEndIndex=j;
-			}
-		}
-		if( metricsStartIndex<0 ) {
-			throw new RuntimeException("could not find first metric, i.e., lm_LOC");
-		}
-		if( metricsEndIndex<0 ) {
-			throw new RuntimeException("could not find last metric, i.e., h_E");
-		}
-		int numMetrics = metricsEndIndex-metricsStartIndex+1;
-		
-		// create sets of all filenames and revisions
-		SortedMap<EntityRevisionPair, Integer> entityRevisionPairs = new TreeMap<>();
-		for( int i=1; i<linesBug.length ; i++ ) {
-			lineSplitBug = linesBug[i].split(";");
-			entityRevisionPairs.put(new EntityRevisionPair(lineSplitBug[0], Integer.parseInt(lineSplitBug[revisionIndex])), i);
-		}
-		
-		
-		// prepare weka instances
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
-		lineSplit = lines[0].split(";"); 
-		for (int j = metricsStartIndex; j<=metricsEndIndex; j++) {
-			atts.add(new Attribute(lineSplit[j]+"_delta"));
-		}
-		for (int j = metricsStartIndex; j<=metricsEndIndex; j++) {
-			atts.add(new Attribute(lineSplit[j]+"_abs"));
-		}
-		final ArrayList<String> classAttVals = new ArrayList<String>();
-		classAttVals.add("0");
-		classAttVals.add("1");
-		final Attribute classAtt = new Attribute("bug", classAttVals);
-		atts.add(classAtt);
-
-		final Instances data = new Instances(file.getName(), atts, 0);
-		data.setClass(classAtt);
-		
-		// create data
-		String lastFile = null;
-		double[] lastValues = null;
-		int lastNumBugs = 0;
-		for( Entry<EntityRevisionPair, Integer> entry : entityRevisionPairs.entrySet() ) {
-			try {
-				// first get values
-				lineSplit = lines[entry.getValue()].split(";");
-				lineSplitBug = linesBug[entry.getValue()].split(";");
-				int i=0;
-				double[] values = new double[numMetrics];
-				for(int j=metricsStartIndex ; j<=metricsEndIndex ; j++ ) {
-					values[i] = Double.parseDouble(lineSplit[j]);
-					i++;
-				}
-				int numBugs = Integer.parseInt(lineSplitBug[bugIndex]);
-				
-				// then check if an entity must be created
-				if( entry.getKey().entity.equals(lastFile)) {
-					// create new instance
-					double[] instanceValues = new double[2*numMetrics+1];
-					for( int j=0; j<numMetrics; j++ ) {
-						instanceValues[j] = values[j]-lastValues[j];
-						instanceValues[j+numMetrics]= values[j];
-					}
-					// check if any value>0
-					boolean changeOccured = false;
-					for( int j=0; j<numMetrics; j++ ) {
-						if( instanceValues[j]>0 ) {
-							changeOccured = true;
-						}
-					}
-					if( changeOccured ) {
-						instanceValues[instanceValues.length-1] = numBugs<=lastNumBugs ? 0 : 1;
-						data.add(new DenseInstance(1.0, instanceValues));
-					}
-				}
-				lastFile = entry.getKey().entity;
-				lastValues = values;
-				lastNumBugs = numBugs;
-			} catch(IllegalArgumentException e) {
-				System.err.println("error in line " + entry.getValue() + ": " + e.getMessage());
-				System.err.println("metrics line: " + lines[entry.getValue()]);
-				System.err.println("bugs line: " + linesBug[entry.getValue()]);
-				System.err.println("line is ignored");
-			}
-		}
-		
-		return data;
-	}
-	
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load(
-	 * java.io.File)
-	 */
-	
-	public Instances load(File file, String dummy) {
-		final String[] lines;
-		try {
-			lines = FileTools.getLinesFromFile(file.getAbsolutePath());
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		// information about bugs are in another file
-		String path = file.getAbsolutePath();
-		path = path.substring(0, path.length()-14) + "repro.csv";
-		final String[] linesBug;
-		try {
-			linesBug = FileTools.getLinesFromFile(path);
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		// configure Instances
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
-
-		String[] lineSplit = lines[0].split(";");
-		// ignore first three/four and last two columns
-		int offset;
-		if( lineSplit[3].equals("project_rev") ) {
-			offset = 4;
-		} else {
-			offset = 3;
-		}
-		for (int j = 0; j < lineSplit.length - (offset+2); j++) {
-			atts.add(new Attribute(lineSplit[j + offset]));
-		}
-		final ArrayList<String> classAttVals = new ArrayList<String>();
-		classAttVals.add("0");
-		classAttVals.add("1");
-		final Attribute classAtt = new Attribute("bug", classAttVals);
-		atts.add(classAtt);
-
-		final Instances data = new Instances(file.getName(), atts, 0);
-		data.setClass(classAtt);
-
-		// fetch data
-		for (int i = 1; i < lines.length; i++) {
-			boolean validInstance = true;
-			lineSplit = lines[i].split(";");
-			String[] lineSplitBug = linesBug[i].split(";");
-			double[] values = new double[data.numAttributes()];
-			for (int j = 0; validInstance && j < values.length-1; j++) {
-				if( lineSplit[j + offset].trim().isEmpty() ) {
-					validInstance = false;
-				} else {
-					values[j] = Double.parseDouble(lineSplit[j + offset].trim());
-				}
-			}
-			if( offset==3 ) {
-				values[values.length - 1] = lineSplitBug[7].equals("0") ? 0 : 1;
-			} else {
-				values[values.length - 1] = lineSplitBug[8].equals("0") ? 0 : 1;
-			}
-			
-			if( validInstance ) {
-				data.add(new DenseInstance(1.0, values));
-			} else {
-				System.out.println("instance " + i + " is invalid");
-			}
-		}
-		return data;
-	}
-
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
-	 * filenameFilter(java.lang.String)
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith("src.csv");
-	}
+
+    private class EntityRevisionPair implements Comparable<EntityRevisionPair> {
+        private final String entity;
+        private final int revision;
+
+        public EntityRevisionPair(String entity, int revision) {
+            this.entity = entity;
+            this.revision = revision;
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if (!(other instanceof EntityRevisionPair)) {
+                return false;
+            }
+            else {
+                return compareTo((EntityRevisionPair) other) == 0;
+            }
+        }
+
+        @Override
+        public int hashCode() {
+            return entity.hashCode() + revision;
+        }
+
+        @Override
+        public int compareTo(EntityRevisionPair other) {
+            int strCmp = this.entity.compareTo(other.entity);
+            if (strCmp != 0) {
+                return strCmp;
+            }
+            return Integer.compare(revision, other.revision);
+        }
+
+        @Override
+        public String toString() {
+            return entity + "@" + revision;
+        }
+    }
+
+    @Override
+    public Instances load(File file) {
+        final String[] lines;
+        String[] lineSplit;
+        String[] lineSplitBug;
+
+        try {
+            lines = FileTools.getLinesFromFile(file.getAbsolutePath());
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        // information about bugs are in another file
+        String path = file.getAbsolutePath();
+        path = path.substring(0, path.length() - 14) + "repro.csv";
+        final String[] linesBug;
+        try {
+            linesBug = FileTools.getLinesFromFile(path);
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        int revisionIndex = -1;
+        int bugIndex = -1;
+        lineSplitBug = linesBug[0].split(";");
+        for (int j = 0; j < lineSplitBug.length; j++) {
+            if (lineSplitBug[j].equals("svnrev")) {
+                revisionIndex = j;
+            }
+            if (lineSplitBug[j].equals("num_bugs_trace")) {
+                bugIndex = j;
+            }
+        }
+        if (revisionIndex < 0) {
+            throw new RuntimeException("could not find SVN revisions");
+        }
+        if (bugIndex < 0) {
+            throw new RuntimeException("could not find bug information");
+        }
+
+        int metricsStartIndex = -1;
+        int metricsEndIndex = -1;
+        lineSplit = lines[0].split(";");
+        for (int j = 0; j < lineSplit.length; j++) {
+            if (lineSplit[j].equals("lm_LOC")) {
+                metricsStartIndex = j;
+            }
+            if (lineSplit[j].equals("h_E")) {
+                metricsEndIndex = j;
+            }
+        }
+        if (metricsStartIndex < 0) {
+            throw new RuntimeException("could not find first metric, i.e., lm_LOC");
+        }
+        if (metricsEndIndex < 0) {
+            throw new RuntimeException("could not find last metric, i.e., h_E");
+        }
+        int numMetrics = metricsEndIndex - metricsStartIndex + 1;
+
+        // create sets of all filenames and revisions
+        SortedMap<EntityRevisionPair, Integer> entityRevisionPairs = new TreeMap<>();
+        for (int i = 1; i < linesBug.length; i++) {
+            lineSplitBug = linesBug[i].split(";");
+            entityRevisionPairs.put(new EntityRevisionPair(lineSplitBug[0], Integer
+                                        .parseInt(lineSplitBug[revisionIndex])), i);
+        }
+
+        // prepare weka instances
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+        lineSplit = lines[0].split(";");
+        for (int j = metricsStartIndex; j <= metricsEndIndex; j++) {
+            atts.add(new Attribute(lineSplit[j] + "_delta"));
+        }
+        for (int j = metricsStartIndex; j <= metricsEndIndex; j++) {
+            atts.add(new Attribute(lineSplit[j] + "_abs"));
+        }
+        final ArrayList<String> classAttVals = new ArrayList<String>();
+        classAttVals.add("0");
+        classAttVals.add("1");
+        final Attribute classAtt = new Attribute("bug", classAttVals);
+        atts.add(classAtt);
+
+        final Instances data = new Instances(file.getName(), atts, 0);
+        data.setClass(classAtt);
+
+        // create data
+        String lastFile = null;
+        double[] lastValues = null;
+        int lastNumBugs = 0;
+        for (Entry<EntityRevisionPair, Integer> entry : entityRevisionPairs.entrySet()) {
+            try {
+                // first get values
+                lineSplit = lines[entry.getValue()].split(";");
+                lineSplitBug = linesBug[entry.getValue()].split(";");
+                int i = 0;
+                double[] values = new double[numMetrics];
+                for (int j = metricsStartIndex; j <= metricsEndIndex; j++) {
+                    values[i] = Double.parseDouble(lineSplit[j]);
+                    i++;
+                }
+                int numBugs = Integer.parseInt(lineSplitBug[bugIndex]);
+
+                // then check if an entity must be created
+                if (entry.getKey().entity.equals(lastFile)) {
+                    // create new instance
+                    double[] instanceValues = new double[2 * numMetrics + 1];
+                    for (int j = 0; j < numMetrics; j++) {
+                        instanceValues[j] = values[j] - lastValues[j];
+                        instanceValues[j + numMetrics] = values[j];
+                    }
+                    // check whether any metric delta (the first numMetrics entries) is positive
+                    boolean changeOccured = false;
+                    for (int j = 0; j < numMetrics; j++) {
+                        if (instanceValues[j] > 0) {
+                            changeOccured = true;
+                        }
+                    }
+                    if (changeOccured) {
+                        instanceValues[instanceValues.length - 1] = numBugs <= lastNumBugs ? 0 : 1;
+                        data.add(new DenseInstance(1.0, instanceValues));
+                    }
+                }
+                lastFile = entry.getKey().entity;
+                lastValues = values;
+                lastNumBugs = numBugs;
+            }
+            catch (IllegalArgumentException e) {
+                System.err.println("error in line " + entry.getValue() + ": " + e.getMessage());
+                System.err.println("metrics line: " + lines[entry.getValue()]);
+                System.err.println("bugs line: " + linesBug[entry.getValue()]);
+                System.err.println("line is ignored");
+            }
+        }
+
+        return data;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load( java.io.File)
+     */
+
+    public Instances load(File file, String dummy) {
+        final String[] lines;
+        try {
+            lines = FileTools.getLinesFromFile(file.getAbsolutePath());
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        // information about bugs is in another file
+        String path = file.getAbsolutePath();
+        path = path.substring(0, path.length() - 14) + "repro.csv";
+        final String[] linesBug;
+        try {
+            linesBug = FileTools.getLinesFromFile(path);
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
+
+        // configure Instances
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+
+        String[] lineSplit = lines[0].split(";");
+        // ignore first three/four and last two columns
+        int offset;
+        if (lineSplit[3].equals("project_rev")) {
+            offset = 4;
+        }
+        else {
+            offset = 3;
+        }
+        for (int j = 0; j < lineSplit.length - (offset + 2); j++) {
+            atts.add(new Attribute(lineSplit[j + offset]));
+        }
+        final ArrayList<String> classAttVals = new ArrayList<String>();
+        classAttVals.add("0");
+        classAttVals.add("1");
+        final Attribute classAtt = new Attribute("bug", classAttVals);
+        atts.add(classAtt);
+
+        final Instances data = new Instances(file.getName(), atts, 0);
+        data.setClass(classAtt);
+
+        // fetch data
+        for (int i = 1; i < lines.length; i++) {
+            boolean validInstance = true;
+            lineSplit = lines[i].split(";");
+            String[] lineSplitBug = linesBug[i].split(";");
+            double[] values = new double[data.numAttributes()];
+            for (int j = 0; validInstance && j < values.length - 1; j++) {
+                if (lineSplit[j + offset].trim().isEmpty()) {
+                    validInstance = false;
+                }
+                else {
+                    values[j] = Double.parseDouble(lineSplit[j + offset].trim());
+                }
+            }
+            if (offset == 3) {
+                values[values.length - 1] = lineSplitBug[7].equals("0") ? 0 : 1;
+            }
+            else {
+                values[values.length - 1] = lineSplitBug[8].equals("0") ? 0 : 1;
+            }
+
+            if (validInstance) {
+                data.add(new DenseInstance(1.0, values));
+            }
+            else {
+                System.out.println("instance " + i + " is invalid");
+            }
+        }
+        return data;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
+     * filenameFilter(java.lang.String)
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith("src.csv");
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIDataLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIDataLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIDataLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -12,96 +26,101 @@
 /**
  * TODO
+ * 
  * @author sherbold
- *
+ * 
  */
 class AUDIDataLoader implements SingleVersionLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load(
-	 * java.io.File)
-	 */
-	@Override
-	public Instances load(File file) {
-		final String[] lines;
-		try {
-			lines = FileTools.getLinesFromFile(file.getAbsolutePath());
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		// information about bugs are in another file
-		String path = file.getAbsolutePath();
-		path = path.substring(0, path.length()-14) + "repro.csv";
-		final String[] linesBug;
-		try {
-			linesBug = FileTools.getLinesFromFile(path);
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		// configure Instances
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load( java.io.File)
+     */
+    @Override
+    public Instances load(File file) {
+        final String[] lines;
+        try {
+            lines = FileTools.getLinesFromFile(file.getAbsolutePath());
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
 
-		String[] lineSplit = lines[0].split(";");
-		// ignore first three/four and last two columns
-		int offset;
-		if( lineSplit[3].equals("project_rev") ) {
-			offset = 4;
-		} else {
-			offset = 3;
-		}
-		for (int j = 0; j < lineSplit.length - (offset+2); j++) {
-			atts.add(new Attribute(lineSplit[j + offset]));
-		}
-		final ArrayList<String> classAttVals = new ArrayList<String>();
-		classAttVals.add("0");
-		classAttVals.add("1");
-		final Attribute classAtt = new Attribute("bug", classAttVals);
-		atts.add(classAtt);
+        // information about bugs is in another file
+        String path = file.getAbsolutePath();
+        path = path.substring(0, path.length() - 14) + "repro.csv";
+        final String[] linesBug;
+        try {
+            linesBug = FileTools.getLinesFromFile(path);
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
 
-		final Instances data = new Instances(file.getName(), atts, 0);
-		data.setClass(classAtt);
+        // configure Instances
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
 
-		// fetch data
-		for (int i = 1; i < lines.length; i++) {
-			boolean validInstance = true;
-			lineSplit = lines[i].split(";");
-			String[] lineSplitBug = linesBug[i].split(";");
-			double[] values = new double[data.numAttributes()];
-			for (int j = 0; validInstance && j < values.length-1; j++) {
-				if( lineSplit[j + offset].trim().isEmpty() ) {
-					validInstance = false;
-				} else {
-					values[j] = Double.parseDouble(lineSplit[j + offset].trim());
-				}
-			}
-			if( offset==3 ) {
-				values[values.length - 1] = lineSplitBug[7].equals("0") ? 0 : 1;
-			} else {
-				values[values.length - 1] = lineSplitBug[8].equals("0") ? 0 : 1;
-			}
-			
-			if( validInstance ) {
-				data.add(new DenseInstance(1.0, values));
-			} else {
-				System.out.println("instance " + i + " is invalid");
-			}
-		}
-		return data;
-	}
+        String[] lineSplit = lines[0].split(";");
+        // ignore first three/four and last two columns
+        int offset;
+        if (lineSplit[3].equals("project_rev")) {
+            offset = 4;
+        }
+        else {
+            offset = 3;
+        }
+        for (int j = 0; j < lineSplit.length - (offset + 2); j++) {
+            atts.add(new Attribute(lineSplit[j + offset]));
+        }
+        final ArrayList<String> classAttVals = new ArrayList<String>();
+        classAttVals.add("0");
+        classAttVals.add("1");
+        final Attribute classAtt = new Attribute("bug", classAttVals);
+        atts.add(classAtt);
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
-	 * filenameFilter(java.lang.String)
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith("src.csv");
-	}
+        final Instances data = new Instances(file.getName(), atts, 0);
+        data.setClass(classAtt);
+
+        // fetch data
+        for (int i = 1; i < lines.length; i++) {
+            boolean validInstance = true;
+            lineSplit = lines[i].split(";");
+            String[] lineSplitBug = linesBug[i].split(";");
+            double[] values = new double[data.numAttributes()];
+            for (int j = 0; validInstance && j < values.length - 1; j++) {
+                if (lineSplit[j + offset].trim().isEmpty()) {
+                    validInstance = false;
+                }
+                else {
+                    values[j] = Double.parseDouble(lineSplit[j + offset].trim());
+                }
+            }
+            if (offset == 3) {
+                values[values.length - 1] = lineSplitBug[7].equals("0") ? 0 : 1;
+            }
+            else {
+                values[values.length - 1] = lineSplitBug[8].equals("0") ? 0 : 1;
+            }
+
+            if (validInstance) {
+                data.add(new DenseInstance(1.0, values));
+            }
+            else {
+                System.out.println("instance " + i + " is invalid");
+            }
+        }
+        return data;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
+     * filenameFilter(java.lang.String)
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith("src.csv");
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AUDIFolderLoader.java	(revision 41)
@@ -1,14 +1,28 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
 public class AUDIFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new AUDIDataLoader();
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new AUDIDataLoader();
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -10,7 +24,6 @@
 
 /**
- * Abstract class for loading data from a folder. The subfolders of a defined
- * folder define the projects, the file contained in the subfolder are the
- * versions of a project.
+ * Abstract class for loading data from a folder. The subfolders of a defined folder define the
+ * projects, the file contained in the subfolder are the versions of a project.
  * 
  * @author Steffen Herbold
@@ -18,51 +31,49 @@
 public abstract class AbstractFolderLoader implements IVersionLoader {
 
-	/**
-	 * Path of the data.
-	 */
-	protected String path = "";
+    /**
+     * Path of the data.
+     */
+    protected String path = "";
 
-	/**
-	 * @see de.ugoe.cs.cpdp.loader.IVersionLoader#setLocation(java.lang.String)
-	 */
-	@Override
-	public void setLocation(String location) {
-		path = location;
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.loader.IVersionLoader#setLocation(java.lang.String)
+     */
+    @Override
+    public void setLocation(String location) {
+        path = location;
+    }
 
-	/**
-	 * @see de.ugoe.cs.cpdp.loader.IVersionLoader#load()
-	 */
-	@Override
-	public List<SoftwareVersion> load() {
-		final List<SoftwareVersion> versions = new LinkedList<SoftwareVersion>();
+    /**
+     * @see de.ugoe.cs.cpdp.loader.IVersionLoader#load()
+     */
+    @Override
+    public List<SoftwareVersion> load() {
+        final List<SoftwareVersion> versions = new LinkedList<SoftwareVersion>();
 
-		final File dataDir = new File(path);
-		final SingleVersionLoader instancesLoader = getSingleLoader();
+        final File dataDir = new File(path);
+        final SingleVersionLoader instancesLoader = getSingleLoader();
 
-		for (File projectDir : dataDir.listFiles()) {
-			if (projectDir.isDirectory()) {
-				String projectName = projectDir.getName();
-				for (File versionFile : projectDir.listFiles()) {
-					if (versionFile.isFile()
-							&& instancesLoader.filenameFilter(versionFile
-									.getName())) {
-						String versionName = versionFile.getName();
-						Instances data = instancesLoader.load(versionFile);
-						versions.add(new SoftwareVersion(projectName,
-								versionName, data));
-					}
-				}
-			}
-		}
-		return versions;
-	}
+        for (File projectDir : dataDir.listFiles()) {
+            if (projectDir.isDirectory()) {
+                String projectName = projectDir.getName();
+                for (File versionFile : projectDir.listFiles()) {
+                    if (versionFile.isFile() &&
+                        instancesLoader.filenameFilter(versionFile.getName()))
+                    {
+                        String versionName = versionFile.getName();
+                        Instances data = instancesLoader.load(versionFile);
+                        versions.add(new SoftwareVersion(projectName, versionName, data));
+                    }
+                }
+            }
+        }
+        return versions;
+    }
 
-	/**
-	 * Returns the concrete {@link SingleVersionLoader} to be used with this
-	 * folder loader.
-	 * 
-	 * @return
-	 */
-	abstract protected SingleVersionLoader getSingleLoader();
+    /**
+     * Returns the concrete {@link SingleVersionLoader} to be used with this folder loader.
+     * 
+     * @return the {@link SingleVersionLoader} used to load the version data files
+     */
+    abstract protected SingleVersionLoader getSingleLoader();
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVDataLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVDataLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVDataLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -11,6 +25,6 @@
 
 /**
- * Loads the instances for a software version from a CSV file of the PROMISE
- * data set mined by Jurezko and Madeyski.
+ * Loads the instances for a software version from a CSV file of the PROMISE data set mined by
+ * Jureczko and Madeyski.
  * 
  * @author Steffen Herbold
@@ -18,61 +32,59 @@
 class CSVDataLoader implements SingleVersionLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see
-	 * de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load(
-	 * java.io.File)
-	 */
-	@Override
-	public Instances load(File file) {
-		final String[] lines;
-		try {
-			lines = FileTools.getLinesFromFile(file.getAbsolutePath());
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#load( java.io.File)
+     */
+    @Override
+    public Instances load(File file) {
+        final String[] lines;
+        try {
+            lines = FileTools.getLinesFromFile(file.getAbsolutePath());
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
 
-		// configure Instances
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+        // configure Instances
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
 
-		String[] lineSplit = lines[0].split(",");
-		for (int j = 0; j < lineSplit.length - 4; j++) {
-			atts.add(new Attribute(lineSplit[j + 3]));
-		}
-		final ArrayList<String> classAttVals = new ArrayList<String>();
-		classAttVals.add("0");
-		classAttVals.add("1");
-		final Attribute classAtt = new Attribute("bug", classAttVals);
-		atts.add(classAtt);
+        String[] lineSplit = lines[0].split(",");
+        for (int j = 0; j < lineSplit.length - 4; j++) {
+            atts.add(new Attribute(lineSplit[j + 3]));
+        }
+        final ArrayList<String> classAttVals = new ArrayList<String>();
+        classAttVals.add("0");
+        classAttVals.add("1");
+        final Attribute classAtt = new Attribute("bug", classAttVals);
+        atts.add(classAtt);
 
-		final Instances data = new Instances(file.getName(), atts, 0);
-		data.setClass(classAtt);
+        final Instances data = new Instances(file.getName(), atts, 0);
+        data.setClass(classAtt);
 
-		// fetch data
-		for (int i = 1; i < lines.length; i++) {
-			lineSplit = lines[i].split(",");
-			double[] values = new double[lineSplit.length - 3];
-			for (int j = 0; j < values.length - 1; j++) {
-				values[j] = Double.parseDouble(lineSplit[j + 3].trim());
-			}
-			values[values.length - 1] = lineSplit[lineSplit.length - 1].trim()
-					.equals("0") ? 0 : 1;
-			data.add(new DenseInstance(1.0, values));
-		}
+        // fetch data
+        for (int i = 1; i < lines.length; i++) {
+            lineSplit = lines[i].split(",");
+            double[] values = new double[lineSplit.length - 3];
+            for (int j = 0; j < values.length - 1; j++) {
+                values[j] = Double.parseDouble(lineSplit[j + 3].trim());
+            }
+            values[values.length - 1] = lineSplit[lineSplit.length - 1].trim().equals("0") ? 0 : 1;
+            data.add(new DenseInstance(1.0, values));
+        }
 
-		return data;
-	}
+        return data;
+    }
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
-	 * filenameFilter(java.lang.String)
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith(".csv");
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
+     * filenameFilter(java.lang.String)
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith(".csv");
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVFolderLoader.java	(revision 41)
@@ -1,7 +1,21 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
 /**
- * Implements the {@link AbstractFolderLoader} for data from the PROMISE
- * repository mined by Jurezko and Madeyski.
+ * Implements the {@link AbstractFolderLoader} for data from the PROMISE repository mined by Jureczko
+ * and Madeyski.
  * 
  * @author Steffen Herbold
@@ -9,13 +23,13 @@
 public class CSVFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new CSVDataLoader();
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new CSVDataLoader();
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusDataLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusDataLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusDataLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -10,55 +24,52 @@
 import de.ugoe.cs.util.FileTools;
 
-
 class CSVMockusDataLoader implements SingleVersionLoader {
 
-	@Override
-	public Instances load(File file) {
-		final String[] lines;
-		try {
-			
-			lines = FileTools.getLinesFromFile(file.getAbsolutePath());
-		} catch (IOException e) {
-			throw new RuntimeException(e);
-		}
-		
-		
-		// configure Instances
-		final ArrayList<Attribute> atts = new ArrayList<Attribute>();
+    @Override
+    public Instances load(File file) {
+        final String[] lines;
+        try {
 
-		String[] lineSplit = lines[0].split(",");
-		for (int j = 0; j < lineSplit.length - 3; j++) {
-			atts.add(new Attribute(lineSplit[j + 2]));
-		}
-		
-		final ArrayList<String> classAttVals = new ArrayList<String>();
-		classAttVals.add("0");
-		classAttVals.add("1");
-		final Attribute classAtt = new Attribute("bug", classAttVals);
-		atts.add(classAtt);
+            lines = FileTools.getLinesFromFile(file.getAbsolutePath());
+        }
+        catch (IOException e) {
+            throw new RuntimeException(e);
+        }
 
-		final Instances data = new Instances(file.getName(), atts, 0);
-		data.setClass(classAtt);
+        // configure Instances
+        final ArrayList<Attribute> atts = new ArrayList<Attribute>();
 
-		// fetch data
-		for (int i = 1; i < lines.length; i++) {
-			lineSplit = lines[i].split(",");
-			double[] values = new double[lineSplit.length - 2];
-			for (int j = 0; j < values.length - 1; j++) {
-				values[j] = Double.parseDouble(lineSplit[j + 2].trim());
-			}
-			values[values.length - 1] = lineSplit[lineSplit.length - 1].trim()
-					.equals("0") ? 0 : 1;
-			data.add(new DenseInstance(1.0, values));
-		}
+        String[] lineSplit = lines[0].split(",");
+        for (int j = 0; j < lineSplit.length - 3; j++) {
+            atts.add(new Attribute(lineSplit[j + 2]));
+        }
 
-		return data;
-	}
+        final ArrayList<String> classAttVals = new ArrayList<String>();
+        classAttVals.add("0");
+        classAttVals.add("1");
+        final Attribute classAtt = new Attribute("bug", classAttVals);
+        atts.add(classAtt);
 
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith(".csv");
-	}
+        final Instances data = new Instances(file.getName(), atts, 0);
+        data.setClass(classAtt);
+
+        // fetch data
+        for (int i = 1; i < lines.length; i++) {
+            lineSplit = lines[i].split(",");
+            double[] values = new double[lineSplit.length - 2];
+            for (int j = 0; j < values.length - 1; j++) {
+                values[j] = Double.parseDouble(lineSplit[j + 2].trim());
+            }
+            values[values.length - 1] = lineSplit[lineSplit.length - 1].trim().equals("0") ? 0 : 1;
+            data.add(new DenseInstance(1.0, values));
+        }
+
+        return data;
+    }
+
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith(".csv");
+    }
 
 }
-
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/CSVMockusFolderLoader.java	(revision 41)
@@ -1,9 +1,23 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
 public class CSVMockusFolderLoader extends AbstractFolderLoader {
 
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new CSVMockusDataLoader();
-	}
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new CSVMockusDataLoader();
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -38,410 +52,428 @@
 
 /**
- * Class for loading a decent model file.
- * Loads a decent model file and (if no arff file is present) and does the
- * following conversions: 
- * DECENT -> ARFFX -> ARFF
+ * Class for loading a decent model file. Loads a decent model file and (if no arff file is present)
+ * and does the following conversions: DECENT -> ARFFX -> ARFF
  * 
  * @author Fabian Trautsch
- *
+ * 
  */
-public class DecentDataLoader implements SingleVersionLoader{
-
-	// Model Handler for Decent Models
-	private DECENTEpsilonModelHandler modelHandler = new DECENTEpsilonModelHandler();
-	
-	// Set log level
-	String logLevel = "1";
-	String logToFile = "false";
-		
-	// This list contains attributes, that should be removed before building the arff file
-	private static List<String> attributeFilter = new LinkedList<String>();
-	
-	// This list contains all names of the different artifacts
-	private static Set<String> artifactNames = new LinkedHashSet<String>();
-	
-	// Name of the class attribute.
-	private static final String classAttributeName = "LABEL.Artifact.Target.BugFix.AverageWeight";
-	
-	
-	private int getIndexOfArtifactName(String artifactName) {
-		int index = -1;
-		if(artifactNames.contains(artifactName)) {
-			int i=0;
-			for(String nameInSet: artifactNames) {
-				if(nameInSet.equals(artifactName)) {
-					index = i;
-				} else {
-					i++;
-				}
-			}
-		}
-		
-		return index;
-	}
-	
-	/**
-	 * Defines attributes, that should be removed before building the
-	 * ARFF File from.
-	 */
-	private void setAttributeFilter() {
-		attributeFilter.add("Agent.Name");
-	
-	}
-	
-	/**
-	 * Saves the dataset as arff after transformation (decent->arffx) and
-	 * filtering
-	 * 
-	 * @param dataSet the WEKA dataset to save
-	 * @param arffLocation location where it should be saved to
-	 */
-	public void save(Instances dataSet, String arffLocation) {
-		
-		
-		ArffSaver saver = new ArffSaver();
-		saver.setInstances(dataSet);
-		try {
-			saver.setFile(new File(arffLocation));
-			saver.writeBatch();
-		} catch (IOException e) {
-			Console.printerrln("Cannot save the file to path: "+arffLocation);
-			e.printStackTrace();
-		}
-	}
-
-	
-	/**
-	 * Loads the given decent file and tranform it from decent->arffx->arff
-	 * @return Instances in WEKA format
-	 */
-	@Override
-	public Instances load(File file) {
-		
-		// Set attributeFilter
-		setAttributeFilter();
-		
-		// Register MetaModels
-		try {
-			registerMetaModels();
-		} catch (Exception e1) {
-			Console.printerrln("Metamodels cannot be registered!");
-			e1.printStackTrace();
-		}
-
-		// Set location of decent and arffx Model
-		String decentModelLocation = file.getAbsolutePath();
-		String pathToDecentModelFolder = decentModelLocation.substring(0,decentModelLocation.lastIndexOf(File.separator));
-		String arffxModelLocation = pathToDecentModelFolder+"/model.arffx";
-		String logModelLocation = pathToDecentModelFolder+"/model.log";
-		String arffLocation = pathToDecentModelFolder+"/model.arff";
-		
-		// If arff File exists, load from it!
-		if(new File(arffLocation).exists()) {
-			System.out.println("Loading arff File...");
-			 BufferedReader reader;
-			 Instances data = null;
-			try {
-				reader = new BufferedReader(new FileReader(arffLocation));
-				data = new Instances(reader);
-				reader.close();
-			} catch (FileNotFoundException e) {
-				Console.printerrln("File with path: "+arffLocation+" was not found.");
-				e.printStackTrace();
-			} catch (IOException e) {
-				Console.printerrln("File with path: "+arffLocation+" cannot be read.");
-				e.printStackTrace();
-			}
-			
-			// Set class attribute if not set
-			if(data.classIndex() == -1) {
-				Attribute classAttribute = data.attribute(classAttributeName);
-				data.setClass(classAttribute);
-			}
-			
-			
-			return data;
-		}
-		
-		// Location of EOL Scripts
-		String preprocess = "./decent/epsilon/query/preprocess.eol";	
-		String arffxToArffSource = "./decent/epsilon/query/addLabels.eol";
-		
-		// Set Log Properties
-		System.setProperty("epsilon.logLevel", logLevel);
-		System.setProperty("epsilon.logToFile", logToFile);
-		System.setProperty("epsilon.logFileAvailable", "false");
-		
-		// Set decent2arffx Properties
-		System.setProperty("epsilon.transformation.decent2arffx.skipSource", "false");
-		System.setProperty("epsilon.transformation.decent2arffx.type", "code");
-		
-		
-		
-		// Preprocess Data, transform from decent2arffx
-		try {
-			IEolExecutableModule preProcessModule = loadModule(preprocess);
-			IModel preProcessDecentModel = modelHandler.getDECENTModel(decentModelLocation, true, true);
-			IModel preProcessArffxarffxModel = modelHandler.getARFFxModel(arffxModelLocation, false, true);
-			preProcessModule.getContext().getModelRepository().addModel(preProcessDecentModel);
-			preProcessModule.getContext().getModelRepository().addModel(preProcessArffxarffxModel);
-			execute(preProcessModule, logModelLocation);
-			preProcessDecentModel.dispose();
-			preProcessArffxarffxModel.dispose();
-			preProcessModule.reset();
-		} catch (URISyntaxException e) {
-			Console.printerrln("URI Syntax for decent or arffx model is wrong.");
-			e.printStackTrace();
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
-		
-		
-		
-		
-		// Transform to arff, for label and confidence attributes
-		try {
-			IEolExecutableModule arffxToArffModule = loadModule(arffxToArffSource);
-			IModel arffxToArffArffxModel = modelHandler.getARFFxModel(arffxModelLocation, true, true);
-			arffxToArffModule.getContext().getModelRepository().addModel(arffxToArffArffxModel);
-			execute(arffxToArffModule, logModelLocation);
-			arffxToArffArffxModel.dispose();
-			// can be stored and retained alternatively
-			arffxToArffModule.reset();
-		} catch (URISyntaxException e) {
-			Console.printerrln("URI Syntax for arffx model is wrong.");
-			e.printStackTrace();
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
-
-		// Unregister MetaModels, otherwise cast will fail
-		HashMap<String, Object> metaModelCache = new HashMap<>();
-		for (String key : EPackage.Registry.INSTANCE.keySet()) {
-			metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
-		};
-		
-		for (String key : metaModelCache .keySet()) {
-			EPackage.Registry.INSTANCE.remove(key);
-		};
-		
-		
-		// Workaround to gernerate a usable URI. Absolute path is not
-		// possible, therefore we need to construct a relative path
-		
-		URL location = DecentDataLoader.class.getProtectionDomain().getCodeSource().getLocation();
-		String basePath = location.getFile();
-		
-		// Location is the bin folder, so we need to delete the last 4 characters
-		basePath = basePath.substring(0, basePath.length() - 4);
-		String relativePath = new File(basePath).toURI().relativize(new File(arffxModelLocation).toURI()).getPath();
-		
-		// Loard arffx file and create WEKA Instances
-		ARFFxResourceTool tool = new ARFFxResourceTool();
-		Resource resource = tool.loadResourceFromXMI(relativePath, "arffx");
-		
-		Instances dataSet = null;
-		for(EObject o: resource.getContents()) {
-			Model m = (Model) o;
-			dataSet = createWekaDataFormat(m);
-
-			for(Instance i : m.getData()) {
-				createWekaInstance(dataSet, i);
-			}
-		}
-		
-		// Set class attribute
-		Attribute classAttribute = dataSet.attribute(classAttributeName);
-		dataSet.setClass(classAttribute);
-		
-		// Save as ARFF
-		save(dataSet, arffLocation);
-		
-		return dataSet;
-	
-	}	
-	
-	
-	/**
-	 * Creates a WekaInstance from an ARFFX Model Instance
-	 * 
-	 * @param dataSet WekaInstance dataset, where the arffx model instances should be
-	 * added to
-	 * @param i arffx model instance
-	 */
-	private void createWekaInstance(Instances dataSet, Instance i) {	 
-		double[] values = new double[dataSet.numAttributes()];
-		int j=0;
-		
-		for(Value value : i.getValues()) {
-			String dataValue = value.getContent(); 
-			String attributeName = value.getOfAttribute().getName();
-			
-			if(attributeFilter.contains(attributeName)) {
-				continue;
-			}
-			
-			// Is value a LABEL.* attribute?
-			if(isLabel(attributeName)) {
-				values[j] = dataSet.attribute(j).indexOfValue(dataValue);
-			} else if (isConfidenceLabel(attributeName)){
-				// Is value a CONFIDENCE.* attribute?
-				values[j] = dataSet.attribute(j).indexOfValue(dataValue);
-			} else if(attributeName.equals("Artifact.Name")){
-				// Is it the name of the artifact?
-				artifactNames.add(dataValue);
-				values[j] = getIndexOfArtifactName(dataValue);
-			} else {
-				// Is it a numeric value?
-				values[j] = Double.parseDouble(dataValue);
-			}
-			
-			j++;
-		}
-		
-		DenseInstance inst = new DenseInstance(1.0, values);
-		dataSet.add(inst);
-	}
-		
-	/**
-	 * Creates a Weka Instances set out of a arffx model
-	 * @param m arffx model
-	 * @return
-	 */
-	private Instances createWekaDataFormat(Model m) {
-		
-		// Bad solution, can be enhanced (continue in for loop)
-		ArrayList<Attribute> datasetAttributes = new  ArrayList<Attribute>();
-		for(de.ugoe.cs.cpdp.decentApp.models.arffx.Attribute attribute :m.getAttributes()) {
-			String attributeName = attribute.getName();
-
-			if(attributeFilter.contains(attributeName)) {
-				continue;
-			}
-			
-			Attribute wekaAttr;
-			
-			// Is attribute a LABEL.* attribute?
-			if(isLabel(attributeName)) {
-				// Classattribute
-				final ArrayList<String> classAttVals = new ArrayList<String>();
-				classAttVals.add("false");
-				classAttVals.add("true");
-				wekaAttr = new Attribute(attributeName, classAttVals);
-			} else if(isConfidenceLabel(attributeName)){
-				// Is attribute a CONFIDENCE.* attribute?
-				ArrayList<String> labels = new ArrayList<String>();
-				labels.add("high");
-				labels.add("low");
-				wekaAttr = new Attribute(attributeName, labels);
-			} else {
-				// Is it a numeric attribute?
-				wekaAttr = new Attribute(attributeName);
-			}
-			
-			datasetAttributes.add(wekaAttr);
-		}
-		
-		
-		return new Instances("test-dataset", datasetAttributes, 0);
-	}
-	
-	/**
-	 * Helper methods which indicates if the given value starts with "LABEL"
-	 * 
-	 * @param value to test
-	 * @return
-	 */
-	private boolean isLabel(String value) {
-		if(value.length()>= 5 && value.substring(0, 5).equals("LABEL")) {
-			return true;
-		}
-		
-		return false;
-	}
-	
-	/**
-	 * Helper method which indicates if the given value starts with "CONFIDENCE"
-	 * @param value to test
-	 * @return
-	 */
-	private boolean isConfidenceLabel(String value) {
-		if(value.length()>= 10 && value.substring(0, 10).equals("CONFIDENCE")) {
-			return true;
-		}
-		
-		return false;
-	}
-
-	
-	/**
-	 * Returns if a filename ends with ".decent"
-	 * @return
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith(".decent");
-	}
-	
-	/**
-	 * Helper method for executing a eol scripts and adding the log model beforehand
-	 * @param module module to execute
-	 * @param logModelLocation location of the log model
-	 * @throws Exception
-	 */
-	private void execute(IEolExecutableModule module, String logModelLocation)
-			throws Exception {
-		IModel logModel = modelHandler.getLOGModel(logModelLocation, true, true);
-		module.getContext().getModelRepository().addModel(logModel);
-		module.execute();
-		logModel.dispose();
-	}
-
-	/**
-	 * Loads the module from a given source
-	 * 
-	 * @param source where the module is (e.g. eol script)
-	 * @return
-	 * @throws Exception
-	 * @throws URISyntaxException
-	 */
-	private IEolExecutableModule loadModule(String source) throws Exception,
-	URISyntaxException {
-
-		IEolExecutableModule module = null;
-		if (source.endsWith("etl")) {
-			module = new EtlModule();
-		} else if (source.endsWith("eol")) {
-			module = new EolModule();
-		} else {
-		
-		}
-		
-		module.parse(modelHandler.getFile(source));
-		
-		if (module.getParseProblems().size() > 0) {
-			Console.printerrln("Parse error occured...");
-			for (ParseProblem problem : module.getParseProblems()) {
-				System.err.println(problem.toString());
-			}
-			// System.exit(-1);
-		}
-		
-		return module;
-	}
-	
-	/**
-	 * Helper method for registering the metamodels
-	 * @throws Exception
-	 */
-	private void registerMetaModels() throws Exception {
-		String metaModelsPath = DECENTEpsilonModelHandler.metaPath;
-		File metaModelsLocation = new File(metaModelsPath);
-		for (File file : metaModelsLocation.listFiles()) {
-			if (file.getName().endsWith(".ecore")) {
-				EmfUtil.register(URI.createFileURI(file.getAbsolutePath()), EPackage.Registry.INSTANCE);
-			}
-		}
-	}
-	
+public class DecentDataLoader implements SingleVersionLoader {
+
+    // Model Handler for Decent Models
+    private DECENTEpsilonModelHandler modelHandler = new DECENTEpsilonModelHandler();
+
+    // Set log level
+    String logLevel = "1";
+    String logToFile = "false";
+
+    // This list contains attributes, that should be removed before building the arff file
+    private static List<String> attributeFilter = new LinkedList<String>();
+
+    // This list contains all names of the different artifacts
+    private static Set<String> artifactNames = new LinkedHashSet<String>();
+
+    // Name of the class attribute.
+    private static final String classAttributeName = "LABEL.Artifact.Target.BugFix.AverageWeight";
+
+    private int getIndexOfArtifactName(String artifactName) {
+        int index = -1;
+        if (artifactNames.contains(artifactName)) {
+            int i = 0;
+            for (String nameInSet : artifactNames) {
+                if (nameInSet.equals(artifactName)) {
+                    index = i;
+                }
+                else {
+                    i++;
+                }
+            }
+        }
+
+        return index;
+    }
+
+    /**
+     * Defines attributes that should be removed before building the ARFF file.
+     */
+    private void setAttributeFilter() {
+        attributeFilter.add("Agent.Name");
+
+    }
+
+    /**
+     * Saves the dataset as arff after transformation (decent->arffx) and filtering
+     * 
+     * @param dataSet
+     *            the WEKA dataset to save
+     * @param arffLocation
+     *            location where it should be saved to
+     */
+    public void save(Instances dataSet, String arffLocation) {
+
+        ArffSaver saver = new ArffSaver();
+        saver.setInstances(dataSet);
+        try {
+            saver.setFile(new File(arffLocation));
+            saver.writeBatch();
+        }
+        catch (IOException e) {
+            Console.printerrln("Cannot save the file to path: " + arffLocation);
+            e.printStackTrace();
+        }
+    }
+
+    /**
+     * Loads the given decent file and transforms it from decent->arffx->arff
+     * 
+     * @return Instances in WEKA format
+     */
+    @Override
+    public Instances load(File file) {
+
+        // Set attributeFilter
+        setAttributeFilter();
+
+        // Register MetaModels
+        try {
+            registerMetaModels();
+        }
+        catch (Exception e1) {
+            Console.printerrln("Metamodels cannot be registered!");
+            e1.printStackTrace();
+        }
+
+        // Set location of decent and arffx Model
+        String decentModelLocation = file.getAbsolutePath();
+        String pathToDecentModelFolder =
+            decentModelLocation.substring(0, decentModelLocation.lastIndexOf(File.separator));
+        String arffxModelLocation = pathToDecentModelFolder + "/model.arffx";
+        String logModelLocation = pathToDecentModelFolder + "/model.log";
+        String arffLocation = pathToDecentModelFolder + "/model.arff";
+
+        // If arff File exists, load from it!
+        if (new File(arffLocation).exists()) {
+            System.out.println("Loading arff File...");
+            BufferedReader reader;
+            Instances data = null;
+            try {
+                reader = new BufferedReader(new FileReader(arffLocation));
+                data = new Instances(reader);
+                reader.close();
+            }
+            catch (FileNotFoundException e) {
+                Console.printerrln("File with path: " + arffLocation + " was not found.");
+                e.printStackTrace();
+            }
+            catch (IOException e) {
+                Console.printerrln("File with path: " + arffLocation + " cannot be read.");
+                e.printStackTrace();
+            }
+
+            // Set class attribute if not set
+            if (data.classIndex() == -1) {
+                Attribute classAttribute = data.attribute(classAttributeName);
+                data.setClass(classAttribute);
+            }
+
+            return data;
+        }
+
+        // Location of EOL Scripts
+        String preprocess = "./decent/epsilon/query/preprocess.eol";
+        String arffxToArffSource = "./decent/epsilon/query/addLabels.eol";
+
+        // Set Log Properties
+        System.setProperty("epsilon.logLevel", logLevel);
+        System.setProperty("epsilon.logToFile", logToFile);
+        System.setProperty("epsilon.logFileAvailable", "false");
+
+        // Set decent2arffx Properties
+        System.setProperty("epsilon.transformation.decent2arffx.skipSource", "false");
+        System.setProperty("epsilon.transformation.decent2arffx.type", "code");
+
+        // Preprocess Data, transform from decent2arffx
+        try {
+            IEolExecutableModule preProcessModule = loadModule(preprocess);
+            IModel preProcessDecentModel =
+                modelHandler.getDECENTModel(decentModelLocation, true, true);
+            IModel preProcessArffxarffxModel =
+                modelHandler.getARFFxModel(arffxModelLocation, false, true);
+            preProcessModule.getContext().getModelRepository().addModel(preProcessDecentModel);
+            preProcessModule.getContext().getModelRepository().addModel(preProcessArffxarffxModel);
+            execute(preProcessModule, logModelLocation);
+            preProcessDecentModel.dispose();
+            preProcessArffxarffxModel.dispose();
+            preProcessModule.reset();
+        }
+        catch (URISyntaxException e) {
+            Console.printerrln("URI Syntax for decent or arffx model is wrong.");
+            e.printStackTrace();
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Transform to arff, for label and confidence attributes
+        try {
+            IEolExecutableModule arffxToArffModule = loadModule(arffxToArffSource);
+            IModel arffxToArffArffxModel =
+                modelHandler.getARFFxModel(arffxModelLocation, true, true);
+            arffxToArffModule.getContext().getModelRepository().addModel(arffxToArffArffxModel);
+            execute(arffxToArffModule, logModelLocation);
+            arffxToArffArffxModel.dispose();
+            // can be stored and retained alternatively
+            arffxToArffModule.reset();
+        }
+        catch (URISyntaxException e) {
+            Console.printerrln("URI Syntax for arffx model is wrong.");
+            e.printStackTrace();
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        // Unregister MetaModels, otherwise cast will fail
+        HashMap<String, Object> metaModelCache = new HashMap<>();
+        for (String key : EPackage.Registry.INSTANCE.keySet()) {
+            metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
+        };
+
+        for (String key : metaModelCache.keySet()) {
+            EPackage.Registry.INSTANCE.remove(key);
+        };
+
+        // Workaround to generate a usable URI. Absolute path is not
+        // possible, therefore we need to construct a relative path
+
+        URL location = DecentDataLoader.class.getProtectionDomain().getCodeSource().getLocation();
+        String basePath = location.getFile();
+
+        // Location is the bin folder, so we need to delete the last 4 characters
+        basePath = basePath.substring(0, basePath.length() - 4);
+        String relativePath =
+            new File(basePath).toURI().relativize(new File(arffxModelLocation).toURI()).getPath();
+
+        // Load arffx file and create WEKA Instances
+        ARFFxResourceTool tool = new ARFFxResourceTool();
+        Resource resource = tool.loadResourceFromXMI(relativePath, "arffx");
+
+        Instances dataSet = null;
+        for (EObject o : resource.getContents()) {
+            Model m = (Model) o;
+            dataSet = createWekaDataFormat(m);
+
+            for (Instance i : m.getData()) {
+                createWekaInstance(dataSet, i);
+            }
+        }
+
+        // Set class attribute
+        Attribute classAttribute = dataSet.attribute(classAttributeName);
+        dataSet.setClass(classAttribute);
+
+        // Save as ARFF
+        save(dataSet, arffLocation);
+
+        return dataSet;
+
+    }
+
+    /**
+     * Creates a WekaInstance from an ARFFX Model Instance
+     * 
+     * @param dataSet
+     *            WekaInstance dataset, where the arffx model instances should be added to
+     * @param i
+     *            arffx model instance
+     */
+    private void createWekaInstance(Instances dataSet, Instance i) {
+        double[] values = new double[dataSet.numAttributes()];
+        int j = 0;
+
+        for (Value value : i.getValues()) {
+            String dataValue = value.getContent();
+            String attributeName = value.getOfAttribute().getName();
+
+            if (attributeFilter.contains(attributeName)) {
+                continue;
+            }
+
+            // Is value a LABEL.* attribute?
+            if (isLabel(attributeName)) {
+                values[j] = dataSet.attribute(j).indexOfValue(dataValue);
+            }
+            else if (isConfidenceLabel(attributeName)) {
+                // Is value a CONFIDENCE.* attribute?
+                values[j] = dataSet.attribute(j).indexOfValue(dataValue);
+            }
+            else if (attributeName.equals("Artifact.Name")) {
+                // Is it the name of the artifact?
+                artifactNames.add(dataValue);
+                values[j] = getIndexOfArtifactName(dataValue);
+            }
+            else {
+                // Is it a numeric value?
+                values[j] = Double.parseDouble(dataValue);
+            }
+
+            j++;
+        }
+
+        DenseInstance inst = new DenseInstance(1.0, values);
+        dataSet.add(inst);
+    }
+
+    /**
+     * Creates a Weka Instances set out of an arffx model
+     * 
+     * @param m
+     *            arffx model
+     * @return
+     */
+    private Instances createWekaDataFormat(Model m) {
+
+        // Bad solution, can be enhanced (continue in for loop)
+        ArrayList<Attribute> datasetAttributes = new ArrayList<Attribute>();
+        for (de.ugoe.cs.cpdp.decentApp.models.arffx.Attribute attribute : m.getAttributes()) {
+            String attributeName = attribute.getName();
+
+            if (attributeFilter.contains(attributeName)) {
+                continue;
+            }
+
+            Attribute wekaAttr;
+
+            // Is attribute a LABEL.* attribute?
+            if (isLabel(attributeName)) {
+                // Classattribute
+                final ArrayList<String> classAttVals = new ArrayList<String>();
+                classAttVals.add("false");
+                classAttVals.add("true");
+                wekaAttr = new Attribute(attributeName, classAttVals);
+            }
+            else if (isConfidenceLabel(attributeName)) {
+                // Is attribute a CONFIDENCE.* attribute?
+                ArrayList<String> labels = new ArrayList<String>();
+                labels.add("high");
+                labels.add("low");
+                wekaAttr = new Attribute(attributeName, labels);
+            }
+            else {
+                // Is it a numeric attribute?
+                wekaAttr = new Attribute(attributeName);
+            }
+
+            datasetAttributes.add(wekaAttr);
+        }
+
+        return new Instances("test-dataset", datasetAttributes, 0);
+    }
+
+    /**
+     * Helper method that indicates if the given value starts with "LABEL"
+     * 
+     * @param value
+     *            to test
+     * @return
+     */
+    private boolean isLabel(String value) {
+        if (value.length() >= 5 && value.substring(0, 5).equals("LABEL")) {
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Helper method which indicates if the given value starts with "CONFIDENCE"
+     * 
+     * @param value
+     *            to test
+     * @return
+     */
+    private boolean isConfidenceLabel(String value) {
+        if (value.length() >= 10 && value.substring(0, 10).equals("CONFIDENCE")) {
+            return true;
+        }
+
+        return false;
+    }
+
+    /**
+     * Returns if a filename ends with ".decent"
+     * 
+     * @return
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith(".decent");
+    }
+
+    /**
+     * Helper method for executing an EOL script and adding the log model beforehand
+     * 
+     * @param module
+     *            module to execute
+     * @param logModelLocation
+     *            location of the log model
+     * @throws Exception
+     */
+    private void execute(IEolExecutableModule module, String logModelLocation) throws Exception {
+        IModel logModel = modelHandler.getLOGModel(logModelLocation, true, true);
+        module.getContext().getModelRepository().addModel(logModel);
+        module.execute();
+        logModel.dispose();
+    }
+
+    /**
+     * Loads the module from a given source
+     * 
+     * @param source
+     *            where the module is (e.g. eol script)
+     * @return
+     * @throws Exception
+     * @throws URISyntaxException
+     */
+    private IEolExecutableModule loadModule(String source) throws Exception, URISyntaxException {
+
+        IEolExecutableModule module = null;
+        if (source.endsWith("etl")) {
+            module = new EtlModule();
+        }
+        else if (source.endsWith("eol")) {
+            module = new EolModule();
+        }
+        else {
+
+        }
+
+        module.parse(modelHandler.getFile(source));
+
+        if (module.getParseProblems().size() > 0) {
+            Console.printerrln("Parse error occured...");
+            for (ParseProblem problem : module.getParseProblems()) {
+                System.err.println(problem.toString());
+            }
+            // System.exit(-1);
+        }
+
+        return module;
+    }
+
+    /**
+     * Helper method for registering the metamodels
+     * 
+     * @throws Exception
+     */
+    private void registerMetaModels() throws Exception {
+        String metaModelsPath = DECENTEpsilonModelHandler.metaPath;
+        File metaModelsLocation = new File(metaModelsPath);
+        for (File file : metaModelsLocation.listFiles()) {
+            if (file.getName().endsWith(".ecore")) {
+                EmfUtil.register(URI.createFileURI(file.getAbsolutePath()),
+                                 EPackage.Registry.INSTANCE);
+            }
+        }
+    }
+
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -16,74 +30,76 @@
 public class DecentFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new DecentDataLoader();
-	}
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.loader.IVersionLoader#load()
-	 */
-	@Override
-	public List<SoftwareVersion> load() {
-		final List<SoftwareVersion> versions = new LinkedList<SoftwareVersion>();
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new DecentDataLoader();
+    }
 
-		final File dataDir = new File(path);
-		final SingleVersionLoader instancesLoader = getSingleLoader();
+    /**
+     * @see de.ugoe.cs.cpdp.loader.IVersionLoader#load()
+     */
+    @Override
+    public List<SoftwareVersion> load() {
+        final List<SoftwareVersion> versions = new LinkedList<SoftwareVersion>();
 
-		String projectName = dataDir.getName();
-		
-		
-		/*
-		 * The following lines make it possible, that we can have two different possibilities
-		 * to load data:
-		 * 1) From one project (e.g. /decent/input/project1)
-		 * 2) From more than one project (e.g. /decent/input/)
-		 * 
-		 * Requirement is, that we have a folder structure like this:
-		 * "/decent/input/project1/model.decent, /decent/input/project2/model.decent, ..."
-		 * 
-		 * In the first one the "else" is executed, therefore it will just search the folder "project1"
-		 * for a "model.decent" file. In the second one, it will look into each folder and searches for
-		 * "model.decent" files.
-		 */
-		for (File projectDir : dataDir.listFiles()) {
-			if (projectDir.isDirectory()) {
-				projectName = projectDir.getName();
-				for (File versionFile : projectDir.listFiles()) {
-					loadDataFromFile(versionFile,instancesLoader, projectName, versions);
-				}
-			} else {
-				loadDataFromFile(projectDir, instancesLoader, projectName, versions);
-			}
-		}
-		return versions;
-	}
-	
-	/**
-	 * Loads data from a file and adds the instances from the load method to the 
-	 * versions List.
-	 * 
-	 * @param versionFile file to load from
-	 * @param instancesLoader loader that should be used
-	 * @param projectName name of the project which was loaded
-	 * @param versions list, where the weka instances are added to
-	 */
-	
-	private void loadDataFromFile(File versionFile, 
-			SingleVersionLoader instancesLoader, String projectName, List<SoftwareVersion> versions) {
-		if (versionFile.isFile()
-				&& instancesLoader.filenameFilter(versionFile
-						.getName())) {
-			String versionName = versionFile.getName();
-			Instances data = instancesLoader.load(versionFile);
-			versions.add(new SoftwareVersion(projectName,
-					versionName, data));
-		}
-	}
+        final File dataDir = new File(path);
+        final SingleVersionLoader instancesLoader = getSingleLoader();
+
+        String projectName = dataDir.getName();
+
+        /*
+         * The following lines make it possible, that we can have two different possibilities to
+         * load data: 1) From one project (e.g. /decent/input/project1) 2) From more than one
+         * project (e.g. /decent/input/)
+         * 
+         * Requirement is, that we have a folder structure like this:
+         * "/decent/input/project1/model.decent, /decent/input/project2/model.decent, ..."
+         * 
+         * In the first one the "else" is executed, therefore it will just search the folder
+         * "project1" for a "model.decent" file. In the second one, it will look into each folder
+         * and searches for "model.decent" files.
+         */
+        for (File projectDir : dataDir.listFiles()) {
+            if (projectDir.isDirectory()) {
+                projectName = projectDir.getName();
+                for (File versionFile : projectDir.listFiles()) {
+                    loadDataFromFile(versionFile, instancesLoader, projectName, versions);
+                }
+            }
+            else {
+                loadDataFromFile(projectDir, instancesLoader, projectName, versions);
+            }
+        }
+        return versions;
+    }
+
+    /**
+     * Loads data from a file and adds the instances from the load method to the versions List.
+     * 
+     * @param versionFile
+     *            file to load from
+     * @param instancesLoader
+     *            loader that should be used
+     * @param projectName
+     *            name of the project which was loaded
+     * @param versions
+     *            list, where the weka instances are added to
+     */
+
+    private void loadDataFromFile(File versionFile,
+                                  SingleVersionLoader instancesLoader,
+                                  String projectName,
+                                  List<SoftwareVersion> versions)
+    {
+        if (versionFile.isFile() && instancesLoader.filenameFilter(versionFile.getName())) {
+            String versionName = versionFile.getName();
+            Instances data = instancesLoader.load(versionFile);
+            versions.add(new SoftwareVersion(projectName, versionName, data));
+        }
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -5,7 +19,7 @@
 import de.ugoe.cs.cpdp.versions.SoftwareVersion;
 
-public interface IDecentVersionLoader extends IVersionLoader{
-	
-	public List<SoftwareVersion> load(List<String> decentAttributes);
+public interface IDecentVersionLoader extends IVersionLoader {
+
+    public List<SoftwareVersion> load(List<String> decentAttributes);
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -12,18 +26,18 @@
 public interface IVersionLoader {
 
-	/**
-	 * Sets the location of the data.
-	 * 
-	 * @param location
-	 *            location of the data
-	 */
-	public void setLocation(String location);
+    /**
+     * Sets the location of the data.
+     * 
+     * @param location
+     *            location of the data
+     */
+    public void setLocation(String location);
 
-	/**
-	 * Loads the data.
-	 * 
-	 * @return the data
-	 */
-	public List<SoftwareVersion> load();
+    /**
+     * Loads the data.
+     * 
+     * @return the data
+     */
+    public List<SoftwareVersion> load();
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFFolderLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFFolderLoader.java	(revision 41)
@@ -1,7 +1,20 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
 /**
- * Implements the {@link AbstractFolderLoader} for the NASA/SOFTLAB/MDP data
- * set.
+ * Implements the {@link AbstractFolderLoader} for the NASA/SOFTLAB/MDP data set.
  * 
  * @author Steffen Herbold
@@ -9,13 +22,13 @@
 public class NasaARFFFolderLoader extends AbstractFolderLoader {
 
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
-	 */
-	@Override
-	protected SingleVersionLoader getSingleLoader() {
-		return new NasaARFFLoader();
-	}
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+     */
+    @Override
+    protected SingleVersionLoader getSingleLoader() {
+        return new NasaARFFLoader();
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/NasaARFFLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -16,6 +30,5 @@
 
 /**
- * Loads the instances for a software version from an ARFF file of the
- * NASA/SOFTLAB/MDP data.
+ * Loads the instances for a software version from an ARFF file of the NASA/SOFTLAB/MDP data.
  * 
  * @author Steffen Herbold
@@ -23,194 +36,194 @@
 public class NasaARFFLoader implements SingleVersionLoader {
 
-	/**
-	 * used to map attributes the same attribute with different names to each
-	 * other
-	 */
-	Map<String, String> attributeNameMap;
-
-	/**
-	 * used to ensure that the attribute order is the same after loading
-	 */
-	List<String> attributeOrder;
-
-	/**
-	 * Constructor. Creates a new NasaARFFLoader.
-	 */
-	public NasaARFFLoader() {
-		attributeNameMap = new HashMap<>();
-
-		// Map entries for ar project
-		attributeNameMap.put("total_loc", "LOC_TOTAL");
-		attributeNameMap.put("comment_loc", "LOC_COMMENTS");
-		attributeNameMap.put("code_and_comment_loc", "LOC_CODE_AND_COMMENT");
-		attributeNameMap.put("executable_loc", "LOC_EXECUTABLE");
-		attributeNameMap.put("unique_operands", "NUM_UNIQUE_OPERANDS");
-		attributeNameMap.put("unique_operators", "NUM_UNIQUE_OPERATORS");
-		attributeNameMap.put("total_operands", "NUM_OPERANDS");
-		attributeNameMap.put("total_operators", "NUM_OPERATORS");
-		attributeNameMap.put("halstead_length", "HALSTEAD_LENGTH");
-		attributeNameMap.put("halstead_volume", "HALSTEAD_VOLUME");
-		attributeNameMap.put("halstead_difficulty", "HALSTEAD_DIFFICULTY");
-		attributeNameMap.put("halstead_effort", "HALSTEAD_EFFORT");
-		attributeNameMap.put("halstead_error", "HALSTEAD_ERROR_EST");
-		attributeNameMap.put("halstead_time", "HALSTEAD_PROG_TIME");
-		attributeNameMap.put("branch_count", "BRANCH_COUNT");
-		attributeNameMap.put("cyclomatic_complexity", "CYCLOMATIC_COMPLEXITY");
-		attributeNameMap.put("design_complexity", "DESIGN_COMPLEXITY");
-
-		// Map entries for KC2
-		attributeNameMap.put("loc", "LOC_TOTAL");
-		attributeNameMap.put("lOCode", "LOC_EXECUTABLE");
-		attributeNameMap.put("lOComment", "LOC_COMMENTS");
-		attributeNameMap.put("lOCodeAndComment", "LOC_CODE_AND_COMMENT");
-		attributeNameMap.put("uniq_Op", "NUM_UNIQUE_OPERATORS");
-		attributeNameMap.put("uniq_Opnd", "NUM_UNIQUE_OPERANDS");
-		attributeNameMap.put("total_Op", "NUM_OPERATORS");
-		attributeNameMap.put("total_Opnd", "NUM_OPERANDS");
-		attributeNameMap.put("v", "HALSTEAD_VOLUME");
-		attributeNameMap.put("l", "HALSTEAD_LENGTH");
-		attributeNameMap.put("d", "HALSTEAD_DIFFICULTY");
-		attributeNameMap.put("e", "HALSTEAD_EFFORT");
-		attributeNameMap.put("b", "HALSTEAD_ERROR_EST");
-		attributeNameMap.put("t", "HALSTEAD_PROG_TIME");
-		attributeNameMap.put("branchCount", "BRANCH_COUNT");
-		attributeNameMap.put("v(g)", "CYCLOMATIC_COMPLEXITY");
-		attributeNameMap.put("iv(g)", "DESIGN_COMPLEXITY");
-
-		attributeNameMap.put("defects", "bug");
-		attributeNameMap.put("Defective", "bug");
-		attributeNameMap.put("problems", "bug");
-		attributeNameMap.put("label", "bug");
-
-		// build list with normalized attribute order
-		attributeOrder = new LinkedList<>();
-
-		attributeOrder.add("LOC_TOTAL");
-		attributeOrder.add("LOC_EXECUTABLE");
-		attributeOrder.add("LOC_COMMENTS");
-		attributeOrder.add("LOC_CODE_AND_COMMENT");
-		attributeOrder.add("NUM_UNIQUE_OPERATORS");
-		attributeOrder.add("NUM_UNIQUE_OPERANDS");
-		attributeOrder.add("NUM_OPERATORS");
-		attributeOrder.add("NUM_OPERANDS");
-		attributeOrder.add("HALSTEAD_VOLUME");
-		attributeOrder.add("HALSTEAD_LENGTH");
-		attributeOrder.add("HALSTEAD_DIFFICULTY");
-		attributeOrder.add("HALSTEAD_EFFORT");
-		attributeOrder.add("HALSTEAD_ERROR_EST");
-		attributeOrder.add("HALSTEAD_PROG_TIME");
-		attributeOrder.add("BRANCH_COUNT");
-		attributeOrder.add("CYCLOMATIC_COMPLEXITY");
-		attributeOrder.add("DESIGN_COMPLEXITY");
-		attributeOrder.add("bug");
-	}
-
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.SingleVersionLoader#load(java.io.File)
-	 */
-	@Override
-	public Instances load(File file) {
-		BufferedReader reader;
-		Instances data;
-		try {
-			reader = new BufferedReader(new FileReader(file));
-			data = new Instances(reader);
-			reader.close();
-		} catch (IOException e) {
-			throw new RuntimeException("Error reading data", e);
-		}
-
-		// setting class attribute
-		data.setClassIndex(data.numAttributes() - 1);
-
-		// normalize attribute names
-		for (int i = 0; i < data.numAttributes(); i++) {
-			String mapValue = attributeNameMap.get(data.attribute(i).name());
-			if (mapValue != null) {
-				data.renameAttribute(i, mapValue);
-			}
-		}
-
-		// determine new attribute order (unwanted attributes are implicitly
-		// removed
-		String orderString = "";
-		for (String attName : attributeOrder) {
-			for (int i = 0; i < data.numAttributes(); i++) {
-				if (attName.equals(data.attribute(i).name())) {
-					orderString += (i + 1) + ",";
-				}
-			}
-		}
-		orderString = orderString.substring(0, orderString.length() - 1);
-
-		String relationName = data.relationName();
-		String[] options = new String[2];
-		options[0] = "-R";
-		options[1] = orderString;
-		Reorder reorder = new Reorder();
-		try {
-			reorder.setOptions(options);
-			reorder.setInputFormat(data);
-			data = Filter.useFilter(data, reorder);
-		} catch (Exception e) {
-			throw new RuntimeException("Error while reordering the data", e);
-		}
-		if (data.numAttributes() != attributeOrder.size()) {
-			throw new RuntimeException(
-					"Invalid number of attributes; filename: " + file.getName());
-		}
-
-		// normalize bug nominal values
-		Add add = new Add();
-		add.setAttributeIndex("last");
-		add.setNominalLabels("0,1");
-		add.setAttributeName("bug-new");
-		try {
-			add.setInputFormat(data);
-			data = Filter.useFilter(data, add);
-		} catch (Exception e) {
-			throw new RuntimeException(
-					"Error while normalizing the bug nonminal values", e);
-		}
-		data.setRelationName(relationName);
-
-		double classValue;
-
-		String firstValue = data.classAttribute().enumerateValues()
-				.nextElement().toString();
-		if (firstValue.equals("Y") || firstValue.equals("yes")
-				|| firstValue.equals("true")) {
-			classValue = 0.0;
-		} else {
-			classValue = 1.0;
-		}
-
-		for (int i = 0; i < data.numInstances(); i++) {
-			if (data.instance(i).classValue() == classValue) {
-				data.instance(i).setValue(data.classIndex() + 1, 1.0);
-			} else {
-				data.instance(i).setValue(data.classIndex() + 1, 0.0);
-			}
-		}
-
-		int oldClassIndex = data.classIndex();
-		data.setClassIndex(oldClassIndex + 1);
-		data.deleteAttributeAt(oldClassIndex);
-
-		return data;
-	}
-
-	/*
-	 * (non-Javadoc)
-	 * 
-	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
-	 * filenameFilter(java.lang.String)
-	 */
-	@Override
-	public boolean filenameFilter(String filename) {
-		return filename.endsWith(".arff");
-	}
+    /**
+     * used to map the same attribute with different names to each other
+     */
+    Map<String, String> attributeNameMap;
+
+    /**
+     * used to ensure that the attribute order is the same after loading
+     */
+    List<String> attributeOrder;
+
+    /**
+     * Constructor. Creates a new NasaARFFLoader.
+     */
+    public NasaARFFLoader() {
+        attributeNameMap = new HashMap<>();
+
+        // Map entries for ar project
+        attributeNameMap.put("total_loc", "LOC_TOTAL");
+        attributeNameMap.put("comment_loc", "LOC_COMMENTS");
+        attributeNameMap.put("code_and_comment_loc", "LOC_CODE_AND_COMMENT");
+        attributeNameMap.put("executable_loc", "LOC_EXECUTABLE");
+        attributeNameMap.put("unique_operands", "NUM_UNIQUE_OPERANDS");
+        attributeNameMap.put("unique_operators", "NUM_UNIQUE_OPERATORS");
+        attributeNameMap.put("total_operands", "NUM_OPERANDS");
+        attributeNameMap.put("total_operators", "NUM_OPERATORS");
+        attributeNameMap.put("halstead_length", "HALSTEAD_LENGTH");
+        attributeNameMap.put("halstead_volume", "HALSTEAD_VOLUME");
+        attributeNameMap.put("halstead_difficulty", "HALSTEAD_DIFFICULTY");
+        attributeNameMap.put("halstead_effort", "HALSTEAD_EFFORT");
+        attributeNameMap.put("halstead_error", "HALSTEAD_ERROR_EST");
+        attributeNameMap.put("halstead_time", "HALSTEAD_PROG_TIME");
+        attributeNameMap.put("branch_count", "BRANCH_COUNT");
+        attributeNameMap.put("cyclomatic_complexity", "CYCLOMATIC_COMPLEXITY");
+        attributeNameMap.put("design_complexity", "DESIGN_COMPLEXITY");
+
+        // Map entries for KC2
+        attributeNameMap.put("loc", "LOC_TOTAL");
+        attributeNameMap.put("lOCode", "LOC_EXECUTABLE");
+        attributeNameMap.put("lOComment", "LOC_COMMENTS");
+        attributeNameMap.put("lOCodeAndComment", "LOC_CODE_AND_COMMENT");
+        attributeNameMap.put("uniq_Op", "NUM_UNIQUE_OPERATORS");
+        attributeNameMap.put("uniq_Opnd", "NUM_UNIQUE_OPERANDS");
+        attributeNameMap.put("total_Op", "NUM_OPERATORS");
+        attributeNameMap.put("total_Opnd", "NUM_OPERANDS");
+        attributeNameMap.put("v", "HALSTEAD_VOLUME");
+        attributeNameMap.put("l", "HALSTEAD_LENGTH");
+        attributeNameMap.put("d", "HALSTEAD_DIFFICULTY");
+        attributeNameMap.put("e", "HALSTEAD_EFFORT");
+        attributeNameMap.put("b", "HALSTEAD_ERROR_EST");
+        attributeNameMap.put("t", "HALSTEAD_PROG_TIME");
+        attributeNameMap.put("branchCount", "BRANCH_COUNT");
+        attributeNameMap.put("v(g)", "CYCLOMATIC_COMPLEXITY");
+        attributeNameMap.put("iv(g)", "DESIGN_COMPLEXITY");
+
+        attributeNameMap.put("defects", "bug");
+        attributeNameMap.put("Defective", "bug");
+        attributeNameMap.put("problems", "bug");
+        attributeNameMap.put("label", "bug");
+
+        // build list with normalized attribute order
+        attributeOrder = new LinkedList<>();
+
+        attributeOrder.add("LOC_TOTAL");
+        attributeOrder.add("LOC_EXECUTABLE");
+        attributeOrder.add("LOC_COMMENTS");
+        attributeOrder.add("LOC_CODE_AND_COMMENT");
+        attributeOrder.add("NUM_UNIQUE_OPERATORS");
+        attributeOrder.add("NUM_UNIQUE_OPERANDS");
+        attributeOrder.add("NUM_OPERATORS");
+        attributeOrder.add("NUM_OPERANDS");
+        attributeOrder.add("HALSTEAD_VOLUME");
+        attributeOrder.add("HALSTEAD_LENGTH");
+        attributeOrder.add("HALSTEAD_DIFFICULTY");
+        attributeOrder.add("HALSTEAD_EFFORT");
+        attributeOrder.add("HALSTEAD_ERROR_EST");
+        attributeOrder.add("HALSTEAD_PROG_TIME");
+        attributeOrder.add("BRANCH_COUNT");
+        attributeOrder.add("CYCLOMATIC_COMPLEXITY");
+        attributeOrder.add("DESIGN_COMPLEXITY");
+        attributeOrder.add("bug");
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.SingleVersionLoader#load(java.io.File)
+     */
+    @Override
+    public Instances load(File file) {
+        BufferedReader reader;
+        Instances data;
+        try {
+            reader = new BufferedReader(new FileReader(file));
+            data = new Instances(reader);
+            reader.close();
+        }
+        catch (IOException e) {
+            throw new RuntimeException("Error reading data", e);
+        }
+
+        // setting class attribute
+        data.setClassIndex(data.numAttributes() - 1);
+
+        // normalize attribute names
+        for (int i = 0; i < data.numAttributes(); i++) {
+            String mapValue = attributeNameMap.get(data.attribute(i).name());
+            if (mapValue != null) {
+                data.renameAttribute(i, mapValue);
+            }
+        }
+
+        // determine new attribute order (unwanted attributes are implicitly
+        // removed)
+        String orderString = "";
+        for (String attName : attributeOrder) {
+            for (int i = 0; i < data.numAttributes(); i++) {
+                if (attName.equals(data.attribute(i).name())) {
+                    orderString += (i + 1) + ",";
+                }
+            }
+        }
+        orderString = orderString.substring(0, orderString.length() - 1);
+
+        String relationName = data.relationName();
+        String[] options = new String[2];
+        options[0] = "-R";
+        options[1] = orderString;
+        Reorder reorder = new Reorder();
+        try {
+            reorder.setOptions(options);
+            reorder.setInputFormat(data);
+            data = Filter.useFilter(data, reorder);
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Error while reordering the data", e);
+        }
+        if (data.numAttributes() != attributeOrder.size()) {
+            throw new RuntimeException("Invalid number of attributes; filename: " + file.getName());
+        }
+
+        // normalize bug nominal values
+        Add add = new Add();
+        add.setAttributeIndex("last");
+        add.setNominalLabels("0,1");
+        add.setAttributeName("bug-new");
+        try {
+            add.setInputFormat(data);
+            data = Filter.useFilter(data, add);
+        }
+        catch (Exception e) {
+            throw new RuntimeException("Error while normalizing the bug nominal values", e);
+        }
+        data.setRelationName(relationName);
+
+        double classValue;
+
+        String firstValue = data.classAttribute().enumerateValues().nextElement().toString();
+        if (firstValue.equals("Y") || firstValue.equals("yes") || firstValue.equals("true")) {
+            classValue = 0.0;
+        }
+        else {
+            classValue = 1.0;
+        }
+
+        for (int i = 0; i < data.numInstances(); i++) {
+            if (data.instance(i).classValue() == classValue) {
+                data.instance(i).setValue(data.classIndex() + 1, 1.0);
+            }
+            else {
+                data.instance(i).setValue(data.classIndex() + 1, 0.0);
+            }
+        }
+
+        int oldClassIndex = data.classIndex();
+        data.setClassIndex(oldClassIndex + 1);
+        data.deleteAttributeAt(oldClassIndex);
+
+        return data;
+    }
+
+    /*
+     * (non-Javadoc)
+     * 
+     * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader.SingleVersionLoader#
+     * filenameFilter(java.lang.String)
+     */
+    @Override
+    public boolean filenameFilter(String filename) {
+        return filename.endsWith(".arff");
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/SingleVersionLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/SingleVersionLoader.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/SingleVersionLoader.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.loader;
 
@@ -6,6 +20,5 @@
 
 /**
- * Interface for version loaders, i.e., loading of a set of instances from a
- * file
+ * Interface for version loaders, i.e., loading of a set of instances from a file
  * 
  * @author Steffen Herbold
@@ -13,21 +26,21 @@
 public interface SingleVersionLoader {
 
-	/**
-	 * Loads the instances.
-	 * 
-	 * @param file
-	 *            handle to the file of the instances
-	 * @return the instances
-	 */
-	Instances load(File file);
+    /**
+     * Loads the instances.
+     * 
+     * @param file
+     *            handle to the file of the instances
+     * @return the instances
+     */
+    Instances load(File file);
 
-	/**
-	 * Defines a filter for the files to be loaded; only strings that end with
-	 * the filter are considered.
-	 * 
-	 * @param filename
-	 *            string defining the filename filter
-	 * @return true if a filename shall be considered
-	 */
-	boolean filenameFilter(String endsWith);
+    /**
+     * Defines a filter for the files to be loaded; only strings that end with the filter are
+     * considered.
+     * 
+     * @param endsWith
+     *            string defining the filename filter
+     * @return true if a filename shall be considered
+     */
+    boolean filenameFilter(String endsWith);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/FixClass.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/FixClass.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/FixClass.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -14,77 +28,80 @@
  * @author Steffen Herbold
  */
-public class FixClass extends AbstractClassifier implements ITrainingStrategy, IWekaCompatibleTrainer {
+public class FixClass extends AbstractClassifier implements ITrainingStrategy,
+    IWekaCompatibleTrainer
+{
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private double fixedClassValue = 0.0d;
+    private double fixedClassValue = 0.0d;
 
-	/**
-	 * Returns default capabilities of the classifier.
-	 * 
-	 * @return the capabilities of this classifier
-	 */
-	@Override
-	public Capabilities getCapabilities() {
-		Capabilities result = super.getCapabilities();
-		result.disableAll();
+    /**
+     * Returns default capabilities of the classifier.
+     * 
+     * @return the capabilities of this classifier
+     */
+    @Override
+    public Capabilities getCapabilities() {
+        Capabilities result = super.getCapabilities();
+        result.disableAll();
 
-		// attributes
-		result.enable(Capability.NOMINAL_ATTRIBUTES);
-		result.enable(Capability.NUMERIC_ATTRIBUTES);
-		result.enable(Capability.DATE_ATTRIBUTES);
-		result.enable(Capability.STRING_ATTRIBUTES);
-		result.enable(Capability.RELATIONAL_ATTRIBUTES);
-		result.enable(Capability.MISSING_VALUES);
+        // attributes
+        result.enable(Capability.NOMINAL_ATTRIBUTES);
+        result.enable(Capability.NUMERIC_ATTRIBUTES);
+        result.enable(Capability.DATE_ATTRIBUTES);
+        result.enable(Capability.STRING_ATTRIBUTES);
+        result.enable(Capability.RELATIONAL_ATTRIBUTES);
+        result.enable(Capability.MISSING_VALUES);
 
-		// class
-		result.enable(Capability.NOMINAL_CLASS);
-		result.enable(Capability.NUMERIC_CLASS);
-		result.enable(Capability.MISSING_CLASS_VALUES);
+        // class
+        result.enable(Capability.NOMINAL_CLASS);
+        result.enable(Capability.NUMERIC_CLASS);
+        result.enable(Capability.MISSING_CLASS_VALUES);
 
-		// instances
-		result.setMinimumNumberInstances(0);
+        // instances
+        result.setMinimumNumberInstances(0);
 
-		return result;
-	}
+        return result;
+    }
 
-	@Override
-	public void setOptions(String[] options) throws Exception {
-		fixedClassValue = Double.parseDouble(Utils.getOption('C', options));
-	}
+    @Override
+    public void setOptions(String[] options) throws Exception {
+        fixedClassValue = Double.parseDouble(Utils.getOption('C', options));
+    }
 
-	@Override
-	public double classifyInstance(Instance instance) {
-		return fixedClassValue;
-	}
+    @Override
+    public double classifyInstance(Instance instance) {
+        return fixedClassValue;
+    }
 
-	@Override
-	public void buildClassifier(Instances traindata) throws Exception {
-		// do nothing
-	}
+    @Override
+    public void buildClassifier(Instances traindata) throws Exception {
+        // do nothing
+    }
 
-	@Override
-	public void setParameter(String parameters) {
-		try {
-			this.setOptions(parameters.split(" "));
-		} catch (Exception e) {
-			e.printStackTrace();
-		}		
-	}
+    @Override
+    public void setParameter(String parameters) {
+        try {
+            this.setOptions(parameters.split(" "));
+        }
+        catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
 
-	@Override
-	public void apply(Instances traindata) {
-		// do nothing!
-	}
+    @Override
+    public void apply(Instances traindata) {
+        // do nothing!
+    }
 
-	@Override
-	public String getName() {
-		return "FixClass";
-	}
+    @Override
+    public String getName() {
+        return "FixClass";
+    }
 
-	@Override
-	public Classifier getClassifier() {
-		return this;
-	}
+    @Override
+    public Classifier getClassifier() {
+        return this;
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ISetWiseTrainingStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ISetWiseTrainingStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ISetWiseTrainingStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -7,7 +21,7 @@
 // Bagging Strategy: separate models for each training data set
 public interface ISetWiseTrainingStrategy extends ITrainer {
-	
-	void apply(SetUniqueList<Instances> traindataSet);
-	
-	String getName();
+
+    void apply(SetUniqueList<Instances> traindataSet);
+
+    String getName();
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainer.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainer.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainer.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainingStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainingStrategy.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/ITrainingStrategy.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -4,7 +18,7 @@
 
 public interface ITrainingStrategy extends ITrainer {
-	
-	void apply(Instances traindata);
-	
-	String getName();
+
+    void apply(Instances traindata);
+
+    String getName();
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/IWekaCompatibleTrainer.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/IWekaCompatibleTrainer.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/IWekaCompatibleTrainer.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -4,7 +18,7 @@
 
 public interface IWekaCompatibleTrainer extends ITrainer {
-	
-	Classifier getClassifier();
-	
-	String getName();
+
+    Classifier getClassifier();
+
+    String getName();
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/QuadTree.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/QuadTree.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/QuadTree.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -12,462 +26,483 @@
  * QuadTree implementation
  * 
- * QuadTree gets a list of instances and then recursively split them into 4 childs
- * For this it uses the median of the 2 values x,y
+ * QuadTree gets a list of instances and then recursively splits them into 4 children. For this it
+ * uses the median of the 2 values x,y.
  */
 public class QuadTree {
-	
-	/* 1 parent or null */
-	private QuadTree parent = null;
-	
-	/* 4 childs, 1 per quadrant */
-	private QuadTree child_nw;
-	private QuadTree child_ne;
-	private QuadTree child_se;
-	private QuadTree child_sw;
-	
-	/* list (only helps with generation of list of childs!) */
-	private ArrayList<QuadTree> l = new ArrayList<QuadTree>();
-	
-	/* level only used for debugging */
-	public int level = 0;
-	
-	/* size of the quadrant */
-	private double[] x;
-	private double[] y;
-	
-	public static boolean verbose = false;
-	public static int size = 0;
-	public static double alpha = 0;
-	
-	/* cluster payloads */
-	public static ArrayList<ArrayList<QuadTreePayload<Instance>>> ccluster = new ArrayList<ArrayList<QuadTreePayload<Instance>>>();
-	
-	/* cluster sizes (index is cluster number, arraylist is list of boxes (x0,y0,x1,y1) */ 
-	public static HashMap<Integer, ArrayList<Double[][]>> csize = new HashMap<Integer, ArrayList<Double[][]>>();
-	
-	/* payload of this instance */
-	private ArrayList<QuadTreePayload<Instance>> payload;
-
-	
-	public QuadTree(QuadTree parent, ArrayList<QuadTreePayload<Instance>> payload) {
-		this.parent = parent;
-		this.payload = payload;
-	}
-	
-	
-	public String toString() {
-		String n = "";
-		if(this.parent == null) {
-			n += "rootnode ";
-		}
-		String level = new String(new char[this.level]).replace("\0", "-");
-		n += level + " instances: " + this.getNumbers();
-		return n;
-	}
-	
-	/**
-	 * Returns the payload, used for clustering
-	 * in the clustering list we only have children with paylod
-	 * 
-	 * @return payload
-	 */
-	public ArrayList<QuadTreePayload<Instance>> getPayload() {
-		return this.payload;
-	}
-	
-	/**
-	 * Calculate the density of this quadrant
-	 * 
-	 * density = number of instances / global size (all instances)
-	 * 
-	 * @return density
-	 */
-	public double getDensity() {
-		double dens = 0;
-		dens = (double)this.getNumbers() / QuadTree.size;
-		return dens;
-	}
-	
-	public void setSize(double[] x, double[] y){
-		this.x = x;
-		this.y = y;
-	}
-	
-	public double[][] getSize() {
-		return new double[][] {this.x, this.y}; 
-	}
-	
-	public Double[][] getSizeDouble() {
-		Double[] tmpX = new Double[2];
-		Double[] tmpY = new Double[2];
-		
-		tmpX[0] = this.x[0];
-		tmpX[1] = this.x[1];
-		
-		tmpY[0] = this.y[0];
-		tmpY[1] = this.y[1];
-		
-		return new Double[][] {tmpX, tmpY}; 
-	}
-	
-	/**
-	 * TODO: DRY, median ist immer dasselbe
-	 *  
-	 * @return median for x
-	 */
-	private double getMedianForX() {
-		double med_x =0 ;
-		
-		Collections.sort(this.payload, new Comparator<QuadTreePayload<Instance>>() {
-	        @Override
-	        public int compare(QuadTreePayload<Instance> x1, QuadTreePayload<Instance> x2) {
-	            return Double.compare(x1.x, x2.x);
-	        }
-	    });
-
-		if(this.payload.size() % 2 == 0) {
-			int mid = this.payload.size() / 2;
-			med_x = (this.payload.get(mid).x + this.payload.get(mid+1).x) / 2;
-		}else {
-			int mid = this.payload.size() / 2;
-			med_x = this.payload.get(mid).x;
-		}
-		
-		if(QuadTree.verbose) {
-			System.out.println("sorted:");
-			for(int i = 0; i < this.payload.size(); i++) {
-				System.out.print(""+this.payload.get(i).x+",");
-			}
-			System.out.println("median x: " + med_x);
-		}
-		return med_x;
-	}
-	
-	private double getMedianForY() {
-		double med_y =0 ;
-		
-		Collections.sort(this.payload, new Comparator<QuadTreePayload<Instance>>() {
-	        @Override
-	        public int compare(QuadTreePayload<Instance> y1, QuadTreePayload<Instance> y2) {
-	            return Double.compare(y1.y, y2.y);
-	        }
-	    });
-		
-		if(this.payload.size() % 2 == 0) {
-			int mid = this.payload.size() / 2;
-			med_y = (this.payload.get(mid).y + this.payload.get(mid+1).y) / 2;
-		}else {
-			int mid = this.payload.size() / 2;
-			med_y = this.payload.get(mid).y;
-		}
-		
-		if(QuadTree.verbose) {
-			System.out.println("sorted:");
-			for(int i = 0; i < this.payload.size(); i++) {
-				System.out.print(""+this.payload.get(i).y+",");
-			}
-			System.out.println("median y: " + med_y);
-		}
-		return med_y;
-	}
-	
-	/**
-	 * Reurns the number of instances in the payload
-	 * 
-	 * @return int number of instances
-	 */
-	public int getNumbers() {
-		int number = 0;
-		if(this.payload != null) {
-			number = this.payload.size();
-		}
-		return number;
-	}
-	
-	/**
-	 * Calculate median values of payload for x, y and split into 4 sectors
-	 * 
-	 * @return Array of QuadTree nodes (4 childs)
-	 * @throws Exception if we would run into an recursive loop
-	 */
-	public QuadTree[] split() throws Exception {
-				
-		double medx = this.getMedianForX();
-		double medy = this.getMedianForY();
-		
-		// Payload lists for each child
-		ArrayList<QuadTreePayload<Instance>> nw = new ArrayList<QuadTreePayload<Instance>>();
-		ArrayList<QuadTreePayload<Instance>> sw = new ArrayList<QuadTreePayload<Instance>>();
-		ArrayList<QuadTreePayload<Instance>> ne = new ArrayList<QuadTreePayload<Instance>>();
-		ArrayList<QuadTreePayload<Instance>> se = new ArrayList<QuadTreePayload<Instance>>();
-		
-		// sort the payloads to new payloads
-		// here we have the problem that payloads with the same values are sorted
-		// into the same slots and it could happen that medx and medy = size_x[1] and size_y[1]
-		// in that case we would have an endless loop
-		for(int i=0; i < this.payload.size(); i++) {
-			
-			QuadTreePayload<Instance> item = this.payload.get(i);
-			
-			// north west
-			if(item.x <= medx && item.y >= medy) {
-				nw.add(item);
-			}
-			
-			// south west
-			else if(item.x <= medx && item.y <= medy) {
-				sw.add(item);
-			}
-
-			// north east
-			else if(item.x >= medx && item.y >= medy) {
-				ne.add(item);
-			}
-			
-			// south east
-			else if(item.x >= medx && item.y <= medy) {
-				se.add(item);
-			}
-		}
-		
-		// if we assign one child a payload equal to our own (see problem above)
-		// we throw an exceptions which stops the recursion on this node
-		if(nw.equals(this.payload)) {
-			throw new Exception("payload equal");
-		}
-		if(sw.equals(this.payload)) {
-			throw new Exception("payload equal");
-		}
-		if(ne.equals(this.payload)) {
-			throw new Exception("payload equal");
-		}
-		if(se.equals(this.payload)) {
-			throw new Exception("payload equal");
-		}
-
-		this.child_nw = new QuadTree(this, nw);
-		this.child_nw.setSize(new double[] {this.x[0], medx}, new double[] {medy, this.y[1]});
-		this.child_nw.level = this.level + 1;
-		
-		this.child_sw = new QuadTree(this, sw);
-		this.child_sw.setSize(new double[] {this.x[0], medx}, new double[] {this.y[0], medy});
-		this.child_sw.level = this.level + 1;
-		
-		this.child_ne = new QuadTree(this, ne);
-		this.child_ne.setSize(new double[] {medx, this.x[1]}, new double[] {medy, this.y[1]});
-		this.child_ne.level = this.level + 1;
-		
-		this.child_se = new QuadTree(this, se);
-		this.child_se.setSize(new double[] {medx, this.x[1]}, new double[] {this.y[0], medy});
-		this.child_se.level = this.level + 1;	
-		
-		this.payload = null;
-		return new QuadTree[] {this.child_nw, this.child_ne, this.child_se, this.child_sw};
-	}
-	
-	/** 
-	 * TODO: static method
-	 * 
-	 * @param q
-	 */
-	public void recursiveSplit(QuadTree q) {
-		if(QuadTree.verbose) {
-			System.out.println("splitting: "+ q);
-		}
-		if(q.getNumbers() < QuadTree.alpha) {
-			return;
-		}else{
-			// exception is thrown if we would run into an endless loop (see comments in split())
-			try {
-				QuadTree[] childs = q.split();			
-				this.recursiveSplit(childs[0]);
-				this.recursiveSplit(childs[1]);
-				this.recursiveSplit(childs[2]);
-				this.recursiveSplit(childs[3]);
-			}catch(Exception e) {
-				return;
-			}
-		}
-	}
-	
-	/**
-	 * returns an list of childs sorted by density
-	 * 
-	 * @param q QuadTree
-	 * @return list of QuadTrees
-	 */
-	private void generateList(QuadTree q) {
-		
-		// we only have all childs or none at all
-		if(q.child_ne == null) {
-			this.l.add(q);
-		}
-		
-		if(q.child_ne != null) {
-			this.generateList(q.child_ne);
-		}
-		if(q.child_nw != null) {
-			this.generateList(q.child_nw);
-		}
-		if(q.child_se != null) {
-			this.generateList(q.child_se);
-		}
-		if(q.child_sw != null) {
-			this.generateList(q.child_sw);
-		}
-	}
-	
-	/**
-	 * Checks if passed QuadTree is neighboring to us
-	 * 
-	 * @param q QuadTree
-	 * @return true if passed QuadTree is a neighbor
-	 */
-	public boolean isNeighbour(QuadTree q) {
-		boolean is_neighbour = false;
-		
-		double[][] our_size = this.getSize();
-		double[][] new_size = q.getSize();
-		
-		// X is i=0, Y is i=1
-		for(int i =0; i < 2; i++) {
-			// we are smaller than q
-			// -------------- q
-			//    ------- we
-			if(our_size[i][0] >= new_size[i][0] && our_size[i][1] <= new_size[i][1]) {
-				is_neighbour = true;
-			}
-			// we overlap with q at some point
-			//a) ---------------q
-			//         ----------- we
-			//b)     --------- q
-			// --------- we
-			if((our_size[i][0] >= new_size[i][0] && our_size[i][0] <= new_size[i][1]) ||
-			   (our_size[i][1] >= new_size[i][0] && our_size[i][1] <= new_size[i][1])) {
-				is_neighbour = true;
-			}
-			// we are larger than q
-			//    ---- q
-			// ---------- we
-			if(our_size[i][1] >= new_size[i][1] && our_size[i][0] <= new_size[i][0]) {
-				is_neighbour = true;
-			}
-		}
-		
-		if(is_neighbour && QuadTree.verbose) {
-			System.out.println(this + " neighbour of: " + q);
-		}
-		
-		return is_neighbour;
-	}
-	
-	/**
-	 * Perform pruning and clustering of the quadtree
-	 * 
-	 * Pruning according to:
-	 * Tim Menzies, Andrew Butcher, David Cok, Andrian Marcus, Lucas Layman, 
-	 * Forrest Shull, Burak Turhan, Thomas Zimmermann, 
-	 * "Local versus Global Lessons for Defect Prediction and Effort Estimation," 
-	 * IEEE Transactions on Software Engineering, vol. 39, no. 6, pp. 822-834, June, 2013  
-	 *  
-	 * 1) get list of leaf quadrants
-	 * 2) sort by their density
-	 * 3) set stop_rule to 0.5 * highest Density in the list
-	 * 4) merge all nodes with a density > stop_rule to the new cluster and remove all from list
-	 * 5) repeat
-	 * 
-	 * @param q List of QuadTree (children only)
-	 */
-	public void gridClustering(ArrayList<QuadTree> list) {
-		
-		if(list.size() == 0) {
-			return;
-		}
-		
-		double stop_rule;
-		QuadTree biggest;
-		QuadTree current;
-		
-		// current clusterlist
-		ArrayList<QuadTreePayload<Instance>> current_cluster;
-
-		// remove list (for removal of items after scanning of the list)
-	    ArrayList<Integer> remove = new ArrayList<Integer>();
-		
-		// 1. find biggest, and add it
-	    biggest = list.get(list.size()-1);
-	    stop_rule = biggest.getDensity() * 0.5;
-	    
-	    current_cluster = new ArrayList<QuadTreePayload<Instance>>();
-	    current_cluster.addAll(biggest.getPayload());
-
-	    // remove the biggest because we are starting with it
-	    remove.add(list.size()-1);
-	    
-	    ArrayList<Double[][]> tmpSize = new ArrayList<Double[][]>();
-	    tmpSize.add(biggest.getSizeDouble());
-	    
-		// check the items for their density
-	    for(int i=list.size()-1; i >= 0; i--) {
-	    	current = list.get(i);
-	    	
-			// 2. find neighbors with correct density
-	    	// if density > stop_rule and is_neighbour add to cluster and remove from list
-	    	if(current.getDensity() > stop_rule && !current.equals(biggest) && current.isNeighbour(biggest)) {
-	    		current_cluster.addAll(current.getPayload());
-	    		
-	    		// add it to remove list (we cannot remove it inside the loop because it would move the index)
-	    		remove.add(i);
-	    		
-	    		// get the size
-	    		tmpSize.add(current.getSizeDouble());
-	    	}
-		}
-	    
-		// 3. remove our removal candidates from the list
-	    for(Integer item: remove) {
-	    	list.remove((int)item);
-	    }
-	    
-		// 4. add to cluster
-	    QuadTree.ccluster.add(current_cluster);
-		
-	    // 5. add sizes of our current (biggest) this adds a number of sizes (all QuadTree Instances belonging to this cluster)
-	    // we need that to classify test instances to a cluster later
-	    Integer cnumber = new Integer(QuadTree.ccluster.size()-1);
-	    if(QuadTree.csize.containsKey(cnumber) == false) {
-	    	QuadTree.csize.put(cnumber, tmpSize);
-	    }
-
-		// repeat
-	    this.gridClustering(list);
-	}
-	
-	public void printInfo() {
-	    System.out.println("we have " + ccluster.size() + " clusters");
-	    
-	    for(int i=0; i < ccluster.size(); i++) {
-	    	System.out.println("cluster: "+i+ " size: "+ ccluster.get(i).size());
-	    }
-	}
-	
-	/**
-	 * Helper Method to get a sorted list (by density) for all
-	 * children
-	 * 
-	 * @param q QuadTree
-	 * @return Sorted ArrayList of quadtrees
-	 */
-	public ArrayList<QuadTree> getList(QuadTree q) {
-		this.generateList(q);
-		
-		Collections.sort(this.l, new Comparator<QuadTree>() {
-	        @Override
-	        public int compare(QuadTree x1, QuadTree x2) {
-	            return Double.compare(x1.getDensity(), x2.getDensity());
-	        }
-	    });
-		
-		return this.l;
-	}
+
+    /* 1 parent or null */
+    private QuadTree parent = null;
+
+    /* 4 childs, 1 per quadrant */
+    private QuadTree child_nw;
+    private QuadTree child_ne;
+    private QuadTree child_se;
+    private QuadTree child_sw;
+
+    /* list (only helps with generation of list of childs!) */
+    private ArrayList<QuadTree> l = new ArrayList<QuadTree>();
+
+    /* level only used for debugging */
+    public int level = 0;
+
+    /* size of the quadrant */
+    private double[] x;
+    private double[] y;
+
+    public static boolean verbose = false;
+    public static int size = 0;
+    public static double alpha = 0;
+
+    /* cluster payloads */
+    public static ArrayList<ArrayList<QuadTreePayload<Instance>>> ccluster =
+        new ArrayList<ArrayList<QuadTreePayload<Instance>>>();
+
+    /* cluster sizes (index is cluster number, arraylist is list of boxes (x0,y0,x1,y1)) */
+    public static HashMap<Integer, ArrayList<Double[][]>> csize =
+        new HashMap<Integer, ArrayList<Double[][]>>();
+
+    /* payload of this instance */
+    private ArrayList<QuadTreePayload<Instance>> payload;
+
+    public QuadTree(QuadTree parent, ArrayList<QuadTreePayload<Instance>> payload) {
+        this.parent = parent;
+        this.payload = payload;
+    }
+
+    public String toString() {
+        String n = "";
+        if (this.parent == null) {
+            n += "rootnode ";
+        }
+        String level = new String(new char[this.level]).replace("\0", "-");
+        n += level + " instances: " + this.getNumbers();
+        return n;
+    }
+
+    /**
+     * Returns the payload, used for clustering; in the clustering list we only have children with
+     * payload
+     * 
+     * @return payload
+     */
+    public ArrayList<QuadTreePayload<Instance>> getPayload() {
+        return this.payload;
+    }
+
+    /**
+     * Calculate the density of this quadrant
+     * 
+     * density = number of instances / global size (all instances)
+     * 
+     * @return density
+     */
+    public double getDensity() {
+        double dens = 0;
+        dens = (double) this.getNumbers() / QuadTree.size;
+        return dens;
+    }
+
+    public void setSize(double[] x, double[] y) {
+        this.x = x;
+        this.y = y;
+    }
+
+    public double[][] getSize() {
+        return new double[][]
+            { this.x, this.y };
+    }
+
+    public Double[][] getSizeDouble() {
+        Double[] tmpX = new Double[2];
+        Double[] tmpY = new Double[2];
+
+        tmpX[0] = this.x[0];
+        tmpX[1] = this.x[1];
+
+        tmpY[0] = this.y[0];
+        tmpY[1] = this.y[1];
+
+        return new Double[][]
+            { tmpX, tmpY };
+    }
+
+    /**
+     * TODO: DRY, the median computation is always the same
+     * 
+     * @return median for x
+     */
+    private double getMedianForX() {
+        double med_x = 0;
+
+        Collections.sort(this.payload, new Comparator<QuadTreePayload<Instance>>() {
+            @Override
+            public int compare(QuadTreePayload<Instance> x1, QuadTreePayload<Instance> x2) {
+                return Double.compare(x1.x, x2.x);
+            }
+        });
+
+        if (this.payload.size() % 2 == 0) {
+            int mid = this.payload.size() / 2;
+            med_x = (this.payload.get(mid).x + this.payload.get(mid + 1).x) / 2;
+        }
+        else {
+            int mid = this.payload.size() / 2;
+            med_x = this.payload.get(mid).x;
+        }
+
+        if (QuadTree.verbose) {
+            System.out.println("sorted:");
+            for (int i = 0; i < this.payload.size(); i++) {
+                System.out.print("" + this.payload.get(i).x + ",");
+            }
+            System.out.println("median x: " + med_x);
+        }
+        return med_x;
+    }
+
+    private double getMedianForY() {
+        double med_y = 0;
+
+        Collections.sort(this.payload, new Comparator<QuadTreePayload<Instance>>() {
+            @Override
+            public int compare(QuadTreePayload<Instance> y1, QuadTreePayload<Instance> y2) {
+                return Double.compare(y1.y, y2.y);
+            }
+        });
+
+        if (this.payload.size() % 2 == 0) {
+            int mid = this.payload.size() / 2;
+            med_y = (this.payload.get(mid).y + this.payload.get(mid + 1).y) / 2;
+        }
+        else {
+            int mid = this.payload.size() / 2;
+            med_y = this.payload.get(mid).y;
+        }
+
+        if (QuadTree.verbose) {
+            System.out.println("sorted:");
+            for (int i = 0; i < this.payload.size(); i++) {
+                System.out.print("" + this.payload.get(i).y + ",");
+            }
+            System.out.println("median y: " + med_y);
+        }
+        return med_y;
+    }
+
+    /**
+     * Returns the number of instances in the payload
+     * 
+     * @return int number of instances
+     */
+    public int getNumbers() {
+        int number = 0;
+        if (this.payload != null) {
+            number = this.payload.size();
+        }
+        return number;
+    }
+
+    /**
+     * Calculate median values of payload for x, y and split into 4 sectors
+     * 
+     * @return Array of QuadTree nodes (4 childs)
+     * @throws Exception
+     *             if we would run into a recursive loop
+     */
+    public QuadTree[] split() throws Exception {
+
+        double medx = this.getMedianForX();
+        double medy = this.getMedianForY();
+
+        // Payload lists for each child
+        ArrayList<QuadTreePayload<Instance>> nw = new ArrayList<QuadTreePayload<Instance>>();
+        ArrayList<QuadTreePayload<Instance>> sw = new ArrayList<QuadTreePayload<Instance>>();
+        ArrayList<QuadTreePayload<Instance>> ne = new ArrayList<QuadTreePayload<Instance>>();
+        ArrayList<QuadTreePayload<Instance>> se = new ArrayList<QuadTreePayload<Instance>>();
+
+        // sort the payloads to new payloads
+        // here we have the problem that payloads with the same values are sorted
+        // into the same slots and it could happen that medx and medy = size_x[1] and size_y[1]
+        // in that case we would have an endless loop
+        for (int i = 0; i < this.payload.size(); i++) {
+
+            QuadTreePayload<Instance> item = this.payload.get(i);
+
+            // north west
+            if (item.x <= medx && item.y >= medy) {
+                nw.add(item);
+            }
+
+            // south west
+            else if (item.x <= medx && item.y <= medy) {
+                sw.add(item);
+            }
+
+            // north east
+            else if (item.x >= medx && item.y >= medy) {
+                ne.add(item);
+            }
+
+            // south east
+            else if (item.x >= medx && item.y <= medy) {
+                se.add(item);
+            }
+        }
+
+        // if we assign one child a payload equal to our own (see problem above)
+        // we throw an exceptions which stops the recursion on this node
+        if (nw.equals(this.payload)) {
+            throw new Exception("payload equal");
+        }
+        if (sw.equals(this.payload)) {
+            throw new Exception("payload equal");
+        }
+        if (ne.equals(this.payload)) {
+            throw new Exception("payload equal");
+        }
+        if (se.equals(this.payload)) {
+            throw new Exception("payload equal");
+        }
+
+        this.child_nw = new QuadTree(this, nw);
+        this.child_nw.setSize(new double[]
+            { this.x[0], medx }, new double[]
+            { medy, this.y[1] });
+        this.child_nw.level = this.level + 1;
+
+        this.child_sw = new QuadTree(this, sw);
+        this.child_sw.setSize(new double[]
+            { this.x[0], medx }, new double[]
+            { this.y[0], medy });
+        this.child_sw.level = this.level + 1;
+
+        this.child_ne = new QuadTree(this, ne);
+        this.child_ne.setSize(new double[]
+            { medx, this.x[1] }, new double[]
+            { medy, this.y[1] });
+        this.child_ne.level = this.level + 1;
+
+        this.child_se = new QuadTree(this, se);
+        this.child_se.setSize(new double[]
+            { medx, this.x[1] }, new double[]
+            { this.y[0], medy });
+        this.child_se.level = this.level + 1;
+
+        this.payload = null;
+        return new QuadTree[]
+            { this.child_nw, this.child_ne, this.child_se, this.child_sw };
+    }
+
+    /**
+     * TODO: static method
+     * 
+     * @param q
+     */
+    public void recursiveSplit(QuadTree q) {
+        if (QuadTree.verbose) {
+            System.out.println("splitting: " + q);
+        }
+        if (q.getNumbers() < QuadTree.alpha) {
+            return;
+        }
+        else {
+            // exception is thrown if we would run into an endless loop (see comments in split())
+            try {
+                QuadTree[] childs = q.split();
+                this.recursiveSplit(childs[0]);
+                this.recursiveSplit(childs[1]);
+                this.recursiveSplit(childs[2]);
+                this.recursiveSplit(childs[3]);
+            }
+            catch (Exception e) {
+                return;
+            }
+        }
+    }
+
+    /**
+     * Recursively collects all leaf quadrants of the given subtree into the internal list
+     * {@code l}
+     * 
+     * @param q
+     *            QuadTree root of the subtree to traverse
+     */
+    private void generateList(QuadTree q) {
+
+        // we only have all childs or none at all
+        if (q.child_ne == null) {
+            this.l.add(q);
+        }
+
+        if (q.child_ne != null) {
+            this.generateList(q.child_ne);
+        }
+        if (q.child_nw != null) {
+            this.generateList(q.child_nw);
+        }
+        if (q.child_se != null) {
+            this.generateList(q.child_se);
+        }
+        if (q.child_sw != null) {
+            this.generateList(q.child_sw);
+        }
+    }
+
+    /**
+     * Checks whether the passed QuadTree is a neighbor of this quadrant
+     * 
+     * @param q
+     *            QuadTree
+     * @return true if passed QuadTree is a neighbor
+     */
+    public boolean isNeighbour(QuadTree q) {
+        boolean is_neighbour = false;
+
+        double[][] our_size = this.getSize();
+        double[][] new_size = q.getSize();
+
+        // X is i=0, Y is i=1
+        for (int i = 0; i < 2; i++) {
+            // we are smaller than q
+            // -------------- q
+            // ------- we
+            if (our_size[i][0] >= new_size[i][0] && our_size[i][1] <= new_size[i][1]) {
+                is_neighbour = true;
+            }
+            // we overlap with q at some point
+            // a) ---------------q
+            // ----------- we
+            // b) --------- q
+            // --------- we
+            if ((our_size[i][0] >= new_size[i][0] && our_size[i][0] <= new_size[i][1]) ||
+                (our_size[i][1] >= new_size[i][0] && our_size[i][1] <= new_size[i][1]))
+            {
+                is_neighbour = true;
+            }
+            // we are larger than q
+            // ---- q
+            // ---------- we
+            if (our_size[i][1] >= new_size[i][1] && our_size[i][0] <= new_size[i][0]) {
+                is_neighbour = true;
+            }
+        }
+
+        if (is_neighbour && QuadTree.verbose) {
+            System.out.println(this + " neighbour of: " + q);
+        }
+
+        return is_neighbour;
+    }
+
+    /**
+     * Perform pruning and clustering of the quadtree
+     * 
+     * Pruning according to: Tim Menzies, Andrew Butcher, David Cok, Andrian Marcus, Lucas Layman,
+     * Forrest Shull, Burak Turhan, Thomas Zimmermann,
+     * "Local versus Global Lessons for Defect Prediction and Effort Estimation," IEEE Transactions
+     * on Software Engineering, vol. 39, no. 6, pp. 822-834, June, 2013
+     * 
+     * 1) get list of leaf quadrants 2) sort by their density 3) set stop_rule to 0.5 * highest
+     * Density in the list 4) merge all nodes with a density > stop_rule to the new cluster and
+     * remove all from list 5) repeat
+     * 
+     * @param list
+     *            List of QuadTree (children only)
+     */
+    public void gridClustering(ArrayList<QuadTree> list) {
+
+        if (list.size() == 0) {
+            return;
+        }
+
+        double stop_rule;
+        QuadTree biggest;
+        QuadTree current;
+
+        // current clusterlist
+        ArrayList<QuadTreePayload<Instance>> current_cluster;
+
+        // remove list (for removal of items after scanning of the list)
+        ArrayList<Integer> remove = new ArrayList<Integer>();
+
+        // 1. find biggest, and add it
+        biggest = list.get(list.size() - 1);
+        stop_rule = biggest.getDensity() * 0.5;
+
+        current_cluster = new ArrayList<QuadTreePayload<Instance>>();
+        current_cluster.addAll(biggest.getPayload());
+
+        // remove the biggest because we are starting with it
+        remove.add(list.size() - 1);
+
+        ArrayList<Double[][]> tmpSize = new ArrayList<Double[][]>();
+        tmpSize.add(biggest.getSizeDouble());
+
+        // check the items for their density
+        for (int i = list.size() - 1; i >= 0; i--) {
+            current = list.get(i);
+
+            // 2. find neighbors with correct density
+            // if density > stop_rule and is_neighbour add to cluster and remove from list
+            if (current.getDensity() > stop_rule && !current.equals(biggest) &&
+                current.isNeighbour(biggest))
+            {
+                current_cluster.addAll(current.getPayload());
+
+                // add it to remove list (we cannot remove it inside the loop because it would move
+                // the index)
+                remove.add(i);
+
+                // get the size
+                tmpSize.add(current.getSizeDouble());
+            }
+        }
+
+        // 3. remove our removal candidates from the list
+        for (Integer item : remove) {
+            list.remove((int) item);
+        }
+
+        // 4. add to cluster
+        QuadTree.ccluster.add(current_cluster);
+
+        // 5. add sizes of our current (biggest) this adds a number of sizes (all QuadTree Instances
+        // belonging to this cluster)
+        // we need that to classify test instances to a cluster later
+        Integer cnumber = new Integer(QuadTree.ccluster.size() - 1);
+        if (QuadTree.csize.containsKey(cnumber) == false) {
+            QuadTree.csize.put(cnumber, tmpSize);
+        }
+
+        // repeat
+        this.gridClustering(list);
+    }
+
+    public void printInfo() {
+        System.out.println("we have " + ccluster.size() + " clusters");
+
+        for (int i = 0; i < ccluster.size(); i++) {
+            System.out.println("cluster: " + i + " size: " + ccluster.get(i).size());
+        }
+    }
+
+    /**
+     * Helper Method to get a sorted list (by density) for all children
+     * 
+     * @param q
+     *            QuadTree
+     * @return Sorted ArrayList of quadtrees
+     */
+    public ArrayList<QuadTree> getList(QuadTree q) {
+        this.generateList(q);
+
+        Collections.sort(this.l, new Comparator<QuadTree>() {
+            @Override
+            public int compare(QuadTree x1, QuadTree x2) {
+                return Double.compare(x1.getDensity(), x2.getDensity());
+            }
+        });
+
+        return this.l;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/RandomClass.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/RandomClass.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/RandomClass.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -11,43 +25,46 @@
  * Assigns a random class label to the instance it is evaluated on.
  * 
- * The range of class labels are hardcoded in fixedClassValues.
- * This can later be extended to take values from the XML configuration. 
+ * The range of class labels are hardcoded in fixedClassValues. This can later be extended to take
+ * values from the XML configuration.
  */
-public class RandomClass extends AbstractClassifier implements ITrainingStrategy, IWekaCompatibleTrainer {
+public class RandomClass extends AbstractClassifier implements ITrainingStrategy,
+    IWekaCompatibleTrainer
+{
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private double[] fixedClassValues = {0.0d, 1.0d};
-	
-	@Override
-	public void setParameter(String parameters) {
-		// do nothing, maybe take percentages for distribution later
-	}
+    private double[] fixedClassValues =
+        { 0.0d, 1.0d };
 
-	@Override
-	public void buildClassifier(Instances arg0) throws Exception {
-		// do nothing
-	}
+    @Override
+    public void setParameter(String parameters) {
+        // do nothing, maybe take percentages for distribution later
+    }
 
-	@Override
-	public Classifier getClassifier() {
-		return this;
-	}
+    @Override
+    public void buildClassifier(Instances arg0) throws Exception {
+        // do nothing
+    }
 
-	@Override
-	public void apply(Instances traindata) {
-		// nothing to do
-	}
+    @Override
+    public Classifier getClassifier() {
+        return this;
+    }
 
-	@Override
-	public String getName() {
-		return "RandomClass";
-	}
-	
-	@Override
-	public double classifyInstance(Instance instance) {
-		Random rand = new Random();
-	    int randomNum = rand.nextInt(this.fixedClassValues.length);
-		return this.fixedClassValues[randomNum];
-	}
+    @Override
+    public void apply(Instances traindata) {
+        // nothing to do
+    }
+
+    @Override
+    public String getName() {
+        return "RandomClass";
+    }
+
+    @Override
+    public double classifyInstance(Instance instance) {
+        Random rand = new Random();
+        int randomNum = rand.nextInt(this.fixedClassValues.length);
+        return this.fixedClassValues[randomNum];
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaggingTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaggingTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaggingTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -18,12 +32,12 @@
 /**
  * Programmatic WekaBaggingTraining
- *
- * first parameter is Trainer Name.
- * second parameter is class name
  * 
- * all subsequent parameters are configuration params (for example for trees)
- * Cross Validation params always come last and are prepended with -CVPARAM
+ * first parameter is Trainer Name. second parameter is class name
+ * 
+ * all subsequent parameters are configuration params (for example for trees) Cross Validation
+ * params always come last and are prepended with -CVPARAM
  * 
  * XML Configurations for Weka Classifiers:
+ * 
  * <pre>
  * {@code
@@ -37,91 +51,95 @@
 public class WekaBaggingTraining extends WekaBaseTraining implements ISetWiseTrainingStrategy {
 
-	private final TraindatasetBagging classifier = new TraindatasetBagging();
-	
-	@Override
-	public Classifier getClassifier() {
-		return classifier;
-	}
-	
-	@Override
-	public void apply(SetUniqueList<Instances> traindataSet) {
-		PrintStream errStr	= System.err;
-		System.setErr(new PrintStream(new NullOutputStream()));
-		try {
-			classifier.buildClassifier(traindataSet);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		} finally {
-			System.setErr(errStr);
-		}
-	}
-	
-	public class TraindatasetBagging extends AbstractClassifier {
-		
-		private static final long serialVersionUID = 1L;
+    private final TraindatasetBagging classifier = new TraindatasetBagging();
 
-		private List<Instances> trainingData = null;
-		
-		private List<Classifier> classifiers = null;
-	
-		@Override
-		public double classifyInstance(Instance instance) {
-			if( classifiers==null ) {
-				return 0.0;
-			}
-			
-			double classification = 0.0;
-			for( int i=0 ; i<classifiers.size(); i++ ) {
-				Classifier classifier = classifiers.get(i);
-				Instances traindata = trainingData.get(i);
-				
-				Set<String> attributeNames = new HashSet<>();
-				for( int j=0; j<traindata.numAttributes(); j++ ) {
-					attributeNames.add(traindata.attribute(j).name());
-				}
-				
-				double[] values = new double[traindata.numAttributes()];
-				int index = 0;
-				for( int j=0; j<instance.numAttributes(); j++ ) {
-					if( attributeNames.contains(instance.attribute(j).name())) {
-						values[index] = instance.value(j);
-						index++;
-					}
-				}
-				
-				Instances tmp = new Instances(traindata);
-				tmp.clear();
-				Instance instCopy = new DenseInstance(instance.weight(), values);
-				instCopy.setDataset(tmp);
-				try {
-					classification += classifier.classifyInstance(instCopy);
-				} catch (Exception e) {
-					throw new RuntimeException("bagging classifier could not classify an instance", e);
-				}
-			}
-			classification /= classifiers.size();
-			return (classification>=0.5) ? 1.0 : 0.0;
-		}
-		
-		public void buildClassifier(SetUniqueList<Instances> traindataSet) throws Exception {
-			classifiers = new LinkedList<>();
-			trainingData = new LinkedList<>();
-			for( Instances traindata : traindataSet ) {
-				Classifier classifier = setupClassifier();
-				classifier.buildClassifier(traindata);
-				classifiers.add(classifier);
-				trainingData.add(new Instances(traindata));
-			}
-		}
-	
-		@Override
-		public void buildClassifier(Instances traindata) throws Exception {
-			classifiers = new LinkedList<>();
-			trainingData = new LinkedList<>();
-			final Classifier classifier = setupClassifier();
-			classifier.buildClassifier(traindata);
-			classifiers.add(classifier);
-			trainingData.add(new Instances(traindata));
-		}
-	}
+    @Override
+    public Classifier getClassifier() {
+        return classifier;
+    }
+
+    @Override
+    public void apply(SetUniqueList<Instances> traindataSet) {
+        PrintStream errStr = System.err;
+        System.setErr(new PrintStream(new NullOutputStream()));
+        try {
+            classifier.buildClassifier(traindataSet);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        finally {
+            System.setErr(errStr);
+        }
+    }
+
+    public class TraindatasetBagging extends AbstractClassifier {
+
+        private static final long serialVersionUID = 1L;
+
+        private List<Instances> trainingData = null;
+
+        private List<Classifier> classifiers = null;
+
+        @Override
+        public double classifyInstance(Instance instance) {
+            if (classifiers == null) {
+                return 0.0;
+            }
+
+            double classification = 0.0;
+            for (int i = 0; i < classifiers.size(); i++) {
+                Classifier classifier = classifiers.get(i);
+                Instances traindata = trainingData.get(i);
+
+                Set<String> attributeNames = new HashSet<>();
+                for (int j = 0; j < traindata.numAttributes(); j++) {
+                    attributeNames.add(traindata.attribute(j).name());
+                }
+
+                double[] values = new double[traindata.numAttributes()];
+                int index = 0;
+                for (int j = 0; j < instance.numAttributes(); j++) {
+                    if (attributeNames.contains(instance.attribute(j).name())) {
+                        values[index] = instance.value(j);
+                        index++;
+                    }
+                }
+
+                Instances tmp = new Instances(traindata);
+                tmp.clear();
+                Instance instCopy = new DenseInstance(instance.weight(), values);
+                instCopy.setDataset(tmp);
+                try {
+                    classification += classifier.classifyInstance(instCopy);
+                }
+                catch (Exception e) {
+                    throw new RuntimeException("bagging classifier could not classify an instance",
+                                               e);
+                }
+            }
+            classification /= classifiers.size();
+            return (classification >= 0.5) ? 1.0 : 0.0;
+        }
+
+        public void buildClassifier(SetUniqueList<Instances> traindataSet) throws Exception {
+            classifiers = new LinkedList<>();
+            trainingData = new LinkedList<>();
+            for (Instances traindata : traindataSet) {
+                Classifier classifier = setupClassifier();
+                classifier.buildClassifier(traindata);
+                classifiers.add(classifier);
+                trainingData.add(new Instances(traindata));
+            }
+        }
+
+        @Override
+        public void buildClassifier(Instances traindata) throws Exception {
+            classifiers = new LinkedList<>();
+            trainingData = new LinkedList<>();
+            final Classifier classifier = setupClassifier();
+            classifier.buildClassifier(traindata);
+            classifiers.add(classifier);
+            trainingData.add(new Instances(traindata));
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaseTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaseTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaBaseTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -15,101 +29,110 @@
  * Allows specification of the Weka classifier and its params in the XML experiment configuration.
  * 
- * Important conventions of the XML format: 
- * Cross Validation params always come last and are prepended with -CVPARAM
- * Example: <trainer name="WekaTraining" param="RandomForestLocal weka.classifiers.trees.RandomForest -CVPARAM I 5 25 5"/>
+ * Important conventions of the XML format: Cross Validation params always come last and are
+ * prepended with -CVPARAM Example: <trainer name="WekaTraining"
+ * param="RandomForestLocal weka.classifiers.trees.RandomForest -CVPARAM I 5 25 5"/>
  */
 public abstract class WekaBaseTraining implements IWekaCompatibleTrainer {
-	
-	protected Classifier classifier = null;
-	protected String classifierClassName;
-	protected String classifierName;
-	protected String[] classifierParams;
-	
-	@Override
-	public void setParameter(String parameters) {
-		String[] params = parameters.split(" ");
 
-		// first part of the params is the classifierName (e.g. SMORBF)
-		classifierName = params[0];
-		
-		// the following parameters can be copied from weka!
-		
-		// second param is classifierClassName (e.g. weka.classifiers.functions.SMO)
-		classifierClassName = params[1];
-	
-		// rest are params to the specified classifier (e.g. -K weka.classifiers.functions.supportVector.RBFKernel)
-		classifierParams = Arrays.copyOfRange(params, 2, params.length);
-		
-		classifier = setupClassifier();
-	}
+    protected Classifier classifier = null;
+    protected String classifierClassName;
+    protected String classifierName;
+    protected String[] classifierParams;
 
-	@Override
-	public Classifier getClassifier() {
-		return classifier;
-	}
+    @Override
+    public void setParameter(String parameters) {
+        String[] params = parameters.split(" ");
 
-	public Classifier setupClassifier() {
-		Classifier cl = null;
-		try{
-			@SuppressWarnings("rawtypes")
-			Class c = Class.forName(classifierClassName);
-			Classifier obj = (Classifier) c.newInstance();
-			
-			// Filter out -CVPARAM, these are special because they do not belong to the Weka classifier class as parameters
-			String[] param = Arrays.copyOf(classifierParams, classifierParams.length);
-			String[] cvparam = {};
-			boolean cv = false;
-			for ( int i=0; i < classifierParams.length; i++ ) {
-				if(classifierParams[i].equals("-CVPARAM")) {
-					// rest of array are cvparam
-					cvparam = Arrays.copyOfRange(classifierParams, i+1, classifierParams.length);
-					
-					// before this we have normal params
-					param = Arrays.copyOfRange(classifierParams, 0, i);
-					
-					cv = true;
-					break;
-				}
-			}
-			
-			// set classifier params
-			((OptionHandler)obj).setOptions(param);
-			cl = obj;
-			
-			// we have cross val params
-			// cant check on cvparam.length here, it may not be initialized			
-			if(cv) {
-				final CVParameterSelection ps = new CVParameterSelection();
-				ps.setClassifier(obj);
-				ps.setNumFolds(5);
-				//ps.addCVParameter("I 5 25 5");
-				for( int i=1 ; i<cvparam.length/4 ; i++ ) {
-					ps.addCVParameter(Arrays.asList(Arrays.copyOfRange(cvparam, 0, 4*i)).toString().replaceAll(", ", " ").replaceAll("^\\[|\\]$", ""));
-				}
-				
-				cl = ps;
-			}
+        // first part of the params is the classifierName (e.g. SMORBF)
+        classifierName = params[0];
 
-		}catch(ClassNotFoundException e) {
-			Console.traceln(Level.WARNING, String.format("class not found: %s", e.toString()));
-			e.printStackTrace();
-		} catch (InstantiationException e) {
-			Console.traceln(Level.WARNING, String.format("Instantiation Exception: %s", e.toString()));
-			e.printStackTrace();
-		} catch (IllegalAccessException e) {
-			Console.traceln(Level.WARNING, String.format("Illegal Access Exception: %s", e.toString()));
-			e.printStackTrace();
-		} catch (Exception e) {
-			Console.traceln(Level.WARNING, String.format("Exception: %s", e.toString()));
-			e.printStackTrace();
-		}
-		
-		return cl;
-	}
+        // the following parameters can be copied from weka!
 
-	@Override
-	public String getName() {
-		return classifierName;
-	}
-	
+        // second param is classifierClassName (e.g. weka.classifiers.functions.SMO)
+        classifierClassName = params[1];
+
+        // rest are params to the specified classifier (e.g. -K
+        // weka.classifiers.functions.supportVector.RBFKernel)
+        classifierParams = Arrays.copyOfRange(params, 2, params.length);
+
+        classifier = setupClassifier();
+    }
+
+    @Override
+    public Classifier getClassifier() {
+        return classifier;
+    }
+
+    public Classifier setupClassifier() {
+        Classifier cl = null;
+        try {
+            @SuppressWarnings("rawtypes")
+            Class c = Class.forName(classifierClassName);
+            Classifier obj = (Classifier) c.newInstance();
+
+            // Filter out -CVPARAM, these are special because they do not belong to the Weka
+            // classifier class as parameters
+            String[] param = Arrays.copyOf(classifierParams, classifierParams.length);
+            String[] cvparam = { };
+            boolean cv = false;
+            for (int i = 0; i < classifierParams.length; i++) {
+                if (classifierParams[i].equals("-CVPARAM")) {
+                    // rest of array are cvparam
+                    cvparam = Arrays.copyOfRange(classifierParams, i + 1, classifierParams.length);
+
+                    // before this we have normal params
+                    param = Arrays.copyOfRange(classifierParams, 0, i);
+
+                    cv = true;
+                    break;
+                }
+            }
+
+            // set classifier params
+            ((OptionHandler) obj).setOptions(param);
+            cl = obj;
+
+            // we have cross val params
+            // can't check on cvparam.length here, it may not be initialized
+            if (cv) {
+                final CVParameterSelection ps = new CVParameterSelection();
+                ps.setClassifier(obj);
+                ps.setNumFolds(5);
+                // ps.addCVParameter("I 5 25 5");
+                for (int i = 1; i < cvparam.length / 4; i++) {
+                    ps.addCVParameter(Arrays.asList(Arrays.copyOfRange(cvparam, 0, 4 * i))
+                        .toString().replaceAll(", ", " ").replaceAll("^\\[|\\]$", ""));
+                }
+
+                cl = ps;
+            }
+
+        }
+        catch (ClassNotFoundException e) {
+            Console.traceln(Level.WARNING, String.format("class not found: %s", e.toString()));
+            e.printStackTrace();
+        }
+        catch (InstantiationException e) {
+            Console.traceln(Level.WARNING,
+                            String.format("Instantiation Exception: %s", e.toString()));
+            e.printStackTrace();
+        }
+        catch (IllegalAccessException e) {
+            Console.traceln(Level.WARNING,
+                            String.format("Illegal Access Exception: %s", e.toString()));
+            e.printStackTrace();
+        }
+        catch (Exception e) {
+            Console.traceln(Level.WARNING, String.format("Exception: %s", e.toString()));
+            e.printStackTrace();
+        }
+
+        return cl;
+    }
+
+    @Override
+    public String getName() {
+        return classifierName;
+    }
+
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalEMTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalEMTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalEMTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -24,182 +38,186 @@
  * WekaLocalEMTraining
  * 
- * Local Trainer with EM Clustering for data partitioning.
- * Currently supports only EM Clustering.
- * 
- * 1. Cluster training data
- * 2. for each cluster train a classifier with training data from cluster
+ * Local Trainer with EM Clustering for data partitioning. Currently supports only EM Clustering.
+ * 
+ * 1. Cluster training data 2. for each cluster train a classifier with training data from cluster
  * 3. match test data instance to a cluster, then classify with classifier from the cluster
  * 
- * XML configuration:
- * <!-- because of clustering -->
- * <preprocessor name="Normalization" param=""/>
- * 
- * <!-- cluster trainer -->
- * <trainer name="WekaLocalEMTraining" param="NaiveBayes weka.classifiers.bayes.NaiveBayes" />
+ * XML configuration: <!-- because of clustering --> <preprocessor name="Normalization" param=""/>
+ * 
+ * <!-- cluster trainer --> <trainer name="WekaLocalEMTraining"
+ * param="NaiveBayes weka.classifiers.bayes.NaiveBayes" />
  */
 public class WekaLocalEMTraining extends WekaBaseTraining implements ITrainingStrategy {
 
-	private final TraindatasetCluster classifier = new TraindatasetCluster();
-	
-	@Override
-	public Classifier getClassifier() {
-		return classifier;
-	}
-	
-	@Override
-	public void apply(Instances traindata) {
-		PrintStream errStr	= System.err;
-		System.setErr(new PrintStream(new NullOutputStream()));
-		try {
-			classifier.buildClassifier(traindata);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		} finally {
-			System.setErr(errStr);
-		}
-	}
-	
-
-	public class TraindatasetCluster extends AbstractClassifier {
-		
-		private static final long serialVersionUID = 1L;
-
-		private EM clusterer = null;
-
-		private HashMap<Integer, Classifier> cclassifier;
-		private HashMap<Integer, Instances> ctraindata; 
-		
-		
-		/**
-		 * Helper method that gives us a clean instance copy with 
-		 * the values of the instancelist of the first parameter. 
-		 * 
-		 * @param instancelist with attributes
-		 * @param instance with only values
-		 * @return copy of the instance
-		 */
-		private Instance createInstance(Instances instances, Instance instance) {
-			// attributes for feeding instance to classifier
-			Set<String> attributeNames = new HashSet<>();
-			for( int j=0; j<instances.numAttributes(); j++ ) {
-				attributeNames.add(instances.attribute(j).name());
-			}
-			
-			double[] values = new double[instances.numAttributes()];
-			int index = 0;
-			for( int j=0; j<instance.numAttributes(); j++ ) {
-				if( attributeNames.contains(instance.attribute(j).name())) {
-					values[index] = instance.value(j);
-					index++;
-				}
-			}
-			
-			Instances tmp = new Instances(instances);
-			tmp.clear();
-			Instance instCopy = new DenseInstance(instance.weight(), values);
-			instCopy.setDataset(tmp);
-			
-			return instCopy;
-		}
-		
-		@Override
-		public double classifyInstance(Instance instance) {
-			double ret = 0;
-			try {
-				// 1. copy the instance (keep the class attribute)
-				Instances traindata = ctraindata.get(0);
-				Instance classInstance = createInstance(traindata, instance);
-				
-				// 2. remove class attribute before clustering
-				Remove filter = new Remove();
-				filter.setAttributeIndices("" + (traindata.classIndex() + 1));
-				filter.setInputFormat(traindata);
-				traindata = Filter.useFilter(traindata, filter);
-				
-				// 3. copy the instance (without the class attribute) for clustering
-				Instance clusterInstance = createInstance(traindata, instance);
-				
-				// 4. match instance without class attribute to a cluster number
-				int cnum = clusterer.clusterInstance(clusterInstance);
-				
-				// 5. classify instance with class attribute to the classifier of that cluster number
-				ret = cclassifier.get(cnum).classifyInstance(classInstance);
-				
-			}catch( Exception e ) {
-				Console.traceln(Level.INFO, String.format("ERROR matching instance to cluster!"));
-				throw new RuntimeException(e);
-			}
-			return ret;
-		}
-
-		@Override
-		public void buildClassifier(Instances traindata) throws Exception {
-			
-			// 1. copy training data
-			Instances train = new Instances(traindata);
-			
-			// 2. remove class attribute for clustering
-			Remove filter = new Remove();
-			filter.setAttributeIndices("" + (train.classIndex() + 1));
-			filter.setInputFormat(train);
-			train = Filter.useFilter(train, filter);
-			
-			// new objects
-			cclassifier = new HashMap<Integer, Classifier>();
-			ctraindata = new HashMap<Integer, Instances>();
-						
-			Instances ctrain;
-			int maxNumClusters = train.size();
-			boolean sufficientInstancesInEachCluster;
-			do { // while(onlyTarget)
-				sufficientInstancesInEachCluster = true;
-				clusterer = new EM();
-				clusterer.setMaximumNumberOfClusters(maxNumClusters);
-				clusterer.buildClusterer(train);
-				
-				// 4. get cluster membership of our traindata
-				//AddCluster cfilter = new AddCluster();
-				//cfilter.setClusterer(clusterer);
-				//cfilter.setInputFormat(train);
-				//Instances ctrain = Filter.useFilter(train, cfilter);
-				
-				ctrain = new Instances(train);
-				ctraindata = new HashMap<>();
-				
-				// get traindata per cluster
-				for ( int j=0; j < ctrain.numInstances(); j++ ) {
-					// get the cluster number from the attributes, subract 1 because if we clusterInstance we get 0-n, and this is 1-n
-					//cnumber = Integer.parseInt(ctrain.get(j).stringValue(ctrain.get(j).numAttributes()-1).replace("cluster", "")) - 1;
-					
-					int cnumber = clusterer.clusterInstance(ctrain.get(j));
-					// add training data to list of instances for this cluster number
-					if ( !ctraindata.containsKey(cnumber) ) {
-						ctraindata.put(cnumber, new Instances(traindata));
-						ctraindata.get(cnumber).delete();
-					}
-					ctraindata.get(cnumber).add(traindata.get(j));
-				}
-				
-				for( Entry<Integer,Instances> entry : ctraindata.entrySet() ) {
-					Instances instances = entry.getValue();
-					int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
-					for( int count : counts ) {
-						sufficientInstancesInEachCluster &= count>0;
-					}
-					sufficientInstancesInEachCluster &= instances.numInstances()>=5;
-				}
-				maxNumClusters = clusterer.numberOfClusters()-1;
-			} while(!sufficientInstancesInEachCluster);
-			
-			// train one classifier per cluster, we get the cluster number from the training data
-			Iterator<Integer> clusternumber = ctraindata.keySet().iterator();
-			while ( clusternumber.hasNext() ) {
-				int cnumber = clusternumber.next();			
-				cclassifier.put(cnumber,setupClassifier());
-				cclassifier.get(cnumber).buildClassifier(ctraindata.get(cnumber));
-				
-				//Console.traceln(Level.INFO, String.format("classifier in cluster "+cnumber));
-			}
-		}
-	}
+    private final TraindatasetCluster classifier = new TraindatasetCluster();
+
+    @Override
+    public Classifier getClassifier() {
+        return classifier;
+    }
+
+    @Override
+    public void apply(Instances traindata) {
+        PrintStream errStr = System.err;
+        System.setErr(new PrintStream(new NullOutputStream()));
+        try {
+            classifier.buildClassifier(traindata);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        finally {
+            System.setErr(errStr);
+        }
+    }
+
+    public class TraindatasetCluster extends AbstractClassifier {
+
+        private static final long serialVersionUID = 1L;
+
+        private EM clusterer = null;
+
+        private HashMap<Integer, Classifier> cclassifier;
+        private HashMap<Integer, Instances> ctraindata;
+
+        /**
+         * Helper method that gives us a clean instance copy with the values of the instancelist of
+         * the first parameter.
+         * 
+         * @param instances
+         *            with attributes
+         * @param instance
+         *            with only values
+         * @return copy of the instance
+         */
+        private Instance createInstance(Instances instances, Instance instance) {
+            // attributes for feeding instance to classifier
+            Set<String> attributeNames = new HashSet<>();
+            for (int j = 0; j < instances.numAttributes(); j++) {
+                attributeNames.add(instances.attribute(j).name());
+            }
+
+            double[] values = new double[instances.numAttributes()];
+            int index = 0;
+            for (int j = 0; j < instance.numAttributes(); j++) {
+                if (attributeNames.contains(instance.attribute(j).name())) {
+                    values[index] = instance.value(j);
+                    index++;
+                }
+            }
+
+            Instances tmp = new Instances(instances);
+            tmp.clear();
+            Instance instCopy = new DenseInstance(instance.weight(), values);
+            instCopy.setDataset(tmp);
+
+            return instCopy;
+        }
+
+        @Override
+        public double classifyInstance(Instance instance) {
+            double ret = 0;
+            try {
+                // 1. copy the instance (keep the class attribute)
+                Instances traindata = ctraindata.get(0);
+                Instance classInstance = createInstance(traindata, instance);
+
+                // 2. remove class attribute before clustering
+                Remove filter = new Remove();
+                filter.setAttributeIndices("" + (traindata.classIndex() + 1));
+                filter.setInputFormat(traindata);
+                traindata = Filter.useFilter(traindata, filter);
+
+                // 3. copy the instance (without the class attribute) for clustering
+                Instance clusterInstance = createInstance(traindata, instance);
+
+                // 4. match instance without class attribute to a cluster number
+                int cnum = clusterer.clusterInstance(clusterInstance);
+
+                // 5. classify instance with class attribute to the classifier of that cluster
+                // number
+                ret = cclassifier.get(cnum).classifyInstance(classInstance);
+
+            }
+            catch (Exception e) {
+                Console.traceln(Level.INFO, String.format("ERROR matching instance to cluster!"));
+                throw new RuntimeException(e);
+            }
+            return ret;
+        }
+
+        @Override
+        public void buildClassifier(Instances traindata) throws Exception {
+
+            // 1. copy training data
+            Instances train = new Instances(traindata);
+
+            // 2. remove class attribute for clustering
+            Remove filter = new Remove();
+            filter.setAttributeIndices("" + (train.classIndex() + 1));
+            filter.setInputFormat(train);
+            train = Filter.useFilter(train, filter);
+
+            // new objects
+            cclassifier = new HashMap<Integer, Classifier>();
+            ctraindata = new HashMap<Integer, Instances>();
+
+            Instances ctrain;
+            int maxNumClusters = train.size();
+            boolean sufficientInstancesInEachCluster;
+            do { // while(onlyTarget)
+                sufficientInstancesInEachCluster = true;
+                clusterer = new EM();
+                clusterer.setMaximumNumberOfClusters(maxNumClusters);
+                clusterer.buildClusterer(train);
+
+                // 4. get cluster membership of our traindata
+                // AddCluster cfilter = new AddCluster();
+                // cfilter.setClusterer(clusterer);
+                // cfilter.setInputFormat(train);
+                // Instances ctrain = Filter.useFilter(train, cfilter);
+
+                ctrain = new Instances(train);
+                ctraindata = new HashMap<>();
+
+                // get traindata per cluster
+                for (int j = 0; j < ctrain.numInstances(); j++) {
+                    // get the cluster number from the attributes, subtract 1 because if we
+                    // clusterInstance we get 0-n, and this is 1-n
+                    // cnumber =
+                    // Integer.parseInt(ctrain.get(j).stringValue(ctrain.get(j).numAttributes()-1).replace("cluster",
+                    // "")) - 1;
+
+                    int cnumber = clusterer.clusterInstance(ctrain.get(j));
+                    // add training data to list of instances for this cluster number
+                    if (!ctraindata.containsKey(cnumber)) {
+                        ctraindata.put(cnumber, new Instances(traindata));
+                        ctraindata.get(cnumber).delete();
+                    }
+                    ctraindata.get(cnumber).add(traindata.get(j));
+                }
+
+                for (Entry<Integer, Instances> entry : ctraindata.entrySet()) {
+                    Instances instances = entry.getValue();
+                    int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
+                    for (int count : counts) {
+                        sufficientInstancesInEachCluster &= count > 0;
+                    }
+                    sufficientInstancesInEachCluster &= instances.numInstances() >= 5;
+                }
+                maxNumClusters = clusterer.numberOfClusters() - 1;
+            }
+            while (!sufficientInstancesInEachCluster);
+
+            // train one classifier per cluster, we get the cluster number from the training data
+            Iterator<Integer> clusternumber = ctraindata.keySet().iterator();
+            while (clusternumber.hasNext()) {
+                int cnumber = clusternumber.next();
+                cclassifier.put(cnumber, setupClassifier());
+                cclassifier.get(cnumber).buildClassifier(ctraindata.get(cnumber));
+
+                // Console.traceln(Level.INFO, String.format("classifier in cluster "+cnumber));
+            }
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalFQTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalFQTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaLocalFQTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -24,646 +38,696 @@
 
 /**
- * Trainer with reimplementation of WHERE clustering algorithm from:
- * Tim Menzies, Andrew Butcher, David Cok, Andrian Marcus, Lucas Layman, 
- * Forrest Shull, Burak Turhan, Thomas Zimmermann, 
- * "Local versus Global Lessons for Defect Prediction and Effort Estimation," 
- * IEEE Transactions on Software Engineering, vol. 39, no. 6, pp. 822-834, June, 2013  
+ * Trainer with reimplementation of WHERE clustering algorithm from: Tim Menzies, Andrew Butcher,
+ * David Cok, Andrian Marcus, Lucas Layman, Forrest Shull, Burak Turhan, Thomas Zimmermann,
+ * "Local versus Global Lessons for Defect Prediction and Effort Estimation," IEEE Transactions on
+ * Software Engineering, vol. 39, no. 6, pp. 822-834, June, 2013
  * 
- * With WekaLocalFQTraining we do the following:
- * 1) Run the Fastmap algorithm on all training data, let it calculate the 2 most significant 
- *    dimensions and projections of each instance to these dimensions
- * 2) With these 2 dimensions we span a QuadTree which gets recursively split on median(x) and median(y) values.
- * 3) We cluster the QuadTree nodes together if they have similar density (50%)
- * 4) We save the clusters and their training data
- * 5) We only use clusters with > ALPHA instances (currently Math.sqrt(SIZE)), rest is discarded with the training data of this cluster
- * 6) We train a Weka classifier for each cluster with the clusters training data
- * 7) We recalculate Fastmap distances for a single instance with the old pivots and then try to find a cluster containing the coords of the instance.
- * 7.1.) If we can not find a cluster (due to coords outside of all clusters) we find the nearest cluster.
- * 8) We classify the Instance with the classifier and traindata from the Cluster we found in 7.
+ * With WekaLocalFQTraining we do the following: 1) Run the Fastmap algorithm on all training data,
+ * let it calculate the 2 most significant dimensions and projections of each instance to these
+ * dimensions 2) With these 2 dimensions we span a QuadTree which gets recursively split on
+ * median(x) and median(y) values. 3) We cluster the QuadTree nodes together if they have similar
+ * density (50%) 4) We save the clusters and their training data 5) We only use clusters with >
+ * ALPHA instances (currently Math.sqrt(SIZE)), rest is discarded with the training data of this
+ * cluster 6) We train a Weka classifier for each cluster with the clusters training data 7) We
+ * recalculate Fastmap distances for a single instance with the old pivots and then try to find a
+ * cluster containing the coords of the instance. 7.1.) If we cannot find a cluster (due to coords
+ * outside of all clusters) we find the nearest cluster. 8) We classify the Instance with the
+ * classifier and traindata from the Cluster we found in 7.
  */
 public class WekaLocalFQTraining extends WekaBaseTraining implements ITrainingStrategy {
-	
-	private final TraindatasetCluster classifier = new TraindatasetCluster();
-	
-	@Override
-	public Classifier getClassifier() {
-		return classifier;
-	}
-	
-	@Override
-	public void apply(Instances traindata) {
-		PrintStream errStr	= System.err;
-		System.setErr(new PrintStream(new NullOutputStream()));
-		try {
-			classifier.buildClassifier(traindata);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		} finally {
-			System.setErr(errStr);
-		}
-	}
-	
-	
-	public class TraindatasetCluster extends AbstractClassifier {
-		
-		private static final long serialVersionUID = 1L;
-		
-		/* classifier per cluster */
-		private HashMap<Integer, Classifier> cclassifier;
-		
-		/* instances per cluster */
-		private HashMap<Integer, Instances> ctraindata; 
-		
-		/* holds the instances and indices of the pivot objects of the Fastmap calculation in buildClassifier*/
-		private HashMap<Integer, Instance> cpivots;
-		
-		/* holds the indices of the pivot objects for x,y and the dimension [x,y][dimension]*/
-		private int[][] cpivotindices;
-
-		/* holds the sizes of the cluster multiple "boxes" per cluster */
-		private HashMap<Integer, ArrayList<Double[][]>> csize;
-		
-		/* debug vars */
-		@SuppressWarnings("unused")
-		private boolean show_biggest = true;
-		
-		@SuppressWarnings("unused")
-		private int CFOUND = 0;
-		@SuppressWarnings("unused")
-		private int CNOTFOUND = 0;
-		
-		
-		private Instance createInstance(Instances instances, Instance instance) {
-			// attributes for feeding instance to classifier
-			Set<String> attributeNames = new HashSet<>();
-			for( int j=0; j<instances.numAttributes(); j++ ) {
-				attributeNames.add(instances.attribute(j).name());
-			}
-			
-			double[] values = new double[instances.numAttributes()];
-			int index = 0;
-			for( int j=0; j<instance.numAttributes(); j++ ) {
-				if( attributeNames.contains(instance.attribute(j).name())) {
-					values[index] = instance.value(j);
-					index++;
-				}
-			}
-			
-			Instances tmp = new Instances(instances);
-			tmp.clear();
-			Instance instCopy = new DenseInstance(instance.weight(), values);
-			instCopy.setDataset(tmp);
-			
-			return instCopy;
-		}
-		
-		/**
-		 * Because Fastmap saves only the image not the values of the attributes it used
-		 * we can not use the old data directly to classify single instances to clusters.
-		 * 
-		 * To classify a single instance we do a new fastmap computation with only the instance and
-		 * the old pivot elements.
-		 * 
-		 * After that we find the cluster with our fastmap result for x and y.
-		 */
-		@Override
-		public double classifyInstance(Instance instance) {
-			
-			double ret = 0;
-			try {
-				// classinstance gets passed to classifier
-				Instances traindata = ctraindata.get(0);
-				Instance classInstance = createInstance(traindata, instance);
-
-				// this one keeps the class attribute
-				Instances traindata2 = ctraindata.get(1);  
-				
-				// remove class attribute before clustering
-				Remove filter = new Remove();
-				filter.setAttributeIndices("" + (traindata.classIndex() + 1));
-				filter.setInputFormat(traindata);
-				traindata = Filter.useFilter(traindata, filter);
-				Instance clusterInstance = createInstance(traindata, instance);
-				
-				Fastmap FMAP = new Fastmap(2);
-				EuclideanDistance dist = new EuclideanDistance(traindata);
-				
-				// we set our pivot indices [x=0,y=1][dimension]
-				int[][] npivotindices = new int[2][2];
-				npivotindices[0][0] = 1;
-				npivotindices[1][0] = 2;
-				npivotindices[0][1] = 3;
-				npivotindices[1][1] = 4;
-				
-				// build temp dist matrix (2 pivots per dimension + 1 instance we want to classify)
-				// the instance we want to classify comes first after that the pivot elements in the order defined above
-				double[][] distmat = new double[2*FMAP.target_dims+1][2*FMAP.target_dims+1];
-				distmat[0][0] = 0;
-				distmat[0][1] = dist.distance(clusterInstance, this.cpivots.get((Integer)this.cpivotindices[0][0]));
-				distmat[0][2] = dist.distance(clusterInstance, this.cpivots.get((Integer)this.cpivotindices[1][0]));
-				distmat[0][3] = dist.distance(clusterInstance, this.cpivots.get((Integer)this.cpivotindices[0][1]));
-				distmat[0][4] = dist.distance(clusterInstance, this.cpivots.get((Integer)this.cpivotindices[1][1]));
-				
-				distmat[1][0] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][0]), clusterInstance);
-				distmat[1][1] = 0;
-				distmat[1][2] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][0]), this.cpivots.get((Integer)this.cpivotindices[1][0]));
-				distmat[1][3] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][0]), this.cpivots.get((Integer)this.cpivotindices[0][1]));
-				distmat[1][4] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][0]), this.cpivots.get((Integer)this.cpivotindices[1][1]));
-				
-				distmat[2][0] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][0]), clusterInstance);
-				distmat[2][1] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][0]), this.cpivots.get((Integer)this.cpivotindices[0][0]));
-				distmat[2][2] = 0;
-				distmat[2][3] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][0]), this.cpivots.get((Integer)this.cpivotindices[0][1]));
-				distmat[2][4] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][0]), this.cpivots.get((Integer)this.cpivotindices[1][1]));
-				
-				distmat[3][0] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][1]), clusterInstance);
-				distmat[3][1] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][1]), this.cpivots.get((Integer)this.cpivotindices[0][0]));
-				distmat[3][2] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][1]), this.cpivots.get((Integer)this.cpivotindices[1][0]));
-				distmat[3][3] = 0;
-				distmat[3][4] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[0][1]), this.cpivots.get((Integer)this.cpivotindices[1][1]));
-
-				distmat[4][0] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][1]), clusterInstance);
-				distmat[4][1] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][1]), this.cpivots.get((Integer)this.cpivotindices[0][0]));
-				distmat[4][2] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][1]), this.cpivots.get((Integer)this.cpivotindices[1][0]));
-				distmat[4][3] = dist.distance(this.cpivots.get((Integer)this.cpivotindices[1][1]), this.cpivots.get((Integer)this.cpivotindices[0][1]));
-				distmat[4][4] = 0;
-				
-				
-				/* debug output: show biggest distance found within the new distance matrix
-				double biggest = 0;
-				for(int i=0; i < distmat.length; i++) {
-					for(int j=0; j < distmat[0].length; j++) {
-						if(biggest < distmat[i][j]) {
-							biggest = distmat[i][j];
-						}
-					}
-				}
-				if(this.show_biggest) {
-					Console.traceln(Level.INFO, String.format(""+clusterInstance));
-					Console.traceln(Level.INFO, String.format("biggest distances: "+ biggest));
-					this.show_biggest = false;
-				}
-				*/
-
-				FMAP.setDistmat(distmat);
-				FMAP.setPivots(npivotindices);
-				FMAP.calculate();
-				double[][] x = FMAP.getX();
-				double[] proj = x[0];
-
-				// debug output: show the calculated distance matrix, our result vektor for the instance and the complete result matrix
-				/*
-				Console.traceln(Level.INFO, "distmat:");
-			    for(int i=0; i<distmat.length; i++){
-			        for(int j=0; j<distmat[0].length; j++){
-			        	Console.trace(Level.INFO, String.format("%20s", distmat[i][j]));
-			        }
-			        Console.traceln(Level.INFO, "");
-			    }
-			    
-			    Console.traceln(Level.INFO, "vector:");
-			    for(int i=0; i < proj.length; i++) {
-			    	Console.trace(Level.INFO, String.format("%20s", proj[i]));
-			    }
-			    Console.traceln(Level.INFO, "");
-			    
-				Console.traceln(Level.INFO, "resultmat:");
-			    for(int i=0; i<x.length; i++){
-			        for(int j=0; j<x[0].length; j++){
-			        	Console.trace(Level.INFO, String.format("%20s", x[i][j]));
-			        }
-			        Console.traceln(Level.INFO, "");
-			    }
-			    */
-				
-				// now we iterate over all clusters (well, boxes of sizes per cluster really) and save the number of the 
-				// cluster in which we are
-				int cnumber;
-				int found_cnumber = -1;
-				Iterator<Integer> clusternumber = this.csize.keySet().iterator();
-				while ( clusternumber.hasNext() && found_cnumber == -1) {
-					cnumber = clusternumber.next();
-					
-					// now iterate over the boxes of the cluster and hope we find one (cluster could have been removed)
-					// or we are too far away from any cluster because of the fastmap calculation with the initial pivot objects
-					for ( int box=0; box < this.csize.get(cnumber).size(); box++ ) { 
-						Double[][] current = this.csize.get(cnumber).get(box);
-						
-						if(proj[0] >= current[0][0] && proj[0] <= current[0][1] &&  // x 
-						   proj[1] >= current[1][0] && proj[1] <= current[1][1]) {  // y
-							found_cnumber = cnumber;
-						}
-					}
-				}
-				
-				// we want to count how often we are really inside a cluster
-				//if ( found_cnumber == -1 ) {
-				//	CNOTFOUND += 1;
-				//}else {
-				//	CFOUND += 1;
-				//}
-
-				// now it can happen that we do not find a cluster because we deleted it previously (too few instances)
-				// or we get bigger distance measures from weka so that we are completely outside of our clusters.
-				// in these cases we just find the nearest cluster to our instance and use it for classification.
-				// to do that we use the EuclideanDistance again to compare our distance to all other Instances
-				// then we take the cluster of the closest weka instance
-				dist = new EuclideanDistance(traindata2);
-				if( !this.ctraindata.containsKey(found_cnumber) ) { 
-					double min_distance = Double.MAX_VALUE;
-					clusternumber = ctraindata.keySet().iterator();
-					while ( clusternumber.hasNext() ) {
-						cnumber = clusternumber.next();
-						for(int i=0; i < ctraindata.get(cnumber).size(); i++) {
-							if(dist.distance(instance, ctraindata.get(cnumber).get(i)) <= min_distance) {
-								found_cnumber = cnumber;
-								min_distance = dist.distance(instance, ctraindata.get(cnumber).get(i));
-							}
-						}
-					}
-				}
-				
-				// here we have the cluster where an instance has the minimum distance between itself and the
-				// instance we want to classify
-				// if we still have not found a cluster we exit because something is really wrong
-				if( found_cnumber == -1 ) {
-					Console.traceln(Level.INFO, String.format("ERROR matching instance to cluster with full search!"));
-					throw new RuntimeException("cluster not found with full search");
-				}
-				
-				// classify the passed instance with the cluster we found and its training data
-				ret = cclassifier.get(found_cnumber).classifyInstance(classInstance);
-				
-			}catch( Exception e ) {
-				Console.traceln(Level.INFO, String.format("ERROR matching instance to cluster!"));
-				throw new RuntimeException(e);
-			}
-			return ret;
-		}
-		
-		@Override
-		public void buildClassifier(Instances traindata) throws Exception {
-			
-			//Console.traceln(Level.INFO, String.format("found: "+ CFOUND + ", notfound: " + CNOTFOUND));
-			this.show_biggest = true;
-			
-			cclassifier = new HashMap<Integer, Classifier>();
-			ctraindata = new HashMap<Integer, Instances>();
-			cpivots = new HashMap<Integer, Instance>();
-			cpivotindices = new int[2][2];
-			
-			// 1. copy traindata
-			Instances train = new Instances(traindata);
-			Instances train2 = new Instances(traindata);  // this one keeps the class attribute
-			
-			// 2. remove class attribute for clustering
-			Remove filter = new Remove();
-			filter.setAttributeIndices("" + (train.classIndex() + 1));
-			filter.setInputFormat(train);
-			train = Filter.useFilter(train, filter);
-			
-			// 3. calculate distance matrix (needed for Fastmap because it starts at dimension 1)
-			double biggest = 0;
-			EuclideanDistance dist = new EuclideanDistance(train);
-			double[][] distmat = new double[train.size()][train.size()];
-			for( int i=0; i < train.size(); i++ ) {
-				for( int j=0; j < train.size(); j++ ) {
-					distmat[i][j] = dist.distance(train.get(i), train.get(j));
-					if( distmat[i][j] > biggest ) {
-						biggest = distmat[i][j];
-					}
-				}
-			}
-			//Console.traceln(Level.INFO, String.format("biggest distances: "+ biggest));
-			
-			// 4. run fastmap for 2 dimensions on the distance matrix
-			Fastmap FMAP = new Fastmap(2);
-			FMAP.setDistmat(distmat);
-			FMAP.calculate();
-			
-			cpivotindices = FMAP.getPivots();
-			
-			double[][] X = FMAP.getX();
-			distmat = new double[0][0];
-			System.gc();
-			
-			// quadtree payload generation
-			ArrayList<QuadTreePayload<Instance>> qtp = new ArrayList<QuadTreePayload<Instance>>();
-		    
-			// we need these for the sizes of the quadrants
-			double[] big = {0,0};
-			double[] small = {Double.MAX_VALUE,Double.MAX_VALUE};
-			
-			// set quadtree payload values and get max and min x and y values for size
-		    for( int i=0; i<X.length; i++ ){
-		    	if(X[i][0] >= big[0]) {
-		    		big[0] = X[i][0];
-		    	}
-		    	if(X[i][1] >= big[1]) {
-		    		big[1] = X[i][1];
-		    	}
-		    	if(X[i][0] <= small[0]) {
-		    		small[0] = X[i][0];
-		    	}
-		    	if(X[i][1] <= small[1]) {
-		    		small[1] = X[i][1];
-		    	}
-		        QuadTreePayload<Instance> tmp = new QuadTreePayload<Instance>(X[i][0], X[i][1], train2.get(i));
-		        qtp.add(tmp);
-		    }
-		    
-		    //Console.traceln(Level.INFO, String.format("size for cluster ("+small[0]+","+small[1]+") - ("+big[0]+","+big[1]+")"));
-		    
-		    // 5. generate quadtree
-		    QuadTree TREE = new QuadTree(null, qtp);
-		    QuadTree.size = train.size();
-		    QuadTree.alpha = Math.sqrt(train.size());
-		    QuadTree.ccluster = new ArrayList<ArrayList<QuadTreePayload<Instance>>>();
-		    QuadTree.csize = new HashMap<Integer, ArrayList<Double[][]>>();
-		    
-		    //Console.traceln(Level.INFO, String.format("Generate QuadTree with "+ QuadTree.size + " size, Alpha: "+ QuadTree.alpha+ ""));
-		    
-		    // set the size and then split the tree recursively at the median value for x, y
-		    TREE.setSize(new double[] {small[0], big[0]}, new double[] {small[1], big[1]});
-		    
-		    // recursive split und grid clustering eher static
-		    TREE.recursiveSplit(TREE);
-		    
-		    // generate list of nodes sorted by density (childs only)
-		    ArrayList<QuadTree> l = new ArrayList<QuadTree>(TREE.getList(TREE));
-		    
-		    // recursive grid clustering (tree pruning), the values are stored in ccluster
-		    TREE.gridClustering(l);
-		    
-		    // wir iterieren durch die cluster und sammeln uns die instanzen daraus
-		    //ctraindata.clear();
-		    for( int i=0; i < QuadTree.ccluster.size(); i++ ) {
-		    	ArrayList<QuadTreePayload<Instance>> current = QuadTree.ccluster.get(i);
-		    	
-		    	// i is the clusternumber
-		    	// we only allow clusters with Instances > ALPHA, other clusters are not considered!
-		    	//if(current.size() > QuadTree.alpha) {
-		    	if( current.size() > 4 ) {
-			    	for( int j=0; j < current.size(); j++ ) {
-			    		if( !ctraindata.containsKey(i) ) {
-			    			ctraindata.put(i, new Instances(train2));
-			    			ctraindata.get(i).delete();
-			    		}
-			    		ctraindata.get(i).add(current.get(j).getInst());
-			    	}
-		    	}else{
-		    		Console.traceln(Level.INFO, String.format("drop cluster, only: " + current.size() + " instances"));
-		    	}
-		    }
-			
-			// here we keep things we need later on
-			// QuadTree sizes for later use (matching new instances)
-			this.csize = new HashMap<Integer, ArrayList<Double[][]>>(QuadTree.csize);
-		
-			// pivot elements
-			//this.cpivots.clear();
-			for( int i=0; i < FMAP.PA[0].length; i++ ) {
-				this.cpivots.put(FMAP.PA[0][i], (Instance)train.get(FMAP.PA[0][i]).copy());
-			}
-			for( int j=0; j < FMAP.PA[0].length; j++ ) {
-				this.cpivots.put(FMAP.PA[1][j], (Instance)train.get(FMAP.PA[1][j]).copy());
-			}
-			
-			
-			/* debug output
-			int pnumber;
-			Iterator<Integer> pivotnumber = cpivots.keySet().iterator();
-			while ( pivotnumber.hasNext() ) {
-				pnumber = pivotnumber.next();
-				Console.traceln(Level.INFO, String.format("pivot: "+pnumber+ " inst: "+cpivots.get(pnumber)));
-			}
-			*/
-			
-		    // train one classifier per cluster, we get the cluster number from the traindata
-		    int cnumber;
-			Iterator<Integer> clusternumber = ctraindata.keySet().iterator();
-			//cclassifier.clear();
-			
-			//int traindata_count = 0;
-			while ( clusternumber.hasNext() ) {
-				cnumber = clusternumber.next();
-				cclassifier.put(cnumber,setupClassifier());  // this is the classifier used for the cluster 
-				cclassifier.get(cnumber).buildClassifier(ctraindata.get(cnumber));
-				//Console.traceln(Level.INFO, String.format("classifier in cluster "+cnumber));
-				//traindata_count += ctraindata.get(cnumber).size();
-				//Console.traceln(Level.INFO, String.format("building classifier in cluster "+cnumber +"  with "+ ctraindata.get(cnumber).size() +" traindata instances"));
-			}
-			
-			// add all traindata
-			//Console.traceln(Level.INFO, String.format("traindata in all clusters: " + traindata_count));
-		}
-	}
-	
-
-	/**
-	 * Payload for the QuadTree.
-	 * x and y are the calculated Fastmap values.
-	 * T is a weka instance.
-	 */
-	public class QuadTreePayload<T> {
-
-		public double x;
-		public double y;
-		private T inst;
-		
-		public QuadTreePayload(double x, double y, T value) {
-			this.x = x;
-			this.y = y;
-			this.inst = value;
-		}
-		
-		public T getInst() {
-			return this.inst;
-		}
-	}
-	
-	
-	/**
-	 * Fastmap implementation
-	 * 
-	 * Faloutsos, C., & Lin, K. I. (1995). 
-	 * FastMap: A fast algorithm for indexing, data-mining and visualization of traditional and multimedia datasets 
-	 * (Vol. 24, No. 2, pp. 163-174). ACM.
-	 */
-	public class Fastmap {
-		
-		/*N x k Array, at the end, the i-th row will be the image of the i-th object*/
-		private double[][] X;
-		
-		/*2 x k pivot Array one pair per recursive call*/
-		private int[][] PA;
-		
-		/*Objects we got (distance matrix)*/
-		private double[][] O;
-		
-		/*column of X currently updated (also the dimension)*/
-		private int col = 0;
-		
-		/*number of dimensions we want*/
-		private int target_dims = 0;
-		
-		// if we already have the pivot elements
-		private boolean pivot_set = false;
-		
-
-		public Fastmap(int k) {
-			this.target_dims = k;
-		}
-		
-		/**
-		 * Sets the distance matrix
-		 * and params that depend on this
-		 * @param O
-		 */
-		public void setDistmat(double[][] O) {
-			this.O = O;
-			int N = O.length;
-			this.X = new double[N][this.target_dims];
-			this.PA = new int[2][this.target_dims];
-		}
-		
-		/**
-		 * Set pivot elements, we need that to classify instances
-		 * after the calculation is complete (because we then want to reuse
-		 * only the pivot elements).
-		 * 
-		 * @param pi
-		 */
-		public void setPivots(int[][] pi) {
-			this.pivot_set = true;
-			this.PA = pi;
-		}
-		
-		/**
-		 * Return the pivot elements that were chosen during the calculation
-		 * 
-		 * @return
-		 */
-		public int[][] getPivots() {
-			return this.PA;
-		}
-		
-		/**
-		 * The distance function for euclidean distance
-		 * 
-		 * Acts according to equation 4 of the fastmap paper
-		 *  
-		 * @param x x index of x image (if k==0 x object)
-		 * @param y y index of y image (if k==0 y object)
-		 * @param kdimensionality
-		 * @return distance
-		 */
-		private double dist(int x, int y, int k) {
-			
-			// basis is object distance, we get this from our distance matrix
-			double tmp = this.O[x][y] * this.O[x][y]; 
-			
-			// decrease by projections
-			for( int i=0; i < k; i++ ) {
-				double tmp2 = (this.X[x][i] - this.X[y][i]);
-				tmp -= tmp2 * tmp2;
-			}
-			
-			return Math.abs(tmp);
-		}
-
-		/**
-		 * Find the object farthest from the given index
-		 * This method is a helper Method for findDistandObjects
-		 * 
-		 * @param index of the object 
-		 * @return index of the farthest object from the given index
-		 */
-		private int findFarthest(int index) {
-			double furthest = Double.MIN_VALUE;
-			int ret = 0;
-			
-			for( int i=0; i < O.length; i++ ) {
-				double dist = this.dist(i, index, this.col);
-				if( i != index && dist > furthest ) {
-					furthest = dist;
-					ret = i;
-				}
-			}
-			return ret;
-		}
-		
-		/**
-		 * Finds the pivot objects 
-		 * 
-		 * This method is basically algorithm 1 of the fastmap paper.
-		 * 
-		 * @return 2 indexes of the choosen pivot objects
-		 */
-		private int[] findDistantObjects() {
-			// 1. choose object randomly
-			Random r = new Random();
-			int obj = r.nextInt(this.O.length);
-			
-			// 2. find farthest object from randomly chosen object
-			int idx1 = this.findFarthest(obj);
-			
-			// 3. find farthest object from previously farthest object
-			int idx2 = this.findFarthest(idx1);
-
-			return new int[] {idx1, idx2};
-		}
-	
-		/**
-		 * Calculates the new k-vector values (projections)
-		 * 
-		 * This is basically algorithm 2 of the fastmap paper.
-		 * We just added the possibility to pre-set the pivot elements because
-		 * we need to classify single instances after the computation is already done.
-		 * 
-		 * @param dims dimensionality
-		 */
-		public void calculate() {
-			
-			for( int k=0; k < this.target_dims; k++ ) {
-				// 2) choose pivot objects
-				if ( !this.pivot_set ) {
-					int[] pivots = this.findDistantObjects();
-		
-					// 3) record ids of pivot objects 
-					this.PA[0][this.col] = pivots[0];
-					this.PA[1][this.col] = pivots[1];
-				}
-				
-				// 4) inter object distances are zero (this.X is initialized with 0 so we just continue)
-				if( this.dist(this.PA[0][this.col], this.PA[1][this.col], this.col) == 0 ) {
-					continue;
-				}
-				
-				// 5) project the objects on the line between the pivots
-				double dxy = this.dist(this.PA[0][this.col], this.PA[1][this.col], this.col);
-				for( int i=0; i < this.O.length; i++ ) {
-					
-					double dix = this.dist(i, this.PA[0][this.col], this.col);
-					double diy = this.dist(i, this.PA[1][this.col], this.col);
-					
-					double tmp = (dix + dxy - diy) / (2 * Math.sqrt(dxy));
-					
-					// save the projection
-					this.X[i][this.col] = tmp;
-				}
-				
-				this.col += 1;
-			}
-		}
-		
-		/**
-		 * returns the result matrix of the projections
-		 * 
-		 * @return calculated result
-		 */
-		public double[][] getX() {
-			return this.X;
-		}
-	}
+
+    private final TraindatasetCluster classifier = new TraindatasetCluster();
+
+    @Override
+    public Classifier getClassifier() {
+        return classifier;
+    }
+
+    @Override
+    public void apply(Instances traindata) {
+        PrintStream errStr = System.err;
+        System.setErr(new PrintStream(new NullOutputStream()));
+        try {
+            classifier.buildClassifier(traindata);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        finally {
+            System.setErr(errStr);
+        }
+    }
+
+    public class TraindatasetCluster extends AbstractClassifier {
+
+        private static final long serialVersionUID = 1L;
+
+        /* classifier per cluster */
+        private HashMap<Integer, Classifier> cclassifier;
+
+        /* instances per cluster */
+        private HashMap<Integer, Instances> ctraindata;
+
+        /*
+         * holds the instances and indices of the pivot objects of the Fastmap calculation in
+         * buildClassifier
+         */
+        private HashMap<Integer, Instance> cpivots;
+
+        /* holds the indices of the pivot objects for x,y and the dimension [x,y][dimension] */
+        private int[][] cpivotindices;
+
+        /* holds the sizes of the cluster multiple "boxes" per cluster */
+        private HashMap<Integer, ArrayList<Double[][]>> csize;
+
+        /* debug vars */
+        @SuppressWarnings("unused")
+        private boolean show_biggest = true;
+
+        @SuppressWarnings("unused")
+        private int CFOUND = 0;
+        @SuppressWarnings("unused")
+        private int CNOTFOUND = 0;
+
+        private Instance createInstance(Instances instances, Instance instance) {
+            // attributes for feeding instance to classifier
+            Set<String> attributeNames = new HashSet<>();
+            for (int j = 0; j < instances.numAttributes(); j++) {
+                attributeNames.add(instances.attribute(j).name());
+            }
+
+            double[] values = new double[instances.numAttributes()];
+            int index = 0;
+            for (int j = 0; j < instance.numAttributes(); j++) {
+                if (attributeNames.contains(instance.attribute(j).name())) {
+                    values[index] = instance.value(j);
+                    index++;
+                }
+            }
+
+            Instances tmp = new Instances(instances);
+            tmp.clear();
+            Instance instCopy = new DenseInstance(instance.weight(), values);
+            instCopy.setDataset(tmp);
+
+            return instCopy;
+        }
+
+        /**
+         * Because Fastmap saves only the image, not the values of the attributes it used, we cannot
+         * use the old data directly to classify single instances to clusters.
+         * 
+         * To classify a single instance we do a new fastmap computation with only the instance and
+         * the old pivot elements.
+         * 
+         * After that we find the cluster with our fastmap result for x and y.
+         */
+        @Override
+        public double classifyInstance(Instance instance) {
+
+            double ret = 0;
+            try {
+                // classInstance is the instance (with class attribute) passed to the classifier
+                Instances traindata = ctraindata.get(0);
+                Instance classInstance = createInstance(traindata, instance);
+
+                // this one keeps the class attribute
+                Instances traindata2 = ctraindata.get(1);
+
+                // remove class attribute before clustering
+                Remove filter = new Remove();
+                filter.setAttributeIndices("" + (traindata.classIndex() + 1));
+                filter.setInputFormat(traindata);
+                traindata = Filter.useFilter(traindata, filter);
+                Instance clusterInstance = createInstance(traindata, instance);
+
+                Fastmap FMAP = new Fastmap(2);
+                EuclideanDistance dist = new EuclideanDistance(traindata);
+
+                // we set our pivot indices [x=0,y=1][dimension]
+                int[][] npivotindices = new int[2][2];
+                npivotindices[0][0] = 1;
+                npivotindices[1][0] = 2;
+                npivotindices[0][1] = 3;
+                npivotindices[1][1] = 4;
+
+                // build temp dist matrix (2 pivots per dimension + 1 instance we want to classify)
+                // the instance we want to classify comes first; after that come the pivot elements,
+                // in the order defined above
+                double[][] distmat = new double[2 * FMAP.target_dims + 1][2 * FMAP.target_dims + 1];
+                distmat[0][0] = 0;
+                distmat[0][1] =
+                    dist.distance(clusterInstance,
+                                  this.cpivots.get((Integer) this.cpivotindices[0][0]));
+                distmat[0][2] =
+                    dist.distance(clusterInstance,
+                                  this.cpivots.get((Integer) this.cpivotindices[1][0]));
+                distmat[0][3] =
+                    dist.distance(clusterInstance,
+                                  this.cpivots.get((Integer) this.cpivotindices[0][1]));
+                distmat[0][4] =
+                    dist.distance(clusterInstance,
+                                  this.cpivots.get((Integer) this.cpivotindices[1][1]));
+
+                distmat[1][0] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][0]),
+                                  clusterInstance);
+                distmat[1][1] = 0;
+                distmat[1][2] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][0]));
+                distmat[1][3] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][1]));
+                distmat[1][4] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][1]));
+
+                distmat[2][0] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][0]),
+                                  clusterInstance);
+                distmat[2][1] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][0]));
+                distmat[2][2] = 0;
+                distmat[2][3] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][1]));
+                distmat[2][4] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][0]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][1]));
+
+                distmat[3][0] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][1]),
+                                  clusterInstance);
+                distmat[3][1] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][0]));
+                distmat[3][2] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][0]));
+                distmat[3][3] = 0;
+                distmat[3][4] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[0][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][1]));
+
+                distmat[4][0] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][1]),
+                                  clusterInstance);
+                distmat[4][1] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][0]));
+                distmat[4][2] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[1][0]));
+                distmat[4][3] =
+                    dist.distance(this.cpivots.get((Integer) this.cpivotindices[1][1]),
+                                  this.cpivots.get((Integer) this.cpivotindices[0][1]));
+                distmat[4][4] = 0;
+
+                /*
+                 * debug output: show biggest distance found within the new distance matrix double
+                 * biggest = 0; for(int i=0; i < distmat.length; i++) { for(int j=0; j <
+                 * distmat[0].length; j++) { if(biggest < distmat[i][j]) { biggest = distmat[i][j];
+                 * } } } if(this.show_biggest) { Console.traceln(Level.INFO,
+                 * String.format(""+clusterInstance)); Console.traceln(Level.INFO,
+                 * String.format("biggest distances: "+ biggest)); this.show_biggest = false; }
+                 */
+
+                FMAP.setDistmat(distmat);
+                FMAP.setPivots(npivotindices);
+                FMAP.calculate();
+                double[][] x = FMAP.getX();
+                double[] proj = x[0];
+
+                // debug output: show the calculated distance matrix, our result vector for the
+                // instance and the complete result matrix
+                /*
+                 * Console.traceln(Level.INFO, "distmat:"); for(int i=0; i<distmat.length; i++){
+                 * for(int j=0; j<distmat[0].length; j++){ Console.trace(Level.INFO,
+                 * String.format("%20s", distmat[i][j])); } Console.traceln(Level.INFO, ""); }
+                 * 
+                 * Console.traceln(Level.INFO, "vector:"); for(int i=0; i < proj.length; i++) {
+                 * Console.trace(Level.INFO, String.format("%20s", proj[i])); }
+                 * Console.traceln(Level.INFO, "");
+                 * 
+                 * Console.traceln(Level.INFO, "resultmat:"); for(int i=0; i<x.length; i++){ for(int
+                 * j=0; j<x[0].length; j++){ Console.trace(Level.INFO, String.format("%20s",
+                 * x[i][j])); } Console.traceln(Level.INFO, ""); }
+                 */
+
+                // now we iterate over all clusters (well, boxes of sizes per cluster really) and
+                // save the number of the
+                // cluster in which we are
+                int cnumber;
+                int found_cnumber = -1;
+                Iterator<Integer> clusternumber = this.csize.keySet().iterator();
+                while (clusternumber.hasNext() && found_cnumber == -1) {
+                    cnumber = clusternumber.next();
+
+                    // now iterate over the boxes of the cluster and hope we find one (cluster could
+                    // have been removed)
+                    // or we are too far away from any cluster because of the fastmap calculation
+                    // with the initial pivot objects
+                    for (int box = 0; box < this.csize.get(cnumber).size(); box++) {
+                        Double[][] current = this.csize.get(cnumber).get(box);
+
+                        if (proj[0] >= current[0][0] && proj[0] <= current[0][1] && // x
+                            proj[1] >= current[1][0] && proj[1] <= current[1][1])
+                        { // y
+                            found_cnumber = cnumber;
+                        }
+                    }
+                }
+
+                // we want to count how often we are really inside a cluster
+                // if ( found_cnumber == -1 ) {
+                // CNOTFOUND += 1;
+                // }else {
+                // CFOUND += 1;
+                // }
+
+                // now it can happen that we do not find a cluster because we deleted it previously
+                // (too few instances)
+                // or we get bigger distance measures from weka so that we are completely outside of
+                // our clusters.
+                // in these cases we just find the nearest cluster to our instance and use it for
+                // classification.
+                // to do that we use the EuclideanDistance again to compare our distance to all
+                // other Instances
+                // then we take the cluster of the closest weka instance
+                dist = new EuclideanDistance(traindata2);
+                if (!this.ctraindata.containsKey(found_cnumber)) {
+                    double min_distance = Double.MAX_VALUE;
+                    clusternumber = ctraindata.keySet().iterator();
+                    while (clusternumber.hasNext()) {
+                        cnumber = clusternumber.next();
+                        for (int i = 0; i < ctraindata.get(cnumber).size(); i++) {
+                            if (dist.distance(instance, ctraindata.get(cnumber).get(i)) <= min_distance)
+                            {
+                                found_cnumber = cnumber;
+                                min_distance =
+                                    dist.distance(instance, ctraindata.get(cnumber).get(i));
+                            }
+                        }
+                    }
+                }
+
+                // here we have the cluster where an instance has the minimum distance between
+                // itself and the
+                // instance we want to classify
+                // if we still have not found a cluster we exit because something is really wrong
+                if (found_cnumber == -1) {
+                    Console.traceln(Level.INFO, String
+                        .format("ERROR matching instance to cluster with full search!"));
+                    throw new RuntimeException("cluster not found with full search");
+                }
+
+                // classify the passed instance with the cluster we found and its training data
+                ret = cclassifier.get(found_cnumber).classifyInstance(classInstance);
+
+            }
+            catch (Exception e) {
+                Console.traceln(Level.INFO, String.format("ERROR matching instance to cluster!"));
+                throw new RuntimeException(e);
+            }
+            return ret;
+        }
+
+        @Override
+        public void buildClassifier(Instances traindata) throws Exception {
+
+            // Console.traceln(Level.INFO, String.format("found: "+ CFOUND + ", notfound: " +
+            // CNOTFOUND));
+            this.show_biggest = true;
+
+            cclassifier = new HashMap<Integer, Classifier>();
+            ctraindata = new HashMap<Integer, Instances>();
+            cpivots = new HashMap<Integer, Instance>();
+            cpivotindices = new int[2][2];
+
+            // 1. copy traindata
+            Instances train = new Instances(traindata);
+            Instances train2 = new Instances(traindata); // this one keeps the class attribute
+
+            // 2. remove class attribute for clustering
+            Remove filter = new Remove();
+            filter.setAttributeIndices("" + (train.classIndex() + 1));
+            filter.setInputFormat(train);
+            train = Filter.useFilter(train, filter);
+
+            // 3. calculate distance matrix (needed for Fastmap because it starts at dimension 1)
+            double biggest = 0;
+            EuclideanDistance dist = new EuclideanDistance(train);
+            double[][] distmat = new double[train.size()][train.size()];
+            for (int i = 0; i < train.size(); i++) {
+                for (int j = 0; j < train.size(); j++) {
+                    distmat[i][j] = dist.distance(train.get(i), train.get(j));
+                    if (distmat[i][j] > biggest) {
+                        biggest = distmat[i][j];
+                    }
+                }
+            }
+            // Console.traceln(Level.INFO, String.format("biggest distances: "+ biggest));
+
+            // 4. run fastmap for 2 dimensions on the distance matrix
+            Fastmap FMAP = new Fastmap(2);
+            FMAP.setDistmat(distmat);
+            FMAP.calculate();
+
+            cpivotindices = FMAP.getPivots();
+
+            double[][] X = FMAP.getX();
+            distmat = new double[0][0];
+            System.gc();
+
+            // quadtree payload generation
+            ArrayList<QuadTreePayload<Instance>> qtp = new ArrayList<QuadTreePayload<Instance>>();
+
+            // we need these for the sizes of the quadrants
+            double[] big =
+                { 0, 0 };
+            double[] small =
+                { Double.MAX_VALUE, Double.MAX_VALUE };
+
+            // set quadtree payload values and get max and min x and y values for size
+            for (int i = 0; i < X.length; i++) {
+                if (X[i][0] >= big[0]) {
+                    big[0] = X[i][0];
+                }
+                if (X[i][1] >= big[1]) {
+                    big[1] = X[i][1];
+                }
+                if (X[i][0] <= small[0]) {
+                    small[0] = X[i][0];
+                }
+                if (X[i][1] <= small[1]) {
+                    small[1] = X[i][1];
+                }
+                QuadTreePayload<Instance> tmp =
+                    new QuadTreePayload<Instance>(X[i][0], X[i][1], train2.get(i));
+                qtp.add(tmp);
+            }
+
+            // Console.traceln(Level.INFO,
+            // String.format("size for cluster ("+small[0]+","+small[1]+") - ("+big[0]+","+big[1]+")"));
+
+            // 5. generate quadtree
+            QuadTree TREE = new QuadTree(null, qtp);
+            QuadTree.size = train.size();
+            QuadTree.alpha = Math.sqrt(train.size());
+            QuadTree.ccluster = new ArrayList<ArrayList<QuadTreePayload<Instance>>>();
+            QuadTree.csize = new HashMap<Integer, ArrayList<Double[][]>>();
+
+            // Console.traceln(Level.INFO, String.format("Generate QuadTree with "+ QuadTree.size +
+            // " size, Alpha: "+ QuadTree.alpha+ ""));
+
+            // set the size and then split the tree recursively at the median value for x, y
+            TREE.setSize(new double[]
+                { small[0], big[0] }, new double[]
+                { small[1], big[1] });
+
+            // recursive split and grid clustering (this part is rather static)
+            TREE.recursiveSplit(TREE);
+
+            // generate list of nodes sorted by density (children only)
+            ArrayList<QuadTree> l = new ArrayList<QuadTree>(TREE.getList(TREE));
+
+            // recursive grid clustering (tree pruning), the values are stored in ccluster
+            TREE.gridClustering(l);
+
+            // iterate over the clusters and collect the instances from them
+            // ctraindata.clear();
+            for (int i = 0; i < QuadTree.ccluster.size(); i++) {
+                ArrayList<QuadTreePayload<Instance>> current = QuadTree.ccluster.get(i);
+
+                // i is the clusternumber
+                // we only allow clusters with Instances > ALPHA, other clusters are not considered!
+                // if(current.size() > QuadTree.alpha) {
+                if (current.size() > 4) {
+                    for (int j = 0; j < current.size(); j++) {
+                        if (!ctraindata.containsKey(i)) {
+                            ctraindata.put(i, new Instances(train2));
+                            ctraindata.get(i).delete();
+                        }
+                        ctraindata.get(i).add(current.get(j).getInst());
+                    }
+                }
+                else {
+                    Console.traceln(Level.INFO,
+                                    String.format("drop cluster, only: " + current.size() +
+                                        " instances"));
+                }
+            }
+
+            // here we keep things we need later on
+            // QuadTree sizes for later use (matching new instances)
+            this.csize = new HashMap<Integer, ArrayList<Double[][]>>(QuadTree.csize);
+
+            // pivot elements
+            // this.cpivots.clear();
+            for (int i = 0; i < FMAP.PA[0].length; i++) {
+                this.cpivots.put(FMAP.PA[0][i], (Instance) train.get(FMAP.PA[0][i]).copy());
+            }
+            for (int j = 0; j < FMAP.PA[0].length; j++) {
+                this.cpivots.put(FMAP.PA[1][j], (Instance) train.get(FMAP.PA[1][j]).copy());
+            }
+
+            /*
+             * debug output int pnumber; Iterator<Integer> pivotnumber =
+             * cpivots.keySet().iterator(); while ( pivotnumber.hasNext() ) { pnumber =
+             * pivotnumber.next(); Console.traceln(Level.INFO, String.format("pivot: "+pnumber+
+             * " inst: "+cpivots.get(pnumber))); }
+             */
+
+            // train one classifier per cluster, we get the cluster number from the traindata
+            int cnumber;
+            Iterator<Integer> clusternumber = ctraindata.keySet().iterator();
+            // cclassifier.clear();
+
+            // int traindata_count = 0;
+            while (clusternumber.hasNext()) {
+                cnumber = clusternumber.next();
+                cclassifier.put(cnumber, setupClassifier()); // this is the classifier used for the
+                                                             // cluster
+                cclassifier.get(cnumber).buildClassifier(ctraindata.get(cnumber));
+                // Console.traceln(Level.INFO, String.format("classifier in cluster "+cnumber));
+                // traindata_count += ctraindata.get(cnumber).size();
+                // Console.traceln(Level.INFO,
+                // String.format("building classifier in cluster "+cnumber +"  with "+
+                // ctraindata.get(cnumber).size() +" traindata instances"));
+            }
+
+            // add all traindata
+            // Console.traceln(Level.INFO, String.format("traindata in all clusters: " +
+            // traindata_count));
+        }
+    }
+
+    /**
+     * Payload for the QuadTree. x and y are the calculated Fastmap values. T is a weka instance.
+     */
+    public class QuadTreePayload<T> {
+
+        public double x;
+        public double y;
+        private T inst;
+
+        public QuadTreePayload(double x, double y, T value) {
+            this.x = x;
+            this.y = y;
+            this.inst = value;
+        }
+
+        public T getInst() {
+            return this.inst;
+        }
+    }
+
+    /**
+     * Fastmap implementation
+     * 
+     * Faloutsos, C., & Lin, K. I. (1995). FastMap: A fast algorithm for indexing, data-mining and
+     * visualization of traditional and multimedia datasets (Vol. 24, No. 2, pp. 163-174). ACM.
+     */
+    public class Fastmap {
+
+        /* N x k Array, at the end, the i-th row will be the image of the i-th object */
+        private double[][] X;
+
+        /* 2 x k pivot Array one pair per recursive call */
+        private int[][] PA;
+
+        /* Objects we got (distance matrix) */
+        private double[][] O;
+
+        /* column of X currently updated (also the dimension) */
+        private int col = 0;
+
+        /* number of dimensions we want */
+        private int target_dims = 0;
+
+        // if we already have the pivot elements
+        private boolean pivot_set = false;
+
+        public Fastmap(int k) {
+            this.target_dims = k;
+        }
+
+        /**
+         * Sets the distance matrix and params that depend on this
+         * 
+         * @param O the N x N distance matrix
+         */
+        public void setDistmat(double[][] O) {
+            this.O = O;
+            int N = O.length;
+            this.X = new double[N][this.target_dims];
+            this.PA = new int[2][this.target_dims];
+        }
+
+        /**
+         * Set pivot elements, we need that to classify instances after the calculation is complete
+         * (because we then want to reuse only the pivot elements).
+         * 
+         * @param pi the pivot element indices, one pair per dimension
+         */
+        public void setPivots(int[][] pi) {
+            this.pivot_set = true;
+            this.PA = pi;
+        }
+
+        /**
+         * Return the pivot elements that were chosen during the calculation
+         * 
+         * @return the 2 x k array of pivot indices chosen during the calculation
+         */
+        public int[][] getPivots() {
+            return this.PA;
+        }
+
+        /**
+         * The distance function for euclidean distance
+         * 
+         * Acts according to equation 4 of the fastmap paper
+         * 
+         * @param x
+         *            x index of x image (if k==0 x object)
+         * @param y
+         *            y index of y image (if k==0 y object)
+         * @param k dimensionality (number of already computed projections)
+         * @return distance
+         */
+        private double dist(int x, int y, int k) {
+
+            // basis is object distance, we get this from our distance matrix
+            double tmp = this.O[x][y] * this.O[x][y];
+
+            // decrease by projections
+            for (int i = 0; i < k; i++) {
+                double tmp2 = (this.X[x][i] - this.X[y][i]);
+                tmp -= tmp2 * tmp2;
+            }
+
+            return Math.abs(tmp);
+        }
+
+        /**
+         * Find the object farthest from the given index. This method is a helper method for
+         * findDistantObjects.
+         * 
+         * @param index
+         *            of the object
+         * @return index of the farthest object from the given index
+         */
+        private int findFarthest(int index) {
+            double furthest = Double.MIN_VALUE;
+            int ret = 0;
+
+            for (int i = 0; i < O.length; i++) {
+                double dist = this.dist(i, index, this.col);
+                if (i != index && dist > furthest) {
+                    furthest = dist;
+                    ret = i;
+                }
+            }
+            return ret;
+        }
+
+        /**
+         * Finds the pivot objects
+         * 
+         * This method is basically algorithm 1 of the fastmap paper.
+         * 
+         * @return 2 indexes of the chosen pivot objects
+         */
+        private int[] findDistantObjects() {
+            // 1. choose object randomly
+            Random r = new Random();
+            int obj = r.nextInt(this.O.length);
+
+            // 2. find farthest object from randomly chosen object
+            int idx1 = this.findFarthest(obj);
+
+            // 3. find farthest object from previously farthest object
+            int idx2 = this.findFarthest(idx1);
+
+            return new int[]
+                { idx1, idx2 };
+        }
+
+        /**
+         * Calculates the new k-vector values (projections)
+         * 
+         * This is basically algorithm 2 of the fastmap paper. We just added the possibility to
+         * pre-set the pivot elements because we need to classify single instances after the
+         * computation is already done.
+         * 
+         * The target dimensionality is fixed by the constructor argument; the distance matrix
+         * must be set via setDistmat(double[][]) before calling this method.
+         */
+        public void calculate() {
+
+            for (int k = 0; k < this.target_dims; k++) {
+                // 2) choose pivot objects
+                if (!this.pivot_set) {
+                    int[] pivots = this.findDistantObjects();
+
+                    // 3) record ids of pivot objects
+                    this.PA[0][this.col] = pivots[0];
+                    this.PA[1][this.col] = pivots[1];
+                }
+
+                // 4) inter object distances are zero (this.X is initialized with 0 so we just
+                // continue)
+                if (this.dist(this.PA[0][this.col], this.PA[1][this.col], this.col) == 0) {
+                    continue;
+                }
+
+                // 5) project the objects on the line between the pivots
+                double dxy = this.dist(this.PA[0][this.col], this.PA[1][this.col], this.col);
+                for (int i = 0; i < this.O.length; i++) {
+
+                    double dix = this.dist(i, this.PA[0][this.col], this.col);
+                    double diy = this.dist(i, this.PA[1][this.col], this.col);
+
+                    double tmp = (dix + dxy - diy) / (2 * Math.sqrt(dxy));
+
+                    // save the projection
+                    this.X[i][this.col] = tmp;
+                }
+
+                this.col += 1;
+            }
+        }
+
+        /**
+         * returns the result matrix of the projections
+         * 
+         * @return calculated result
+         */
+        public double[][] getX() {
+            return this.X;
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaTraining.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaTraining.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/training/WekaTraining.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.training;
 
@@ -11,12 +25,12 @@
 /**
  * Programmatic WekaTraining
- *
- * first parameter is Trainer Name.
- * second parameter is class name
  * 
- * all subsequent parameters are configuration params (for example for trees)
- * Cross Validation params always come last and are prepended with -CVPARAM
+ * first parameter is Trainer Name. second parameter is class name
+ * 
+ * all subsequent parameters are configuration params (for example for trees) Cross Validation
+ * params always come last and are prepended with -CVPARAM
  * 
  * XML Configurations for Weka Classifiers:
+ * 
  * <pre>
  * {@code
@@ -30,18 +44,20 @@
 public class WekaTraining extends WekaBaseTraining implements ITrainingStrategy {
 
-	@Override
-	public void apply(Instances traindata) {
-		PrintStream errStr	= System.err;
-		System.setErr(new PrintStream(new NullOutputStream()));
-		try {
-			if(classifier == null) {
-				Console.traceln(Level.WARNING, String.format("classifier null!"));
-			}
-			classifier.buildClassifier(traindata);
-		} catch (Exception e) {
-			throw new RuntimeException(e);
-		} finally {
-			System.setErr(errStr);
-		}
-	}
+    @Override
+    public void apply(Instances traindata) {
+        PrintStream errStr = System.err;
+        System.setErr(new PrintStream(new NullOutputStream()));
+        try {
+            if (classifier == null) {
+                Console.traceln(Level.WARNING, String.format("classifier null!"));
+            }
+            classifier.buildClassifier(traindata);
+        }
+        catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+        finally {
+            System.setErr(errStr);
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/AbstractVersionFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/AbstractVersionFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/AbstractVersionFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
@@ -6,23 +20,24 @@
 /**
  * Implements a skeletal {@link IVersionFilter}.
+ * 
  * @author Steffen Herbold
  */
 public abstract class AbstractVersionFilter implements IVersionFilter {
 
-	/**
-	 * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(java.util.List)
-	 */
-	@Override
-	public int apply(List<SoftwareVersion> versions) {
-		int removed = 0;
-		for( final Iterator<SoftwareVersion> iter=versions.iterator() ; iter.hasNext() ; ) {
-			SoftwareVersion version = iter.next();
-			
-			if( apply(version) ) {
-				iter.remove();
-				removed++;
-			}
-		}
-		return removed;
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(java.util.List)
+     */
+    @Override
+    public int apply(List<SoftwareVersion> versions) {
+        int removed = 0;
+        for (final Iterator<SoftwareVersion> iter = versions.iterator(); iter.hasNext();) {
+            SoftwareVersion version = iter.next();
+
+            if (apply(version)) {
+                iter.remove();
+                removed++;
+            }
+        }
+        return removed;
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/IVersionFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/IVersionFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/IVersionFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
@@ -6,21 +20,27 @@
 
 /**
- * Implements the interface for a {@link SoftwareVersion} filter. 
+ * Implements the interface for a {@link SoftwareVersion} filter.
+ * 
  * @author Steffen Herbold
  */
 public interface IVersionFilter extends IParameterizable {
 
-	/**
-	 * Applies the filter to a single version. 
-	 * @param version the version
-	 * @return true if filter applies to version, false otherwise
-	 */
-	boolean apply(SoftwareVersion version);
-	
-	/**
-	 * Applies the filter a a list of versions. Versions were the filter applies are automatically removed from the list. 
-	 * @param versions list of versions
-	 * @return number of removed versions
-	 */
-	int apply(List<SoftwareVersion> versions);
+    /**
+     * Applies the filter to a single version.
+     * 
+     * @param version
+     *            the version
+     * @return true if filter applies to version, false otherwise
+     */
+    boolean apply(SoftwareVersion version);
+
+    /**
+     * Applies the filter to a list of versions. Versions where the filter applies are
+     * automatically removed from the list.
+     * 
+     * @param versions
+     *            list of versions
+     * @return number of removed versions
+     */
+    int apply(List<SoftwareVersion> versions);
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MaxInstanceNumberFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MaxInstanceNumberFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MaxInstanceNumberFilter.java	(revision 41)
@@ -1,31 +1,49 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
 /**
- * Applies to large data sets. All data sets that have more than the required maximum number of instances are removed. 
+ * Applies to large data sets. All data sets that have more than the required maximum number of
+ * instances are removed.
+ * 
  * @author Steffen Herbold
  */
 public class MaxInstanceNumberFilter extends AbstractVersionFilter {
 
-	/**
-	 * maximum number of instances required
-	 */
-	private int maxInstances = 0;
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
-	 */
-	@Override
-	public boolean apply(SoftwareVersion version) {
-		return version.getInstances().numInstances()>maxInstances;
-	}
+    /**
+     * maximum number of instances required
+     */
+    private int maxInstances = 0;
 
-	/**
-	 * Sets the minimal number of instances.
-	 * @param parameters number of instances
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		maxInstances = Integer.parseInt(parameters);
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
+     */
+    @Override
+    public boolean apply(SoftwareVersion version) {
+        return version.getInstances().numInstances() > maxInstances;
+    }
+
+    /**
+     * Sets the maximum number of instances.
+     * 
+     * @param parameters
+     *            number of instances
+     */
+    @Override
+    public void setParameter(String parameters) {
+        maxInstances = Integer.parseInt(parameters);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinClassNumberFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinClassNumberFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinClassNumberFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
@@ -4,36 +18,40 @@
 
 /**
- * Applies to small data sets. All data sets that do not have the required minimal number of instances in each class (i.e., positive, negative) are removed. 
+ * Applies to small data sets. All data sets that do not have the required minimal number of
+ * instances in each class (i.e., positive, negative) are removed.
+ * 
  * @author Steffen Herbold
  */
 public class MinClassNumberFilter extends AbstractVersionFilter {
 
-	/**
-	 * minimal number of instances required
-	 */
-	private int minInstances = 0;
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
-	 */
-	@Override
-	public boolean apply(SoftwareVersion version) {
-		Instances instances = version.getInstances();
-		int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
-		boolean toSmall = false;
-		for( int count : counts ) {
-			toSmall |= count<minInstances;
-		}
-		return toSmall;
-	}
+    /**
+     * minimal number of instances required
+     */
+    private int minInstances = 0;
 
-	/**
-	 * Sets the minimal number of instances for each class.
-	 * @param parameters number of instances
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		minInstances = Integer.parseInt(parameters);
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
+     */
+    @Override
+    public boolean apply(SoftwareVersion version) {
+        Instances instances = version.getInstances();
+        int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
+        boolean toSmall = false;
+        for (int count : counts) {
+            toSmall |= count < minInstances;
+        }
+        return toSmall;
+    }
+
+    /**
+     * Sets the minimal number of instances for each class.
+     * 
+     * @param parameters
+     *            number of instances
+     */
+    @Override
+    public void setParameter(String parameters) {
+        minInstances = Integer.parseInt(parameters);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinInstanceNumberFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinInstanceNumberFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/MinInstanceNumberFilter.java	(revision 41)
@@ -1,31 +1,49 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
 /**
- * Applies to small data sets. All data sets that do not have the required minimal number of instances are removed. 
+ * Applies to small data sets. All data sets that do not have the required minimal number of
+ * instances are removed.
+ * 
  * @author Steffen Herbold
  */
 public class MinInstanceNumberFilter extends AbstractVersionFilter {
 
-	/**
-	 * minimal number of instances required
-	 */
-	private int minInstances = 0;
-	
-	/**
-	 * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
-	 */
-	@Override
-	public boolean apply(SoftwareVersion version) {
-		return version.getInstances().numInstances()<minInstances;
-	}
+    /**
+     * minimal number of instances required
+     */
+    private int minInstances = 0;
 
-	/**
-	 * Sets the minimal number of instances.
-	 * @param parameters number of instances
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		minInstances = Integer.parseInt(parameters);
-	}
+    /**
+     * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
+     */
+    @Override
+    public boolean apply(SoftwareVersion version) {
+        return version.getInstances().numInstances() < minInstances;
+    }
+
+    /**
+     * Sets the minimal number of instances.
+     * 
+     * @param parameters
+     *            number of instances
+     */
+    @Override
+    public void setParameter(String parameters) {
+        minInstances = Integer.parseInt(parameters);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/SoftwareVersion.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/SoftwareVersion.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/SoftwareVersion.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
@@ -4,75 +18,86 @@
 
 /**
- * Data class for software versions. 
+ * Data class for software versions.
+ * 
  * @author Steffen Herbold
  */
 public class SoftwareVersion implements Comparable<SoftwareVersion> {
 
-	/**
-	 * name of the project
-	 */
-	private final String project;
-	
-	/**
-	 * version of the project
-	 */
-	private final String version;
+    /**
+     * name of the project
+     */
+    private final String project;
 
-	/**
-	 * data of the version
-	 */
-	private final Instances instances;
-	
-	/**
-	 * Constructor. Creates a new version. 
-	 * @param project name of the project
-	 * @param version name of the version
-	 * @param instances data of the version
-	 */
-	public SoftwareVersion(String project, String version, Instances instances) {
-		this.project = project;
-		this.version = version;
-		this.instances = instances;
-	}
-	
-	/**
-	 * returns the project name
-	 * @return project name
-	 */
-	public String getProject() {
-		return project;
-	}
-	
-	/**
-	 * returns the name of the version
-	 * @return name of the version
-	 */
-	public String getVersion() {
-		return version;
-	}
-	
-	/**
-	 * returns the data of the version
-	 * @return data
-	 */
-	public Instances getInstances() {
-		return new Instances(instances);
-	}
+    /**
+     * version of the project
+     */
+    private final String version;
 
-	/** 
-	 * Compares first based on project name and then based on version. Only string comparisons are performed. 
-	 * @see java.lang.Comparable#compareTo(java.lang.Object)
-	 */
-	@Override
-	public int compareTo(SoftwareVersion o) {
-		int projectStrCmp = 0;
-		if( project!=null ) {
-			projectStrCmp = project.compareTo(o.project);
-		} 
-		if( projectStrCmp==0 && version!=null ) {
-			return version.compareTo(o.version);			
-		} else {
-			return projectStrCmp;
-		}
-	}
+    /**
+     * data of the version
+     */
+    private final Instances instances;
+
+    /**
+     * Constructor. Creates a new version.
+     * 
+     * @param project
+     *            name of the project
+     * @param version
+     *            name of the version
+     * @param instances
+     *            data of the version
+     */
+    public SoftwareVersion(String project, String version, Instances instances) {
+        this.project = project;
+        this.version = version;
+        this.instances = instances;
+    }
+
+    /**
+     * returns the project name
+     * 
+     * @return project name
+     */
+    public String getProject() {
+        return project;
+    }
+
+    /**
+     * returns the name of the version
+     * 
+     * @return name of the version
+     */
+    public String getVersion() {
+        return version;
+    }
+
+    /**
+     * returns the data of the version
+     * 
+     * @return data
+     */
+    public Instances getInstances() {
+        return new Instances(instances);
+    }
+
+    /**
+     * Compares first based on project name and then based on version. Only string comparisons are
+     * performed.
+     * 
+     * @see java.lang.Comparable#compareTo(java.lang.Object)
+     */
+    @Override
+    public int compareTo(SoftwareVersion o) {
+        int projectStrCmp = 0;
+        if (project != null) {
+            projectStrCmp = project.compareTo(o.project);
+        }
+        if (projectStrCmp == 0 && version != null) {
+            return version.compareTo(o.version);
+        }
+        else {
+            return projectStrCmp;
+        }
+    }
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/UnbalancedFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/UnbalancedFilter.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/versions/UnbalancedFilter.java	(revision 41)
@@ -1,2 +1,16 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.versions;
 
@@ -4,35 +18,38 @@
 
 /**
- * Removes unbalanced data sets in terms of classification. All data sets that are outside of the quantil defined
- * by setParameter (default=0.1) are removed. 
+ * Removes unbalanced data sets in terms of classification. All data sets that are outside of the
+ * quantile defined by setParameter (default=0.1) are removed.
+ * 
  * @author Steffen Herbold
  */
 public class UnbalancedFilter extends AbstractVersionFilter {
 
-	/**
-	 * quantil where outside lying versions are removed
-	 */
-	private double quantil = 0.1;
-	
-	/**
-	 * Sets the quantil.
-	 * @param parameters the quantil as string
-	 */
-	@Override
-	public void setParameter(String parameters) {
-		quantil = Double.parseDouble(parameters);
-	}
+    /**
+     * quantile outside of which versions are removed
+     */
+    private double quantil = 0.1;
 
-	/**
-	 * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
-	 */
-	@Override
-	public boolean apply(SoftwareVersion version) {
-		final Instances instances = version.getInstances();
-		
-		final int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
-		return ((double) counts[0])/instances.numInstances() >= (1-quantil) ||
-			((double) counts[0])/instances.numInstances() <= (quantil);
-	}
+    /**
+     * Sets the quantile.
+     * 
+     * @param parameters
+     *            the quantil as string
+     */
+    @Override
+    public void setParameter(String parameters) {
+        quantil = Double.parseDouble(parameters);
+    }
+
+    /**
+     * @see de.ugoe.cs.cpdp.versions.IVersionFilter#apply(de.ugoe.cs.cpdp.versions.SoftwareVersion)
+     */
+    @Override
+    public boolean apply(SoftwareVersion version) {
+        final Instances instances = version.getInstances();
+
+        final int[] counts = instances.attributeStats(instances.classIndex()).nominalCounts;
+        return ((double) counts[0]) / instances.numInstances() >= (1 - quantil) ||
+            ((double) counts[0]) / instances.numInstances() <= (quantil);
+    }
 
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/FixClass.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/FixClass.java	(revision 40)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/FixClass.java	(revision 41)
@@ -1,4 +1,17 @@
+// Copyright 2015 Georg-August-Universität Göttingen, Germany
+//
+//   Licensed under the Apache License, Version 2.0 (the "License");
+//   you may not use this file except in compliance with the License.
+//   You may obtain a copy of the License at
+//
+//       http://www.apache.org/licenses/LICENSE-2.0
+//
+//   Unless required by applicable law or agreed to in writing, software
+//   distributed under the License is distributed on an "AS IS" BASIS,
+//   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+//   See the License for the specific language governing permissions and
+//   limitations under the License.
+
 package de.ugoe.cs.cpdp.wekaclassifier;
-
 
 import weka.classifiers.AbstractClassifier;
@@ -16,54 +29,54 @@
 public class FixClass extends AbstractClassifier {
 
-	private static final long serialVersionUID = 1L;
+    private static final long serialVersionUID = 1L;
 
-	private double fixedClassValue = 0.0d;
+    private double fixedClassValue = 0.0d;
 
-	public FixClass() {
-		// TODO Auto-generated constructor stub
-	}
+    public FixClass() {
+        // TODO Auto-generated constructor stub
+    }
 
-	/**
-	 * Returns default capabilities of the classifier.
-	 * 
-	 * @return the capabilities of this classifier
-	 */
-	@Override
-	public Capabilities getCapabilities() {
-		Capabilities result = super.getCapabilities();
-		result.disableAll();
+    /**
+     * Returns default capabilities of the classifier.
+     * 
+     * @return the capabilities of this classifier
+     */
+    @Override
+    public Capabilities getCapabilities() {
+        Capabilities result = super.getCapabilities();
+        result.disableAll();
 
-		// attributes
-		result.enable(Capability.NOMINAL_ATTRIBUTES);
-		result.enable(Capability.NUMERIC_ATTRIBUTES);
-		result.enable(Capability.DATE_ATTRIBUTES);
-		result.enable(Capability.STRING_ATTRIBUTES);
-		result.enable(Capability.RELATIONAL_ATTRIBUTES);
-		result.enable(Capability.MISSING_VALUES);
+        // attributes
+        result.enable(Capability.NOMINAL_ATTRIBUTES);
+        result.enable(Capability.NUMERIC_ATTRIBUTES);
+        result.enable(Capability.DATE_ATTRIBUTES);
+        result.enable(Capability.STRING_ATTRIBUTES);
+        result.enable(Capability.RELATIONAL_ATTRIBUTES);
+        result.enable(Capability.MISSING_VALUES);
 
-		// class
-		result.enable(Capability.NOMINAL_CLASS);
-		result.enable(Capability.NUMERIC_CLASS);
-		result.enable(Capability.MISSING_CLASS_VALUES);
+        // class
+        result.enable(Capability.NOMINAL_CLASS);
+        result.enable(Capability.NUMERIC_CLASS);
+        result.enable(Capability.MISSING_CLASS_VALUES);
 
-		// instances
-		result.setMinimumNumberInstances(0);
+        // instances
+        result.setMinimumNumberInstances(0);
 
-		return result;
-	}
+        return result;
+    }
 
-	@Override
-	public void setOptions(String[] options) throws Exception {
-		fixedClassValue = Double.parseDouble(Utils.getOption('C', options));
-	}
+    @Override
+    public void setOptions(String[] options) throws Exception {
+        fixedClassValue = Double.parseDouble(Utils.getOption('C', options));
+    }
 
-	@Override
-	public double classifyInstance(Instance instance) {
-		return fixedClassValue;
-	}
+    @Override
+    public double classifyInstance(Instance instance) {
+        return fixedClassValue;
+    }
 
-	@Override
-	public void buildClassifier(Instances traindata) throws Exception {
-		// do nothing
-	}
+    @Override
+    public void buildClassifier(Instances traindata) throws Exception {
+        // do nothing
+    }
 }
