Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/Experiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/Experiment.java	(revision 31)
+++ 	(revision )
@@ -1,204 +1,0 @@
-package de.ugoe.cs.cpdp;
-
-import java.io.File;
-import java.util.Collections;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.logging.Level;
-
-import org.apache.commons.collections4.list.SetUniqueList;
-
-import weka.core.Instances;
-import de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy;
-import de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy;
-import de.ugoe.cs.cpdp.dataselection.IPointWiseDataselectionStrategy;
-import de.ugoe.cs.cpdp.dataselection.ISetWiseDataselectionStrategy;
-import de.ugoe.cs.cpdp.eval.IEvaluationStrategy;
-import de.ugoe.cs.cpdp.loader.IVersionLoader;
-import de.ugoe.cs.cpdp.training.ISetWiseTrainingStrategy;
-import de.ugoe.cs.cpdp.training.ITrainer;
-import de.ugoe.cs.cpdp.training.ITrainingStrategy;
-import de.ugoe.cs.cpdp.versions.IVersionFilter;
-import de.ugoe.cs.cpdp.versions.SoftwareVersion;
-import de.ugoe.cs.util.console.Console;
-
-/**
- * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps of an experiment are as follows:
- * <ul>
- *  <li>load the data from the provided data path</li>
- *  <li>filter the data sets according to the provided version filters</li>
- *  <li>execute the following steps for each data sets as test data that is not ignored through the test version filter:
- *  <ul>
- *   <li>filter the data sets to setup the candidate training data:
- *   <ul>
- *    <li>remove all data sets from the same project</li>
- *    <li>filter all data sets according to the training data filter
- *   </ul></li>
- *   <li>apply the setwise preprocessors</li>
- *   <li>apply the setwise data selection algorithms</li>
- *   <li>apply the setwise postprocessors</li>
- *   <li>train the setwise training classifiers</li>
- *   <li>unify all remaining training data into one data set</li>
- *   <li>apply the preprocessors</li>
- *   <li>apply the pointwise data selection algorithms</li>
- *   <li>apply the postprocessors</li>
- *   <li>train the normal classifiers</li>
- *   <li>evaluate the results for all trained classifiers on the training data</li>
- *  </ul></li>
- * </ul>
- * 
- * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own thread.
- * @author Steffen Herbold
- */
-public class Experiment implements Runnable {
-
-	/**
-	 * configuration of the experiment
-	 */
-	private final ExperimentConfiguration config;
-	
-	/**
-	 * Constructor. Creates a new experiment based on a configuration.
-	 * @param config configuration of the experiment
-	 */
-	public Experiment(ExperimentConfiguration config) {
-		this.config = config;
-	}
-	
-	/**
-	 * Executes the experiment with the steps as described in the class comment.
-	 * @see Runnable#run() 
-	 */
-	@Override
-	public void run() {
-		final List<SoftwareVersion> versions = new LinkedList<>();
-		
-		for(IVersionLoader loader : config.getLoaders()) {
-			versions.addAll(loader.load());
-		}
-		
-		for( IVersionFilter filter : config.getVersionFilters() ) {
-			filter.apply(versions);
-		}
-		boolean writeHeader = true;
-		int versionCount = 1;
-		int testVersionCount = 0;
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				testVersionCount++;
-			}
-		}
-		
-		// sort versions
-		Collections.sort(versions);
-		
-		for( SoftwareVersion testVersion : versions ) {
-			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				
-				// Setup testdata and training data
-				Instances testdata = testVersion.getInstances();
-				String testProject = testVersion.getProject();
-				SetUniqueList<Instances> traindataSet = SetUniqueList.setUniqueList(new LinkedList<Instances>());
-				for( SoftwareVersion trainingVersion : versions ) {
-					if( isVersion(trainingVersion, config.getTrainingVersionFilters()) ) {
-						if( trainingVersion!=testVersion ) {
-							if( !trainingVersion.getProject().equals(testProject) ) {
-								traindataSet.add(trainingVersion.getInstances());
-							}
-						}
-					}
-				}
-				
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					dataselector.apply(testdata, traindataSet);
-				}
-				for( ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindataSet);
-				}
-				for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), setwiseTrainer.getName()));
-					setwiseTrainer.apply(traindataSet);
-				}
-				Instances traindata = makeSingleTrainingSet(traindataSet);
-				for( IProcessesingStrategy processor : config.getPreProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
-					traindata = dataselector.apply(testdata, traindata);
-				}
-				for( IProcessesingStrategy processor : config.getPostProcessors() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
-					processor.apply(testdata, traindata);
-				}
-				for( ITrainingStrategy trainer : config.getTrainers() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), trainer.getName()));
-					trainer.apply(traindata);
-				}
-				File resultsDir = new File(config.getResultsPath());
-				if (!resultsDir.exists()) {
-					resultsDir.mkdir();
-				}
-				for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
-					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), evaluator.getClass().getName()));
-					List<ITrainer> allTrainers = new LinkedList<>();
-					for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
-						allTrainers.add(setwiseTrainer);
-					}
-					for( ITrainingStrategy trainer : config.getTrainers() ) {
-						allTrainers.add(trainer);
-					}
-					if( writeHeader ) {
-						evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
-					}
-					evaluator.apply(testdata, traindata, allTrainers, writeHeader);
-					writeHeader = false;
-				}
-				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
-				versionCount++;
-			}
-		}
-	}
-	
-	/**
-	 * Helper method that checks if a version passes all filters.
-	 * @param version version that is checked
-	 * @param filters list of the filters
-	 * @return true, if the version passes all filters, false otherwise
-	 */
-	private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
-		boolean result = true;
-		for( IVersionFilter filter : filters) {
-			result &= !filter.apply(version);
-		}
-		return result;
-	}
-
-	/**
-	 * Helper method that combines a set of Weka {@link Instances} sets into a single {@link Instances} set.
-	 * @param traindataSet set of {@link Instances} to be combines
-	 * @return single {@link Instances} set
-	 */
-	public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
-		Instances traindataFull = null;
-		for( Instances traindata : traindataSet) {
-			if( traindataFull==null ) {
-				traindataFull = new Instances(traindata);
-			} else {
-				for( int i=0 ; i<traindata.numInstances() ; i++ ) {
-					traindataFull.add(traindata.instance(i));
-				}
-			}
-		}
-		return traindataFull;
-	}
-}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java	(revision 31)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/ExperimentConfiguration.java	(revision 32)
@@ -119,4 +119,16 @@
 	 */
 	private List<IEvaluationStrategy> evaluators;
+	
+	/**
+	 * indicates whether the classifier should be saved
+	 */
+	private Boolean saveClassifier = null;
+	
+	/**
+	 * indicates which execution strategy to use
+	 * (e.g. CrossProjectExperiment, ClassifierCreationExecution).
+	 * Default is CrossProjectExperiment.
+	 */
+	private String executionStrategy = "CrossProjectExperiment";
 	
 	/**
@@ -315,4 +327,20 @@
 	}
 	
+	/**
+	 * returns whether the classifier should be saved
+	 * @return true if the classifier should be saved, false otherwise
+	 */
+	public boolean getSaveClassifier() {
+		return saveClassifier;
+	}
+	
+	/**
+	 * returns the execution strategy
+	 * @return String execution strategy
+	 */
+	public String getExecutionStrategy() {
+		return executionStrategy;
+	}
+	
 	/* (non-Javadoc)
 	 * @see org.xml.sax.helpers.DefaultHandler#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)
@@ -393,4 +421,10 @@
 				final IEvaluationStrategy evaluator = (IEvaluationStrategy) Class.forName("de.ugoe.cs.cpdp.eval." + attributes.getValue("name")).newInstance();
 				evaluators.add(evaluator);
+			}
+			else if( qName.equals("saveClassifier")) {
+				saveClassifier = true;
+			}
+			else if( qName.equals("executionStrategy")) {
+				executionStrategy = attributes.getValue("name");
 			}
 			else if( qName.equals("partialconfig") ) {
@@ -423,6 +457,7 @@
 	 * If the current data path is the empty string (&quot;&quot;), it is override by the datapath of the other configuration. Otherwise, the current data path is kept.
 	 * @param other experiment whose information is added
-	 */
-	private void addConfigurations(ExperimentConfiguration other) {
+	 * @throws ExperimentConfigurationException 
+	 */
+	private void addConfigurations(ExperimentConfiguration other) throws ExperimentConfigurationException {
 		if( "results".equals(resultsPath) ) {
 			resultsPath = other.resultsPath;
@@ -441,4 +476,16 @@
 		trainers.addAll(other.trainers);
 		evaluators.addAll(other.evaluators);
+		
+		if(!executionStrategy.equals(other.executionStrategy)) {
+			throw new ExperimentConfigurationException("Executionstrategies must be the same, if config files should be added.");
+		}
+		
+		/* saveClassifier is only taken over from the other config if it is
+		 * not set in the main config and the other config's value is true.
+		 */
+		if(saveClassifier == null && other.saveClassifier == true) {
+			saveClassifier = other.saveClassifier;
+		}
+
 	}
 	
@@ -464,5 +511,7 @@
 		builder.append("Pointwise trainers: " + trainers.toString() + StringTools.ENDLINE);
 		builder.append("Evaluators: " + evaluators.toString() + StringTools.ENDLINE);
-		
+		builder.append("Save Classifier?: " + saveClassifier + StringTools.ENDLINE);
+		builder.append("Execution Strategy: " + executionStrategy + StringTools.ENDLINE);
+				
 		return builder.toString();
 	}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java	(revision 31)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/Runner.java	(revision 32)
@@ -2,4 +2,6 @@
 
 import java.io.File;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
@@ -7,4 +9,5 @@
 import java.util.logging.Level;
 
+import de.ugoe.cs.cpdp.execution.IExecutionStrategy;
 import de.ugoe.cs.util.console.Console;
 import de.ugoe.cs.util.console.TextConsole;
@@ -23,5 +26,4 @@
 	public static void main(String[] args) {
 		new TextConsole(Level.FINE);
-		
 		final int concurrentThreads = Runtime.getRuntime().availableProcessors();
 		final ExecutorService threadPool = Executors.newFixedThreadPool(concurrentThreads);
@@ -47,4 +49,9 @@
 	}
 	
+	/**
+	 * Creates the config and starts the corresponding experiment
+	 * @param threadPool 
+	 * @param configFile location of the config file
+	 */
 	public static void createConfig(ExecutorService threadPool, String configFile) {
 		ExperimentConfiguration config = null;
@@ -55,9 +62,42 @@
 			e.printStackTrace();
 		}
+
 		if( config!=null ) {
 			Console.trace(Level.FINE, config.toString());
-			Experiment experiment = new Experiment(config);
-			threadPool.execute(experiment);
+			// Instantiate the class like it was given as parameter in the config file and cast it to the interface
+			try {
+				// Because we need to pass a parameter, a normal new Instance call is not possible
+				Class<?> executionStrategyClass = Class.forName("de.ugoe.cs.cpdp.execution."+config.getExecutionStrategy());
+				Constructor<?> executionStrategyConstructor = 
+						executionStrategyClass.getConstructor(ExperimentConfiguration.class);
+			
+				IExecutionStrategy experiment = (IExecutionStrategy) executionStrategyConstructor.newInstance(config);
+				threadPool.execute(experiment);
+			} catch (NoSuchMethodException e) {
+				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" does not have the right Constructor");
+				e.printStackTrace();
+			} catch (SecurityException e) {
+				Console.printerrln("Security manager prevents reflection");
+				e.printStackTrace();
+			} catch (IllegalArgumentException e) {
+				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" does not have a Constructor, which"
+						+ "matches the given arguments");
+				e.printStackTrace();
+			} catch (InvocationTargetException e) {
+				Console.printerrln("Constructor in Class \"" + config.getExecutionStrategy()+ "\" is not public");
+				e.printStackTrace();
+			} catch (InstantiationException e) {
+				Console.printerrln("Cannot instantiate Class \"" + config.getExecutionStrategy()+"\"");
+				e.printStackTrace();
+			} catch (IllegalAccessException e) {
+				Console.printerrln("Cannot access Class \"" + config.getExecutionStrategy()+"\"");
+				e.printStackTrace();
+			} catch (ClassNotFoundException e) {
+				Console.printerrln("Class \"" + config.getExecutionStrategy()+ "\" was not found");
+				e.printStackTrace();
+			}
+			
 		}
+		
 	}
 }
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/AttributeNonRemoval.java	(revision 32)
@@ -0,0 +1,65 @@
+package de.ugoe.cs.cpdp.dataprocessing;
+
+import java.util.ArrayList;
+
+import org.apache.commons.collections4.list.SetUniqueList;
+
+import weka.core.Instances;
+
+/**
+ * Removes all attributes from all data sets except those whose names are specified. 
+ * @author Fabian Trautsch
+ */
+public class AttributeNonRemoval implements ISetWiseProcessingStrategy, IProcessesingStrategy {
+
+	/**
+	 * names of the attributes to be kept (determined by {@link #setParameter(String)}) 
+	 */
+	private ArrayList<String> attributeNames = new ArrayList<String>();
+	
+	/**
+	 * Sets the attributes that will be kept. The string contains the blank-separated names of the attributes to be kept.
+	 * <br><br>
+	 * Note that attributes whose names contain blanks are currently not supported!
+	 * @param parameters string with the blank-separated attribute names
+	 */
+	@Override
+	public void setParameter(String parameters) {
+		if( parameters!=null ) {
+			String[] attributeNamesArray = parameters.split(" ");
+			for(String attributeName : attributeNamesArray) {
+				attributeNames.add(attributeName);
+			}
+		}
+	}
+
+	/**
+	 * @see de.ugoe.cs.cpdp.dataprocessing.SetWiseProcessingStrategy#apply(weka.core.Instances, org.apache.commons.collections4.list.SetUniqueList)
+	 */
+	@Override
+	public void apply(Instances testdata, SetUniqueList<Instances> traindataSet) {
+		for( String attributeName : attributeNames ) {
+			for( int i=0 ; i<testdata.numAttributes() ; i++ ) {
+				if(!attributeName.equals(testdata.attribute(i).name()) ) {
+					testdata.deleteAttributeAt(i);
+					for( Instances traindata : traindataSet ) {
+						traindata.deleteAttributeAt(i);
+					}
+				}
+			}
+		}
+	}
+
+	/**
+	 * @see de.ugoe.cs.cpdp.dataprocessing.ProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
+	 */
+	@Override
+	public void apply(Instances testdata, Instances traindata) {
+		for(int i=testdata.numAttributes()-1; i>=0; i--) {
+			if(!attributeNames.contains(testdata.attribute(i).name())) {
+				testdata.deleteAttributeAt(i);
+				traindata.deleteAttributeAt(i);
+			}
+		}
+	}
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/NominalAttributeFilter.java	(revision 32)
@@ -0,0 +1,88 @@
+package de.ugoe.cs.cpdp.dataprocessing;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+
+import weka.core.Attribute;
+import weka.core.Instance;
+import weka.core.Instances;
+
+/**
+ * Filters the given dataset for a nominal attribute.
+ * Every instance whose value of the given nominal attribute matches one of the defined values is removed.
+ * 
+ * 	 
+ * (e.g. param="CONFIDENCE low middle"; all instances where the "CONFIDENCE" attribute
+ * value is "low" or "middle" are removed from the dataset)
+ */
+
+public class NominalAttributeFilter implements IProcessesingStrategy{
+
+	private String nominalAttributeName = "";
+	private String[] nominalAttributeValues = new String[]{};
+	
+	/**
+	 * Sets the nominal attribute name (first parameter) and the nominal attribute values (other 
+	 * parameters), which should be removed from the dataset.
+	 * 
+	 * @param parameters string with the blank-separated parameters (first parameter 
+	 * is the name of the nominal attribute, everything else are the values)
+	 */
+	@Override
+	public void setParameter(String parameters) {
+		if( parameters!=null ) {
+			String[] parameter  = parameters.split(" ");
+			nominalAttributeName = parameter[0];
+			nominalAttributeValues = Arrays.copyOfRange(parameter, 1, parameter.length);
+		}
+	}
+	
+	/* (non-Javadoc)
+	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
+	 */
+	@Override
+	public void apply(Instances testdata, Instances traindata) {
+		int indexOfConfidenceAttribute = -1;
+		
+		// Find index of the named confidence attribute to filter for
+		for(int i=0; i<traindata.numAttributes(); i++) {
+			if(traindata.attribute(i).name().equals(nominalAttributeName)) {
+				indexOfConfidenceAttribute = i;
+			}
+		}
+		
+		// if it was not found return
+		if(indexOfConfidenceAttribute == -1) {
+			return;
+		}
+		
+		// Find index of nominal values
+		Attribute confidenceAttribute = traindata.attribute(indexOfConfidenceAttribute);
+		ArrayList<Object> nominalValuesOfConfidenceAttribute = Collections.list(confidenceAttribute.enumerateValues());
+		ArrayList<Double> indexOfnominalAttributeValues = new ArrayList<Double>();
+		
+		
+		for(int k=0; k<nominalValuesOfConfidenceAttribute.size(); k++) {
+			for(String attributeValue : nominalAttributeValues) {
+				if(((String)nominalValuesOfConfidenceAttribute.get(k)).equals(attributeValue)) {
+					indexOfnominalAttributeValues.add((double) k);
+				}
+			}
+		}
+
+		
+		
+		
+		// Go through all instances and check if nominal attribute equals 
+		for(int j=traindata.numInstances()-1; j>=0; j--) {
+			Instance wekaInstance = traindata.get(j);
+			
+			// delete all instances where nominal attribute has the value of one of the parameter
+			if(indexOfnominalAttributeValues.contains(wekaInstance.value(indexOfConfidenceAttribute))) {
+				traindata.delete(j);
+			}
+		}
+	}
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/dataprocessing/SimulationFilter.java	(revision 32)
@@ -0,0 +1,121 @@
+package de.ugoe.cs.cpdp.dataprocessing;
+
+import java.util.ArrayList;
+
+import java.util.HashMap;
+
+import weka.core.Attribute;
+import weka.core.Instance;
+import weka.core.Instances;
+
+/**
+ * Filter for the Repast Simulation of Software Projects.
+ * 
+ * Filters the training dataset in the following way: If 0 is no bug
+ * and 1 means there is a bug in this artifact, then this filter
+ * filters the dataset in this way:
+ * 
+ * 10010111000101110101111011101
+ * x--x-x-----x-x---x-x----x---x
+ * 
+ * The instances, which are marked with x in this graphic are included
+ * in the newly created dataset and form the training dataset.
+ * 
+ * @author Fabian Trautsch
+ *
+ */
+
+public class SimulationFilter implements IProcessesingStrategy{
+
+	/**
+	 * Does not have parameters. String is ignored.
+	 * @param parameters ignored
+	 */
+	@Override
+	public void setParameter(String parameters) {
+		// dummy
+		
+	}
+
+	
+	/*
+	 * (non-Javadoc)
+	 * @see de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy#apply(weka.core.Instances, weka.core.Instances)
+	 */
+	@Override
+	public void apply(Instances testdata, Instances traindata) {
+		Instances newDataSet = new Instances(traindata);
+		traindata.delete();
+		
+		HashMap<Double, Instance> artifactNames = new HashMap<Double, Instance>();
+		
+		// This is to add all data where the first occurrence of the file has a bug
+		ArrayList<Double> firstOccurenceArtifactNames = new ArrayList<Double>();
+		
+		// Sort dataset (StateID is connected to the date of commit: Lower StateID
+		// means earlier commit than a higher stateID)
+		Attribute wekaAttribute = newDataSet.attribute("Artifact.Target.StateID");
+		newDataSet.sort(wekaAttribute);
+		
+		
+		/*
+		 * Logical summary:
+		 * If there is an instance that does not have a bug, put it into the hashmap (only unique values in there).
+		 * 
+		 * If there is an instance that has a bug, look up whether it is in the hashmap already (this means:
+		 * it did not have a bug before!): If so, add it to the new dataset and remove it from
+		 * the hashmap, so that new changes from "nonBug" -> "bug" for this file can be found.
+		 * 
+		 * If the instance has a bug and is not in the hashmap (this means: the file has a bug at its
+		 * first occurrence, or this file only has bugs and no instance without a bug), then (if it is
+		 * not in the arrayList above) add it to the new dataset. This way it is possible to get
+		 * the first occurrence of a file which has a bug.
+		 * 
+		 */
+		for(int i=0; i<newDataSet.numInstances(); i++) {
+			Instance wekaInstance = newDataSet.instance(i);
+
+			double newBugLabel = wekaInstance.classValue();
+			Attribute wekaArtifactName = newDataSet.attribute("Artifact.Name");
+			Double artifactName = wekaInstance.value(wekaArtifactName);
+			
+			if(newBugLabel == 0.0 && artifactNames.keySet().contains(artifactName)) {
+				artifactNames.put(artifactName, wekaInstance);
+			} else if(newBugLabel == 0.0 && !artifactNames.keySet().contains(artifactName)) {
+				artifactNames.put(artifactName, wekaInstance);
+			} else if(newBugLabel == 1.0 && artifactNames.keySet().contains(artifactName)) {
+				traindata.add(wekaInstance);
+				artifactNames.remove(artifactName);
+			} else if(newBugLabel == 1.0 && !artifactNames.keySet().contains(artifactName)) {
+				if(!firstOccurenceArtifactNames.contains(artifactName)) {
+					traindata.add(wekaInstance);
+					firstOccurenceArtifactNames.add(artifactName);
+				}
+			}
+		}
+		
+		
+		// If we have a file, that never had a bug (this is, when it is NOT in the
+		// new created dataset, but it is in the HashMap from above) add it to
+		// the new dataset
+		
+		double[] artifactNamesinNewDataSet = traindata.attributeToDoubleArray(0);
+		HashMap<Double, Instance> artifactNamesCopy = new HashMap<Double, Instance>(artifactNames);
+		
+		
+		for(Double artifactName : artifactNames.keySet()) {
+	
+			for(int i=0; i<artifactNamesinNewDataSet.length; i++) {
+				if(artifactNamesinNewDataSet[i] == artifactName) {
+					artifactNamesCopy.remove(artifactName);
+				}
+			}
+		}
+		
+		for(Double artifact: artifactNamesCopy.keySet()) {
+			traindata.add(artifactNamesCopy.get(artifact));
+		}
+		
+	}
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ARFFxResourceTool.java	(revision 32)
@@ -0,0 +1,40 @@
+package de.ugoe.cs.cpdp.decentApp;
+
+import org.eclipse.emf.ecore.EValidator;
+import org.eclipse.emf.ecore.util.EObjectValidator;
+
+import ARFFx.ARFFxPackage;
+import ARFFx.impl.ARFFxPackageImpl;
+
+/**
+ * Class for handling arffx model files
+ * 
+ * @author Philip Makedonski, Fabian Trautsch
+ *
+ */
+public class ARFFxResourceTool extends ResourceTool {
+	
+	/**
+	 * Initializes the Tool Factory, from which the models can be loaded and
+	 * initializes the validator.
+	 */
+	public ARFFxResourceTool(){
+		super(ARFFxResourceTool.class.getName());
+		ARFFxPackageImpl.init();
+		
+		// Commented, because simulation has problems with this
+		initializeValidator();
+	}
+	
+	/**
+	 * Initializes the model validator
+	 */
+	@Override
+	protected void initializeValidator(){
+		super.initializeValidator();
+		EObjectValidator validator = new EObjectValidator();
+	    EValidator.Registry.INSTANCE.put(ARFFxPackage.eINSTANCE, validator);
+	}	
+	
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTEpsilonModelHandler.java	(revision 32)
@@ -0,0 +1,304 @@
+package de.ugoe.cs.cpdp.decentApp;
+
+import java.io.File;
+import java.net.URISyntaxException;
+import java.util.HashMap;
+
+import org.eclipse.emf.ecore.EPackage;
+import org.eclipse.emf.ecore.resource.Resource;
+import org.eclipse.epsilon.common.util.StringProperties;
+import org.eclipse.epsilon.emc.emf.EmfModel;
+import org.eclipse.epsilon.emc.emf.InMemoryEmfModel;
+import org.eclipse.epsilon.eol.exceptions.models.EolModelLoadingException;
+import org.eclipse.epsilon.eol.models.IModel;
+
+import ARFFx.ARFFxPackage;
+import DECENT.DECENTPackage;
+
+/**
+ * Class for handling decent and arffx model files.
+ * 
+ * @author Philip Makedonski, Fabian Trautsch
+ *
+ */
+
+public class DECENTEpsilonModelHandler {
+	private HashMap<String, Object> metaModelCache = new HashMap<>();
+	private boolean useDECENTBinary = false;
+	private boolean useARFFxBinary = false;
+
+	public static String metaPath = "./decent/models/";
+
+	/**
+	 * Returns the decent model as IModel instance
+	 * 
+	 * @param decentModelLocation location of the decent model file
+	 * @param read indicates if the model should be read from
+	 * @param write indicates if data should be written in the model
+	 * @return EmFModel (IModel) instance from the decent model, which was loaded
+	 * @throws Exception
+	 */
+	public IModel getDECENTModel(String decentModelLocation, boolean read, boolean write) throws Exception { 
+
+		EmfModel model;
+		
+		if (isUseDECENTBinary()) {
+			unregisterMetaModels("");
+			if (!read) {
+				new File(decentModelLocation).delete();
+				new File(decentModelLocation+"bin").delete();
+			}
+			DECENTResourceTool tool = new DECENTResourceTool();
+			if (new File(decentModelLocation).exists() && !new File(decentModelLocation+"bin").exists()) {
+				Resource resource = tool.loadResourceFromXMI(decentModelLocation,"decent", DECENTPackage.eINSTANCE);
+				tool.storeBinaryResourceContents(resource.getContents(), decentModelLocation+"bin", "decentbin");
+			}
+			
+			Resource resourceBin = tool.loadResourceFromBinary(decentModelLocation+"bin","decentbin", DECENTPackage.eINSTANCE);
+			//alternative pattern
+//			model = createInMemoryEmfModel("DECENT", resourceLocation, "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin, DECENTPackage.eINSTANCE);
+//			restoreMetaModels();
+
+			//NOTE: Adding the package is essential as otherwise epsilon breaks
+			model = new InMemoryEmfModel("DECENT", resourceBin, DECENTPackage.eINSTANCE);
+			model.setStoredOnDisposal(write);
+			model.setReadOnLoad(read);
+			model.setCachingEnabled(true);
+			restoreMetaModels();		
+		} else {
+			model = createEmfModel("DECENT", decentModelLocation, metaPath+"DECENTv3.ecore", read, write);
+		}
+
+		return model;
+	}
+
+	/**
+	 * Converts the decent model to a binary form
+	 * 
+	 * @param location of the decent model file
+	 */
+	public void convertDECENTModelToBinary(String location) {
+		unregisterMetaModels("");
+		DECENTResourceTool tool = new DECENTResourceTool();
+		Resource resource = tool.loadResourceFromXMI(location+"/model.decent","decent", DECENTPackage.eINSTANCE);
+		tool.storeBinaryResourceContents(resource.getContents(), location+"/model.decent"+"bin", "decentbin");
+		restoreMetaModels();		
+	}
+
+	/**
+	 * Converts the decent model to a xmi form
+	 * 
+	 * @param location of the decent model file
+	 */
+	
+	public void convertDECENTModelToXMI(String location) {
+		unregisterMetaModels("");
+		DECENTResourceTool tool = new DECENTResourceTool(); 
+		Resource resource = tool.loadResourceFromBinary(location+"/model.decentbin","decentbin", DECENTPackage.eINSTANCE);
+		restoreMetaModels();		
+		tool.storeResourceContents(resource.getContents(), location+"/model.decent", "decent");
+	}
+
+	/**
+	 * Returns the arffx model as IModel instance
+	 * 
+	 * @param arffxModelLocation location of the arffx model file
+	 * @param read indicates if the model should be read from
+	 * @param write indicates if data should be written in the model
+	 * @return EmFModel (IModel) instance from the arffx model, which was loaded
+	 * @throws Exception
+	 */
+	
+	public IModel getARFFxModel(String arffxModelLocation, boolean read, boolean write) throws Exception {
+		
+		EmfModel model;
+		
+		if (isUseARFFxBinary()) {
+			unregisterMetaModels("");
+			if (!read) {
+				new File(arffxModelLocation).delete();
+				new File(arffxModelLocation+"bin").delete();
+			}
+			ARFFxResourceTool tool = new ARFFxResourceTool();
+			if (new File(arffxModelLocation).exists() && !new File(arffxModelLocation+"bin").exists()) {
+				Resource resource = tool.loadResourceFromXMI(arffxModelLocation,"arffx", ARFFxPackage.eINSTANCE);
+				tool.storeBinaryResourceContents(resource.getContents(), arffxModelLocation+"bin", "arffxbin");
+			}
+			
+			Resource resourceBin = tool.loadResourceFromBinary(arffxModelLocation+"bin","arffxbin", ARFFxPackage.eINSTANCE);
+			//alternative pattern
+//			model = createInMemoryEmfModel("DECENT", resourceLocation, "../DECENT.Meta/model/DECENTv3.ecore", read, write, resourceBin, DECENTPackage.eINSTANCE);
+//			restoreMetaModels();
+
+			//NOTE: Adding the package is essential as otherwise epsilon breaks
+			model = new InMemoryEmfModel("ARFFx", resourceBin, ARFFxPackage.eINSTANCE);
+//			model.getModelImpl().getURI().toFileString()
+			model.setStoredOnDisposal(write);
+			model.setReadOnLoad(read);
+			model.setCachingEnabled(true);
+			restoreMetaModels();		
+		} else {
+			model = createEmfModel("ARFFx", arffxModelLocation, metaPath+"ARFFx.ecore", read, write);
+		}
+		
+		return model;
+	}
+
+
+	/**
+	 * Converts an arffx model to a binary version
+	 * 
+	 * @param location of the arffx model
+	 */
+	public void convertARFFxModelToBinary(String location) {
+		unregisterMetaModels("");
+		ARFFxResourceTool tool = new ARFFxResourceTool();
+		Resource resource = tool.loadResourceFromXMI(location+"/model.arffx","arffx", ARFFxPackage.eINSTANCE);
+		tool.storeBinaryResourceContents(resource.getContents(), location+"/model.arffx"+"bin", "arffxbin");
+		restoreMetaModels();		
+	}
+	
+	/**
+	 * Converts a binary arffx model back to its XMI representation.
+	 * 
+	 * @param location directory of the arffx model (reads model.arffxbin, writes model.arffx)
+	 */
+
+	public void convertARFFxModelToXMI(String location) {
+		unregisterMetaModels("");
+		ARFFxResourceTool tool = new ARFFxResourceTool(); 
+		// was DECENTPackage.eINSTANCE (copy-paste from the DECENT variant): an ARFFx
+		// binary must be loaded against the ARFFx package, as in getARFFxModel()
+		Resource resource = tool.loadResourceFromBinary(location+"/model.arffxbin","arffxbin", ARFFxPackage.eINSTANCE);
+		restoreMetaModels();		
+		tool.storeResourceContents(resource.getContents(), location+"/model.arffx", "arffx");
+	}
+
+
+	/**
+	 * Returns the log model as IModel instance
+	 * 
+	 * @param logModelLocation location of the log model file
+	 * @param read indicates if the model should be read from
+	 * @param write indicates if data should be written in the model
+	 * @return EmFModel (IModel) instance from the log model, which was loaded
+	 * @throws Exception
+	 */
+	
+	public IModel getLOGModel(String logModelLocation, boolean read, boolean write) throws Exception {
+		if (!new File(logModelLocation).exists()) {
+			read = false;
+		}
+		IModel model = createEmfModel("LOG", logModelLocation, metaPath +"LOG.ecore", read, write);
+		System.setProperty("epsilon.logFileAvailable", "true");
+		return model;
+	}
+
+	/**
+	 * Creates an EMF Model
+	 * 
+	 * @param name of the  emf model
+	 * @param model  name of the model
+	 * @param metamodel name of the metamodel
+	 * @param readOnLoad indicates if the model should be read on load
+	 * @param storeOnDisposal indicates if the model should be stored on disposal
+	 * @return
+	 * @throws EolModelLoadingException
+	 * @throws URISyntaxException
+	 */
+	
+	@SuppressWarnings("deprecation")
+	protected EmfModel createEmfModel(String name, String model, 
+			String metamodel, boolean readOnLoad, boolean storeOnDisposal) 
+					throws EolModelLoadingException, URISyntaxException {
+		EmfModel emfModel = new EmfModel();
+		StringProperties properties = new StringProperties();
+		properties.put(EmfModel.PROPERTY_NAME, name);
+		properties.put(EmfModel.PROPERTY_ALIASES, name);
+		properties.put(EmfModel.PROPERTY_FILE_BASED_METAMODEL_URI, 
+				"file:/" + getFile(metamodel).getAbsolutePath());
+		properties.put(EmfModel.PROPERTY_MODEL_URI, 
+				"file:/" + getFile(model).getAbsolutePath());
+		properties.put(EmfModel.PROPERTY_IS_METAMODEL_FILE_BASED, "true");
+		properties.put(EmfModel.PROPERTY_READONLOAD, readOnLoad + "");
+		properties.put(EmfModel.PROPERTY_CACHED, "true");
+		properties.put(EmfModel.PROPERTY_STOREONDISPOSAL, 
+				storeOnDisposal + "");
+		emfModel.load(properties, "");
+		//System.out.println(emfModel.allContents());
+		return emfModel;
+	}
+
+	/**
+	 * Returns a new File instance for the given filename.
+	 * 
+	 * @param fileName name/path of the file
+	 * @return File handle for fileName (the file is not required to exist)
+	 * @throws URISyntaxException declared for API compatibility; not thrown here
+	 */
+	public File getFile(String fileName) throws URISyntaxException {
+		return new File(fileName);
+	}
+
+	/**
+	 * Restores the metamodels, so that they are registered in the
+	 * EPackage registry
+	 */
+	private void restoreMetaModels() {
+		for (String key : metaModelCache .keySet()) {
+			EPackage.Registry.INSTANCE.put(key, metaModelCache.get(key));
+		};
+	}
+
+	/**
+	 * Unregister the metamodels from the EPackage registry
+	 * 
+	 * @param filter for filtering out certain instances
+	 */
+	private void unregisterMetaModels(String filter) {
+		for (String key : EPackage.Registry.INSTANCE.keySet()) {
+			if (key.contains(filter)) {
+				metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
+			}
+		};
+		for (String key : metaModelCache .keySet()) {
+			EPackage.Registry.INSTANCE.remove(key);
+		};
+	}
+	
+	/**
+	 * Returns true if decent binary model is used
+	 * @return
+	 */
+
+	public boolean isUseDECENTBinary() {
+		return useDECENTBinary;
+	}
+
+	/**
+	 * Sets the boolean which indicates, if the decent binary
+	 * model is used
+	 * @param useDECENTBinary
+	 */
+	public void setUseDECENTBinary(boolean useDECENTBinary) {
+		this.useDECENTBinary = useDECENTBinary;
+	}
+
+	/**
+	 * Returns true if arffx binary model is used
+	 * @return
+	 */
+	public boolean isUseARFFxBinary() {
+		return useARFFxBinary;
+	}
+	
+	/**
+	 * Sets the boolean which indicates, if the arffx binary
+	 * model is used
+	 * @param useARFFxBinary
+	 */
+
+	public void setUseARFFxBinary(boolean useARFFxBinary) {
+		this.useARFFxBinary = useARFFxBinary;
+	}
+
+	
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/DECENTResourceTool.java	(revision 32)
@@ -0,0 +1,41 @@
+package de.ugoe.cs.cpdp.decentApp;
+
+import org.eclipse.emf.ecore.EValidator;
+import org.eclipse.emf.ecore.util.EObjectValidator;
+
+import DECENT.DECENTPackage;
+import DECENT.impl.DECENTPackageImpl;
+import DECENT.util.DECENTResourceFactoryImpl;
+
+/**
+ * Class for handling decent model files
+ * 
+ * @author Philip Makedonski, Fabian Trautsch
+ *
+ */
+public class DECENTResourceTool extends ResourceTool {
+	
+	/**
+	 * Initializes the Tool Factory, from which the models can be loaded and
+	 * initializes the validator.
+	 */
+	public DECENTResourceTool(){
+		super(DECENTResourceTool.class.getName());
+		DECENTPackageImpl.init();
+		// DECENT uses its own resource factory instead of the base class's
+		// default XMIResourceFactoryImpl
+		this.resourceFactory = new DECENTResourceFactoryImpl();
+		initializeValidator();
+	}
+	
+	/**
+	 * Initializes the model validator by registering a plain EObjectValidator
+	 * for the DECENT package.
+	 */
+	@Override
+	protected void initializeValidator(){
+		super.initializeValidator();
+		EObjectValidator validator = new EObjectValidator();
+	    EValidator.Registry.INSTANCE.put(DECENTPackage.eINSTANCE, validator);
+	}
+	
+	
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/FileWatcher.java	(revision 32)
@@ -0,0 +1,42 @@
+package de.ugoe.cs.cpdp.decentApp;
+
+import java.util.*;
+import java.io.*;
+
+/**
+ * Helper class for watching if a file was changed
+ * 
+ * @author Philip Makedonski
+ *
+ */
+public abstract class FileWatcher extends TimerTask {
+	// Last modification timestamp observed for the watched file
+	private long timeStamp;
+	
+	// File to watch
+	private File file;
+
+	/**
+	 * Constructor
+	 * @param file the file whose modification timestamp is watched
+	 */
+	public FileWatcher(File file) {
+		this.file = file;
+		this.timeStamp = file.lastModified();
+	}
+
+	/**
+	 * Watches a file and executes the onChange Method
+	 * if a file is changed
+	 */
+	public final void run() {
+		// File.lastModified() returns 0L for a missing file, so deletion of the
+		// watched file also triggers onChange
+		long timeStamp = file.lastModified();
+
+		if (this.timeStamp != timeStamp) {
+			this.timeStamp = timeStamp;
+			onChange(file);
+		}
+	}
+
+	/**
+	 * Called whenever a change of the watched file is detected.
+	 * @param file the watched file
+	 */
+	protected abstract void onChange(File file);
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/decentApp/ResourceTool.java	(revision 32)
@@ -0,0 +1,320 @@
+package de.ugoe.cs.cpdp.decentApp;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.eclipse.emf.common.util.BasicDiagnostic;
+import org.eclipse.emf.common.util.Diagnostic;
+import org.eclipse.emf.common.util.EList;
+import org.eclipse.emf.common.util.URI;
+import org.eclipse.emf.ecore.EObject;
+import org.eclipse.emf.ecore.EOperation;
+import org.eclipse.emf.ecore.EPackage;
+import org.eclipse.emf.ecore.EStructuralFeature;
+import org.eclipse.emf.ecore.EValidator;
+import org.eclipse.emf.ecore.resource.Resource;
+import org.eclipse.emf.ecore.resource.ResourceSet;
+import org.eclipse.emf.ecore.resource.impl.BinaryResourceImpl;
+import org.eclipse.emf.ecore.resource.impl.ResourceFactoryImpl;
+import org.eclipse.emf.ecore.resource.impl.ResourceSetImpl;
+import org.eclipse.emf.ecore.util.Diagnostician;
+import org.eclipse.emf.ecore.xmi.impl.XMIResourceFactoryImpl;
+import org.eclipse.emf.ecore.xmi.impl.XMIResourceImpl;
+import org.eclipse.ocl.common.OCLConstants;
+import org.eclipse.ocl.ecore.delegate.OCLInvocationDelegateFactory;
+import org.eclipse.ocl.ecore.delegate.OCLSettingDelegateFactory;
+import org.eclipse.ocl.ecore.delegate.OCLValidationDelegateFactory;
+
+/**
+ * Class for handling different EMF Resources
+ * 
+ * @author Philip Makedonski
+ *
+ */
+public class ResourceTool {
+
+	protected ResourceFactoryImpl resourceFactory = new XMIResourceFactoryImpl();
+
+	/**
+	 * Constructor
+	 * @param loggedClass name of the class on whose behalf logging is configured;
+	 *        currently unused by this base class
+	 */
+	public ResourceTool(String loggedClass) {
+		// NOTE(review): the second call immediately overwrites the first, so the
+		// simple logger is directed to System.out and "validation.log" is never
+		// used -- confirm which target is actually intended.
+		System.setProperty("org.slf4j.simpleLogger.logFile","validation.log");
+		System.setProperty("org.slf4j.simpleLogger.logFile","System.out");
+	}
+
+	/**
+	 * Initializes the validator
+	 */
+	protected void initializeValidator() {
+	//		OCL.initialize(null);
+			String oclDelegateURI = OCLConstants.OCL_DELEGATE_URI+"/Pivot";
+			
+		    EOperation.Internal.InvocationDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI,
+		        new OCLInvocationDelegateFactory(oclDelegateURI));
+		    EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI,
+		        new OCLSettingDelegateFactory(oclDelegateURI));
+		    EValidator.ValidationDelegate.Registry.INSTANCE.put(oclDelegateURI,
+		        new OCLValidationDelegateFactory(oclDelegateURI));
+		    
+	//	    EStructuralFeature.Internal.SettingDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI, 
+	//	    	new OCLSettingDelegateFactory.Global());
+	//	    QueryDelegate.Factory.Registry.INSTANCE.put(oclDelegateURI, new OCLQueryDelegateFactory.Global());
+		    
+		}
+
+	/**
+	 * Validates the resource by running the Diagnostician over each root object.
+	 * @param resource to validate
+	 */
+	public void validateResource(Resource resource) {
+	    BasicDiagnostic diagnostics = new BasicDiagnostic();
+	    boolean valid = true;
+	    for (EObject eo : resource.getContents())
+	    {
+	    	Map<Object, Object> context = new HashMap<Object, Object>();
+	    	boolean validationResult = Diagnostician.INSTANCE.validate(eo, diagnostics, context);
+	    	showDiagnostics(diagnostics, "");
+			valid &= validationResult;
+	    }
+	    
+	    // NOTE(review): validation failures are only reported to stdout; the
+	    // caller gets no return value or exception to react to.
+	    if (!valid){
+	    	System.out.println("Problem with validation!");
+	    }
+	}
+
+	/**
+	 * Output method for showing diagnostics for different ressources
+	 * @param diagnostics
+	 * @param indent
+	 */
+	protected void showDiagnostics(Diagnostic diagnostics, String indent) {
+		indent+="  ";
+		for (Diagnostic d : diagnostics.getChildren()){
+			System.out.println(indent+d.getSource());
+			System.out.println(indent+"  "+d.getMessage());
+			showDiagnostics(d,indent);
+		}
+	}
+
+	
+	/**
+	 * Loads a ressource from XMI
+	 * @param inputPath path to the xmi
+	 * @param extension of the ressource to load
+	 * @param p the given EPackage
+	 * @return
+	 */
+	//TODO: workarounds copied from respective methods without EPackage parameter
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+	public Resource loadResourceFromXMI(String inputPath, String extension, EPackage p) {
+	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+		m.put(extension, resourceFactory);
+	    ResourceSet resSetIn = new ResourceSetImpl();
+	    //critical part
+	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+	    try {
+	    	Map options = new HashMap<>();
+	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+			inputResource.load(options);
+		} catch (IOException e) {
+			e.printStackTrace();
+		}
+		return inputResource;
+	}
+	
+	/**
+	 * Loads a ressource from XMI
+	 * @param inputPath path to the xmi
+	 * @param extension of the ressource to load
+	 * @return
+	 */
+
+	@SuppressWarnings({ "rawtypes", "unchecked" })
+	public Resource loadResourceFromXMI(String inputPath, String extension) {
+	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+		m.put(extension, resourceFactory);
+	    ResourceSet resSetIn = new ResourceSetImpl();
+	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+	    try {
+	    	Map options = new HashMap<>();
+	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+			inputResource.load(options);
+		} catch (IOException e) {
+			e.printStackTrace();
+		}
+		return inputResource;
+	}
+
+	/**
+	 * Creates (but does not load) a resource handle for a binary model file.
+	 * Unlike loadResourceFromBinary, the returned resource's contents are NOT
+	 * populated -- no load() is performed here.
+	 * @param inputPath path to the binary
+	 * @param extension of the model; mapped to a BinaryResourceImpl factory
+	 * @param p EPackage registered with the created resource set
+	 * @return the created, unloaded resource
+	 */
+	public Resource getResourceFromBinary(String inputPath, String extension, EPackage p) {
+	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+	    m.put(extension, new Resource.Factory() {
+
+			@Override
+			public Resource createResource(URI uri) {
+				return new BinaryResourceImpl(uri);
+			}
+			
+		});	    
+	    
+	    ResourceSet resSetIn = new ResourceSetImpl();
+	    //critical part
+	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+		return inputResource;
+	}
+
+	
+	/**
+	 * Loads a ressource from a binary form
+	 * @param inputPath path to the binary
+	 * @param extension of the model to load
+	 * @param p EPackage to put the loaded ressource in
+	 * @return
+	 */
+	//TODO: workarounds copied from respective methods without EPackage parameter
+	@SuppressWarnings({ "rawtypes" })
+	public Resource loadResourceFromBinary(String inputPath, String extension, EPackage p) {
+	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+	    m.put(extension, new Resource.Factory() {
+
+			@Override
+			public Resource createResource(URI uri) {
+				return new BinaryResourceImpl(uri);
+			}
+			
+		});	    
+	    
+	    ResourceSet resSetIn = new ResourceSetImpl();
+	    //critical part
+	    resSetIn.getPackageRegistry().put(p.getNsURI(), p);
+
+	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+	    if (new File(inputPath).exists()) {
+	    	
+		    try {
+		    	Map options = new HashMap<>();
+//		    	options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+//		    	options.put(BinaryResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+	//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+				inputResource.load(options);
+			} catch (IOException e) {
+				e.printStackTrace();
+			}
+	    }
+		return inputResource;
+	}
+
+	/**
+	 * Loads a ressource from a binary form
+	 * 
+	 * @param inputPath path to the binary
+	 * @param extension of the model to load
+	 * @return
+	 */
+	@SuppressWarnings({ "rawtypes" })
+	public Resource loadResourceFromBinary(String inputPath, String extension) {
+	    Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+	    m.put(extension, new Resource.Factory() {
+
+			@Override
+			public Resource createResource(URI uri) {
+				return new BinaryResourceImpl(uri);
+			}
+			
+		});	    
+	    
+	    ResourceSet resSetIn = new ResourceSetImpl();
+	    Resource inputResource = resSetIn.createResource(URI.createURI(inputPath));
+	    try {
+	    	Map options = new HashMap<>();
+//	    	options.put(XMIResourceImpl.OPTION_DEFER_IDREF_RESOLUTION, Boolean.TRUE);
+//	    	options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+			inputResource.load(options);
+		} catch (IOException e) {
+			e.printStackTrace();
+		}
+		return inputResource;
+	}
+
+	/**
+	 * Stores the binary resource contents to a given path
+	 * 
+	 * @param contents EList of different EObjects to store
+	 * @param outputPath path to store to
+	 * @param extension of the model to store
+	 */
+	@SuppressWarnings({ "rawtypes" })
+	public void storeBinaryResourceContents(EList<EObject> contents, String outputPath, String extension) {
+		Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+		m.put(extension, new Resource.Factory() {
+
+			@Override
+			public Resource createResource(URI uri) {
+				return new BinaryResourceImpl(uri);
+			}
+			
+		});
+		
+	    ResourceSet resSet = new ResourceSetImpl();
+		Resource outputResource = resSet.createResource(URI.createURI(outputPath));
+	    outputResource.getContents().addAll(contents);
+	    try {
+	      Map options = new HashMap<>();
+//	      options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+	      outputResource.save(options);
+	    } catch (IOException e) {
+	      e.printStackTrace();
+	    }
+	}
+
+	/**
+	 * Stores the resource contents to a given path
+	 * 
+	 * @param contents EList of different EObjects to store
+	 * @param outputPath path to store to
+	 * @param extension of the model to store
+	 */
+	@SuppressWarnings({ "unchecked", "rawtypes" })
+	public void storeResourceContents(EList<EObject> contents, String outputPath, String extension) {
+		//TODO: duplicated from loadResourceFromXMI => move to a more appropriate location
+		Resource.Factory.Registry reg = Resource.Factory.Registry.INSTANCE;
+	    Map<String, Object> m = reg.getExtensionToFactoryMap();
+		m.put(extension, resourceFactory);
+		
+	    ResourceSet resSet = new ResourceSetImpl();
+		Resource outputResource = resSet.createResource(URI.createURI(outputPath));
+	    outputResource.getContents().addAll(contents);
+	    try {
+	      Map options = new HashMap<>();
+	      options.put(XMIResourceImpl.OPTION_ENCODING, "UTF-8");
+//	      options.put(XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF, XMIResourceImpl.OPTION_PROCESS_DANGLING_HREF_DISCARD);
+	      outputResource.save(options);
+	    } catch (IOException e) {
+	      e.printStackTrace();
+	    }
+	}
+
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/ClassifierCreationExperiment.java	(revision 32)
@@ -0,0 +1,189 @@
+package de.ugoe.cs.cpdp.execution;
+
+import java.io.File;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.logging.Level;
+
+import weka.core.Instances;
+import de.ugoe.cs.cpdp.ExperimentConfiguration;
+import de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy;
+import de.ugoe.cs.cpdp.dataselection.IPointWiseDataselectionStrategy;
+import de.ugoe.cs.cpdp.eval.IEvaluationStrategy;
+import de.ugoe.cs.cpdp.loader.IVersionLoader;
+import de.ugoe.cs.cpdp.training.ITrainer;
+import de.ugoe.cs.cpdp.training.ITrainingStrategy;
+import de.ugoe.cs.cpdp.training.IWekaCompatibleTrainer;
+import de.ugoe.cs.cpdp.versions.SoftwareVersion;
+import de.ugoe.cs.util.console.Console;
+
+/**
+ * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps 
+ * of this ClassifierCreationExperiment are as follows:
+ * <ul>
+ *  <li>load the data from the provided data path</li>
+ *  <li>check if given resultsdir exists, if not create one</li>
+ *  <li>execute the following steps for each data set:
+ *  <ul>
+ *   <li>load the dataset</li>
+ *   <li>set testdata == traindata</li>
+ *   <li>preprocess the data</li>
+ *   <li>postprocess the data</li>
+ *   <li>for each configured trainer do the following:</li>
+ *   <ul>
+ *   	<li>if the classifier should be saved, train it with the dataset</li>
+ *   	<li>save it in the results dir</li>
+ *   	<li>For each configured evaluator: Do the evaluation and save results</li>
+ *   </ul>
+ *  </ul>
+ * </ul>
+ *   
+ * Note that this class implements {@link IExecutionStrategy}, i.e., each experiment can be started 
+ * in its own thread.
+ * 
+ * @author Fabian Trautsch
+ */
+public class ClassifierCreationExperiment implements IExecutionStrategy {
+
+	/**
+	 * configuration of the experiment
+	 */
+	private final ExperimentConfiguration config;
+	
+	/**
+	 * Constructor. Creates a new experiment based on a configuration.
+	 * @param config configuration of the experiment
+	 */
+	public ClassifierCreationExperiment(ExperimentConfiguration config) {
+		this.config = config;
+	}
+	
+	/**
+	 * Executes the experiment with the steps as described in the class comment.
+	 * @see Runnable#run() 
+	 */
+	@Override
+	public void run() {
+		final List<SoftwareVersion> versions = new LinkedList<>();
+		
+		boolean writeHeader = true;
+		
+		for(IVersionLoader loader : config.getLoaders()) {
+			versions.addAll(loader.load());
+		}
+	
+
+		File resultsDir = new File(config.getResultsPath());
+		if (!resultsDir.exists()) {
+			resultsDir.mkdir();
+		}
+		
+		
+		int versionCount = 1;
+		for( SoftwareVersion testVersion : versions ) {
+			
+			// At first: traindata == testdata
+			Instances testdata = testVersion.getInstances();
+			Instances traindata = new Instances(testdata);
+			
+			// Give the dataset a new name
+			testdata.setRelationName(testVersion.getProject());
+			
+			for( IProcessesingStrategy processor : config.getPreProcessors() ) {
+				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), processor.getClass().getName()));
+				processor.apply(testdata, traindata);
+			}
+			
+			for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
+				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), dataselector.getClass().getName()));
+				traindata = dataselector.apply(testdata, traindata);
+			}
+			
+			for( IProcessesingStrategy processor : config.getPostProcessors() ) {
+				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), processor.getClass().getName()));
+				processor.apply(testdata, traindata);
+			}
+			
+		
+			
+			
+			// Get bug/non bug (TODO)
+			int traindataBug = 0;
+			int traindataNonBug = 0;
+						
+			for(int i=0; i<traindata.numInstances(); i++) {
+				double wekaInstanceValue = traindata.instance(i).classValue();
+							
+				if(wekaInstanceValue == 1.0) {
+					traindataBug++;
+				} else {
+					traindataNonBug++;
+				}
+			}
+						
+			System.out.println("Traindata Bug: "+traindataBug);
+			System.out.println("Traindata Non Bug: "+traindataNonBug);
+						
+			int testdataBug = 0;
+			int testdataNonBug = 0;
+						
+			for(int i=0; i<testdata.numInstances(); i++) {
+				double wekaInstanceValue = testdata.instance(i).classValue();
+							
+				if(wekaInstanceValue == 1.0) {
+					testdataBug++;
+				} else {
+					testdataNonBug++;
+				}
+			}
+						
+			System.out.println("Testdata Bug: "+testdataBug);
+			System.out.println("Testdata Non Bug: "+testdataNonBug);
+
+			
+			// Trainerlist for evaluation later on
+			List<ITrainer> allTrainers = new LinkedList<>();
+			
+			for( ITrainingStrategy trainer : config.getTrainers() ) {
+
+				// Add trainer to list for evaluation
+				allTrainers.add(trainer);
+				
+				// Train classifier
+				trainer.apply(traindata);
+				
+				if(config.getSaveClassifier()) {
+					// If classifier should be saved, train him and save him
+					// be careful with typecasting here!
+					IWekaCompatibleTrainer trainerToSave = (IWekaCompatibleTrainer) trainer;
+					//Console.println(trainerToSave.getClassifier().toString());
+					try {
+						weka.core.SerializationHelper.write(resultsDir.getAbsolutePath()+"/"+trainer.getName()+"-"+testVersion.getProject(), trainerToSave.getClassifier());
+					} catch (Exception e) {
+						e.printStackTrace();
+					}
+					
+				}
+			}
+			
+			
+			
+			for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
+				Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject(), evaluator.getClass().getName()));
+
+				if( writeHeader ) {
+					evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
+				}
+				evaluator.apply(testdata, traindata, allTrainers, writeHeader);
+				writeHeader = false;
+			}
+			
+			versionCount++;
+			
+			Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, versions.size(), testVersion.getProject()));
+			
+		}
+		
+	}
+	
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/CrossProjectExperiment.java	(revision 32)
@@ -0,0 +1,205 @@
+package de.ugoe.cs.cpdp.execution;
+
+import java.io.File;
+import java.util.Collections;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.logging.Level;
+
+import org.apache.commons.collections4.list.SetUniqueList;
+
+import weka.core.Instances;
+import de.ugoe.cs.cpdp.ExperimentConfiguration;
+import de.ugoe.cs.cpdp.dataprocessing.IProcessesingStrategy;
+import de.ugoe.cs.cpdp.dataprocessing.ISetWiseProcessingStrategy;
+import de.ugoe.cs.cpdp.dataselection.IPointWiseDataselectionStrategy;
+import de.ugoe.cs.cpdp.dataselection.ISetWiseDataselectionStrategy;
+import de.ugoe.cs.cpdp.eval.IEvaluationStrategy;
+import de.ugoe.cs.cpdp.loader.IVersionLoader;
+import de.ugoe.cs.cpdp.training.ISetWiseTrainingStrategy;
+import de.ugoe.cs.cpdp.training.ITrainer;
+import de.ugoe.cs.cpdp.training.ITrainingStrategy;
+import de.ugoe.cs.cpdp.versions.IVersionFilter;
+import de.ugoe.cs.cpdp.versions.SoftwareVersion;
+import de.ugoe.cs.util.console.Console;
+
+/**
+ * Class responsible for executing an experiment according to an {@link ExperimentConfiguration}. The steps of an experiment are as follows:
+ * <ul>
+ *  <li>load the data from the provided data path</li>
+ *  <li>filter the data sets according to the provided version filters</li>
+ *  <li>execute the following steps for each data sets as test data that is not ignored through the test version filter:
+ *  <ul>
+ *   <li>filter the data sets to setup the candidate training data:
+ *   <ul>
+ *    <li>remove all data sets from the same project</li>
+ *    <li>filter all data sets according to the training data filter
+ *   </ul></li>
+ *   <li>apply the setwise preprocessors</li>
+ *   <li>apply the setwise data selection algorithms</li>
+ *   <li>apply the setwise postprocessors</li>
+ *   <li>train the setwise training classifiers</li>
+ *   <li>unify all remaining training data into one data set</li>
+ *   <li>apply the preprocessors</li>
+ *   <li>apply the pointwise data selection algorithms</li>
+ *   <li>apply the postprocessors</li>
+ *   <li>train the normal classifiers</li>
+ *   <li>evaluate the results for all trained classifiers on the training data</li>
+ *  </ul></li>
+ * </ul>
+ * 
+ * Note that this class implements {@link Runnable}, i.e., each experiment can be started in its own thread.
+ * @author Steffen Herbold
+ */
+public class CrossProjectExperiment implements IExecutionStrategy {
+
+	/**
+	 * configuration of the experiment
+	 */
+	private final ExperimentConfiguration config;
+	
+	/**
+	 * Constructor. Creates a new experiment based on a configuration.
+	 * @param config configuration of the experiment
+	 */
+	public CrossProjectExperiment(ExperimentConfiguration config) {
+		this.config = config;
+	}
+	
+	/**
+	 * Executes the experiment with the steps as described in the class comment.
+	 * @see Runnable#run() 
+	 */
+	@Override
+	public void run() {
+		final List<SoftwareVersion> versions = new LinkedList<>();
+		
+		for(IVersionLoader loader : config.getLoaders()) {
+			versions.addAll(loader.load());
+		}
+		
+		for( IVersionFilter filter : config.getVersionFilters() ) {
+			filter.apply(versions);
+		}
+		boolean writeHeader = true;
+		int versionCount = 1;
+		int testVersionCount = 0;
+		
+		for( SoftwareVersion testVersion : versions ) {
+			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
+				testVersionCount++;
+			}
+		}
+		
+		// sort versions
+		Collections.sort(versions);
+		
+		for( SoftwareVersion testVersion : versions ) {
+			if( isVersion(testVersion, config.getTestVersionFilters()) ) {
+				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: starting", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
+				
+				// Setup testdata and training data
+				Instances testdata = testVersion.getInstances();
+				String testProject = testVersion.getProject();
+				SetUniqueList<Instances> traindataSet = SetUniqueList.setUniqueList(new LinkedList<Instances>());
+				for( SoftwareVersion trainingVersion : versions ) {
+					if( isVersion(trainingVersion, config.getTrainingVersionFilters()) ) {
+						if( trainingVersion!=testVersion ) {
+							if( !trainingVersion.getProject().equals(testProject) ) {
+								traindataSet.add(trainingVersion.getInstances());
+							}
+						}
+					}
+				}
+				
+				for( ISetWiseProcessingStrategy processor : config.getSetWisePreprocessors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
+					processor.apply(testdata, traindataSet);
+				}
+				for( ISetWiseDataselectionStrategy dataselector : config.getSetWiseSelectors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
+					dataselector.apply(testdata, traindataSet);
+				}
+				for( ISetWiseProcessingStrategy processor : config.getSetWisePostprocessors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
+					processor.apply(testdata, traindataSet);
+				}
+				for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), setwiseTrainer.getName()));
+					setwiseTrainer.apply(traindataSet);
+				}
+				Instances traindata = makeSingleTrainingSet(traindataSet);
+				for( IProcessesingStrategy processor : config.getPreProcessors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying preprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
+					processor.apply(testdata, traindata);
+				}
+				for( IPointWiseDataselectionStrategy dataselector : config.getPointWiseSelectors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying pointwise selection %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), dataselector.getClass().getName()));
+					traindata = dataselector.apply(testdata, traindata);
+				}
+				for( IProcessesingStrategy processor : config.getPostProcessors() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying setwise postprocessor %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), processor.getClass().getName()));
+					processor.apply(testdata, traindata);
+				}
+				for( ITrainingStrategy trainer : config.getTrainers() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying trainer %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), trainer.getName()));
+					trainer.apply(traindata);
+				}
+				File resultsDir = new File(config.getResultsPath());
+				if (!resultsDir.exists()) {
+					resultsDir.mkdir();
+				}
+				for( IEvaluationStrategy evaluator : config.getEvaluators() ) {
+					Console.traceln(Level.FINE, String.format("[%s] [%02d/%02d] %s: applying evaluator %s", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion(), evaluator.getClass().getName()));
+					List<ITrainer> allTrainers = new LinkedList<>();
+					for( ISetWiseTrainingStrategy setwiseTrainer : config.getSetWiseTrainers() ) {
+						allTrainers.add(setwiseTrainer);
+					}
+					for( ITrainingStrategy trainer : config.getTrainers() ) {
+						allTrainers.add(trainer);
+					}
+					if( writeHeader ) {
+						evaluator.setParameter(config.getResultsPath() + "/" + config.getExperimentName() + ".csv");
+					}
+					evaluator.apply(testdata, traindata, allTrainers, writeHeader);
+					writeHeader = false;
+				}
+				Console.traceln(Level.INFO, String.format("[%s] [%02d/%02d] %s: finished", config.getExperimentName(), versionCount, testVersionCount, testVersion.getVersion()));
+				versionCount++;
+			}
+		}
+	}
+	
+	/**
+	 * Helper method that checks if a version passes all filters.
+	 * @param version version that is checked
+	 * @param filters list of the filters
+	 * @return true, if the version passes all filters, false otherwise
+	 */
+	private boolean isVersion(SoftwareVersion version, List<IVersionFilter> filters) {
+		boolean result = true;
+		for( IVersionFilter filter : filters) {
+			result &= !filter.apply(version);
+		}
+		return result;
+	}
+
+	/**
+	 * Helper method that combines a set of Weka {@link Instances} sets into a single {@link Instances} set.
+	 * @param traindataSet set of {@link Instances} to be combines
+	 * @return single {@link Instances} set
+	 */
+	public static Instances makeSingleTrainingSet(SetUniqueList<Instances> traindataSet) {
+		Instances traindataFull = null;
+		for( Instances traindata : traindataSet) {
+			if( traindataFull==null ) {
+				traindataFull = new Instances(traindata);
+			} else {
+				for( int i=0 ; i<traindata.numInstances() ; i++ ) {
+					traindataFull.add(traindata.instance(i));
+				}
+			}
+		}
+		return traindataFull;
+	}
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/execution/IExecutionStrategy.java	(revision 32)
@@ -0,0 +1,14 @@
+package de.ugoe.cs.cpdp.execution;
+
+import de.ugoe.cs.cpdp.Runner;
+
+/**
+ * Interface that must be implemented from the different experiments 
+ * (e.g. ClassifierCreationExeperiment) to be runnable by {@link Runner}
+ * 
+ * @author Fabian Trautsch
+ *
+ */
+public interface IExecutionStrategy extends Runnable{
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java	(revision 31)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/AbstractFolderLoader.java	(revision 32)
@@ -21,5 +21,5 @@
 	 * Path of the data.
 	 */
-	private String path = "";
+	protected String path = "";
 
 	/**
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentDataLoader.java	(revision 32)
@@ -0,0 +1,447 @@
+package de.ugoe.cs.cpdp.loader;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.net.URISyntaxException;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.LinkedHashSet;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Set;
+
+import org.eclipse.emf.common.util.URI;
+import org.eclipse.emf.ecore.EObject;
+import org.eclipse.emf.ecore.EPackage;
+import org.eclipse.emf.ecore.resource.Resource;
+import org.eclipse.epsilon.common.parse.problem.ParseProblem;
+import org.eclipse.epsilon.emc.emf.EmfUtil;
+import org.eclipse.epsilon.eol.EolModule;
+import org.eclipse.epsilon.eol.IEolExecutableModule;
+import org.eclipse.epsilon.eol.models.IModel;
+import org.eclipse.epsilon.etl.EtlModule;
+
+import ARFFx.Instance;
+import ARFFx.Model;
+import ARFFx.Value;
+import de.ugoe.cs.cpdp.decentApp.ARFFxResourceTool;
+import de.ugoe.cs.cpdp.decentApp.DECENTEpsilonModelHandler;
+import de.ugoe.cs.util.console.Console;
+import weka.core.Attribute;
+import weka.core.DenseInstance;
+import weka.core.Instances;
+import weka.core.converters.ArffSaver;
+
+/**
+ * Class for loading a decent model file.
+ * Loads a decent model file and (if no arff file is present) and does the
+ * following conversions: 
+ * DECENT -> ARFFX -> ARFF
+ * 
+ * @author Fabian Trautsch
+ *
+ */
+public class DecentDataLoader implements SingleVersionLoader{
+
+	// Model Handler for Decent Models
+	private DECENTEpsilonModelHandler modelHandler = new DECENTEpsilonModelHandler();
+	
+	// Set log level
+	String logLevel = "1";
+	String logToFile = "false";
+		
+	// This list contains attributes, that should be removed before building the arff file
+	private static List<String> attributeFilter = new LinkedList<String>();
+	
+	// This list contains all names of the different artifacts
+	private static Set<String> artifactNames = new LinkedHashSet<String>();
+	
+	// Name of the class attribute.
+	private static final String classAttributeName = "LABEL.Artifact.Target.BugFix.AverageWeight";
+	
+	
+	private int getIndexOfArtifactName(String artifactName) {
+		int index = -1;
+		if(artifactNames.contains(artifactName)) {
+			int i=0;
+			for(String nameInSet: artifactNames) {
+				if(nameInSet.equals(artifactName)) {
+					index = i;
+				} else {
+					i++;
+				}
+			}
+		}
+		
+		return index;
+	}
+	
+	/**
+	 * Defines attributes, that should be removed before building the
+	 * ARFF File from.
+	 */
+	private void setAttributeFilter() {
+		attributeFilter.add("Agent.Name");
+	
+	}
+	
+	/**
+	 * Saves the dataset as arff after transformation (decent->arffx) and
+	 * filtering
+	 * 
+	 * @param dataSet the WEKA dataset to save
+	 * @param arffLocation location where it should be saved to
+	 */
+	public void save(Instances dataSet, String arffLocation) {
+		
+		
+		ArffSaver saver = new ArffSaver();
+		saver.setInstances(dataSet);
+		try {
+			saver.setFile(new File(arffLocation));
+			saver.writeBatch();
+		} catch (IOException e) {
+			Console.printerrln("Cannot save the file to path: "+arffLocation);
+			e.printStackTrace();
+		}
+	}
+
+	
+	/**
+	 * Loads the given decent file and tranform it from decent->arffx->arff
+	 * @return Instances in WEKA format
+	 */
+	@Override
+	public Instances load(File file) {
+		
+		// Set attributeFilter
+		setAttributeFilter();
+		
+		// Register MetaModels
+		try {
+			registerMetaModels();
+		} catch (Exception e1) {
+			Console.printerrln("Metamodels cannot be registered!");
+			e1.printStackTrace();
+		}
+
+		// Set location of decent and arffx Model
+		String decentModelLocation = file.getAbsolutePath();
+		String pathToDecentModelFolder = decentModelLocation.substring(0,decentModelLocation.lastIndexOf(File.separator));
+		String arffxModelLocation = pathToDecentModelFolder+"/model.arffx";
+		String logModelLocation = pathToDecentModelFolder+"/model.log";
+		String arffLocation = pathToDecentModelFolder+"/model.arff";
+		
+		// If arff File exists, load from it!
+		if(new File(arffLocation).exists()) {
+			System.out.println("Loading arff File...");
+			 BufferedReader reader;
+			 Instances data = null;
+			try {
+				reader = new BufferedReader(new FileReader(arffLocation));
+				data = new Instances(reader);
+				reader.close();
+			} catch (FileNotFoundException e) {
+				Console.printerrln("File with path: "+arffLocation+" was not found.");
+				e.printStackTrace();
+			} catch (IOException e) {
+				Console.printerrln("File with path: "+arffLocation+" cannot be read.");
+				e.printStackTrace();
+			}
+			
+			// Set class attribute if not set
+			if(data.classIndex() == -1) {
+				Attribute classAttribute = data.attribute(classAttributeName);
+				data.setClass(classAttribute);
+			}
+			
+			
+			return data;
+		}
+		
+		// Location of EOL Scripts
+		String preprocess = "./decent/epsilon/query/preprocess.eol";	
+		String arffxToArffSource = "./decent/epsilon/query/addLabels.eol";
+		
+		// Set Log Properties
+		System.setProperty("epsilon.logLevel", logLevel);
+		System.setProperty("epsilon.logToFile", logToFile);
+		System.setProperty("epsilon.logFileAvailable", "false");
+		
+		// Set decent2arffx Properties
+		System.setProperty("epsilon.transformation.decent2arffx.skipSource", "false");
+		System.setProperty("epsilon.transformation.decent2arffx.type", "code");
+		
+		
+		
+		// Preprocess Data, transform from decent2arffx
+		try {
+			IEolExecutableModule preProcessModule = loadModule(preprocess);
+			IModel preProcessDecentModel = modelHandler.getDECENTModel(decentModelLocation, true, true);
+			IModel preProcessArffxarffxModel = modelHandler.getARFFxModel(arffxModelLocation, false, true);
+			preProcessModule.getContext().getModelRepository().addModel(preProcessDecentModel);
+			preProcessModule.getContext().getModelRepository().addModel(preProcessArffxarffxModel);
+			execute(preProcessModule, logModelLocation);
+			preProcessDecentModel.dispose();
+			preProcessArffxarffxModel.dispose();
+			preProcessModule.reset();
+		} catch (URISyntaxException e) {
+			Console.printerrln("URI Syntax for decent or arffx model is wrong.");
+			e.printStackTrace();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+		
+		
+		
+		
+		// Transform to arff, for label and confidence attributes
+		try {
+			IEolExecutableModule arffxToArffModule = loadModule(arffxToArffSource);
+			IModel arffxToArffArffxModel = modelHandler.getARFFxModel(arffxModelLocation, true, true);
+			arffxToArffModule.getContext().getModelRepository().addModel(arffxToArffArffxModel);
+			execute(arffxToArffModule, logModelLocation);
+			arffxToArffArffxModel.dispose();
+			// can be stored and retained alternatively
+			arffxToArffModule.reset();
+		} catch (URISyntaxException e) {
+			Console.printerrln("URI Syntax for arffx model is wrong.");
+			e.printStackTrace();
+		} catch (Exception e) {
+			e.printStackTrace();
+		}
+
+		// Unregister MetaModels, otherwise cast will fail
+		HashMap<String, Object> metaModelCache = new HashMap<>();
+		for (String key : EPackage.Registry.INSTANCE.keySet()) {
+			metaModelCache.put(key, EPackage.Registry.INSTANCE.get(key));
+		};
+		
+		for (String key : metaModelCache .keySet()) {
+			EPackage.Registry.INSTANCE.remove(key);
+		};
+		
+		
+		// Workaround to gernerate a usable URI. Absolute path is not
+		// possible, therefore we need to construct a relative path
+		
+		URL location = DecentDataLoader.class.getProtectionDomain().getCodeSource().getLocation();
+		String basePath = location.getFile();
+		
+		// Location is the bin folder, so we need to delete the last 4 characters
+		basePath = basePath.substring(0, basePath.length() - 4);
+		String relativePath = new File(basePath).toURI().relativize(new File(arffxModelLocation).toURI()).getPath();
+		
+		// Loard arffx file and create WEKA Instances
+		ARFFxResourceTool tool = new ARFFxResourceTool();
+		Resource resource = tool.loadResourceFromXMI(relativePath, "arffx");
+		
+		Instances dataSet = null;
+		for(EObject o: resource.getContents()) {
+			Model m = (Model) o;
+			dataSet = createWekaDataFormat(m);
+
+			for(Instance i : m.getData()) {
+				createWekaInstance(dataSet, i);
+			}
+		}
+		
+		// Set class attribute
+		Attribute classAttribute = dataSet.attribute(classAttributeName);
+		dataSet.setClass(classAttribute);
+		
+		// Save as ARFF
+		save(dataSet, arffLocation);
+		
+		return dataSet;
+	
+	}	
+	
+	
+	/**
+	 * Creates a WekaInstance from an ARFFX Model Instance
+	 * 
+	 * @param dataSet WekaInstance dataset, where the arffx model instances should be
+	 * added to
+	 * @param i arffx model instance
+	 */
+	private void createWekaInstance(Instances dataSet, Instance i) {	 
+		double[] values = new double[dataSet.numAttributes()];
+		int j=0;
+		
+		for(Value value : i.getValues()) {
+			String dataValue = value.getContent(); 
+			String attributeName = value.getOfAttribute().getName();
+			
+			if(attributeFilter.contains(attributeName)) {
+				continue;
+			}
+			
+			// Is value a LABEL.* attribute?
+			if(isLabel(attributeName)) {
+				values[j] = dataSet.attribute(j).indexOfValue(dataValue);
+			} else if (isConfidenceLabel(attributeName)){
+				// Is value a CONFIDENCE.* attribute?
+				values[j] = dataSet.attribute(j).indexOfValue(dataValue);
+			} else if(attributeName.equals("Artifact.Name")){
+				// Is it the name of the artifact?
+				artifactNames.add(dataValue);
+				values[j] = getIndexOfArtifactName(dataValue);
+			} else {
+				// Is it a numeric value?
+				values[j] = Double.parseDouble(dataValue);
+			}
+			
+			j++;
+		}
+		
+		DenseInstance inst = new DenseInstance(1.0, values);
+		dataSet.add(inst);
+	}
+		
+	/**
+	 * Creates a Weka Instances set out of a arffx model
+	 * @param m arffx model
+	 * @return
+	 */
+	private Instances createWekaDataFormat(Model m) {
+		
+		// Bad solution, can be enhanced (continue in for loop)
+		ArrayList<Attribute> datasetAttributes = new  ArrayList<Attribute>();
+		for(ARFFx.Attribute attribute :m.getAttributes()) {
+			String attributeName = attribute.getName();
+
+			if(attributeFilter.contains(attributeName)) {
+				continue;
+			}
+			
+			Attribute wekaAttr;
+			
+			// Is attribute a LABEL.* attribute?
+			if(isLabel(attributeName)) {
+				// Classattribute
+				final ArrayList<String> classAttVals = new ArrayList<String>();
+				classAttVals.add("false");
+				classAttVals.add("true");
+				wekaAttr = new Attribute(attributeName, classAttVals);
+			} else if(isConfidenceLabel(attributeName)){
+				// Is attribute a CONFIDENCE.* attribute?
+				ArrayList<String> labels = new ArrayList<String>();
+				labels.add("high");
+				labels.add("low");
+				wekaAttr = new Attribute(attributeName, labels);
+			} else {
+				// Is it a numeric attribute?
+				wekaAttr = new Attribute(attributeName);
+			}
+			
+			datasetAttributes.add(wekaAttr);
+		}
+		
+		
+		return new Instances("test-dataset", datasetAttributes, 0);
+	}
+	
+	/**
+	 * Helper methods which indicates if the given value starts with "LABEL"
+	 * 
+	 * @param value to test
+	 * @return
+	 */
+	private boolean isLabel(String value) {
+		if(value.length()>= 5 && value.substring(0, 5).equals("LABEL")) {
+			return true;
+		}
+		
+		return false;
+	}
+	
+	/**
+	 * Helper method which indicates if the given value starts with "CONFIDENCE"
+	 * @param value to test
+	 * @return
+	 */
+	private boolean isConfidenceLabel(String value) {
+		if(value.length()>= 10 && value.substring(0, 10).equals("CONFIDENCE")) {
+			return true;
+		}
+		
+		return false;
+	}
+
+	
+	/**
+	 * Returns if a filename ends with ".decent"
+	 * @return
+	 */
+	@Override
+	public boolean filenameFilter(String filename) {
+		return filename.endsWith(".decent");
+	}
+	
+	/**
+	 * Helper method for executing a eol scripts and adding the log model beforehand
+	 * @param module module to execute
+	 * @param logModelLocation location of the log model
+	 * @throws Exception
+	 */
+	private void execute(IEolExecutableModule module, String logModelLocation)
+			throws Exception {
+		IModel logModel = modelHandler.getLOGModel(logModelLocation, true, true);
+		module.getContext().getModelRepository().addModel(logModel);
+		module.execute();
+		logModel.dispose();
+	}
+
+	/**
+	 * Loads the module from a given source
+	 * 
+	 * @param source where the module is (e.g. eol script)
+	 * @return
+	 * @throws Exception
+	 * @throws URISyntaxException
+	 */
+	private IEolExecutableModule loadModule(String source) throws Exception,
+	URISyntaxException {
+
+		IEolExecutableModule module = null;
+		if (source.endsWith("etl")) {
+			module = new EtlModule();
+		} else if (source.endsWith("eol")) {
+			module = new EolModule();
+		} else {
+		
+		}
+		
+		module.parse(modelHandler.getFile(source));
+		
+		if (module.getParseProblems().size() > 0) {
+			Console.printerrln("Parse error occured...");
+			for (ParseProblem problem : module.getParseProblems()) {
+				System.err.println(problem.toString());
+			}
+			// System.exit(-1);
+		}
+		
+		return module;
+	}
+	
+	/**
+	 * Helper method for registering the metamodels
+	 * @throws Exception
+	 */
+	private void registerMetaModels() throws Exception {
+		String metaModelsPath = DECENTEpsilonModelHandler.metaPath;
+		File metaModelsLocation = new File(metaModelsPath);
+		for (File file : metaModelsLocation.listFiles()) {
+			if (file.getName().endsWith(".ecore")) {
+				EmfUtil.register(URI.createFileURI(file.getAbsolutePath()), EPackage.Registry.INSTANCE);
+			}
+		}
+	}
+	
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/DecentFolderLoader.java	(revision 32)
@@ -0,0 +1,89 @@
+package de.ugoe.cs.cpdp.loader;
+
+import java.io.File;
+import java.util.LinkedList;
+import java.util.List;
+
+import weka.core.Instances;
+
+import de.ugoe.cs.cpdp.versions.SoftwareVersion;
+
+/**
+ * Implements the {@link AbstractFolderLoader}
+ * 
+ * @author Fabian Trautsch
+ */
+public class DecentFolderLoader extends AbstractFolderLoader {
+
+	/*
+	 * (non-Javadoc)
+	 * 
+	 * @see de.ugoe.cs.cpdp.loader.AbstractFolderLoader#getSingleLoader()
+	 */
+	@Override
+	protected SingleVersionLoader getSingleLoader() {
+		return new DecentDataLoader();
+	}
+	
+	/**
+	 * @see de.ugoe.cs.cpdp.loader.IVersionLoader#load()
+	 */
+	@Override
+	public List<SoftwareVersion> load() {
+		final List<SoftwareVersion> versions = new LinkedList<SoftwareVersion>();
+
+		final File dataDir = new File(path);
+		final SingleVersionLoader instancesLoader = getSingleLoader();
+
+		String projectName = dataDir.getName();
+		
+		
+		/*
+		 * The following lines make it possible, that we can have two different possibilities
+		 * to load data:
+		 * 1) From one project (e.g. /decent/input/project1)
+		 * 2) From more than one project (e.g. /decent/input/)
+		 * 
+		 * Requirement is, that we have a folder structure like this:
+		 * "/decent/input/project1/model.decent, /decent/input/project2/model.decent, ..."
+		 * 
+		 * In the first one the "else" is executed, therefore it will just search the folder "project1"
+		 * for a "model.decent" file. In the second one, it will look into each folder and searches for
+		 * "model.decent" files.
+		 */
+		for (File projectDir : dataDir.listFiles()) {
+			if (projectDir.isDirectory()) {
+				projectName = projectDir.getName();
+				for (File versionFile : projectDir.listFiles()) {
+					loadDataFromFile(versionFile,instancesLoader, projectName, versions);
+				}
+			} else {
+				loadDataFromFile(projectDir, instancesLoader, projectName, versions);
+			}
+		}
+		return versions;
+	}
+	
+	/**
+	 * Loads data from a file and adds the instances from the load method to the 
+	 * versions List.
+	 * 
+	 * @param versionFile file to load from
+	 * @param instancesLoader loader that should be used
+	 * @param projectName name of the project which was loaded
+	 * @param versions list, where the weka instances are added to
+	 */
+	
+	private void loadDataFromFile(File versionFile, 
+			SingleVersionLoader instancesLoader, String projectName, List<SoftwareVersion> versions) {
+		if (versionFile.isFile()
+				&& instancesLoader.filenameFilter(versionFile
+						.getName())) {
+			String versionName = versionFile.getName();
+			Instances data = instancesLoader.load(versionFile);
+			versions.add(new SoftwareVersion(projectName,
+					versionName, data));
+		}
+	}
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java	(revision 32)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IDecentVersionLoader.java	(revision 32)
@@ -0,0 +1,11 @@
+package de.ugoe.cs.cpdp.loader;
+
+import java.util.List;
+
+import de.ugoe.cs.cpdp.versions.SoftwareVersion;
+
+public interface IDecentVersionLoader extends IVersionLoader{
+	
+	public List<SoftwareVersion> load(List<String> decentAttributes);
+
+}
Index: trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java
===================================================================
--- trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java	(revision 31)
+++ trunk/CrossPare/src/de/ugoe/cs/cpdp/loader/IVersionLoader.java	(revision 32)
@@ -26,3 +26,4 @@
 	 */
 	public List<SoftwareVersion> load();
+
 }
