Index: /trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/AbstractCODEP.java
===================================================================
--- /trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/AbstractCODEP.java	(revision 127)
+++ /trunk/CrossPare/src/de/ugoe/cs/cpdp/wekaclassifier/AbstractCODEP.java	(revision 128)
@@ -16,6 +16,10 @@
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 import java.util.logging.Level;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
 
 import de.ugoe.cs.util.console.Console;
@@ -62,4 +66,14 @@
     private Classifier codepClassifier = null;
 
+    /**
+     * Map that stores, for each classifier index, the index of the attribute that was upscaled.
+     */
+    private Map<Integer, Integer> upscaleIndex = null;
+
+    /**
+     * Scaling value that moves the decimal point by four digits.
+     */
+    private final double SCALER = 10000.0d;
+
     /*
      * (non-Javadoc)
@@ -87,8 +101,53 @@
         setupInternalClassifiers();
         setupInternalAttributes();
-
+        upscaleIndex = new HashMap<>();
+
+        int classifierIndex = 0;
+        boolean secondAttempt = false;
+        Instances traindataCopy = null;
         for (Classifier classifier : internalClassifiers) {
-            Console.traceln(Level.FINE, "internally training " + classifier.getClass().getName());
-            classifier.buildClassifier(traindata);
+            boolean trainingSuccessfull = false;
+            do {
+                Console.traceln(Level.FINE,
+                                "internally training " + classifier.getClass().getName());
+                try {
+                    if (secondAttempt) {
+                        classifier.buildClassifier(traindataCopy);
+                        trainingSuccessfull = true;
+                    }
+                    else {
+                        classifier.buildClassifier(traindata);
+                        trainingSuccessfull = true;
+                    }
+                }
+                catch (IllegalArgumentException e) {
+                    String regex = "A nominal attribute \\((.*)\\) cannot have duplicate labels.*";
+                    Pattern p = Pattern.compile(regex);
+                    Matcher m = p.matcher(e.getMessage());
+                    if (!m.find()) {
+                        // cannot treat problem, rethrow exception
+                        throw e;
+                    }
+                    String attributeName = m.group(1);
+                    int attrIndex = traindata.attribute(attributeName).index();
+                    if (secondAttempt) {
+                        throw new RuntimeException("cannot be handled correctly yet, because upscaleIndex is a Map");
+                        // traindataCopy = upscaleAttribute(traindataCopy, attrIndex);
+                    }
+                    else {
+                        traindataCopy = upscaleAttribute(traindata, attrIndex);
+                    }
+
+                    upscaleIndex.put(classifierIndex, attrIndex);
+                    Console
+                        .traceln(Level.FINE,
+                                 "upscaled attribute " + attributeName + "; restarting training");
+                    secondAttempt = true;
+                    continue;
+                }
+            }
+            while (!trainingSuccessfull); // dummy loop for internal continue
+            classifierIndex++;
+            secondAttempt = false;
         }
 
@@ -118,5 +177,16 @@
     private Instance createInternalInstance(Instance instance) throws Exception {
         double[] values = new double[internalAttributes.size()];
+        Instances traindataCopy;
         for (int j = 0; j < internalClassifiers.size(); j++) {
+            if (upscaleIndex.containsKey(j)) {
+                // instance value must be upscaled
+                int attrIndex = upscaleIndex.get(j);
+                double upscaledVal = instance.value(attrIndex) * SCALER;
+                traindataCopy = new Instances(instance.dataset());
+                instance = new DenseInstance(instance.weight(), instance.toDoubleArray());
+                instance.setValue(attrIndex, upscaledVal);
+                traindataCopy.add(instance);
+                instance.setDataset(traindataCopy);
+            }
             values[j] = internalClassifiers.get(j).classifyInstance(instance);
         }
@@ -161,4 +231,25 @@
     /**
      * <p>
+     * Upscales the value of a single attribute. This is a workaround to get BayesNet running for
+     * all data. Works on a copy of the training data, i.e., leaves the original data untouched.
+     * </p>
+     *
+     * @param traindata
+     *            data from which the attribute is upscaled.
+     * @param attributeIndex
+     *            index of the attribute
+     * @return data with upscaled attribute
+     */
+    private Instances upscaleAttribute(Instances traindata, int attributeIndex) {
+        Instances traindataCopy = new Instances(traindata);
+        for (int i = 0; i < traindata.size(); i++) {
+            traindataCopy.get(i).setValue(attributeIndex,
+                                          traindata.get(i).value(attributeIndex) * SCALER);
+        }
+        return traindataCopy;
+    }
+
+    /**
+     * <p>
      * Abstract method through which implementing classes define which classifier is used for the
      * CODEP.
