/**
 * Checks whether {@code classification} is the majority class among the given
 * instances. Ties are resolved in favor of the class with the larger index,
 * matching the original TreeMap-based behavior (a later class with an equal
 * probability overwrote the earlier map entry).
 *
 * @param instances      instances to inspect; must be non-empty with a class attribute set
 * @param classification the class value (nominal index) to test
 * @return true if {@code classification} occurs most frequently
 */
private boolean isClassTheMajortiy(ArrayList<Instance> instances, double classification){
    Attribute classAttribute = instances.get(0).classAttribute();
    int numClasses = classAttribute.numValues();
    // Count class occurrences directly instead of building a probability->class
    // TreeMap, which silently collapsed classes with tied probabilities and
    // needlessly copied the instance list.
    int[] counts = new int[numClasses];
    for (Instance instance : instances) {
        counts[(int) instance.classValue()]++;
    }
    // Find the majority class; ">=" keeps the later index on ties, as before.
    int majority = 0;
    for (int i = 1; i < numClasses; i++) {
        if (counts[i] >= counts[majority]) {
            majority = i;
        }
    }
    return majority == classification;
}
public static void main(String[] args){ ArrayList<Attribute> atts = new ArrayList<Attribute>(); /*Properties p1 = new Properties(); p1.setProperty("range", "[0,1]"); ProtectedProperties prop1 = new ProtectedProperties(p1);*/ Properties p2 = new Properties(); p2.setProperty("range", "[321,1E9]"); ProtectedProperties prop2 = new ProtectedProperties(p2); ArrayList<String> attVals = new ArrayList<String>(); for (int i = 0; i < 5; i++) attVals.add("val" + (i+1)); //atts.add(new Attribute("att1", prop1)); atts.add(new Attribute("att2", prop2)); //atts.add(new Attribute("att3", attVals)); //Instances data = LHSInitializer.getMultiDimContinuous(atts, 10, false); //Instances data = LHSInitializer.getMultiDim(atts, 10, false); Instances data = LHSInitializer.getMultiDimContinuous(atts, 1, false); System.out.println(data); }
/**
 * Builds an empty dataset with one numeric attribute per similarity measure
 * plus a nominal class attribute ("no merge"/"merge") appended last.
 *
 * @param sims the similarity measures supplying the attribute names
 * @return an empty Instances object with the class index set to the last attribute
 */
private Instances createDataSet(List<ConceptSimilarityMeasure> sims) {
    ArrayList<Attribute> atts = new ArrayList<Attribute>();
    for (ConceptSimilarityMeasure sim : sims) {
        atts.add(new Attribute(sim.getName()));
    }

    // Binary nominal class attribute, always the final column.
    List<String> classes = new ArrayList<String>();
    classes.add("no merge");
    classes.add("merge");
    atts.add(new Attribute("class", classes));

    Instances data = new Instances("data", atts, 0);
    data.setClassIndex(data.numAttributes() - 1);
    return data;
}
/**
 * Builds the classifier's dataset skeleton: two numeric attributes ("name",
 * "type") and a nominal class attribute holding the days of the week. The
 * class attribute is last and is set as the class index.
 */
public ModelClassifier() {
    name = new Attribute("name");
    type = new Attribute("type");
    // Diamond instantiation instead of raw `new ArrayList()` (unchecked warnings).
    attributes = new ArrayList<>();
    classVal = new ArrayList<>();
    // Loop over an array literal instead of seven repetitive add() calls.
    for (String day : new String[] {
            "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", "Sunday" }) {
        classVal.add(day);
    }
    attributes.add(name);
    attributes.add(type);
    attributes.add(new Attribute("class", classVal));
    dataRaw = new Instances("TestInstances", attributes, 0);
    dataRaw.setClassIndex(dataRaw.numAttributes() - 1);
}
/**
 * Converts a single Weka instance into a one-row dense XGBoost DMatrix,
 * omitting the class attribute from the feature columns.
 *
 * @param instance the instance to convert; must have a class attribute set
 * @return a dense 1 x (numAttributes-1) DMatrix
 * @throws XGBoostError if DMatrix construction fails
 */
public static DMatrix instanceToDenseDMatrix(Instance instance) throws XGBoostError {
    int classAttrIndex = instance.classAttribute().index();
    int colNum = instance.numAttributes() - 1; // every attribute except the class
    int rowNum = 1;
    float[] data = new float[colNum * rowNum];

    int dataIndex = 0;
    Enumeration<Attribute> attrs = instance.enumerateAttributes();
    while (attrs.hasMoreElements()) {
        Attribute attribute = attrs.nextElement();
        if (attribute.index() != classAttrIndex) {
            data[dataIndex++] = (float) instance.value(attribute);
        }
    }
    return new DMatrix(data, rowNum, colNum);
}
public static void main(String[] args){ ArrayList<Attribute> atts = new ArrayList<Attribute>(); /*Properties p1 = new Properties(); p1.setProperty("range", "[0,1]"); ProtectedProperties prop1 = new ProtectedProperties(p1);*/ Properties p2 = new Properties(); p2.setProperty("range", "[321,1E9]"); ProtectedProperties prop2 = new ProtectedProperties(p2); ArrayList<String> attVals = new ArrayList<String>(); for (int i = 0; i < 5; i++) attVals.add("val" + (i+1)); //atts.add(new Attribute("att1", prop1)); atts.add(new Attribute("att2", prop2)); //atts.add(new Attribute("att3", attVals)); //Instances data = LHSInitializer.getMultiDimContinuous(atts, 10, false); //Instances data = LHSInitializer.getMultiDim(atts, 10, false); LHSSampler sampler = new LHSSampler(); Instances data = sampler.sampleMultiDimContinuous(atts, 1, false); System.out.println(data); }
/**
 * Builds an Instances object named {@code name} from the provided tuples.
 * The TupleDesc is supplied separately to validate that every tuple shares
 * it; {@code fields} selects which tuple fields become attributes.
 *
 * @param name   the name of the resulting Instances object
 * @param ts     list of Tuples to convert
 * @param td     TupleDesc every tuple must match
 * @param fields indices identifying which fields to include
 * @return new Instances object containing one row per tuple
 * @throws RuntimeException if any tuple's TupleDesc differs from {@code td}
 */
public static Instances relationToInstances(String name, List<Tuple> ts, TupleDesc td, List<Integer> fields){
    ArrayList<Attribute> attrs = tupleDescToAttributeList(td, fields);
    Instances instances = new Instances(name, attrs, ts.size());
    int row = 0;
    for (Tuple t : ts) {
        if (!t.getTupleDesc().equals(td)) {
            throw new RuntimeException("All TupleDescs must match.");
        }
        instances.add(row++, tupleToInstance(t, attrs, fields));
    }
    return instances;
}
/**
 * Applies the given configuration (the best conf) to the cluster and
 * returns the measured running performance.
 *
 * @param attributeToVal mapping from Weka attribute to the value to set
 * @return the performance observed after running the test
 */
public double setOptimal(Map<Attribute,Double> attributeToVal){
    // Typed map instead of a raw HashMap; iterate entries to avoid a second
    // lookup per key.
    HashMap<String, Double> hm = new HashMap<>();
    for (Map.Entry<Attribute, Double> entry : attributeToVal.entrySet()) {
        hm.put(entry.getKey().name(), entry.getValue());
    }
    this.startTest(hm, 0, false);
    return performance;
}
/** * Helper method to convet Feature keys to Attributes * @param data * @param prefixes * @return */ public static Set<Attribute> prefix2attributes(Instances data, String...prefixes) { Set<Attribute> attributes = new ListOrderedSet<Attribute>(); for (String key : prefixes) { Attribute attribute = data.attribute(key); assert(attribute != null) : "Invalid Attribute key '" + key + "'"; attributes.add(attribute); } // FOR return (attributes); }
/**
 * Converts a list of documents into a Weka dataset — one numeric word-count
 * attribute per vocabulary entry plus a numeric "Class" attribute — writes
 * it to an ARFF file, and returns the dataset.
 *
 * @param dataSet    documents to convert
 * @param vocabulary the vocabulary defining the word_i attributes
 * @param fileName   name used for both the dataset and the output file
 * @return the dataset that was written
 */
public static Instances convertToArff(List<Document> dataSet, List<String> vocabulary, String fileName) {
    int dataSetSize = dataSet.size();

    /* Create features */
    ArrayList<Attribute> attributes = new ArrayList<>();
    for (int i = 0; i < vocabulary.size(); i++) {
        attributes.add(new Attribute("word_" + i));
    }
    Attribute classAttribute = new Attribute("Class");
    attributes.add(classAttribute);

    /* Add examples */
    System.out.println("Building instances...");
    Instances trainingDataSet = new Instances(fileName, attributes, 0);
    for (int k = 0; k < dataSetSize; k++) {
        Document document = dataSet.get(k);
        Instance example = new DenseInstance(attributes.size());
        // Feature i = term frequency of vocabulary word i in this document.
        for (int i = 0; i < vocabulary.size(); i++) {
            example.setValue(i, Collections.frequency(document.getTerms(), vocabulary.get(i)));
        }
        example.setValue(classAttribute, document.getDocumentClass());
        trainingDataSet.add(example);
        int progress = (int) ((k * 100.0) / dataSetSize);
        System.out.printf("\rPercent completed: %3d%%", progress);
    }
    trainingDataSet.setClass(classAttribute);
    System.out.println();

    System.out.println("Writing to file ...");
    try {
        ArffSaver saver = new ArffSaver();
        saver.setInstances(trainingDataSet);
        saver.setFile(new File(fileName));
        saver.writeBatch();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return trainingDataSet;
}
/**
 * Fills {@code bounds} with {@code sampleSetSize + 1} evenly spaced cut
 * points spanning the attribute's numeric range: index 0 holds the lower
 * bound, index {@code sampleSetSize} the upper bound, and interior points
 * are stepped cumulatively by a fixed pace.
 *
 * @param bounds        output array of length at least sampleSetSize + 1
 * @param crntAttr      attribute supplying the numeric bounds
 * @param sampleSetSize number of intervals to divide the range into
 */
private static void uniBoundsGeneration(double[] bounds, Attribute crntAttr, int sampleSetSize){
    bounds[0] = crntAttr.getLowerNumericBound();
    bounds[sampleSetSize] = crntAttr.getUpperNumericBound();
    double pace = (bounds[sampleSetSize] - bounds[0]) / sampleSetSize;
    // Cumulative stepping (kept to preserve the original floating-point results).
    for (int j = 1; j < sampleSetSize; j++) {
        bounds[j] = bounds[j - 1] + pace;
    }
}
/**
 * Constructor for a numeric attribute; each numeric attribute is
 * represented by a term in the form x <= value < y.
 *
 * @param attributeIn      the Weka attribute this term refers to
 * @param attributeIndexIn index of the attribute within the dataset
 */
public Term(Attribute attributeIn, double attributeIndexIn){
    attribute = attributeIn;
    setAttributeIndex(attributeIndexIn);
    // Record the attribute's kind: TypeNumeric for numeric attributes, 0 otherwise.
    attributeType = attributeIn.isNumeric() ? TypeNumeric : 0;
}
/**
 * Creates a list of Weka Attributes covering every field of a TupleDesc.
 * The resulting list is suitable to pass to an Instances object.
 *
 * @param td the TupleDesc
 * @return the list of Attributes, one per field, in field order
 */
public static ArrayList<Attribute> tupleDescToAttributeList(TupleDesc td){
    // Delegate to the field-selecting overload with every index included.
    List<Integer> allFields = new ArrayList<>();
    for (int i = 0, n = td.numFields(); i < n; i++) {
        allFields.add(i);
    }
    return tupleDescToAttributeList(td, allFields);
}
public static void main(String[] args){ ArrayList<Attribute> atts = new ArrayList<Attribute>(); Properties p1 = new Properties(); p1.setProperty("range", "[0,1]"); ProtectedProperties prop1 = new ProtectedProperties(p1); Properties p2 = new Properties(); p2.setProperty("range", "[321,1E9]"); ProtectedProperties prop2 = new ProtectedProperties(p2); Properties p3 = new Properties(); p3.setProperty("range", "[1,30]"); ProtectedProperties prop3 = new ProtectedProperties(p3); ArrayList<String> attVals = new ArrayList<String>(); for (int i = 0; i < 5; i++) attVals.add("val" + (i+1)); atts.add(new Attribute("att1", prop1)); atts.add(new Attribute("att2", prop2)); atts.add(new Attribute("att3", prop3)); //atts.add(new Attribute("att4", attVals)); //Instances data = LHSInitializer.getMultiDimContinuous(atts, 10, false); //Instances data = LHSInitializer.getMultiDim(atts, 10, false); DDSSampler sampler = new DDSSampler(3); sampler.setCurrentRound(0); Instances data = sampler.sampleMultiDimContinuous(atts, 2, false); System.out.println(data); sampler.setCurrentRound(01); data = sampler.sampleMultiDimContinuous(atts, 2, false); System.out.println(data); sampler.setCurrentRound(2); data = sampler.sampleMultiDimContinuous(atts, 2, false); System.out.println(data); }
/**
 * Creates a list of Weka Attributes from a TupleDesc, one per selected
 * field. The resulting list is suitable to pass to an Instances object.
 * This does no validation on {@code fields}.
 *
 * @param td     the TupleDesc
 * @param fields indices identifying which fields should be included
 * @return the list of Attributes
 */
public static ArrayList<Attribute> tupleDescToAttributeList(TupleDesc td, List<Integer> fields){
    ArrayList<Attribute> attrs = new ArrayList<>(fields.size());
    for (int fieldIndex : fields) {
        attrs.add(fieldToAttribute(td, fieldIndex));
    }
    return attrs;
}
/**
 * Builds a nominal Weka attribute from the data-domain values of the given
 * field.
 *
 * @param field the field whose domain supplies the nominal values
 * @return a nominal Attribute over the distinct string values of the field
 */
private Attribute getNominalAttribute(Fields field) {
    // LinkedHashSet (fully qualified so no import change is needed) keeps
    // first-seen order: the original HashSet made nominal value indices
    // dependent on hash iteration order, hurting reproducibility across JVMs.
    Set<String> values = new java.util.LinkedHashSet<>();
    for (Object obj : dataDomain.get(field.name()))
        values.add(Conversion.getValueAsStr(obj));
    return new Attribute(field.name(), new ArrayList<>(values));
}
/**
 * Builds a nominal Weka attribute from the data-domain values of the given
 * field, converting each hex color into its nominal representation first.
 *
 * @param field the field whose domain supplies the color values
 * @return a nominal Attribute over the distinct nominal color values
 */
private Attribute getNominalColorAttribute(Fields field) {
    // LinkedHashSet (fully qualified so no import change is needed) keeps
    // first-seen order: the original HashSet made nominal value indices
    // dependent on hash iteration order, hurting reproducibility across JVMs.
    Set<String> values = new java.util.LinkedHashSet<>();
    for (Object obj : dataDomain.get(field.name()))
        values.add(Conversion.getHexColorAsNominal(obj.toString()));
    return new Attribute(field.name(), new ArrayList<>(values));
}
public static List<Pair<String, Attribute>> getStandardFeatureSet(Map<String, Set<Object>> dataDomain) { BaseFeaturesFactory factory = new BaseFeaturesFactory(dataDomain); List<Pair<String, Attribute>> featureVector = new ArrayList<>(); featureVector.add(new Pair(Fields.GOLDEN.name(), factory.getGolden())); featureVector.add(new Pair(Fields.UNIT_STATE.name(), factory.getUnitState())); featureVector.add(new Pair(Fields.TRUSTED_JUDGEMENTS.name(), factory.getTrustedJudgements())); // featureVector.add(new Pair(Fields.LAST_JUDGEMENT_TIME.name(), factory.getLastJudgementTime())); featureVector.add(new Pair(Fields.GENDER_CONFIDENCE.name(), factory.getGenderConfidence())); featureVector.add(new Pair(Fields.PROFILE_EXISTS.name(), factory.getProfileExists())); featureVector.add(new Pair(Fields.PROFILE_EXISTS_CONFIDENCE.name(), factory.getProfileExistsConfidence())); // featureVector.add(new Pair(Fields.PROFILE_CREATION_DATE.name(), factory.getProfileCreationDate())); // featureVector.add(new Pair(Fields.PROFILE_DESCRIPTION.name(), factory.getDescription())); featureVector.add(new Pair(Fields.FAVORITES_NUMBER.name(), factory.getFavoritesNumber())); featureVector.add(new Pair(Fields.GENDER_GOLDEN.name(), factory.getGenderGold())); featureVector.add(new Pair(Fields.LINK_COLOR.name(), factory.getLinkColor())); // featureVector.add(new Pair(Fields.USERNAME.name(), factory.getUserName())); // featureVector.add(new Pair(Fields.RETWEET_COUNT.name(), factory.getRetweetsCount())); featureVector.add(new Pair(Fields.SIDEBAR_COLOR.name(), factory.getSidebarColor())); featureVector.add(new Pair(Fields.TWEETS_COUNT.name(), factory.getTweetsCount())); // featureVector.add(new Pair(Fields.TWEET_LOCATION.name(), factory.getTweetLocation())); // featureVector.add(new Pair(Fields.USER_TIMEZONE.name(), factory.getUserTimezone())); // Class attribute should be last featureVector.add(new Pair(Fields.GENDER.name(), factory.getGender())); return featureVector; }
public static List<Pair<String, Attribute>> getExtendedFeatureSet(Map<String, Set<Object>> dataDomain) { List<Pair<String, Attribute>> featureVector = getStandardFeatureSet(dataDomain); ExtraFeaturesFactory extraFactory = new ExtraFeaturesFactory(dataDomain); // Location // featureVector.add(new Pair(ExtraFields.TWEET_COORDINATES_LATITUDE.name(), extraFactory.getTweetLatitude())); // featureVector.add(new Pair(ExtraFields.TWEET_COORDINATES_LONGITUDE.name(), extraFactory.getTweetLongitude())); // Text score featureVector.add(new Pair(ExtraFields.TWEET_MALE_FEMALE_WORDS_SCORE.name(), extraFactory.getTweetMaleFemaleWordsScore())); featureVector.add(new Pair(ExtraFields.DESCRIPTION_MALE_FEMALE_WORDS_SCORE.name(), extraFactory.getDescriptionMaleFemaleWordsScore())); // Tweet PMI featureVector.add(new Pair(ExtraFields.TWEET_TEXT_PMI_MALE.name(), extraFactory.getTweetTextPMIMale())); featureVector.add(new Pair(ExtraFields.TWEET_TEXT_PMI_FEMALE.name(), extraFactory.getTweetTextPMIFemale())); featureVector.add(new Pair(ExtraFields.TWEET_TEXT_PMI_BRAND.name(), extraFactory.getTweetTextPMIBrand())); featureVector.add(new Pair(ExtraFields.TWEET_TEXT_PMI_UNKNOWN.name(), extraFactory.getTweetTextPMIUnknown())); featureVector.add(new Pair(ExtraFields.TWEET_TEXT_GENDER_PREDICT.name(), extraFactory.getTweetTextGenderPrediction())); // User Description PMI featureVector.add(new Pair(ExtraFields.USER_DESC_PMI_MALE.name(), extraFactory.getUserDescriptionPMIMale())); featureVector.add(new Pair(ExtraFields.USER_DESC_PMI_FEMALE.name(), extraFactory.getUserDescriptionPMIFemale())); featureVector.add(new Pair(ExtraFields.USER_DESC_PMI_BRAND.name(), extraFactory.getUserDescriptionPMIBrand())); featureVector.add(new Pair(ExtraFields.USER_DESC_PMI_UNKNOWN.name(), extraFactory.getUserDescriptionPMIUnknown())); featureVector.add(new Pair(ExtraFields.USER_DESC_GENDER_PREDICT.name(), extraFactory.getUserDescriptionGenderPrediction())); // Prediction of gender based on first name extracted from 
username featureVector.add(new Pair(ExtraFields.FIRST_NAME_FROM_USERNAME_BASED_GENDER_PREDICT.name(), extraFactory.getFirstNameFromUserNameBasedGenderPrediction())); return featureVector; }
/**
 * Flattens an instance into a map from each non-class attribute to its
 * numeric value.
 *
 * @param ins the instance to convert
 * @return map of attribute to value for every attribute except the class
 */
public static Map<Attribute, Double> instanceToMap(Instance ins){
    HashMap<Attribute, Double> result = new HashMap<Attribute, Double>();
    for (Enumeration<Attribute> e = ins.enumerateAttributes(); e.hasMoreElements(); ) {
        Attribute att = e.nextElement();
        result.put(att, ins.value(att));
    }
    return result;
}
/**
 * Snapshots each attribute's {@code propKey} metadata property into a YAML
 * file named after the round/subround, then creates an empty "_OK" marker
 * file so a resumed run can tell the snapshot completed.
 *
 * @param props    attributes whose metadata property is saved
 * @param round    current round (part of the file name)
 * @param subround current subround (part of the file name)
 */
private void saveProps(ArrayList<Attribute> props, int round, int subround){
    try {
        // Typed map instead of a raw HashMap (getProperty returns String).
        HashMap<String, String> propMap = new HashMap<>();
        for (Attribute att : props) {
            propMap.put(att.name(), att.getMetadata().getProperty(propKey));
        }
        writeToYaml(resumeFolder + "/props_" + round + "_" + subround, propMap);
        File okMarker = new File(resumeFolder + "/props_" + round + "_" + subround + "_OK");
        okMarker.createNewFile();
    } catch (IOException e) {
        // Best-effort persistence: a failed snapshot is logged, not fatal.
        e.printStackTrace();
    }
}
/**
 * Creates a numeric Weka Attribute with name {@code name}.
 *
 * @param name name of attribute
 * @param type type of attribute. This is currently only required to enforce
 *             the *lack* of String support.
 * @return the new Attribute
 * @throws UnsupportedOperationException if {@code type} is neither INT_TYPE
 *         nor DOUBLE_TYPE
 */
public static Attribute fieldToAttribute(String name, Type type){
    if (!(type == Type.INT_TYPE || type == Type.DOUBLE_TYPE)) {
        // Include context in the exception — the original threw it bare,
        // which made failures hard to diagnose.
        throw new UnsupportedOperationException(
                "Unsupported field type " + type + " for attribute '" + name + "'");
    }
    return new Attribute(name);
}
/**
 * Converts a whole Weka dataset into a dense XGBoost DMatrix (class
 * attribute excluded from the features) and attaches the class values as
 * labels.
 *
 * @param instances dataset to convert; class attribute must be set
 * @return dense rowNum x (numAttributes-1) DMatrix with labels attached
 * @throws XGBoostError if DMatrix construction fails
 */
public static DMatrix instancesToDenseDMatrix(Instances instances) throws XGBoostError {
    int colNum = instances.numAttributes() - 1; // features only, class excluded
    int rowNum = instances.size();
    float[] data = new float[colNum * rowNum];
    float[] labels = new float[rowNum];
    int classAttrIndex = instances.classAttribute().index();

    int dataIndex = 0; // runs across all rows; each row appends exactly colNum values
    for (int i = 0; i < rowNum; i++) {
        Instance instance = instances.get(i);
        labels[i] = (float) instance.classValue();
        Enumeration<Attribute> attrs = instance.enumerateAttributes();
        while (attrs.hasMoreElements()) {
            Attribute attribute = attrs.nextElement();
            if (attribute.index() != classAttrIndex) {
                data[dataIndex++] = (float) instance.value(attribute);
            }
        }
    }

    DMatrix dMatrix = new DMatrix(data, rowNum, colNum);
    dMatrix.setLabel(labels);
    return dMatrix;
}
/**
 * Groups the data points by the nominal value of attribute 2 (the class
 * column) and builds one labeled XY chart series per class.
 *
 * @return one chart series per class label, with its data points attached
 */
private List<Series<Number, Number>> buildLabeledSeries() {
    List<XYChart.Series<Number, Number>> realSeries = new ArrayList<>();
    // NOTE(review): the class attribute index (2) is hard-coded — confirm it
    // matches the loaded dataset.
    Attribute irisClasses = data.attribute(2);
    data.stream()
        .collect(Collectors.groupingBy(d -> irisClasses.value((int) d.value(2))))
        .forEach((label, group) -> {
            XYChart.Series<Number, Number> series = new XYChart.Series<>();
            series.setName(label);
            group.stream().map(this::instancetoChartData).forEach(series.getData()::add);
            realSeries.add(series);
        });
    return realSeries;
}
/**
 * Generates {@code number + existedNum} LHS sample points over the
 * non-class attributes of {@code header}, then re-attaches the header's
 * class attribute as the last column.
 *
 * @param number     number of additional points wanted
 * @param existedNum number of points that already exist
 * @param header     dataset supplying the attribute definitions
 * @return sampled Instances with the class index set to the last attribute
 */
public Instances generateMore(int number, int existedNum, Instances header) {
    // Copy every non-class attribute out of the header.
    ArrayList<Attribute> localAtts = new ArrayList<Attribute>();
    for (Enumeration<Attribute> enu = header.enumerateAttributes(); enu.hasMoreElements(); ) {
        localAtts.add(enu.nextElement());
    }

    Instances samplePoints =
            LHSInitializer.getMultiDimContinuous(localAtts, number + existedNum, false);
    // Append the class attribute back as the final column.
    samplePoints.insertAttributeAt(header.classAttribute(), samplePoints.numAttributes());
    samplePoints.setClassIndex(samplePoints.numAttributes() - 1);
    return samplePoints;
}
/**
 * Extracts the nominal class labels of a dataset.
 *
 * @param instances dataset whose class attribute is read
 * @return the class attribute's values, in index order
 */
protected static String[] getClasses(Instances instances) {
    Attribute classAttribute = instances.classAttribute();
    int n = classAttribute.numValues();
    String[] classes = new String[n];
    for (int i = 0; i < n; i++) {
        classes[i] = classAttribute.value(i);
    }
    return classes;
}
public Instances getMultiDimContinuous(ArrayList<Attribute> atts, int sampleSetSize, boolean useMid, BestConf bestconf){ Instances retval = sampleMultiDimContinuous(atts, sampleSetSize, useMid), temp; while(retval.size()<sampleSetSize){ temp = sampleMultiDimContinuous(atts, sampleSetSize, useMid); retval.addAll(temp); } //make sure the set size is equal to the setting while(retval.size()>sampleSetSize) retval.remove(retval.size()-1); return retval; }
/**
 * Maps a Java value class onto a Weka attribute: numeric wrapper classes
 * become a numeric attribute; Boolean/String become a nominal attribute
 * backed by the boolean value list.
 *
 * @param name attribute name
 * @param c    value class to map
 * @return the attribute, or null when the class is not supported
 */
public static Attribute createWekaAttribute(String name, Class<Object> c) {
    boolean numeric = c.equals(Double.class) || c.equals(Float.class) || c.equals(Integer.class);
    if (numeric) {
        return new Attribute(name);
    }
    if (c.equals(Boolean.class) || c.equals(String.class)) {
        return new Attribute(name, createValueListBoolean());
    }
    return null; // NOTE(review): callers must handle null for unsupported classes
}
/**
 * Extracts one Branch2 per leaf of an M5P model tree. Each branch carries
 * the leaf's linear model plus, for every attribute tested on the path from
 * the leaf up to the root, the value range that path implies.
 *
 * @param model the trained M5P model tree
 * @return one branch descriptor per leaf
 */
private ArrayList<Branch2> getLeavesInfoForM5P(M5P model){
    ArrayList<Branch2> retval = new ArrayList<Branch2>();
    ArrayList<RuleNode> leafNodes = new ArrayList<RuleNode>();
    // returnLeaves fills the first element of the passed array with all leaves.
    model.getM5RootNode().returnLeaves(new ArrayList[]{leafNodes});
    for(RuleNode leaf : leafNodes){
        Branch2 branch = new Branch2();
        // Each leaf owns exactly one pre-constructed linear model.
        ArrayList<PreConstructedLinearModel> lmodel = new ArrayList<PreConstructedLinearModel>();
        lmodel.add(leaf.getModel());
        branch.setLinearModels(lmodel);
        Map<Attribute,Range<Double>> rangeMap = branch.getRangeMap();
        // Walk from the leaf up to the root, recording the constraint imposed
        // at each split: left child => att <= splitVal, right child => att > splitVal.
        RuleNode parent = leaf, child;
        while(parent.parentNode()!=null){
            child = parent;
            parent = parent.parentNode();
            Attribute att = this.labeledInstances.attribute(parent.splitAtt());
            Range<Double> previous = null;
            if(parent.leftNode()==child)
                previous = rangeMap.put(att,Range.atMost(parent.splitVal()));
            else
                previous = rangeMap.put(att, Range.greaterThan(parent.splitVal()));
            //the attribute is visited previously: intersect the constraint just
            //stored with the one recorded deeper in the tree
            if(previous!=null){
                previous = rangeMap.get(att).intersection(previous);
                rangeMap.put(att, previous);
            }
        }
        retval.add(branch);
    }
    return retval;
}
@Override public Instances collectPerfs(Instances samplePoints, String perfAttName) { Instances retVal = null; if(samplePoints.attribute(perfAttName) == null){ Attribute performance = new Attribute(perfAttName); samplePoints.insertAttributeAt(performance, samplePoints.numAttributes()); } File perfFolder = new File(perfsfilepath); int tot=0; if(perfFolder.exists()){ //let's get all the name set for the sample points Iterator<Instance> itr = samplePoints.iterator(); TreeSet<String> insNameSet = new TreeSet<String>(); HashMap<String, Integer> mapping = new HashMap<String, Integer>(); int pos=0; while(itr.hasNext()){ String mdstr = getMD5(itr.next()); insNameSet.add(mdstr); mapping.put(mdstr, new Integer(pos++)); } //now we collect File[] perfFiles = perfFolder.listFiles(new PerfsFileFilter(insNameSet)); tot = perfFiles.length; if(tot > 0) isInterrupt = true; for(int i=0;i<tot;i++){ Instance ins = samplePoints.get(mapping.get(perfFiles[i].getName())); double[] results = getPerf(perfFiles[i].getAbsolutePath()); if(results!=null){ ins.setValue(samplePoints.numAttributes()-1, results[0]); } } } retVal = samplePoints; retVal.setClassIndex(retVal.numAttributes()-1); System.out.println("Total number of collected performances is : "+tot); return retVal; }
/**
 * Shrinks each attribute's range around {@code center}: for every non-
 * performance dimension the new range is [center - minDis, center + minDis],
 * where minDis is the smallest distance (truncated to 3 decimal places)
 * from the center to any other instance in that dimension, clipped to the
 * attribute's original numeric bounds.
 *
 * @param previousSet previously sampled instances (incl. performance column)
 * @param center      the instance to scale the ranges around
 * @return one range-restricted Attribute per non-performance dimension
 */
private static ArrayList<Attribute> scaleDownMindists(Instances previousSet, Instance center){
    ArrayList<Attribute> localAtts = new ArrayList<Attribute>();
    int attNum = center.numAttributes();
    int pos = previousSet.attribute(PerformanceAttName).index();
    //traverse each dimension
    Enumeration<Instance> enu;
    double minDis;
    for(int i=0;i<attNum;i++){
        if(i==pos)
            continue;
        // Find the smallest per-dimension distance from the center to any
        // other instance, truncated to 3 decimal places.
        enu = previousSet.enumerateInstances();
        minDis = Double.MAX_VALUE;
        while(enu.hasMoreElements()){
            Instance ins = enu.nextElement();
            if(!ins.equals(center))
                minDis = Math.min((double)((int)(Math.abs(ins.value(i)-center.value(i))*1000))/1000.0, minDis);
        }
        //now we set the range
        Properties p1 = new Properties();
        double upper = center.value(i)+minDis, lower=center.value(i)-minDis;
        // Clip [lower, upper] to the attribute's numeric bounds: the sorted
        // detourSet holds {lower, upper, attLower, attUpper} (duplicates
        // collapse), and picking the middle elements intersects the intervals.
        TreeSet<Double> detourSet = new TreeSet<Double>();
        detourSet.add(upper);
        detourSet.add(lower);
        detourSet.add(previousSet.attribute(i).getUpperNumericBound());
        detourSet.add(previousSet.attribute(i).getLowerNumericBound());
        switch(detourSet.size()){
        case 1: // all four values coincide
            upper=lower=detourSet.first();
            break;
        case 2: // two distinct values: use them directly as the range
            upper = detourSet.last();
            lower = detourSet.first();
            break;
        case 3: // middle value serves as both ends
            upper=lower=detourSet.higher(detourSet.first());
            break;
        default://case 4: take the two middle values
            upper=detourSet.lower(detourSet.last());
            lower=detourSet.higher(detourSet.first());
            break;
        }
        p1.setProperty("range", "["+String.valueOf(lower)+","+String.valueOf(upper)+"]");
        ProtectedProperties prop1 = new ProtectedProperties(p1);
        localAtts.add(new Attribute(previousSet.attribute(i).name(), prop1));
    }
    return localAtts;
}
/**
 * Runs the actual experiments for every sample point whose performance is
 * still NaN: applies the point's attribute values as a configuration,
 * starts the test, records the measured performance (or -1 on failure) in
 * the last attribute, and persists the result. Exits the JVM after
 * {@code targetTestErrorNum} consecutive failed settings.
 *
 * @param samplePoints the sample points to test
 * @param perfAttName  name of the performance attribute (added if absent)
 * @return the same Instances with the class index set to the last attribute
 */
public Instances runExp(Instances samplePoints, String perfAttName){
    Instances retVal = null;
    // Add the performance column if this dataset does not have one yet.
    if(samplePoints.attribute(perfAttName) == null){
        Attribute performance = new Attribute(perfAttName);
        samplePoints.insertAttributeAt(performance, samplePoints.numAttributes());
    }
    int pos = samplePoints.numInstances();
    int count = 0; // consecutive failures so far
    for (int i = 0; i < pos; i++) {
        Instance ins = samplePoints.get(i);
        // Build the name->value configuration map for this sample point.
        HashMap hm = new HashMap();
        int tot = 0;
        for (int j = 0; j < ins.numAttributes(); j++) {
            hm.put(ins.attribute(j).name(), ins.value(ins.attribute(j)));
        }
        boolean testRet;
        // Only run points whose performance value has not been measured yet.
        if (Double.isNaN(ins.value(ins.attribute(ins.numAttributes() - 1)))) {
            testRet = this.startTest(hm, i, isInterrupt);
            double y = 0;
            if (!testRet) {// the setting does not work, we skip it
                y = -1;
                count++;
                // Too many consecutive failures: assume a systemic problem and bail out.
                if (count >= targetTestErrorNum) {
                    System.out.println("There must be somthing wrong with the system. Please check and restart.....");
                    System.exit(1);
                }
            } else {
                y = getPerformanceByType(performanceType);
                count = 0;
            }
            ins.setValue(samplePoints.numAttributes() - 1, y);
            // Persist each measured point so an interrupted run can resume.
            writePerfstoFile(ins);
        } else {
            continue;
        }
    }
    retVal = samplePoints;
    retVal.setClassIndex(retVal.numAttributes()-1);
    return retVal;
}
/**
 * Creates an attribute set backed by the given attributes.
 *
 * @param items the attributes forming this set
 */
public MarkovAttributeSet(Set<Attribute> items) {
    super(items);
}
/**
 * Varargs convenience constructor: collects the given attributes into a
 * HashSet and delegates to the Set-based constructor.
 *
 * @param items the attributes forming this set
 */
public MarkovAttributeSet(Attribute...items) {
    super((Set<Attribute>)CollectionUtil.addAll(new HashSet<Attribute>(), items));
}
/**
 * Shrinks each attribute's range around {@code center}: for every non-
 * performance dimension the new range is [center - minDis, center + minDis],
 * where minDis is the smallest distance (truncated to 2 decimal places)
 * from the center to any other instance in that dimension, clipped to the
 * attribute's original numeric bounds.
 *
 * NOTE(review): near-duplicate of scaleDownMindists, differing only in the
 * truncation precision (100 here vs 1000 there) — candidates for merging.
 *
 * @param previousSet previously sampled instances (incl. performance column)
 * @param center      the instance to scale the ranges around
 * @return one range-restricted Attribute per non-performance dimension
 */
public static ArrayList<Attribute> scaleDownDetour(Instances previousSet, Instance center){
    ArrayList<Attribute> localAtts = new ArrayList<Attribute>();
    int attNum = center.numAttributes();
    int pos = previousSet.attribute(PerformanceAttName).index();
    //traverse each dimension
    Enumeration<Instance> enu;
    double minDis;
    for(int i=0;i<attNum;i++){
        if(i==pos)
            continue;
        // Find the smallest per-dimension distance from the center to any
        // other instance, truncated to 2 decimal places.
        enu = previousSet.enumerateInstances();
        minDis = Double.MAX_VALUE;
        while(enu.hasMoreElements()){
            Instance ins = enu.nextElement();
            if(!ins.equals(center))
                minDis = Math.min((double)((int)(Math.abs(ins.value(i)-center.value(i))*100))/100.0, minDis);
        }
        //now we set the range
        Properties p1 = new Properties();
        double upper = center.value(i)+minDis, lower=center.value(i)-minDis;
        // Clip [lower, upper] to the attribute's numeric bounds: the sorted
        // detourSet holds {lower, upper, attLower, attUpper} (duplicates
        // collapse), and picking the middle elements intersects the intervals.
        TreeSet<Double> detourSet = new TreeSet<Double>();
        detourSet.add(upper);
        detourSet.add(lower);
        detourSet.add(previousSet.attribute(i).getUpperNumericBound());
        detourSet.add(previousSet.attribute(i).getLowerNumericBound());
        switch(detourSet.size()){
        case 1: // all four values coincide
            upper=lower=detourSet.first();
            break;
        case 2: // two distinct values: use them directly as the range
            upper = detourSet.last();
            lower = detourSet.first();
            break;
        case 3: // middle value serves as both ends
            upper=lower=detourSet.higher(detourSet.first());
            break;
        default://case 4: take the two middle values
            upper=detourSet.lower(detourSet.last());
            lower=detourSet.higher(detourSet.first());
            break;
        }
        p1.setProperty("range", "["+String.valueOf(lower)+","+String.valueOf(upper)+"]");
        ProtectedProperties prop1 = new ProtectedProperties(p1);
        localAtts.add(new Attribute(previousSet.attribute(i).name(), prop1));
    }
    return localAtts;
}