/**
 * Copies the instance but fills up its values based on the given array
 * of doubles. The copy has access to the same dataset.
 *
 * @param values the array with new values
 * @return the new instance
 */
@Override
public Instance copy(double[] values) {
  SparseInstance result = new SparseInstance(this.m_Weight, values);
  // Share the dataset reference so the copy resolves attributes identically.
  result.m_Dataset = m_Dataset;
  return result;
}
/**
 * Creates a copy of this instance whose values are replaced by the
 * supplied array of doubles. The copy shares this instance's dataset
 * reference.
 *
 * @param values the array with new values
 * @return the new instance
 */
public Instance copy(double[] values) {
  final SparseInstance copied = new SparseInstance(this.m_Weight, values);
  copied.m_Dataset = m_Dataset;
  return copied;
}
/**
 * Produces a shallow copy of this instance. The copy has access to the same
 * dataset. (If you want to make a copy that doesn't have access to the
 * dataset, use <code>new SparseInstance(instance)</code> instead.)
 *
 * @return the shallow copy
 */
@Override
public Object copy() {
  SparseInstance result = new SparseInstance(this);
  // The copy constructor does not attach the dataset (see javadoc above),
  // so re-attach it here to keep the copy linked to the same dataset.
  result.m_Dataset = m_Dataset;
  return result;
}
/**
 * Returns a shallow copy of this instance that remains attached to the
 * same dataset. To obtain a detached copy, call
 * <code>new SparseInstance(instance)</code> directly.
 *
 * @return the shallow copy
 */
@Override
public Object copy() {
  final SparseInstance shallow = new SparseInstance(this);
  shallow.m_Dataset = m_Dataset;
  return shallow;
}
// Build a sparse instance and set a single attribute value.
// NOTE(review): put(int, double, String) is not a java.util.Map method —
// presumably a Java-ML-style helper that sets attribute index 2 to 1.0
// under the label "myLabel"; confirm against the SparseInstance API in use.
Instance tmpInstance = new SparseInstance();
tmpInstance.put(2, 1.0, "myLabel");
/**
 * Creates a new {@link Instance} of the same concrete type as the given
 * prototype instance.
 *
 * @param typeProvider the instance from which the type for the new instance is determined
 * @param numAttributes number of attributes for the new instance
 * @return a new {@link Instance}
 */
public static Instance createInstance(Instance typeProvider, int numAttributes) {
  if (typeProvider instanceof SparseInstance) {
    return new SparseInstance(numAttributes);
  }
  if (typeProvider instanceof DenseInstance) {
    return new DenseInstance(numAttributes);
  }
  throw new MulanRuntimeException(
      String.format("Can not create a new Instance from supplied type '%s'.",
          typeProvider.getClass().getName()));
}
}
/**
 * Creates a new {@link Instance} of the same concrete type as the given
 * prototype, with the supplied weight and attribute values.
 *
 * @param typeProvider the instance from which the type for the new instance is determined
 * @param weight the weight of the new instance
 * @param attrValues attribute values for the new instance
 * @return a new {@link Instance}
 */
public static Instance createInstance(Instance typeProvider, double weight, double[] attrValues) {
  if (typeProvider instanceof SparseInstance) {
    return new SparseInstance(weight, attrValues);
  }
  if (typeProvider instanceof DenseInstance) {
    return new DenseInstance(weight, attrValues);
  }
  throw new MulanRuntimeException(
      String.format("Can not create a new Instance from supplied type '%s'.",
          typeProvider.getClass().getName()));
}
/**
 * Converts a Java-ML instance into a Weka instance bound to the shared
 * dataset {@code wData}.
 *
 * <p>When {@code classSet} is true, one extra slot is reserved at the end of
 * the value array for the class attribute; that slot is left at 0.0 here and
 * the class is instead set symbolically via {@code setClassValue(String)}
 * below. NOTE(review): this assumes wData's class attribute accepts the
 * string form of {@code inst.classValue()} — confirm against the dataset.
 *
 * @param inst the Java-ML instance to convert
 * @return the converted Weka instance (sparse if the source was sparse)
 */
public Instance instanceToWeka(net.sf.javaml.core.Instance inst) {
  double[] values = new double[classSet ? inst.noAttributes() + 1 : inst.noAttributes()];
  // System.arraycopy(i.values().t.toArray(), 0, values, 0, classSet ?
  // values.length - 1 : values.length);
  // Copy each attribute value; the final (class) slot is skipped when
  // classSet is true.
  for (int i = 0; i < (classSet ? values.length - 1 : values.length); i++) {
    values[i] = inst.get(i);
  }
  // if (classSet)
  // values[values.length - 1] = inst.classValue();
  Instance wI = null;
  // Preserve sparseness of the source. NOTE(review): instantiating
  // weka.core.Instance directly only compiles against Weka 3.6-era APIs,
  // where Instance is a concrete class rather than an interface.
  if (inst instanceof net.sf.javaml.core.SparseInstance)
    wI = new SparseInstance(1, values);
  else
    wI = new Instance(1, values);
  wI.setDataset(wData);
  if (inst.classValue() != null) {
    wI.setClassValue(inst.classValue().toString());
  }
  return wI;
}
// Build a sparse instance with two non-zero attribute values and assign its
// class label.
// NOTE(review): put(int, double) suggests a map-backed (Java-ML style)
// SparseInstance rather than Weka's — confirm which API is in scope.
Instance tmpInstance = new SparseInstance();
tmpInstance.put(2, 1.0);
tmpInstance.put(4, 2.2);
tmpInstance.setClassValue("positive");
public static Instances instancesFromDataMap(DataMap datamap){ Instances instances = null; FastVector attributes = createFastVector(datamap.getFeatures(),datamap.getDataMap().keySet()); int numfeatures = attributes.size(); instances = new Instances("Instances",attributes,datamap.numDocuments()); //for each author... for (String author : datamap.getDataMap().keySet()){ ConcurrentHashMap<String,DocumentData> authormap = datamap.getDataMap().get(author); //for each document... for (String doctitle : authormap.keySet()){ Instance instance = new SparseInstance(numfeatures); ConcurrentHashMap<Integer,FeatureData> documentData = authormap.get(doctitle).getDataValues(); //for each index we have a value for for (Integer index : documentData.keySet()){ instance.setValue((Attribute)attributes.elementAt(index), documentData.get(index).getValue()); } instance.setValue((Attribute)attributes.elementAt(attributes.size()-1), author); instances.add(instance); } } return instances; }
/**
 * Convert a single instance over. The converted instance is added to the end
 * of the output queue.
 *
 * @param instance the instance to convert
 * @throws Exception if something goes wrong
 */
protected void convertInstance(Instance instance) throws Exception {
  // Make copy and set weight to one so the partition generator sees an
  // unweighted view; the original instance (and its weight) stays untouched.
  Instance cp = (Instance) instance.copy();
  cp.setWeight(1.0);
  // Set up values: one slot per output attribute, prefilled with the
  // membership values computed from the unweighted copy. Slots beyond
  // vals.length remain 0.0.
  double[] instanceVals = new double[outputFormatPeek().numAttributes()];
  double[] vals = m_partitionGenerator.getMembershipValues(cp);
  System.arraycopy(vals, 0, instanceVals, 0, vals.length);
  // Carry the class value through as the last output attribute, if present.
  if (instance.classIndex() >= 0) {
    instanceVals[instanceVals.length - 1] = instance.classValue();
  }
  // The output instance keeps the ORIGINAL weight, not the 1.0 of the copy.
  push(new SparseInstance(instance.weight(), instanceVals));
}
private weka.core.Instance tcInstanceToMekaInstance(Instance instance, Instances trainingData, List<String> allClassLabels) throws Exception { AttributeStore attributeStore = new AttributeStore(); List<Attribute> outcomeAttributes = createOutcomeAttributes(allClassLabels); // in Meka, class label attributes have to go on top for (Attribute attribute : outcomeAttributes) { attributeStore.addAttributeAtBegin(attribute.name(), attribute); } for (int i = outcomeAttributes.size(); i < trainingData.numAttributes(); i++) { attributeStore.addAttribute(trainingData.attribute(i).name(), trainingData.attribute(i)); } double[] featureValues = getFeatureValues(attributeStore, instance); SparseInstance sparseInstance = new SparseInstance(1.0, featureValues); trainingData.setClassIndex(outcomeAttributes.size()); sparseInstance.setDataset(trainingData); return sparseInstance; }
/** * Convert a single instance over. The converted instance is added to the end * of the output queue. * * @param instance the instance to convert * @throws Exception if something goes wrong */ protected void convertInstance(Instance instance) throws Exception { // Make copy and set weight to one Instance cp = (Instance) instance.copy(); cp.setWeight(1.0); // Set up values double[] instanceVals = new double[outputFormatPeek().numAttributes()]; double[] vals = m_partitionGenerator.getMembershipValues(cp); System.arraycopy(vals, 0, instanceVals, 0, vals.length); if (instance.classIndex() >= 0) { instanceVals[instanceVals.length - 1] = instance.classValue(); } push(new SparseInstance(instance.weight(), instanceVals)); }
/**
 * Converts a TC instance into a Meka-compatible Weka instance.
 *
 * @param instance the TC instance to convert
 * @param trainingData training data supplying the feature attributes
 * @param allClassLabels all class labels for the outcome attributes
 * @return the converted sparse instance, attached to trainingData
 * @throws Exception if feature extraction fails
 */
private weka.core.Instance tcInstanceToMekaInstance(Instance instance, Instances trainingData,
    List<String> allClassLabels) throws Exception {
  AttributeStore attributeStore = new AttributeStore();
  List<Attribute> outcomeAttributes = createOutcomeAttributes(allClassLabels);
  // in Meka, class label attributes have to go on top
  for (Attribute attribute : outcomeAttributes) {
    attributeStore.addAttributeAtBegin(attribute.name(), attribute);
  }
  // The remaining training-data attributes follow the label attributes.
  for (int i = outcomeAttributes.size(); i < trainingData.numAttributes(); i++) {
    attributeStore.addAttribute(trainingData.attribute(i).name(), trainingData.attribute(i));
  }
  double[] featureValues = getFeatureValues(attributeStore, instance);
  SparseInstance sparseInstance = new SparseInstance(1.0, featureValues);
  // Side effect: the class index on trainingData is (re)set to the label count.
  trainingData.setClassIndex(outcomeAttributes.size());
  sparseInstance.setDataset(trainingData);
  return sparseInstance;
}
public weka.core.Instance tcInstanceToWekaInstance(Instance instance, Instances trainingData, List<String> allClasses, boolean isRegressionExperiment) throws Exception { AttributeStore attributeStore = new AttributeStore(); // outcome attribute is last and will be ignored for (int i = 0; i < trainingData.numAttributes() - 1; i++) { attributeStore.addAttribute(trainingData.attribute(i).name(), trainingData.attribute(i)); } // add outcome attribute Attribute outcomeAttribute = createOutcomeAttribute(allClasses, isRegressionExperiment); attributeStore.addAttribute(outcomeAttribute.name(), outcomeAttribute); double[] featureValues = getFeatureValues(attributeStore, instance); SparseInstance sparseInstance = new SparseInstance(1.0, featureValues); sparseInstance.setDataset(trainingData); return sparseInstance; }
/**
 * Converts a TC instance into a Weka instance for single-label learning.
 *
 * @param instance the TC instance to convert
 * @param trainingData training data supplying the feature attributes
 * @param allClasses all class values for the outcome attribute
 * @param isRegressionExperiment whether the outcome is numeric
 * @return the converted sparse instance, attached to trainingData
 * @throws Exception if feature extraction fails
 */
public weka.core.Instance tcInstanceToWekaInstance(Instance instance, Instances trainingData,
    List<String> allClasses, boolean isRegressionExperiment) throws Exception {
  AttributeStore attributeStore = new AttributeStore();
  // outcome attribute is last and will be ignored
  for (int i = 0; i < trainingData.numAttributes() - 1; i++) {
    attributeStore.addAttribute(trainingData.attribute(i).name(), trainingData.attribute(i));
  }
  // add outcome attribute
  Attribute outcomeAttribute = createOutcomeAttribute(allClasses, isRegressionExperiment);
  attributeStore.addAttribute(outcomeAttribute.name(), outcomeAttribute);
  double[] featureValues = getFeatureValues(attributeStore, instance);
  // Fixed weight of 1.0 for converted instances.
  SparseInstance sparseInstance = new SparseInstance(1.0, featureValues);
  sparseInstance.setDataset(trainingData);
  return sparseInstance;
}
/**
 * Convert a single instance over. Selected attributes only are transferred.
 * The converted instance is added to the end of the output queue.
 *
 * @param instance the instance to convert
 * @throws Exception if something goes wrong
 */
protected void convertInstance(Instance instance) throws Exception {
  double[] newVals = new double[getOutputFormat().numAttributes()];
  if (m_ASEvaluator instanceof AttributeTransformer) {
    // Transformer-based evaluators remap the attribute space first; read the
    // selected values from the transformed instance.
    Instance tempInstance = ((AttributeTransformer) m_ASEvaluator).convertInstance(instance);
    for (int i = 0; i < m_SelectedAttributes.length; i++) {
      int current = m_SelectedAttributes[i];
      newVals[i] = tempInstance.value(current);
    }
  } else {
    // Plain selection: copy the chosen attribute values straight across.
    for (int i = 0; i < m_SelectedAttributes.length; i++) {
      int current = m_SelectedAttributes[i];
      newVals[i] = instance.value(current);
    }
  }
  // Preserve the input's representation (sparse vs dense) and its weight.
  if (instance instanceof SparseInstance) {
    push(new SparseInstance(instance.weight(), newVals));
  } else {
    push(new DenseInstance(instance.weight(), newVals));
  }
}
/**
 * Copies only the selected attributes of one instance into a new instance
 * and appends it to the output queue.
 *
 * @param instance the instance to convert
 * @throws Exception if something goes wrong
 */
protected void convertInstance(Instance instance) throws Exception {
  // Read values from the transformed instance when the evaluator remaps the
  // attribute space, otherwise from the input itself.
  Instance source = instance;
  if (m_ASEvaluator instanceof AttributeTransformer) {
    source = ((AttributeTransformer) m_ASEvaluator).convertInstance(instance);
  }
  double[] selectedVals = new double[getOutputFormat().numAttributes()];
  for (int pos = 0; pos < m_SelectedAttributes.length; pos++) {
    selectedVals[pos] = source.value(m_SelectedAttributes[pos]);
  }
  // Keep the input's representation (sparse vs dense) and its weight.
  Instance out = (instance instanceof SparseInstance)
      ? new SparseInstance(instance.weight(), selectedVals)
      : new DenseInstance(instance.weight(), selectedVals);
  push(out);
}
/**
 * Merges this instance with the given instance and returns the result.
 * Dataset is set to null.
 *
 * @param inst the instance to be merged with this one
 * @return the merged instances
 */
@Override
public Instance mergeInstance(Instance inst) {
  // The merged instance stores only the stored (non-default) entries of both
  // operands: this instance's entries first, then inst's entries with their
  // indices shifted past this instance's attributes.
  double[] values = new double[numValues() + inst.numValues()];
  int[] indices = new int[numValues() + inst.numValues()];
  int m = 0;
  for (int j = 0; j < numValues(); j++, m++) {
    values[m] = valueSparse(j);
    indices[m] = index(j);
  }
  for (int j = 0; j < inst.numValues(); j++, m++) {
    values[m] = inst.valueSparse(j);
    // Offset by this instance's attribute count so inst's attributes follow.
    indices[m] = numAttributes() + inst.index(j);
  }
  // The merged instance gets a fixed weight of 1.0 and no dataset.
  return new SparseInstance(1.0, values, indices, numAttributes() + inst.numAttributes());
}
/**
 * Produces a new instance containing this instance's stored values followed
 * by those of {@code inst}, with the latter's indices shifted past this
 * instance's attributes. The result has weight 1.0 and no dataset attached.
 *
 * @param inst the instance to be merged with this one
 * @return the merged instances
 */
@Override
public Instance mergeInstance(Instance inst) {
  int total = numValues() + inst.numValues();
  double[] mergedValues = new double[total];
  int[] mergedIndices = new int[total];
  int pos = 0;
  // This instance's stored entries come first, at their original indices.
  for (int i = 0; i < numValues(); i++) {
    mergedValues[pos] = valueSparse(i);
    mergedIndices[pos] = index(i);
    pos++;
  }
  // Then inst's stored entries, offset by this instance's attribute count.
  int offset = numAttributes();
  for (int i = 0; i < inst.numValues(); i++) {
    mergedValues[pos] = inst.valueSparse(i);
    mergedIndices[pos] = offset + inst.index(i);
    pos++;
  }
  return new SparseInstance(1.0, mergedValues, mergedIndices, offset + inst.numAttributes());
}