/**
 * Reads the next four bytes of the wrapped stream as an IEEE-754 float.
 *
 * @return the float read from the underlying stream
 * @throws IOException if the underlying stream fails or is exhausted
 * @see java.io.DataInput#readFloat()
 */
@Override
public float readFloat() throws IOException {
  return fis.readFloat();
}
/**
 * Delegates the read of a single float value to the wrapped stream.
 *
 * @return the next float from the underlying stream
 * @throws IOException if reading from the underlying stream fails
 * @see java.io.DataInput#readFloat()
 */
@Override
public float readFloat() throws IOException {
  final float value = fis.readFloat();
  return value;
}
/**
 * Forwards to the underlying stream to read one float.
 *
 * @return the float obtained from the delegate stream
 * @throws IOException propagated from the delegate stream
 * @see java.io.DataInput#readFloat()
 */
@Override
public float readFloat() throws IOException {
  return this.fis.readFloat();
}
/**
 * Reads a single float by delegating to the wrapped {@code in} stream.
 *
 * @return the float read from the delegate
 * @throws IOException if the delegate read fails
 */
public final float readFloat() throws IOException {
  final float value = in.readFloat();
  return value;
}
/**
 * Delegates {@link java.io.DataInput#readFloat()} to the wrapped stream.
 *
 * @return the next float from the delegate stream
 * @throws IOException propagated from the delegate stream
 */
public final float readFloat() throws IOException {
  return in.readFloat();
}
/**
 * Opens the file at {@code path} on the given file system and reads a single
 * float from the beginning of that file.
 *
 * @param fs   file system holding the property file
 * @param path path of the file whose leading bytes encode the float
 * @return the float stored at the start of the file
 * @throws RuntimeException if the file cannot be opened or read; the
 *         underlying exception is attached as the cause
 */
public static float readFloat(FileSystem fs, String path) {
  // try-with-resources closes the stream on every path; the original leaked
  // the stream whenever open()/readFloat() threw before close() was reached.
  try (FSDataInputStream in = fs.open(new Path(path))) {
    return in.readFloat();
  } catch (Exception e) {
    // Preserve the cause — the original dropped it, hiding the real failure.
    throw new RuntimeException("Unable to read property at " + path, e);
  }
}
public static float[] loadDenseFloatRowFromPartition(FSDataInputStream input, MatrixPartitionMeta partMeta, int rowId) throws IOException { //RowOffset rowOffset = partMeta.getRowMetas().get(rowId); //input.seek(rowOffset.getOffset()); Preconditions.checkState(input.readInt() == rowId); int num = (int) (partMeta.getEndCol() - partMeta.getStartCol()); float[] row = new float[num]; for (int i = 0; i < num; i++) { row[i] = input.readFloat(); } return row; }
// Folds the leading float of every part file under outm into `mass` via
// sumLogProbs (presumably these floats are log-probabilities — confirm with writer).
// NOTE(review): fin leaks if readFloat()/sumLogProbs throws — consider
// try-with-resources. Loop body continues beyond this excerpt.
for (FileStatus f : fs.listStatus(new Path(outm))) { FSDataInputStream fin = fs.open(f.getPath()); mass = sumLogProbs(mass, fin.readFloat()); fin.close();
// Reads one float from the head of each part file in outm and combines it into
// `mass` with sumLogProbs (log-domain accumulation, by the helper's name — verify).
// NOTE(review): the stream is not closed if an exception is thrown before
// fin.close(); loop body extends past this excerpt.
for (FileStatus f : fs.listStatus(new Path(outm))) { FSDataInputStream fin = fs.open(f.getPath()); mass = sumLogProbs(mass, fin.readFloat()); fin.close();
public static Int2FloatOpenHashMap loadSparseFloatRowFromPartition(FSDataInputStream input, MatrixPartitionMeta partMeta, int rowId) throws IOException { //RowOffset rowOffset = partMeta.getRowMetas().get(rowId); //input.seek(rowOffset.getOffset()); Preconditions.checkState(input.readInt() == rowId); int num = input.readInt(); Int2FloatOpenHashMap row = new Int2FloatOpenHashMap(); for (int i = 0; i < num; i++) { row.put(input.readInt(), input.readFloat()); } return row; }
// For each part file under outm: open it, read its leading float, and fold it
// into `mass` using sumLogProbs.
// NOTE(review): fin is leaked when readFloat()/sumLogProbs throws; the for-loop
// body is not closed within this excerpt.
for (FileStatus f : fs.listStatus(new Path(outm))) { FSDataInputStream fin = fs.open(f.getPath()); mass = sumLogProbs(mass, fin.readFloat()); fin.close();
// Accumulates the first float of every file in outm into `mass` via sumLogProbs.
// NOTE(review): no try/finally around fin — the stream leaks on exception;
// consider try-with-resources. Loop continues beyond the visible line.
for (FileStatus f : fs.listStatus(new Path(outm))) { FSDataInputStream fin = fs.open(f.getPath()); mass = sumLogProbs(mass, fin.readFloat()); fin.close();
/**
 * Loads a dense float partition into the model.
 *
 * <p>Stream layout: int row count, then per row an int row id followed by one
 * float per column in the partition's [startCol, endCol) range; values are
 * written directly into the model's row arrays.
 *
 * @param model    destination model whose rows are filled in place
 * @param input    partition input stream
 * @param partMeta metadata supplying the partition's column range
 * @throws IOException if reading from the stream fails
 */
private static void loadDenseFloatPartition(DenseFloatModel model, FSDataInputStream input,
    MatrixPartitionMeta partMeta) throws IOException {
  final int rowCount = input.readInt();
  final int startCol = (int) partMeta.getStartCol();
  final int endCol = (int) partMeta.getEndCol();
  for (int r = 0; r < rowCount; r++) {
    int rowId = input.readInt();
    float[] dest = model.getRow(rowId);
    for (int col = startCol; col < endCol; col++) {
      dest[col] = input.readFloat();
    }
  }
}
/**
 * Converts a dense float partition from binary form to the line format
 * produced by {@code lineConvert}.
 *
 * <p>Stream layout: int row count, then per row an int row id followed by one
 * float per column in [startCol, endCol); each value is emitted with its
 * column index via {@code convertFloat}.
 *
 * @param input       binary partition input
 * @param output      destination stream for the converted lines
 * @param lineConvert converter that formats row indices and values
 * @param partMeta    metadata supplying the partition's column range
 * @throws IOException if reading or writing fails
 */
private static void convertDenseFloatPartition(FSDataInputStream input,
    FSDataOutputStream output, ModelLineConvert lineConvert, MatrixPartitionMeta partMeta)
    throws IOException {
  final int rowCount = input.readInt();
  final int startCol = (int) partMeta.getStartCol();
  final int endCol = (int) partMeta.getEndCol();
  for (int r = 0; r < rowCount; r++) {
    lineConvert.convertRowIndex(output, input.readInt());
    for (int col = startCol; col < endCol; col++) {
      lineConvert.convertFloat(output, col, input.readFloat());
    }
  }
}
/**
 * Converts a sparse float partition from binary form to the line format
 * produced by {@code lineConvert}.
 *
 * <p>Stream layout: int row count, then per row an int row id, an int non-zero
 * count, and that many (int column, float value) pairs.
 *
 * @param input       binary partition input
 * @param output      destination stream for the converted lines
 * @param lineConvert converter that formats row indices and values
 * @param partMeta    partition metadata (part of the converter call contract)
 * @throws IOException if reading or writing fails
 */
private static void convertSparseFloatPartition(FSDataInputStream input,
    FSDataOutputStream output, ModelLineConvert lineConvert, MatrixPartitionMeta partMeta)
    throws IOException {
  final int rowCount = input.readInt();
  for (int r = 0; r < rowCount; r++) {
    lineConvert.convertRowIndex(output, input.readInt());
    int nonZeros = input.readInt();
    for (int k = 0; k < nonZeros; k++) {
      lineConvert.convertFloat(output, input.readInt(), input.readFloat());
    }
  }
}
/**
 * Reads a serialized naive Bayes model from {@code output/naiveBayesModel.bin}.
 *
 * <p>File layout: float alphaI, the per-feature weight vector, the per-label
 * weight vector, the per-label theta normalizer, then one weight vector per
 * label forming the label-by-feature matrix.
 *
 * @param output directory containing {@code naiveBayesModel.bin}
 * @param conf   Hadoop configuration used to resolve the file system
 * @return the validated, materialized model
 * @throws IOException if the model file cannot be opened or read
 */
public static NaiveBayesModel materialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  Vector weightsPerLabel;
  Vector perLabelThetaNormalizer;
  Vector weightsPerFeature;
  Matrix weightsPerLabelAndFeature;
  float alphaI;
  // try-with-resources replaces the legacy try/finally + Closeables.close
  // pattern; a failure on close now surfaces as the IOException this method
  // already declares instead of being swallowed.
  try (FSDataInputStream in = fs.open(new Path(output, "naiveBayesModel.bin"))) {
    alphaI = in.readFloat();
    weightsPerFeature = VectorWritable.readVector(in);
    weightsPerLabel = new DenseVector(VectorWritable.readVector(in));
    perLabelThetaNormalizer = new DenseVector(VectorWritable.readVector(in));
    weightsPerLabelAndFeature =
        new SparseRowMatrix(weightsPerLabel.size(), weightsPerFeature.size());
    for (int label = 0; label < weightsPerLabelAndFeature.numRows(); label++) {
      weightsPerLabelAndFeature.assignRow(label, VectorWritable.readVector(in));
    }
  }
  NaiveBayesModel model = new NaiveBayesModel(weightsPerLabelAndFeature, weightsPerFeature,
      weightsPerLabel, perLabelThetaNormalizer, alphaI);
  model.validate();
  return model;
}
/**
 * Reads a serialized naive Bayes model from {@code output/naiveBayesModel.bin}.
 *
 * <p>File layout: float alphaI, boolean complementary flag, the per-feature
 * weight vector, the per-label weight vector, the per-label theta normalizer
 * (only when complementary), then one weight vector per label forming the
 * label-by-feature matrix.
 *
 * @param output directory containing {@code naiveBayesModel.bin}
 * @param conf   Hadoop configuration used to resolve the file system
 * @return the validated, materialized model
 * @throws IOException if the model file cannot be opened or read
 */
public static NaiveBayesModel materialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  Vector weightsPerLabel;
  Vector perLabelThetaNormalizer = null;
  Vector weightsPerFeature;
  Matrix weightsPerLabelAndFeature;
  float alphaI;
  boolean isComplementary;
  // try-with-resources replaces the legacy try/finally + Closeables.close
  // pattern, matching the modern variant of this loader; close failures now
  // propagate as the IOException this method already declares.
  try (FSDataInputStream in = fs.open(new Path(output, "naiveBayesModel.bin"))) {
    alphaI = in.readFloat();
    isComplementary = in.readBoolean();
    weightsPerFeature = VectorWritable.readVector(in);
    weightsPerLabel = new DenseVector(VectorWritable.readVector(in));
    if (isComplementary) {
      perLabelThetaNormalizer = new DenseVector(VectorWritable.readVector(in));
    }
    weightsPerLabelAndFeature =
        new SparseRowMatrix(weightsPerLabel.size(), weightsPerFeature.size());
    for (int label = 0; label < weightsPerLabelAndFeature.numRows(); label++) {
      weightsPerLabelAndFeature.assignRow(label, VectorWritable.readVector(in));
    }
  }
  NaiveBayesModel model = new NaiveBayesModel(weightsPerLabelAndFeature, weightsPerFeature,
      weightsPerLabel, perLabelThetaNormalizer, alphaI, isComplementary);
  model.validate();
  return model;
}
/**
 * Deserializes a naive Bayes model from {@code output/naiveBayesModel.bin}.
 *
 * <p>Reads, in order: float alphaI, boolean complementary flag, the
 * per-feature weights, the per-label weights, the theta normalizer (present
 * only when complementary), and one row vector per label which together form
 * the label-by-feature weight matrix.
 *
 * @param output directory containing {@code naiveBayesModel.bin}
 * @param conf   Hadoop configuration used to resolve the file system
 * @return the validated, materialized model
 * @throws IOException if the model file cannot be opened or read
 */
public static NaiveBayesModel materialize(Path output, Configuration conf) throws IOException {
  FileSystem fs = output.getFileSystem(conf);
  float alphaI;
  boolean isComplementary;
  Vector featureWeights;
  Vector labelWeights;
  Vector thetaNormalizer = null;
  Matrix labelFeatureWeights;
  try (FSDataInputStream modelStream = fs.open(new Path(output, "naiveBayesModel.bin"))) {
    alphaI = modelStream.readFloat();
    isComplementary = modelStream.readBoolean();
    featureWeights = VectorWritable.readVector(modelStream);
    labelWeights = new DenseVector(VectorWritable.readVector(modelStream));
    // The theta normalizer is serialized only for complementary models.
    if (isComplementary) {
      thetaNormalizer = new DenseVector(VectorWritable.readVector(modelStream));
    }
    labelFeatureWeights = new SparseRowMatrix(labelWeights.size(), featureWeights.size());
    for (int label = 0; label < labelFeatureWeights.numRows(); label++) {
      labelFeatureWeights.assignRow(label, VectorWritable.readVector(modelStream));
    }
  }
  NaiveBayesModel model = new NaiveBayesModel(labelFeatureWeights, featureWeights,
      labelWeights, thetaNormalizer, alphaI, isComplementary);
  model.validate();
  return model;
}
/**
 * Loads a sparse float partition into the model.
 *
 * <p>Stream layout: int row count, then per row an int row id, an int
 * non-zero count, and that many (int column, float value) pairs. The per-row
 * map capacity is estimated by scaling the partition's non-zero count to the
 * model's full column range.
 *
 * @param model    destination model whose rows receive the entries
 * @param input    partition input stream
 * @param partMeta metadata supplying the partition id and column range
 * @throws IOException if reading from the stream fails
 */
private static void loadSparseFloatPartition(SparseFloatModel model, FSDataInputStream input,
    MatrixPartitionMeta partMeta) throws IOException {
  final int rowCount = input.readInt();
  for (int r = 0; r < rowCount; r++) {
    int rowId = input.readInt();
    int nnz = input.readInt();
    // Estimated total non-zeros across the whole row, extrapolated from this
    // partition's density. NOTE(review): nnz * model.col may overflow if both
    // are large ints — confirm model.col's type/range.
    int totalNNZ = (int) (nnz * (model.col) / (partMeta.getEndCol() - partMeta.getStartCol()));
    Int2FloatOpenHashMap row = model.getRow(rowId, partMeta.getPartId(), totalNNZ);
    for (int k = 0; k < nnz; k++) {
      row.put(input.readInt(), input.readFloat());
    }
  }
}