GuaguaInputSplit.isMaster

How to use the isMaster method in ml.shifu.guagua.hadoop.io.GuaguaInputSplit

Best Java code snippets using ml.shifu.guagua.hadoop.io.GuaguaInputSplit.isMaster

origin: ml.shifu/guagua-yarn, ShifuML/guagua

@Override
public void write(DataOutput out) throws IOException {
  out.writeBoolean(this.isMaster());
  if(!this.isMaster()) {
    int length = this.getFileSplits().length;
    out.writeInt(length);
    for(int i = 0; i < length; i++) {
      this.getFileSplits()[i].write(out);
    }
  }
}
origin: ShifuML/guagua, ml.shifu/guagua-mapreduce

@Override
public void write(DataOutput out) throws IOException {
  out.writeBoolean(this.isMaster());
  if(!this.isMaster()) {
    int length = this.getFileSplits().length;
    out.writeInt(length);
    for(int i = 0; i < length; i++) {
      this.getFileSplits()[i].write(out);
    }
    if(this.extensions != null) {
      out.writeInt(extensions.length);
      for(int i = 0; i < extensions.length; i++) {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        ObjectOutput ext = null;
        try {
          ext = new ObjectOutputStream(bos);
          ext.writeObject(extensions[i]);
          byte[] bytes = bos.toByteArray();
          out.writeInt(bytes.length);
          out.write(bytes);
        } finally {
          IOUtils.closeQuietly(bos);
        }
      }
    } else {
      out.writeInt(0);
    }
  }
}
origin: ml.shifu/guagua-yarn, ml.shifu/guagua-mapreduce, ShifuML/guagua

/**
 * For the master split, use <code>Long.MAX_VALUE</code> as its length so that it becomes the first task of the
 * Hadoop job. This makes it convenient for users to find the master task in the Hadoop UI.
 */
@Override
public long getLength() throws IOException, InterruptedException {
  if(isMaster()) {
    return Long.MAX_VALUE;
  }
  long len = 0;
  for(FileSplit split: this.getFileSplits()) {
    len += split.getLength();
  }
  return len;
}
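
The Long.MAX_VALUE trick works because Hadoop's job submitter sorts the generated input splits by length, largest first, before scheduling, so the split that reports the largest length is launched as the first task. The sketch below only illustrates that ordering; the Comparator, the splits array, and the surrounding assumptions are hypothetical and are not Guagua or Hadoop source:

// Illustrative only: a sketch of the ordering Hadoop applies when submitting a job.
// Splits are sorted by getLength() in descending order, so the GuaguaInputSplit that
// reports Long.MAX_VALUE ends up first. 'splits' is a hypothetical InputSplit[] array;
// assumes imports of java.util.Arrays, java.util.Comparator, java.io.IOException and
// org.apache.hadoop.mapreduce.InputSplit.
Arrays.sort(splits, new Comparator<InputSplit>() {
  @Override
  public int compare(InputSplit a, InputSplit b) {
    try {
      return Long.compare(b.getLength(), a.getLength()); // descending by length
    } catch (IOException | InterruptedException e) {
      throw new RuntimeException(e);
    }
  }
});
// After sorting, splits[0] is the master split (isMaster() == true).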
origin: ShifuML/guagua, ml.shifu/guagua-yarn, ml.shifu/guagua-mapreduce

@Override
public void readFields(DataInput in) throws IOException {
  this.setMaster(in.readBoolean());
  if(!isMaster()) {
    int len = in.readInt();
    FileSplit[] splits = new FileSplit[len];
    for(int i = 0; i < len; i++) {
      splits[i] = new FileSplit(null, 0, 0, (String[]) null);
      splits[i].readFields(in);
    }
    this.setFileSplits(splits);
  }
}
origin: ShifuML/shifu

protected void addCrossValidationDataset(List<InputSplit> trainingSplit, JobContext context) throws IOException {
  List<InputSplit> trainingNoMaster = new ArrayList<InputSplit>();
  for(InputSplit split: trainingSplit) {
    GuaguaInputSplit guaguaInput = (GuaguaInputSplit) split;
    if(guaguaInput.isMaster()) {
      continue;
    }
    trainingNoMaster.add(guaguaInput);
  }
  List<List<FileSplit>> csSplits = this.getCrossValidationSplits(context, trainingNoMaster.size());
  for(int i = 0; i < csSplits.size(); i++) {
    List<FileSplit> oneInput = csSplits.get(i);
    GuaguaInputSplit guaguaInput = (GuaguaInputSplit) trainingNoMaster.get(i);
    int trainingSize = guaguaInput.getFileSplits().length;
    FileSplit[] finalSplits = (FileSplit[]) ArrayUtils.addAll(guaguaInput.getFileSplits(),
        oneInput.toArray(new FileSplit[0]));
    guaguaInput.setFileSplits(finalSplits);
    Boolean[] validationFlags = new Boolean[finalSplits.length];
    for(int j = 0; j < finalSplits.length; j++) {
      validationFlags[j] = (j >= trainingSize);
    }
    guaguaInput.setExtensions(validationFlags);
  }
  LOG.info("Training input split size is: {}.", trainingSplit.size());
  LOG.info("Validation input split size is {}.", csSplits.size());
}
origin: ShifuML/guagua, ml.shifu/guagua-mapreduce

@Override
protected void setup(Context context) throws java.io.IOException, InterruptedException {
  GuaguaInputSplit inputSplit = (GuaguaInputSplit) context.getInputSplit();
  this.setMaster(inputSplit.isMaster());
  if(this.isMaster()) {
    context.setStatus("Master initializing ...");
    this.setGuaguaService(new GuaguaMasterService<MASTER_RESULT, WORKER_RESULT>());
  } else {
    context.setStatus("Worker initializing ...");
    this.setGuaguaService(new GuaguaWorkerService<MASTER_RESULT, WORKER_RESULT>());
    List<GuaguaFileSplit> splits = new LinkedList<GuaguaFileSplit>();
    for(int i = 0; i < inputSplit.getFileSplits().length; i++) {
      FileSplit fs = inputSplit.getFileSplits()[i];
      GuaguaFileSplit gfs = new GuaguaFileSplit(fs.getPath().toString(), fs.getStart(), fs.getLength());
      if(inputSplit.getExtensions() != null && i < inputSplit.getExtensions().length) {
        gfs.setExtension(inputSplit.getExtensions()[i]);
      }
      splits.add(gfs);
    }
    this.getGuaguaService().setSplits(splits);
  }
  Properties props = replaceConfToProps(context.getConfiguration());
  this.getGuaguaService().setAppId(context.getConfiguration().get(GuaguaMapReduceConstants.MAPRED_JOB_ID));
  this.getGuaguaService().setContainerId(
      context.getConfiguration().get(GuaguaMapReduceConstants.MAPRED_TASK_PARTITION));
  this.getGuaguaService().init(props);
  this.getGuaguaService().start();
}
origin: ShifuML/guagua, ml.shifu/guagua-yarn

/**
 * Set up guagua service
 */
protected void setup() {
  this.setMaster(this.getInputSplit().isMaster());
  if(this.isMaster()) {
    this.setGuaguaService(new GuaguaMasterService<MASTER_RESULT, WORKER_RESULT>());
  } else {
    this.setGuaguaService(new GuaguaWorkerService<MASTER_RESULT, WORKER_RESULT>());
    List<GuaguaFileSplit> splits = new LinkedList<GuaguaFileSplit>();
    for(FileSplit fileSplit: getInputSplit().getFileSplits()) {
      splits.add(new GuaguaFileSplit(fileSplit.getPath().toString(), fileSplit.getStart(), fileSplit
          .getLength()));
    }
    this.getGuaguaService().setSplits(splits);
  }
  Properties props = replaceConfToProps();
  this.getGuaguaService().setAppId(this.getAppId().toString());
  this.getGuaguaService().setContainerId(this.getPartition() + "");
  this.getGuaguaService().init(props);
  this.getGuaguaService().start();
  initRPCClient();
}
ml.shifu.guagua.hadoop.io.GuaguaInputSplit.isMaster

Javadoc

Whether the input split is the master split.
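
In every snippet above the flag simply selects between the master and worker code paths. A minimal hypothetical usage, assuming a GuaguaInputSplit named split and placeholder methods startMasterService and processWorkerSplit that are not part of the library:

if(split.isMaster()) {
  // the single master split carries no file splits; it only coordinates iterations
  startMasterService();                       // placeholder method
} else {
  // worker splits carry one or more FileSplits to read
  for(FileSplit fileSplit: split.getFileSplits()) {
    processWorkerSplit(fileSplit);            // placeholder method
  }
}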

Popular methods of GuaguaInputSplit

  • <init>
    Constructor with #isMaster and #fileSplits settings.
  • getFileSplits
  • setFileSplits
  • setExtensions
  • setMaster
  • getExtensions
