/**
 * Creates a record writer that merges shard indexes underneath {@code workDir}.
 * The merged Lucene index is written to {@code workDir}/data/index.
 *
 * @param context task attempt context used for progress reporting
 * @param workDir parent directory of the merged index
 */
public TreeMergeRecordWriter(TaskAttemptContext context, Path workDir) {
  this.context = context;
  this.heartBeater = new HeartBeater(context);
  this.workDir = new Path(workDir, "data/index");
}
/**
 * Records one shard path per input key; the value is unused.
 * Heart beats are requested for the duration of the call so the task is not
 * considered stalled.
 */
@Override
public void write(Text key, NullWritable value) {
  LOG.info("map key: {}", key);
  heartBeater.needHeartBeat();
  try {
    shards.add(new Path(key.toString()));
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
// Stop the heart-beat thread first so no further progress ticks are emitted
// while the task finishes, then delegate to the superclass cleanup.
@Override protected void cleanup(Context context) throws IOException, InterruptedException { heartBeater.close(); super.cleanup(context); }
/**
 * Creates the heart-beat thread, marks it as a daemon thread, and starts it.
 * While the count in {@link #threadsNeedingHeartBeat} is positive, a heart beat
 * is issued on the progress object every 60 seconds.
 *
 * @param progress the object on which progress is reported
 */
public HeartBeater(Progressable progress) {
  setDaemon(true); // must not keep the JVM alive on its own
  this.progress = progress;
  // Parameterized SLF4J call (consistent with the rest of this file) instead
  // of eager string concatenation.
  LOG.info("Heart beat reporting class is {}", progress.getClass().getName());
  // NOTE(review): calling start() from a constructor lets 'this' escape before
  // construction completes; kept as-is for interface compatibility.
  start();
}
// NOTE(review): this fragment is truncated — braces are unbalanced and the
// remainder of close() (presumably flushing/closing batchWriter) is not visible.
// Visible behavior: re-attach progress reporting to this attempt's context,
// request heart beats, and queue any buffered documents before shutting the
// heart beater down.
@Override public void close(TaskAttemptContext context) throws IOException, InterruptedException { if (context != null) { heartBeater.setProgress(context); heartBeater.needHeartBeat(); if (batch.size() > 0) { batchWriter.queueBatch(batch); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Builds a record writer that feeds documents to an embedded Solr server in
 * batches of {@code batchSize}. A heart beat is held while the (potentially
 * slow) Solr home lookup and server start-up run.
 *
 * @param context        task attempt context supplying configuration and task id
 * @param outputShardDir directory the shard index is written to
 * @param batchSize      number of documents buffered per batch
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  Configuration configuration = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHomeDir = SolrRecordWriter.findSolrConfig(configuration);
    FileSystem fileSystem = outputShardDir.getFileSystem(configuration);
    EmbeddedSolrServer solr =
        createEmbeddedSolrServer(solrHomeDir, fileSystem, outputShardDir);
    batchWriter =
        new BatchWriter(
            solr,
            batchSize,
            context.getTaskAttemptID().getTaskID(),
            SolrOutputFormat.getSolrWriterThreadCount(configuration),
            SolrOutputFormat.getSolrWriterQueueSize(configuration));
  } catch (Exception e) {
    String message =
        String.format(
            Locale.ENGLISH,
            "Failed to initialize record writer for %s, %s",
            context.getJobName(),
            configuration.get("mapred.task.id"));
    throw new IllegalStateException(message, e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
// NOTE(review): this fragment is truncated — the finally block opened here is
// never closed in the visible text. Visible behavior: log the merge plan,
// record the shard number, then open the destination index as an HdfsDirectory
// under a heart beat; the heart beater is cancelled and closed in the finally.
LOG.debug("Task " + context.getTaskAttemptID() + " merging into dstDir: " + workDir + ", srcDirs: " + shards); writeShardNumberFile(context); heartBeater.needHeartBeat(); try { Directory mergedIndex = new HdfsDirectory(workDir, context.getConfiguration()); context.setStatus("Done"); } finally { heartBeater.cancelHeartBeat(); heartBeater.close();
// NOTE(review): truncated fragment (unbalanced braces); the rest of close() is
// not visible here. What is visible: point the heart beater at this attempt's
// context, request heart beats, and queue the remaining buffered batch before
// cancelling and closing the heart beater.
@Override public void close(TaskAttemptContext context) throws IOException, InterruptedException { if (context != null) { heartBeater.setProgress(context); heartBeater.needHeartBeat(); if (batch.size() > 0) { batchWriter.queueBatch(batch); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Constructs the shard writer: allocates the document batch buffer, starts the
 * heart-beat thread, locates the Solr home directory, and boots an embedded
 * Solr server backed by {@code outputShardDir}.
 *
 * @param context        task attempt context (configuration, job name, task id)
 * @param outputShardDir destination directory for this shard's index
 * @param batchSize      capacity of one document batch
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  final Configuration conf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    final Path solrHome = SolrRecordWriter.findSolrConfig(conf);
    final FileSystem fs = outputShardDir.getFileSystem(conf);
    final EmbeddedSolrServer server = createEmbeddedSolrServer(solrHome, fs, outputShardDir);
    final int threads = SolrOutputFormat.getSolrWriterThreadCount(conf);
    final int queueSize = SolrOutputFormat.getSolrWriterQueueSize(conf);
    batchWriter =
        new BatchWriter(server, batchSize, context.getTaskAttemptID().getTaskID(), threads, queueSize);
  } catch (Exception e) {
    throw new IllegalStateException(
        String.format(
            Locale.ENGLISH,
            "Failed to initialize record writer for %s, %s",
            context.getJobName(),
            conf.get("mapred.task.id")),
        e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
// NOTE(review): truncated fragment — the try opened here is never closed in
// the visible text. Visible behavior: log the merge plan, persist the shard
// number, then open the destination index as a lock-free HdfsDirectory
// (NoLockFactory, default buffer size) under a heart beat.
LOG.debug("Task " + context.getTaskAttemptID() + " merging into dstDir: " + workDir + ", srcDirs: " + shards); writeShardNumberFile(context); heartBeater.needHeartBeat(); try { Directory mergedIndex = new HdfsDirectory(workDir, NoLockFactory.INSTANCE, context.getConfiguration(), HdfsDirectory.DEFAULT_BUFFER_SIZE); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Creates the heart-beat thread, marks it as a daemon thread, and starts it.
 * While the count in {@link #threadsNeedingHeartBeat} is positive, a heart beat
 * is issued on the progress object every 60 seconds.
 *
 * @param progress the object on which progress is reported
 */
public HeartBeater(Progressable progress) {
  setDaemon(true); // must not keep the JVM alive on its own
  this.progress = progress;
  // Parameterized SLF4J call (consistent with the rest of this file) instead
  // of eager string concatenation.
  LOG.info("Heart beat reporting class is {}", progress.getClass().getName());
  // NOTE(review): calling start() from a constructor lets 'this' escape before
  // construction completes; kept as-is for interface compatibility.
  start();
}
// NOTE(review): truncated fragment — braces unbalanced; the tail of close()
// is outside this view. Visible behavior: rebind progress reporting to the
// given context, hold a heart beat, and flush the pending batch to the
// batch writer before cancelling/closing the heart beater.
@Override public void close(TaskAttemptContext context) throws IOException, InterruptedException { if (context != null) { heartBeater.setProgress(context); heartBeater.needHeartBeat(); if (batch.size() > 0) { batchWriter.queueBatch(batch); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Collects one shard directory path from {@code key}; {@code value} carries no
 * information. A heart beat covers the call so the framework sees progress.
 */
@Override
public void write(Text key, NullWritable value) {
  LOG.info("map key: {}", key);
  heartBeater.needHeartBeat();
  try {
    Path shardPath = new Path(key.toString());
    shards.add(shardPath);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
/**
 * Initializes a shard-level Solr record writer. Documents are buffered in a
 * batch of {@code batchSize} and flushed through a {@code BatchWriter} backed
 * by an embedded Solr server writing into {@code outputShardDir}.
 *
 * @param context        task attempt context for configuration and identifiers
 * @param outputShardDir output directory of the shard index
 * @param batchSize      documents per batch
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  Configuration jobConf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    // Server start-up can be slow; keep the heart beat alive throughout.
    heartBeater.needHeartBeat();
    Path solrHomeDir = SolrRecordWriter.findSolrConfig(jobConf);
    FileSystem fs = outputShardDir.getFileSystem(jobConf);
    EmbeddedSolrServer solr = createEmbeddedSolrServer(solrHomeDir, fs, outputShardDir);
    batchWriter = new BatchWriter(
        solr,
        batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(jobConf),
        SolrOutputFormat.getSolrWriterQueueSize(jobConf));
  } catch (Exception e) {
    throw new IllegalStateException(
        String.format(
            Locale.ENGLISH,
            "Failed to initialize record writer for %s, %s",
            context.getJobName(),
            jobConf.get("mapred.task.id")),
        e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
// NOTE(review): truncated fragment — the try opened here is never closed in
// the visible text. Visible behavior: log the merge plan, write the shard
// number file, then open the merge destination as a lock-free HdfsDirectory
// while a heart beat is held; the heart beater is cancelled and closed after.
LOG.debug("Task " + context.getTaskAttemptID() + " merging into dstDir: " + workDir + ", srcDirs: " + shards); writeShardNumberFile(context); heartBeater.needHeartBeat(); try { Directory mergedIndex = new HdfsDirectory(workDir, NoLockFactory.INSTANCE, context.getConfiguration(), HdfsDirectory.DEFAULT_BUFFER_SIZE); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Builds the tree-merge record writer. The merged index lands in the
 * {@code data/index} subdirectory of {@code workDir}; a heart-beat thread is
 * started immediately for progress reporting.
 *
 * @param context task attempt context
 * @param workDir directory under which {@code data/index} is created
 */
public TreeMergeRecordWriter(TaskAttemptContext context, Path workDir) {
  this.workDir = new Path(workDir, "data/index");
  this.context = context;
  this.heartBeater = new HeartBeater(context);
}
// Close the heart-beat thread before invoking the inherited cleanup so the
// progress reporter stops ticking once the task tears down.
@Override protected void cleanup(Context context) throws IOException, InterruptedException { heartBeater.close(); super.cleanup(context); }
// NOTE(review): truncated fragment (unbalanced braces); the remainder of
// close() is not visible. Visible behavior: re-point the heart beater at the
// closing attempt's context, request heart beats, and enqueue any still-buffered
// documents, then cancel and close the heart beater.
@Override public void close(TaskAttemptContext context) throws IOException, InterruptedException { if (context != null) { heartBeater.setProgress(context); heartBeater.needHeartBeat(); if (batch.size() > 0) { batchWriter.queueBatch(batch); heartBeater.cancelHeartBeat(); heartBeater.close();
/**
 * Adds the shard path named by {@code key} to the pending merge set. The
 * {@code NullWritable} value is ignored; heart beats bracket the work.
 */
@Override
public void write(Text key, NullWritable value) {
  LOG.info("map key: {}", key);
  heartBeater.needHeartBeat();
  try {
    String shardLocation = key.toString();
    shards.add(new Path(shardLocation));
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
/**
 * Constructs a record writer that batches documents into an embedded Solr
 * server writing to {@code outputShardDir}. A heart beat is held across the
 * potentially slow Solr home lookup and server start-up.
 *
 * @param context        task attempt context supplying configuration and task id
 * @param outputShardDir directory the shard index is written to
 * @param batchSize      number of documents buffered per batch
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  // Diamond operator instead of the raw type — consistent with the sibling
  // constructors in this file and avoids an unchecked-assignment warning.
  this.batch = new ArrayList<>(batchSize);
  Configuration conf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHomeDir = SolrRecordWriter.findSolrConfig(conf);
    FileSystem fs = outputShardDir.getFileSystem(conf);
    EmbeddedSolrServer solr = createEmbeddedSolrServer(solrHomeDir, fs, outputShardDir);
    batchWriter = new BatchWriter(solr, batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(conf),
        SolrOutputFormat.getSolrWriterQueueSize(conf));
  } catch (Exception e) {
    // Locale.ENGLISH keeps formatting locale-independent, matching the other
    // SolrRecordWriter constructors in this file.
    throw new IllegalStateException(String.format(Locale.ENGLISH,
        "Failed to initialize record writer for %s, %s", context.getJobName(), conf
            .get("mapred.task.id")), e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}