// Mapper-side task setup: resolves the job's log configuration file first so
// that logging is configured before any other work (return value is
// intentionally unused — the call is made for its side effect; presumably it
// localizes the log4j config for this task — TODO confirm against Utils).
// Then delegates to the superclass setup and caches the task-local Solr home
// directory located by SolrRecordWriter.findSolrConfig().
@Override protected void setup(Context context) throws IOException, InterruptedException { Utils.getLogConfigFile(context.getConfiguration()); super.setup(context); solrHomeDir = SolrRecordWriter.findSolrConfig(context.getConfiguration()); }
/**
 * Builds a {@link SolrRecordWriter} for this task attempt.
 *
 * <p>Resolves the job's log configuration (side-effect call), derives the
 * task's default work file as the output shard directory, reads the
 * configured document batch size, and hands all three to the writer.
 *
 * @param context the task attempt this writer belongs to
 * @return a record writer that indexes documents into the task's shard
 * @throws IOException if the work file or configuration cannot be resolved
 * @throws InterruptedException if the task is interrupted during setup
 */
@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  Utils.getLogConfigFile(context.getConfiguration());
  final Path shardDir = getDefaultWorkFile(context, "");
  final int documentBatchSize = getBatchSize(context.getConfiguration());
  return new SolrRecordWriter<>(context, shardDir, documentBatchSize);
}
// NOTE(review): fragment — this span begins mid-method (inside a loop over
// SolrRecordWriter.getAllowedConfigDirectories()) and is syntactically
// incomplete as shown (the condition before '&&' and the format arguments
// after the message are outside this view). The enclosing definition cannot
// be reviewed or rewritten from here; left byte-identical.
.getAllowedConfigDirectories()) { File configDir = new File(dir, allowedDirectory); boolean configDirExists; && SolrRecordWriter.isRequiredConfigDirectory(allowedDirectory)) { throw new IOException(String.format(Locale.ENGLISH, "required configuration directory %s is not present in %s",
/**
 * Creates a writer that indexes documents into an embedded Solr server
 * rooted at this task's output shard directory.
 *
 * <p>A {@link HeartBeater} keeps the task alive while the (potentially slow)
 * Solr core startup runs; it is started before the work and always cancelled
 * in the {@code finally} block. Any startup failure is rethrown as an
 * {@link IllegalStateException} carrying the job name and task id.
 *
 * @param context        the owning task attempt
 * @param outputShardDir directory the shard's index is written to
 * @param batchSize      number of documents buffered per Solr add
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  Configuration jobConf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHome = SolrRecordWriter.findSolrConfig(jobConf);
    FileSystem shardFs = outputShardDir.getFileSystem(jobConf);
    EmbeddedSolrServer server = createEmbeddedSolrServer(solrHome, shardFs, outputShardDir);
    batchWriter = new BatchWriter(server, batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(jobConf),
        SolrOutputFormat.getSolrWriterQueueSize(jobConf));
  } catch (Exception e) {
    // Locale.ENGLISH keeps the diagnostic message locale-independent.
    throw new IllegalStateException(String.format(Locale.ENGLISH,
        "Failed to initialize record writer for %s, %s",
        context.getJobName(), jobConf.get("mapred.task.id")), e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
/**
 * Reducer-side task setup.
 *
 * <p>Verifies this reducer received the partition it expects, registers the
 * reducer context with {@link SolrRecordWriter}, instantiates the configured
 * {@link UpdateConflictResolver} (defaulting to
 * {@link RetainMostRecentUpdateConflictResolver}), builds the fault-tolerance
 * policy from job configuration, and creates the heartbeater that keeps the
 * task alive during long-running work.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  verifyPartitionAssignment(context);
  SolrRecordWriter.addReducerContext(context);
  Class<? extends UpdateConflictResolver> clazz =
      context.getConfiguration().getClass(
          UPDATE_CONFLICT_RESOLVER,
          RetainMostRecentUpdateConflictResolver.class,
          UpdateConflictResolver.class);
  // ReflectionUtils.newInstance() also implicitly calls
  // resolver.configure(context.getConfiguration()) when the resolver
  // implements org.apache.hadoop.conf.Configurable.
  this.resolver = ReflectionUtils.newInstance(clazz, context.getConfiguration());
  this.exceptionHandler = new FaultTolerance(
      context.getConfiguration().getBoolean(FaultTolerance.IS_PRODUCTION_MODE, false),
      context.getConfiguration().getBoolean(FaultTolerance.IS_IGNORING_RECOVERABLE_EXCEPTIONS, false),
      context.getConfiguration().get(FaultTolerance.RECOVERABLE_EXCEPTION_CLASSES,
          SolrServerException.class.getName()));
  this.heartBeater = new HeartBeater(context);
}
/**
 * Sends one batch of documents to Solr and records metrics.
 *
 * <p>On success, increments the batch and document counters (and, when debug
 * logging is on, the batch write time counter) and returns the response.
 * On any failure — including Errors — records the exception via
 * {@code setBatchWriteException}, bumps a per-exception-class error counter,
 * logs, and returns {@code null} instead of propagating.
 *
 * @param batchToWrite documents to add in a single Solr request
 * @return the Solr response, or {@code null} if the batch failed
 */
protected UpdateResponse runUpdate(List<SolrInputDocument> batchToWrite) {
  try {
    UpdateResponse response = solr.add(batchToWrite);
    SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
        SolrCounters.BATCHES_WRITTEN.toString(), 1);
    SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
        SolrCounters.DOCUMENTS_WRITTEN.toString(), batchToWrite.size());
    if (LOG.isDebugEnabled()) {
      SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
          SolrCounters.BATCH_WRITE_TIME.toString(), response.getElapsedTime());
    }
    return response;
  } catch (Throwable t) {
    // Wrap non-Exception throwables so the recorded failure is always an Exception.
    setBatchWriteException(t instanceof Exception ? (Exception) t : new Exception(t));
    SolrRecordWriter.incrementCounter(taskId, getClass().getName() + ".errors",
        t.getClass().getName(), 1);
    LOG.error("Unable to process batch", t);
    return null;
  }
}
/**
 * Creates a writer that indexes documents into an embedded Solr server
 * rooted at this task's output shard directory.
 *
 * <p>A {@link HeartBeater} keeps the task alive while the (potentially slow)
 * Solr core startup runs; it is started before the work and always cancelled
 * in the {@code finally} block. Any startup failure is rethrown as an
 * {@link IllegalStateException} carrying the job name and task id.
 *
 * @param context        the owning task attempt
 * @param outputShardDir directory the shard's index is written to
 * @param batchSize      number of documents buffered per Solr add
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  Configuration jobConf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHome = SolrRecordWriter.findSolrConfig(jobConf);
    FileSystem shardFs = outputShardDir.getFileSystem(jobConf);
    EmbeddedSolrServer server = createEmbeddedSolrServer(solrHome, shardFs, outputShardDir);
    batchWriter = new BatchWriter(server, batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(jobConf),
        SolrOutputFormat.getSolrWriterQueueSize(jobConf));
  } catch (Exception e) {
    // Locale.ENGLISH keeps the diagnostic message locale-independent.
    throw new IllegalStateException(String.format(Locale.ENGLISH,
        "Failed to initialize record writer for %s, %s",
        context.getJobName(), jobConf.get("mapred.task.id")), e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
/**
 * Reducer-side task setup.
 *
 * <p>Verifies this reducer received the partition it expects, registers the
 * reducer context with {@link SolrRecordWriter}, instantiates the configured
 * {@link UpdateConflictResolver} (defaulting to
 * {@link RetainMostRecentUpdateConflictResolver}), builds the fault-tolerance
 * policy from job configuration, and creates the heartbeater that keeps the
 * task alive during long-running work.
 */
@Override
protected void setup(Context context) throws IOException, InterruptedException {
  verifyPartitionAssignment(context);
  SolrRecordWriter.addReducerContext(context);
  Class<? extends UpdateConflictResolver> clazz =
      context.getConfiguration().getClass(
          UPDATE_CONFLICT_RESOLVER,
          RetainMostRecentUpdateConflictResolver.class,
          UpdateConflictResolver.class);
  // ReflectionUtils.newInstance() also implicitly calls
  // resolver.configure(context.getConfiguration()) when the resolver
  // implements org.apache.hadoop.conf.Configurable.
  this.resolver = ReflectionUtils.newInstance(clazz, context.getConfiguration());
  this.exceptionHandler = new FaultTolerance(
      context.getConfiguration().getBoolean(FaultTolerance.IS_PRODUCTION_MODE, false),
      context.getConfiguration().getBoolean(FaultTolerance.IS_IGNORING_RECOVERABLE_EXCEPTIONS, false),
      context.getConfiguration().get(FaultTolerance.RECOVERABLE_EXCEPTION_CLASSES,
          SolrServerException.class.getName()));
  this.heartBeater = new HeartBeater(context);
}
/**
 * Sends one batch of documents to Solr and records metrics.
 *
 * <p>On success, increments the batch and document counters (and, when debug
 * logging is on, the batch write time counter) and returns the response.
 * On any failure — including Errors — records the exception via
 * {@code setBatchWriteException}, bumps a per-exception-class error counter,
 * logs, and returns {@code null} instead of propagating.
 *
 * @param batchToWrite documents to add in a single Solr request
 * @return the Solr response, or {@code null} if the batch failed
 */
protected UpdateResponse runUpdate(List<SolrInputDocument> batchToWrite) {
  try {
    UpdateResponse response = solr.add(batchToWrite);
    SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
        SolrCounters.BATCHES_WRITTEN.toString(), 1);
    SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
        SolrCounters.DOCUMENTS_WRITTEN.toString(), batchToWrite.size());
    if (LOG.isDebugEnabled()) {
      SolrRecordWriter.incrementCounter(taskId, SolrCounters.class.getName(),
          SolrCounters.BATCH_WRITE_TIME.toString(), response.getElapsedTime());
    }
    return response;
  } catch (Throwable t) {
    // Wrap non-Exception throwables so the recorded failure is always an Exception.
    setBatchWriteException(t instanceof Exception ? (Exception) t : new Exception(t));
    SolrRecordWriter.incrementCounter(taskId, getClass().getName() + ".errors",
        t.getClass().getName(), 1);
    LOG.error("Unable to process batch", t);
    return null;
  }
}
/**
 * Creates a writer that indexes documents into an embedded Solr server
 * rooted at this task's output shard directory.
 *
 * <p>A {@link HeartBeater} keeps the task alive while the (potentially slow)
 * Solr core startup runs; it is started before the work and always cancelled
 * in the {@code finally} block. Any startup failure is rethrown as an
 * {@link IllegalStateException} carrying the job name and task id.
 *
 * @param context        the owning task attempt
 * @param outputShardDir directory the shard's index is written to
 * @param batchSize      number of documents buffered per Solr add
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  // FIX: was a raw 'new ArrayList(batchSize)' (unchecked warning); use the
  // diamond form so 'batch' stays type-safe, matching the sibling variants
  // of this constructor in this file.
  this.batch = new ArrayList<>(batchSize);
  Configuration conf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHomeDir = SolrRecordWriter.findSolrConfig(conf);
    FileSystem fs = outputShardDir.getFileSystem(conf);
    EmbeddedSolrServer solr = createEmbeddedSolrServer(solrHomeDir, fs, outputShardDir);
    batchWriter = new BatchWriter(solr, batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(conf),
        SolrOutputFormat.getSolrWriterQueueSize(conf));
  } catch (Exception e) {
    // FIX: pass Locale.ENGLISH so the message is locale-independent,
    // consistent with the other variants of this constructor in this file.
    throw new IllegalStateException(String.format(Locale.ENGLISH,
        "Failed to initialize record writer for %s, %s",
        context.getJobName(), conf.get("mapred.task.id")), e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}
// Mapper-side task setup: resolves the job's log configuration file first so
// that logging is configured before any other work (return value is
// intentionally unused — the call is made for its side effect; presumably it
// localizes the log4j config for this task — TODO confirm against Utils).
// Then delegates to the superclass setup and caches the task-local Solr home
// directory located by SolrRecordWriter.findSolrConfig().
@Override protected void setup(Context context) throws IOException, InterruptedException { Utils.getLogConfigFile(context.getConfiguration()); super.setup(context); solrHomeDir = SolrRecordWriter.findSolrConfig(context.getConfiguration()); }
// NOTE(review): fragment — this span begins mid-method (inside a loop over
// SolrRecordWriter.getAllowedConfigDirectories()) and is syntactically
// incomplete as shown; the enclosing definition is outside this view, so the
// code is left byte-identical. Also note this String.format call omits
// Locale.ENGLISH, unlike the otherwise-identical fragment elsewhere in this
// file — likely should be made consistent; confirm against the full method.
.getAllowedConfigDirectories()) { File configDir = new File(dir, allowedDirectory); boolean configDirExists; && SolrRecordWriter.isRequiredConfigDirectory(allowedDirectory)) { throw new IOException(String.format( "required configuration directory %s is not present in %s",
/**
 * Builds a {@link SolrRecordWriter} for this task attempt.
 *
 * <p>Resolves the job's log configuration (side-effect call), derives the
 * task's default work file as the output shard directory, reads the
 * configured document batch size, and hands all three to the writer.
 *
 * @param context the task attempt this writer belongs to
 * @return a record writer that indexes documents into the task's shard
 * @throws IOException if the work file or configuration cannot be resolved
 * @throws InterruptedException if the task is interrupted during setup
 */
@Override
public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
    throws IOException, InterruptedException {
  Utils.getLogConfigFile(context.getConfiguration());
  final Path shardDir = getDefaultWorkFile(context, "");
  final int documentBatchSize = getBatchSize(context.getConfiguration());
  return new SolrRecordWriter<>(context, shardDir, documentBatchSize);
}
/**
 * Creates a writer that indexes documents into an embedded Solr server
 * rooted at this task's output shard directory.
 *
 * <p>A {@link HeartBeater} keeps the task alive while the (potentially slow)
 * Solr core startup runs; it is started before the work and always cancelled
 * in the {@code finally} block. Any startup failure is rethrown as an
 * {@link IllegalStateException} carrying the job name and task id.
 *
 * @param context        the owning task attempt
 * @param outputShardDir directory the shard's index is written to
 * @param batchSize      number of documents buffered per Solr add
 */
public SolrRecordWriter(TaskAttemptContext context, Path outputShardDir, int batchSize) {
  this.batchSize = batchSize;
  this.batch = new ArrayList<>(batchSize);
  Configuration jobConf = context.getConfiguration();
  heartBeater = new HeartBeater(context);
  try {
    heartBeater.needHeartBeat();
    Path solrHome = SolrRecordWriter.findSolrConfig(jobConf);
    FileSystem shardFs = outputShardDir.getFileSystem(jobConf);
    EmbeddedSolrServer server = createEmbeddedSolrServer(solrHome, shardFs, outputShardDir);
    batchWriter = new BatchWriter(server, batchSize,
        context.getTaskAttemptID().getTaskID(),
        SolrOutputFormat.getSolrWriterThreadCount(jobConf),
        SolrOutputFormat.getSolrWriterQueueSize(jobConf));
  } catch (Exception e) {
    // Locale.ENGLISH keeps the diagnostic message locale-independent.
    throw new IllegalStateException(String.format(Locale.ENGLISH,
        "Failed to initialize record writer for %s, %s",
        context.getJobName(), jobConf.get("mapred.task.id")), e);
  } finally {
    heartBeater.cancelHeartBeat();
  }
}