/**
 * Reflectively creates a new instance of {@code targetClass} via its no-arg constructor.
 *
 * <p>Uses {@code getDeclaredConstructor().newInstance()} rather than the deprecated
 * {@code Class.newInstance()}, which improperly propagates checked exceptions thrown by
 * the constructor instead of wrapping them.
 *
 * @param targetClass the class to instantiate; must have an accessible no-arg constructor
 * @return a new instance of {@code targetClass}
 * @throws CrunchRuntimeException if the class cannot be instantiated reflectively
 */
protected T createNewInstance(Class<T> targetClass) {
  try {
    return targetClass.getDeclaredConstructor().newInstance();
  } catch (ReflectiveOperationException e) {
    throw new CrunchRuntimeException(e);
  }
}
/**
 * Serializes {@code input} into its JSON string form via the configured mapper.
 * Any failure is rethrown as a {@link CrunchRuntimeException}.
 */
@Override
public String map(T input) {
  final String json;
  try {
    json = mapper.writeValueAsString(input);
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
  return json;
}
}
/**
 * Deserializes a JSON string back into an instance of {@code clazz}.
 * Any failure is rethrown as a {@link CrunchRuntimeException}.
 */
@Override
public T map(String input) {
  final T value;
  try {
    value = mapper.readValue(input, clazz);
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
  return value;
}
}
/**
 * Records the Writable value class for this instance, loading it by name on first call.
 * Subsequent calls must supply the same class name or an IllegalStateException is thrown.
 *
 * @param valueType fully-qualified class name of the expected Writable value type
 */
protected void setValueType(String valueType) {
  if (valueClass != null) {
    // Already resolved: only verify that the incoming type agrees with it.
    if (!valueType.equals(valueClass.getName())) {
      throw new IllegalStateException("Incoming " + valueType + " is not " + valueClass);
    }
    return;
  }
  try {
    valueClass = Class.forName(valueType).asSubclass(Writable.class);
  } catch (ClassNotFoundException cnfe) {
    throw new CrunchRuntimeException(cnfe);
  }
}
/**
 * Invokes the wrapped instance's reflective cleanup method, if one was found,
 * routing its output through the supplied emitter.
 */
@Override
public void cleanup(Emitter<Pair<K2, V2>> emitter) {
  if (cleanupMethod == null) {
    return; // Nothing to clean up for this instance.
  }
  handler.set(emitter);
  try {
    cleanupMethod.invoke(instance, context);
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
}
}
/**
 * Applies the wrapped function to {@code input}, converting any checked
 * exception into a {@link CrunchRuntimeException}.
 */
@Override
public Double map(T input) {
  try {
    return call(input);
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
}
}
/**
 * Resolves the Hadoop deserializer for {@code clazz} from the configured
 * serialization class, failing fast when none is registered.
 */
@Override
public void initialize() {
  deserializer = ReflectionUtils.newInstance(serClazz, null).getDeserializer(clazz);
  if (deserializer == null) {
    throw new CrunchRuntimeException("No Hadoop deserializer for class: " + clazz);
  }
}
/**
 * Resolves the Hadoop serializer for {@code clazz} from the configured
 * serialization class, failing fast when none is registered.
 */
@Override
public void initialize() {
  serializer = ReflectionUtils.newInstance(serClazz, null).getSerializer(clazz);
  if (serializer == null) {
    throw new CrunchRuntimeException("No Hadoop serializer for class: " + clazz);
  }
}
/**
 * Deletes all records in the given dataset view, logging a warning when the
 * view reports that nothing was actually removed.
 */
private void delete(View view) {
  final boolean deleted;
  try {
    deleted = view.deleteAll();
  } catch (UnsupportedOperationException e) {
    LOG.error("Dataset view " + view + " cannot be deleted!");
    throw new CrunchRuntimeException("Dataset view cannot be deleted:" + view, e);
  }
  if (!deleted) {
    LOG.warn("No data was deleted.");
  }
}
/**
 * Resolves the local filesystem path of a file previously pushed to the distributed cache.
 *
 * @param input the original (remote) path string used to register the cache file
 * @param conf the job configuration holding the cache mappings
 * @return the local path of the cached file
 * @throws CrunchRuntimeException if the file is not present in the local cache
 */
private Path getCacheFilePath(String input, Configuration conf) {
  Path local = DistCache.getPathToCacheFile(new Path(input), conf);
  if (local != null) {
    return local;
  }
  throw new CrunchRuntimeException("Can't find local cache file for '" + input + "'");
}
/**
 * Converts {@code emitted} into an output key/value pair and writes it to the
 * underlying task context.
 *
 * @param emitted the record to write
 * @throws CrunchRuntimeException if the write fails or the task thread is interrupted
 */
public void emit(T emitted) {
  try {
    K key = converter.outputKey(emitted);
    V value = converter.outputValue(emitted);
    this.context.write(key, value);
  } catch (IOException e) {
    throw new CrunchRuntimeException(e);
  } catch (InterruptedException e) {
    // Restore the interrupt status so callers up the stack can still observe it.
    Thread.currentThread().interrupt();
    throw new CrunchRuntimeException(e);
  }
}
/**
 * Parses a protobuf message of type {@code T} from the readable bytes of {@code bb}.
 *
 * <p>Bug fix: {@code mergeFrom(byte[], int, int)} takes an offset and a LENGTH. The
 * previous code passed {@code bb.limit()} as the length and ignored
 * {@code bb.arrayOffset()}, which reads the wrong byte range whenever the buffer has a
 * non-zero position or is a sliced/duplicated view over a larger backing array. Using
 * {@code arrayOffset() + position()} and {@code remaining()} is correct in all cases
 * and identical to the old behavior for a freshly-wrapped buffer.
 */
@Override
public T map(ByteBuffer bb) {
  try {
    return (T) instance.newBuilderForType()
        .mergeFrom(bb.array(), bb.arrayOffset() + bb.position(), bb.remaining())
        .build();
  } catch (InvalidProtocolBufferException e) {
    throw new CrunchRuntimeException(e);
  }
}
}
/**
 * Serializes the WritableComparable code mapping into {@code conf}, then gives
 * each wrapped MapFn a chance to configure itself.
 */
@Override
public void configure(Configuration conf) {
  try {
    serializeWritableComparableCodes(conf);
  } catch (IOException ioe) {
    throw new CrunchRuntimeException("Error serializing writable comparable codes", ioe);
  }
  for (MapFn mapFn : fns) {
    mapFn.configure(conf);
  }
}
/**
 * Wraps a Hadoop RecordReader as an iterator, eagerly advancing to the first
 * record so that {@code hasNext}/{@code current} are primed.
 *
 * @param reader the (already initialized) record reader to iterate over
 * @throws CrunchRuntimeException if reading the first record fails or is interrupted
 */
public HCatRecordReaderIterator(RecordReader reader) {
  this.reader = reader;
  try {
    hasNext = reader.nextKeyValue();
    if (hasNext) {
      current = this.reader.getCurrentValue();
    }
  } catch (IOException e) {
    throw new CrunchRuntimeException(e);
  } catch (InterruptedException e) {
    // Restore the interrupt status before wrapping, per standard practice.
    Thread.currentThread().interrupt();
    throw new CrunchRuntimeException(e);
  }
}
/**
 * Invokes the wrapped pair function on the two halves of {@code input},
 * rethrowing any failure as a {@link CrunchRuntimeException}.
 */
@Override
public R map(Pair<K, V> input) {
  try {
    return call(input.first(), input.second());
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
}
}
/**
 * Creates and initializes a record reader for the given split, exposing its
 * records as an iterator.
 */
@Override
public Iterator<HCatRecord> apply(InputSplit split) {
  final RecordReader rr;
  try {
    rr = fmt.createRecordReader(split, ctxt);
    rr.initialize(split, ctxt);
  } catch (IOException | InterruptedException ex) {
    throw new CrunchRuntimeException(ex);
  }
  return new HCatRecordReaderIterator(rr);
}
}).iterator());
/**
 * Flat-maps the input pair: every record produced by {@code call} is emitted
 * individually. Failures are rethrown as {@link CrunchRuntimeException}.
 */
@Override
public void process(Pair<K, V> input, Emitter<R> emitter) {
  try {
    for (R result : call(input.first(), input.second())) {
      emitter.emit(result);
    }
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
}
}
/**
 * Materializes the in-memory contents through a temporary source/target so
 * they can be consumed as ReadableData.
 */
@Override
protected ReadableData<Pair<K, V>> getReadableDataInternal() {
  try {
    return ptype
        .createSourceTarget(pipeline.getConfiguration(), pipeline.createTempPath(), contents, parallelism)
        .asReadable();
  } catch (IOException ioe) {
    throw new CrunchRuntimeException(ioe);
  }
}
/**
 * Adapts a Spark {@code Tuple2} result into a Crunch {@code Pair}, passing
 * {@code null} results straight through.
 */
@Override
public Pair<K, V> map(T input) {
  try {
    Tuple2<K, V> tuple = call(input);
    if (tuple == null) {
      return null;
    }
    return Pair.of(tuple._1(), tuple._2());
  } catch (Exception ex) {
    throw new CrunchRuntimeException(ex);
  }
}
}
/**
 * Bridges a classic Hadoop Mapper into a Crunch DoFn by pointing the shared
 * output collector at the given emitter before delegating to the mapper.
 */
@Override
public void process(Pair<K1, V1> input, Emitter<Pair<K2, V2>> emitter) {
  outputCollector.set(emitter);
  try {
    instance.map(input.first(), input.second(), outputCollector, this);
  } catch (IOException ioe) {
    throw new CrunchRuntimeException(ioe);
  }
}