private ContentBuilder(Generator generator, ValueWriter writer) {
    Assert.notNull(generator);
    this.generator = generator;
    this.writer = writer;
}
public SparkSettings(SparkConf cfg) {
    Assert.notNull(cfg, "non-null spark configuration expected");
    this.cfg = cfg;
}
public PropertiesSettings(Properties props) {
    Assert.notNull(props, "Non-null properties expected");
    this.props = props;
}
public HadoopSettings(Configuration cfg) {
    Assert.notNull(cfg, "Non-null Hadoop configuration expected");
    this.cfg = cfg;
}
public static void notNull(Object object) {
    notNull(object, "[Assertion failed] - this argument is required; it must not be null");
}
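// Minimal usage sketch (hypothetical ExampleSettings class, not part of the original source): the
// one-argument variant above falls back to the generic "[Assertion failed]" message, while the
// two-argument variant lets the caller supply a specific one, as the surrounding constructors do.
public class ExampleSettings {
    private final java.util.Properties props;

    public ExampleSettings(java.util.Properties props) {
        Assert.notNull(props, "Non-null properties expected");
        this.props = props;
    }
}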
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
    Method m = ReflectionUtils.findMethod(target.getClass(), method.getName(), method.getParameterTypes());
    // toString on the target seems to lead to weird effects, so use the class name instead
    Assert.notNull(m, String.format("Cannot find method %s on target %s", method, target.getClass()));
    return m.invoke(target, args);
}
public static Field findField(Class<?> clazz, String name, Class<?> type) {
    Assert.notNull(clazz, "Class must not be null");
    Assert.isTrue(name != null || type != null, "Either name or type of the field must be specified");
    Class<?> searchType = clazz;
    while (!Object.class.equals(searchType) && searchType != null) {
        Field[] fields = searchType.getDeclaredFields();
        for (Field field : fields) {
            if ((name == null || name.equals(field.getName())) && (type == null || type.equals(field.getType()))) {
                return field;
            }
        }
        searchType = searchType.getSuperclass();
    }
    return null;
}
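// Usage sketch for findField (hypothetical SomeTarget class and "internalState" field name, not from
// the original source; assumes the helper lives on the same ReflectionUtils utility referenced above):
// locate a field by name anywhere in the class hierarchy, make it accessible, then read its value.
Field field = ReflectionUtils.findField(SomeTarget.class, "internalState", null);
Assert.notNull(field, "Expected field 'internalState' on " + SomeTarget.class);
field.setAccessible(true);
Object value = field.get(someTargetInstance);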
@Override
protected Object preProcess(Object object, BytesArray storage) {
    // serialize the json early on and copy it to storage
    Assert.notNull(object, "Empty/null JSON document given...");
    jsonWriter.convert(object, storage);
    if (log.isTraceEnabled()) {
        log.trace(String.format("About to extract information from [%s]", storage));
    }
    jsonExtractors.process(storage);
    return storage;
}
private KeystoreWrapper(InputStream inputStream, String type, String password) throws EsHadoopSecurityException, IOException {
    Assert.notNull(password, "Password should not be null");
    try {
        char[] pwd = password.toCharArray();
        protection = new KeyStore.PasswordProtection(pwd);
        keyStore = KeyStore.getInstance(type);
        keyStore.load(inputStream, pwd);
    } catch (CertificateException e) {
        throw new EsHadoopSecurityException("Could not create keystore", e);
    } catch (NoSuchAlgorithmException e) {
        throw new EsHadoopSecurityException("Could not create keystore", e);
    } catch (KeyStoreException e) {
        throw new EsHadoopSecurityException("Could not create keystore", e);
    }
}
@Override
protected Object preProcess(Object object, BytesArray storage) {
    // serialize the json early on and copy it to storage
    Assert.notNull(object, "Empty/null JSON document given...");
    BytesArray ba = null;
    if (ConfigurationOptions.ES_OPERATION_UPSERT.equals(settings.getOperation())) {
        ba = storage;
    } else {
        scratchPad.reset();
        ba = scratchPad;
    }
    // write the doc to a temporary space
    jsonWriter.convert(object, ba);
    if (log.isTraceEnabled()) {
        log.trace(String.format("About to extract information from [%s]", ba));
    }
    jsonExtractors.process(ba);
    return storage;
}
public NetworkClient(Settings settings, TransportFactory transportFactory) {
    this.settings = settings.copy();
    this.secureSettings = new SecureSettings(settings);
    this.nodes = SettingsUtils.discoveredOrDeclaredNodes(settings);
    this.transportFactory = transportFactory;

    // shuffle the list of nodes so that, in case of failures, the fallback load is spread out
    Collections.shuffle(nodes);

    if (SettingsUtils.hasPinnedNode(settings)) {
        // move the pinned node to the front so it gets selected (only once)
        String pinnedNode = SettingsUtils.getPinnedNode(settings);
        if (log.isDebugEnabled()) {
            log.debug("Opening (pinned) network client to " + pinnedNode);
        }
        nodes.remove(pinnedNode);
        nodes.add(0, pinnedNode);
    }

    selectNextNode();
    Assert.notNull(currentTransport, "no node information provided");
}
/**
 * Writes the object to the index.
 *
 * @param object object to add to the index
 */
public void writeToIndex(Object object) {
    Assert.notNull(object, "no object data given");

    lazyInitWriting();
    BytesRef serialized = bulkEntryWriter.writeBulkEntry(object);
    if (serialized != null) {
        doWriteToIndex(serialized);
    }
}
public Scroll read(InputStream content) throws IOException {
    Assert.notNull(content);

    // copy content
    BytesArray copy = IOUtils.asBytes(content);
    content = new FastByteArrayInputStream(copy);

    if (log.isTraceEnabled()) {
        log.trace("About to parse scroll content " + copy);
    }

    Parser parser = new JacksonJsonParser(content);
    try {
        return read(parser, copy);
    } finally {
        parser.close();
    }
}
@Override
public BulkOutputGenerator addSuccess(String operation, int status) {
    Assert.notNull(resource);
    items.add(getSuccess()
            .replace(OP, operation)
            .replace(IDX, resource.index())
            .replace(TYPE, resource.type())
            .replace(ID, UUID.randomUUID().toString())
            .replace(VER, "1")
            .replace(STAT, "201"));
    return this;
}
@Override
public BulkOutputGenerator addFailure(String operation, int status, String type, String errorMessage) {
    Assert.notNull(resource);
    errors = true;
    items.add(getFailure()
            .replace(OP, operation)
            .replace(IDX, resource.index())
            .replace(TYPE, resource.type())
            .replace(ID, UUID.randomUUID().toString())
            .replace(STAT, Integer.toString(status))
            .replace(ETYPE, type)
            .replace(EMESG, errorMessage));
    return this;
}
@Override
public BulkOutputGenerator addRejection(String operation) {
    Assert.notNull(resource);
    errors = true;
    items.add(getFailure()
            .replace(OP, operation)
            .replace(IDX, resource.index())
            .replace(TYPE, resource.type())
            .replace(ID, UUID.randomUUID().toString())
            .replace(STAT, Integer.toString(getRejectedStatus()))
            .replace(ETYPE, getRejectionType())
            .replace(EMESG, getRejectionMsg()));
    return this;
}
@Override
public List<E> loadHandlers() {
    Assert.notNull(settings, "No settings are present in the handler loader!");
HeartBeat(final Progressable progressable, Configuration cfg, TimeValue lead, final Log log) {
    Assert.notNull(progressable, "a valid progressable is required to report status to Hadoop");
    TimeValue tv = HadoopCfgUtils.getTaskTimeout(cfg);
    Assert.isTrue(tv.getSeconds() <= 0 || tv.getSeconds() > lead.getSeconds(), "Hadoop timeout is shorter than the heartbeat");

    this.progressable = progressable;
    long cfgMillis = (tv.getMillis() > 0 ? tv.getMillis() : 0);
    // schedule the heartbeat at delay = timeout - lead, i.e. start notifying right before the timeout would trigger
    this.delay = new TimeValue(Math.abs(cfgMillis - lead.getMillis()), TimeUnit.MILLISECONDS);
    this.log = log;

    String taskId;
    TaskID taskID = HadoopCfgUtils.getTaskID(cfg);

    if (taskID == null) {
        log.warn("Cannot determine task id...");
        taskId = "<unknown>";
        if (log.isTraceEnabled()) {
            log.trace("Current configuration is " + HadoopCfgUtils.asProperties(cfg));
        }
    } else {
        taskId = "" + taskID;
    }

    id = taskId;
}
/**
 * Writes already processed (serialized) data to the index.
 *
 * @param ba The data as a bytes array
 */
public void writeProcessedToIndex(BytesArray ba) {
    Assert.notNull(ba, "no data given");
    Assert.isTrue(ba.length() > 0, "no data given");

    lazyInitWriting();
    trivialBytesRef.reset();
    trivialBytesRef.add(ba);
    doWriteToIndex(trivialBytesRef);
}
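// Usage sketch (hypothetical 'repository', 'document', and 'preSerializedBytes' names; assumes both
// methods live on the same writer class, as the matching helpers above suggest): writeToIndex
// serializes the object itself, while writeProcessedToIndex expects the caller to supply the bytes.
repository.writeToIndex(document);                      // object is serialized via the bulk entry writer
repository.writeProcessedToIndex(preSerializedBytes);   // BytesArray content is written as-is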