private static String getSparkVersionOr(String defaultValue) { // need SparkContext which requires context // as such do another reflex dance String sparkVersion = null; // Spark 1.0 - 1.1: SparkContext$.MODULE$.SPARK_VERSION(); // Spark 1.2+ : package$.MODULE$.SPARK_VERSION(); Object target = org.apache.spark.SparkContext$.MODULE$; Method sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); if (sparkVersionMethod == null) { target = org.apache.spark.package$.MODULE$; sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); } if (sparkVersionMethod != null) { sparkVersion = ReflectionUtils.<String>invoke(sparkVersionMethod, target); } else { sparkVersion = defaultValue; } return sparkVersion; }
/**
 * Returns the parent defaults of the given {@link Properties} instance, read
 * reflectively from its non-public {@code defaults} field.
 */
static Properties extractOriginalProperties(Properties copy) {
    Field defaultsField = ReflectionUtils.findField(Properties.class, "defaults", Properties.class);
    ReflectionUtils.makeAccessible(defaultsField);
    return ReflectionUtils.getField(defaultsField, copy);
}
/** Stores the given {@link Writable} value class into the backing field via reflection. */
public void setValueClass(Class<Writable> valueClass) { ReflectionUtils.setField(VALUE_CLASS_FIELD, this, valueClass); }
/**
 * Returns the internal byte buffer backing the given {@link ByteArrayInputStream},
 * read reflectively (the stream offers no public accessor for it).
 */
private static byte[] byteArrayInputStreamInternalBuffer(ByteArrayInputStream bais) { return ReflectionUtils.getField(BYTE_ARRAY_BUFFER, bais); }
@Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { Method m = ReflectionUtils.findMethod(target.getClass(), method.getName(), method.getParameterTypes()); // toString on target seems to lead to weird effects to use the class name instead Assert.notNull(m, String.format("Cannot find method %s on target %s", method, target.getClass())); return m.invoke(target, args); }
/**
 * Finds the field with the given name on the given class, irrespective of the
 * field's type. Convenience overload delegating with a {@code null} field type.
 */
public static Field findField(Class<?> clazz, String name) { return findField(clazz, name, null); }
// Grab the underlying socket reflectively to learn the local address the request
// used, then trace-log the proxy info, that address, the HTTP status code/text
// and the response body.
Socket sk = ReflectionUtils.invoke(GET_SOCKET, conn, (Object[]) null); String addr = sk.getLocalAddress().getHostAddress(); log.trace(String.format("Rx %s@[%s] [%s-%s] [%s]", proxyInfo, addr, http.getStatusCode(), HttpStatus.getStatusText(http.getStatusCode()), http.getResponseBodyAsString()));
/**
 * Constructor used by {@link ObjectMapper} for initial instantiation.
 * Copies the mapper's internals (root deserializers read reflectively, provider,
 * factory, and a snapshot of the deserialization config) and records the target
 * type and optional instance to update.
 */
protected BackportedObjectReader(ObjectMapper mapper, JavaType valueType, Object valueToUpdate) { _rootDeserializers = ReflectionUtils.getField(ROOT_DESERIALIZERS, mapper); _provider = mapper.getDeserializerProvider(); _jsonFactory = mapper.getJsonFactory(); // must make a copy at this point, to prevent further changes from trickling down _config = mapper.copyDeserializationConfig(); _valueType = valueType; _valueToUpdate = valueToUpdate; if (valueToUpdate != null && valueType.isArrayType()) { throw new IllegalArgumentException("Can not update an array value"); } }
@Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { Method m = ReflectionUtils.findMethod(target.getClass(), method.getName(), method.getParameterTypes()); // toString on target seems to lead to weird effects to use the class name instead Assert.notNull(m, String.format("Cannot find method %s on target %s", method, target.getClass())); return m.invoke(target, args); }
/**
 * Finds the field with the given name on the given class, irrespective of the
 * field's type. Convenience overload delegating with a {@code null} field type.
 */
public static Field findField(Class<?> clazz, String name) { return findField(clazz, name, null); }
private static String getSparkVersionOr(String defaultValue) { // need SparkContext which requires context // as such do another reflex dance String sparkVersion = null; // Spark 1.0 - 1.1: SparkContext$.MODULE$.SPARK_VERSION(); // Spark 1.2+ : package$.MODULE$.SPARK_VERSION(); Object target = org.apache.spark.SparkContext$.MODULE$; Method sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); if (sparkVersionMethod == null) { target = org.apache.spark.package$.MODULE$; sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); } if (sparkVersionMethod != null) { sparkVersion = ReflectionUtils.<String>invoke(sparkVersionMethod, target); } else { sparkVersion = defaultValue; } return sparkVersion; }
/** Reads the (non-public) interrupted flag of the given {@code TaskContext} via reflection. */
static boolean isInterrupted(TaskContext taskContext) { return ReflectionUtils.getField(INTERRUPTED_FIELD, taskContext); } }
/**
 * Extracts the defaults backing the given {@link Properties} object by reading
 * its non-public {@code defaults} field.
 */
static Properties extractOriginalProperties(Properties copy) {
    Field field = ReflectionUtils.findField(Properties.class, "defaults", Properties.class);
    ReflectionUtils.makeAccessible(field);
    Properties originals = ReflectionUtils.getField(field, copy);
    return originals;
}
/**
 * Deserializes this instance: first restores the value class (written as a UTF
 * string), then delegates to the superclass for the remaining state.
 *
 * @throws IOException if the named class cannot be loaded
 */
@Override
public void readFields(DataInput in) throws IOException {
    String className = in.readUTF();
    try {
        // resolve without initializing the class
        Class<?> resolved = Class.forName(className, false, getClass().getClassLoader());
        ReflectionUtils.setField(VALUE_CLASS_FIELD, this, resolved);
    } catch (ClassNotFoundException ex) {
        throw new IOException("Cannot load class " + className, ex);
    }
    super.readFields(in);
}
@Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { Method m = ReflectionUtils.findMethod(target.getClass(), method.getName(), method.getParameterTypes()); // toString on target seems to lead to weird effects to use the class name instead Assert.notNull(m, String.format("Cannot find method %s on target %s", method, target.getClass())); return m.invoke(target, args); }
/**
 * Finds the field with the given name on the given class, irrespective of the
 * field's type. Convenience overload delegating with a {@code null} field type.
 */
public static Field findField(Class<?> clazz, String name) { return findField(clazz, name, null); }
private static String getSparkVersionOr(String defaultValue) { // need SparkContext which requires context // as such do another reflex dance String sparkVersion = null; // Spark 1.0 - 1.1: SparkContext$.MODULE$.SPARK_VERSION(); // Spark 1.2+ : package$.MODULE$.SPARK_VERSION(); Object target = org.apache.spark.SparkContext$.MODULE$; Method sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); if (sparkVersionMethod == null) { target = org.apache.spark.package$.MODULE$; sparkVersionMethod = ReflectionUtils.findMethod(target.getClass(), "SPARK_VERSION"); } if (sparkVersionMethod != null) { sparkVersion = ReflectionUtils.<String>invoke(sparkVersionMethod, target); } else { sparkVersion = defaultValue; } return sparkVersion; }