Refine search
/**
 * Instantiates the {@link ErrorReporter} implementation named by the
 * "hbasefsck.errorreporter" configuration key, defaulting to
 * {@link PrintingErrorReporter} when the key is unset.
 *
 * @param conf configuration used both to resolve the class and to initialize the instance
 * @return a freshly constructed ErrorReporter
 * @throws ClassNotFoundException declared for caller compatibility
 */
static ErrorReporter getErrorReporter(final Configuration conf) throws ClassNotFoundException {
  final Class<? extends ErrorReporter> reporterClass =
      conf.getClass("hbasefsck.errorreporter", PrintingErrorReporter.class, ErrorReporter.class);
  return ReflectionUtils.newInstance(reporterClass, conf);
}
/**
 * Loads and instantiates the {@link DelegationTokenStore} implementation whose
 * class name is resolved from the configuration via
 * {@link MetaStoreServerUtils#getTokenStoreClassName(Configuration)}.
 *
 * @param conf configuration consulted for the store class name and passed to the instance
 * @return a new DelegationTokenStore
 * @throws IOException if the configured class cannot be found
 */
private DelegationTokenStore getTokenStore(Configuration conf) throws IOException {
  final String storeClassName = MetaStoreServerUtils.getTokenStoreClassName(conf);
  try {
    return ReflectionUtils.newInstance(
        Class.forName(storeClassName).asSubclass(DelegationTokenStore.class), conf);
  } catch (ClassNotFoundException e) {
    throw new IOException("Error initializing delegation token store: " + storeClassName, e);
  }
}
/** * Returns a brand new instance of the FileSystem. It does not use * the FileSystem.Cache. In newer versions of HDFS, we can directly * invoke FileSystem.newInstance(Configuration). * * @param conf Configuration * @return A new instance of the filesystem */ private static FileSystem newInstanceFileSystem(Configuration conf) throws IOException { URI uri = FileSystem.getDefaultUri(conf); FileSystem fs = null; Class<?> clazz = conf.getClass("fs." + uri.getScheme() + ".impl", null); if (clazz != null) { // This will be true for Hadoop 1.0, or 0.20. fs = (FileSystem) org.apache.hadoop.util.ReflectionUtils.newInstance(clazz, conf); fs.initialize(uri, conf); } else { // For Hadoop 2.0, we have to go through FileSystem for the filesystem // implementation to be loaded by the service loader in case it has not // been loaded yet. Configuration clone = new Configuration(conf); clone.setBoolean("fs." + uri.getScheme() + ".impl.disable.cache", true); fs = FileSystem.get(uri, clone); } if (fs == null) { throw new IOException("No FileSystem for scheme: " + uri.getScheme()); } return fs; }
/**
 * Deserializes a Writable from the underlying input. The payload carries the
 * concrete class name followed by the instance's serialized fields.
 *
 * @param writable an existing instance to populate, or null to have one created
 *                 from the class name embedded in the stream
 * @return the populated Writable
 * @throws IOException if the embedded class cannot be loaded, or a non-null
 *                     {@code writable} does not match the embedded class name
 */
public Writable readWritable(Writable writable) throws IOException {
  DataInputStream dataIn = null;
  try {
    dataIn = new DataInputStream(new ByteArrayInputStream(in.readBytes()));
    final String className = WritableUtils.readString(dataIn);
    if (writable == null) {
      // Instantiate the class named in the stream.
      try {
        writable = (Writable) ReflectionUtils.newInstance(
            conf.getClassByName(className).asSubclass(Writable.class), conf);
      } catch (ClassNotFoundException e) {
        throw new IOException(e);
      }
    } else if (!writable.getClass().getName().equals(className)) {
      throw new IOException("wrong Writable class given");
    }
    writable.readFields(dataIn);
    dataIn.close();
    dataIn = null; // closed cleanly; the finally-block close becomes a no-op
    return writable;
  } finally {
    IOUtils.closeStream(dataIn);
  }
}
/**
 * Loads the named {@link ReplicationService} implementation, instantiates it via
 * {@link ReflectionUtils}, and initializes it with the server's WAL environment.
 *
 * @param classname fully-qualified name of the implementation to load
 * @param xface the service interface the class must implement
 * @param conf configuration passed to the new instance
 * @param server owning region server
 * @param walFs filesystem holding the WALs
 * @param logDir active WAL directory
 * @param oldLogDir archived WAL directory
 * @param walProvider WAL provider handed to the service
 * @return the initialized service
 * @throws IOException if the class cannot be found (original cause preserved)
 */
private static <T extends ReplicationService> T newReplicationInstance(String classname,
    Class<T> xface, Configuration conf, HRegionServer server, FileSystem walFs, Path logDir,
    Path oldLogDir, WALProvider walProvider) throws IOException {
  Class<? extends T> clazz = null;
  try {
    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
    clazz = Class.forName(classname, true, classLoader).asSubclass(xface);
  } catch (ClassNotFoundException nfe) {
    // Chain the original exception so the root cause is not lost.
    throw new IOException("Could not find class for " + classname, nfe);
  }
  T service = ReflectionUtils.newInstance(clazz, conf);
  service.initialize(server, walFs, logDir, oldLogDir, walProvider);
  return service;
}
EnumSet<NflyKey> nflyFlags) throws IOException { if (uris.length < minReplication) { throw new IOException(minReplication + " < " + uris.length + ": Minimum replication < #destinations"); final DNSToSwitchMapping tmpDns = ReflectionUtils.newInstance(conf.getClass( CommonConfigurationKeys.NET_TOPOLOGY_NODE_SWITCH_MAPPING_IMPL_KEY, ScriptBasedMapping.class, DNSToSwitchMapping.class), conf);
byte vers = in.readByte(); if (vers != ORIGINAL_MAGIC_VERSION_WITH_METADATA) { throw new IOException(file + " is a version " + vers + " SequenceFile instead of an RCFile."); } else { if (!Arrays.equals(magic, MAGIC)) { throw new IOException(file + " not a RCFile and has magic of " + new String(magic)); if (!keyCls.equals(KeyBuffer.class) || !valCls.equals(ValueBuffer.class)) { throw new IOException(file + " not a RCFile"); Class<? extends CompressionCodec> codecClass = conf.getClassByName( codecClassname).asSubclass(CompressionCodec.class); codec = ReflectionUtils.newInstance(codecClass, conf); } catch (ClassNotFoundException cnfe) { throw new IllegalArgumentException(
/**
 * Instantiate the {@link UserProvider} specified in the configuration and set the passed
 * configuration via {@link UserProvider#setConf(Configuration)}.
 *
 * @param conf to read and set on the created {@link UserProvider}
 * @return a {@link UserProvider} ready for use.
 */
public static UserProvider instantiate(Configuration conf) {
  final Class<? extends UserProvider> providerClass =
      conf.getClass(USER_PROVIDER_CONF_KEY, UserProvider.class, UserProvider.class);
  return ReflectionUtils.newInstance(providerClass, conf);
}
/**
 * Reads the type byte from the stream, instantiates the corresponding Writable
 * from the type table, and populates it from the remaining bytes.
 *
 * @param in stream positioned at a serialized (type, payload) record
 * @throws IOException if the type's class cannot be instantiated (cause chained)
 *                     or the payload cannot be read
 */
@Override
public void readFields(DataInput in) throws IOException {
  type = in.readByte();
  // Mask to 0..255: the byte is used as an unsigned index into the type table.
  Class<? extends Writable> clazz = getTypes()[type & 0xff];
  try {
    instance = ReflectionUtils.newInstance(clazz, conf);
  } catch (Exception e) {
    // Chain the cause instead of printStackTrace() + bare rethrow, so the
    // original failure is preserved for the caller and the logs.
    throw new IOException("Cannot initialize the class: " + clazz, e);
  }
  instance.readFields(in);
}
if (!haEnabled && HAUtil.usesSharedEditsDir(conf)) { LOG.warn("Configured NNs:\n" + DFSUtil.nnAddressesAsString(conf)); throw new IOException("Invalid configuration: a shared edits dir " + "must not be specified if HA is not enabled."); checksumType = DataChecksum.Type.valueOf(checksumTypeStr); } catch (IllegalArgumentException iae) { throw new IOException("Invalid checksum type in " + DFS_CHECKSUM_TYPE_KEY + ": " + checksumTypeStr); auditLoggers.get(0) instanceof DefaultAuditLogger; this.retryCache = ignoreRetryCache ? null : initRetryCache(conf); Class<? extends INodeAttributeProvider> klass = conf.getClass( DFS_NAMENODE_INODE_ATTRIBUTES_PROVIDER_KEY, null, INodeAttributeProvider.class); if (klass != null) { inodeAttributeProvider = ReflectionUtils.newInstance(klass, conf); LOG.info("Using INode attribute provider: " + klass.getName());
byte vers = in.readByte(); if (vers != ORIGINAL_MAGIC_VERSION_WITH_METADATA) { throw new IOException(file + " is a version " + vers + " SequenceFile instead of an RCFile."); } else { if (!Arrays.equals(magic, MAGIC)) { throw new IOException(file + " not a RCFile and has magic of " + new String(magic)); if (!keyCls.equals(KeyBuffer.class) || !valCls.equals(ValueBuffer.class)) { throw new IOException(file + " not a RCFile"); Class<? extends CompressionCodec> codecClass = conf.getClassByName( codecClassname).asSubclass(CompressionCodec.class); codec = ReflectionUtils.newInstance(codecClass, conf); } catch (ClassNotFoundException cnfe) { throw new IllegalArgumentException(
/** * Create a region normalizer from the given conf. * @param conf configuration * @return {@link RegionNormalizer} implementation */ public static RegionNormalizer getRegionNormalizer(Configuration conf) { // Create instance of Region Normalizer Class<? extends RegionNormalizer> balancerKlass = conf.getClass(HConstants.HBASE_MASTER_NORMALIZER_CLASS, SimpleRegionNormalizer.class, RegionNormalizer.class); return ReflectionUtils.newInstance(balancerKlass, conf); } }
/**
 * @param conf
 * @return singleton instance of {@link VisibilityLabelService}. The FQCN of the implementation
 * class can be specified using "hbase.regionserver.visibility.label.service.class".
 * @throws IOException When VLS implementation, as specified in conf, can not be loaded.
 */
public VisibilityLabelService getVisibilityLabelService(Configuration conf) throws IOException {
  String vlsClassName = conf.get(VISIBILITY_LABEL_SERVICE_CLASS,
      DefaultVisibilityLabelServiceImpl.class.getCanonicalName()).trim();
  // Fast path: already initialized — verify the requested class matches the
  // cluster-wide singleton before returning it.
  if (this.visibilityLabelService != null) {
    checkForClusterLevelSingleConf(vlsClassName);
    return this.visibilityLabelService;
  }
  synchronized (this) {
    // Double-checked locking: re-check under the lock in case another thread
    // initialized the service between the unsynchronized check and here.
    if (this.visibilityLabelService != null) {
      checkForClusterLevelSingleConf(vlsClassName);
      return this.visibilityLabelService;
    }
    // Record the class name first, then instantiate the singleton.
    this.vlsClazzName = vlsClassName;
    try {
      this.visibilityLabelService = (VisibilityLabelService) ReflectionUtils.newInstance(
          Class.forName(vlsClassName), conf);
    } catch (ClassNotFoundException e) {
      throw new IOException(e);
    }
    return this.visibilityLabelService;
  }
}
/**
 * Resolves and instantiates the wrapped OutputFormat named by {@code OUTPUT_FORMAT}.
 *
 * @param conf configuration holding the delegate output-format class
 * @throws IOException if no output format has been configured
 */
@SuppressWarnings("unchecked")
private void getBaseOutputFormat(Configuration conf) throws IOException {
  // Check the class BEFORE instantiation: ReflectionUtils.newInstance throws an
  // NPE on a null class, which previously made the intended "not set" error
  // path unreachable.
  Class<?> outputFormatClass = conf.getClass(OUTPUT_FORMAT, null);
  if (outputFormatClass == null) {
    throw new IOException("Output Format not set for LazyOutputFormat");
  }
  baseOut = (OutputFormat<K, V>) ReflectionUtils.newInstance(outputFormatClass, conf);
}
(versionBlock[1] != VERSION[1]) || (versionBlock[2] != VERSION[2])) { throw new IOException(this + " not a SequenceFile"); Class<? extends CompressionCodec> codecClass = conf.getClassByName(codecClassname).asSubclass(CompressionCodec.class); this.codec = ReflectionUtils.newInstance(codecClass, conf); } catch (ClassNotFoundException cnfe) { throw new IllegalArgumentException("Unknown codec: " + getDeserializer(serializationFactory, getKeyClass()); if (this.keyDeserializer == null) { throw new IOException( "Could not find a deserializer for the Key class: '" + getKeyClass().getCanonicalName() + "'. " getDeserializer(serializationFactory, getValueClass()); if (this.valDeserializer == null) { throw new IOException( "Could not find a deserializer for the Value class: '" + getValueClass().getCanonicalName() + "'. "
/**
 * Returns an instance of ImpersonationProvider.
 * Looks up the configuration to see if there is custom class specified.
 * @param conf
 * @return ImpersonationProvider
 */
private static ImpersonationProvider getInstance(Configuration conf) {
  final Class<? extends ImpersonationProvider> providerClass = conf.getClass(
      CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
      DefaultImpersonationProvider.class, ImpersonationProvider.class);
  return ReflectionUtils.newInstance(providerClass, conf);
}
@SuppressWarnings("unchecked") private synchronized RecordWriter getRecordWriter( TaskAttemptContext taskContext, String baseFileName) throws IOException, InterruptedException { // look for record-writer in the cache RecordWriter writer = recordWriters.get(baseFileName); // If not in cache, create a new one if (writer == null) { // get the record writer from context output format //FileOutputFormat.setOutputName(taskContext, baseFileName); taskContext.getConfiguration().set("avro.mo.config.namedOutput",baseFileName); try { writer = ((OutputFormat) ReflectionUtils.newInstance( taskContext.getOutputFormatClass(), taskContext.getConfiguration())) .getRecordWriter(taskContext); } catch (ClassNotFoundException e) { throw new IOException(e); } // if counters are enabled, wrap the writer with context // to increment counters if (countersEnabled) { writer = new RecordWriterWithCounter(writer, baseFileName, context); } // add the record-writer to the cache recordWriters.put(baseFileName, writer); } return writer; }
/**
 * Resolves and instantiates the wrapped OutputFormat named by {@code OUTPUT_FORMAT}.
 *
 * @param conf configuration holding the delegate output-format class
 * @throws IOException if no output format has been configured
 */
@SuppressWarnings("unchecked")
private void getBaseOutputFormat(Configuration conf) throws IOException {
  // Check the class BEFORE instantiation: ReflectionUtils.newInstance throws an
  // NPE on a null class, which previously made the intended "not set" error
  // path unreachable.
  Class<?> outputFormatClass = conf.getClass(OUTPUT_FORMAT, null);
  if (outputFormatClass == null) {
    throw new IOException("Output Format not set for LazyOutputFormat");
  }
  baseOut = (OutputFormat<K, V>) ReflectionUtils.newInstance(outputFormatClass, conf);
}
/**
 * Returns an instance of SaslPropertiesResolver.
 * Looks up the configuration to see if there is custom class specified.
 * Constructs the instance by passing the configuration directly to the
 * constructor to achieve thread safety using final fields.
 * @param conf
 * @return SaslPropertiesResolver
 */
public static SaslPropertiesResolver getInstance(Configuration conf) {
  final Class<? extends SaslPropertiesResolver> resolverClass = conf.getClass(
      CommonConfigurationKeysPublic.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
      SaslPropertiesResolver.class, SaslPropertiesResolver.class);
  return ReflectionUtils.newInstance(resolverClass, conf);
}
return fosterStorageHandler; } catch (ClassNotFoundException e) { throw new IOException("Failed to load " + "foster storage handler", e); (Class<? extends HiveStorageHandler>) Class .forName(storageHandler, true, Utilities.getSessionSpecifiedClassLoader()); return (HiveStorageHandler) ReflectionUtils.newInstance( handlerClass, conf); } catch (ClassNotFoundException e) { throw new IOException("Error in loading storage handler." + e.getMessage(), e);