/**
 * Builds the configuration used for sampling: the site configuration with all
 * pre-existing sampler properties removed, overlaid with the table properties
 * of the sampler currently referenced by {@code samplerRef}.
 *
 * @param siteConf
 *          the site configuration to use as the base
 * @return a configuration reflecting the active sampler's settings
 */
private AccumuloConfiguration createSampleConfig(AccumuloConfiguration siteConf) {
  // Strip any stale sampler settings so only the active sampler's properties apply.
  final String samplerPrefix = Property.TABLE_SAMPLER.getKey();
  ConfigurationCopy sampleConf = new ConfigurationCopy(
      Iterables.filter(siteConf, prop -> !prop.getKey().startsWith(samplerPrefix)));
  // Overlay the table properties of the sampler currently in effect.
  for (Entry<String,String> prop : samplerRef.get().getFirst().toTablePropertiesMap()
      .entrySet()) {
    sampleConf.set(prop.getKey(), prop.getValue());
  }
  return sampleConf;
}
/**
 * This helper method provides an AccumuloConfiguration object constructed from the Accumulo
 * defaults, and overridden with Accumulo properties that have been stored in the Job's
 * configuration.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @since 1.6.0
 */
public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass,
    Configuration conf) {
  String prefix = enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + ".";
  // Start from the Accumulo defaults; Job-stored properties override them below.
  ConfigurationCopy acuConf = new ConfigurationCopy(DefaultConfiguration.getInstance());
  for (Entry<String,String> entry : conf) {
    String fullKey = entry.getKey();
    if (!fullKey.startsWith(prefix)) {
      continue; // not one of ours
    }
    String propString = fullKey.substring(prefix.length());
    Property prop = Property.getPropertyByKey(propString);
    if (prop != null) {
      acuConf.set(prop, entry.getValue());
    } else if (Property.isValidTablePropertyKey(propString)) {
      // Not a fixed Property enum, but a legal per-table key (e.g. iterator config).
      acuConf.set(propString, entry.getValue());
    } else {
      throw new IllegalArgumentException("Unknown accumulo file property " + propString);
    }
  }
  return acuConf;
}
/**
 * Derives the configuration to use for a compaction's output file: the base table
 * configuration with any write parameters from the compaction plan layered on top.
 *
 * @param base
 *          the table's current configuration
 * @param plan
 *          the compaction plan, possibly null or without write parameters
 * @return {@code base} unchanged when the plan supplies no overrides, otherwise a copy
 *         with the plan's file settings applied
 */
protected AccumuloConfiguration createTableConfiguration(TableConfiguration base,
    CompactionPlan plan) {
  if (plan == null || plan.writeParameters == null) {
    return base; // nothing to override
  }
  WriteParameters params = plan.writeParameters;
  ConfigurationCopy overridden = new ConfigurationCopy(base);
  // Only positive / non-zero values are treated as explicit overrides.
  if (params.getHdfsBlockSize() > 0) {
    overridden.set(Property.TABLE_FILE_BLOCK_SIZE, String.valueOf(params.getHdfsBlockSize()));
  }
  if (params.getBlockSize() > 0) {
    overridden.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE, String.valueOf(params.getBlockSize()));
  }
  if (params.getIndexBlockSize() > 0) {
    overridden.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX,
        String.valueOf(params.getIndexBlockSize()));
  }
  if (params.getCompressType() != null) {
    overridden.set(Property.TABLE_FILE_COMPRESSION_TYPE, params.getCompressType());
  }
  if (params.getReplication() != 0) {
    overridden.set(Property.TABLE_FILE_REPLICATION, String.valueOf(params.getReplication()));
  }
  return overridden;
}
// Configure the table for bloom filters: enable them, select the column-family
// key functor, and use the RFile format.
acuconf.set(Property.TABLE_BLOOM_ENABLED, "true");
// NOTE(review): this class name lacks the "org.apache." prefix that Accumulo
// core classes normally carry — confirm it resolves at runtime.
acuconf.set(Property.TABLE_BLOOM_KEY_FUNCTOR,
    "accumulo.core.file.keyfunctor.ColumnFamilyFunctor");
acuconf.set(Property.TABLE_FILE_TYPE, RFile.EXTENSION);
// Load bloom filters after a single seek, and one at a time, so their effect
// is observable deterministically.
acuconf.set(Property.TABLE_BLOOM_LOAD_THRESHOLD, "1");
acuconf.set(Property.TSERV_BLOOM_LOAD_MAXCONCURRENT, "1");
/**
 * Reads and merges the selected summaries from every configured RFile source.
 * All source streams are closed before returning, even when loading fails.
 *
 * @return the merged summaries across all sources
 * @throws IOException
 *           if reading any source fails
 */
@Override
public Collection<Summary> read() throws IOException {
  SummarizerFactory factory = new SummarizerFactory();
  // Seed with defaults, then overlay the caller-supplied table properties.
  ConfigurationCopy tableConf = new ConfigurationCopy(DefaultConfiguration.getInstance());
  config.forEach((key, value) -> tableConf.set(key, value));
  RFileSource[] sources = in.getSources();
  try {
    CryptoService cryptoService = CryptoServiceFactory.newInstance(tableConf, ClassloaderType.JAVA);
    // All sources are restricted to the same single row range.
    Collection<Gatherer.RowRange> ranges =
        Collections.singletonList(new Gatherer.RowRange(startRow, endRow));
    SummaryCollection merged = new SummaryCollection();
    for (RFileSource source : sources) {
      SummaryReader reader = SummaryReader.load(in.getFileSystem().getConf(),
          source.getInputStream(), source.getLength(), summarySelector, factory, cryptoService);
      merged.merge(reader.getSummaries((java.util.List<Gatherer.RowRange>) ranges), factory);
    }
    return merged.getSummaries();
  } finally {
    // Close every source stream regardless of success.
    for (RFileSource source : sources) {
      source.getInputStream().close();
    }
  }
}
/**
 * Writes the current locality groups into the given configuration: one
 * {@code table.group.<name>} entry listing each group's column families
 * (comma separated), plus {@code table.groups.enabled} naming every group.
 *
 * @param configuration
 *          the configuration to mutate; also returned for chaining
 * @return the same {@code configuration} instance, updated in place
 */
private ConfigurationCopy updateConfigurationForLocalityGroups(ConfigurationCopy configuration) {
  Map<String,Set<ByteSequence>> locGroups = getLocalityGroups();
  StringBuilder enabledLGs = new StringBuilder();
  for (Entry<String,Set<ByteSequence>> entry : locGroups.entrySet()) {
    if (enabledLGs.length() > 0) {
      enabledLGs.append(",");
    }
    StringBuilder value = new StringBuilder();
    for (ByteSequence bytes : entry.getValue()) {
      if (value.length() > 0) {
        value.append(",");
      }
      // Fix: decode with an explicit charset. new String(byte[]) uses the
      // platform-default charset, which is environment-dependent and can
      // corrupt non-ASCII column family names. UTF-8 assumed — confirm that
      // matches how these family names were encoded.
      value.append(new String(bytes.toArray(), java.nio.charset.StandardCharsets.UTF_8));
    }
    configuration.set("table.group." + entry.getKey(), value.toString());
    enabledLGs.append(entry.getKey());
  }
  configuration.set(Property.TABLE_LOCALITY_GROUPS, enabledLGs.toString());
  return configuration;
}
/**
 * Builds the configuration used for sampling: the supplied site configuration
 * minus any existing sampler properties, with the table properties of the
 * sampler referenced by {@code samplerRef} applied on top.
 *
 * @param siteConf
 *          the site configuration to use as the base
 * @return a configuration reflecting the active sampler's settings
 */
private AccumuloConfiguration createSampleConfig(AccumuloConfiguration siteConf) {
  // Filter out stale sampler settings before overlaying the active sampler's.
  Predicate<Entry<String,String>> notSamplerProperty = new Predicate<Entry<String,String>>() {
    @Override
    public boolean apply(Entry<String,String> prop) {
      return !prop.getKey().startsWith(Property.TABLE_SAMPLER.getKey());
    }
  };
  ConfigurationCopy sampleConf =
      new ConfigurationCopy(Iterables.filter(siteConf, notSamplerProperty));
  for (Entry<String,String> prop : samplerRef.get().getFirst().toTablePropertiesMap()
      .entrySet()) {
    sampleConf.set(prop.getKey(), prop.getValue());
  }
  return sampleConf;
}
/**
 * This helper method provides an AccumuloConfiguration object constructed from the Accumulo
 * defaults, and overridden with Accumulo properties that have been stored in the Job's
 * configuration.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @since 1.6.0
 */
public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass,
    Configuration conf) {
  final String prefix = enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + ".";
  final ConfigurationCopy result = new ConfigurationCopy(DefaultConfiguration.getInstance());
  for (Entry<String,String> hadoopEntry : conf) {
    if (hadoopEntry.getKey().startsWith(prefix)) {
      applyAccumuloProperty(result, hadoopEntry.getKey().substring(prefix.length()),
          hadoopEntry.getValue());
    }
  }
  return result;
}

/**
 * Sets one stored property on the configuration: known {@link Property} keys and valid
 * per-table keys are accepted; anything else is rejected.
 */
private static void applyAccumuloProperty(ConfigurationCopy conf, String propString,
    String value) {
  Property prop = Property.getPropertyByKey(propString);
  if (prop != null) {
    conf.set(prop, value);
  } else if (Property.isValidTablePropertyKey(propString)) {
    conf.set(propString, value);
  } else {
    throw new IllegalArgumentException("Unknown accumulo file property " + propString);
  }
}
/**
 * This helper method provides an AccumuloConfiguration object constructed from the Accumulo
 * defaults, and overridden with Accumulo properties that have been stored in the Job's
 * configuration.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @since 1.6.0
 */
public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass,
    Configuration conf) {
  String propPrefix = enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + ".";
  // Defaults first; any matching entries in the Hadoop configuration win.
  ConfigurationCopy accumuloConf = new ConfigurationCopy(DefaultConfiguration.getInstance());
  for (Entry<String,String> hadoopProp : conf) {
    String fullKey = hadoopProp.getKey();
    if (fullKey.startsWith(propPrefix)) {
      String shortKey = fullKey.substring(propPrefix.length());
      Property knownProp = Property.getPropertyByKey(shortKey);
      if (knownProp != null) {
        accumuloConf.set(knownProp, hadoopProp.getValue());
      } else if (Property.isValidTablePropertyKey(shortKey)) {
        // Legal per-table key that is not a fixed Property enum value.
        accumuloConf.set(shortKey, hadoopProp.getValue());
      } else {
        throw new IllegalArgumentException("Unknown accumulo file property " + shortKey);
      }
    }
  }
  return accumuloConf;
}
/**
 * This helper method provides an AccumuloConfiguration object constructed from the Accumulo
 * defaults, and overridden with Accumulo properties that have been stored in the Job's
 * configuration.
 *
 * @param implementingClass
 *          the class whose name will be used as a prefix for the property configuration key
 * @param conf
 *          the Hadoop configuration object to configure
 * @since 1.6.0
 */
public static AccumuloConfiguration getAccumuloConfiguration(Class<?> implementingClass,
    Configuration conf) {
  String prefix = enumToConfKey(implementingClass, Opts.ACCUMULO_PROPERTIES) + ".";
  // NOTE(review): other copies of this helper obtain defaults via
  // DefaultConfiguration.getInstance(); consider aligning — confirm imports first.
  ConfigurationCopy acuConf =
      new ConfigurationCopy(AccumuloConfiguration.getDefaultConfiguration());
  for (Entry<String,String> entry : conf) {
    String fullKey = entry.getKey();
    if (!fullKey.startsWith(prefix)) {
      continue; // unrelated Hadoop setting
    }
    String propString = fullKey.substring(prefix.length());
    Property prop = Property.getPropertyByKey(propString);
    if (prop != null) {
      acuConf.set(prop, entry.getValue());
    } else if (Property.isValidTablePropertyKey(propString)) {
      acuConf.set(propString, entry.getValue());
    } else {
      throw new IllegalArgumentException("Unknown accumulo file property " + propString);
    }
  }
  return acuConf;
}
/**
 * Produces the configuration for a compaction's output file. When the plan carries
 * write parameters, a copy of the base configuration is made and each explicitly
 * set parameter overrides the corresponding table file property; otherwise the
 * base configuration is returned untouched.
 *
 * @param base
 *          the table's current configuration
 * @param plan
 *          the compaction plan, possibly null or without write parameters
 * @return the configuration to use when writing the compaction output
 */
protected AccumuloConfiguration createTableConfiguration(TableConfiguration base,
    CompactionPlan plan) {
  if (plan == null || plan.writeParameters == null) {
    return base;
  }
  WriteParameters wp = plan.writeParameters;
  ConfigurationCopy conf = new ConfigurationCopy(base);
  // Zero / null parameter values mean "no override requested".
  if (wp.getHdfsBlockSize() > 0) {
    conf.set(Property.TABLE_FILE_BLOCK_SIZE, "" + wp.getHdfsBlockSize());
  }
  if (wp.getBlockSize() > 0) {
    conf.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE, "" + wp.getBlockSize());
  }
  if (wp.getIndexBlockSize() > 0) {
    conf.set(Property.TABLE_FILE_COMPRESSED_BLOCK_SIZE_INDEX, "" + wp.getIndexBlockSize());
  }
  if (wp.getCompressType() != null) {
    conf.set(Property.TABLE_FILE_COMPRESSION_TYPE, wp.getCompressType());
  }
  if (wp.getReplication() != 0) {
    conf.set(Property.TABLE_FILE_REPLICATION, "" + wp.getReplication());
  }
  return conf;
}
// For each table, override its compression setting: blacklisted tables get no
// compression, all others get the configured compression type.
for (String tableName : tableIds.keySet()) {
  ConfigurationCopy tableConfig =
      new ConfigurationCopy(connector.tableOperations().getProperties(tableName));
  tableConfig.set(Property.TABLE_FILE_COMPRESSION_TYPE.getKey(),
      (compressionTableBlackList.contains(tableName) ? Compression.COMPRESSION_NONE
          : compressionType));
  // NOTE(review): loop body continues beyond this view — the locality-group
  // branch below is truncated here.
  if (Iterables.contains(localityGroupTables, tableName)) {
// Enable bloom filters on the table, choose the column-family key functor,
// and use the RFile file format.
acuconf.set(Property.TABLE_BLOOM_ENABLED, "true");
// NOTE(review): functor class name is missing the usual "org.apache." package
// prefix of Accumulo core classes — verify it loads at runtime.
acuconf.set(Property.TABLE_BLOOM_KEY_FUNCTOR,
    "accumulo.core.file.keyfunctor.ColumnFamilyFunctor");
acuconf.set(Property.TABLE_FILE_TYPE, RFile.EXTENSION);
// Trigger bloom-filter loading after one seek and limit loading to one filter
// at a time, making the behavior deterministic.
acuconf.set(Property.TABLE_BLOOM_LOAD_THRESHOLD, "1");
acuconf.set(Property.TSERV_BLOOM_LOAD_MAXCONCURRENT, "1");
/**
 * Opens a connection to the Accumulo instance described by the given options.
 * The instance configuration's DFS URI is overridden with the options' Hadoop
 * default filesystem before connecting.
 *
 * @param options
 *          connection settings (instance name, ZooKeepers, credentials, DFS URI)
 * @return a connector authenticated as the configured user
 * @throws AccumuloSecurityException
 *           if authentication fails
 * @throws AccumuloException
 *           if the connection cannot be established
 */
protected Connector createAccumuloConnection(BackupRestoreOptionsBase options)
    throws AccumuloSecurityException, AccumuloException {
  Instance instance =
      new ZooKeeperInstance(options.getAccumuloInstanceName(), options.getZookeeperServers());
  // Redirect the instance at the desired Hadoop filesystem.
  ConfigurationCopy instanceConf = new ConfigurationCopy(instance.getConfiguration());
  instanceConf.set(Property.INSTANCE_DFS_URI, options.getHadoopFsDefaultFS());
  instance.setConfiguration(instanceConf);
  AuthenticationToken token = new PasswordToken(options.getAccumuloPassword());
  return instance.getConnector(options.getAccumuloUserName(), token);
}