/**
 * Creates a query context stamped with a freshly generated random query id
 * and the supplied wall-clock start time.
 *
 * @param startMills query start timestamp in epoch milliseconds
 */
QueryContext(long startMills) {
    this.queryId = RandomUtil.randomUUID().toString();
    this.queryStartMillis = startMills;
}
/** Generates a fresh, unique snapshot id for a lookup table. */
private String genLookupSnapshotID() {
    return String.valueOf(RandomUtil.randomUUID());
}
/** Replaces this entity's uuid with a newly generated random one. */
public void updateRandomUuid() {
    final String freshUuid = RandomUtil.randomUUID().toString();
    setUuid(freshUuid);
}
public AbstractExecutable() { setId(RandomUtil.randomUUID().toString()); }
/** * Configure <code>job</code> with a TotalOrderPartitioner, partitioning against * <code>splitPoints</code>. Cleans up the partitions file after job exists. */ static void configurePartitioner(Job job, List<ImmutableBytesWritable> splitPoints) throws IOException { Configuration conf = job.getConfiguration(); // create the partitions file FileSystem fs = FileSystem.get(conf); Path partitionsPath = new Path(conf.get("hbase.fs.tmp.dir"), "partitions_" + RandomUtil.randomUUID()); fs.makeQualified(partitionsPath); writePartitions(conf, partitionsPath, splitPoints); fs.deleteOnExit(partitionsPath); // configure job to use it job.setPartitionerClass(TotalOrderPartitioner.class); TotalOrderPartitioner.setPartitionFile(conf, partitionsPath); }
/**
 * Produces {@code totalSize} random UUID strings, returned in ascending order.
 *
 * @param totalSize number of strings to generate
 * @return a sorted list of random UUID strings
 */
private ArrayList<String> genStringDataSet(int totalSize) {
    ArrayList<String> dataSet = new ArrayList<>();
    int remaining = totalSize;
    while (remaining-- > 0) {
        dataSet.add(RandomUtil.randomUUID().toString());
    }
    Collections.sort(dataSet);
    return dataSet;
}
}
/**
 * Saves an external filter descriptor and syncs it to the requesting project.
 *
 * @param request carries the target project and the serialized filter desc
 * @return a map with key {@code "success"} set to {@code "true"} on completion
 * @throws IOException if the filter descriptor JSON cannot be parsed
 */
@RequestMapping(value = "/saveExtFilter", method = { RequestMethod.POST }, produces = { "application/json" })
@ResponseBody
public Map<String, String> saveExternalFilter(@RequestBody ExternalFilterRequest request) throws IOException {
    // FIX: was a raw 'new HashMap()' — use the diamond operator for type safety.
    Map<String, String> result = new HashMap<>();
    String filterProject = request.getProject();
    // Deserialize the descriptor and stamp it with a fresh uuid before saving.
    ExternalFilterDesc desc = JsonUtil.readValue(request.getExtFilter(), ExternalFilterDesc.class);
    desc.setUuid(RandomUtil.randomUUID().toString());
    extFilterService.saveExternalFilter(desc);
    extFilterService.syncExtFilterToProject(new String[] { desc.getName() }, filterProject);
    result.put("success", "true");
    return result;
}
@Test
public void testRandomUUID() {
    // Ids from RandomUtil must match the canonical UUID string length.
    int expectedLength = UUID.randomUUID().toString().length();
    Assert.assertEquals(RandomUtil.randomUUID().toString().length(), expectedLength);
    // Two consecutive ids must differ.
    Assert.assertNotEquals(RandomUtil.randomUUID().toString(), RandomUtil.randomUUID().toString());
}
}
// Populate one synthetic user record: a random unique id, a gender picked
// from the two-element list, and an age uniformly drawn from [10, 29].
user.put("id", RandomUtil.randomUUID().toString());
user.put("gender", genders.get(rnd.nextInt(2)));
user.put("age", rnd.nextInt(20) + 10);
private TableExtDesc convertOldTableExtToNewer(String resourceName) { ResourceStore store = getStore(); Map<String, String> attrs = Maps.newHashMap(); try { RawResource res = store.getResource( ResourceStore.TABLE_EXD_RESOURCE_ROOT + "/" + resourceName + MetadataConstants.FILE_SURFIX); InputStream is = res.content(); try { attrs.putAll(JsonUtil.readValue(is, HashMap.class)); } finally { if (is != null) is.close(); } } catch (IOException ex) { throw new RuntimeException(ex); } String cardinality = attrs.get(MetadataConstants.TABLE_EXD_CARDINALITY); // parse table identity from file name String tableIdentity = TableDesc.parseResourcePath(resourceName).getFirst(); TableExtDesc result = new TableExtDesc(); result.setIdentity(tableIdentity); result.setUuid(RandomUtil.randomUUID().toString()); result.setLastModified(0); result.setCardinality(cardinality); return result; }
@Test public void testWriteCuboidStatistics() throws IOException { final Configuration conf = HadoopUtil.getCurrentConfiguration(); File tmp = File.createTempFile("cuboidstatistics", ""); final Path outputPath = new Path(tmp.getParent().toString() + File.separator + RandomUtil.randomUUID().toString()); if (!FileSystem.getLocal(conf).exists(outputPath)) { // FileSystem.getLocal(conf).create(outputPath); } System.out.println(outputPath); Map<Long, HLLCounter> cuboidHLLMap = Maps.newHashMap(); CubeStatsWriter.writeCuboidStatistics(conf, outputPath, cuboidHLLMap, 100); FileSystem.getLocal(conf).delete(outputPath, true); } }
@Override public void init(DictionaryInfo dictInfo, int baseId, String hdfsDir) throws IOException { sourceColumn = dictInfo.getSourceTable() + "." + dictInfo.getSourceColumn(); KylinConfig config = KylinConfig.getInstanceFromEnv(); int maxEntriesPerSlice = config.getAppendDictEntrySize(); if (hdfsDir == null) { //build in Kylin job server hdfsDir = KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory(); } //use UUID to make each segment dict in different HDFS dir and support concurrent build //use timestamp to make the segment dict easily to delete String baseDir = hdfsDir + "resources/SegmentDict" + dictInfo.getResourceDir() + "/" + RandomUtil.randomUUID().toString() + "_" + System.currentTimeMillis() + "/"; this.builder = new AppendTrieDictionaryBuilder(baseDir, maxEntriesPerSlice, false); this.baseId = baseId; }
// Stamp the incoming model descriptor with a fresh uuid, then resolve the
// target project (defaulting when the request names none).
modelDesc.setUuid(RandomUtil.randomUUID().toString());
String projectName = (null == modelRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
        : modelRequest.getProject();
// Normalize identifiers with Locale.ROOT for locale-independent casing, give
// both descriptors fresh uuids, and reset lastModified so they save as new.
tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
tableDesc.setName(table.toUpperCase(Locale.ROOT));
tableDesc.setUuid(RandomUtil.randomUUID().toString());
tableDesc.setLastModified(0);
tableDesc.setSourceType(ISourceAware.ID_JDBC); // mark as JDBC-sourced
tableExtDesc.setUuid(RandomUtil.randomUUID().toString());
tableExtDesc.setLastModified(0);
tableExtDesc.init(prj);
// Generate 10000 random UUID strings as test data.
int num = 10000;
for (int i = 0; i < num; i++) {
    UUID uuid = RandomUtil.randomUUID();
    data.add(uuid.toString());
// New table branch: normalize identifiers (Locale.ROOT for stable casing)
// and stamp a fresh uuid with lastModified reset.
tableDesc.setDatabase(database.toUpperCase(Locale.ROOT));
tableDesc.setName(tableName.toUpperCase(Locale.ROOT));
tableDesc.setUuid(RandomUtil.randomUUID().toString());
tableDesc.setLastModified(0);
} else {
    // Existing table branch: refresh only the ext descriptor.
    tableExtDesc.setUuid(RandomUtil.randomUUID().toString());
    tableExtDesc.setLastModified(0);
    tableExtDesc.init(prj);
@Test public void testSortNormalString() { int count = 10; ArrayList<String> strList = new ArrayList<>(); for (int i = 0; i < count; i++) { UUID uuid = RandomUtil.randomUUID(); strList.add(uuid.toString()); } strList.add("hello"); strList.add("hello"); //duplicate strList.add("123"); strList.add(""); ArrayList<SelfDefineSortableKey> keyList = createKeyList(strList, (byte) SelfDefineSortableKey.TypeFlag.NONE_NUMERIC_TYPE.ordinal()); Collections.sort(keyList); ArrayList<String> strListAftereSort = new ArrayList<>(); for (SelfDefineSortableKey key : keyList) { String str = printKey(key); strListAftereSort.add(str); } assertTrue(isIncreasedOrder(strListAftereSort, new Comparator<String>() { @Override public int compare(String o1, String o2) { return o1.compareTo(o2); } })); }
/**
 * Verifies that getJobWorkingDir creates the job working directory when
 * kylin.source.hive.table-dir-create-first is enabled.
 */
@Test
public void TestGetJobWorkingDir() throws IOException {
    FileSystem fileSystem = FileSystem.get(new Configuration());
    Path jobWorkDirPath = null;
    // Install a mocked KylinConfig as the thread-local config for the
    // duration of the try block; unset automatically on close.
    KylinConfig kylinConfig = mock(KylinConfig.class);
    try (SetAndUnsetThreadLocalConfig autoUnset = KylinConfig.setAndUnsetThreadLocalConfig(kylinConfig)) {
        when(kylinConfig.getHiveTableDirCreateFirst()).thenReturn(true);
        when(kylinConfig.getHdfsWorkingDirectory()).thenReturn("/tmp/kylin/");
        DefaultChainedExecutable defaultChainedExecutable = mock(DefaultChainedExecutable.class);
        // NOTE(review): setId is called on a Mockito mock, so it is a no-op
        // unless the real method runs — confirm getId is stubbed/callable.
        defaultChainedExecutable.setId(RandomUtil.randomUUID().toString());

        // getInstanceFromEnv() here should resolve to the thread-local mock
        // installed above — presumably returning "/tmp/kylin/"; verify.
        String jobWorkingDir = HiveInputBase.getJobWorkingDir(defaultChainedExecutable,
                KylinConfig.getInstanceFromEnv().getHdfsWorkingDirectory());
        jobWorkDirPath = new Path(jobWorkingDir);
        // The directory must exist after getJobWorkingDir returns.
        Assert.assertTrue(fileSystem.exists(jobWorkDirPath));
    } finally {
        // Best-effort cleanup of the created directory.
        if (jobWorkDirPath != null)
            fileSystem.deleteOnExit(jobWorkDirPath);
    }
}
// Stamp the cube descriptor with a fresh uuid, then resolve the target
// project (defaulting when the request names none).
desc.setUuid(RandomUtil.randomUUID().toString());
String projectName = (null == cubeRequest.getProject()) ? ProjectInstance.DEFAULT_PROJECT_NAME
        : cubeRequest.getProject();
/**
 * Builds a lookup-table cache for a brand-new snapshot id and expects the
 * cache to report it as AVAILABLE.
 */
@Test
public void testBuildTableCache() throws Exception {
    String snapshotID = RandomUtil.randomUUID().toString();
    ExtTableSnapshotInfo snapshotInfo = buildSnapshotCache(snapshotID, 10000);
    RocksDBLookupTableCache cache = RocksDBLookupTableCache.getInstance(kylinConfig);
    assertEquals(CacheState.AVAILABLE, cache.getCacheState(snapshotInfo));
}