// True when the given operator symbol is one of the configured ops.
@Override public boolean matches(String op) { return ops.contains(op); } };
/**
 * Opens an output stream to the target file.
 *
 * <p>Appends to the existing file contents when the {@code APPEND} mode is set;
 * otherwise truncates the file.
 */
@Override
public FileOutputStream openStream() throws IOException {
  boolean append = modes.contains(APPEND);
  return new FileOutputStream(file, append);
}
/**
 * Checks whether a layer with the given digest is present.
 *
 * @param digest the digest to check for
 * @return true if the layer with the specified digest exists; false otherwise
 */
public boolean has(DescriptorDigest digest) {
  return layerDigests.contains(digest);
}
/**
 * Returns whether any of the given annotations is a nullability annotation,
 * matched by simple name against {@code NULLABLE_ANNOTATION_SIMPLE_NAMES}.
 */
private static boolean isNullable(Annotation[] annotations) {
  for (int i = 0; i < annotations.length; i++) {
    String simpleName = annotations[i].annotationType().getSimpleName();
    if (NULLABLE_ANNOTATION_SIMPLE_NAMES.contains(simpleName)) {
      return true;
    }
  }
  return false;
}
/**
 * Returns whether the given table is one of the metadata tables that enumerate tables.
 *
 * <p>Fix: the original listed {@code TABLE_TABLES} twice in the set literal; the
 * duplicate is removed here. (Guava's {@code ImmutableSet.of} silently dedupes, so
 * behavior is unchanged, but the repetition was almost certainly a typo — NOTE(review):
 * confirm a different constant was not intended in its place.)
 */
private boolean isTablesEnumeratingTable(SchemaTableName schemaTableName) {
  return ImmutableSet.of(TABLE_COLUMNS, TABLE_VIEWS, TABLE_TABLES, TABLE_TABLE_PRIVILEGES)
      .contains(schemaTableName);
}
private void inferSpecialization(Method method, Class<?> parameterType, String typeParameterName) { if (typeParameterNames.contains(typeParameterName) && parameterType != Object.class) { // Infer specialization on this type parameter. // We don't do this for Object because it could match any type. Class<?> specialization = specializedTypeParameters.get(typeParameterName); Class<?> nativeParameterType = Primitives.unwrap(parameterType); checkArgument(specialization == null || specialization.equals(nativeParameterType), "Method [%s] type %s has conflicting specializations %s and %s", method, typeParameterName, specialization, nativeParameterType); specializedTypeParameters.put(typeParameterName, nativeParameterType); } }
/** Returns whether the given type can be handled: a fixed set of primitives, varchar, or decimal. */
private boolean isSupportedType(Type type) {
  if (isVarcharType(type) || type instanceof DecimalType) {
    return true;
  }
  return ImmutableSet.<Type>of(
          TINYINT, SMALLINT, INTEGER, BIGINT, REAL, DOUBLE, BOOLEAN, DATE, TIMESTAMP, VARBINARY)
      .contains(type);
}
}
/** Fails with a syntax error unless {@code c} is allowed to follow {@code prev}. */
void supportFollowing(char prev, char c) {
  boolean allowed = following.get(prev).contains(c);
  if (!allowed) {
    syntaxError("prev=" + prev + ", c=" + c);
  }
}
}
/**
 * Collects the distinct field names appearing across all message summaries,
 * excluding the fields listed in {@code Message.FILTERED_FIELDS}.
 */
private Set<String> extractFields(List<ResultMessageSummary> messages) {
  return messages.stream()
      .map(summary -> summary.message().keySet())
      .flatMap(Set::stream)
      .filter(field -> !Message.FILTERED_FIELDS.contains(field))
      .collect(Collectors.toSet());
}
/** Removes the field with the given key unless it is a reserved field, updating the tracked size. */
public void removeField(final String key) {
  if (RESERVED_FIELDS.contains(key)) {
    return;
  }
  final Object removedValue = fields.remove(key);
  updateSize(key, null, removedValue);
}
// Captures an example stack trace for the lock-ordering edge node1 -> node2,
// trimming leading frames that belong to the lock-factory machinery itself.
ExampleStackTrace(LockGraphNode node1, LockGraphNode node2) {
  super(node1.getLockName() + " -> " + node2.getLockName());
  StackTraceElement[] origStackTrace = getStackTrace();
  // Scan from the top of the stack for the first frame outside the excluded classes.
  for (int i = 0, n = origStackTrace.length; i < n; i++) {
    if (WithExplicitOrdering.class.getName().equals(origStackTrace[i].getClassName())) {
      // For pre-populated disallowedPriorLocks edges, omit the stack trace.
      setStackTrace(EMPTY_STACK_TRACE);
      break;
    }
    if (!EXCLUDED_CLASS_NAMES.contains(origStackTrace[i].getClassName())) {
      // Keep only the caller-relevant tail of the trace.
      setStackTrace(Arrays.copyOfRange(origStackTrace, i, n));
      break;
    }
  }
}
}
/**
 * Reflectively invokes every instance method of {@code RateLimiter} on the given mock,
 * skipping statics, Object methods, and methods known not to work on mocks.
 */
private static void doTestMocking(RateLimiter mock) throws Exception {
  for (Method method : RateLimiter.class.getMethods()) {
    boolean skip =
        isStatic(method.getModifiers())
            || NOT_WORKING_ON_MOCKS.contains(method.getName())
            || method.getDeclaringClass().equals(Object.class);
    if (!skip) {
      method.invoke(mock, arbitraryParameters(method));
    }
  }
}
/**
 * Reports available bytes from the wrapped stream, or always zero when the
 * {@code AVAILABLE_ALWAYS_ZERO} test option is set. Throws if the stream is closed.
 */
@Override
public int available() throws IOException {
  throwIf(closed);
  if (options.contains(TestOption.AVAILABLE_ALWAYS_ZERO)) {
    return 0;
  }
  return in.available();
}
/** Throws an IOException when the given test option is enabled for this stream. */
private void throwIf(TestOption option) throws IOException {
  boolean optionEnabled = options.contains(option);
  throwIf(optionEnabled);
}
/** Throws an IOException when the given test option is enabled for this stream. */
private void throwIf(TestOption option) throws IOException {
  boolean optionEnabled = options.contains(option);
  throwIf(optionEnabled);
}
/**
 * Converts an UPPER_UNDERSCORE constant name to HTTP header-name casing.
 *
 * <p>Special-case constants map directly; otherwise each underscore-separated part is
 * lower-cased after its first letter, except parts listed as uppercase acronyms, which
 * are kept as-is.
 */
private static String upperToHttpHeaderName(
    String constantName,
    ImmutableBiMap<String, String> specialCases,
    ImmutableSet<String> uppercaseAcronyms) {
  // ImmutableBiMap never maps to null, so a null get() means "no special case".
  String specialCase = specialCases.get(constantName);
  if (specialCase != null) {
    return specialCase;
  }
  List<String> parts = Lists.newArrayList();
  for (String part : SPLITTER.split(constantName)) {
    String formatted = part;
    if (!uppercaseAcronyms.contains(part)) {
      formatted = part.charAt(0) + Ascii.toLowerCase(part.substring(1));
    }
    parts.add(formatted);
  }
  return JOINER.join(parts);
}
}
/**
 * Test-double implementation: counts the access, optionally throws, and returns a
 * canned partition only for the known test database/table/partition values.
 *
 * @throws NoSuchObjectException when the requested partition is not one of the test fixtures
 */
@Override
public Partition getPartition(String dbName, String tableName, List<String> partitionValues)
    throws TException {
  accessCount.incrementAndGet();
  if (throwException) {
    throw new RuntimeException();
  }
  boolean knownPartition =
      dbName.equals(TEST_DATABASE)
          && tableName.equals(TEST_TABLE)
          && ImmutableSet.of(TEST_PARTITION_VALUES1, TEST_PARTITION_VALUES2).contains(partitionValues);
  if (!knownPartition) {
    throw new NoSuchObjectException();
  }
  return new Partition(null, TEST_DATABASE, TEST_TABLE, 0, 0, DEFAULT_STORAGE_DESCRIPTOR, ImmutableMap.of());
}
@GwtIncompatible // java serialization not supported in GWT. public void testImmutableEnumSet_deserializationMakesDefensiveCopy() throws Exception { ImmutableSet<SomeEnum> original = Sets.immutableEnumSet(SomeEnum.A, SomeEnum.B); int handleOffset = 6; byte[] serializedForm = serializeWithBackReference(original, handleOffset); ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(serializedForm)); ImmutableSet<?> deserialized = (ImmutableSet<?>) in.readObject(); EnumSet<?> delegate = (EnumSet<?>) in.readObject(); assertEquals(original, deserialized); assertTrue(delegate.remove(SomeEnum.A)); assertTrue(deserialized.contains(SomeEnum.A)); }
private static List<TestColumn> getTestColumnsSupportedByParquet() { // Write of complex hive data to Parquet is broken // TODO: empty arrays or maps with null keys don't seem to work // Parquet does not support DATE return TEST_COLUMNS.stream() .filter(column -> !ImmutableSet.of("t_null_array_int", "t_array_empty", "t_map_null_key", "t_map_null_key_complex_value", "t_map_null_key_complex_key_value") .contains(column.getName())) .filter(column -> column.isPartitionKey() || ( !hasType(column.getObjectInspector(), PrimitiveCategory.DATE)) && !hasType(column.getObjectInspector(), PrimitiveCategory.SHORT) && !hasType(column.getObjectInspector(), PrimitiveCategory.BYTE)) .collect(toList()); }