@BeforeClass public static void compilerAssumptions() { // JiBX compiler is currently not compatible with JDK 9 Assume.assumeTrue(System.getProperty("java.version").startsWith("1.8.")); }
/**
 * Skips these tests on Windows and on JVMs where the open-file-descriptor
 * count cannot be read from the OS bean.
 */
@BeforeClass
public static void beforeClass() {
    // Not supported on Windows.
    Assume.assumeFalse( SystemUtils.IS_OS_WINDOWS );
    // Requires a JVM/OS combination that exposes the descriptor count.
    Assume.assumeThat( OsBeanUtil.getOpenFileDescriptors(), not( OsBeanUtil.VALUE_UNAVAILABLE ) );
}
@Override
public void evaluate() throws Throwable {
    // Arguments supplied for this (theory) invocation.
    final Object[] values = complete.getMethodArguments();
    if (!nullsOk()) {
        // Unless nulls are explicitly allowed, skip invocations whose argument
        // array (or any element of it — assumeNotNull is varargs) is null.
        Assume.assumeNotNull(values);
    }
    // Invoke the test method on a fresh fixture instance, unwrapping
    // reflection wrapper exceptions into the original Throwable.
    method.invokeExplosively(freshInstance, values);
}
};
@Test public void testReadXmlWithBOMUcs2() throws Exception { Assume.assumeFalse("This test does not pass on some IBM VMs xml parsers", System.getProperty("java.vendor").contains("IBM")); // UCS-2 is BE. Assume.assumeTrue(Charset.isSupported("ISO-10646-UCS-2")); final byte[] data = "<?xml version=\"1.0\" encoding=\"ISO-10646-UCS-2\"?><X/>".getBytes("ISO-10646-UCS-2"); parseXml(new BOMInputStream(createUtf16BeDataStream(data, true), ByteOrderMark.UTF_16BE)); parseXml(createUtf16BeDataStream(data, true)); }
/** A file created directly on disk inside a directory resource must appear as a defined child. */
@Theory
public void theoryAddingFileToDirectoryAddsResource(String path) throws Exception {
    // Only applies to directory resources.
    Resource parentResource = getResource(path);
    assumeThat(parentResource, is(directory()));

    // Create a file on disk, bypassing the Resource API entirely.
    File backingDir = parentResource.dir();
    File created = new File(backingDir, "newFileCreatedDirectly");
    assumeTrue(created.createNewFile());

    // The new file must be visible both as a defined resource and in the listing.
    Resource addedChild = getResource(Paths.path(parentResource.path(), "newFileCreatedDirectly"));
    Collection<Resource> listing = parentResource.list();
    assertThat(addedChild, is(defined()));
    assertThat(listing, hasItem(addedChild));
}
/** Skips the class on Windows, where unclosed JarFiles make it fail (FLINK-9844). */
@BeforeClass
public static void checkOS() {
    final boolean runningOnWindows = OperatingSystem.isWindows();
    Assume.assumeFalse("This test fails on Windows due to unclosed JarFiles, see FLINK-9844.", runningOnWindows);
}
// Starts a mini DFS cluster with erasure coding enabled (via reflection, so the
// test still compiles against Hadoop versions lacking the EC API), verifies a
// created stream does NOT advertise hflush, then starts the mini HBase cluster
// with strict stream-capability enforcement on.
@BeforeClass
public static void setup() throws Exception {
    try {
        MiniDFSCluster cluster = util.startMiniDFSCluster(3); // Need 3 DNs for RS-3-2 policy
        DistributedFileSystem fs = cluster.getFileSystem();

        // Reflection keeps this compatible with Hadoop versions that predate EC;
        // NoSuchMethodException below is the "EC unavailable" signal.
        Method enableAllECPolicies =
            DFSTestUtil.class.getMethod("enableAllECPolicies", DistributedFileSystem.class);
        enableAllECPolicies.invoke(null, fs);

        DFSClient client = fs.getClient();
        Method setErasureCodingPolicy =
            DFSClient.class.getMethod("setErasureCodingPolicy", String.class, String.class);
        setErasureCodingPolicy.invoke(client, "/", "RS-3-2-1024k"); // try a built-in policy

        try (FSDataOutputStream out = fs.create(new Path("/canary"))) {
            // If this comes back as having hflush then some test setup assumption is wrong.
            // Fail the test so that a developer has to look and triage
            assertFalse("Did not enable EC!", CommonFSUtils.hasCapability(out, HFLUSH));
        }
    } catch (NoSuchMethodException e) {
        // We're not testing anything interesting if EC is not available, so skip the rest of the test
        Assume.assumeNoException("Using an older version of hadoop; EC not available.", e);
    }

    util.getConfiguration().setBoolean(CommonFSUtils.UNSAFE_STREAM_CAPABILITY_ENFORCE, true);
    util.startMiniCluster();
}
/** Opening an input stream on a directory resource must throw {@link IllegalStateException}. */
@Theory
public void theoryDirectoriesHaveNoIstreams(String path) throws Exception {
    Resource candidate = getResource(path);
    assumeThat(candidate, is(directory()));

    // Directories are not readable as streams.
    exception.expect(IllegalStateException.class);
    candidate.in().close();
}
/** Unpacking a LocalDate with a v1 packer must surface the documented error message. */
@Test
public void shouldCallExternalErrorOnDate() throws Exception
{
    // Only meaningful for protocol version 1, which cannot represent temporal values.
    assumeThat( packerUnderTest.version(), equalTo( 1L ) );

    testUnpackableStructParametersWithKnownType(
            new Neo4jPackV2(),
            ValueUtils.of( LocalDate.now() ),
            "LocalDate values cannot be unpacked with this version of bolt." );
}
@Theory public void theoryParentsKnowTheirChildren(String path) throws Exception { Resource res = getResource(path); assumeThat(res, is(directory())); Resource parent = res.parent(); assumeThat(path, parent, notNullValue()); // Make sure this resource has a parent Collection<Resource> result = parent.list(); assertThat(path, result, hasItem(res)); // this assumed equals was written! }
/** Opening an output stream on a directory resource must throw {@link IllegalStateException}. */
@Theory
public void theoryDirectoriesHaveNoOstream(String path) throws Exception {
    Resource candidate = getResource(path);
    assumeThat(candidate, is(directory()));

    // Directories are not writable as streams.
    exception.expect(IllegalStateException.class);
    candidate.out().close();
}
/** Combining the "~page" hint with range()/offset is illegal and must be rejected. */
@Test
public void testQueryByPageWithOffset() {
    Assume.assumeTrue("Not support paging", storeFeatures().supportsQueryByPage());

    HugeGraph graph = graph();
    init100Books();

    Assert.assertThrows(IllegalStateException.class, () -> graph.traversal().V()
            .has("~page", "")
            .range(2, 10)
            .toList());
}
/** Every defined resource that is not the root must have a directory as its parent. */
@Theory
public void theoryParentIsDirectory(String path) throws Exception {
    Resource child = getResource(path);
    Resource parent = child.parent();
    assumeThat(path + " not root", parent, notNullValue());

    // Undefined resources make no claim about their parent's type.
    if (child.getType() != Type.UNDEFINED) {
        assertThat(path + " directory", parent, is(directory()));
    }
}
/**
 * Parses a complex document with namespace-prefix reporting disabled and checks the
 * StAX handler still reproduces an equivalent document.
 */
@Test
public void noNamespacePrefixes() throws Exception {
    // The document references www.springframework.org; skip when offline.
    Assume.assumeTrue(wwwSpringframeworkOrgIsAccessible());

    StringWriter output = new StringWriter();
    AbstractStaxHandler handler = createStaxHandler(new StreamResult(output));
    xmlReader.setContentHandler(handler);
    xmlReader.setProperty("http://xml.org/sax/properties/lexical-handler", handler);
    xmlReader.setFeature("http://xml.org/sax/features/namespaces", true);
    xmlReader.setFeature("http://xml.org/sax/features/namespace-prefixes", false);

    xmlReader.parse(new InputSource(new StringReader(COMPLEX_XML)));

    assertThat(output.toString(), isSimilarTo(COMPLEX_XML).withNodeFilter(nodeFilter));
}
@Test public void patchForObject() throws URISyntaxException { // JDK client does not support the PATCH method Assume.assumeThat(this.clientHttpRequestFactory, Matchers.not(Matchers.instanceOf(SimpleClientHttpRequestFactory.class))); String s = template.patchForObject(baseUrl + "/{method}", helloWorld, String.class, "patch"); assertEquals("Invalid content", helloWorld, s); }
// Seeds the index with two entries for key `a` and one for `b` (all with a second
// property present), then verifies that exact-match-plus-exists composite queries
// return exactly the matching node ids.
private void testIndexSeekExactWithExists( Value a, Value b ) throws Exception
{
    Assume.assumeTrue( "Assume support for granular composite queries",
            testSuite.supportsGranularCompositeQueries() );

    updateAndCommit( asList(
            add( 1L, descriptor.schema(), a, Values.of( 1 ) ),
            add( 2L, descriptor.schema(), b, Values.of( "abv" ) ),
            add( 3L, descriptor.schema(), a, Values.of( false ) ) ) );

    assertThat( query( exact( 0, a ), exists( 1 ) ), equalTo( asList( 1L, 3L ) ) );
    assertThat( query( exact( 0, b ), exists( 1 ) ), equalTo( singletonList( 2L ) ) );
}
/** Deleting a resource must succeed and leave the handle reporting undefined. */
@Theory
public void theoryDeletedResourcesAreUndefined(String path) throws Exception {
    Resource victim = getResource(path);
    assumeThat(victim, resource());

    assertThat(victim.delete(), is(true));
    assertThat(victim, undefined());
}
/** After a successful rename, the original handle must report undefined. */
@Theory
public void theoryRenamedAreUndefined(String path) throws Exception {
    Resource source = getResource(path);
    assumeThat(source, defined());

    Resource target = getUndefined();
    assertThat(source.renameTo(target), is(true));
    assertThat(source, undefined());
}
/** The reflective lookup of getContainersFromPreviousAttempts must resolve on Hadoop >= 2.2. */
@Test
public void testGetMethodReflectiveHadoop22() {
    final String skipMessage =
        "Method getContainersFromPreviousAttempts is not supported by Hadoop: " + VersionInfo.getVersion();
    assumeTrue(skipMessage, isHadoopVersionGreaterThanOrEquals(2, 2));

    final RegisterApplicationMasterResponseReflector reflector =
        new RegisterApplicationMasterResponseReflector(LOG);
    final Method method = reflector.getMethod();
    assertThat(method, notNullValue());
}
// A failed cancel-with-savepoint must leave the job RUNNING and keep triggering
// periodic checkpoints, rather than cancelling the job.
@Test
public void testDoNotCancelJobIfSavepointFails() throws Exception {
    setUpWithCheckpointInterval(10L);

    // Strip all permissions from the savepoint directory so that triggering
    // a savepoint is guaranteed to fail.
    try {
        Files.setPosixFilePermissions(savepointDirectory, Collections.emptySet());
    } catch (IOException e) {
        // POSIX permissions unsupported on this platform — skip the test.
        Assume.assumeNoException(e);
    }

    // NOTE(review): if cancelWithSavepoint() unexpectedly succeeds, no assertion
    // runs in this try block and the test passes vacuously — consider a fail() here.
    try {
        cancelWithSavepoint();
    } catch (Exception e) {
        assertThat(ExceptionUtils.findThrowable(e, CheckpointTriggerException.class).isPresent(),
            equalTo(true));
    }

    // The job must still be RUNNING despite the failed savepoint.
    final JobStatus jobStatus = clusterClient.getJobStatus(jobGraph.getJobID()).get(60, TimeUnit.SECONDS);
    assertThat(jobStatus, equalTo(JobStatus.RUNNING));

    // assert that checkpoints are continued to be triggered
    triggerCheckpointLatch = new CountDownLatch(1);
    assertThat(triggerCheckpointLatch.await(60L, TimeUnit.SECONDS), equalTo(true));
}