/**
 * Configures a shuffle TransportConf with a 10s connection timeout and installs a
 * StreamManager stub that rejects chunk fetches, since chunking is not under test here.
 */
@Before
public void setUp() throws Exception {
  Map<String, String> settings = new HashMap<>();
  settings.put("spark.shuffle.io.connectionTimeout", "10s");
  conf = new TransportConf("shuffle", new MapConfigProvider(settings));
  // Fail loudly if any test accidentally triggers a chunk fetch.
  defaultManager = new StreamManager() {
    @Override
    public ManagedBuffer getChunk(long streamId, int chunkIndex) {
      throw new UnsupportedOperationException();
    }
  };
}
/**
 * Test fixture setup: builds the shared shuffle-mode TransportConf (10 second connection
 * timeout) and a default StreamManager whose getChunk is deliberately unsupported.
 */
@Before
public void setUp() throws Exception {
  Map<String, String> timeoutConf = new HashMap<>();
  timeoutConf.put("spark.shuffle.io.connectionTimeout", "10s");
  conf = new TransportConf("shuffle", new MapConfigProvider(timeoutConf));
  defaultManager = new StreamManager() {
    // These tests never fetch chunks; surface any unexpected call as an error.
    @Override
    public ManagedBuffer getChunk(long streamId, int chunkIndex) {
      throw new UnsupportedOperationException();
    }
  };
}
/**
 * Builds an RPC-mode TransportContext with crypto-based auth enabled and a handler that
 * echoes "Pong" for every "Ping" request (asserting the payload along the way).
 */
AuthTestCtx() throws Exception {
  Map<String, String> authConf = ImmutableMap.of("spark.network.crypto.enabled", "true");
  this.conf = new TransportConf("rpc", new MapConfigProvider(authConf));
  RpcHandler pingPongHandler = new RpcHandler() {
    @Override
    public void receive(
        TransportClient client,
        ByteBuffer message,
        RpcResponseCallback callback) {
      assertEquals("Ping", JavaUtils.bytesToString(message));
      callback.onSuccess(JavaUtils.stringToBytes("Pong"));
    }

    // Streaming is not exercised by this context.
    @Override
    public StreamManager getStreamManager() {
      return null;
    }
  };
  this.ctx = new TransportContext(conf, pingPongHandler);
}
/**
 * Creates the auth test context: an "rpc" TransportConf with
 * spark.network.crypto.enabled set, wired to a Ping/Pong RPC handler.
 */
AuthTestCtx() throws Exception {
  Map<String, String> testConf = ImmutableMap.of("spark.network.crypto.enabled", "true");
  this.conf = new TransportConf("rpc", new MapConfigProvider(testConf));
  // Verifies the request body is "Ping" and replies with "Pong".
  RpcHandler rpcHandler = new RpcHandler() {
    @Override
    public void receive(
        TransportClient client,
        ByteBuffer message,
        RpcResponseCallback callback) {
      assertEquals("Ping", JavaUtils.bytesToString(message));
      callback.onSuccess(JavaUtils.stringToBytes("Pong"));
    }

    @Override
    public StreamManager getStreamManager() {
      return null; // no stream support needed for these tests
    }
  };
  this.ctx = new TransportContext(conf, rpcHandler);
}
/**
 * Initializes the transport stack (conf, context, server, client factory) for a test run.
 *
 * @param enableVerboseMetrics value written into spark.shuffle.io.enableVerboseMetrics
 *        before the TransportConf is created
 */
private void setUp(boolean enableVerboseMetrics) {
  // Declare as the Map interface rather than the HashMap implementation,
  // consistent with the other setUp overloads in this suite.
  Map<String, String> configMap = new HashMap<>();
  configMap.put("spark.shuffle.io.enableVerboseMetrics", String.valueOf(enableVerboseMetrics));
  conf = new TransportConf("shuffle", new MapConfigProvider(configMap));
  RpcHandler rpcHandler = new NoOpRpcHandler();
  context = new TransportContext(conf, rpcHandler);
  server = context.createServer();
  clientFactory = context.createClientFactory();
}
/** Fetching from a port where no server listens must fail every requested block. */
@Test
public void testFetchNoServer() throws Exception {
  // Disable retries so the failure is immediate.
  TransportConf clientConf = new TransportConf("shuffle",
    new MapConfigProvider(ImmutableMap.of("spark.shuffle.io.maxRetries", "0")));
  registerExecutor("exec-0", dataContext0.createExecutorInfo(SORT_MANAGER));
  String[] blockIds = {"shuffle_1_0_0", "shuffle_1_0_1"};
  FetchResult execFetch = fetchBlocks("exec-0", blockIds, clientConf, 1 /* port */);
  assertTrue(execFetch.successBlocks.isEmpty());
  assertEquals(Sets.newHashSet(blockIds), execFetch.failedBlocks);
}
/**
 * With retries disabled, a fetch against a dead port (1) should report no successful
 * blocks and list both requested shuffle blocks as failed.
 */
@Test
public void testFetchNoServer() throws Exception {
  MapConfigProvider noRetryProvider =
    new MapConfigProvider(ImmutableMap.of("spark.shuffle.io.maxRetries", "0"));
  TransportConf clientConf = new TransportConf("shuffle", noRetryProvider);
  registerExecutor("exec-0", dataContext0.createExecutorInfo(SORT_MANAGER));
  FetchResult execFetch = fetchBlocks(
    "exec-0", new String[]{"shuffle_1_0_0", "shuffle_1_0_1"}, clientConf, 1 /* port */);
  assertTrue(execFetch.successBlocks.isEmpty());
  assertEquals(Sets.newHashSet("shuffle_1_0_0", "shuffle_1_0_1"), execFetch.failedBlocks);
}
/** Verifies that all blocks fail fast when no shuffle server is reachable. */
@Test
public void testFetchNoServer() throws Exception {
  TransportConf clientConf = new TransportConf(
    "shuffle",
    new MapConfigProvider(ImmutableMap.of("spark.shuffle.io.maxRetries", "0")));
  registerExecutor("exec-0", dataContext0.createExecutorInfo(SORT_MANAGER));
  // Port 1 is assumed to have nothing listening, so every block should fail.
  FetchResult result = fetchBlocks(
    "exec-0", new String[]{"shuffle_1_0_0", "shuffle_1_0_1"}, clientConf, 1 /* port */);
  assertTrue(result.successBlocks.isEmpty());
  assertEquals(Sets.newHashSet("shuffle_1_0_0", "shuffle_1_0_1"), result.failedBlocks);
}
SaslTestCtx ctx = null; try { TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(testConf)); StreamManager sm = mock(StreamManager.class); when(sm.getChunk(anyLong(), anyInt())).thenAnswer(invocation ->
SaslTestCtx ctx = null; try { TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(testConf)); StreamManager sm = mock(StreamManager.class); when(sm.getChunk(anyLong(), anyInt())).thenAnswer(invocation ->
.put("spark.authenticate.enableSaslEncryption", String.valueOf(encrypt)) .build(); TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(testConf));
.put("spark.authenticate.enableSaslEncryption", String.valueOf(encrypt)) .build(); TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(testConf));
throws IOException, InterruptedException { MapConfigProvider provider = new MapConfigProvider(ImmutableMap.of( "spark.shuffle.io.maxRetries", "2", "spark.shuffle.io.retryWait", "0"));
TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(configMap));
TransportConf conf = new TransportConf("shuffle", new MapConfigProvider(configMap));
throws IOException, InterruptedException { MapConfigProvider provider = new MapConfigProvider(ImmutableMap.of( "spark.shuffle.io.maxRetries", "2", "spark.shuffle.io.retryWait", "0"));
throws IOException, InterruptedException { MapConfigProvider provider = new MapConfigProvider(ImmutableMap.of( "spark.shuffle.io.maxRetries", "2", "spark.shuffle.io.retryWait", "0"));
/** Creates an ExternalShuffleClient and attempts to register with the server. */ private void validate(String appId, String secretKey, boolean encrypt) throws IOException, InterruptedException { TransportConf testConf = conf; if (encrypt) { testConf = new TransportConf("shuffle", new MapConfigProvider( ImmutableMap.of("spark.authenticate.enableSaslEncryption", "true"))); } ExternalShuffleClient client = new ExternalShuffleClient(testConf, new TestSecretKeyHolder(appId, secretKey), true, 5000); client.init(appId); // Registration either succeeds or throws an exception. client.registerWithShuffleServer(TestUtils.getLocalHost(), server.getPort(), "exec0", new ExecutorShuffleInfo(new String[0], 0, "org.apache.spark.shuffle.sort.SortShuffleManager")); client.close(); }
/** Creates an ExternalShuffleClient and attempts to register with the server. */ private void validate(String appId, String secretKey, boolean encrypt) throws IOException, InterruptedException { TransportConf testConf = conf; if (encrypt) { testConf = new TransportConf("shuffle", new MapConfigProvider( ImmutableMap.of("spark.authenticate.enableSaslEncryption", "true"))); } ExternalShuffleClient client = new ExternalShuffleClient(testConf, new TestSecretKeyHolder(appId, secretKey), true); client.init(appId); // Registration either succeeds or throws an exception. client.registerWithShuffleServer(TestUtils.getLocalHost(), server.getPort(), "exec0", new ExecutorShuffleInfo(new String[0], 0, "org.apache.spark.shuffle.sort.SortShuffleManager")); client.close(); }
/** Creates an ExternalShuffleClient and attempts to register with the server. */ private void validate(String appId, String secretKey, boolean encrypt) throws IOException, InterruptedException { TransportConf testConf = conf; if (encrypt) { testConf = new TransportConf("shuffle", new MapConfigProvider( ImmutableMap.of("spark.authenticate.enableSaslEncryption", "true"))); } ExternalShuffleClient client = new ExternalShuffleClient(testConf, new TestSecretKeyHolder(appId, secretKey), true, 5000); client.init(appId); // Registration either succeeds or throws an exception. client.registerWithShuffleServer(TestUtils.getLocalHost(), server.getPort(), "exec0", new ExecutorShuffleInfo(new String[0], 0, "org.apache.spark.shuffle.sort.SortShuffleManager")); client.close(); }