/**
 * Creates the Spark UI from a brand-new, empty {@link SparkConf} (no user-provided
 * configuration) and reports success.
 *
 * @param param   the magic-command execution context (source of the parent message)
 * @param options parsed {@code --option} flags forwarded to the UI
 * @param seo     evaluation object registered as the current internal variable
 * @return an OK outcome; UI creation itself reports its own errors
 */
private MagicCommandOutcomeItem createSparkUiBasedOnEmptyConfiguration(MagicCommandExecutionParam param, List<SparkOptionCommand> options, SimpleEvaluationObject seo) {
  InternalVariable.setValue(seo);
  SparkSession.Builder emptyConfigBuilder = SparkSession.builder().config(new SparkConf());
  createSparkUI(emptyConfigBuilder, param.getCode().getMessage(), options);
  return new MagicCommandOutput(MagicCommandOutput.Status.OK);
}
@Test public void loadAndSaveWithoutChangesShouldBeIdempotent() throws IOException { //given SparkSession.Builder builder = SparkSession.builder(); HashMap<String, Object> profileConfig = new HashMap<>(); profileConfig.put(NAME, DEFAULT_PROFILE); //when sut.loadDefaults(builder); sut.saveProfile(profileConfig); //then Map<String, Map> result = beakerXJson.beakerxJsonAsMap(); Assertions.assertThat(result.get("beakerx").get("version")).isEqualTo(2); }
@Test public void sparkVersion() { //given SparkEngineImpl sparkEngine = new SparkEngineImpl(SparkSession.builder()); //when String version = sparkEngine.sparkVersion(); //then assertThat(version).isEqualTo("2.2.1"); } }
@Test public void saveAndLoadDefaults() { //given HashMap<String, Object> profileConfig = new HashMap<>(); profileConfig.put(SPARK_ADVANCED_OPTIONS, Arrays.asList( new SparkConfiguration.Configuration("sparkOption2", "3"))); profileConfig.put(SPARK_MASTER, "local[4]"); profileConfig.put(NAME, DEFAULT_PROFILE); List config = new ArrayList(); config.add(profileConfig); //when sut.saveProfile(profileConfig); //then SparkSession.Builder builder = SparkSession.builder(); sut.loadDefaults(builder); SparkConf sparkConfBasedOn = SparkEngineImpl.getSparkConfBasedOn(builder); assertThat(sparkConfBasedOn.get("sparkOption2")).isEqualTo("3"); assertThat(sparkConfBasedOn.get(SPARK_MASTER)).isEqualTo("local[4]"); }
@Test public void loadDefaultsWhenCreateSparkUI() { //given SparkUiDefaultsImplMock sparkUiDefaults = new SparkUiDefaultsImplMock(); //when new SparkUI(SparkSession.builder(), sparkSessionBuilder -> new SparkManagerImplTest(), sparkUiDefaults, singleSparkSession); //then assertThat(sparkUiDefaults.loaded).isTrue(); }
/** Obtains the shared SparkSession, creating it from this engine's SparkConf if needed. */
@Override
public SparkSession getOrCreate() {
  SparkSession.Builder configuredBuilder = SparkSession.builder().config(getSparkConf());
  return configuredBuilder.getOrCreate();
}
/**
 * Reads a Hive table as an RDD of string arrays: one array per row, one entry per
 * column, each column rendered via {@code toString()} (null columns stay null).
 *
 * @param sc        active JavaSparkContext whose SparkConf is reused for the session
 * @param hiveTable Hive table name to read (passed straight to {@code sparkSession.table})
 * @return the table's rows with every column stringified
 */
private static JavaRDD<String[]> getOtherFormatHiveInput(JavaSparkContext sc, String hiveTable) {
  SparkSession sparkSession = SparkSession.builder().config(sc.getConf()).enableHiveSupport().getOrCreate();
  // Typed Dataset<Row> instead of the original raw Dataset (avoids unchecked ops).
  final Dataset<Row> intermediateTable = sparkSession.table(hiveTable);
  // Lambda replaces the anonymous org.apache.spark.api.java.function.Function class.
  return intermediateTable.javaRDD().map(row -> {
    String[] result = new String[row.size()];
    for (int i = 0; i < row.size(); i++) {
      final Object o = row.get(i);
      result[i] = (o == null) ? null : o.toString();
    }
    return result;
  });
}
@Before
public void setUp() throws Exception {
  // Collaborators first: session holder, defaults mock, and a registered test kernel.
  singleSparkSession = new SparkMagicCommand.SingleSparkSessionImpl();
  sparkUiDefaults = new SparkUiDefaultsImplMock();
  kernel = new KernelTest();
  KernelManager.register(kernel);
  // System under test wires all of the above together.
  sparkUI = new SparkUI(SparkSession.builder(), sparkSessionBuilder -> new SparkManagerImplTest(), sparkUiDefaults, singleSparkSession);
}
/**
 * Executes the magic-command code block and builds the Spark UI from its result,
 * which must be either a {@link SparkConf} or a {@link SparkSession.Builder}.
 *
 * @param param   execution context supplying the code block and parent message
 * @param options parsed {@code --option} flags forwarded to the UI
 * @param seo     evaluation object used while executing the code block
 * @return OK on success, or an ERROR outcome describing what went wrong
 */
private MagicCommandOutcomeItem createSparkUIBasedOnUserSparkConfiguration(MagicCommandExecutionParam param, List<SparkOptionCommand> options, SimpleEvaluationObject seo) {
  TryResult executed = kernel.executeCode(param.getCommandCodeBlock(), seo);
  // Guard clause: code-block execution itself failed.
  if (!executed.isResult()) {
    return new MagicCommandOutput(MagicCommandOutput.Status.ERROR,
        "There occurs problem during execution of " + SPARK + " : " + executed.error());
  }
  Object value = executed.result();
  SparkSession.Builder builder;
  if (value instanceof SparkConf) {
    builder = SparkSession.builder().config((SparkConf) value);
  } else if (value instanceof SparkSession.Builder) {
    builder = (SparkSession.Builder) value;
  } else {
    return new MagicCommandOutput(MagicCommandOutput.Status.ERROR,
        "Body of " + SPARK + " magic command must return SparkConf object or SparkSession.Builder object");
  }
  createSparkUI(builder, param.getCode().getMessage(), options);
  return new MagicCommandOutput(MagicCommandOutput.Status.OK);
}
/**
 * Rebuilds the Spark configuration from the UI's settings, recreates this engine's
 * session builder, starts the Spark session, attaches the UI listener, and finally
 * initializes the Spark context inside the shell.
 *
 * <p>Statement order matters: {@code this.sparkSessionBuilder} is read (via
 * {@code getSparkConfBasedOn}) before being replaced, and the session must exist
 * before listeners are attached or the context is pushed to the shell.
 *
 * @param kernel        kernel used to run the context-initialization code and to
 *                      register the cancel hook
 * @param sparkUI       UI supplying advanced options and the Hive-support flag
 * @param parentMessage message the session-creation progress is reported against
 * @return the result of initializing the Spark context in the shell, or the
 *         session-creation error if that step failed
 */
@Override
public TryResult configure(KernelFunctionality kernel, SparkUIApi sparkUI, Message parentMessage) {
  // Merge the UI's advanced options onto the conf derived from the current builder.
  SparkConf sparkConf = createSparkConf(sparkUI.getAdvancedOptions(), getSparkConfBasedOn(this.sparkSessionBuilder));
  sparkConf = configureSparkConf(sparkConf, sparkUI);
  // Replace the builder only after the old one has been consumed above.
  this.sparkSessionBuilder = SparkSession.builder().config(sparkConf);
  if (sparkUI.getHiveSupport()) {
    this.sparkSessionBuilder.enableHiveSupport();
  }
  TryResult sparkSessionTry = createSparkSession(sparkUI, parentMessage);
  if (sparkSessionTry.isError()) {
    return sparkSessionTry;
  }
  // Session exists now; wire the UI listener and publish the session.
  addListener(getOrCreate().sparkContext(), sparkUI);
  SparkVariable.putSparkSession(getOrCreate());
  TryResult tryResultSparkContext = initSparkContextInShell(kernel, parentMessage);
  if (!tryResultSparkContext.isError()) {
    // Allow the kernel's interrupt to cancel running Spark jobs.
    kernel.registerCancelHook(SparkVariable::cancelAllJobs);
  }
  return tryResultSparkContext;
}
@Before
public void setUp() {
  // Single-JVM local session for the tests.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
}
@Before
public void setUp() {
  // Single-JVM local session for the tests.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
}
@Before
public void setUp() {
  // Single-JVM local session for the tests.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
}
@Before
public void setUp() {
  // Single-JVM local session for the tests.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
}
@Before
public void setUp() {
  // Single-JVM local session for the tests.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
}
@Before
public void setUp() throws IOException {
  // Local session for the test run.
  spark = SparkSession.builder()
      .appName("testing")
      .master("local[*]")
      .getOrCreate();
  // Reserve a unique temp location, then drop the directory itself so the code
  // under test can create it fresh at this path.
  path = Utils.createTempDir(System.getProperty("java.io.tmpdir"), "datasource").getCanonicalFile();
  if (path.exists()) {
    path.delete();
  }
  // Ten small JSON rows: {"a": i, "b": "str<i>"}.
  List<String> rows = new ArrayList<>(10);
  for (int i = 0; i < 10; i++) {
    rows.add("{\"a\":" + i + ", \"b\":\"str" + i + "\"}");
  }
  Dataset<String> jsonDataset = spark.createDataset(rows, Encoders.STRING());
  df = spark.read().json(jsonDataset);
  df.createOrReplaceTempView("jsonTable");
}
/**
 * Obtains the shared SparkSession with the application's standard settings and
 * console progress reporting disabled.
 * NOTE(review): the master URL comes from NUM_EXECUTORS — the name suggests a
 * count rather than a master string; confirm the constant's value.
 */
public static SparkSession getSparkSession() {
  SparkSession.Builder builder = SparkSession.builder()
      .appName(APP_NAME)
      .master(NUM_EXECUTORS)
      .config(UI_SHOW_CONSOLE_PROGRESS, false);
  return builder.getOrCreate();
}
@Before
public void setUp() {
  // Local session plus a Java-friendly wrapper around its SparkContext.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
@Before
public void setUp() {
  // Local session plus a Java-friendly wrapper around its SparkContext.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}
@Before
public void setUp() {
  // Local session plus a Java-friendly wrapper around its SparkContext.
  SparkSession.Builder localBuilder = SparkSession.builder()
      .appName("testing")
      .master("local[*]");
  spark = localBuilder.getOrCreate();
  jsc = new JavaSparkContext(spark.sparkContext());
}