@Test
public void testAvroObjectAccess() throws Exception {
	// Flattens a nested Avro record field and reads it back as an Address POJO.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	Table source = tableEnv.fromDataSet(testData(execEnv));
	Table flattened = source
		.filter("type_nested.isNotNull")
		.select("type_nested.flatten()").as("city, num, state, street, zip");

	List<Address> actual = tableEnv.toDataSet(flattened, Types.POJO(Address.class)).collect();
	TestBaseUtils.compareResultAsText(actual, USER_1.getTypeNested().toString());
}
@Override public TableSchema getTableSchema(SessionContext session, String name) throws SqlExecutionException { final TableEnvironment tableEnv = getOrCreateExecutionContext(session) .createEnvironmentInstance() .getTableEnvironment(); try { return tableEnv.scan(name).getSchema(); } catch (Throwable t) { // catch everything such that the query does not crash the executor throw new SqlExecutionException("No table with this name could be found.", t); } }
/**
 * Word-count over the Table API: groups by word, sums frequencies, and keeps
 * only the words that occur exactly twice.
 */
public static void main(String[] args) throws Exception {
	ExecutionEnvironment executionEnv = ExecutionEnvironment.createCollectionsEnvironment();
	BatchTableEnvironment tableEnv = TableEnvironment.getTableEnvironment(executionEnv);

	DataSet<WC> words = executionEnv.fromElements(
		new WC("Hello", 1),
		new WC("Ciao", 1),
		new WC("Hello", 1));

	Table counts = tableEnv.fromDataSet(words)
		.groupBy("word")
		.select("word, frequency.sum as frequency")
		.filter("frequency = 2");

	tableEnv.toDataSet(counts, WC.class).print();
}
// NOTE(review): fragment only — the enclosing method/lambda starts and ends outside this view,
// and two separate call sites appear to be mashed onto one line here.
// Visible behavior: the table's schema is passed through removeTimeAttributes(...) before use,
// and the table is written to the sink obtained from `result` using the environment's query
// config — presumably because the sink cannot accept time attributes; TODO confirm in context.
removeTimeAttributes(table.getSchema()), envInst.getExecutionConfig()); table.writeToSink(result.getTableSink(), envInst.getQueryConfig()); return null; }); removeTimeAttributes(table.getSchema()), result.isMaterialized());
@Test
public void testAsWithPojoAndGenericTypes() throws Exception {
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	HashMap<String, String> annasMap = new HashMap<>();
	annasMap.put("test1", "test1");
	HashMap<String, String> lucysMap = new HashMap<>();
	lucysMap.put("abc", "cde");

	List<PojoWithGeneric> input = new ArrayList<>();
	input.add(new PojoWithGeneric("Peter", 28, new HashMap<String, String>(), new ArrayList<String>()));
	input.add(new PojoWithGeneric("Anna", 56, annasMap, new ArrayList<String>()));
	input.add(new PojoWithGeneric("Lucy", 42, lucysMap, new ArrayList<String>()));

	// Alias the POJO fields on ingestion, duplicate column c, then compare the copy to the original.
	Table projected = tEnv
		.fromDataSet(execEnv.fromCollection(input), "name AS a, age AS b, generic AS c, generic2 AS d")
		.select("a, b, c, c as c2, d")
		.select("a, b, c, c === c2, d");

	List<Row> actual = tEnv.toDataSet(projected, Row.class).collect();
	compareResultAsText(actual,
		"Peter,28,{},true,[]\n" + "Anna,56,{test1=test1},true,[]\n" + "Lucy,42,{abc=cde},true,[]\n");
}
// NOTE(review): fragment — the table expression these chains are applied to starts outside
// this view, and two separate chains appear on this line (the second has no visible receiver).
// Visible behavior: automated senders are filtered out, mails are counted per (month, sender),
// then the per-month maximum count is joined back to recover each month's top sender.
.filter("sender !== 'jira@apache.org' && " + "sender !== 'no-reply@apache.org' && " + "sender !== 'git@git.apache.org'") .groupBy("month, sender").select("month, sender, month.count as cnt"); .groupBy("month").select("month as m, cnt.max as max") .join(mailsPerSenderMonth).where("month = m && cnt = max").select("month, sender");
@Test
public void testTableRegister() throws Exception {
	// Registers a table under a catalog name and queries it back via scan().
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	DataSet<Tuple3<Integer, Long, String>> tuples = CollectionDataSets.get3TupleDataSet(execEnv);
	tEnv.registerTable("MyTable", tEnv.fromDataSet(tuples));

	Table filtered = tEnv.scan("MyTable").select("f0, f1").filter("f0 > 7");
	List<Row> actual = tEnv.toDataSet(filtered, Row.class).collect();

	compareResultAsText(actual,
		"8,4\n" + "9,4\n" + "10,4\n" + "11,5\n" + "12,5\n" + "13,5\n" + "14,5\n" +
		"15,5\n" + "16,6\n" + "17,6\n" + "18,6\n" + "19,6\n" + "20,6\n" + "21,6\n");
}
// NOTE(review): fragment — the receiver of this chain and the aggregation between the
// window(...) and the popCnt filter are outside this view.
// Visible behavior: keeps rides fully inside NYC, maps each event to a grid cell id
// (start cell for ride starts, end cell for ride ends), windows by a 15-minute slide
// every 5 minutes on eventTime, and emits cells whose count exceeds 20 with coordinates.
.filter("isInNYC(startLon, startLat) && isInNYC(endLon, endLat)") .select("eventTime, " + "isStart, " + "(isStart = true).?(toCellId(startLon, startLat), toCellId(endLon, endLat)) AS cell") .window(Slide.over("15.minutes").every("5.minutes").on("eventTime").as("w")) .filter("popCnt > 20") .select("toCoords(cell) AS location, start, end, isStart, popCnt");
/**
 * Builds the Flink job graph for this job: sets parallelism, registers
 * UDFs and input catalogs, runs the configured SQL, and wires every
 * declared output table to its sink.
 */
JobGraph getJobGraph() throws IOException {
	final StreamExecutionEnvironment executionEnv = env.execEnv();
	executionEnv.setParallelism(job.parallelism());

	this.registerUdfs().registerInputCatalogs();

	final Table resultTable = env.sqlQuery(job.sql());
	for (final String output : job.outputs().listTables()) {
		resultTable.writeToSink(getOutputTable(job.outputs().getTable(output)));
	}

	return executionEnv.getStreamGraph().getJobGraph();
}
// NOTE(review): fragment — both the `if` this `else` belongs to and the inner `else`
// body are outside this view.
// Visible behavior: when the declared fields match the table's schema, the table is
// renamed to the validated field list (tmpFields is set by checkFieldsInfo) and
// registered under the name from the parsed CREATE statement.
} else { if (checkFieldsInfo(result, table)){ table = table.as(tmpFields); tableEnv.registerTable(result.getTableName(), table); } else {
@Test
public void testAsFromPojoProjected() throws Exception {
	// Maps only a single POJO field; all other fields are projected away.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<SmallPojo> input = new ArrayList<>();
	input.add(new SmallPojo("Peter", 28, 4000.00, "Sales", new Integer[] {42}));
	input.add(new SmallPojo("Anna", 56, 10000.00, "Engineering", new Integer[] {}));
	input.add(new SmallPojo("Lucy", 42, 6000.00, "HR", new Integer[] {1, 2, 3}));

	Table onlyNames = tEnv
		.fromDataSet(execEnv.fromCollection(input), "name AS d")
		.select("d");

	List<Row> actual = tEnv.toDataSet(onlyNames, Row.class).collect();
	compareResultAsText(actual, "Peter\n" + "Anna\n" + "Lucy\n");
}
@Ignore
@Test
public void testAsFromTupleToPojo() throws Exception {
	// Renames tuple fields so they can be mapped onto SmallPojo2's fields.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<Tuple4<String, Integer, Double, String>> input = new ArrayList<>();
	input.add(new Tuple4<>("Rofl", 1, 1.0, "Hi"));
	input.add(new Tuple4<>("lol", 2, 1.0, "Hi"));
	input.add(new Tuple4<>("Test me", 4, 3.33, "Hello world"));

	Table renamed = tEnv
		.fromDataSet(execEnv.fromCollection(input), "q, w, e, r")
		.select("q as a, w as b, e as c, r as d");

	List<SmallPojo2> actual = tEnv.toDataSet(renamed, SmallPojo2.class).collect();
	compareResultAsText(actual,
		"Rofl,1,1.0,Hi\n" + "lol,2,1.0,Hi\n" + "Test me,4,3.33,Hello world\n");
}
/**
 * Validates the field list declared in a parsed CREATE statement against the schema
 * of the given {@link Table}: same column count and, per column, a declared type whose
 * class matches the table column's type class. On success the declared field names are
 * cached (comma-separated) in {@code tmpFields} for a later {@code table.as(...)} rename.
 *
 * @param result parsed statement carrying the raw "name type, ..." field string
 * @param table  table whose schema the declaration is checked against
 * @return true when every declared field has a name and a matching type; false otherwise
 */
private boolean checkFieldsInfo(CreateTmpTableParser.SqlParserResult result, Table table){
	String fieldsInfo = result.getFieldsInfoStr();
	String[] fields = fieldsInfo.split(",");
	// The column-count check is loop-invariant; reject a mismatch up front.
	if (fields.length != table.getSchema().getColumnNames().length){
		return false;
	}
	List<String> fieldNames = new LinkedList<>();
	for (int i = 0; i < fields.length; i++) {
		// Trim first and split on runs of whitespace: the previous split("\\s") produced
		// empty tokens for "a, b int", yielding field names with leading blanks.
		String[] parts = fields[i].trim().split("\\s+");
		if (parts.length < 2){
			// Each entry needs at least "<name> <type>".
			return false;
		}
		// Everything except the last token forms the (possibly multi-word) field name.
		String[] nameTokens = new String[parts.length - 1];
		System.arraycopy(parts, 0, nameTokens, 0, parts.length - 1);
		fieldNames.add(String.join(" ", nameTokens));
		String fieldType = parts[parts.length - 1];
		Class fieldClass = ClassUtil.stringConvertClass(fieldType);
		Class tableField = table.getSchema().getType(i).get().getTypeClass();
		if (fieldClass != tableField){
			return false;
		}
	}
	tmpFields = String.join(",", fieldNames);
	return true;
}
@Test
public void testAsFromPrivateFieldsPojo() throws Exception {
	// POJO fields are private (getter/setter access); aliasing must still work.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<PrivateSmallPojo> input = new ArrayList<>();
	input.add(new PrivateSmallPojo("Peter", 28, 4000.00, "Sales"));
	input.add(new PrivateSmallPojo("Anna", 56, 10000.00, "Engineering"));
	input.add(new PrivateSmallPojo("Lucy", 42, 6000.00, "HR"));

	Table aliased = tEnv
		.fromDataSet(execEnv.fromCollection(input),
			"department AS a, age AS b, salary AS c, name AS d")
		.select("a, b, c, d");

	List<Row> actual = tEnv.toDataSet(aliased, Row.class).collect();
	compareResultAsText(actual,
		"Sales,28,4000.0,Peter\n" + "Engineering,56,10000.0,Anna\n" + "HR,42,6000.0,Lucy\n");
}
@Test
public void testAsFromAndToPojo() throws Exception {
	// Round-trips through two different POJO types via field aliasing.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<SmallPojo> input = new ArrayList<>();
	input.add(new SmallPojo("Peter", 28, 4000.00, "Sales", new Integer[] {42}));
	input.add(new SmallPojo("Anna", 56, 10000.00, "Engineering", new Integer[] {}));
	input.add(new SmallPojo("Lucy", 42, 6000.00, "HR", new Integer[] {1, 2, 3}));

	Table aliased = tEnv
		.fromDataSet(execEnv.fromCollection(input),
			"department AS a, age AS b, salary AS c, name AS d,roles AS e")
		.select("a, b, c, d, e");

	List<SmallPojo2> actual = tEnv.toDataSet(aliased, SmallPojo2.class).collect();
	compareResultAsText(actual,
		"Sales,28,4000.0,Peter,[42]\n" + "Engineering,56,10000.0,Anna,[]\n" + "HR,42,6000.0,Lucy,[1, 2, 3]\n");
}
// NOTE(review): fragment — the enclosing method, and the declarations of `adaptSql`,
// `table`, and `tableEnv`, are outside this view.
// Visible behavior: optionally rewrites the table through an adapter SQL query, derives
// a RowTypeInfo from the (possibly adapted) schema, converts the table to an append-only
// DataStream with that type, and joins the field names into a comma-separated string.
Table adaptTable = adaptSql == null ? table : tableEnv.sqlQuery(adaptSql); RowTypeInfo typeInfo = new RowTypeInfo(adaptTable.getSchema().getTypes(), adaptTable.getSchema().getColumnNames()); DataStream adaptStream = tableEnv.toAppendStream(adaptTable, typeInfo); String fields = String.join(",", typeInfo.getFieldNames());
@Test
public void testAsFromAndToPrivateFieldPojo() throws Exception {
	// Round-trips through two private-field POJO types via field aliasing.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<PrivateSmallPojo> input = new ArrayList<>();
	input.add(new PrivateSmallPojo("Peter", 28, 4000.00, "Sales"));
	input.add(new PrivateSmallPojo("Anna", 56, 10000.00, "Engineering"));
	input.add(new PrivateSmallPojo("Lucy", 42, 6000.00, "HR"));

	Table aliased = tEnv
		.fromDataSet(execEnv.fromCollection(input),
			"department AS a, age AS b, salary AS c, name AS d")
		.select("a, b, c, d");

	List<PrivateSmallPojo2> actual = tEnv.toDataSet(aliased, PrivateSmallPojo2.class).collect();
	compareResultAsText(actual,
		"Sales,28,4000.0,Peter\n" + "Engineering,56,10000.0,Anna\n" + "HR,42,6000.0,Lucy\n");
}
// NOTE(review): fragment — the enclosing method, and the declarations of `leftTable`,
// `leftScopeChild`, `targetTable`, and `tableEnv`, are outside this view.
// Visible behavior: records the left-side table's row type on the scope node, builds a
// RowTypeInfo for the target table, and converts the target table to an append-only
// DataStream of Row (the computed `typeInfo` is not passed to toAppendStream here —
// presumably used further down; TODO confirm in context).
RowTypeInfo leftTypeInfo = new RowTypeInfo(leftTable.getSchema().getTypes(), leftTable.getSchema().getColumnNames()); leftScopeChild.setRowTypeInfo(leftTypeInfo); RowTypeInfo typeInfo = new RowTypeInfo(targetTable.getSchema().getTypes(), targetTable.getSchema().getColumnNames()); DataStream adaptStream = tableEnv.toAppendStream(targetTable, org.apache.flink.types.Row.class);
@Test
public void testAsFromPojo() throws Exception {
	// Aliases every POJO field and reads the result back as generic Rows.
	ExecutionEnvironment execEnv = ExecutionEnvironment.getExecutionEnvironment();
	BatchTableEnvironment tEnv = TableEnvironment.getTableEnvironment(execEnv, config());

	List<SmallPojo> input = new ArrayList<>();
	input.add(new SmallPojo("Peter", 28, 4000.00, "Sales", new Integer[] {42}));
	input.add(new SmallPojo("Anna", 56, 10000.00, "Engineering", new Integer[] {}));
	input.add(new SmallPojo("Lucy", 42, 6000.00, "HR", new Integer[] {1, 2, 3}));

	Table aliased = tEnv
		.fromDataSet(execEnv.fromCollection(input),
			"department AS a, age AS b, salary AS c, name AS d,roles as e")
		.select("a, b, c, d, e");

	List<Row> actual = tEnv.toDataSet(aliased, Row.class).collect();
	compareResultAsText(actual,
		"Sales,28,4000.0,Peter,[42]\n" + "Engineering,56,10000.0,Anna,[]\n" + "HR,42,6000.0,Lucy,[1, 2, 3]\n");
}