@Test
public void testDefaultParquetFileFormatOptions() throws Exception {
  // A freshly constructed parquet config must advertise its type in the generated table options.
  final ParquetFileConfig defaultConfig = new ParquetFileConfig();
  final String options = defaultConfig.toTableOptions();
  assertContains("type => 'parquet'", options);
}
/** Creates a fresh, empty {@link ParquetFileConfig} instance. */
public ParquetFileConfig newMessage() {
  final ParquetFileConfig message = new ParquetFileConfig();
  return message;
}
// NOTE(review): fragment — the enclosing method and the closing braces lie outside this view.
// For parquet plugins, copies the plugin's autoCorrectCorruptDates setting into a new
// file-level config; presumably other plugin types are handled after this branch — confirm.
if (formatPlugin instanceof ParquetFormatPlugin) { ParquetFormatPlugin parquetFormatPlugin = (ParquetFormatPlugin)formatPlugin; return new ParquetFileConfig().setAutoCorrectCorruptDates(parquetFormatPlugin.getConfig().autoCorrectCorruptDates);
@Test
public void testQueryOnFolder() throws Exception {
  final ParquetFileConfig folderConfig = new ParquetFileConfig();
  folderConfig.setName("parquet");

  final String folderPath = getUrlPath("/datasets/folderdataset");
  final String parentPath = getUrlPath("/datasets/");

  doc("preview data for source folder");
  final JobDataFragment previewData = expectSuccess(
      getBuilder(getAPIv2().path("/source/dacfs_test/folder_preview/" + folderPath))
          .buildPost(Entity.json(folderConfig)),
      JobDataFragment.class);
  assertEquals(25, previewData.getReturnedRowCount());

  // Persist the folder format before promoting it to a dataset.
  expectSuccess(
      getBuilder(getAPIv2().path("/source/dacfs_test/folder_format/" + folderPath))
          .buildPut(Entity.json(folderConfig)));

  doc("creating dataset from source folder");
  expectSuccess(
      getBuilder(getAPIv2().path("/source/dacfs_test/new_untitled_from_folder/" + folderPath))
          .buildPost(Entity.json("")),
      InitialPreviewResponse.class);

  checkCounts(parentPath, "folderdataset", true, 1, 0, 0);
}
// NOTE(review): fragment — statements from a larger method; surrounding scope is not visible here.
// Reads the date-correction flag out of the stored parquet format settings, and the
// int96-as-timestamp behavior from the option manager.
final boolean autoCorrectCorruptDates = ((ParquetFileConfig)FileFormat.getForFile(config.getFormatSettings())).getAutoCorrectCorruptDates(); final boolean readInt96AsTimeStamp = context.getOptions().getOption(ExecConstants .PARQUET_READER_INT96_AS_TIMESTAMP).getBoolVal();
/**
 * Builds an empty file-format configuration for the given file type.
 *
 * @param type the file type to build a default configuration for
 * @return a fresh config instance, or {@code null} for {@code UNKNOWN} and any unrecognized type
 */
public static FileFormat getEmptyConfig(FileType type) {
  switch (type) {
    case TEXT:
    case CSV:
    case TSV:
    case PSV:
      // All delimited-text variants share a single config class.
      return new TextFileConfig();
    case JSON:
      return new JsonFileConfig();
    case PARQUET:
      return new ParquetFileConfig();
    case AVRO:
      return new AvroFileConfig();
    case HTTP_LOG:
      return new HttpLogFileConfig();
    case EXCEL:
      return new ExcelFileConfig();
    case XLS:
      return new XlsFileConfig();
    case UNKNOWN:
    default:
      return null;
  }
}
// NOTE(review): fragment — part of a switch statement whose head and remaining cases
// (the trailing "case AVRO:" label included) are outside this view.
// Translates the stored parquet file config into a ParquetFormatConfig, preserving the
// date-correction flag.
final ParquetFileConfig parquetFileConfig = (ParquetFileConfig)com.dremio.service.namespace.file.FileFormat.getForFile(fileConfig); ParquetFormatConfig parquetFormatConfig = new ParquetFormatConfig(); parquetFormatConfig.autoCorrectCorruptDates = parquetFileConfig.getAutoCorrectCorruptDates(); return parquetFormatConfig; case AVRO:
@Test
public void testParquetFile() throws Exception {
  final JobsService jobsService = l(JobsService.class);
  final String fileUrlPath = getUrlPath("/singlefile_parquet_dir/0_0_0.parquet");
  final String parentUrlPath = getUrlPath("/singlefile_parquet_dir/");

  // Preview the raw parquet file through the REST API and sanity-check its shape.
  final ParquetFileConfig previewConfig = new ParquetFileConfig();
  final JobDataFragment preview = expectSuccess(
      getBuilder(getAPIv2().path("/source/dacfs_test/file_preview/" + fileUrlPath))
          .buildPost(Entity.json(previewConfig)),
      JobDataFragment.class);
  assertEquals(25, preview.getReturnedRowCount());
  assertEquals(4, preview.getColumns().size());

  // Run the same file as a UI query and expect identical row/column counts.
  final JobUI queryJob = new JobUI(jobsService.submitJob(
      JobRequest.newBuilder()
          .setSqlQuery(createQuery("/singlefile_parquet_dir/0_0_0.parquet"))
          .setQueryType(QueryType.UI_RUN)
          .build(),
      NoOpJobStatusListener.INSTANCE));
  final JobDataFragment queryData = queryJob.getData().truncate(500);
  assertEquals(25, queryData.getReturnedRowCount());
  assertEquals(4, queryData.getColumns().size());

  checkCounts(parentUrlPath, "0_0_0.parquet", true, 1, 0, 0);
}