/**
 * Determines the table used for previewing data in the given
 * {@link DataContext}.
 *
 * @param dc the data context to inspect
 * @return the first table of the default schema, or null when the schema
 *         contains no tables
 */
protected Table getPreviewTable(final DataContext dc) {
    final Schema defaultSchema = dc.getDefaultSchema();
    // an empty schema yields no preview table at all
    return defaultSchema.getTableCount() == 0 ? null : defaultSchema.getTable(0);
}
/**
 * Determines the table used for previewing data in the given
 * {@link DataContext}.
 *
 * @param dc the data context to inspect
 * @return the first table of the default schema, or null when the schema
 *         contains no tables
 */
protected Table getPreviewTable(final DataContext dc) {
    final Schema schema = dc.getDefaultSchema();
    // Guard against empty schemas - the previous implementation failed when the
    // datastore had no tables yet (e.g. a freshly created file-based datastore).
    if (schema.getTableCount() == 0) {
        return null;
    }
    return schema.getTable(0);
}
public void setSchema(final Datastore datastore, final Schema schema) { _panelAroundButton.setVisible(CreateTableDialog.isCreateTableAppropriate(datastore, schema)); final String previousValue = getValue(); _schemaRef.set(schema); _datastoreRef.set(datastore); if (schema == null) { _comboBox.setModel(new DefaultComboBoxModel<>(new Table[1])); } else { final List<Table> tables = new ArrayList<>(schema.getTables()); tables.add(0, null); _comboBox.setModel(new DefaultComboBoxModel<>(tables.toArray(new Table[0]))); if (previousValue == null) { if (schema.getTableCount() == 1) { // if there is only 1 table, select that final Table table = schema.getTable(0); _comboBox.setSelectedItem(table); } } else { // select table by name final Table table = schema.getTableByName(previousValue); _comboBox.setSelectedItem(table); } } }
/**
 * Invoked when deleting a record failed. Depending on the configured error
 * handling option this either rethrows the exception (stopping the job) or
 * logs it and appends the error message to the error stream.
 *
 * @param rowData the row that failed (not written for deletes)
 * @param e the exception raised while deleting
 */
protected void errorOccurred(final Object[] rowData, final RuntimeException e) {
    _errorRowCount.incrementAndGet();
    if (errorHandlingOption == ErrorHandlingOption.STOP_JOB) {
        throw e;
    }
    logger.warn("Error occurred while deleting record. Writing to error stream", e);
    _errorDataContext.executeUpdate(cb -> {
        final Table errorTable = _errorDataContext.getDefaultSchema().getTable(0);
        cb.insertInto(errorTable).value(ERROR_MESSAGE_COLUMN_NAME, e.getMessage()).execute();
    });
}
private void validateCsvHeaders(final CsvDataContext dc) { final Schema schema = dc.getDefaultSchema(); if (schema.getTableCount() == 0) { // nothing to worry about, we will create the table ourselves return; } final Table table = schema.getTable(0); // verify that table names correspond to what we need! for (final String columnName : conditionColumnNames) { final Column column = table.getColumnByName(columnName); if (column == null) { throw new IllegalStateException("Error log file does not have required column header: " + columnName); } } final Column column = table.getColumnByName(ERROR_MESSAGE_COLUMN_NAME); if (column == null) { throw new IllegalStateException( "Error log file does not have required column: " + ERROR_MESSAGE_COLUMN_NAME); } }
/**
 * Lazily resolves the columns of the JSON datastore's table. The result is
 * cached in {@code _columns} after the first successful connection.
 *
 * @return the columns of the first table of the default schema
 */
public List<Column> getColumns() {
    if (_columns == null) {
        try (DatastoreConnection connection = _jsonDatastore.openConnection()) {
            final Table table = connection.getDataContext().getDefaultSchema().getTable(0);
            _columns = table.getColumns();
        }
    }
    return _columns;
}
}
/**
 * Adds the columns of the datastore's single table as source columns of the
 * job, but only when the choice of table is unambiguous.
 *
 * @param con the open datastore connection, may be null
 */
private void addTableToSource(final DatastoreConnection con) {
    if (con == null) {
        return;
    }
    final Schema defaultSchema = con.getSchemaNavigator().getDefaultSchema();
    // only auto-add when there is exactly one table to pick from
    if (defaultSchema.getTableCount() == 1) {
        _analysisJobBuilder.addSourceColumns(defaultSchema.getTable(0).getColumns());
    }
}
try (DataSet dataSet = dataContext.query().from(dataContext.getDefaultSchema().getTable(0)) .select("text", "count").execute()) { int i = 0;
private void validateCsvHeaders(final CsvDataContext dc) { final Schema schema = dc.getDefaultSchema(); if (schema.getTableCount() == 0) { // nothing to worry about, we will create the table ourselves return; } final Table table = schema.getTable(0); // verify that table names correspond to what we need! for (final String columnName : columnNames) { final Column column = table.getColumnByName(columnName); if (column == null) { throw new IllegalStateException("Error log file does not have required column header: " + columnName); } } if (additionalErrorLogValues != null) { for (final InputColumn<?> inputColumn : additionalErrorLogValues) { final String columnName = translateAdditionalErrorLogColumnName(inputColumn.getName()); final Column column = table.getColumnByName(columnName); if (column == null) { throw new IllegalStateException( "Error log file does not have required column header: " + columnName); } } } final Column column = table.getColumnByName(ERROR_MESSAGE_COLUMN_NAME); if (column == null) { throw new IllegalStateException( "Error log file does not have required column: " + ERROR_MESSAGE_COLUMN_NAME); } }
/**
 * Invoked when updating a record failed. Depending on the configured error
 * handling option this either rethrows the exception (stopping the job) or
 * logs it and writes the offending row to the error stream.
 *
 * @param rowData the row values: configured columns first, then any
 *                additional error-log values
 * @param e the exception raised while updating
 */
protected void errorOccurred(final Object[] rowData, final RuntimeException e) {
    _errorRowCount.incrementAndGet();
    if (errorHandlingOption == ErrorHandlingOption.STOP_JOB) {
        throw e;
    }
    logger.warn("Error occurred while updating record. Writing to error stream", e);
    _errorDataContext.executeUpdate(cb -> {
        RowInsertionBuilder builder = cb.insertInto(_errorDataContext.getDefaultSchema().getTable(0));
        // configured columns occupy the leading slots of rowData
        for (int col = 0; col < columnNames.length; col++) {
            builder = builder.value(columnNames[col], rowData[col]);
        }
        // additional error-log values follow after the configured columns
        if (additionalErrorLogValues != null) {
            for (int extra = 0; extra < additionalErrorLogValues.length; extra++) {
                builder = builder.value(
                        translateAdditionalErrorLogColumnName(additionalErrorLogValues[extra].getName()),
                        rowData[columnNames.length + extra]);
            }
        }
        builder.value(ERROR_MESSAGE_COLUMN_NAME, e.getMessage()).execute();
    });
}
final Table table = schema.getTable(0);
/**
 * Invoked when inserting a record failed. Depending on the configured error
 * handling option this either rethrows the exception (stopping the job) or
 * logs it and writes the offending row to the error stream.
 *
 * @param rowData the row values: configured columns first, then any
 *                additional error-log values
 * @param e the exception raised while inserting
 */
protected void errorOccurred(final Object[] rowData, final RuntimeException e) {
    _errorRowCount.incrementAndGet();
    if (errorHandlingOption == ErrorHandlingOption.STOP_JOB) {
        throw e;
    }
    logger.warn("Error occurred while inserting record. Writing to error stream", e);
    _errorDataContext.executeUpdate(cb -> {
        RowInsertionBuilder builder = cb.insertInto(_errorDataContext.getDefaultSchema().getTable(0));
        // configured columns occupy the leading slots of rowData
        for (int col = 0; col < columnNames.length; col++) {
            builder = builder.value(columnNames[col], rowData[col]);
        }
        // additional error-log values follow after the configured columns
        if (additionalErrorLogValues != null) {
            for (int extra = 0; extra < additionalErrorLogValues.length; extra++) {
                builder = builder.value(
                        translateAdditionalErrorLogColumnName(additionalErrorLogValues[extra].getName()),
                        rowData[columnNames.length + extra]);
            }
        }
        builder.value(ERROR_MESSAGE_COLUMN_NAME, e.getMessage()).execute();
    });
}
}
/**
 * Reads the bundled dependency-licenses.csv resource and builds the list of
 * licensed third-party projects, ordered ascending by project name.
 *
 * @return the licensed projects declared in the dependencies file
 * @throws IllegalStateException if the dependencies file is missing or
 *                               cannot be read
 */
public static List<LicensedProject> getLicensedProjects() {
    final List<LicensedProject> result = new ArrayList<>();
    final URL url = resourceManager.getUrl("licenses/dependency-licenses.csv");
    if (url == null) {
        throw new IllegalStateException("Could not find dependencies file");
    }
    try {
        final DataContext dc = DataContextFactory.createCsvDataContext(url.openStream(), ',', '"');
        final Table table = dc.getDefaultSchema().getTable(0);
        final Column projectColumn = table.getColumnByName("Project");
        final Column websiteColumn = table.getColumnByName("Website");
        final Column licenseColumn = table.getColumnByName("License");
        final Query q = dc.query().from(table).select(table.getColumns()).orderBy(projectColumn).asc().toQuery();
        // close the DataSet when done - previously it was left open (resource leak)
        try (DataSet ds = dc.executeQuery(q)) {
            while (ds.next()) {
                final LicensedProject licensedProject = new LicensedProject();
                final Row row = ds.getRow();
                final String licenseName = row.getValue(licenseColumn).toString();
                licensedProject.name = row.getValue(projectColumn).toString();
                licensedProject.websiteUrl = row.getValue(websiteColumn).toString();
                licensedProject.license = getLicense(licenseName);
                result.add(licensedProject);
            }
        }
    } catch (final IOException e) {
        throw new IllegalStateException("Error occurred while reading dependencies file", e);
    }
    return result;
}
/**
 * Reads a {@link FixedWidthConfiguration} based on a SAS 'format file',
 * <a href=
 * "http://support.sas.com/documentation/cdl/en/etlug/67323/HTML/default/viewer.htm#p0h03yig7fp1qan1arghp3lwjqi6.htm">
 * described here</a>.
 *
 * @param encoding the format file encoding
 * @param resource the format file resource
 * @param failOnInconsistentLineWidth flag specifying whether inconsistent line should stop processing or not
 * @return a {@link FixedWidthConfiguration} object to use
 */
public FixedWidthConfiguration readFromSasFormatFile(String encoding, Resource resource,
        boolean failOnInconsistentLineWidth) {
    final List<FixedWidthColumnSpec> columnSpecs = new ArrayList<>();
    final CsvDataContext dataContext = new CsvDataContext(resource, new CsvConfiguration());
    final Table table = dataContext.getDefaultSchema().getTable(0);
    try (final DataSet dataSet = dataContext.query().from(table).select("Name", "BeginPosition", "EndPosition")
            .execute()) {
        while (dataSet.next()) {
            final String columnName = (String) dataSet.getRow().getValue(0);
            final int begin = Integer.parseInt((String) dataSet.getRow().getValue(1));
            final int end = Integer.parseInt((String) dataSet.getRow().getValue(2));
            // begin/end positions are both inclusive, hence the +1 on the width
            columnSpecs.add(new FixedWidthColumnSpec(columnName, end - begin + 1));
        }
    }
    return new FixedWidthConfiguration(encoding, columnSpecs, failOnInconsistentLineWidth);
}
/**
 * Reads a {@link FixedWidthConfiguration} based on a SAS 'format file',
 * <a href=
 * "http://support.sas.com/documentation/cdl/en/etlug/67323/HTML/default/viewer.htm#p0h03yig7fp1qan1arghp3lwjqi6.htm">
 * described here</a>.
 *
 * @param encoding the format file encoding
 * @param resource the format file resource
 * @param failOnInconsistentLineWidth flag specifying whether inconsistent line should stop processing or not
 * @return a {@link FixedWidthConfiguration} object to use
 */
public FixedWidthConfiguration readFromSasFormatFile(String encoding, Resource resource,
        boolean failOnInconsistentLineWidth) {
    final List<FixedWidthColumnSpec> specs = new ArrayList<>();
    final CsvDataContext formatFileContext = new CsvDataContext(resource, new CsvConfiguration());
    final Table formatTable = formatFileContext.getDefaultSchema().getTable(0);
    try (final DataSet dataSet = formatFileContext.query().from(formatTable)
            .select("Name", "BeginPosition", "EndPosition").execute()) {
        while (dataSet.next()) {
            final String fieldName = (String) dataSet.getRow().getValue(0);
            final int beginPos = Integer.parseInt((String) dataSet.getRow().getValue(1));
            final int endPos = Integer.parseInt((String) dataSet.getRow().getValue(2));
            // begin/end positions are both inclusive, hence the +1 on the width
            specs.add(new FixedWidthColumnSpec(fieldName, 1 + endPos - beginPos));
        }
    }
    return new FixedWidthConfiguration(encoding, specs, failOnInconsistentLineWidth);
}
final Table table = tempDataContext.getDefaultSchema().getTable(0);
} else if ("https://datacleaner.org/preview_datastore".equals(href)) { try (DatastoreConnection errorCon = errorDatastore.openConnection()) { final Table table = errorCon.getDataContext().getDefaultSchema().getTable(0); final PreviewSourceDataActionListener actionListener = new PreviewSourceDataActionListener(windowContext, errorDatastore, table);
final Table table = tempDataContext.getDefaultSchema().getTable(0);