/**
 * Creates a meetup RSVP stream that publishes records to the local Kafka broker.
 *
 * @param schemaFile Pinot schema file describing the stream records
 * @throws IOException if the schema file cannot be read
 * @throws URISyntaxException declared for compatibility with callers
 */
public MeetupRsvpStream(File schemaFile) throws IOException, URISyntaxException {
  schema = Schema.fromFile(schemaFile);

  // Configure the (old-style) Kafka producer: raw byte payloads, ack from the leader only.
  Properties producerProps = new Properties();
  producerProps.put("metadata.broker.list", KafkaStarterUtils.DEFAULT_KAFKA_BROKER);
  producerProps.put("serializer.class", "kafka.serializer.DefaultEncoder");
  producerProps.put("request.required.acks", "1");
  producer = new Producer<String, byte[]>(new ProducerConfig(producerProps));
}
_dataSchema = Schema.fromFile(new File(schemaFilePath)); } else { _dataSchema = null;
/**
 * Generates {@code _numFiles} data files with {@code _numRecords} records each, based on the
 * schema in {@code _schemaFile} and the cardinality/range annotations in {@code _schemaAnnFile}.
 *
 * @return {@code true} on successful generation
 * @throws IllegalArgumentException if the requested record or file count is negative
 */
@Override
public boolean execute() throws Exception {
  LOGGER.info("Executing command: " + toString());
  if ((_numRecords < 0) || (_numFiles < 0)) {
    // IllegalArgumentException is-a RuntimeException, so existing callers are unaffected.
    throw new IllegalArgumentException("Cannot generate negative number of records/files.");
  }
  Schema schema = Schema.fromFile(new File(_schemaFile));

  // Per-column generator configuration, populated from the schema and the annotation file.
  // ArrayList over LinkedList: the column list is append-only and iterated, never spliced.
  List<String> columns = new ArrayList<>();
  final HashMap<String, DataType> dataTypes = new HashMap<>();
  final HashMap<String, FieldType> fieldTypes = new HashMap<>();
  final HashMap<String, TimeUnit> timeUnits = new HashMap<>();
  final HashMap<String, Integer> cardinality = new HashMap<>();
  final HashMap<String, IntRange> range = new HashMap<>();
  buildCardinalityRangeMaps(_schemaAnnFile, cardinality, range);

  final DataGeneratorSpec spec =
      buildDataGeneratorSpec(schema, columns, dataTypes, fieldTypes, timeUnits, cardinality, range);
  final DataGenerator gen = new DataGenerator();
  gen.init(spec);
  gen.generate(_numRecords, _numFiles);
  return true;
}
/**
 * Uploads the schema in {@code _schemaFile} to the controller. When {@code -exec} is not
 * specified this is a dry run that only logs the command.
 *
 * @return {@code true} when the command completes (including dry runs)
 * @throws FileNotFoundException if the schema file does not exist
 */
@Override
public boolean execute() throws Exception {
  if (_controllerHost == null) {
    _controllerHost = NetUtil.getHostAddress();
  }
  if (!_exec) {
    LOGGER.warn("Dry Running Command: " + toString());
    LOGGER.warn("Use the -exec option to actually execute the command.");
    return true;
  }
  File schemaFile = new File(_schemaFile);
  LOGGER.info("Executing command: " + toString());
  if (!schemaFile.exists()) {
    // Fixed: previous message contained a stray literal '+' ("file does not exist, + ").
    throw new FileNotFoundException("File does not exist: " + _schemaFile);
  }
  Schema schema = Schema.fromFile(schemaFile);
  // try-with-resources ensures the HTTP client is closed even if the upload fails.
  try (FileUploadDownloadClient fileUploadDownloadClient = new FileUploadDownloadClient()) {
    fileUploadDownloadClient.addSchema(
        FileUploadDownloadClient.getUploadSchemaHttpURI(_controllerHost, Integer.parseInt(_controllerPort)),
        schema.getSchemaName(), schemaFile);
  }
  return true;
}
}
@BeforeClass public void setUp() throws Exception { FileUtils.deleteQuietly(INDEX_DIR); // We specify two columns without inverted index ('column1', 'column13'), one non-existing column ('noSuchColumn') // and one column with existed inverted index ('column7'). _indexLoadingConfig = new IndexLoadingConfig(); _indexLoadingConfig.setInvertedIndexColumns( new HashSet<>(Arrays.asList(COLUMN1_NAME, COLUMN7_NAME, COLUMN13_NAME, NO_SUCH_COLUMN_NAME))); ClassLoader classLoader = getClass().getClassLoader(); URL resourceUrl = classLoader.getResource(AVRO_DATA); Assert.assertNotNull(resourceUrl); _avroFile = new File(resourceUrl.getFile()); // For newColumnsSchema, we add 4 different data type metric columns with one user-defined default null value, and // 3 different data type dimension columns with one user-defined default null value and one multi-value column. resourceUrl = classLoader.getResource(SCHEMA); Assert.assertNotNull(resourceUrl); _schema = Schema.fromFile(new File(resourceUrl.getFile())); resourceUrl = classLoader.getResource(NEW_COLUMNS_SCHEMA1); Assert.assertNotNull(resourceUrl); _newColumnsSchema1 = Schema.fromFile(new File(resourceUrl.getFile())); resourceUrl = classLoader.getResource(NEW_COLUMNS_SCHEMA2); Assert.assertNotNull(resourceUrl); _newColumnsSchema2 = Schema.fromFile(new File(resourceUrl.getFile())); resourceUrl = classLoader.getResource(NEW_COLUMNS_SCHEMA3); Assert.assertNotNull(resourceUrl); _newColumnsSchema3 = Schema.fromFile(new File(resourceUrl.getFile())); }
Schema schema = Schema.fromFile(new File(_schemaFilePath));
/**
 * Loads the schema (explicit file, or derived from Avro data) and the optional CSV reader
 * config, then applies the time-column settings derived from the schema.
 *
 * @deprecated Load outside the class and use the setter for schema setting.
 * @throws IOException if any of the configured files cannot be read
 */
@Deprecated
public void loadConfigFiles() throws IOException {
  Schema schema;
  if (_schemaFile != null) {
    // An explicit schema file always wins.
    schema = Schema.fromFile(new File(_schemaFile));
  } else if (_format == FileFormat.AVRO) {
    // Avro files embed their schema, so the Pinot schema can be derived from the data itself.
    schema = AvroUtils.getPinotSchemaFromAvroDataFile(new File(_inputFilePath));
  } else {
    throw new RuntimeException("Input format " + _format + " requires schema.");
  }
  setSchema(schema);
  setTimeColumnName(schema.getTimeColumnName());

  TimeFieldSpec timeFieldSpec = schema.getTimeFieldSpec();
  // Fall back to DAYS when the schema does not declare a time field.
  setSegmentTimeUnit(timeFieldSpec == null ? TimeUnit.DAYS
      : timeFieldSpec.getIncomingGranularitySpec().getTimeType());

  if (_readerConfigFile != null) {
    setReaderConfig(JsonUtils.fileToObject(new File(_readerConfigFile), CSVRecordReaderConfig.class));
  }
}
@Test public void testSerializeDeserialize() throws Exception { URL resourceUrl = getClass().getClassLoader().getResource("schemaTest.schema"); Assert.assertNotNull(resourceUrl); Schema schema = Schema.fromFile(new File(resourceUrl.getFile())); Schema schemaToCompare = Schema.fromString(schema.getJSONSchema()); Assert.assertEquals(schemaToCompare, schema); Assert.assertEquals(schemaToCompare.hashCode(), schema.hashCode()); schemaToCompare = SchemaUtils.fromZNRecord(SchemaUtils.toZNRecord(schema)); Assert.assertEquals(schemaToCompare, schema); Assert.assertEquals(schemaToCompare.hashCode(), schema.hashCode()); // When setting new fields, schema string should be updated String JSONSchema = schemaToCompare.getJSONSchema(); schemaToCompare.setSchemaName("newSchema"); String JSONSchemaToCompare = schemaToCompare.getJSONSchema(); Assert.assertFalse(JSONSchema.equals(JSONSchemaToCompare)); }
/**
 * Registers the given schema: caches it locally when the new config format is in use, otherwise
 * uploads it to the controller.
 *
 * @param schemaFile schema file to register
 * @param schemaName name under which the schema is uploaded
 */
protected void addSchema(File schemaFile, String schemaName) throws Exception {
  if (isUsingNewConfigFormat()) {
    _schema = Schema.fromFile(schemaFile);
    return;
  }
  // Legacy path: push the schema to the controller over HTTP; the client is auto-closed.
  try (FileUploadDownloadClient fileUploadDownloadClient = new FileUploadDownloadClient()) {
    fileUploadDownloadClient.addSchema(
        FileUploadDownloadClient.getUploadSchemaHttpURI(LOCAL_HOST, _controllerPort), schemaName,
        schemaFile);
  }
}
final AirlineDataStream stream = new AirlineDataStream(Schema.fromFile(_schemaFile), _dataFile); stream.run();
/**
 * Creates a realtime table (with no offline counterpart) whose schema, time column and time
 * type are derived from the test schema file.
 *
 * @param table name of the realtime table to create
 */
private void setupRealtimeTable(String table) throws Exception {
  _offlineTableConfig = null;

  File schemaFile = getSchemaFile();
  Schema schema = Schema.fromFile(schemaFile);
  String schemaName = schema.getSchemaName();
  addSchema(schemaFile, schemaName);

  // The schema must declare a time column with an outgoing time unit.
  String timeColumnName = schema.getTimeColumnName();
  Assert.assertNotNull(timeColumnName);
  TimeUnit outgoingTimeUnit = schema.getOutgoingTimeUnit();
  Assert.assertNotNull(outgoingTimeUnit);

  addRealtimeTable(table, useLlc(), KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KafkaStarterUtils.DEFAULT_ZK_STR,
      getKafkaTopic(), getRealtimeSegmentFlushSize(), null, timeColumnName, outgoingTimeUnit.toString(),
      schemaName, null, null, getLoadMode(), getSortedColumn(), getInvertedIndexColumns(),
      getBloomFilterIndexColumns(), getRawIndexColumns(), getTaskConfig(), getStreamConsumerFactoryClassName());
  completeTableConfiguration();
}
/**
 * Registers the test schema and creates the realtime table for it, seeding the stream from the
 * given Avro file.
 *
 * @param avroFile Avro file used to seed the realtime stream
 */
protected void setUpTable(File avroFile) throws Exception {
  File schemaFile = getSchemaFile();
  Schema schema = Schema.fromFile(schemaFile);
  String schemaName = schema.getSchemaName();
  addSchema(schemaFile, schemaName);

  // The schema must declare a time column with an outgoing time unit.
  String timeColumnName = schema.getTimeColumnName();
  Assert.assertNotNull(timeColumnName);
  TimeUnit outgoingTimeUnit = schema.getOutgoingTimeUnit();
  Assert.assertNotNull(outgoingTimeUnit);

  addRealtimeTable(getTableName(), useLlc(), KafkaStarterUtils.DEFAULT_KAFKA_BROKER, KafkaStarterUtils.DEFAULT_ZK_STR,
      getKafkaTopic(), getRealtimeSegmentFlushSize(), avroFile, timeColumnName, outgoingTimeUnit.toString(),
      schemaName, null, null, getLoadMode(), getSortedColumn(), getInvertedIndexColumns(),
      getBloomFilterIndexColumns(), getRawIndexColumns(), getTaskConfig(), getStreamConsumerFactoryClassName());
  completeTableConfiguration();
}
Schema schema = Schema.fromFile(_schemaFile); ClusterIntegrationTestUtils .buildSegmentsFromAvro(_offlineAvroFiles, 0, _segmentDir, _tarDir, _tableName, false, null,