/**
 * Look up the single entity matching the query.
 * Returns null when nothing matches and throws when the match is ambiguous.
 */
@Override
public K lookup(SQ query) {
    final Iterable<K> hits = search(query);
    try {
        // getOnlyElement: null for an empty iterable, the sole element otherwise,
        // IllegalArgumentException when there is more than one.
        return Iterables.getOnlyElement(hits, null);
    } catch (IllegalArgumentException ambiguous) {
        throw new AvroBaseException("Too many results");
    }
}
if (!(e.getCause() instanceof SQLException)) return false; throw e;
if (e.getCause() instanceof SQLException) return false; throw e;
/**
 * Create a new entity: generate a fresh key via the configured supplier,
 * store the value under it, and return the key.
 *
 * @throws AvroBaseException when no key supplier was configured
 */
@Override
public K create(T value) throws AvroBaseException {
    if (supplier == null) {
        throw new AvroBaseException("No key generator provided");
    }
    final K generatedKey = supplier.get();
    put(generatedKey, value);
    return generatedKey;
}
/**
 * Construct the logging MySQL-backed AvroBase and immediately roll the log
 * table. All storage configuration is delegated to the superclass.
 *
 * NOTE(review): roll() presumably rotates/creates the write-log table —
 * confirm against its definition; a failure there aborts construction.
 *
 * @throws AvroBaseException when the initial roll of the log table fails
 */
public LoggingMysqlAB(ExecutorService es, DataSource datasource, String table, String family, String schemaTable, Schema schema, AvroFormat storageFormat, KeyStrategy<K> keytx) throws AvroBaseException {
    super(es, datasource, table, family, schemaTable, schema, storageFormat, keytx);
    try {
        roll();
    } catch (SQLException e) {
        throw new AvroBaseException("Could not roll log table", e);
    }
}
/**
 * Export all schemas to the stream. A leading boolean true marks the start of
 * the payload and a trailing false terminates it (the importer reads booleans
 * as continuation markers — see importSchema).
 */
@Override
public void exportSchema(DataOutputStream dos) {
    try {
        dos.writeBoolean(true);
        writeSchemas(dos);
        dos.writeBoolean(false);
    } catch (Exception failure) {
        throw new AvroBaseException(failure);
    }
}
/**
 * Parse a stored schema blob and register it in both lookup maps
 * (id -> schema and schema -> id) before returning it.
 *
 * @throws AvroBaseException when the bytes are not a valid Avro schema
 */
private Schema loadSchema(int id, byte[] value) throws AvroBaseException {
    final Schema parsed;
    try {
        parsed = Schema.parse(new ByteArrayInputStream(value));
    } catch (IOException failure) {
        throw new AvroBaseException("Could not parse the schema", failure);
    }
    abbrevSchema.put(id, parsed);
    schemaAbbrev.put(parsed, id);
    return parsed;
}
// Orders S3 objects by their row key: filenames are hex-encoded key bytes,
// so decode both names and delegate to bytesComparator for the byte ordering.
@Override
public int compare(S3Object s3Object, S3Object s3Object1) {
    try {
        final byte[] key = Hex.decodeHex(filename(s3Object).toCharArray());
        final byte[] key1 = Hex.decodeHex(filename(s3Object1).toCharArray());
        return bytesComparator.compare(key, key1);
    } catch (DecoderException e) {
        // A non-hex filename means the bucket holds an object we did not write.
        throw new AvroBaseException("Failed to decode filename: " + s3Object.getName(), e);
    }
}
});
/**
 * Parse a stored schema blob and prime both caches (id -> schema and
 * schema -> id) before handing the schema back.
 *
 * @throws AvroBaseException when the bytes are not a valid Avro schema
 */
private Schema loadSchema(int id, byte[] value) throws AvroBaseException {
    final Schema parsed;
    try {
        parsed = Schema.parse(new ByteArrayInputStream(value));
    } catch (IOException failure) {
        throw new AvroBaseException("Could not parse the schema", failure);
    }
    schemas.put(id, parsed);
    schemaIds.put(parsed, id);
    return parsed;
}
/**
 * Load a schema from the schema table.
 * Parses the stored bytes into an Avro Schema and registers it in both
 * caches (row -> schema and schema -> row) before returning.
 *
 * @throws AvroBaseException when the stored bytes cannot be parsed
 */
protected Schema loadSchema(byte[] value, String row) throws AvroBaseException {
    final Schema parsed;
    try {
        parsed = Schema.parse(new ByteArrayInputStream(value));
    } catch (IOException failure) {
        throw new AvroBaseException("Failed to deserialize schema: " + row, failure);
    }
    schemaCache.put(row, parsed);
    hashCache.put(parsed, row);
    return parsed;
}
// Callback for a single-row SELECT; columns are (schema_id, version, avro).
// `row` and `format` are captured from the enclosing scope. Returns null when
// no row matched; fails fast on a schema id we cannot resolve.
Row<T, Long> execute(ResultSet rs) throws AvroBaseException, SQLException {
    if (rs.next()) {
        int schema_id = rs.getInt(1);
        long version = rs.getLong(2);
        byte[] avro = rs.getBytes(3);
        Schema schema = getSchema(schema_id);
        if (schema != null) {
            return new Row<T, Long>(readValue(avro, schema, format), row, version);
        } else {
            throw new AvroBaseException("Failed to find schema: " + schema_id);
        }
    } else {
        return null;
    }
}
}.query();
// Callback for a multi-row SELECT; columns are (row, schema_id, version, avro).
// Materializes every matching row into a list (not lazy), failing fast on the
// first schema id that cannot be resolved.
Iterable<Row<T, Long>> execute(ResultSet rs) throws AvroBaseException, SQLException {
    final List<Row<T,Long>> results = new LinkedList<Row<T,Long>>();
    while (rs.next()) {
        long row = rs.getLong(1);
        int schema_id = rs.getInt(2);
        long version = rs.getLong(3);
        byte[] avro = rs.getBytes(4);
        Schema schema = getSchema(schema_id);
        if (schema != null) {
            results.add(new Row<T, Long>(readValue(avro, schema, format), row, version));
        } else {
            throw new AvroBaseException("Failed to find schema: " + schema_id);
        }
    }
    return results;
}
}.query();
/**
 * Key strategy backed by a MySQL auto-increment table named
 * {table}__{family}_ids. Ensures that table exists, creating it on first use.
 *
 * Fix over the original: the metadata ResultSet and the CREATE TABLE
 * Statement were leaked when executeUpdate()/next() threw — both are now
 * closed in finally blocks.
 *
 * @throws AvroBaseException when the table cannot be created or checked,
 *                           or when the connection cannot be closed
 */
public SequentialReversedKeyStrategy(DataSource ds, String table, String family) {
    this.ds = ds;
    tableName = table + "__" + family + "_" + "ids";
    Connection connection = null;
    try {
        connection = ds.getConnection();
        DatabaseMetaData data = connection.getMetaData();
        ResultSet tables = data.getTables(null, null, tableName, null);
        try {
            if (!tables.next()) {
                // Table is missing: create the id-generator table.
                Statement statement = connection.createStatement();
                try {
                    statement.executeUpdate("CREATE TABLE " + tableName + " (id bigint auto_increment primary key not null)");
                } finally {
                    statement.close();
                }
            }
        } finally {
            tables.close();
        }
    } catch (Exception e) {
        throw new AvroBaseException("Could not create table: " + tableName, e);
    } finally {
        if (connection != null) {
            try {
                connection.close();
            } catch (SQLException e) {
                // Preserves the original contract: a close failure is fatal.
                throw new AvroBaseException("Could not close connection", e);
            }
        }
    }
}
/**
 * Run the configured write statement against a fresh connection and return
 * the number of affected rows. Subclasses bind parameters via setup().
 *
 * Fix over the original: the statement and the connection are now closed
 * independently — previously, if ps.close() threw, c.close() was skipped
 * and the connection leaked.
 *
 * @throws AvroBaseException wrapping any SQLException from the driver
 */
public int insert() throws AvroBaseException {
    try {
        Connection c = null;
        PreparedStatement ps = null;
        try {
            c = datasource.getConnection();
            ps = c.prepareStatement(statement);
            setup(ps);
            return ps.executeUpdate();
        } finally {
            try {
                if (ps != null) ps.close();
            } finally {
                // Runs even when ps.close() throws, so the connection is
                // always returned; the close exception still propagates.
                if (c != null) c.close();
            }
        }
    } catch (SQLException e) {
        throw new AvroBaseException("Database problem", e);
    }
}
}
// Streaming export callback: instead of materializing rows, each result row
// is framed with a leading boolean true and written to `dos` (captured from
// the enclosing scope). Always returns null — callers rely only on the side
// effect on the stream.
@Override
public Iterable<Row<T, K>> execute(final ResultSet rs) throws AvroBaseException, SQLException {
    while (rs.next()) {
        byte[] row = rs.getBytes(1);
        int schemaId = rs.getInt(2);
        long version = rs.getLong(3);
        int format = rs.getInt(4);
        byte[] bytes = rs.getBytes(5);
        try {
            dos.writeBoolean(true);
            writeRow(dos, row, schemaId, version, format, bytes);
        } catch (IOException e) {
            throw new AvroBaseException("Could not write", e);
        }
    }
    return null;
}
}.query();
PreparedStatement insertRow = c.prepareStatement("INSERT INTO " + tableName + " () VALUES ()"); int insert = insertRow.executeUpdate(); if (insert != 1) throw new AvroBaseException("Could not get new key: " + insert + " rows updated"); insertRow.close(); PreparedStatement getRow = c.prepareStatement("SELECT LAST_INSERT_ID()"); Long query = resultSet.getLong(1); int deleted = c.prepareStatement("DELETE FROM " + tableName + " WHERE id = LAST_INSERT_ID()").executeUpdate(); if (deleted != 1) throw new AvroBaseException("Failed to delete row"); byte[] row = String.valueOf(query).getBytes(); int length = row.length; throw new AvroBaseException("Failed to find last insert id"); } catch (Exception e) { throw new AvroBaseException("Failed to get key", e); } finally { try {
/** * Scan the list of shards for the shard that contains the row. * @param row * @return */ @Override public Shard<T, K> find(K row) { // TODO: convert to binary search readShards.lock(); try { for (int i = 0; i < activeShards.size(); i++) { PartitionedShard<T, K> shard = activeShards.get(i); if (shard.start == null || comparator.compare(shard.start, row) <= 0) { if (i + 1 == activeShards.size() || comparator.compare(activeShards.get(i + 1).start, row) > 0) { synchronized (usedShards) { usedShards.add(shard); } return shard; } } } throw new AvroBaseException("No active shard matches row"); } finally { readShards.unlock(); } }
// Asynchronous task: insert one (row, schema_id, version, format, avro) tuple
// into the MySQL log table, then release a permit to signal completion.
@Override
public void run() {
    try {
        Connection c = null;
        PreparedStatement ps = null;
        try {
            c = datasource.getConnection();
            String statement = "INSERT INTO " + mysqlTableName + " (row, schema_id, version, format, avro) VALUES (?,?,?,?,?)";
            ps = c.prepareStatement(statement);
            ps.setBytes(1, row);
            ps.setInt(2, schemaId);
            ps.setLong(3, version);
            ps.setInt(4, format);
            ps.setBytes(5, bytes);
            ps.executeUpdate();
        } finally {
            if (ps != null) ps.close();
            if (c != null) c.close();
        }
    } catch (SQLException e) {
        throw new AvroBaseException("Database problem", e);
    }
    // NOTE(review): the permit is released only on success — if the insert
    // throws, whoever acquires `sema` may block forever. Confirm against the
    // acquiring side whether this release belongs in a finally block.
    sema.release();
}
});
throw new AvroBaseException("Invalid schema configuration, must have 1 unique key"); throw new AvroBaseException("Invalid Solr URL: " + solrURL, e); } catch (ParserConfigurationException e) { throw new AvroBaseException(e); } catch (SAXException e) { throw new AvroBaseException("Failed to parse schema", e); } catch (IOException e) { throw new AvroBaseException("Failed to read schema", e);
/**
 * Import schemas from the stream and upsert them into the schema table.
 * The stream is framed by booleans: true means another (id, hash, json)
 * record follows; false terminates the import (mirrors exportSchema).
 *
 * @throws AvroBaseException when the stream cannot be read
 */
@Override
public void importSchema(DataInputStream dis) {
    try {
        while (dis.readBoolean()) {
            final int schemaId = dis.readInt();
            final byte[] schemaHash = new byte[dis.readInt()];
            dis.readFully(schemaHash);
            final byte[] schemaJson = new byte[dis.readInt()];
            dis.readFully(schemaJson);
            // Upsert: existing ids get their hash/json replaced.
            new Update(datasource,
                "INSERT INTO " + schemaTable + " (id, hash, json) VALUES (?,?,?) "
                    + "ON DUPLICATE KEY UPDATE hash=values(hash), json=values(json)") {
                public void setup(PreparedStatement ps) throws AvroBaseException, SQLException {
                    ps.setInt(1, schemaId);
                    ps.setBytes(2, schemaHash);
                    ps.setBytes(3, schemaJson);
                }
            }.insert();
        }
    } catch (IOException e) {
        throw new AvroBaseException("Failed to read", e);
    }
}