/**
 * Inserts one chunk of a job log into the execution_logs table within the
 * supplied transaction. The chunk is gzipped when encType is GZIP, otherwise
 * trimmed to exactly {@code length} bytes before being stored.
 *
 * @throws SQLException if the insert fails
 * @throws IOException  if gzip compression fails
 */
private void uploadLogPart(final DatabaseTransOperator transOperator, final int execId,
    final String name, final int attempt, final int startByte, final int endByte,
    final EncodingType encType, final byte[] buffer, final int length)
    throws SQLException, IOException {
  final String INSERT_EXECUTION_LOGS = "INSERT INTO execution_logs "
      + "(exec_id, name, attempt, enc_type, start_byte, end_byte, "
      + "log, upload_time) VALUES (?,?,?,?,?,?,?,?)";
  // Prepare the payload: compress for GZIP, otherwise trim to `length` bytes.
  final byte[] payload;
  if (encType == EncodingType.GZIP) {
    payload = GZIPUtils.gzipBytes(buffer, 0, length);
  } else if (length < buffer.length) {
    payload = Arrays.copyOf(buffer, length);
  } else {
    payload = buffer;
  }
  // NOTE(review): the endByte parameter is unused; the stored end_byte is
  // computed as startByte + length — confirm callers expect this.
  transOperator.update(INSERT_EXECUTION_LOGS, execId, name, attempt,
      encType.getNumVal(), startByte, startByte + length, payload,
      DateTime.now().getMillis());
}
@Override public List<ExecutableFlow> handle(final ResultSet rs) throws SQLException { if (!rs.next()) { return Collections.emptyList(); } final List<ExecutableFlow> execFlows = new ArrayList<>(); do { final int id = rs.getInt(1); final int encodingType = rs.getInt(2); final byte[] data = rs.getBytes(3); if (data != null) { final EncodingType encType = EncodingType.fromInteger(encodingType); try { final ExecutableFlow exFlow = ExecutableFlow.createExecutableFlowFromObject( GZIPUtils.transformBytesToObject(data, encType)); execFlows.add(exFlow); } catch (final IOException e) { throw new SQLException("Error retrieving flow data " + id, e); } } } while (rs.next()); return execFlows; } }
/**
 * Serializes the flow to JSON (encoded per encType) and inserts it as a new
 * row in project_flows for the given project version.
 *
 * @throws ProjectManagerException if the database insert fails
 * @throws IOException             if JSON encoding fails
 */
private void uploadFlow(final Project project, final int version, final Flow flow,
    final EncodingType encType) throws ProjectManagerException, IOException {
  final String INSERT_FLOW =
      "INSERT INTO project_flows (project_id, version, flow_id, modified_time, encoding_type, json) values (?,?,?,?,?,?)";
  final byte[] data = convertJsonToBytes(encType, JSONUtils.toJSON(flow.toObject()));
  logger.info("Flow upload " + flow.getId() + " is byte size " + data.length);
  try {
    this.dbOperator.update(INSERT_FLOW, project.getId(), version, flow.getId(),
        System.currentTimeMillis(), encType.getNumVal(), data);
  } catch (final SQLException e) {
    logger.error("Error inserting flow", e);
    throw new ProjectManagerException("Error inserting flow " + flow.getId(), e);
  }
}
@Override public List<ExecutableFlow> handle( final ResultSet rs) throws SQLException { if (!rs.next()) { return Collections.emptyList(); } final List<ExecutableFlow> execFlows = new ArrayList<>(); do { final int id = rs.getInt(1); final int encodingType = rs.getInt(2); final byte[] data = rs.getBytes(3); if (data != null) { final EncodingType encType = EncodingType.fromInteger(encodingType); try { final ExecutableFlow exFlow = ExecutableFlow.createExecutableFlowFromObject( GZIPUtils.transformBytesToObject(data, encType)); execFlows.add(exFlow); } catch (final IOException e) { throw new SQLException("Error retrieving flow data " + id, e); } } } while (rs.next()); return execFlows; } }
/**
 * Re-encodes the project's settings JSON with the given encoding and writes
 * it into the projects row (enc_type + settings_blob).
 *
 * @throws ProjectManagerException if encoding or the database update fails
 */
private void updateProjectSettings(final Project project, final EncodingType encType)
    throws ProjectManagerException {
  final String UPDATE_PROJECT_SETTINGS =
      "UPDATE projects SET enc_type=?, settings_blob=? WHERE id=?";
  final String json = JSONUtils.toJSON(project.toObject());
  final byte[] blob;
  try {
    blob = convertJsonToBytes(encType, json);
    logger.debug("NumChars: " + json.length() + " Gzip:" + blob.length);
  } catch (final IOException e) {
    throw new ProjectManagerException("Failed to encode. ", e);
  }
  try {
    this.dbOperator.update(UPDATE_PROJECT_SETTINGS, encType.getNumVal(), blob, project.getId());
  } catch (final SQLException e) {
    logger.error("update Project Settings failed.", e);
    throw new ProjectManagerException(
        "Error updating project " + project.getName() + " version " + project.getVersion(), e);
  }
}
final EncodingType encType = EncodingType.fromInteger(encodingType);
/**
 * Inserts one named property set for the project's current version into
 * project_properties, using the loader's default encoding.
 *
 * @throws ProjectManagerException if the database insert fails
 * @throws IOException             if serializing the props fails
 */
private void uploadProjectProperty(final Project project, final String name, final Props props)
    throws ProjectManagerException, IOException {
  final String INSERT_PROPERTIES =
      "INSERT INTO project_properties (project_id, version, name, modified_time, encoding_type, property) values (?,?,?,?,?,?)";
  final byte[] serialized = getBytes(props);
  try {
    this.dbOperator.update(INSERT_PROPERTIES, project.getId(), project.getVersion(), name,
        System.currentTimeMillis(), this.defaultEncodingType.getNumVal(), serialized);
  } catch (final SQLException e) {
    throw new ProjectManagerException(
        "Error uploading project properties " + name + " into " + project.getName()
            + " version " + project.getVersion(), e);
  }
}
@Override public List<Pair<String, Props>> handle(final ResultSet rs) throws SQLException { if (!rs.next()) { return Collections.emptyList(); } final List<Pair<String, Props>> properties = new ArrayList<>(); do { final String name = rs.getString(3); final int eventType = rs.getInt(5); final byte[] dataBytes = rs.getBytes(6); final EncodingType encType = EncodingType.fromInteger(eventType); String propertyString = null; try { if (encType == EncodingType.GZIP) { // Decompress the sucker. propertyString = GZIPUtils.unGzipString(dataBytes, "UTF-8"); } else { propertyString = new String(dataBytes, "UTF-8"); } final Props props = PropsUtils.fromJSONString(propertyString); props.setSource(name); properties.add(new Pair<>(name, props)); } catch (final IOException e) { throw new SQLException(e); } } while (rs.next()); return properties; } }
/**
 * Serializes the trigger to JSON (gzipped when encType is GZIP) and updates
 * its existing row via the UPDATE_TRIGGER statement.
 *
 * @throws TriggerLoaderException if encoding fails, the update fails, or no
 *                                row matched the trigger id
 */
private void updateTrigger(final Trigger t, final EncodingType encType)
    throws TriggerLoaderException {
  final String json = JSONUtils.toJSON(t.toJson());
  byte[] payload;
  try {
    final byte[] utf8 = json.getBytes("UTF-8");
    payload = (encType == EncodingType.GZIP) ? GZIPUtils.gzipBytes(utf8) : utf8;
    logger.debug(
        "NumChars: " + json.length() + " UTF-8:" + utf8.length + " Gzip:" + payload.length);
  } catch (final IOException e) {
    logger.error("Trigger encoding fails", e);
    throw new TriggerLoaderException("Error encoding the trigger " + t.toString(), e);
  }
  try {
    final int updated = this.dbOperator.update(UPDATE_TRIGGER, t.getSource(),
        t.getLastModifyTime(), encType.getNumVal(), payload, t.getTriggerId());
    if (updated == 0) {
      throw new TriggerLoaderException("No trigger has been updated.");
    }
  } catch (final SQLException ex) {
    logger.error("Updating Trigger " + t.getTriggerId() + " failed.");
    throw new TriggerLoaderException("DB Trigger update failed. ", ex);
  }
}
@Override public List<Pair<ExecutionReference, ExecutableFlow>> handle(final ResultSet rs) throws SQLException { if (!rs.next()) { return Collections.emptyList(); } final List<Pair<ExecutionReference, ExecutableFlow>> execFlows = new ArrayList<>(); do { final int id = rs.getInt(1); final int encodingType = rs.getInt(2); final byte[] data = rs.getBytes(3); if (data == null) { logger.error("Found a flow with empty data blob exec_id: " + id); } else { final EncodingType encType = EncodingType.fromInteger(encodingType); try { final ExecutableFlow exFlow = ExecutableFlow.createExecutableFlowFromObject( GZIPUtils.transformBytesToObject(data, encType)); final ExecutionReference ref = new ExecutionReference(id); execFlows.add(new Pair<>(ref, exFlow)); } catch (final IOException e) { throw new SQLException("Error retrieving flow data " + id, e); } } } while (rs.next()); return execFlows; } }
/**
 * Overwrites the stored JSON for an existing flow row in project_flows
 * (keyed by project id, version and flow id), using the default encoding.
 *
 * @throws ProjectManagerException if encoding or the database update fails
 */
@Override
public void updateFlow(final Project project, final int version, final Flow flow)
    throws ProjectManagerException {
  logger.info("Uploading flow " + flow.getId());
  final String UPDATE_FLOW =
      "UPDATE project_flows SET encoding_type=?,json=? WHERE project_id=? AND version=? AND flow_id=?";
  try {
    final byte[] data =
        convertJsonToBytes(this.defaultEncodingType, JSONUtils.toJSON(flow.toObject()));
    logger.info("Flow upload " + flow.getId() + " is byte size " + data.length);
    try {
      this.dbOperator.update(UPDATE_FLOW, this.defaultEncodingType.getNumVal(), data,
          project.getId(), version, flow.getId());
    } catch (final SQLException e) {
      logger.error("Error inserting flow", e);
      throw new ProjectManagerException("Error inserting flow " + flow.getId(), e);
    }
  } catch (final IOException e) {
    throw new ProjectManagerException("Flow Upload failed.", e);
  }
}
/**
 * Reads one row's exec_id / enc_type / flow_data columns and resolves them
 * into an (ExecutionReference, ExecutableFlow) pair via
 * getPairWithExecutorInfo. Returns null (after a warning) when flow_data is
 * null.
 *
 * @throws SQLException if the flow blob cannot be deserialized
 */
private static Pair<ExecutionReference, ExecutableFlow> getExecutableFlowHelper(
    final ResultSet rs) throws SQLException {
  final int id = rs.getInt("exec_id");
  final int encodingType = rs.getInt("enc_type");
  final byte[] data = rs.getBytes("flow_data");
  if (data == null) {
    // Guard clause: nothing to deserialize for this execution.
    logger.warn("Execution id " + id + " has flow_data = null. To clean up, update status to "
        + "FAILED manually, eg. "
        + "SET status = " + Status.FAILED.getNumVal() + " WHERE id = " + id);
    return null;
  }
  final ExecutableFlow exFlow;
  try {
    exFlow = ExecutableFlow.createExecutableFlowFromObject(
        GZIPUtils.transformBytesToObject(data, EncodingType.fromInteger(encodingType)));
  } catch (final IOException e) {
    throw new SQLException("Error retrieving flow data " + id, e);
  }
  return getPairWithExecutorInfo(rs, exFlow);
}
return transOperator .update(INSERT_PROJECT, name, true, time, time, null, creator.getUserId(), description, this.defaultEncodingType.getNumVal(), null); };
final EncodingType encType = EncodingType.fromInteger(encodingType);
/**
 * Seeds the mocked execution with a PLAIN-encoded serialized test flow
 * built from the "exectest1"/"exec1" fixtures.
 */
private void mockResultWithData() throws Exception {
  final ExecutableFlow flow = TestUtils.createTestExecutableFlow("exectest1", "exec1");
  final byte[] data = JSONUtils.toJSON(flow.toObject()).getBytes("UTF-8");
  mockExecution(EncodingType.PLAIN.getNumVal(), data);
}
final EncodingType encType = EncodingType.fromInteger(rs.getInt(4)); final int startByte = rs.getInt(5); final int endByte = rs.getInt(6);
private void updateExecutableFlow(final ExecutableFlow flow, final EncodingType encType) throws ExecutorManagerException { final String UPDATE_EXECUTABLE_FLOW_DATA = "UPDATE execution_flows " + "SET status=?,update_time=?,start_time=?,end_time=?,enc_type=?,flow_data=? " + "WHERE exec_id=?"; final String json = JSONUtils.toJSON(flow.toObject()); byte[] data = null; try { final byte[] stringData = json.getBytes("UTF-8"); data = stringData; // Todo kunkun-tang: use a common method to transform stringData to data. if (encType == EncodingType.GZIP) { data = GZIPUtils.gzipBytes(stringData); } } catch (final IOException e) { throw new ExecutorManagerException("Error encoding the execution flow."); } try { this.dbOperator.update(UPDATE_EXECUTABLE_FLOW_DATA, flow.getStatus() .getNumVal(), flow.getUpdateTime(), flow.getStartTime(), flow .getEndTime(), encType.getNumVal(), data, flow.getExecutionId()); } catch (final SQLException e) { throw new ExecutorManagerException("Error updating flow.", e); } }
final EncodingType encType = EncodingType.fromInteger(encodingType); final Object blobObj; try {
/**
 * Writes a single log segment row to execution_logs inside the given
 * transaction, compressing the payload for GZIP or trimming it to
 * {@code length} bytes otherwise.
 *
 * @throws SQLException if the insert fails
 * @throws IOException  if gzip compression fails
 */
private void uploadLogPart(final DatabaseTransOperator transOperator, final int execId,
    final String name, final int attempt, final int startByte, final int endByte,
    final EncodingType encType, final byte[] buffer, final int length)
    throws SQLException, IOException {
  final String INSERT_EXECUTION_LOGS = "INSERT INTO execution_logs "
      + "(exec_id, name, attempt, enc_type, start_byte, end_byte, "
      + "log, upload_time) VALUES (?,?,?,?,?,?,?,?)";
  final byte[] logBytes;
  if (encType == EncodingType.GZIP) {
    logBytes = GZIPUtils.gzipBytes(buffer, 0, length);
  } else if (length < buffer.length) {
    logBytes = Arrays.copyOf(buffer, length);
  } else {
    logBytes = buffer;
  }
  // NOTE(review): endByte is not used; end_byte is persisted as
  // startByte + length — confirm this matches caller expectations.
  transOperator.update(INSERT_EXECUTION_LOGS, execId, name, attempt,
      encType.getNumVal(), startByte, startByte + length, logBytes,
      DateTime.now().getMillis());
}
@Override public List<ExecutableFlow> handle(final ResultSet rs) throws SQLException { if (!rs.next()) { return Collections.emptyList(); } final List<ExecutableFlow> execFlows = new ArrayList<>(); do { final int id = rs.getInt(1); final int encodingType = rs.getInt(2); final byte[] data = rs.getBytes(3); if (data != null) { final EncodingType encType = EncodingType.fromInteger(encodingType); try { final ExecutableFlow exFlow = ExecutableFlow.createExecutableFlowFromObject( GZIPUtils.transformBytesToObject(data, encType)); execFlows.add(exFlow); } catch (final IOException e) { throw new SQLException("Error retrieving flow data " + id, e); } } } while (rs.next()); return execFlows; } }