/**
 * Looks up the key currently mapped to the given value.
 *
 * @param value the value whose key is requested
 * @return the key associated with {@code value}, as reported by the decorated map
 */
@Override
public K getKey(final Object value) {
    return decorated().getKey(value);
}
/**
 * Returns the value view of the decorated map.
 *
 * @return the set of values held by the decorated map
 */
@Override
public Set<V> values() {
    return decorated().values();
}
/**
 * Rebuilds the column-name caches from the current result set metadata.
 * Every column name is mapped to its 1-based index; when geometry fields are
 * excluded, their names and indices are additionally recorded in
 * {@code cachedGeomColumnNames} so they can be filtered out later.
 *
 * @throws SQLException on metadata or connection failure
 */
protected void cacheColumnNames() throws SQLException {
    cachedColumnNames = new DualHashBidiMap<>();
    cachedGeomColumnNames = new DualHashBidiMap<>();
    try (Resource res = resultSetHolder.getResource()) {
        ResultSetMetaData metaData = res.getResultSet().getMetaData();
        final int columnCount = metaData.getColumnCount();
        for (int col = 1; col <= columnCount; col++) {
            cachedColumnNames.put(metaData.getColumnName(col), col);
        }
        if (excludeGeomFields) {
            // Remember which cached columns are geometry columns.
            for (String geomField : SFSUtilities.getGeometryFields(getConnection(), location)) {
                cachedGeomColumnNames.put(geomField, cachedColumnNames.get(geomField));
            }
        }
    }
}
/**
 * Returns the vertex registered under the given name.
 *
 * @param vertexName name the vertex was registered with
 * @return the matching vertex, or {@code null} if none is registered
 */
public synchronized Vertex getVertex(String vertexName) {
    return vertices.get(vertexName);
}
if (addressToSocketId.containsKey(source)) { continue; int id = ((GetNextIdNetworkResponse) resp).getId(); addressToSocketId.put(source, id); for (Entry<InetAddress, Integer> entry : addressToSocketId.entrySet()) { int id = entry.getValue(); InetAddress source = entry.getKey(); int id = addressToSocketId.get(req.getSourceAddress()); socketIdToRequests.put(id, req); int id = addressToSocketId.get(req.getSourceAddress()); for (int id : addressToSocketId.values()) { networkBus.send(new CloseNetworkRequest(id));
/**
 * Creates the portable file system and wires up the three standard streams
 * (stdin/stdout/stderr) as pre-opened descriptors 0, 1 and 2.
 *
 * @param kem      exception manager passed through to the stream wrappers
 * @param fileUtil file utilities used by this file system
 */
@Inject
public PortableFileSystem(KExceptionManager kem, FileUtil fileUtil) {
    this.kem = kem;
    this.fileUtil = fileUtil;
    descriptors.put(0L, FileDescriptor.in);
    files.put(FileDescriptor.in, new InputStreamFile(System.in, kem));
    descriptors.put(1L, FileDescriptor.out);
    files.put(FileDescriptor.out, new OutputStreamFile(System.out, kem));
    descriptors.put(2L, FileDescriptor.err);
    files.put(FileDescriptor.err, new OutputStreamFile(System.err, kem));
}
pkValues.add(rowPk.get((int) rowId)); for(long fetchRowId = rowId + 1; fetchRowId <= getRowCount(); fetchRowId++) { if(fetchedRows++ < fetchSize) { Long key = rowPk.get((int)fetchRowId); if(key != null) { pkValues.add(key); while(it.hasNext()) { if(fetchedRows++ < fetchSize) { pkValues.add(rowPk.get(it.next())); } else { break; cacheColumnNames(); boolean ignoreFirstColumn = !cachedColumnNames.containsKey(pk_name); if(whereClause.length()>0) { if (ignoreFirstColumn) { row[idColumn - 1 - offset] = lineRs.getObject(idColumn); cache.put(rowPk.getKey(lineRs.getLong(pk_name)).longValue(), row);
/**
 * Returns the string key representing the given decorated value, computing and
 * caching it on first use. Known domain types (Month, TimeStep, TopiaEntity)
 * get a type-prefixed key; anything else falls back to String.valueOf.
 *
 * @param decoratedValue value to undecorate; may be null
 * @return the cached or newly built key; null only when decoratedValue is null
 *         and no key was cached for null
 */
@Override public String undecorate(Object decoratedValue) {
    // Fast path: reverse lookup in the bidi cache.
    String result = cache.getKey(decoratedValue);
    if (result == null && decoratedValue != null) {
        if (decoratedValue instanceof Month) {
            result = "Month" + SEP + ((Month)decoratedValue).getMonthNumber();
        } else if (decoratedValue instanceof TimeStep) {
            result = "TimeStep" + SEP + ((TimeStep)decoratedValue).getStep();
        } else if (decoratedValue instanceof TopiaEntity) {
            result = ((TopiaEntity)decoratedValue).getTopiaId() + SEP + decoratedValue;
        } else {
            result = String.valueOf(decoratedValue);
        }
        // The same semantics often recur across matrices; to keep string memory
        // usage down we take the interned representation before using it as a key.
        result = result.intern();
        cache.put(result, decoratedValue);
    }
    return result;
}
public RuntimeStack getLinuxRuntime() throws MojoExecutionException { // todo: add unit tests if (StringUtils.equalsIgnoreCase(javaVersion, JRE_8)) { final String fixRuntime = StringUtils.isEmpty(webContainer) ? JRE_8 : webContainer; if (runtimeStackMap.containsKey(fixRuntime)) { return runtimeStackMap.get(fixRuntime); } else { throw new MojoExecutionException( String.format("Unknown value of <webContainer>. Supported values are %s.", StringUtils.join(runtimeStackMap.keySet(), ","))); } } throw new MojoExecutionException(String.format( "Unknown value of <javaVersion>. Supported values is %s", JRE_8)); }
/**
 * Maps the configured pricing tier string to its {@code PricingTier} constant.
 *
 * @return the pricing tier matching the configured value
 * @throws MojoExecutionException if the configured value is not a known tier
 */
public PricingTier toPricingTier() throws MojoExecutionException {
    if (!pricingTierBidiMap.containsKey(pricingTier)) {
        throw new MojoExecutionException("Unknown value of the pricingTier, please correct it in pom.xml.");
    }
    return pricingTierBidiMap.get(pricingTier);
}
/**
 * Registers a vertex in this DAG, rejecting duplicate names.
 *
 * @param vertex vertex to add
 * @return this DAG, for call chaining
 * @throws IllegalStateException if a vertex with the same name already exists
 */
public synchronized DAG addVertex(Vertex vertex) {
    final String name = vertex.getName();
    if (vertices.containsKey(name)) {
        throw new IllegalStateException(
            "Vertex " + name + " already defined!");
    }
    vertices.put(name, vertex);
    return this;
}
private void cacheTree(ITracerObject treeNode) { traceTreeCache.put(traceTreeCache.size(), treeNode); if (treeNode instanceof RefToTracerNodeObject) { //no need to add children nodes of reference node to the treeCache //because they will be added from original node return; } Iterable<ITracerObject> children = treeNode.getChildren(); for (ITracerObject child : children) { cacheTree(child); } }
if(fields.contains("*")){ StringBuilder allFields = new StringBuilder(); BidiMap<Integer, String> map = cachedColumnNames.inverseBidiMap(); Iterable<Integer> keys = new TreeSet<>(map.keySet()); for(Integer i : keys){ if(!excludeGeomFields || !cachedGeomColumnNames.containsValue(i)) { if (allFields.length() > 0) { allFields.append(","); allFields.append(TableLocation.quoteIdentifier(map.get(i)));
@Override public int findColumn(String label) throws SQLException { if(cachedColumnNames == null) { cacheColumnNames(); } Integer columnId = cachedColumnNames.get(label); if(columnId == null) { // Search with insensitive case for(Map.Entry<String, Integer> entry : cachedColumnNames.entrySet()) { if(entry.getKey().equalsIgnoreCase(label)) { return entry.getValue(); } } throw new SQLException("Column "+label+" does not exists"); } return columnId; }
/**
 * Loads every primary-key value of the table into {@code rowPk}, mapping
 * 1-based row position to PK value, reporting per-row progress on {@code pm}.
 * The statement can be cancelled through the monitor's cancel event.
 *
 * NOTE(review): the SQL is built by string concatenation from pk_name,
 * location and orderBy — presumably these are trusted identifiers, not user
 * input; verify against callers.
 *
 * @param pm progress monitor used for progress reporting and cancellation
 * @throws SQLException declared, but any SQLException raised inside is
 *         actually rewrapped as IllegalArgumentException (see catch below)
 */
private void cachePrimaryKey(ProgressMonitor pm) throws SQLException {
    ProgressMonitor cachePm = pm.startTask(getRowCount());
    // Reuse the existing map when possible; otherwise create a fresh bidi map.
    if(rowPk == null) {
        rowPk = new DualHashBidiMap<>();
    } else {
        rowPk.clear();
    }
    try(Connection connection = dataSource.getConnection();
        Statement st = connection.createStatement()) {
        // Cancelling the monitor cancels the running statement.
        PropertyChangeListener listener = EventHandler.create(PropertyChangeListener.class, st, "cancel");
        pm.addPropertyChangeListener(ProgressMonitor.PROP_CANCEL, listener);
        st.setFetchSize(fetchSize);
        connection.setAutoCommit(false); // Use postgre cursor
        try(ResultSet rs = st.executeQuery("SELECT "+pk_name+" FROM "+location+" "+ orderBy)) {
            // Cache the primary key values, one progress tick per row.
            int pkRowId = 0;
            while (rs.next()) {
                pkRowId++;
                rowPk.put(pkRowId, rs.getLong(1));
                cachePm.endTask();
            }
        } finally {
            // Always detach the cancel listener, even on failure.
            pm.removePropertyChangeListener(listener);
        }
    } catch (SQLException ex) {
        // NOTE(review): rewraps SQLException as an unchecked exception even
        // though the method declares throws SQLException — confirm intended.
        throw new IllegalArgumentException(ex);
    }
}
Preconditions.checkArgument(topologicalVertexStack.size() == vertices.size(), "size of topologicalVertexStack is:" + topologicalVertexStack.size() + " while size of vertices is:" + vertices.size() + ", make sure they are the same in order to sort the vertices"); while(!topologicalVertexStack.isEmpty()) { Vertex vertex = vertices.get(topologicalVertexStack.pop());
/** * Check if this RowSet is ready for update. * @throws SQLException If this rowset has no exposed PK column. */ private void checkUpdate(int column) throws SQLException { if(cachedColumnNames == null) { cacheColumnNames(); } if(pk_name.isEmpty() || !cachedColumnNames.containsKey(pk_name)) { throw new SQLException(I18N.tr("Edition is disabled on table without single numeric primary key.")); } //else if(cachedColumnNames.get(pk_name) == column) { // throw new SQLException(I18N.tr("Can not edit primary key values")); //} }
@VisibleForTesting Deque<String> verify(boolean restricted) throws IllegalStateException { if (vertices.isEmpty()) { throw new IllegalStateException("Invalid dag containing 0 vertices"); Map<Vertex, Set<String>> inboundVertexMap = new HashMap<Vertex, Set<String>>(); Map<Vertex, Set<String>> outboundVertexMap = new HashMap<Vertex, Set<String>>(); for (Vertex v : vertices.values()) { if (vertexMap.containsKey(v.getName())) { throw new IllegalStateException("DAG contains multiple vertices" for (Vertex vertex : vertices.values()) { for (RootInputLeafOutput<InputDescriptor, InputInitializerDescriptor> input : vertex.getInputs()) {
/**
 * Returns the number of visible columns, computing and caching the value on
 * first call. Geometry columns are subtracted when {@code excludeGeomFields}
 * is set, unless that would leave zero columns, in which case the full count
 * is exposed instead.
 *
 * @return the visible column count
 * @throws SQLException on metadata access failure
 */
@Override
public int getColumnCount() throws SQLException {
    if (cachedColumnNames == null || cachedGeomColumnNames == null) {
        cacheColumnNames();
    }
    if (cachedColumnCount != -1) {
        return cachedColumnCount;
    }
    try (Resource res = resultSetHolder.getResource()) {
        final int totalColumns = res.getResultSet().getMetaData().getColumnCount();
        final int hiddenFields = excludeGeomFields ? cachedGeomColumnNames.size() : 0;
        cachedColumnCount = totalColumns - hiddenFields;
        if (cachedColumnCount == 0) {
            // Hiding geometry columns would hide everything; expose all columns.
            cachedColumnCount = totalColumns;
        }
        return cachedColumnCount;
    }
}