/**
 * Removes the first repo in the given list whose name matches {@code repoName}.
 * The list is left untouched when no repo with that name is present.
 */
private void removeRepoFromList(String repoName, List<Repo> repoList) {
    for (int i = 0; i < repoList.size(); i++) {
        if (repoList.get(i).getName().equals(repoName)) {
            repoList.remove(i);
            break; // only the first match is removed, mirroring the original contract
        }
    }
}
/** * Creates a name to use when scheduling a repo sync job. Calling this method multiple times * on the same parameters will always produce the <em>same</em> name. * * @param repo may not be <code>null</code> * @return name to use to schedule the job; will not be <code>null</code> */ public static String createJobName(Repo repo) { // The quartz table has a limited column width of 80 - but we need to use the names to make // jobs unique so encode the names' hashcodes to ensure we fit into the quartz job name. String jobName = Integer.toHexString(repo.getName().hashCode()); if (jobName.length() > 80) { throw new IllegalArgumentException("Job names max size is 80 chars due to DB column " + "size restrictions: " + jobName); } return jobName; }
/**
 * Creates (if necessary) and populates a job details map to contain the necessary data to
 * perform a repo sync. If there is an existing details map in the provided details object,
 * it will be reused, leaving existing data intact.
 *
 * @param details may not be <code>null</code>
 * @param repo may not be <code>null</code>
 * @return populated map used to drive a repo sync job; this will be the same map as what
 *         exists in the {@link JobDetail#getJobDataMap()} call of the details object if
 *         that call does not return <code>null</code>
 */
public static JobDataMap createJobDataMap(JobDetail details, Repo repo) {
    // Reuse the map already attached to the job details when available; otherwise start fresh.
    JobDataMap dataMap = (details != null) ? details.getJobDataMap() : new JobDataMap();
    dataMap.put(KEY_REPO_NAME, repo.getName());
    return dataMap;
}
/**
 * Validates the repo's fields and rejects it if another repo already uses the same name.
 *
 * @param c candidate repo to validate
 * @throws RepoException if a required field is invalid or the name is already taken
 */
private void validateRepo(Repo c) throws RepoException {
    validateFields(c);
    // Repo names must be unique across the system.
    if (!getRepoByName(c.getName()).isEmpty()) {
        RepoException e = new RepoException("There is already a repo with the name of ["
            + c.getName() + "]");
        e.setType(RepoException.RepoExceptionType.NAME_ALREADY_EXISTS);
        throw e;
    }
}
/**
 * Validates the individual fields of the given repo: the name is mandatory, and the
 * sync schedule (when provided) must be a parseable Quartz cron expression.
 *
 * @param repo repo whose fields are checked
 * @throws RepoException if the name is missing/blank or the sync schedule is malformed
 */
private void validateFields(Repo repo) throws RepoException {
    if (repo.getName() == null || repo.getName().trim().equals("")) {
        throw new RepoException("Repo name is required");
    }
    // A sync schedule is optional; when present it must parse as a cron expression.
    if (repo.getSyncSchedule() != null) {
        try {
            // Constructed purely for parse-time validation; the instance is discarded.
            new CronExpression(repo.getSyncSchedule());
        } catch (ParseException e) {
            // BUGFIX: corrected the "vaild" typo in the user-facing error message.
            throw new RepoException("Repo sync schedule is not a valid format.");
        }
    }
}
/**
 * Renders this relationship as a short diagnostic string containing its id, the related
 * repo's name, and the relationship type name.
 */
@Override
public String toString() {
    // NOTE(review): the segments are intentionally not separated by spaces/commas,
    // preserving the exact historical output format (e.g. "...]related repo=[...").
    StringBuilder buf = new StringBuilder("RepoRelationship: id=[");
    buf.append(this.id).append("]");
    buf.append("related repo=[").append(this.relatedRepo.getName()).append("]");
    buf.append("relation=[").append(this.getRepoRelationshipType().getName()).append("]");
    return buf.toString();
}
/**
 * Returns lightweight summaries of every repo the given resource is subscribed to.
 *
 * @param subject caller whose permissions are checked
 * @param resourceId resource whose subscriptions are listed
 * @return one {@code SubscribedRepo} (id + name) per subscription; never <code>null</code>
 * @throws PermissionException if the subject cannot view the resource
 */
public List<SubscribedRepo> findSubscriptions(Subject subject, int resourceId) {
    if (!authzManager.canViewResource(subject, resourceId)) {
        throw new PermissionException("User [" + subject + "] can't view resource with id "
            + resourceId);
    }
    List<SubscribedRepo> summaries = new ArrayList<SubscribedRepo>();
    // Default PageControl: fetch with the standard paging settings.
    for (RepoComposite composite : findResourceSubscriptions(subject, resourceId, new PageControl())) {
        Repo repo = composite.getRepo();
        summaries.add(new SubscribedRepo(repo.getId(), repo.getName()));
    }
    return summaries;
}
// NOTE(review): fragment from inside a distribution-sync method; the enclosing
// declaration and surrounding locals (source, repo, tracker, dists, distDetails,
// distReport, start) are outside this chunk.
// Announce the start of the repo's distribution sync in both the log and the
// persisted sync-results trail.
String msg = "Synchronize Distributions: [" + source.getName() + "]: syncing repo [" + repo.getName() + "]";
log.info(msg);
tracker.getRepoSyncResults().appendResults(msg);
// Convert the currently-known domain distributions into DTOs for the adapter.
translateDomainToDto(dists, distDetails);
log.info("Synchronize Distributions: [" + repo.getName() + "]: loaded existing list of size=["
    + dists.size() + "] (" + (System.currentTimeMillis() - start) + ")ms");
// Ask the source-specific adapter to diff the existing list against the remote
// repository; distReport is filled with the additions/removals to apply.
distributionSource.synchronizeDistribution(repo.getName(), distReport, distDetails);
log.info("Synchronize Distributions: [" + repo.getName() + "]: got sync report from adapter=["
    + distReport + "] (" + (System.currentTimeMillis() - start) + ")ms");
// NOTE(review): fragment from inside a package-sync method; repo, report,
// allDetails, and tracker are declared outside this chunk.
// Delegate the package diff to the adapter, then record the number of packages
// touched (new + updated) so progress reporting reflects the work done.
packageSource.synchronizePackages(repo.getName(), report, allDetails);
tracker.setPackageSyncCount(report.getNewPackages().size() + report.getUpdatedPackages().size());
// NOTE(review): fragment — the loop body (and the loadDistributionFileBits call)
// continues past this chunk boundary; the trailing statement is intentionally
// left incomplete exactly as found.
for (Repo repo : repos) {
    repoCount++;
    log.debug("downloadDistributionBits operating on repo: " + repo.getName() + " id = " + repo.getId());
    // Resolve where the distribution file lives on the remote content source.
    String remoteFetchLoc = distSource.getDistFileRemoteLocation(repo.getName(), dist.getLabel(),
        dFile.getRelativeFilename());
    InputStream bitsStream = pc.getAdapterManager().loadDistributionFileBits(contentSourceId,
/**
 * Downloads the distribution file bits for this sync's repo, recording progress in the
 * tracker's sync results. No-op for providers that are not {@link DistributionSource}s.
 *
 * @param tracker carries the sync results and progress watcher for the current sync run
 * @return the same tracker, with its results updated
 * @throws SyncException on sync failure
 * @throws InterruptedException if the sync is cancelled while downloading
 */
public SyncTracker synchronizeDistributionBits(SyncTracker tracker) throws SyncException,
    InterruptedException {
    // Only distribution-capable providers have bits to download.
    if (!(provider instanceof DistributionSource)) {
        return tracker;
    }
    Subject overlord = subjectManager.getOverlord();
    // BUGFIX: both status messages were missing the closing "]" after the repo name.
    tracker.getRepoSyncResults().appendResults(
        "Synchronize Distributions: [" + repo.getName() + "] Starting Distribution bits download.");
    tracker.setRepoSyncResults(repoManager.mergeRepoSyncResults(tracker.getRepoSyncResults()));
    // The actual byte download runs with overlord privileges on behalf of the sync job.
    contentSourceManager.downloadDistributionBits(overlord, source);
    tracker.getRepoSyncResults().appendResults(
        "Synchronize Distributions: [" + repo.getName() + "] finished bits download.");
    tracker.getProgressWatcher().finishWork(provider.getSyncProgressWeight().getDistribtutionBitsWeight());
    return tracker;
}
/**
 * Schedules one sync job per content source adapter, plus one job per repo associated with
 * each source, so every entity is synchronized on its defined sync schedule. This must only
 * be called when all content source adapters have been initialized.
 */
public void scheduleSyncJobs() {
    if (this.adapterManager == null) {
        return; // nothing to schedule until the adapter manager exists
    }
    for (ContentSource contentSource : this.adapterManager.getAllContentSources()) {
        try {
            getLog().debug("scheduleSyncJobs :: Scheduling CP job: " + contentSource.getName());
            scheduleProviderSyncJob(contentSource);
            // Also schedule a sync job for every repo tied to this source.
            ContentSourceManagerLocal contentSourceManager = LookupUtil.getContentSourceManager();
            PageList<Repo> repos = contentSourceManager.getAssociatedRepos(
                LookupUtil.getSubjectManager().getOverlord(), contentSource.getId(),
                PageControl.getUnlimitedInstance());
            if (repos == null) {
                continue;
            }
            for (Repo repo : repos) {
                getLog().debug("scheduleSyncJobs :: Scheduling REPO job: " + repo.getName());
                scheduleRepoSyncJob(repo);
            }
        } catch (Exception e) {
            // One bad source must not prevent the remaining sources from being scheduled.
            getLog().warn("Could not schedule sync job for content source [" + contentSource + "].", e);
        }
    }
}
// NOTE(review): interleaved fragments from a larger sync method; the conditional
// guarding the early "return false" (repo already syncing) and the code between
// these segments are outside this chunk. Tokens preserved exactly as found.
progress.append("Start synchronization of Repository [").append(repo.getName()).append("]\n");
progress.append(new Date()).append(": ");
progress.append("Getting currently known list of content source packages...\n");
// Early-out path: a sync for this repo is already in flight, so this request is dropped.
log.info("Repository [" + repo.getName()
    + "] is already currently being synchronized - this sync request will be ignored.");
return false;
// Completion path: summarize the outcome in the progress trail.
progress.append("\n");
progress.append(new Date()).append(": ");
progress.append("Repository [").append(repo.getName()).append("] ");
progress.append("completed syncing with ");
progress.append((tracker.getRepoSyncResults().getStatus() == ContentSyncStatus.FAILURE) ? "one or more" : "no");
// NOTE(review): fragments from a package-bits sync method; the two leading "+"
// lines continue string concatenations whose openings are outside this chunk.
// Tokens preserved exactly as found.
    + "package bits sync for repo [" + repo.getName() + "]";
log.info(msg);
tracker.getRepoSyncResults().appendResults(msg);
    + "package bits sync for repo [" + repo.getName() + "]";
log.info(msg);
tracker.getRepoSyncResults().appendResults(msg);
// Timing message: how long it took to load the package list for this sync pass.
String msg = "Synchronize Package Bits: [" + source.getName() + "], repo [" + repo.getName()
    + "]: loaded package list for sync (" + (System.currentTimeMillis() - start) + ")ms";
log.info(msg);