public void initializeProject(WorkItem wi) {
  // Parse the current project information
  try {
    // Get the metadata information about the project
    Field pjNameFld = wi.getField("projectName");
    Field pjTypeFld = wi.getField("projectType");
    Field pjCfgPathFld = wi.getField("fullConfigSyntax");
    Field pjChkptFld = wi.getField("lastCheckpoint");

    // Convert to our class fields
    // First obtain the project name field
    if (null != pjNameFld && null != pjNameFld.getValueAsString()) {
      projectName = pjNameFld.getValueAsString();
    } else {
      LOGGER.warning("Project info did not provide a value for the 'projectName' field!");
      projectName = "";
    }

    // Next, we'll need to know the project type
    if (null != pjTypeFld && null != pjTypeFld.getValueAsString()) {
      projectType = pjTypeFld.getValueAsString();
      if (isBuild()) {
        // Next, we'll need to know the current build checkpoint for this configuration
        Field pjRevFld = wi.getField("revision");
        if (null != pjRevFld && null != pjRevFld.getItem()) {
          projectRevision = pjRevFld.getItem().getId();
        } else {
          projectRevision = "";
          LOGGER.warning("Project info did not provide a value for the 'revision' field!");
        }
      }
    } else {
      LOGGER.warning("Project info did not provide a value for the 'projectType' field!");
      projectType = "";
    }

    // Most important is the configuration path
    if (null != pjCfgPathFld && null != pjCfgPathFld.getValueAsString()) {
      fullConfigSyntax = pjCfgPathFld.getValueAsString();
    } else {
      LOGGER.severe("Project info did not provide a value for the 'fullConfigSyntax' field!");
      fullConfigSyntax = "";
    }

    // Finally, we'll need to store the last checkpoint to figure out differences, etc.
    if (null != pjChkptFld && null != pjChkptFld.getDateTime()) {
      lastCheckpoint = pjChkptFld.getDateTime();
    } else {
      LOGGER.warning("Project info did not provide a value for the 'lastCheckpoint' field!");
      lastCheckpoint = Calendar.getInstance().getTime();
    }
  } catch (NoSuchElementException nsee) {
    LOGGER.severe("Project info did not provide a value for field " + nsee.getMessage());
  }
}
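// The isBuild() helper referenced above is not shown in this excerpt. A minimal sketch of
// what it might look like, assuming projectType carries the Integrity configuration type
// ("Normal", "Variant", or "Build"); a build configuration is pinned to a checkpoint,
// which is why initializeProject() only reads the 'revision' field in that case.
public boolean isBuild() {
  return "Build".equalsIgnoreCase(projectType);
}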
/** {@inheritDoc} */
@Override
public UpdateScmResult executeUpdateCommand(
    ScmProviderRepository repository, ScmFileSet fileSet, ScmVersion scmVersion)
    throws ScmException {
  getLogger()
      .info("Attempting to synchronize sandbox in " + fileSet.getBasedir().getAbsolutePath());
  List<ScmFile> updatedFiles = new ArrayList<ScmFile>();
  IntegrityScmProviderRepository iRepo = (IntegrityScmProviderRepository) repository;
  Sandbox siSandbox = iRepo.getSandbox();
  try {
    // Make sure we've got a valid sandbox, otherwise create it...
    if (siSandbox.create()) {
      Response res = siSandbox.resync();
      // Let's capture what we got from running this resync
      WorkItemIterator wit = res.getWorkItems();
      while (wit.hasNext()) {
        WorkItem wi = wit.next();
        if (wi.getModelType().equals(SIModelTypeName.MEMBER)) {
          Result message = wi.getResult();
          getLogger()
              .debug(wi.getDisplayId() + " " + (null != message ? message.getMessage() : ""));
          if (null != message && message.getMessage().length() > 0) {
            updatedFiles.add(
                new ScmFile(
                    wi.getDisplayId(),
                    message.getMessage().equalsIgnoreCase("removed")
                        ? ScmFileStatus.DELETED
                        : ScmFileStatus.UPDATED));
          }
        }
      }
      return new UpdateScmResult(res.getCommandString(), updatedFiles);
    } else {
      return new UpdateScmResult("si resync", "Failed to synchronize workspace", "", false);
    }
  } catch (APIException aex) {
    ExceptionHandler eh = new ExceptionHandler(aex);
    getLogger().error("MKS API Exception: " + eh.getMessage());
    getLogger().info(eh.getCommand() + " exited with return code " + eh.getExitCode());
    return new UpdateScmResult(
        eh.getCommand(), eh.getMessage(), "Exit Code: " + eh.getExitCode(), false);
  }
}
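// Hedged usage sketch (not part of the provider): how a caller going through the Maven SCM
// API might consume the result of the resync performed by executeUpdateCommand(). The
// 'scmManager', 'repository', and 'workspaceDir' values are assumed to have been set up
// elsewhere via the standard Maven SCM bootstrap.
void printResyncResult(ScmManager scmManager, ScmRepository repository, File workspaceDir)
    throws ScmException {
  UpdateScmResult result = scmManager.update(repository, new ScmFileSet(workspaceDir));
  if (result.isSuccess()) {
    // Each updated or removed member shows up as an ScmFile with a status
    for (ScmFile file : result.getUpdatedFiles()) {
      System.out.println(file.getStatus() + " " + file.getPath());
    }
  } else {
    System.err.println("Resync failed: " + result.getProviderMessage());
  }
}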
/**
 * Performs a projectCPDiff on this Integrity CM Project
 *
 * @param serverConf Authenticated Integrity API Session
 * @param past Past date
 * @return Set of closed CP IDs
 * @throws APIException
 * @throws AbortException
 */
public Set<String> projectCPDiff(IntegrityConfigurable serverConf, Date past)
    throws APIException, AbortException {
  final SimpleDateFormat dateFormat = new SimpleDateFormat("MMM d, yyyy h:mm:ss aa");

  // Construct the command
  IAPICommand command =
      CommandFactory.createCommand(IAPICommand.PROJECT_CPDIFF_COMMAND, serverConf);
  command.addOption(new APIOption(IAPIOption.PROJECT, fullConfigSyntax));
  command.addOption(new APIOption(IAPIOption.RECURSE));
  MultiValue mv = APIUtils.createMultiValueField(",", "id", "user");
  command.addOption(new APIOption(IAPIOption.FIELDS, mv));
  command.addOption(new APIOption(IAPIOption.REV, "asof:" + dateFormat.format(past)));

  Set<String> projectCPIDs = new HashSet<String>();

  Response res = command.execute();
  if (null != res) {
    if (res.getExitCode() == 0) {
      WorkItem wi = res.getWorkItem(getConfigurationPath());
      Field cpField = wi.getField("CPEntries");
      for (Iterator<Item> it = cpField.getList().iterator(); it.hasNext(); ) {
        Item cpInfo = it.next();
        Field idField = cpInfo.getField("id");
        String id = idField.getValueAsString();
        projectCPIDs.add(id);
        // The 'user' field is requested above but not currently used beyond this point
        Field userField = cpInfo.getField("user");
        String user = userField.getValueAsString();
      }
    } else {
      LOGGER.severe("An error occurred during projectCPDiff!");
    }
  } else {
    LOGGER.severe("An error occurred during projectCPDiff!");
  }
  return projectCPIDs;
}
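// Hedged usage sketch: collect the change packages closed in the last 24 hours. 'project'
// is assumed to be an initialized instance of the class containing projectCPDiff(), and
// 'serverConfig' an authenticated IntegrityConfigurable.
Date past = new Date(System.currentTimeMillis() - TimeUnit.HOURS.toMillis(24));
Set<String> closedCPs = project.projectCPDiff(serverConfig, past);
for (String cpId : closedCPs) {
  LOGGER.info("Closed change package: " + cpId);
}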
/**
 * Parses the output from the si viewproject command to get a list of members and updates the
 * Derby DB
 *
 * @param workItems WorkItemIterator
 * @throws APIException
 * @throws SQLException
 * @throws InterruptedException
 * @throws ExecutionException
 */
public void parseProject(WorkItemIterator workItems)
    throws APIException, SQLException, InterruptedException, ExecutionException {
  ExecutorService executor = null;
  Map<String, String> pjConfigHash = new Hashtable<String, String>();
  List<Future<Void>> futures = new ArrayList<Future<Void>>();

  // Set up the Derby DB for this project
  // Create a fresh set of tables for this project
  DerbyUtils.createCMProjectTables(
      DescriptorImpl.INTEGRITY_DESCRIPTOR.getDataSource(), this.getProjectCacheTable());
  LOGGER.log(Level.INFO, "Starting parse tasks for Derby DB");

  final ThreadFactory threadFactory =
      new ThreadFactoryBuilder().setNameFormat("Parse-Derby-Project-Task-%d").build();
  // Initialize the executor for folder path processing
  executor = Executors.newFixedThreadPool(10, threadFactory);

  pjConfigHash.put(this.getProjectName(), this.getConfigurationPath());

  while (workItems.hasNext()) {
    WorkItem wi = workItems.next();
    if (wi.getModelType().equals(SIModelTypeName.SI_SUBPROJECT)) {
      // Parse folders separately from members in an asynchronous environment. This must
      // complete before member parsing!
      LOGGER.log(
          Level.FINE, "Executing parse folder task: " + wi.getField("name").getValueAsString());
      Map<String, String> future = executor.submit(new ParseProjectFolderTask(wi, this)).get();
      for (String key : future.keySet()) {
        LOGGER.log(
            Level.FINE,
            "Adding folder key in project configuration. Key: "
                + key
                + ", Value: "
                + future.get(key));
        pjConfigHash.put(key, future.get(key));
      }
    } else if (wi.getModelType().equals(SIModelTypeName.MEMBER)) {
      // Parse member tasks
      LOGGER.log(
          Level.FINE, "Executing parse member task: " + wi.getField("name").getValueAsString());
      futures.add(executor.submit(new ParseProjectMemberTask(wi, pjConfigHash, this)));
    } else {
      LOGGER.log(
          Level.WARNING,
          "View project output contains an invalid model type: " + wi.getModelType());
    }
  }

  for (Future<Void> f : futures) {
    // Wait for all member parse threads to finish
    f.get();
  }

  LOGGER.log(Level.INFO, "Parsing project " + this.getConfigurationPath() + " complete!");

  if (null != executor) {
    executor.shutdown();
    executor.awaitTermination(2, TimeUnit.MINUTES);
    LOGGER.log(Level.FINE, "Parse Project Executor shutdown.");
  }
}
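// Minimal sketch of the task shape implied by executor.submit(new ParseProjectFolderTask(wi, this)).get()
// above: a Callable that maps a subproject name to its configuration path so parseProject()
// can merge it into pjConfigHash before member tasks run. The 'configpath' field name, the
// enclosing IntegrityCMProject type, and the body are assumptions; the real task also
// records the folder in the Derby DB.
public class ParseProjectFolderTask implements Callable<Map<String, String>> {
  private final WorkItem workItem;
  private final IntegrityCMProject project; // retained to mirror the constructor used above

  public ParseProjectFolderTask(WorkItem workItem, IntegrityCMProject project) {
    this.workItem = workItem;
    this.project = project;
  }

  @Override
  public Map<String, String> call() throws Exception {
    Map<String, String> folderConfig = new HashMap<String, String>();
    // Derive the subproject's name and configuration path from the work item
    String name = workItem.getField("name").getValueAsString();
    String configPath = workItem.getField("configpath").getValueAsString(); // field name assumed
    folderConfig.put(name, configPath);
    return folderConfig;
  }
}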