private static void doInitializeDdmlib(@NotNull String adbPath) {
   synchronized (myDdmsLock) {
     if (!myDdmLibInitialized) {
       myDdmLibInitialized = true;
       DdmPreferences.setLogLevel(Log.LogLevel.INFO.getStringValue());
       DdmPreferences.setTimeOut(AndroidUtils.TIMEOUT);
       AndroidDebugBridge.init(AndroidEnableAdbServiceAction.isAdbServiceEnabled());
       LOG.info("DDMLib initialized");
       final AndroidDebugBridge bridge = AndroidDebugBridge.createBridge(adbPath, true);
       waitUntilConnect(bridge);
       if (!bridge.isConnected()) {
         LOG.info("Failed to connect debug bridge");
       }
     } else {
       final AndroidDebugBridge bridge = AndroidDebugBridge.getBridge();
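        // Recreate the bridge; a restart of the adb server is forced if adb crashed or the existing bridge lost its connection.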
       final boolean forceRestart = myAdbCrashed || (bridge != null && !bridge.isConnected());
       if (forceRestart) {
         LOG.info("Restart debug bridge: " + (myAdbCrashed ? "crashed" : "disconnected"));
       }
       final AndroidDebugBridge newBridge = AndroidDebugBridge.createBridge(adbPath, forceRestart);
       waitUntilConnect(newBridge);
       if (!newBridge.isConnected()) {
         LOG.info("Failed to connect debug bridge after restart");
       }
     }
   }
 }
    private void processOrphanModules() {
      if (myProject.isDisposed()) return;
      if (ExternalSystemDebugEnvironment.DEBUG_ORPHAN_MODULES_PROCESSING) {
        LOG.info(
            String.format(
                "Checking for orphan modules. External paths returned by external system: '%s'",
                myExternalModulePaths));
      }
      PlatformFacade platformFacade = ServiceManager.getService(PlatformFacade.class);
      List<Module> orphanIdeModules = ContainerUtilRt.newArrayList();
      String externalSystemIdAsString = myExternalSystemId.toString();

      for (Module module : platformFacade.getModules(myProject)) {
        String s = module.getOptionValue(ExternalSystemConstants.EXTERNAL_SYSTEM_ID_KEY);
        String p = module.getOptionValue(ExternalSystemConstants.LINKED_PROJECT_PATH_KEY);
        if (ExternalSystemDebugEnvironment.DEBUG_ORPHAN_MODULES_PROCESSING) {
          LOG.info(
              String.format(
                  "IDE module: EXTERNAL_SYSTEM_ID_KEY - '%s', LINKED_PROJECT_PATH_KEY - '%s'.",
                  s, p));
        }
        if (externalSystemIdAsString.equals(s) && !myExternalModulePaths.contains(p)) {
          orphanIdeModules.add(module);
          if (ExternalSystemDebugEnvironment.DEBUG_ORPHAN_MODULES_PROCESSING) {
          LOG.info(
              "External paths no longer contain the IDE module's LINKED_PROJECT_PATH_KEY => adding to orphan IDE modules.");
          }
        }
      }

      if (!orphanIdeModules.isEmpty()) {
        ruleOrphanModules(orphanIdeModules, myProject, myExternalSystemId);
      }
    }
  public Process createProcess() throws ExecutionException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Executing [" + getCommandLineString() + "]");
    }

    List<String> commands;
    try {
      checkWorkingDirectory();

      if (StringUtil.isEmptyOrSpaces(myExePath)) {
        throw new ExecutionException(
            IdeBundle.message("run.configuration.error.executable.not.specified"));
      }

      commands = CommandLineUtil.toCommandLine(myExePath, myProgramParams.getList());
    } catch (ExecutionException e) {
      LOG.info(e);
      throw e;
    }

    try {
      return startProcess(commands);
    } catch (IOException e) {
      LOG.info(e);
      throw new ProcessNotCreatedException(e.getMessage(), e, this);
    }
  }
 public static boolean isJsonSchema(
     @NotNull JsonSchemaExportedDefinitions definitions,
     @NotNull VirtualFile key,
     @NotNull final String string,
     Consumer<String> errorConsumer)
     throws IOException {
   final JsonSchemaReader reader = new JsonSchemaReader(key);
   java.io.StringReader stringReader = new java.io.StringReader(string);
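    // first pass: parse without cross-file definitions; a failure here is reported to the consumer but not rethrown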
   try {
     reader.read(stringReader, null);
   } catch (Exception e) {
     LOG.info(e);
     errorConsumer.consume(e.getMessage());
     return false;
   }
    // run two stages so that plain syntax errors do not clear the cache
   stringReader = new java.io.StringReader(string);
   try {
     reader.read(stringReader, definitions);
   } catch (Exception e) {
     LOG.info(e);
     errorConsumer.consume(e.getMessage());
     throw e;
   }
   return true;
 }
  @Override
  public void initialize(
      @NotNull ManagingFS managingFS, @NotNull FileWatcherNotificationSink notificationSink) {
    myManagingFS = managingFS;
    myNotificationSink = notificationSink;

    boolean disabled = Boolean.parseBoolean(System.getProperty(PROPERTY_WATCHER_DISABLED));
    myExecutable = getExecutable();

    if (disabled) {
      LOG.info("Native file watcher is disabled");
    } else if (myExecutable == null) {
      LOG.info("Native file watcher is not supported on this platform");
    } else if (!myExecutable.exists()) {
      notifyOnFailure(ApplicationBundle.message("watcher.exe.not.found"), null);
    } else if (!myExecutable.canExecute()) {
      notifyOnFailure(
          ApplicationBundle.message("watcher.exe.not.exe", myExecutable),
          new NotificationListener() {
            @Override
            public void hyperlinkUpdate(
                @NotNull Notification notification, @NotNull HyperlinkEvent event) {
              ShowFilePathAction.openFile(myExecutable);
            }
          });
    } else {
      try {
        startupProcess(false);
        LOG.info("Native file watcher is operational.");
      } catch (IOException e) {
        LOG.warn(e.getMessage());
        notifyOnFailure(ApplicationBundle.message("watcher.failed.to.start"), null);
      }
    }
  }
  @Override
  protected void save(@NotNull Collection<VirtualFile> rootsToSave) throws VcsException {
    LOG.info("saving " + rootsToSave);

    for (VirtualFile root : rootsToSave) {
      final String message = GitHandlerUtil.formatOperationName("Stashing changes from", root);
      LOG.info(message);
      final String oldProgressTitle = myProgressIndicator.getText();
      myProgressIndicator.setText(message);
      GitRepository repository = myRepositoryManager.getRepositoryForRoot(root);
      if (repository == null) {
        LOG.error("Repository is null for root " + root);
      } else {
        GitCommandResult result = myGit.stashSave(repository, myStashMessage);
        if (result.success() && somethingWasStashed(result)) {
          myStashedRoots.add(root);
        } else {
          String error =
              "stash " + repository.getRoot() + ": " + result.getErrorOutputAsJoinedString();
          if (!result.success()) {
            throw new VcsException(error);
          } else {
            LOG.warn(error);
          }
        }
      }
      myProgressIndicator.setText(oldProgressTitle);
    }
  }
 @Nullable
 private static RepositoryInfo getDetailedRepoInfo(
     @NotNull String url,
     @NotNull String login,
     @NotNull String password,
     @NotNull String owner,
     @NotNull String name) {
   try {
     final String request = "/repos/" + owner + "/" + name;
     JsonElement jsonObject = GithubApiUtil.getRequest(url, login, password, request);
     if (jsonObject == null) {
       LOG.info(
           String.format(
               "Information about repository is unavailable. Owner: %s, Name: %s", owner, name));
       return null;
     }
     return parseSingleRepositoryInfo(jsonObject.getAsJsonObject());
   } catch (IOException e) {
      LOG.info(
          String.format(
              "Exception was thrown when trying to retrieve information about repository. Owner: %s, Name: %s",
              owner, name));
     return null;
   }
 }
  /**
    * Returns true if the root was loaded with a conflict. False is returned in all other cases: on
    * success and on any other error.
   */
  private boolean loadRoot(final VirtualFile root) {
    LOG.info("loadRoot " + root);
    myProgressIndicator.setText(GitHandlerUtil.formatOperationName("Unstashing changes to", root));

    GitRepository repository = myRepositoryManager.getRepositoryForRoot(root);
    if (repository == null) {
      LOG.error("Repository is null for root " + root);
      return false;
    }

    GitSimpleEventDetector conflictDetector =
        new GitSimpleEventDetector(GitSimpleEventDetector.Event.MERGE_CONFLICT_ON_UNSTASH);
    GitCommandResult result = myGit.stashPop(repository, conflictDetector);
    VfsUtil.markDirtyAndRefresh(false, true, false, root);
    if (result.success()) {
      return false;
    } else if (conflictDetector.hasHappened()) {
      return true;
    } else {
      LOG.info("unstash failed " + result.getErrorOutputAsJoinedString());
      GitUIUtil.notifyImportantError(
          myProject, "Couldn't unstash", "<br/>" + result.getErrorOutputAsHtmlString());
      return false;
    }
  }
 private GitUpdateResult handleRebaseFailure(
     GitLineHandler pullHandler,
     GitRebaseProblemDetector rebaseConflictDetector,
     final GitMessageWithFilesDetector untrackedWouldBeOverwrittenDetector) {
   if (rebaseConflictDetector.isMergeConflict()) {
     LOG.info("handleRebaseFailure merge conflict");
     final boolean allMerged = new MyConflictResolver(myProject, myGit, myRoot, myRebaser).merge();
     return allMerged
         ? GitUpdateResult.SUCCESS_WITH_RESOLVED_CONFLICTS
         : GitUpdateResult.INCOMPLETE;
   } else if (untrackedWouldBeOverwrittenDetector.wasMessageDetected()) {
     LOG.info("handleRebaseFailure: untracked files would be overwritten by checkout");
     UntrackedFilesNotifier.notifyUntrackedFilesOverwrittenBy(
         myProject,
         ServiceManager.getService(myProject, GitPlatformFacade.class),
         untrackedWouldBeOverwrittenDetector.getFiles(),
         "rebase",
         null);
     return GitUpdateResult.ERROR;
   } else {
     LOG.info("handleRebaseFailure error " + pullHandler.errors());
     GitUIUtil.notifyImportantError(
         myProject, "Rebase error", GitUIUtil.stringifyErrors(pullHandler.errors()));
     return GitUpdateResult.ERROR;
   }
 }
 private void performShutdown() {
   if (myShutDown.compareAndSet(false, true)) {
     LOG.info("VFS dispose started");
     FSRecords.dispose();
     LOG.info("VFS dispose completed");
   }
 }
  private ChangeListWorker(final ChangeListWorker worker) {
    myProject = worker.myProject;
    myMap = new LinkedHashMap<String, LocalChangeList>();
    myIdx = new ChangeListsIndexes(worker.myIdx);
    myLocallyDeleted = worker.myLocallyDeleted.copy();
    mySwitchedHolder = worker.mySwitchedHolder.copy();
    myDelta = worker.myDelta;
    myListsToDisappear = new LinkedList<String>(worker.myListsToDisappear);

    LocalChangeList defaultList = null;
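    // deep-copy every change list, remembering which copy is the default one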
    for (LocalChangeList changeList : worker.myMap.values()) {
      final LocalChangeList copy = changeList.copy();

      final String changeListName = copy.getName();
      myMap.put(changeListName, copy);
      if (copy.isDefault()) {
        defaultList = copy;
      }
    }
    if (defaultList == null) {
      LOG.info("default list not found during copy");
      defaultList = myMap.get(worker.getDefaultListName());
    }

    if (defaultList == null) {
      LOG.info("default list not found during copy in the original object either");
      if (!myMap.isEmpty()) {
        defaultList = myMap.values().iterator().next();
      } else {
        // can happen when there's no VCS configured
        /// LOG.error("no changelists at all");
      }
    }
    myDefault = defaultList;
  }
 private void showVFInfo(VirtualFile virtualFile) {
   logger.info("showVFInfo(): XML");
    logger.info("virtualFile: isValid=" + virtualFile.isValid());
    logger.info("virtualFile: isSymLink=" + virtualFile.isSymLink());
    logger.info("virtualFile: url=" + virtualFile.getUrl());
    logger.info("virtualFile: fileType=" + virtualFile.getFileType());
 }
  private boolean merge(boolean mergeDialogInvokedFromNotification) {
    try {
      final Collection<VirtualFile> initiallyUnmergedFiles = getUnmergedFiles(myRoots);
      if (initiallyUnmergedFiles.isEmpty()) {
        LOG.info("merge: no unmerged files");
        return mergeDialogInvokedFromNotification ? true : proceedIfNothingToMerge();
      } else {
        showMergeDialog(initiallyUnmergedFiles);

        final Collection<VirtualFile> unmergedFilesAfterResolve = getUnmergedFiles(myRoots);
        if (unmergedFilesAfterResolve.isEmpty()) {
          LOG.info("merge no more unmerged files");
          return mergeDialogInvokedFromNotification ? true : proceedAfterAllMerged();
        } else {
          LOG.info("mergeFiles unmerged files remain: " + unmergedFilesAfterResolve);
          if (mergeDialogInvokedFromNotification) {
            notifyUnresolvedRemainAfterNotification();
          } else {
            notifyUnresolvedRemain();
          }
        }
      }
    } catch (VcsException e) {
      if (myVcs.getExecutableValidator().checkExecutableAndNotifyIfNeeded()) {
        notifyException(e);
      }
    }
    return false;
  }
  /**
   * Check if rebase is in progress, propose to resolve conflicts.
   *
   * @return true if rebase is in progress, which means that update can't continue.
   */
  private boolean checkRebaseInProgress() {
    LOG.info("checkRebaseInProgress: checking if there is an unfinished rebase process...");
    final GitRebaser rebaser = new GitRebaser(myProject, myGit, myProgressIndicator);
    final Collection<VirtualFile> rebasingRoots = rebaser.getRebasingRoots();
    if (rebasingRoots.isEmpty()) {
      return false;
    }
    LOG.info("checkRebaseInProgress: roots with unfinished rebase: " + rebasingRoots);

    GitConflictResolver.Params params = new GitConflictResolver.Params();
    params.setErrorNotificationTitle("Can't update");
    params.setMergeDescription(
        "You have an unfinished rebase process. These conflicts must be resolved before update.");
    params.setErrorNotificationAdditionalDescription(
        "Then you may <b>continue rebase</b>.<br/>You may also <b>abort rebase</b> to restore the original branch and stop rebasing.");
    params.setReverse(true);
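    // the update stays blocked (return true) unless all conflicts are resolved and the rebase is continued successfully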
    return !new GitConflictResolver(myProject, myGit, rebasingRoots, params) {
      @Override
      protected boolean proceedIfNothingToMerge() {
        return rebaser.continueRebase(rebasingRoots);
      }

      @Override
      protected boolean proceedAfterAllMerged() {
        return rebaser.continueRebase(rebasingRoots);
      }
    }.merge();
  }
  /**
   * Tries to execute {@code git merge --ff-only}.
   *
   * @return true, if everything is successful; false for any error (to let a usual "fair" update
   *     deal with it).
   */
  public boolean fastForwardMerge() {
    LOG.info("Trying fast-forward merge for " + myRoot);
    GitRepository repository = GitUtil.getRepositoryManager(myProject).getRepositoryForRoot(myRoot);
    if (repository == null) {
      LOG.error("Repository is null for " + myRoot);
      return false;
    }
    try {
      markStart(myRoot);
    } catch (VcsException e) {
      LOG.info("Couldn't mark start for repository " + myRoot, e);
      return false;
    }

    GitCommandResult result =
        myGit.merge(repository, getRemoteBranchToMerge(), Collections.singletonList("--ff-only"));

    try {
      markEnd(myRoot);
    } catch (VcsException e) {
      // this is not critical, and update has already happened,
      // so we just notify the user about problems with collecting the updated changes.
      LOG.info("Couldn't mark end for repository " + myRoot, e);
      Notificator.getInstance(myProject)
          .notifyWeakWarning(
              "Couldn't collect the updated files info",
              String.format(
                  "Update of %s was successful, but we couldn't collect the updated changes because of an error",
                  myRoot),
              null);
    }
    return result.success();
  }
  private void checkProjectRoots() {
    LocalFileSystem fs = LocalFileSystem.getInstance();
    if (!(fs instanceof LocalFileSystemImpl)) return;
    FileWatcher watcher = ((LocalFileSystemImpl) fs).getFileWatcher();
    if (!watcher.isOperational()) return;
    Collection<String> manualWatchRoots = watcher.getManualWatchRoots();
    if (manualWatchRoots.isEmpty()) return;
    VirtualFile[] roots = ProjectRootManager.getInstance(myProject).getContentRoots();
    if (roots.length == 0) return;

    List<String> nonWatched = new SmartList<String>();
    for (VirtualFile root : roots) {
      if (!(root.getFileSystem() instanceof LocalFileSystem)) continue;
      String rootPath = root.getPath();
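      // content roots lying under a manually watched root are not covered by the native watcher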
      for (String manualWatchRoot : manualWatchRoots) {
        if (FileUtil.isAncestor(manualWatchRoot, rootPath, false)) {
          nonWatched.add(rootPath);
        }
      }
    }

    if (!nonWatched.isEmpty()) {
      String message = ApplicationBundle.message("watcher.non.watchable.project");
      watcher.notifyOnFailure(message, null);
      LOG.info("unwatched roots: " + nonWatched);
      LOG.info("manual watches: " + manualWatchRoots);
    }
  }
  @Override
  public void contentsChanged(VirtualFileEvent event) {
    if (event.isFromSave()) return;
    final VirtualFile file = event.getFile();
    final Document document = getCachedDocument(file);
    if (document == null) {
      myMultiCaster.fileWithNoDocumentChanged(file);
      return;
    }

    if (isBinaryWithDecompiler(file)) {
      myMultiCaster.fileWithNoDocumentChanged(
          file); // This will generate PSI event at FileManagerImpl
    }

    long documentStamp = document.getModificationStamp();
    long oldFileStamp = event.getOldModificationStamp();
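    // a differing stamp means the document has unsaved in-memory changes that conflict with the external change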
    if (documentStamp != oldFileStamp) {
      LOG.info("reload from disk?");
      LOG.info("  documentStamp:" + documentStamp);
      LOG.info("  oldFileStamp:" + oldFileStamp);

      if (file.isValid() && askReloadFromDisk(file, document)) {
        reloadFromDisk(document);
      }
    } else {
      reloadFromDisk(document);
    }
  }
  public void initComponent() {
    log.info("Initializing WakaTime plugin v" + VERSION + " (https://wakatime.com/)");
    // System.out.println("Initializing WakaTime plugin v" + VERSION + " (https://wakatime.com/)");

    // Set runtime constants
    IDE_NAME = PlatformUtils.getPlatformPrefix();
    IDE_VERSION = ApplicationInfo.getInstance().getFullVersion();

    if (!Dependencies.isCLIInstalled()) {
      log.info("Downloading and installing wakatime-cli ...");
      Dependencies.installCLI();
    } else if (Dependencies.isCLIOld()) {
      log.info("Upgrading wakatime-cli ...");
      Dependencies.upgradeCLI();
    }

    if (Dependencies.isPythonInstalled()) {

      WakaTime.DEBUG = WakaTime.isDebugEnabled();
      if (WakaTime.DEBUG) {
        log.setLevel(Level.DEBUG);
        log.debug("Logging level set to DEBUG");
      }

      log.debug("Python location: " + Dependencies.getPythonLocation());
      log.debug("CLI location: " + Dependencies.getCLILocation());

      // prompt for apiKey if it does not already exist
      if (ApiKey.getApiKey().equals("")) {
        Project project = ProjectManager.getInstance().getDefaultProject();
        ApiKey apiKey = new ApiKey(project);
        apiKey.promptForApiKey();
      }
      log.debug("Api Key: " + ApiKey.getApiKey());

      // add WakaTime item to File menu
      ActionManager am = ActionManager.getInstance();
      PluginMenu action = new PluginMenu();
      am.registerAction("WakaTimeApiKey", action);
      DefaultActionGroup fileMenu = (DefaultActionGroup) am.getAction("FileMenu");
      fileMenu.addSeparator();
      fileMenu.add(action);

      // Setup message listeners
      MessageBus bus = ApplicationManager.getApplication().getMessageBus();
      connection = bus.connect();
      connection.subscribe(AppTopics.FILE_DOCUMENT_SYNC, new CustomSaveListener());
      EditorFactory.getInstance()
          .getEventMulticaster()
          .addDocumentListener(new CustomDocumentListener());

      log.debug("Finished initializing WakaTime plugin");

    } else {
      Messages.showErrorDialog(
          "WakaTime requires Python to be installed.\nYou can install it from https://www.python.org/downloads/\nAfter installing Python, restart your IDE.",
          "Error");
    }
  }
 private synchronized void performShutdown() {
   if (!myShutDown) {
     myShutDown = true;
     LOG.info("VFS dispose started");
     FSRecords.dispose();
     LOG.info("VFS dispose completed");
   }
 }
 private static void joinThread(final Future<?> threadFuture) {
   if (threadFuture != null) {
     try {
       threadFuture.get();
      } catch (InterruptedException e) {
        LOG.info("Thread interrupted", e);
      } catch (ExecutionException e) {
        LOG.info("Thread execution failed", e);
     }
   }
 }
 private void parseXmlForSql(VirtualFile xmlFile) {
   final AnnoRefSettings annoRefState =
       AnnoRefConfigSettings.getInstance(project).getAnnoRefState();
   if (annoRefState.ENABLE_SQL_TO_MODEL_VALIDATION) {
     logger.info(AnnoRefBundle.message("annoRef.xml.parse.sql.validation.enabled"));
     final XmlBuilderDriver xmlBuilderDriver =
         new XmlBuilderDriver(
             AnnRefApplication.getPsiFileFromVirtualFile(xmlFile, project).getText());
     xmlBuilderDriver.build(new AnnoRefXmlVisitor(project, xmlFile));
    } else {
      logger.info(AnnoRefBundle.message("annoRef.xml.parse..sql.validation.disabled"));
    }
 }
  public Collection<String> getFilteredGlobalsCache() {
    try {
      return filteredGlobalsCache.get(1000, TimeUnit.MILLISECONDS);
    } catch (InterruptedException e) {
      LOG.info("exception creating globals cache", e);
    } catch (ExecutionException e) {
      LOG.info("exception creating globals cache", e);
    } catch (TimeoutException e) {
      LOG.info("The global cache is still processing");
    }

    return new ArrayList<String>();
  }
 @Override
 public void log(final SVNLogType logType, final String message, final byte[] data) {
   if (shouldLog(logType)) {
     if (data != null) {
       try {
         myLog.info(message + "\n" + new String(data, "UTF-8"));
       } catch (UnsupportedEncodingException e) {
         myLog.info(message + "\n" + new String(data));
       }
     } else {
       myLog.info(message);
     }
   }
 }
  public static void updateDependencies(
      CompileContext context,
      List<File> toCompile,
      Map<ModuleBuildTarget, Collection<GroovycOutputParser.OutputItem>> successfullyCompiled,
      OutputConsumer outputConsumer,
      Builder builder)
      throws IOException {
    JavaBuilderUtil.registerFilesToCompile(context, toCompile);
    if (!successfullyCompiled.isEmpty()) {

      final Callbacks.Backend callback = JavaBuilderUtil.getDependenciesRegistrar(context);

      for (Map.Entry<ModuleBuildTarget, Collection<GroovycOutputParser.OutputItem>> entry :
          successfullyCompiled.entrySet()) {
        final ModuleBuildTarget target = entry.getKey();
        final Collection<GroovycOutputParser.OutputItem> compiled = entry.getValue();
        for (GroovycOutputParser.OutputItem item : compiled) {
          final String sourcePath = FileUtil.toSystemIndependentName(item.sourcePath);
          final String outputPath = FileUtil.toSystemIndependentName(item.outputPath);
          final File outputFile = new File(outputPath);
          final File srcFile = new File(sourcePath);
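          // register the compiled class with the output consumer and feed its bytecode to the dependency analyzer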
          try {
            final byte[] bytes = FileUtil.loadFileBytes(outputFile);
            if (Utils.IS_TEST_MODE || LOG.isDebugEnabled()) {
              LOG.info("registerCompiledClass " + outputFile + " from " + srcFile);
            }
            outputConsumer.registerCompiledClass(
                target,
                new CompiledClass(
                    outputFile, srcFile, readClassName(bytes), new BinaryContent(bytes)));
            callback.associate(outputPath, sourcePath, new FailSafeClassReader(bytes));
          } catch (Throwable e) {
            // need this to make sure that unexpected errors in, for example, ASM will not ruin the
            // compilation
            final String message =
                "Class dependency information may be incomplete! Error parsing generated class "
                    + item.outputPath;
            LOG.info(message, e);
            context.processMessage(
                new CompilerMessage(
                    builder.getPresentableName(),
                    BuildMessage.Kind.WARNING,
                    message + "\n" + CompilerMessage.getTextFromThrowable(e),
                    sourcePath));
          }
          JavaBuilderUtil.registerSuccessfullyCompiled(context, srcFile);
        }
      }
    }
  }
  private void compact(final String path) {
    synchronized (myLock) {
      LOG.info(
          "Space waste in " + path + " is " + myDataTable.getWaste() + " bytes. Compacting now.");
      long start = System.currentTimeMillis();

      try {
        File newDataFile = new File(path + ".storageData.backup");
        FileUtil.delete(newDataFile);
        FileUtil.createIfDoesntExist(newDataFile);

        File oldDataFile = new File(path + DATA_EXTENSION);
        DataTable newDataTable = new DataTable(newDataFile, myPool);

        final int count = myRecordsTable.getRecordsCount();
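        // copy every live record (size > 0) into the new data table at a freshly allocated address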
        for (int i = 1; i <= count; i++) {
          final long addr = myRecordsTable.getAddress(i);
          final int size = myRecordsTable.getSize(i);

          if (size > 0) {
            assert addr > 0;

            final int capacity = myCapacityAllocationPolicy.calculateCapacity(size);
            final long newaddr = newDataTable.allocateSpace(capacity);
            final byte[] bytes = new byte[size];
            myDataTable.readBytes(addr, bytes);
            newDataTable.writeBytes(newaddr, bytes);
            myRecordsTable.setAddress(i, newaddr);
            myRecordsTable.setCapacity(i, capacity);
          }
        }

        myDataTable.dispose();
        newDataTable.dispose();

        if (!FileUtil.delete(oldDataFile)) {
          throw new IOException("Can't delete file: " + oldDataFile);
        }

        newDataFile.renameTo(oldDataFile);
        myDataTable = new DataTable(oldDataFile, myPool);
      } catch (IOException e) {
        LOG.info("Compact failed: " + e.getMessage());
      }

      long timedelta = System.currentTimeMillis() - start;
      LOG.info("Done compacting in " + timedelta + "msec.");
    }
  }
 /**
  * Check if merge is in progress, propose to resolve conflicts.
  *
  * @return true if merge is in progress, which means that update can't continue.
  */
 private boolean isMergeInProgress() {
   LOG.info("isMergeInProgress: checking if there is an unfinished merge process...");
   final Collection<VirtualFile> mergingRoots = myMerger.getMergingRoots();
   if (mergingRoots.isEmpty()) {
     return false;
   }
   LOG.info("isMergeInProgress: roots with unfinished merge: " + mergingRoots);
   GitConflictResolver.Params params = new GitConflictResolver.Params();
   params.setErrorNotificationTitle("Can't update");
    params.setMergeDescription(
        "You have an unfinished merge. These conflicts must be resolved before update.");
   return !new GitMergeCommittingConflictResolver(
           myProject, myGit, myMerger, mergingRoots, params, false)
       .merge();
 }
  private void recycleChangeList(
      final ShelvedChangeList listCopy, final ShelvedChangeList newList) {
    if (newList != null) {
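      // drop from the copy every binary file and change that is still present in the new list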
      for (Iterator<ShelvedBinaryFile> shelvedChangeListIterator =
              listCopy.getBinaryFiles().iterator();
          shelvedChangeListIterator.hasNext(); ) {
        final ShelvedBinaryFile binaryFile = shelvedChangeListIterator.next();
        for (ShelvedBinaryFile newBinary : newList.getBinaryFiles()) {
          if (Comparing.equal(newBinary.BEFORE_PATH, binaryFile.BEFORE_PATH)
              && Comparing.equal(newBinary.AFTER_PATH, binaryFile.AFTER_PATH)) {
            shelvedChangeListIterator.remove();
          }
        }
      }
      for (Iterator<ShelvedChange> iterator = listCopy.getChanges(myProject).iterator();
          iterator.hasNext(); ) {
        final ShelvedChange change = iterator.next();
        for (ShelvedChange newChange : newList.getChanges(myProject)) {
          if (Comparing.equal(change.getBeforePath(), newChange.getBeforePath())
              && Comparing.equal(change.getAfterPath(), newChange.getAfterPath())) {
            iterator.remove();
          }
        }
      }

      // needed only if partial unshelve
      try {
        final CommitContext commitContext = new CommitContext();
        final List<FilePatch> patches = new ArrayList<FilePatch>();
        for (ShelvedChange change : listCopy.getChanges(myProject)) {
          patches.add(change.loadFilePatch(myProject, commitContext));
        }
        writePatchesToFile(myProject, listCopy.PATH, patches, commitContext);
      } catch (IOException e) {
        LOG.info(e);
        // leave the file as is
      } catch (PatchSyntaxException e) {
        LOG.info(e);
        // leave the file as is
      }
    }

    if ((!listCopy.getBinaryFiles().isEmpty()) || (!listCopy.getChanges(myProject).isEmpty())) {
      listCopy.setRecycled(true);
      myRecycledShelvedChangeLists.add(listCopy);
      notifyStateChanged();
    }
  }
    @Override
    public void run(@NotNull ProgressIndicator indicator) {
      try {
        SAXTransformerFactory transformerFactory =
            (SAXTransformerFactory) TransformerFactory.newInstance();
        TransformerHandler handler = transformerFactory.newTransformerHandler();
        handler.getTransformer().setOutputProperty(OutputKeys.INDENT, "yes");
        handler
            .getTransformer()
            .setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4");

        final String configurationNameIncludedDate =
            PathUtil.suggestFileName(myConfiguration.getName())
                + " - "
                + new SimpleDateFormat(HISTORY_DATE_FORMAT).format(new Date());

        myOutputFile =
            new File(
                AbstractImportTestsAction.getTestHistoryRoot(myProject),
                configurationNameIncludedDate + ".xml");
        FileUtilRt.createParentDirs(myOutputFile);
        handler.setResult(new StreamResult(new FileWriter(myOutputFile)));
        final SMTestProxy.SMRootTestProxy root = myRoot;
        final RunConfiguration configuration = myConfiguration;
        if (root != null && configuration != null) {
          TestResultsXmlFormatter.execute(root, configuration, myConsoleProperties, handler);
        }
      } catch (ProcessCanceledException e) {
        throw e;
      } catch (Exception e) {
        LOG.info("Export to history failed", e);
      }
    }
  // relativePaths are guaranteed to fit into command line length limitations.
  @Override
  @NotNull
  public Collection<VirtualFile> untrackedFilesNoChunk(
      @NotNull Project project, @NotNull VirtualFile root, @Nullable List<String> relativePaths)
      throws VcsException {
    final Set<VirtualFile> untrackedFiles = new HashSet<VirtualFile>();
    GitSimpleHandler h = new GitSimpleHandler(project, root, GitCommand.LS_FILES);
    h.setNoSSH(true);
    h.setSilent(true);
    h.addParameters("--exclude-standard", "--others", "-z");
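    // "--others" lists untracked files, "--exclude-standard" honors the usual ignore files,
    // and "-z" makes the output NUL-separated for safe parsing below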
    h.endOptions();
    if (relativePaths != null) {
      h.addParameters(relativePaths);
    }

    final String output = h.run();
    if (StringUtil.isEmptyOrSpaces(output)) {
      return untrackedFiles;
    }

    for (String relPath : output.split("\u0000")) {
      VirtualFile f = root.findFileByRelativePath(relPath);
      if (f == null) {
        // the file was created on disk, but its VirtualFile hasn't been created yet
        // by the time the GitChangeProvider was asked about changes.
        LOG.info(String.format("VirtualFile for path [%s] is null", relPath));
      } else {
        untrackedFiles.add(f);
      }
    }

    return untrackedFiles;
  }
 private static boolean waitComplete(ChannelFuture writeFuture, String failedMessage) {
   if (!writeFuture.awaitUninterruptibly(500) || !writeFuture.isSuccess()) {
     LOG.info("port check: " + failedMessage + ", " + writeFuture.isSuccess());
     return false;
   }
   return true;
 }