Example 1
  /**
   * Create a default popup menu for an internal link.
   * 
   * @param textPane Text pane.
   * @param position Position in the text.
   * @param pageAnalysis Page analysis.
   * @param link Internal link.
   * @return Popup menu.
   */
  protected JPopupMenu createDefaultPopupInternalLink(
      MWPane textPane, int position,
      PageAnalysis pageAnalysis,
      PageElementInternalLink link) {
    if (link == null) {
      return null;
    }

    // Initialization
    Page page = DataManager.getPage(wikipedia, link.getLink(), null, null, null);

    // Menu creation
    BasicMenuCreator menu = new BasicMenuCreator();
    JPopupMenu popup = menu.createPopupMenu(GT._(
        "Page: {0}",
        limitTextLength(page.getTitle(), 50)));
    menu.addCurrentChapter(popup, position, pageAnalysis);
    menu.addSeparator(popup);
    menu.addView(wikipedia, popup, page, false);
    menu.addAnalyze(wikipedia, popup, page);
    menu.addDisambiguation(wikipedia, popup, page);
    menu.addSeparator(popup);
    menu.addItemRemoveLink(
        popup, link.getDisplayedText(),
        textPane, link.getBeginIndex(), link.getEndIndex());

    return popup;
  }
Example 2
  /**
   * Retrieve disambiguation information for a list of pages.
   * 
   * @param wikipedia Wikipedia.
   * @param pageList List of pages.
   * @param knownPages Already known pages.
   * @param disambiguations Flag indicating if possible disambiguations should be retrieved.
   * @param forceApiCall True if API call should be forced even if the list of disambiguation pages is loaded.
   * @param block Flag indicating if the call should block until completed.
   * @throws APIException
   */
  public void retrieveDisambiguationInformation(
      EnumWikipedia wikipedia,
      List<Page> pageList, List<Page> knownPages,
      boolean disambiguations, boolean forceApiCall, boolean block)
          throws APIException {
    if ((pageList == null) || (pageList.isEmpty())) {
      return;
    }
    final API api = APIFactory.getAPI();

    // Retrieving disambiguation status
    final int maxPages = api.getMaxPagesPerQuery();
    List<Page> filteredList = pageList;
    if (knownPages != null) {
      filteredList = new ArrayList<Page>(pageList);
      filteredList.removeAll(knownPages);
    }
    if (filteredList.size() <= maxPages) {
      addTask(new DisambiguationStatusCallable(wikipedia, this, api, filteredList, forceApiCall));
    } else {
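      // Split the filtered list into batches of at most maxPages pages, one query task per batch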
      int index = 0;
      while (index < filteredList.size()) {
        List<Page> tmpList = new ArrayList<Page>(maxPages);
        for (int i = 0; (i < maxPages) && (index < filteredList.size()); i++, index++) {
          tmpList.add(filteredList.get(index));
        }
        addTask(new DisambiguationStatusCallable(wikipedia, this, api, tmpList, forceApiCall));
      }
    }
    block(true);

    // Retrieving possible disambiguations
    if (disambiguations) {
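      // For each page, walk its redirect chain and load the links of disambiguation pages that have none yet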
      for (Page p : pageList) {
        Iterator<Page> iter = p.getRedirectIteratorWithPage();
        while (iter.hasNext()) {
          p = iter.next();
          if ((Boolean.TRUE.equals(p.isDisambiguationPage())) &&
              (!p.isRedirect())) {
            List<Page> links = p.getLinks();
            if ((links == null) || (links.size() == 0)) {
              addTask(new LinksWRCallable(wikipedia, this, api, p, null, null, false));
            }
          }
        }
      }
    }
    block(block);
  }
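A minimal caller sketch (not part of the original example): mediaWiki, wiki and pages are assumed names for an instance of the enclosing controller class, the target EnumWikipedia and the list of pages to check.

    // Hypothetical usage: retrieve disambiguation status only, blocking until every query has completed
    mediaWiki.retrieveDisambiguationInformation(wiki, pages, null, false, false, true);
    // Same call, additionally loading the links of the disambiguation pages that were found
    mediaWiki.retrieveDisambiguationInformation(wiki, pages, null, true, false, true);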
Example 3
 /**
  * Retrieve the list of pages in error.
  * 
  * @param wiki Wiki.
  * @param limit Maximum number of pages to retrieve.
  * @return List of pages in error.
  */
 @Override
 public List<Page> getSpecialList(EnumWikipedia wiki, int limit) {
   List<Page> result = null;
   String categoryName = getTrackingCategory();
   if (categoryName != null) {
     API api = APIFactory.getAPI();
     String title = wiki.getWikiConfiguration().getPageTitle(Namespace.CATEGORY, categoryName);
     Page category = DataManager.getPage(wiki, title, null, null, null);
     try {
       api.retrieveCategoryMembers(wiki, category, 0, false, limit);
       result = category.getRelatedPages(RelatedPages.CATEGORY_MEMBERS);
     } catch (APIException e) {
       // Nothing to report: leave the result null if the category members cannot be retrieved
     }
   }
   return result;
 }
Example 4
  /**
   * @return Window components.
   */
  @Override
  protected Component createComponents() {
    JPanel panel = new JPanel(new GridBagLayout());

    // Initialize constraints
    GridBagConstraints constraints = new GridBagConstraints();
    constraints.fill = GridBagConstraints.HORIZONTAL;
    constraints.gridheight = 1;
    constraints.gridwidth = 1;
    constraints.gridx = 0;
    constraints.gridy = 0;
    constraints.insets = new Insets(0, 0, 0, 0);
    constraints.ipadx = 0;
    constraints.ipady = 0;
    constraints.weightx = 1;
    constraints.weighty = 0;

    // Label
    JLabel label = Utilities.createJLabel(GT._(
        "Templates used in {0}, linking to {1}",
        new Object[] { page.getTitle(), link.getTitle() }));
    panel.add(label, constraints);
    constraints.gridy++;

    // Model for the list of links
    modelLinks = new PageListModel();
    modelLinks.setShowDisambiguation(true);
    modelLinks.setShowOther(true);

    // Links
    constraints.fill = GridBagConstraints.BOTH;
    constraints.weighty = 1;
    listLinks = new JList<Page>(modelLinks);
    listLinks.setCellRenderer(new PageListCellRenderer());
    listLinks.addMouseListener(new BasicPageListPopupListener(getWikipedia(), null, listLinks, this));
    listLinks.addMouseListener(new PageListAnalyzeListener(getWikipedia(), null));
    JScrollPane scrollLinks = new JScrollPane(listLinks);
    scrollLinks.setMinimumSize(new Dimension(100, 100));
    scrollLinks.setPreferredSize(new Dimension(200, 500));
    scrollLinks.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS);
    panel.add(scrollLinks, constraints);
    constraints.gridy++;

    return panel;
  }
Example 5
 /* (non-Javadoc)
  * @see org.wikipediacleaner.gui.swing.utils.SwingWorker#construct()
  */
 @Override
 public Object construct() {
   try {
     setText(GT._("Retrieving MediaWiki API"));
     API api = APIFactory.getAPI();
     setText(GT._("Retrieving templates"));
     api.retrieveTemplates(getWikipedia(), page1);
     setText(GT._("Retrieving links in templates"));
     api.retrieveLinks(getWikipedia(), page1.getTemplates());
     setText(GT._("Displaying templates found"));
     for (Page p : page1.getTemplates()) {
       boolean found = false;
       for (Page l : p.getLinks()) {
         if (link1.getTitle().equals(l.getTitle())) {
           found = true;
           break;
         }
       }
       if (found) {
         pages.add(p);
       }
     }
   } catch (APIException e) {
     return e;
   }
   return null;
 }
Example 6
  /**
   * Construct and show popup menu.
   * 
   * @param textPane Text pane.
   * @param position Position in text.
   * @param x Horizontal position for the popup.
   * @param y Vertical position for the popup.
   */
  private void showPopup(MWPane textPane, int position, int x, int y) {

    // Basic checks
    if (textPane == null) {
      return;
    }

    // Create popup menu
    Page originalPage = textPane.getWikiPage();
    PageAnalysis pageAnalysis = originalPage.getAnalysis(textPane.getText(), true);
    JPopupMenu popup = createPopup(textPane, position, pageAnalysis);
    if (popup == null) {
      popup = createDefaultPopup(textPane, position, pageAnalysis);
    }

    // Display popup menu
    if (popup != null) {
      popup.show(textPane, x, y);
    }
  }
Example 7
  /**
   * Update page information.
   *
   * @param node Element for the page.
   * @param page Page.
   * @throws JDOMException
   */
  public void updatePageInformation(Element node, Page page) throws JDOMException {

    // Retrieve basic page information
    Attribute attrPageId = node.getAttribute("pageid");
    if (attrPageId != null) {
      page.setPageId(attrPageId.getValue());
    }
    Attribute attrTitle = node.getAttribute("title");
    if (attrTitle != null) {
      page.setTitle(attrTitle.getValue());
    }
    page.setStartTimestamp(node.getAttributeValue("starttimestamp"));
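    // For 'redirect' and 'missing', the presence of the attribute alone carries the information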
    Attribute attrRedirect = node.getAttribute("redirect");
    if (attrRedirect != null) {
      page.isRedirect(true);
    }
    Attribute attrMissing = node.getAttribute("missing");
    if (attrMissing != null) {
      page.setExisting(Boolean.FALSE);
    }

    // Retrieve protection information
    XPath xpaProtection = XPath.newInstance("protection/pr[@type=\"edit\"]");
    Element protectionNode = (Element) xpaProtection.selectSingleNode(node);
    if (protectionNode != null) {
      XPath xpaLevel = XPath.newInstance("./@level");
      page.setEditProtectionLevel(xpaLevel.valueOf(protectionNode));
    }
  }
Example 8
 /**
  * Delete one page.
  *
  * @param page Page to be deleted.
  * @param reason Reason for deletion.
  * @param automatic True if the modification is automatic.
  * @throws APIException
  */
 public void deletePage(Page page, String reason, boolean automatic) throws APIException {
   Map<String, String> properties = getProperties(ACTION_DELETE, result.getFormat());
   if (reason != null) {
     properties.put(PROPERTY_REASON, reason);
   }
   properties.put(PROPERTY_TITLE, page.getTitle());
   properties.put(PROPERTY_TOKEN, getWiki().getConnection().getDeleteToken());
   CommentDecorator decorator = getWiki().getCommentDecorator();
   if (decorator != null) {
     decorator.manageComment(properties, PROPERTY_REASON, "tags", automatic);
   }
   result.executeDelete(properties);
 }
Example 9
 /**
  * Retrieve, for a list of pages, all the pages in which they are embedded.
  * 
  * @param wikipedia Wikipedia.
  * @param pageList List of pages.
  * @param namespaces List of name spaces to look into.
  * @param limit Flag indicating if the number of results should be limited.
  * @return List of pages in which the given pages are embedded.
  * @throws APIException
  */
 @SuppressWarnings("unchecked")
 public List<Page> retrieveAllEmbeddedIn(
     EnumWikipedia wikipedia, List<Page> pageList,
     List<Integer> namespaces,
     boolean limit) throws APIException {
   if ((pageList == null) || (pageList.size() == 0)) {
     return null;
   }
   final API api = APIFactory.getAPI();
   for (final Page page : pageList) {
     addTask(new EmbeddedInCallable(wikipedia, this, api, page, namespaces, limit));
   }
   List<Page> resultList = new ArrayList<Page>();
   while (hasRemainingTask() && !shouldStop()) {
     Object result = getNextResult();
     if (result instanceof List<?>) {
       List<Page> pageResult = (List<Page>) result;
       for (Page page : pageResult) {
         resultList.add(page);
       }
     }
   }
   Collections.sort(resultList);
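   // After sorting by title, remove consecutive duplicates so each page appears only once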
   Iterator<Page> itPage = resultList.iterator();
   Page previousPage = null;
   while (itPage.hasNext()) {
     Page page = itPage.next();
     if ((previousPage != null) &&
         (Page.areSameTitle(previousPage.getTitle(), page.getTitle()))) {
       itPage.remove();
     } else {
       previousPage = page;
     }
   }
   return resultList;
 }
Example 10
  /**
   * Replace text in a list of pages.
   * 
   * @param pages List of pages.
   * @param replacements List of text replacements
   *        Key: Additional comments used for the modification.
   *        Value: Text replacements.
   * @param wiki Wiki.
   * @param comment Comment used for the modification.
   * @param description (Out) description of changes made.
   * @param automaticCW True if automatic Check Wiki fixing should be done also.
   * @param save True if modification should be saved.
   * @param updateDabWarning True if disambiguation warnings should be updated.
   * @return Count of modified pages.
   * @throws APIException
   */
  public int replaceText(
      Page[] pages, Map<String, List<AutomaticFixing>> replacements,
      EnumWikipedia wiki, String comment,
      StringBuilder description,
      boolean automaticCW, boolean save,
      boolean updateDabWarning) throws APIException {
    if ((pages == null) || (replacements == null) || (replacements.size() == 0)) {
      return 0;
    }
    UpdateDabWarningTools dabWarnings = new UpdateDabWarningTools(wiki, null, false, false);
    for (Page page : pages) {
      retrieveContents(wiki, page, false, true, false, true, false); // TODO: withRedirects=false ?
    }
    int count = 0;
    final API api = APIFactory.getAPI();
    StringBuilder details = new StringBuilder();
    Configuration config = Configuration.getConfiguration();
    boolean secured = config.getBoolean(null, ConfigurationValueBoolean.SECURE_URL);
    while (hasRemainingTask() && !shouldStop()) {
      Object result = getNextResult();
      if ((result != null) && (result instanceof Page)) {
        boolean changed = false;
        List<String> replacementsDone = new ArrayList<String>();
        Page page = (Page) result;
        String oldContents = page.getContents();
        if (oldContents != null) {
          String newContents = oldContents;
          details.setLength(0);
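          // Apply each replacement group in turn; the map key is an additional comment recorded for the edit summary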
          for (Entry<String, List<AutomaticFixing>> replacement : replacements.entrySet()) {
            replacementsDone.clear();
            String tmpContents = AutomaticFixing.apply(replacement.getValue(), newContents, replacementsDone);
            if (!newContents.equals(tmpContents)) {
              newContents = tmpContents;

              // Update description
              if (description != null) {
                if (!changed) {
                  String title =
                    "<a href=\"" + wiki.getSettings().getURL(page.getTitle(), false, secured) + "\">" +
                    page.getTitle() + "</a>";
                  description.append(GT._("Page {0}:", title));
                  description.append("\n");
                  description.append("<ul>\n");
                  changed = true;
                }
                for (String replacementDone : replacementsDone) {
                  description.append("<li>");
                  description.append(replacementDone);
                  description.append("</li>\n");
                }
              }

              // Memorize replacement
              if ((replacement.getKey() != null) && (replacement.getKey().length() > 0)) {
                if (details.length() > 0) {
                  details.append(", ");
                }
                details.append(replacement.getKey());
              }
            }
          }

          // Page contents have been modified
          if (!oldContents.equals(newContents)) {
            // Initialize comment
            StringBuilder fullComment = new StringBuilder();
            fullComment.append(wiki.createUpdatePageComment(comment, details.toString()));

            // Apply automatic Check Wiki fixing
            if (automaticCW) {
              List<CheckErrorAlgorithm> algorithms = CheckErrorAlgorithms.getAlgorithms(wiki);
              List<CheckErrorAlgorithm> usedAlgorithms = new ArrayList<CheckErrorAlgorithm>();
              newContents = AutomaticFormatter.tidyArticle(
                  page, newContents, algorithms, false, usedAlgorithms);
              if (!usedAlgorithms.isEmpty()) {
                fullComment.append(" / ");
                fullComment.append(wiki.getCWConfiguration().getComment(usedAlgorithms));
                if (description != null) {
                  for (CheckErrorAlgorithm algorithm : usedAlgorithms) {
                    description.append("<li>");
                    description.append(algorithm.getShortDescriptionReplaced());
                    description.append("</li>\n");
                  }
                }
              }
            }
            if ((description != null) && (changed)) {
              description.append("</ul>\n");
            }

            // Save page
            setText(GT._("Updating page {0}", page.getTitle()));
            count++;
            if (save) {
              api.updatePage(wiki, page, newContents, fullComment.toString(), false, false);
              if (updateDabWarning) {
                dabWarnings.updateWarning(
                    Collections.singletonList(page), null, null, null);
              }
            }
          }
        }
      }
    }
    block(true);
    return count;
  }
Example 11
 /* (non-Javadoc)
  * @see org.wikipediacleaner.gui.swing.basic.BasicWindow#getTitle()
  */
 @Override
 public String getTitle() {
   return GT._("Templates in {0}", link.getTitle());
 }
Example 12
  /**
   * Update redirect and missing information of a list of pages.
   *
   * @param root Root element.
   * @param pages List of pages.
   * @throws JDOMException
   */
  public void updateRedirect(Element root, Collection<Page> pages) throws JDOMException {

    // Retrieving redirects
    XPath xpaRedirects = XPath.newInstance("/api/query/redirects/r");
    List listRedirects = xpaRedirects.selectNodes(root);
    XPath xpaFrom = XPath.newInstance("./@from");
    XPath xpaTo = XPath.newInstance("./@to");

    // Retrieving pages
    XPath xpaPages = XPath.newInstance("/api/query/pages");
    Element listPages = (Element) xpaPages.selectSingleNode(root);
    XPath xpaPageId = XPath.newInstance("./@pageid");
    XPath xpaNamespace = XPath.newInstance("./@ns");
    XPath xpaTitle = XPath.newInstance("./@title");

    // Retrieving normalization information
    Map<String, String> normalization = new HashMap<String, String>();
    retrieveNormalization(root, normalization);

    // Analyzing redirects
    Iterator itRedirect = listRedirects.iterator();
    while (itRedirect.hasNext()) {
      Element currentRedirect = (Element) itRedirect.next();
      String fromPage = xpaFrom.valueOf(currentRedirect);
      String toPage = xpaTo.valueOf(currentRedirect);
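      // Attach the redirect target page to every page whose redirect chain contains the source title but not yet the target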
      for (Page p : pages) {

        // Find if the redirect is already taken into account
        boolean exists = false;
        Iterator<Page> itPage = p.getRedirectIteratorWithPage();
        while (itPage.hasNext()) {
          Page tmp = itPage.next();
          String title = getNormalizedTitle(tmp.getTitle(), normalization);
          if (Page.areSameTitle(title, toPage)) {
            exists = true;
          }
        }

        // Add the redirect if needed
        itPage = p.getRedirectIteratorWithPage();
        while (itPage.hasNext()) {
          Page tmp = itPage.next();
          String title = getNormalizedTitle(tmp.getTitle(), normalization);
          if (!exists && Page.areSameTitle(title, fromPage)) {
            XPath xpaPage = createXPath("page", "title", toPage);
            List listTo = xpaPage.selectNodes(listPages);
            if (!listTo.isEmpty()) {
              Element to = (Element) listTo.get(0);
              Page pageTo =
                  DataManager.getPage(p.getWikipedia(), xpaTitle.valueOf(to), null, null, null);
              pageTo.setNamespace(xpaNamespace.valueOf(to));
              pageTo.setPageId(xpaPageId.valueOf(to));
              p.addRedirect(pageTo);
            }
          }
        }
      }
    }

    // Analyzing missing pages
    for (Page p : pages) {
      Iterator<Page> itPage = p.getRedirectIteratorWithPage();
      while (itPage.hasNext()) {
        Page tmp = itPage.next();
        String title = getNormalizedTitle(tmp.getTitle(), normalization);
        XPath xpaPage = createXPath("page", "title", title);
        Element page = (Element) xpaPage.selectSingleNode(listPages);
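        // A 'pageid' attribute means the page exists; a bare 'missing' attribute means it does not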
        if (page != null) {
          List pageId = xpaPageId.selectNodes(page);
          if ((pageId != null) && (!pageId.isEmpty())) {
            tmp.setExisting(Boolean.TRUE);
          } else {
            Attribute attrMissing = page.getAttribute("missing");
            if (attrMissing != null) {
              tmp.setExisting(Boolean.FALSE);
            }
          }
        }
      }
    }
  }